Compare commits
77 commits: 1791fe612a...master
| SHA1 |
|---|
| 9fd3ae6563 |
| cde3361458 |
| f055011b86 |
| 16429a44b4 |
| fe0b7e88e6 |
| 28dd9da6ac |
| 02c16a2972 |
| 324427513c |
| c12b9b360a |
| cc06142050 |
| 6726cafff4 |
| 35312ea1ff |
| 721eee9c88 |
| 421f9d0293 |
| 7eb4e41708 |
| a2254bbe80 |
| 1b5bdadf10 |
| 882b29d7a1 |
| c60f6f511e |
| 07955286f1 |
| 258eeddc74 |
| b09e9e6aaa |
| 0df371adb7 |
| 8144986a48 |
| d2aab7369d |
| 946d4e8c2c |
| 0b5f25a263 |
| c669d66969 |
| 082e08e7f4 |
| a3fd03d32a |
| 5645645c5f |
| ac7d1efb8d |
| a676061b3e |
| 1c34cebbde |
| 22a2ce0559 |
| 313b1bf1ba |
| 66413f92e3 |
| 0fcbc6bbe9 |
| f1d8318500 |
| b2f327d48f |
| b772937354 |
| a3b9543d0e |
| d0a423df9f |
| 8600bf216a |
| bf270e4e87 |
| 760cb2344e |
| ed2c1038e6 |
| d4bdc677a9 |
| 9d58d961bd |
| 791b75b3af |
| a7f52fe0eb |
| 439353d318 |
| f245a68790 |
| 3fe0538468 |
| dbded94324 |
| 4301f1dbab |
| 9fdb778330 |
| 0300d7baf6 |
| ee1b1ae5e6 |
| b20f7cd1ad |
| eb8f0be004 |
| 68aa13e216 |
| 2a5c2b18e7 |
| e64086b7cf |
| 08946059ad |
| 10b17dc66b |
| 1ff8a311ae |
| 2686fa1d76 |
| 376d2b28d3 |
| a3609696c7 |
| b0c12acbc6 |
| 3dfcf2a536 |
| ecb56013a5 |
| 27b52f7fd1 |
| 234441e6a3 |
| 011f62829a |
| c34584a215 |
@@ -2,32 +2,4 @@
 recorder-playground = "run -p recorder --example playground -- --environment development"
 
 [build]
-rustflags = ["-Zthreads=8"]
+rustflags = ["-Zthreads=8", "-Zshare-generics=y"]
-
-[target.x86_64-unknown-linux-gnu]
-linker = "clang"
-rustflags = ["-Zthreads=8", "-Clink-arg=-fuse-ld=lld", "-Zshare-generics=y"]
-
-[target.x86_64-pc-windows-msvc]
-linker = "rust-lld.exe"
-rustflags = ["-Zthreads=8", "-Zshare-generics=n"]
-
-# NOTE: you must install [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on mac. you can easily do this by installing llvm which includes lld with the "brew" package manager:
-# `brew install llvm`
-#[target.x86_64-apple-darwin]
-#rustflags = [
-#  "-Zthreads=8",
-#  "-C",
-#  "link-arg=-fuse-ld=/usr/local/opt/llvm/bin/ld64.lld",
-#  "-Zshare-generics=y",
-#]
-
-# NOTE: you must install [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on mac. you can easily do this by installing llvm which includes lld with the "brew" package manager:
-# `brew install llvm`
-#[target.aarch64-apple-darwin]
-#rustflags = [
-#  "-Zthreads=8",
-#  "-C",
-#  "link-arg=-fuse-ld=/opt/homebrew/opt/llvm/bin/ld64.lld",
-#  "-Zshare-generics=y",
-#]
@@ -1,8 +0,0 @@
-FROM mcr.microsoft.com/vscode/devcontainers/rust:0-1
-
-RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
-    && apt-get -y install --no-install-recommends postgresql-client \
-    && cargo install sea-orm-cli cargo-insta \
-    && chown -R vscode /usr/local/cargo
-
-COPY .env /.env
@@ -1,7 +0,0 @@
-{
-  "name": "Konobangu Recorder",
-  "dockerComposeFile": "docker-compose.yml",
-  "service": "app",
-  "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
-  "forwardPorts": [5001]
-}
@@ -1,40 +0,0 @@
-version: "3"
-
-services:
-  app:
-    build:
-      context: .
-      dockerfile: Dockerfile
-    command: sleep infinity
-    networks:
-      - db
-      - redis
-    volumes:
-      - ../..:/workspaces:cached
-    env_file:
-      - .env
-  db:
-    image: postgres:15.3-alpine
-    restart: unless-stopped
-    ports:
-      - 5432:5432
-    networks:
-      - db
-    volumes:
-      - postgres-data:/var/lib/postgresql/data
-    env_file:
-      - .env
-  redis:
-    image: redis:latest
-    restart: unless-stopped
-    ports:
-      - 6379:6379
-    networks:
-      - redis
-
-volumes:
-  postgres-data:
-
-networks:
-  db:
-  redis:
@@ -1,107 +0,0 @@
-name: CI
-on:
-  push:
-    branches:
-      - master
-      - main
-  pull_request:
-
-env:
-  RUST_TOOLCHAIN: stable
-  TOOLCHAIN_PROFILE: minimal
-
-jobs:
-  rustfmt:
-    name: Check Style
-    runs-on: ubuntu-latest
-
-    permissions:
-      contents: read
-
-    steps:
-      - name: Checkout the code
-        uses: actions/checkout@v4
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: ${{ env.TOOLCHAIN_PROFILE }}
-          toolchain: ${{ env.RUST_TOOLCHAIN }}
-          override: true
-          components: rustfmt
-      - name: Run cargo fmt
-        uses: actions-rs/cargo@v1
-        with:
-          command: fmt
-          args: --all -- --check
-
-  clippy:
-    name: Run Clippy
-    runs-on: ubuntu-latest
-
-    permissions:
-      contents: read
-
-    steps:
-      - name: Checkout the code
-        uses: actions/checkout@v4
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: ${{ env.TOOLCHAIN_PROFILE }}
-          toolchain: ${{ env.RUST_TOOLCHAIN }}
-          override: true
-      - name: Setup Rust cache
-        uses: Swatinem/rust-cache@v2
-      - name: Run cargo clippy
-        uses: actions-rs/cargo@v1
-        with:
-          command: clippy
-          args: --all-features -- -D warnings -W clippy::pedantic -W clippy::nursery -W rust-2018-idioms
-
-  test:
-    name: Run Tests
-    runs-on: ubuntu-latest
-
-    permissions:
-      contents: read
-
-    services:
-      redis:
-        image: redis
-        options: >-
-          --health-cmd "redis-cli ping"
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          - "6379:6379"
-      postgres:
-        image: postgres
-        env:
-          POSTGRES_DB: postgress_test
-          POSTGRES_USER: postgress
-          POSTGRES_PASSWORD: postgress
-        ports:
-          - "5432:5432"
-        # Set health checks to wait until postgres has started
-        options: --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-
-    steps:
-      - name: Checkout the code
-        uses: actions/checkout@v4
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: ${{ env.TOOLCHAIN_PROFILE }}
-          toolchain: ${{ env.RUST_TOOLCHAIN }}
-          override: true
-      - name: Setup Rust cache
-        uses: Swatinem/rust-cache@v2
-      - name: Run cargo test
-        uses: actions-rs/cargo@v1
-        with:
-          command: test
-          args: --all-features --all
-        env:
-          REDIS_URL: redis://localhost:${{job.services.redis.ports[6379]}}
-          DATABASE_URL: postgres://postgress:postgress@localhost:5432/postgress_test
@@ -1,4 +1,4 @@
-name: Build and Push Testing Torrents Container
+name: Testing Torrents Container
 
 on:
   workflow_dispatch:
@@ -11,6 +11,9 @@ env:
 jobs:
   build-container:
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
@@ -26,8 +29,8 @@ jobs:
        uses: docker/build-push-action@v5
        with:
          context: 'packages/testing-torrents'
-         file: './Dockerfile'
+         file: 'packages/testing-torrents/Dockerfile'
          push: true
-         tags: 'ghcr.io/${{ env.ORG }}/${{ env.PROJECT }}-testing-torrents:latest'
+         tags: '${{ env.REGISTRY }}/${{ env.ORG }}/${{ env.PROJECT }}-testing-torrents:latest'
          cache-from: type=gha
          cache-to: type=gha,mode=max
.vscode/extensions.json (vendored) | 3
@@ -5,6 +5,7 @@
     "unifiedjs.vscode-mdx",
     "mikestead.dotenv",
     "christian-kohler.npm-intellisense",
-    "skellock.just"
+    "skellock.just",
+    "zerotaskx.rust-extension-pack"
   ]
 }
.vscode/settings.json (vendored) | 82
@@ -1,32 +1,52 @@
 {
   "npm.packageManager": "pnpm",
-  "rust-analyzer.showUnlinkedFileNotification": false,
   "[javascript]": {
     "editor.defaultFormatter": "vscode.typescript-language-features",
     "editor.formatOnSave": true
   },
   "[json]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[jsonc]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[typescript]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[typescriptreact]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "editor.codeActionsOnSave": {
     "quickfix.biome": "explicit",
     "source.organizeImports.biome": "explicit"
   },
   "emmet.showExpandedAbbreviation": "never",
   "prettier.enable": false,
   "typescript.tsdk": "node_modules/typescript/lib",
-  "rust-analyzer.cargo.features": ["testcontainers"]
+  "rust-analyzer.showUnlinkedFileNotification": false,
+  "sqltools.connections": [
+    {
+      "previewLimit": 50,
+      "server": "localhost",
+      "port": 5432,
+      "driver": "PostgreSQL",
+      "name": "konobangu-dev",
+      "database": "konobangu",
+      "username": "konobangu"
+    }
+  ],
+  "rust-analyzer.cargo.features": "all",
+  "rust-analyzer.testExplorer": true
+  // https://github.com/rust-lang/rust/issues/141540
+  // "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
+  // "rust-analyzer.check.extraEnv": {
+  //   "CARGO_TARGET_DIR": "target/rust-analyzer"
+  // },
+  // "rust-analyzer.cargo.extraEnv": {
+  //   "CARGO_TARGET_DIR": "target/analyzer"
+  // }
 }
.vscode/tasks.json (vendored, new file) | 112
@@ -0,0 +1,112 @@
+{
+  "version": "2.0.0",
+  "tasks": [
+    {
+      "label": "dev-all",
+      "dependsOn": [
+        "dev-webui",
+        "dev-recorder",
+        "dev-proxy",
+        "dev-codegen-wait",
+        "dev-deps",
+      ],
+      "dependsOrder": "parallel",
+      "group": {
+        "kind": "build",
+        "isDefault": false,
+      },
+      "presentation": {
+        "group": "new-group",
+        "echo": true,
+        "reveal": "always",
+        "panel": "shared",
+        "clear": false
+      }
+    },
+    {
+      "label": "dev-webui",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-webui"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "always",
+        "focus": true,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-deps",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-deps"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-codegen-wait",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-codegen-wait"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-recorder",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-recorder"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-proxy",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-proxy",
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    }
+  ]
+}
Cargo.lock (generated) | 4595 (diff suppressed because it is too large)
Cargo.toml | 91
@@ -1,9 +1,90 @@
+# cargo-features = ["codegen-backend"]
+
 [workspace]
-members = ["apps/recorder"]
+members = [
+  "packages/testing-torrents",
+  "packages/util",
+  "packages/util-derive",
+  "packages/fetch",
+  "packages/downloader",
+  "apps/recorder",
+  "apps/proxy",
+]
 resolver = "2"
 
-[patch.crates-io]
-jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
-
-# [patch."https://github.com/lonelyhentxi/qbit.git"]
-# qbit-rs = { path = "./patches/qbit-rs" }
+[profile.dev]
+debug = 0
+# https://github.com/rust-lang/rust/issues/141540
+incremental = false
+# [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
+# codegen-backend = "cranelift"
+
+[workspace.dependencies]
+testing-torrents = { path = "./packages/testing-torrents" }
+util = { path = "./packages/util" }
+util-derive = { path = "./packages/util-derive" }
+fetch = { path = "./packages/fetch" }
+downloader = { path = "./packages/downloader" }
+recorder = { path = "./apps/recorder" }
+
+reqwest = { version = "0.12.20", features = [
+  "charset",
+  "http2",
+  "json",
+  "macos-system-configuration",
+  "cookies",
+] }
+moka = "0.12"
+futures = "0.3"
+quirks_path = "0.1"
+snafu = { version = "0.8", features = ["futures"] }
+testcontainers = { version = "0.24" }
+testcontainers-modules = { version = "0.12.1" }
+testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
+serde = { version = "1", features = ["derive"] }
+tokio = { version = "1.45.1", features = [
+  "macros",
+  "fs",
+  "rt-multi-thread",
+  "signal",
+] }
+serde_json = "1"
+async-trait = "0.1"
+tracing = "0.1"
+url = "2.5.2"
+anyhow = "1"
+itertools = "0.14"
+chrono = "0.4"
+bytes = "1"
+serde_with = "3"
+regex = "1.11"
+lazy_static = "1.5"
+axum = { version = "0.8.3", features = ["macros"] }
+tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
+axum-extra = { version = "0.10", features = ["typed-header"] }
+mockito = { version = "1.6.1" }
+convert_case = "0.8"
+color-eyre = "0.6.5"
+inquire = "0.7.5"
+image = "0.25.6"
+uuid = { version = "1.6.0", features = ["v4"] }
+maplit = "1.0.2"
+once_cell = "1.20.2"
+rand = "0.9.1"
+rust_decimal = "1.37.2"
+base64 = "0.22.1"
+nom = "8.0.0"
+percent-encoding = "2.3.1"
+num-traits = "0.2.19"
+http = "1.2.0"
+async-stream = "0.3.6"
+serde_variant = "0.1.3"
+tracing-appender = "0.2.3"
+clap = "4.5.40"
+ipnetwork = "0.21.1"
+typed-builder = "0.21.0"
+nanoid = "0.4.0"
+webp = "0.3.0"
+
+[patch.crates-io]
+seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }
@@ -6,13 +6,14 @@
     "build": "email build",
     "dev": "email dev --port 5003",
     "export": "email export",
-    "clean": "git clean -xdf .cache .turbo dist node_modules",
+    "clean": "git clean -xdf .cache dist node_modules",
     "typecheck": "tsc --noEmit --emitDeclarationOnly false"
   },
   "dependencies": {
-    "@react-email/components": "0.0.31",
+    "@react-email/components": "^0.0.42",
     "react": "^19.0.0",
-    "react-email": "3.0.4"
+    "react-email": "^4.0.16",
+    "@konobangu/email": "workspace:*"
   },
   "devDependencies": {
     "@types/react": "19.0.1"
@@ -2,8 +2,12 @@
   "extends": "../../tsconfig.base.json",
   "compilerOptions": {
     "composite": true,
-    "jsx": "react-jsx"
+    "jsx": "react-jsx",
+    "jsxImportSource": "react",
+    "module": "ESNext",
+    "moduleResolution": "bundler"
   },
+  "references": [{ "path": "../../packages/email" }],
   "include": ["**/*.ts", "**/*.tsx"],
   "exclude": ["node_modules"]
 }
@@ -5,6 +5,7 @@
 }
 ```
 
+#^https://konobangu.com/api*** statusCode://500
 ^https://konobangu.com/api*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/api$1
 ^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000/$1 excludeFilter://^https://konobangu.com/api***
 ^wss://konobangu.com/*** reqHeaders://{x-forwarded.json} ws://127.0.0.1:5000/$1 excludeFilter://^wss://konobangu.com/api
apps/proxy/.whistle/rules/files/1.mikan_doppel (new file) | 1
@@ -0,0 +1 @@
+^https://mikanani.me/*** http://127.0.0.1:5005/$1 excludeFilter://^**/***.svg excludeFilter://^**/***.css excludeFilter://^**/***.js
@@ -1 +1 @@
-{"filesOrder":["konobangu"],"selectedList":["konobangu"],"disabledDefalutRules":true}
+{"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""}
apps/proxy/Cargo.toml (new file) | 19
@@ -0,0 +1,19 @@
+[package]
+name = "proxy"
+version = "0.1.0"
+edition = "2024"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+[lib]
+name = "proxy"
+path = "src/lib.rs"
+
+[[bin]]
+name = "mikan_doppel"
+path = "src/bin/mikan_doppel.rs"
+
+[dependencies]
+recorder = { workspace = true }
+tokio = { workspace = true }
+tracing-subscriber = { workspace = true }
+tracing = { workspace = true }
@@ -3,13 +3,13 @@
   "version": "0.1.0",
   "private": true,
   "scripts": {
-    "start": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
-    "dev": "pnpm run start"
+    "whistle": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
+    "mikan_doppel": "cargo run -p proxy --bin mikan_doppel",
+    "dev": "npm-run-all -p mikan_doppel whistle"
   },
   "keywords": [],
   "license": "MIT",
   "devDependencies": {
-    "cross-env": "^7.0.3",
-    "whistle": "^2.9.93"
+    "whistle": "^2.9.99"
   }
 }
apps/proxy/src/bin/mikan_doppel.rs (new file) | 22
@@ -0,0 +1,22 @@
+use std::time::Duration;
+
+use recorder::{errors::RecorderResult, test_utils::mikan::MikanMockServer};
+use tracing::Level;
+
+#[allow(unused_variables)]
+#[tokio::main]
+async fn main() -> RecorderResult<()> {
+    tracing_subscriber::fmt()
+        .with_max_level(Level::DEBUG)
+        .init();
+
+    let mut mikan_server = MikanMockServer::new_with_port(5005).await.unwrap();
+
+    let resources_mock = mikan_server.mock_resources_with_doppel();
+
+    let login_mock = mikan_server.mock_get_login_page();
+
+    loop {
+        tokio::time::sleep(Duration::from_secs(1)).await;
+    }
+}
@@ -1,8 +1,17 @@
+HOST="konobangu.com"
+DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
+STORAGE_DATA_DIR = "./data"
 AUTH_TYPE = "basic" # or oidc
 BASIC_USER = "konobangu"
 BASIC_PASSWORD = "konobangu"
 # OIDC_ISSUER="https://auth.logto.io/oidc"
-# OIDC_API_AUDIENCE = "https://konobangu.com/api"
+# OIDC_AUDIENCE = "https://konobangu.com/api"
 # OIDC_CLIENT_ID = "client_id"
 # OIDC_CLIENT_SECRET = "client_secret" # optional
 # OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
+# OIDC_EXTRA_CLAIM_KEY = ""
+# OIDC_EXTRA_CLAIM_VALUE = ""
+# MIKAN_PROXY = ""
+# MIKAN_PROXY_AUTH_HEADER = ""
+# MIKAN_NO_PROXY = ""
+# MIKAN_PROXY_ACCEPT_INVALID_CERTS = "true"
apps/recorder/.env.dev (new file) | 17
@@ -0,0 +1,17 @@
+HOST="konobangu.com"
+DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
+STORAGE_DATA_DIR = "./data"
+AUTH_TYPE = "basic" # or oidc
+BASIC_USER = "konobangu"
+BASIC_PASSWORD = "konobangu"
+# OIDC_ISSUER="https://auth.logto.io/oidc"
+# OIDC_AUDIENCE = "https://konobangu.com/api"
+# OIDC_CLIENT_ID = "client_id"
+# OIDC_CLIENT_SECRET = "client_secret" # optional
+# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
+# OIDC_EXTRA_CLAIM_KEY = ""
+# OIDC_EXTRA_CLAIM_VALUE = ""
+MIKAN_PROXY = "http://127.0.0.1:8899"
+# MIKAN_PROXY_AUTH_HEADER = ""
+# MIKAN_NO_PROXY = ""
+MIKAN_PROXY_ACCEPT_INVALID_CERTS = true
apps/recorder/.gitignore (vendored) | 4
@@ -25,3 +25,7 @@ Cargo.lock
 # Dist
 node_modules
 dist/
+temp/*
+!temp/.gitkeep
+tests/resources/mikan/classic_episodes/*/*
+!tests/resources/mikan/classic_episodes/parquet/tiny.parquet
@@ -2,8 +2,20 @@
 name = "recorder"
 version = "0.1.0"
 edition = "2024"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
+[features]
+default = ["jxl"]
+playground = ["dep:inquire", "dep:color-eyre", "dep:polars"]
+testcontainers = [
+  "dep:testcontainers",
+  "dep:testcontainers-modules",
+  "dep:testcontainers-ext",
+  "downloader/testcontainers",
+  "testcontainers-modules/postgres",
+]
+jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
+
 [lib]
 name = "recorder"
 path = "src/lib.rs"
@@ -13,86 +25,108 @@ name = "recorder_cli"
 path = "src/bin/main.rs"
 required-features = []
 
-[features]
-default = []
-testcontainers = [
-  "dep:testcontainers",
-  "dep:testcontainers-modules",
-  "dep:bollard",
-]
+[[example]]
+name = "mikan_collect_classic_eps"
+path = "examples/mikan_collect_classic_eps.rs"
+required-features = ["playground"]
+
+[[example]]
+name = "mikan_doppel_season_subscription"
+path = "examples/mikan_doppel_season_subscription.rs"
+required-features = ["playground"]
+
+[[example]]
+name = "mikan_doppel_subscriber_subscription"
+path = "examples/mikan_doppel_subscriber_subscription.rs"
+required-features = ["playground"]
+
+[[example]]
+name = "playground"
+path = "examples/playground.rs"
+required-features = ["playground"]
+
 [dependencies]
+downloader = { workspace = true }
+util = { workspace = true }
+util-derive = { workspace = true }
+fetch = { workspace = true }
+
+serde = { workspace = true }
+tokio = { workspace = true }
+serde_json = { workspace = true }
+async-trait = { workspace = true }
+testcontainers = { workspace = true, optional = true }
+testcontainers-modules = { workspace = true, optional = true }
+testcontainers-ext = { workspace = true, optional = true, features = [
+  "tracing",
+] }
+tracing = { workspace = true }
+axum = { workspace = true }
+axum-extra = { workspace = true }
+snafu = { workspace = true }
+itertools = { workspace = true }
+url = { workspace = true }
+regex = { workspace = true }
+lazy_static = { workspace = true }
+quirks_path = { workspace = true }
+futures = { workspace = true }
+bytes = { workspace = true }
+serde_with = { workspace = true }
+moka = { workspace = true }
+chrono = { workspace = true }
+tracing-subscriber = { workspace = true }
+mockito = { workspace = true }
+color-eyre = { workspace = true, optional = true }
+inquire = { workspace = true, optional = true }
+convert_case = { workspace = true }
+image = { workspace = true }
+uuid = { workspace = true }
+maplit = { workspace = true }
+once_cell = { workspace = true }
+rand = { workspace = true }
+rust_decimal = { workspace = true }
+base64 = { workspace = true }
+nom = { workspace = true }
+percent-encoding = { workspace = true }
+num-traits = { workspace = true }
+http = { workspace = true }
+async-stream = { workspace = true }
+serde_variant = { workspace = true }
+tracing-appender = { workspace = true }
+clap = { workspace = true }
+ipnetwork = { workspace = true }
+typed-builder = { workspace = true }
+nanoid = { workspace = true }
+webp = { workspace = true }
+
-serde = { version = "1", features = ["derive"] }
-serde_json = "1"
-tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
-async-trait = "0.1.83"
-tracing = "0.1"
-chrono = "0.4"
 sea-orm = { version = "1.1", features = [
   "sqlx-sqlite",
   "sqlx-postgres",
-  "runtime-tokio-rustls",
+  "runtime-tokio",
   "macros",
   "debug-print",
 ] }
 figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
-axum = "0.8"
-uuid = { version = "1.6.0", features = ["v4"] }
-tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
-sea-orm-migration = { version = "1.1", features = ["runtime-tokio-rustls"] }
-reqwest = { version = "0.12", default-features = false, features = [
-  "charset",
-  "http2",
-  "json",
-  "macos-system-configuration",
-  "rustls-tls",
-  "cookies",
-] }
+sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
 rss = "2"
-bytes = "1.9"
-itertools = "0.14"
-url = "2.5"
 fancy-regex = "0.14"
-regex = "1.11"
-lazy_static = "1.5"
-maplit = "1.0.2"
-lightningcss = "1.0.0-alpha.61"
+lightningcss = "1.0.0-alpha.66"
 html-escape = "0.2.13"
-opendal = { version = "0.51.0", features = ["default", "services-fs"] }
-zune-image = "0.4.15"
-once_cell = "1.20.2"
-reqwest-middleware = "0.4.0"
-reqwest-retry = "0.7.0"
-reqwest-tracing = "0.5.5"
-scraper = "0.23"
-leaky-bucket = "1.1.2"
-serde_with = "3"
-jwt-authorizer = "0.15.0"
-futures = "0.3.31"
-librqbit-core = "4"
-qbit-rs = { git = "https://github.com/lonelyhentxi/qbit.git", rev = "72d53138ebe", features = [
-  "default",
-  "builder",
-] }
-testcontainers = { version = "0.23.3", features = [
-  "default",
-  "properties-config",
-  "watchdog",
-  "http_wait",
-  "reusable-containers",
-], optional = true }
-testcontainers-modules = { version = "0.11.4", optional = true }
-log = "0.4.22"
-bollard = { version = "0.18", optional = true }
-async-graphql = { version = "7", features = [] }
+opendal = { version = "0.53", features = ["default", "services-fs"] }
+scraper = "0.23.1"
+async-graphql = { version = "7", features = ["dynamic-schema"] }
 async-graphql-axum = "7"
-fastrand = "2.3.0"
-seaography = { version = "1.1" }
-quirks_path = "0.1.1"
-base64 = "0.22.1"
+seaography = { version = "1.1", features = [
+  "with-json",
+  "with-chrono",
+  "with-time",
+  "with-uuid",
+  "with-decimal",
+  "with-bigdecimal",
+  "with-postgres-array",
+  "with-json-as-scalar",
+] }
 tower = "0.5.2"
-axum-extra = "0.10"
 tower-http = { version = "0.6", features = [
   "trace",
   "catch-panic",
@@ -103,36 +137,33 @@ tower-http = { version = "0.6", features = [
   "set-header",
   "compression-full",
 ] }
-serde_yaml = "0.9.34"
 tera = "1.20.0"
-openidconnect = { version = "4", features = ["rustls-tls"] }
-http-cache-reqwest = { version = "0.15", features = [
-  "manager-cacache",
-  "manager-moka",
-] }
-moka = "0.12.10"
-http-cache = { version = "0.20.0", features = [
-  "cacache-tokio",
-  "manager-cacache",
-  "manager-moka",
-], default-features = false }
-http-cache-semantics = "2.1.0"
-dotenv = "0.15.0"
-nom = "8.0.0"
-http = "1.2.0"
-cookie = "0.18.1"
-async-stream = "0.3.6"
-serde_variant = "0.1.3"
-tracing-appender = "0.2.3"
-clap = "4.5.31"
-futures-util = "0.3.31"
-ipnetwork = "0.21.1"
-ctor = "0.4.0"
-librqbit = "8.0.0"
-typed-builder = "0.21.0"
-snafu = { version = "0.8.5", features = ["futures"] }
+openidconnect = { version = "4" }
+dotenvy = "0.15.7"
+jpegxl-rs = { version = "0.11.2", optional = true }
+jpegxl-sys = { version = "0.11.2", optional = true }
+apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
+apalis-sql = { version = "0.7", features = ["postgres"] }
+cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
+reqwest_cookie_store = "0.8.0"
+jwtk = "0.4.0"
+mime_guess = "2.0.5"
+icu_properties = "2.0.1"
+icu = "2.0.0"
+tracing-tree = "0.4.0"
+num_cpus = "1.17.0"
+headers-accept = "0.1.4"
+polars = { version = "0.49.1", features = [
+  "parquet",
+  "lazy",
+  "diagonal_concat",
+], optional = true }
 [dev-dependencies]
+inquire = { workspace = true }
+color-eyre = { workspace = true }
 serial_test = "3"
-insta = { version = "1", features = ["redactions", "yaml", "filters"] }
-mockito = "1.6.1"
+insta = { version = "1", features = ["redactions", "toml", "filters"] }
 rstest = "0.25"
+ctor = "0.4.0"
584
apps/recorder/examples/mikan_collect_classic_eps.rs
Normal file
584
apps/recorder/examples/mikan_collect_classic_eps.rs
Normal file
@@ -0,0 +1,584 @@
|
|||||||
|
use std::collections::HashSet;
|
||||||
|
|
||||||
|
use chrono::{DateTime, Duration, FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
|
||||||
|
use fetch::{HttpClientConfig, fetch_html};
|
||||||
|
use itertools::Itertools;
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
use nom::{
|
||||||
|
IResult, Parser,
|
||||||
|
branch::alt,
|
||||||
|
bytes::complete::{tag, take, take_till1},
|
||||||
|
character::complete::space1,
|
||||||
|
combinator::map,
|
||||||
|
};
|
||||||
|
use recorder::{
|
||||||
|
errors::{RecorderError, RecorderResult},
|
||||||
|
extract::{
|
||||||
|
html::extract_inner_text_from_element_ref,
|
||||||
|
mikan::{MikanClient, MikanConfig, MikanEpisodeHash, MikanFansubHash},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
use regex::Regex;
|
||||||
|
use scraper::{ElementRef, Html, Selector};
|
||||||
|
use snafu::FromString;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref TEST_FOLDER: std::path::PathBuf =
|
||||||
|
if cfg!(any(test, debug_assertions, feature = "playground")) {
|
||||||
|
std::path::PathBuf::from(format!(
|
||||||
|
"{}/tests/resources/mikan/classic_episodes",
|
||||||
|
env!("CARGO_MANIFEST_DIR")
|
||||||
|
))
|
||||||
|
} else {
|
||||||
|
std::path::PathBuf::from("tests/resources/mikan/classic_episodes")
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref TOTAL_PAGE_REGEX: Regex =
|
||||||
|
Regex::new(r#"\$\(\'\.classic-view-pagination2\'\)\.bootpag\(\{\s*total:\s*(\d+)"#)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct MikanClassicEpisodeTableRow {
|
||||||
|
pub id: i32,
|
||||||
|
pub publish_at: DateTime<Utc>,
|
||||||
|
pub mikan_fansub_id: Option<String>,
|
||||||
|
pub fansub_name: Option<String>,
|
||||||
|
pub mikan_episode_id: String,
|
||||||
|
pub original_name: String,
|
||||||
|
pub magnet_link: Option<String>,
|
||||||
|
pub file_size: Option<String>,
|
||||||
|
pub torrent_link: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MikanClassicEpisodeTableRow {
|
||||||
|
fn timezone() -> FixedOffset {
|
||||||
|
FixedOffset::east_opt(8 * 3600).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fixed_date_parser(input: &str) -> IResult<&str, NaiveDate> {
|
||||||
|
alt((
|
||||||
|
map(tag("今天"), move |_| {
|
||||||
|
Utc::now().with_timezone(&Self::timezone()).date_naive()
|
||||||
|
}),
|
||||||
|
map(tag("昨天"), move |_| {
|
||||||
|
Utc::now().with_timezone(&Self::timezone()).date_naive() - Duration::days(1)
|
||||||
|
}),
|
||||||
|
))
|
||||||
|
.parse(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn formatted_date_parser(input: &str) -> IResult<&str, NaiveDate> {
|
||||||
|
let (remain, date_str) = take_till1(|c: char| c.is_whitespace()).parse(input)?;
|
||||||
|
let date = NaiveDate::parse_from_str(date_str, "%Y/%m/%d").map_err(|_| {
|
||||||
|
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
|
||||||
|
})?;
|
||||||
|
Ok((remain, date))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn date_parser(input: &str) -> IResult<&str, NaiveDate> {
|
||||||
|
alt((Self::fixed_date_parser, Self::formatted_date_parser)).parse(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn time_parser(input: &str) -> IResult<&str, NaiveTime> {
|
||||||
|
let (remain, time_str) = take(5usize).parse(input)?;
|
||||||
|
let time = NaiveTime::parse_from_str(time_str, "%H:%M").map_err(|_| {
|
||||||
|
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
|
||||||
|
})?;
|
||||||
|
Ok((remain, time))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_publish_at(text: &str) -> Option<DateTime<Utc>> {
|
||||||
|
let (_, (date, _, time)) = (Self::date_parser, space1, Self::time_parser)
|
||||||
|
.parse(text)
|
||||||
|
.ok()?;
|
||||||
|
let local_dt = Self::timezone()
|
||||||
|
.from_local_datetime(&date.and_time(time))
|
||||||
|
.single()?;
|
||||||
|
Some(local_dt.with_timezone(&Utc))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_element_ref(
|
||||||
|
row: ElementRef<'_>,
|
||||||
|
rev_id: i32,
|
||||||
|
idx: i32,
|
||||||
|
mikan_base_url: &Url,
|
||||||
|
) -> RecorderResult<Self> {
|
||||||
|
let publish_at_selector = &Selector::parse("td:nth-of-type(1)").unwrap();
|
||||||
|
let fansub_selector = &Selector::parse("td:nth-of-type(2) > a").unwrap();
|
||||||
|
let original_name_selector =
|
||||||
|
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(1)").unwrap();
|
||||||
|
let magnet_link_selector =
|
||||||
|
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(2)").unwrap();
|
||||||
|
let file_size_selector = &Selector::parse("td:nth-of-type(4)").unwrap();
|
||||||
|
let torrent_link_selector = &Selector::parse("td:nth-of-type(5) > a").unwrap();
|
||||||
|
|
||||||
|
let publish_at = row
|
||||||
|
.select(publish_at_selector)
|
||||||
|
.next()
|
||||||
|
.map(extract_inner_text_from_element_ref)
|
||||||
|
.and_then(|e| Self::extract_publish_at(&e));
|
||||||
|
|
||||||
|
let (mikan_fansub_hash, fansub_name) = row
|
||||||
|
.select(fansub_selector)
|
||||||
|
.next()
|
||||||
|
.and_then(|e| {
|
||||||
|
e.attr("href")
|
||||||
|
.and_then(|s| mikan_base_url.join(s).ok())
|
||||||
|
.and_then(|u| MikanFansubHash::from_homepage_url(&u))
|
||||||
|
.map(|h| (h, extract_inner_text_from_element_ref(e)))
|
||||||
|
})
|
||||||
|
.unzip();
|
||||||
|
|
||||||
|
let (mikan_episode_hash, original_name) = row
|
||||||
|
.select(original_name_selector)
|
||||||
|
.next()
|
||||||
|
.and_then(|el| {
|
||||||
|
el.attr("href")
|
||||||
|
.and_then(|s| mikan_base_url.join(s).ok())
|
||||||
|
.and_then(|u| MikanEpisodeHash::from_homepage_url(&u))
|
||||||
|
.map(|h| (h, extract_inner_text_from_element_ref(el)))
|
||||||
|
})
|
||||||
|
.unzip();
|
||||||
|
|
||||||
|
let magnet_link = row
|
||||||
|
.select(magnet_link_selector)
|
||||||
|
.next()
|
||||||
|
.and_then(|el| el.attr("data-clipboard-text"));
|
||||||
|
|
||||||
|
let file_size = row
|
||||||
|
.select(file_size_selector)
|
||||||
|
.next()
|
||||||
|
.map(extract_inner_text_from_element_ref);
|
||||||
|
|
||||||
|
let torrent_link = row
|
||||||
|
.select(torrent_link_selector)
|
||||||
|
.next()
|
||||||
|
.and_then(|el| el.attr("href"));
|
||||||
|
|
||||||
|
if let (Some(mikan_episode_hash), Some(original_name), Some(publish_at)) = (
|
||||||
|
mikan_episode_hash.as_ref(),
|
||||||
|
original_name.as_ref(),
|
||||||
|
publish_at.as_ref(),
|
||||||
|
) {
|
||||||
|
Ok(Self {
|
||||||
|
id: rev_id * 1000 + idx,
|
||||||
|
publish_at: *publish_at,
|
||||||
|
mikan_fansub_id: mikan_fansub_hash.map(|h| h.mikan_fansub_id.clone()),
|
||||||
|
fansub_name,
|
||||||
|
mikan_episode_id: mikan_episode_hash.mikan_episode_id.clone(),
|
||||||
|
original_name: original_name.clone(),
|
||||||
|
magnet_link: magnet_link.map(|s| s.to_string()),
|
||||||
|
file_size: file_size.map(|s| s.to_string()),
|
||||||
|
torrent_link: torrent_link.map(|s| s.to_string()),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
let mut missing_fields = vec![];
|
||||||
|
if mikan_episode_hash.is_none() {
|
||||||
|
missing_fields.push("mikan_episode_id");
|
||||||
|
}
|
||||||
|
if original_name.is_none() {
|
||||||
|
missing_fields.push("original_name");
|
||||||
|
}
|
||||||
|
if publish_at.is_none() {
|
||||||
|
missing_fields.push("publish_at");
|
||||||
|
}
|
||||||
|
Err(RecorderError::without_source(format!(
|
||||||
|
"Failed to parse episode table row, missing fields: {missing_fields:?}, row \
|
||||||
|
index: {idx}"
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct MikanClassicEpisodeTablePage {
|
||||||
|
pub page: i32,
|
||||||
|
pub total: i32,
|
||||||
|
pub html: String,
|
||||||
|
pub rows: Vec<MikanClassicEpisodeTableRow>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MikanClassicEpisodeTablePage {
|
||||||
|
pub fn from_html(
|
||||||
|
html: String,
|
||||||
|
mikan_base_url: &Url,
|
||||||
|
page: i32,
|
||||||
|
updated_info: Option<(i32, i32)>,
|
||||||
|
) -> RecorderResult<Self> {
|
||||||
|
let tr_selector = &Selector::parse("tbody tr").unwrap();
|
||||||
|
let doc = Html::parse_document(&html);
|
||||||
|
if let Some(mut total) = TOTAL_PAGE_REGEX
|
||||||
|
.captures(&html)
|
||||||
|
.and_then(|c| c.get(1))
|
||||||
|
.and_then(|s| s.as_str().parse::<i32>().ok())
|
||||||
|
{
|
||||||
|
if let Some((_, update_total)) = updated_info {
|
||||||
|
total = update_total;
|
||||||
|
}
|
||||||
|
|
||||||
|
let rev_id = total - page;
|
||||||
|
let rows = doc
|
||||||
|
.select(tr_selector)
|
||||||
|
.rev()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(idx, tr)| {
|
||||||
|
MikanClassicEpisodeTableRow::from_element_ref(
|
||||||
|
tr,
|
||||||
|
rev_id,
|
||||||
|
idx as i32,
|
||||||
|
mikan_base_url,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect::<RecorderResult<Vec<_>>>()?;
|
||||||
|
Ok(Self {
|
||||||
|
page,
|
||||||
|
total,
|
||||||
|
html,
|
||||||
|
rows,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
Err(RecorderError::without_source(
|
||||||
|
"Failed to parse pagination meta and rows".into(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn save_to_files(&self) -> RecorderResult<()> {
|
||||||
|
use polars::prelude::*;
|
||||||
|
|
||||||
|
let rev_id = self.total - self.page;
|
||||||
|
let parquet_path = TEST_FOLDER.join(format!("parquet/rev_{rev_id}.parquet"));
|
||||||
|
let csv_path = TEST_FOLDER.join(format!("csv/rev_{rev_id}.csv"));
|
||||||
|
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
|
||||||
|
|
||||||
|
std::fs::write(html_path, self.html.clone())?;
|
||||||
|
|
||||||
|
let mut id_vec = Vec::new();
|
||||||
|
let mut publish_at_vec = Vec::new();
|
||||||
|
let mut mikan_fansub_id_vec = Vec::new();
|
||||||
|
let mut fansub_name_vec = Vec::new();
|
||||||
|
let mut mikan_episode_id_vec = Vec::new();
|
||||||
|
let mut original_name_vec = Vec::new();
|
||||||
|
let mut magnet_link_vec = Vec::new();
|
||||||
|
let mut file_size_vec = Vec::new();
|
||||||
|
let mut torrent_link_vec = Vec::new();
|
||||||
|
|
||||||
|
for row in &self.rows {
|
||||||
|
id_vec.push(row.id);
|
||||||
|
publish_at_vec.push(row.publish_at.to_rfc3339());
|
||||||
|
mikan_fansub_id_vec.push(row.mikan_fansub_id.clone());
|
||||||
|
fansub_name_vec.push(row.fansub_name.clone());
|
||||||
|
mikan_episode_id_vec.push(row.mikan_episode_id.clone());
|
||||||
|
original_name_vec.push(row.original_name.clone());
|
||||||
|
magnet_link_vec.push(row.magnet_link.clone());
|
||||||
|
file_size_vec.push(row.file_size.clone());
|
||||||
|
torrent_link_vec.push(row.torrent_link.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
let df = df! [
|
||||||
|
"id" => id_vec,
|
||||||
|
"publish_at_timestamp" => publish_at_vec,
|
||||||
|
"mikan_fansub_id" => mikan_fansub_id_vec,
|
||||||
|
"fansub_name" => fansub_name_vec,
|
||||||
|
"mikan_episode_id" => mikan_episode_id_vec,
|
||||||
|
"original_name" => original_name_vec,
|
||||||
|
"magnet_link" => magnet_link_vec,
|
||||||
|
"file_size" => file_size_vec,
|
||||||
|
"torrent_link" => torrent_link_vec,
|
||||||
|
]
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to create DataFrame: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut parquet_file = std::fs::File::create(&parquet_path)?;
|
||||||
|
|
||||||
|
ParquetWriter::new(&mut parquet_file)
|
||||||
|
.finish(&mut df.clone())
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to write parquet file: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut csv_file = std::fs::File::create(&csv_path)?;
|
||||||
|
|
||||||
|
CsvWriter::new(&mut csv_file)
|
||||||
|
.include_header(true)
|
||||||
|
.with_quote_style(QuoteStyle::Always)
|
||||||
|
.finish(&mut df.clone())
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to write csv file: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
println!(
|
||||||
|
"[{}/{}] Saved {} rows to rev_{}.{{parquet,html,csv}}",
|
||||||
|
self.page,
|
||||||
|
self.total,
|
||||||
|
self.rows.len(),
|
||||||
|
rev_id
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn waiting_rev_ids(total: i32) -> RecorderResult<Vec<i32>> {
|
||||||
|
let dir = TEST_FOLDER.join("csv");
|
||||||
|
|
||||||
|
let files = std::fs::read_dir(dir)?;
|
||||||
|
|
||||||
|
let rev_ids = files
|
||||||
|
.filter_map(|f| f.ok())
|
||||||
|
.filter_map(|f| {
|
||||||
|
f.path().file_stem().and_then(|s| {
|
||||||
|
s.to_str().and_then(|s| {
|
||||||
|
if s.starts_with("rev_") {
|
||||||
|
s.replace("rev_", "").parse::<i32>().ok()
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect::<HashSet<_>>();
|
||||||
|
|
||||||
|
Ok((0..total)
|
||||||
|
.filter(|rev_id| !rev_ids.contains(rev_id))
|
||||||
|
.collect::<Vec<_>>())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn scrape_mikan_classic_episode_table_page(
|
||||||
|
mikan_client: &MikanClient,
|
||||||
|
page: i32,
|
||||||
|
updated_info: Option<(i32, i32)>,
|
||||||
|
) -> RecorderResult<MikanClassicEpisodeTablePage> {
|
||||||
|
let mikan_base_url = mikan_client.base_url();
|
||||||
|
let url = mikan_base_url.join(&format!("/Home/Classic/{page}"))?;
|
||||||
|
|
||||||
|
if let Some((rev_id, update_total)) = updated_info.as_ref() {
|
||||||
|
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
|
||||||
|
if html_path.exists() {
|
||||||
|
let html = std::fs::read_to_string(&html_path)?;
|
||||||
|
println!("[{page}/{update_total}] html exists, skipping fetch");
|
||||||
|
return MikanClassicEpisodeTablePage::from_html(
|
||||||
|
html,
|
||||||
|
mikan_base_url,
|
||||||
|
page,
|
||||||
|
updated_info,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let total = if let Some((_, update_total)) = updated_info.as_ref() {
|
||||||
|
update_total.to_string()
|
||||||
|
} else {
|
||||||
|
"Unknown".to_string()
|
||||||
|
};
|
||||||
|
|
||||||
|
println!("[{page}/{total}] fetching html...");
|
||||||
|
|
||||||
|
let html = fetch_html(mikan_client, url).await?;
|
||||||
|
|
||||||
|
println!("[{page}/{total}] fetched html done");
|
||||||
|
|
||||||
|
std::fs::write(TEST_FOLDER.join("html/temp.html"), html.clone())?;
|
||||||
|
|
||||||
|
MikanClassicEpisodeTablePage::from_html(html, mikan_base_url, page, updated_info)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn scrape_mikan_classic_episode_table_page_from_rev_id(
|
||||||
|
mikan_client: &MikanClient,
|
||||||
|
total: i32,
|
||||||
|
rev_idx: i32,
|
||||||
|
) -> RecorderResult<MikanClassicEpisodeTablePage> {
|
||||||
|
let page = total - rev_idx;
|
||||||
|
|
||||||
|
scrape_mikan_classic_episode_table_page(mikan_client, page, Some((rev_idx, total))).await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()> {
|
||||||
|
use polars::prelude::*;
|
||||||
|
|
||||||
|
let dir = TEST_FOLDER.join("parquet");
|
||||||
|
let files = std::fs::read_dir(dir)?;
|
||||||
|
|
||||||
|
let parquet_paths = files
|
||||||
|
.filter_map(|f| f.ok())
|
||||||
|
.filter_map(|f| {
|
||||||
|
let path = f.path();
|
||||||
|
if let Some(ext) = path.extension()
|
||||||
|
&& ext == "parquet"
|
||||||
|
&& path
|
||||||
|
.file_stem()
|
||||||
|
.is_some_and(|f| f.to_string_lossy().starts_with("rev_"))
|
||||||
|
{
|
||||||
|
Some(path)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
if parquet_paths.is_empty() {
|
||||||
|
return Err(RecorderError::without_source(
|
||||||
|
"No parquet files found to merge".into(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
println!("Found {} parquet files to merge", parquet_paths.len());
|
||||||
|
|
||||||
|
// 读取并合并所有 parquet 文件
|
||||||
|
let mut all_dfs = Vec::new();
|
||||||
|
for path in &parquet_paths {
|
||||||
|
println!("Reading {path:?}");
|
||||||
|
let file = std::fs::File::open(path)?;
|
||||||
|
        let df = ParquetReader::new(file).finish().map_err(|e| {
            let message = format!("Failed to read parquet file {path:?}: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?;
        all_dfs.push(df);
    }

    let lazy_frames: Vec<LazyFrame> = all_dfs.into_iter().map(|df| df.lazy()).collect();

    let merged_df = concat_lf_diagonal(&lazy_frames, UnionArgs::default())
        .map_err(|e| {
            let message = format!("Failed to concat DataFrames: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?
        .sort(
            ["publish_at_timestamp"],
            SortMultipleOptions::default().with_order_descending(true),
        )
        .unique(
            Some(vec![
                "mikan_fansub_id".to_string(),
                "mikan_episode_id".to_string(),
            ]),
            UniqueKeepStrategy::First,
        )
        .collect()
        .map_err(|e| {
            let message = format!("Failed to collect lazy DataFrame: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?;

    fn select_columns_and_write(
        merged_df: DataFrame,
        name: &str,
        columns: &[&str],
    ) -> RecorderResult<()> {
        let result_df = merged_df
            .lazy()
            .sort(["publish_at_timestamp"], SortMultipleOptions::default())
            .select(columns.iter().map(|c| col(*c)).collect_vec())
            .collect()
            .map_err(|e| {
                let message = format!("Failed to sort and select columns: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;

        let output_path = TEST_FOLDER.join(format!("parquet/{name}.parquet"));
        let mut output_file = std::fs::File::create(&output_path)?;

        ParquetWriter::new(&mut output_file)
            .set_parallel(true)
            .with_compression(ParquetCompression::Zstd(Some(
                ZstdLevel::try_new(22).unwrap(),
            )))
            .finish(&mut result_df.clone())
            .map_err(|e| {
                let message = format!("Failed to write merged parquet file: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;

        println!("Merged {} rows into {output_path:?}", result_df.height());
        Ok(())
    }

    select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
    // select_columns_and_write(
    //     merged_df.clone(),
    //     "lite",
    //     &[
    //         "mikan_fansub_id",
    //         "fansub_name",
    //         "mikan_episode_id",
    //         "original_name",
    //     ],
    // )?;
    // select_columns_and_write(
    //     merged_df,
    //     "full",
    //     &[
    //         "id",
    //         "publish_at_timestamp",
    //         "mikan_fansub_id",
    //         "fansub_name",
    //         "mikan_episode_id",
    //         "original_name",
    //         "magnet_link",
    //         "file_size",
    //         "torrent_link",
    //     ],
    // )?;

    Ok(())
}

#[tokio::main]
async fn main() -> RecorderResult<()> {
    std::fs::create_dir_all(TEST_FOLDER.join("html"))?;
    std::fs::create_dir_all(TEST_FOLDER.join("parquet"))?;
    std::fs::create_dir_all(TEST_FOLDER.join("csv"))?;

    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(1),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(std::time::Duration::from_millis(1000)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;

    let first_page_and_pagination_info =
        scrape_mikan_classic_episode_table_page(&mikan_scrape_client, 1, None).await?;

    let total_page = first_page_and_pagination_info.total;

    first_page_and_pagination_info.save_to_files()?;

    let next_rev_ids = MikanClassicEpisodeTablePage::waiting_rev_ids(total_page)?;

    for todo_rev_id in next_rev_ids {
        let page = scrape_mikan_classic_episode_table_page_from_rev_id(
            &mikan_scrape_client,
            total_page,
            todo_rev_id,
        )
        .await?;

        page.save_to_files()?;
    }

    // Merge all parquet files
    println!("\nMerging all parquet files...");

    merge_mikan_classic_episodes_and_strip_columns().await?;

    println!("Merge completed!");

    Ok(())
}
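A minimal verification sketch (not part of the changeset above, the function name is illustrative): it only reuses APIs already shown in the example — `ParquetReader`, `RecorderError::with_source`, `TEST_FOLDER` — to read the merged "tiny" file back and confirm the row count after deduplication.

// Hypothetical helper; reads back the file written by select_columns_and_write("tiny", ...).
fn verify_merged_parquet() -> RecorderResult<()> {
    let path = TEST_FOLDER.join("parquet/tiny.parquet");
    let file = std::fs::File::open(&path)?;
    // finish() materializes the whole parquet file into an eager DataFrame
    let df = ParquetReader::new(file).finish().map_err(|e| {
        let message = format!("Failed to read parquet file {path:?}: {e}");
        RecorderError::with_source(Box::new(e), message)
    })?;
    println!("{path:?} contains {} deduplicated rows", df.height());
    Ok(())
}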
apps/recorder/examples/mikan_doppel_season_subscription.rs (new file, 249 lines)
@@ -0,0 +1,249 @@
use std::time::Duration;

use color_eyre::{Result, eyre::OptionExt};
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use inquire::{Password, Text, validator::Validation};
use recorder::{
    crypto::UserPassCredential,
    extract::mikan::{
        MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
        extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
        extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
    },
    test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
};
use scraper::Html;
use tokio::fs;
use url::Url;

#[tokio::main]
async fn main() -> Result<()> {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();
    std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;

    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(0),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(Duration::from_millis(1000)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;

    let username_validator = |input: &str| {
        if input.trim().is_empty() {
            Ok(Validation::Invalid("Username cannot be empty".into()))
        } else {
            Ok(Validation::Valid)
        }
    };
    let password_validator = |input: &str| {
        if input.trim().is_empty() {
            Ok(Validation::Invalid("Password cannot be empty".into()))
        } else {
            Ok(Validation::Valid)
        }
    };
    let username = Text::new("Please enter your mikan username:")
        .with_validator(username_validator)
        .prompt()?;
    let password = Password::new("Please enter your mikan password:")
        .without_confirmation()
        .with_display_mode(inquire::PasswordDisplayMode::Masked)
        .with_validator(password_validator)
        .prompt()?;

    let mikan_scrape_client = mikan_scrape_client
        .fork_with_userpass_credential(UserPassCredential {
            username,
            password,
            user_agent: None,
            cookies: None,
        })
        .await?;

    tracing::info!("Checking if logged in...");
    if !mikan_scrape_client.has_login().await? {
        tracing::info!("Logging in to mikan...");
        mikan_scrape_client.login().await?;
        tracing::info!("Logged in to mikan");
    }

    let mikan_base_url = mikan_scrape_client.base_url().clone();
    tracing::info!("Scraping season subscription...");
    let season_subscription =
        fs::read("tests/resources/mikan/BangumiCoverFlow-2025-spring.html").await?;
    let html = Html::parse_fragment(String::from_utf8(season_subscription)?.as_str());
    let bangumi_index_list =
        extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(&html, &mikan_base_url);

    for bangumi_index in bangumi_index_list {
        let bangumi_meta = {
            let bangumi_expand_subscribed_url = build_mikan_bangumi_expand_subscribed_url(
                mikan_base_url.clone(),
                bangumi_index.mikan_bangumi_id.as_ref(),
            );
            let bangumi_expand_subscribed_doppel_path =
                MikanDoppelPath::new(bangumi_expand_subscribed_url.clone());
            tracing::info!(
                bangumi_title = bangumi_index.bangumi_title,
                "Scraping bangumi expand subscribed..."
            );
            let bangumi_expand_subscribed_data =
                if !bangumi_expand_subscribed_doppel_path.exists_any() {
                    let bangumi_expand_subscribed_data =
                        fetch_html(&mikan_scrape_client, bangumi_expand_subscribed_url).await?;
                    bangumi_expand_subscribed_doppel_path.write(&bangumi_expand_subscribed_data)?;
                    tracing::info!(
                        bangumi_title = bangumi_index.bangumi_title,
                        "Bangumi expand subscribed saved"
                    );
                    bangumi_expand_subscribed_data
                } else {
                    tracing::info!(
                        bangumi_title = bangumi_index.bangumi_title,
                        "Bangumi expand subscribed already exists"
                    );
                    String::from_utf8(bangumi_expand_subscribed_doppel_path.read()?)?
                };

            let html = Html::parse_fragment(&bangumi_expand_subscribed_data);
            extract_mikan_bangumi_meta_from_expand_subscribed_fragment(
                &html,
                bangumi_index.clone(),
                mikan_base_url.clone(),
            )
            .ok_or_eyre(format!(
                "Failed to extract bangumi meta from expand subscribed fragment: {:?}",
                bangumi_index.bangumi_title
            ))
        }?;
        {
            if let Some(poster_url) = bangumi_meta.origin_poster_src.as_ref() {
                let poster_doppel_path = MikanDoppelPath::new(poster_url.clone());
                tracing::info!(
                    title = bangumi_meta.bangumi_title,
                    "Scraping bangumi poster..."
                );
                if !poster_doppel_path.exists_any() {
                    let poster_data = fetch_image(&mikan_scrape_client, poster_url.clone()).await?;
                    poster_doppel_path.write(&poster_data)?;
                    tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi poster saved");
                } else {
                    tracing::info!(
                        title = bangumi_meta.bangumi_title,
                        "Bangumi poster already exists"
                    );
                }
            }
        }
        {
            let bangumi_homepage_url = bangumi_meta
                .bangumi_hash()
                .build_homepage_url(mikan_base_url.clone());
            let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
            tracing::info!(
                title = bangumi_meta.bangumi_title,
                "Scraping bangumi homepage..."
            );
            if !bangumi_homepage_doppel_path.exists_any() {
                let bangumi_homepage_data =
                    fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
                bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
                tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi homepage saved");
            } else {
                tracing::info!(
                    title = bangumi_meta.bangumi_title,
                    "Bangumi homepage already exists"
                );
            }
        }
        let rss_items = {
            let bangumi_rss_url = bangumi_meta
                .bangumi_hash()
                .build_rss_url(mikan_base_url.clone());
            let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
            tracing::info!(
                title = bangumi_meta.bangumi_title,
                "Scraping bangumi rss..."
            );
            let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
                let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
                bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
                tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi rss saved");
                bangumi_rss_data
            } else {
                tracing::info!(
                    title = bangumi_meta.bangumi_title,
                    "Bangumi rss already exists"
                );
                String::from_utf8(bangumi_rss_doppel_path.read()?)?
            };
            let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
            rss_items
                .into_iter()
                .map(MikanRssEpisodeItem::try_from)
                .collect::<Result<Vec<_>, _>>()
        }?;
        for rss_item in rss_items {
            {
                let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
                let episode_homepage_doppel_path =
                    MikanDoppelPath::new(episode_homepage_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode...");
                if !episode_homepage_doppel_path.exists_any() {
                    let episode_homepage_data =
                        fetch_html(&mikan_scrape_client, episode_homepage_url).await?;
                    episode_homepage_doppel_path.write(&episode_homepage_data)?;
                    tracing::info!(title = rss_item.title, "Episode saved");
                } else {
                    tracing::info!(title = rss_item.title, "Episode already exists");
                };
            }
            {
                let episode_torrent_url = rss_item.torrent_link;
                let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode torrent...");
                if !episode_torrent_doppel_path.exists_any() {
                    match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                        Ok(episode_torrent_data) => {
                            episode_torrent_doppel_path.write(&episode_torrent_data)?;
                            tracing::info!(title = rss_item.title, "Episode torrent saved");
                        }
                        Err(e) => {
                            if let FetchError::ReqwestError { source } = &e
                                && source
                                    .status()
                                    .is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
                            {
                                tracing::warn!(
                                    title = rss_item.title,
                                    "Episode torrent not found, maybe deleted since new version"
                                );
                                episode_torrent_doppel_path
                                    .write_meta(MikanDoppelMeta { status: 404 })?;
                            } else {
                                Err(e)?;
                            }
                        }
                    }
                } else {
                    tracing::info!(title = rss_item.title, "Episode torrent already exists");
                }
            }
        }
    }
    tracing::info!("Scraping season subscription done");
    Ok(())
}
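A hypothetical refactoring sketch, not part of the changeset: the example above repeats the same "fetch unless a doppel copy already exists" dance for expand-subscribed fragments, homepages, and RSS feeds, and it could be folded into a helper like the one below. Only `MikanDoppelPath`, `fetch_html`, `MikanClient`, and `Url` from the example are assumed; the helper name itself is illustrative.

// Hypothetical helper built from calls already used in the example above.
async fn fetch_html_or_reuse_doppel(
    client: &MikanClient,
    url: Url,
) -> color_eyre::Result<String> {
    let doppel_path = MikanDoppelPath::new(url.clone());
    if doppel_path.exists_any() {
        // reuse the previously scraped copy on disk
        Ok(String::from_utf8(doppel_path.read()?)?)
    } else {
        // fetch once and persist it for the next run
        let data = fetch_html(client, url).await?;
        doppel_path.write(&data)?;
        Ok(data)
    }
}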
apps/recorder/examples/mikan_doppel_subscriber_subscription.rs (new file, 215 lines)
@@ -0,0 +1,215 @@
use std::time::Duration;

use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use recorder::{
    errors::RecorderResult,
    extract::mikan::{
        MikanClient, MikanConfig, MikanRssEpisodeItem,
        extract_mikan_episode_meta_from_episode_homepage_html,
    },
    test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
};
use scraper::Html;
use tokio::fs;
use url::Url;

#[tokio::main]
async fn main() -> RecorderResult<()> {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();
    std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;

    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(0),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(Duration::from_millis(500)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;

    let mikan_base_url = mikan_scrape_client.base_url().clone();
    tracing::info!("Scraping subscriber subscription...");
    let subscriber_subscription =
        fs::read("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
    let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
    let rss_items: Vec<MikanRssEpisodeItem> = channel
        .items
        .into_iter()
        .map(MikanRssEpisodeItem::try_from)
        .collect::<Result<Vec<_>, _>>()?;
    for rss_item in rss_items {
        let episode_homepage_meta = {
            tracing::info!(title = rss_item.title, "Scraping episode homepage...");
            let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
            let episode_homepage_doppel_path = MikanDoppelPath::new(episode_homepage_url.clone());
            let episode_homepage_data = if !episode_homepage_doppel_path.exists_any() {
                let episode_homepage_data =
                    fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
                episode_homepage_doppel_path.write(&episode_homepage_data)?;
                tracing::info!(title = rss_item.title, "Episode homepage saved");
                episode_homepage_data
            } else {
                tracing::info!(title = rss_item.title, "Episode homepage already exists");
                String::from_utf8(episode_homepage_doppel_path.read()?)?
            };
            let html = Html::parse_document(&episode_homepage_data);
            extract_mikan_episode_meta_from_episode_homepage_html(
                &html,
                mikan_base_url.clone(),
                episode_homepage_url,
            )
        }?;

        {
            let episode_torrent_url = rss_item.torrent_link;
            let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
            tracing::info!(title = rss_item.title, "Scraping episode torrent...");
            if !episode_torrent_doppel_path.exists_any() {
                match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                    Ok(episode_torrent_data) => {
                        episode_torrent_doppel_path.write(&episode_torrent_data)?;
                        tracing::info!(title = rss_item.title, "Episode torrent saved");
                    }
                    Err(e) => {
                        if let FetchError::ReqwestError { source } = &e
                            && source
                                .status()
                                .is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
                        {
                            tracing::warn!(
                                title = rss_item.title,
                                "Episode torrent not found, maybe deleted since new version"
                            );
                            episode_torrent_doppel_path
                                .write_meta(MikanDoppelMeta { status: 404 })?;
                        } else {
                            Err(e)?;
                        }
                    }
                }

                tracing::info!(title = rss_item.title, "Episode torrent saved");
            } else {
                tracing::info!(title = rss_item.title, "Episode torrent already exists");
            }
        }
        {
            if let Some(episode_poster_url) = episode_homepage_meta.origin_poster_src.as_ref() {
                let episode_poster_doppel_path = MikanDoppelPath::new(episode_poster_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode poster...");
                if !episode_poster_doppel_path.exists_any() {
                    let episode_poster_data =
                        fetch_image(&mikan_scrape_client, episode_poster_url.clone()).await?;
                    episode_poster_doppel_path.write(&episode_poster_data)?;
                    tracing::info!(title = rss_item.title, "Episode poster saved");
                } else {
                    tracing::info!(title = rss_item.title, "Episode poster already exists");
                }
            }
        }

        {
            let bangumi_homepage_url = episode_homepage_meta
                .bangumi_hash()
                .build_homepage_url(mikan_base_url.clone());
            let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
            tracing::info!(title = rss_item.title, "Scraping bangumi homepage...");
            if !bangumi_homepage_doppel_path.exists_any() {
                let bangumi_homepage_data =
                    fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
                bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
                tracing::info!(title = rss_item.title, "Bangumi homepage saved");
            } else {
                tracing::info!(title = rss_item.title, "Bangumi homepage already exists");
            };
        }
        {
            let bangumi_rss_url = episode_homepage_meta
                .bangumi_hash()
                .build_rss_url(mikan_base_url.clone());
            let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
            tracing::info!(title = rss_item.title, "Scraping bangumi rss...");
            let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
                let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
                bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
                tracing::info!(title = rss_item.title, "Bangumi rss saved");
                bangumi_rss_data
            } else {
                tracing::info!(title = rss_item.title, "Bangumi rss already exists");
                String::from_utf8(bangumi_rss_doppel_path.read()?)?
            };

            let channel = rss::Channel::read_from(bangumi_rss_data.as_bytes())?;
            let rss_items: Vec<MikanRssEpisodeItem> = channel
                .items
                .into_iter()
                .map(MikanRssEpisodeItem::try_from)
                .collect::<Result<Vec<_>, _>>()?;
            for rss_item in rss_items {
                {
                    tracing::info!(title = rss_item.title, "Scraping episode homepage...");
                    let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
                    let episode_homepage_doppel_path =
                        MikanDoppelPath::new(episode_homepage_url.clone());
                    if !episode_homepage_doppel_path.exists_any() {
                        let episode_homepage_data =
                            fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
                        episode_homepage_doppel_path.write(&episode_homepage_data)?;
                        tracing::info!(title = rss_item.title, "Episode homepage saved");
                    } else {
                        tracing::info!(title = rss_item.title, "Episode homepage already exists");
                    };
                };

                {
                    let episode_torrent_url = rss_item.torrent_link;
                    let episode_torrent_doppel_path =
                        MikanDoppelPath::new(episode_torrent_url.clone());
                    tracing::info!(title = rss_item.title, "Scraping episode torrent...");
                    if !episode_torrent_doppel_path.exists_any() {
                        match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                            Ok(episode_torrent_data) => {
                                episode_torrent_doppel_path.write(&episode_torrent_data)?;
                                tracing::info!(title = rss_item.title, "Episode torrent saved");
                            }
                            Err(e) => {
                                if let FetchError::ReqwestError { source } = &e
                                    && source.status().is_some_and(|status| {
                                        status == reqwest::StatusCode::NOT_FOUND
                                    })
                                {
                                    tracing::warn!(
                                        title = rss_item.title,
                                        "Episode torrent not found, maybe deleted since new \
                                         version"
                                    );
                                    episode_torrent_doppel_path
                                        .write_meta(MikanDoppelMeta { status: 404 })?;
                                } else {
                                    Err(e)?;
                                }
                            }
                        }

                        tracing::info!(title = rss_item.title, "Episode torrent saved");
                    } else {
                        tracing::info!(title = rss_item.title, "Episode torrent already exists");
                    }
                }
            }
        }
    }
    tracing::info!("Scraping subscriber subscription done");
    Ok(())
}
@@ -1,56 +1,33 @@
-use recorder::errors::RResult;
-// #![allow(unused_imports)]
-// use recorder::{
-//     app::{AppContext, AppContextTrait},
-//     errors::RResult,
-//     migrations::Migrator,
-//     models::{
-//         subscribers::SEED_SUBSCRIBER,
-//         subscriptions::{self, SubscriptionCreateFromRssDto},
-//     },
-// };
-// use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
-// use sea_orm_migration::MigratorTrait;
-
-// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RResult<()> {
-//     let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
-//     // let rss_link =
-//     //     "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
-//     let subscription = if let Some(subscription) =
-//         subscriptions::Entity::find()
-//             .filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
-//             .one(ctx.db())
-//             .await?
-//     {
-//         subscription
-//     } else {
-//         subscriptions::Model::add_subscription(
-//             ctx,
-//             subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
-//                 rss_link: rss_link.to_string(),
-//                 display_name: String::from("Mikan Project - 我的番组"),
-//                 enabled: Some(true),
-//             }),
-//             1,
-//         )
-//         .await?
-//     };
-
-//     subscription.pull_subscription(ctx).await?;
-
-//     Ok(())
-// }
-
-// #[tokio::main]
-// async fn main() -> RResult<()> {
-//     pull_mikan_bangumi_rss(&ctx).await?;
-
-//     Ok(())
-// }
-
+#![feature(duration_constructors_lite)]
+use std::{sync::Arc, time::Duration};
+
+use apalis_sql::postgres::PostgresStorage;
+use recorder::{
+    app::AppContextTrait,
+    errors::RecorderResult,
+    test_utils::{
+        app::TestingAppContext,
+        database::{TestingDatabaseServiceConfig, build_testing_database_service},
+    },
+};
+
 #[tokio::main]
-async fn main() -> RResult<()> {
+async fn main() -> RecorderResult<()> {
+    let app_ctx = {
+        let db_service = build_testing_database_service(TestingDatabaseServiceConfig {
+            auto_migrate: false,
+        })
+        .await?;
+        Arc::new(TestingAppContext::builder().db(db_service).build())
+    };
+
+    let db = app_ctx.db();
+
+    PostgresStorage::setup(db.get_postgres_connection_pool()).await?;
+
+    dbg!(db.get_postgres_connection_pool().connect_options());
+
+    tokio::time::sleep(Duration::from_hours(1)).await;
+
     Ok(())
 }
@@ -1,7 +0,0 @@
-<html>
-
-<body>
-not found :-(
-</body>
-
-</html>
@@ -26,25 +26,25 @@ host = '{{ get_env(name="HOST", default="localhost") }}'
 enable = true
 
 # Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
-[server.middleware.request_id]
+[server.middlewares.request_id]
 enable = true
 
-[server.middleware.logger]
+[server.middlewares.logger]
 enable = true
 
 # when your code is panicked, the request still returns 500 status code.
-[server.middleware.catch_panic]
+[server.middlewares.catch_panic]
 enable = true
 
 # Timeout for incoming requests middleware. requests that take more time from the configuration will cute and 408 status code will returned.
-[server.middleware.timeout_request]
+[server.middlewares.timeout_request]
 enable = false
 # Duration time in milliseconds.
 timeout = 5000
 
 # Set the value of the [`Access-Control-Allow-Origin`][mdn] header
 # allow_origins:
-# - https://loco.rs
+# - https://konobangu.com
 # Set the value of the [`Access-Control-Allow-Headers`][mdn] header
 # allow_headers:
 # - Content-Type
@@ -53,7 +53,10 @@ timeout = 5000
 # - POST
 # Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
 # max_age: 3600
-[server.middleware.cors]
+[server.middlewares.cors]
+enable = true
+
+[server.middlewares.compression]
 enable = true
 
 # Database Configuration
@@ -86,6 +89,14 @@ leaky_bucket_initial_tokens = 1
 leaky_bucket_refill_tokens = 1
 leaky_bucket_refill_interval = 500
 
+
+[mikan.http_client.proxy]
+server = '{{ get_env(name="MIKAN_PROXY", default = "") }}'
+auth_header = '{{ get_env(name="MIKAN_PROXY_AUTH_HEADER", default = "") }}'
+no_proxy = '{{ get_env(name="MIKAN_NO_PROXY", default = "") }}'
+accept_invalid_certs = '{{ get_env(name="MIKAN_PROXY_ACCEPT_INVALID_CERTS", default = "false") }}'
+
+
 [auth]
 auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
 basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
@@ -1,9 +1,7 @@
-use std::sync::Arc;
-
 use clap::{Parser, command};
 
 use super::{AppContext, core::App, env::Environment};
-use crate::{app::config::AppConfig, errors::RResult};
+use crate::{app::config::AppConfig, errors::RecorderResult};
 
 #[derive(Parser, Debug)]
 #[command(version, about, long_about = None)]
@@ -23,6 +21,9 @@ pub struct MainCliArgs {
     /// Explicit environment
     #[arg(short, long)]
     environment: Option<Environment>,
+
+    #[arg(long)]
+    graceful_shutdown: Option<bool>,
 }
 
 pub struct AppBuilder {
@@ -30,10 +31,11 @@ pub struct AppBuilder {
     config_file: Option<String>,
     working_dir: String,
     environment: Environment,
+    pub graceful_shutdown: bool,
 }
 
 impl AppBuilder {
-    pub async fn from_main_cli(environment: Option<Environment>) -> RResult<Self> {
+    pub async fn from_main_cli(environment: Option<Environment>) -> RecorderResult<Self> {
         let args = MainCliArgs::parse();
 
         let environment = environment.unwrap_or_else(|| {
@@ -63,34 +65,44 @@ impl AppBuilder {
         builder = builder
             .config_file(args.config_file)
             .dotenv_file(args.dotenv_file)
-            .environment(environment);
+            .environment(environment)
+            .graceful_shutdown(args.graceful_shutdown.unwrap_or(true));
 
         Ok(builder)
     }
 
-    pub async fn build(self) -> RResult<App> {
+    pub async fn build(self) -> RecorderResult<App> {
+        self.load_env().await?;
+
+        let config = self.load_config().await?;
+
+        let app_context =
+            AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?;
+
+        Ok(App {
+            context: app_context,
+            builder: self,
+        })
+    }
+
+    pub async fn load_env(&self) -> RecorderResult<()> {
         AppConfig::load_dotenv(
             &self.environment,
             &self.working_dir,
             self.dotenv_file.as_deref(),
         )
         .await?;
+        Ok(())
+    }
 
+    pub async fn load_config(&self) -> RecorderResult<AppConfig> {
         let config = AppConfig::load_config(
             &self.environment,
             &self.working_dir,
             self.config_file.as_deref(),
        )
         .await?;
 
-        let app_context = Arc::new(
-            AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?,
-        );
-
-        Ok(App {
-            context: app_context,
-            builder: self,
-        })
+        Ok(config)
     }
 
@@ -111,6 +123,12 @@ impl AppBuilder {
         ret
     }
 
+    pub fn graceful_shutdown(self, graceful_shutdown: bool) -> Self {
+        let mut ret = self;
+        ret.graceful_shutdown = graceful_shutdown;
+        ret
+    }
+
     pub fn dotenv_file(self, dotenv_file: Option<String>) -> Self {
         let mut ret = self;
         ret.dotenv_file = dotenv_file;
@@ -134,6 +152,7 @@ impl Default for AppBuilder {
             dotenv_file: None,
             config_file: None,
             working_dir: String::from("."),
+            graceful_shutdown: true,
         }
     }
 }
@@ -11,8 +11,21 @@ leaky_bucket_initial_tokens = 0
 leaky_bucket_refill_tokens = 1
 leaky_bucket_refill_interval = 500
 
+
+[mikan.http_client.proxy]
+
+[mikan.http_client.proxy.headers]
+
 [graphql]
 depth_limit = inf
 complexity_limit = inf
 
 [cache]
+
+[crypto]
+
+[task]
+
+[message]
+
+[media]
@@ -9,9 +9,10 @@ use serde::{Deserialize, Serialize};
 
 use super::env::Environment;
 use crate::{
-    auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::RResult,
-    extract::mikan::MikanConfig, graphql::GraphQLConfig, logger::LoggerConfig,
-    storage::StorageConfig, web::WebServerConfig,
+    auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
+    errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
+    logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
+    task::TaskConfig, web::WebServerConfig,
 };
 
 const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
@@ -24,9 +25,13 @@ pub struct AppConfig {
     pub auth: AuthConfig,
     pub storage: StorageConfig,
     pub mikan: MikanConfig,
+    pub crypto: CryptoConfig,
     pub graphql: GraphQLConfig,
+    pub media: MediaConfig,
     pub logger: LoggerConfig,
     pub database: DatabaseConfig,
+    pub task: TaskConfig,
+    pub message: MessageConfig,
 }
 
 impl AppConfig {
@@ -64,7 +69,7 @@ impl AppConfig {
         fig: Figment,
         filepath: impl AsRef<Path>,
         ext: &str,
-    ) -> RResult<Figment> {
+    ) -> RecorderResult<Figment> {
         let content = fs::read_to_string(filepath)?;
 
         let rendered = tera::Tera::one_off(
@@ -85,7 +90,7 @@ impl AppConfig {
         environment: &Environment,
         working_dir: &str,
         dotenv_file: Option<&str>,
-    ) -> RResult<()> {
+    ) -> RecorderResult<()> {
         let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
             vec![dotenv_file]
         } else {
@@ -106,12 +111,12 @@ impl AppConfig {
                 for f in try_filenames.iter() {
                     let p = try_dotenv_file_or_dir_path.join(f);
                     if p.exists() && p.is_file() {
-                        dotenv::from_path(p)?;
+                        dotenvy::from_path(p)?;
                         break;
                     }
                 }
             } else if try_dotenv_file_or_dir_path.is_file() {
-                dotenv::from_path(try_dotenv_file_or_dir_path)?;
+                dotenvy::from_path(try_dotenv_file_or_dir_path)?;
                 break;
             }
         }
@@ -124,7 +129,7 @@ impl AppConfig {
         environment: &Environment,
         working_dir: &str,
         config_file: Option<&str>,
-    ) -> RResult<AppConfig> {
+    ) -> RecorderResult<AppConfig> {
         let try_config_file_or_dirs = if config_file.is_some() {
             vec![config_file]
         } else {
@@ -140,7 +145,7 @@ impl AppConfig {
             .flat_map(|ps| {
                 allowed_extensions
                     .iter()
-                    .map(move |ext| (format!("{}{}{}", convention_prefix, ps, ext), ext))
+                    .map(move |ext| (format!("{convention_prefix}{ps}{ext}"), ext))
             })
             .collect_vec();
 
@@ -1,11 +1,16 @@
+use std::{fmt::Debug, sync::Arc};
+
+use tokio::sync::OnceCell;
+
 use super::{Environment, config::AppConfig};
 use crate::{
-    auth::AuthService, cache::CacheService, database::DatabaseService, errors::RResult,
-    extract::mikan::MikanClient, graphql::GraphQLService, logger::LoggerService,
-    storage::StorageService,
+    auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
+    errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
+    logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
+    task::TaskService,
 };
 
-pub trait AppContextTrait: Send + Sync {
+pub trait AppContextTrait: Send + Sync + Debug {
     fn logger(&self) -> &LoggerService;
     fn db(&self) -> &DatabaseService;
     fn config(&self) -> &AppConfig;
@@ -16,6 +21,10 @@ pub trait AppContextTrait: Send + Sync {
     fn storage(&self) -> &StorageService;
     fn working_dir(&self) -> &String;
     fn environment(&self) -> &Environment;
+    fn crypto(&self) -> &CryptoService;
+    fn task(&self) -> &TaskService;
+    fn message(&self) -> &MessageService;
+    fn media(&self) -> &MediaService;
 }
 
 pub struct AppContext {
@@ -25,10 +34,14 @@ pub struct AppContext {
     cache: CacheService,
     mikan: MikanClient,
     auth: AuthService,
-    graphql: GraphQLService,
     storage: StorageService,
+    crypto: CryptoService,
     working_dir: String,
     environment: Environment,
+    message: MessageService,
+    media: MediaService,
+    task: OnceCell<TaskService>,
+    graphql: OnceCell<GraphQLService>,
 }
 
 impl AppContext {
@@ -36,18 +49,20 @@ impl AppContext {
         environment: Environment,
         config: AppConfig,
         working_dir: impl ToString,
-    ) -> RResult<Self> {
+    ) -> RecorderResult<Arc<Self>> {
         let config_cloned = config.clone();
 
         let logger = LoggerService::from_config(config.logger).await?;
         let cache = CacheService::from_config(config.cache).await?;
         let db = DatabaseService::from_config(config.database).await?;
         let storage = StorageService::from_config(config.storage).await?;
+        let message = MessageService::from_config(config.message).await?;
         let auth = AuthService::from_conf(config.auth).await?;
         let mikan = MikanClient::from_config(config.mikan).await?;
-        let graphql = GraphQLService::from_config_and_database(config.graphql, db.clone()).await?;
+        let crypto = CryptoService::from_config(config.crypto).await?;
+        let media = MediaService::from_config(config.media).await?;
 
-        Ok(AppContext {
+        let ctx = Arc::new(AppContext {
             config: config_cloned,
             environment,
             logger,
@@ -57,10 +72,35 @@ impl AppContext {
             storage,
             mikan,
             working_dir: working_dir.to_string(),
-            graphql,
-        })
+            crypto,
+            message,
+            media,
+            task: OnceCell::new(),
+            graphql: OnceCell::new(),
+        });
+
+        ctx.task
+            .get_or_try_init(async || {
+                TaskService::from_config_and_ctx(config.task, ctx.clone()).await
+            })
+            .await?;
+
+        ctx.graphql
+            .get_or_try_init(async || {
+                GraphQLService::from_config_and_ctx(config.graphql, ctx.clone()).await
+            })
+            .await?;
+
+        Ok(ctx)
     }
 }
+
+impl Debug for AppContext {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "AppContext")
+    }
+}
 
 impl AppContextTrait for AppContext {
     fn logger(&self) -> &LoggerService {
         &self.logger
@@ -81,7 +121,7 @@ impl AppContextTrait for AppContext {
         &self.auth
     }
     fn graphql(&self) -> &GraphQLService {
-        &self.graphql
+        self.graphql.get().expect("graphql should be set")
     }
     fn storage(&self) -> &StorageService {
         &self.storage
@@ -92,4 +132,16 @@ impl AppContextTrait for AppContext {
     fn environment(&self) -> &Environment {
         &self.environment
     }
+    fn crypto(&self) -> &CryptoService {
+        &self.crypto
+    }
+    fn task(&self) -> &TaskService {
+        self.task.get().expect("task should be set")
+    }
+    fn message(&self) -> &MessageService {
+        &self.message
+    }
+    fn media(&self) -> &MediaService {
+        &self.media
+    }
 }
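A self-contained sketch (hypothetical types, not from the changeset) of the two-phase initialization pattern used by AppContext above: the context is first placed in an Arc with the late-bound slots empty, and services that need a handle back to the context are then filled in through tokio's OnceCell.

use std::sync::Arc;
use tokio::sync::OnceCell;

struct TaskLike {
    // the service keeps a handle back to the context it lives inside
    ctx: Arc<Ctx>,
}

struct Ctx {
    task: OnceCell<TaskLike>,
}

async fn build_ctx() -> Arc<Ctx> {
    // phase 1: allocate the context with the late-bound slot still empty
    let ctx = Arc::new(Ctx { task: OnceCell::new() });
    // phase 2: build the service, handing it a clone of the Arc'd context
    ctx.task
        .get_or_init(|| async { TaskLike { ctx: ctx.clone() } })
        .await;
    ctx
}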
@@ -1,18 +1,20 @@
 use std::{net::SocketAddr, sync::Arc};
 
 use axum::Router;
-use futures::try_join;
-use tokio::signal;
+use tokio::{net::TcpSocket, signal};
+use tracing::instrument;
 
 use super::{builder::AppBuilder, context::AppContextTrait};
 use crate::{
-    errors::RResult,
+    errors::{RecorderError, RecorderResult},
     web::{
         controller::{self, core::ControllerTrait},
         middleware::default_middleware_stack,
     },
 };
 
+pub const PROJECT_NAME: &str = "konobangu";
+
 pub struct App {
     pub context: Arc<dyn AppContextTrait>,
     pub builder: AppBuilder,
@@ -23,43 +25,104 @@ impl App {
         AppBuilder::default()
     }
 
-    pub async fn serve(&self) -> RResult<()> {
+    #[instrument(err, skip(self))]
+    pub async fn serve(&self) -> RecorderResult<()> {
         let context = &self.context;
         let config = context.config();
-        let listener = tokio::net::TcpListener::bind(&format!(
-            "{}:{}",
-            config.server.binding, config.server.port
-        ))
-        .await?;
+
+        let listener = {
+            let addr: SocketAddr =
+                format!("{}:{}", config.server.binding, config.server.port).parse()?;
+
+            let socket = if addr.is_ipv4() {
+                TcpSocket::new_v4()
+            } else {
+                TcpSocket::new_v6()
+            }?;
+
+            socket.set_reuseaddr(true)?;
+
+            #[cfg(all(unix, not(target_os = "solaris")))]
+            if let Err(e) = socket.set_reuseport(true) {
+                tracing::warn!("Failed to set SO_REUSEPORT: {}", e);
+            }
+
+            socket.bind(addr)?;
+            socket.listen(1024)
+        }?;
+
         let mut router = Router::<Arc<dyn AppContextTrait>>::new();
 
-        let (graphql_c, oidc_c, metadata_c) = try_join!(
+        let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
             controller::graphql::create(context.clone()),
             controller::oidc::create(context.clone()),
-            controller::metadata::create(context.clone())
+            controller::metadata::create(context.clone()),
+            controller::r#static::create(context.clone()),
+            controller::feeds::create(context.clone()),
         )?;
 
-        for c in [graphql_c, oidc_c, metadata_c] {
+        for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
             router = c.apply_to(router);
         }
 
         let middlewares = default_middleware_stack(context.clone());
         for mid in middlewares {
-            router = mid.apply(router)?;
-            tracing::info!(name = mid.name(), "+middleware");
+            if mid.is_enabled() {
+                router = mid.apply(router)?;
+                tracing::info!(name = mid.name(), "+middleware");
+            }
         }
 
         let router = router
             .with_state(context.clone())
             .into_make_service_with_connect_info::<SocketAddr>();
 
-        axum::serve(listener, router)
-            .with_graceful_shutdown(async move {
-                Self::shutdown_signal().await;
-                tracing::info!("shutting down...");
-            })
-            .await?;
+        let task = context.task();
+
+        let graceful_shutdown = self.builder.graceful_shutdown;
+
+        tokio::try_join!(
+            async {
+                let axum_serve = axum::serve(listener, router);
+
+                if graceful_shutdown {
+                    axum_serve
+                        .with_graceful_shutdown(async move {
+                            Self::shutdown_signal().await;
+                            tracing::info!("axum shutting down...");
+                        })
+                        .await?;
+                } else {
+                    axum_serve.await?;
+                }
+
+                Ok::<(), RecorderError>(())
+            },
+            async {
+                {
+                    let monitor = task.setup_monitor().await?;
+                    if graceful_shutdown {
+                        monitor
+                            .run_with_signal(async move {
+                                Self::shutdown_signal().await;
+                                tracing::info!("apalis shutting down...");
+                                Ok(())
+                            })
+                            .await?;
+                    } else {
+                        monitor.run().await?;
+                    }
+                }
+
+                Ok::<(), RecorderError>(())
+            },
+            async {
+                let listener = task.setup_listener().await?;
+                listener.listen().await?;
+
+                Ok::<(), RecorderError>(())
+            }
+        )?;
 
         Ok(())
     }
@@ -79,12 +142,25 @@ impl App {
                .await;
        };
 
+        #[cfg(all(unix, debug_assertions))]
+        let quit = async {
+            signal::unix::signal(signal::unix::SignalKind::quit())
+                .expect("Failed to install SIGQUIT handler")
+                .recv()
+                .await;
+            println!("Received SIGQUIT");
+        };
+
         #[cfg(not(unix))]
         let terminate = std::future::pending::<()>();
 
+        #[cfg(not(all(unix, debug_assertions)))]
+        let quit = std::future::pending::<()>();
+
         tokio::select! {
             () = ctrl_c => {},
             () = terminate => {},
+            () = quit => {},
         }
     }
 }
@@ -4,7 +4,7 @@ pub mod context;
 pub mod core;
 pub mod env;
 
-pub use core::App;
+pub use core::{App, PROJECT_NAME};
 
 pub use builder::AppBuilder;
 pub use config::AppConfig;
@@ -1,7 +1,7 @@
 use async_trait::async_trait;
 use axum::http::{HeaderValue, request::Parts};
 use base64::{self, Engine};
-use reqwest::header::AUTHORIZATION;
+use http::header::AUTHORIZATION;
 
 use super::{
     config::BasicAuthConfig,
@@ -9,7 +9,7 @@ use super::{
     service::{AuthServiceTrait, AuthUserInfo},
 };
 use crate::{
-    app::AppContextTrait,
+    app::{AppContextTrait, PROJECT_NAME},
     models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
 };
 
@@ -71,24 +71,22 @@ impl AuthServiceTrait for BasicAuthService {
             user: found_user,
             password: found_password,
         }) = AuthBasic::decode_request_parts(request)
+            && self.config.user == found_user
+            && self.config.password == found_password.unwrap_or_default()
         {
-            if self.config.user == found_user
-                && self.config.password == found_password.unwrap_or_default()
-            {
-                let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
-                    .await
-                    .map_err(|_| AuthError::FindAuthRecordError)?;
-                return Ok(AuthUserInfo {
-                    subscriber_auth,
-                    auth_type: AuthType::Basic,
-                });
-            }
+            let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
+                .await
+                .map_err(|_| AuthError::FindAuthRecordError)?;
+            return Ok(AuthUserInfo {
+                subscriber_auth,
+                auth_type: AuthType::Basic,
+            });
         }
         Err(AuthError::BasicInvalidCredentials)
     }
 
     fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
-        Some(HeaderValue::from_static(r#"Basic realm="konobangu""#))
+        Some(HeaderValue::from_str(format!("Basic realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
     }
 
     fn auth_type(&self) -> AuthType {
@@ -1,6 +1,8 @@
-use jwt_authorizer::OneOrArray;
+use std::collections::HashMap;
+
+use jwtk::OneOrMany;
 use serde::{Deserialize, Serialize};
-use serde_with::{NoneAsEmptyString, serde_as};
+use serde_with::serde_as;
 
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct BasicAuthConfig {
@@ -22,13 +24,9 @@ pub struct OidcAuthConfig {
     #[serde(rename = "oidc_client_secret")]
     pub client_secret: String,
     #[serde(rename = "oidc_extra_scopes")]
-    pub extra_scopes: Option<OneOrArray<String>>,
-    #[serde_as(as = "NoneAsEmptyString")]
-    #[serde(rename = "oidc_extra_claim_key")]
-    pub extra_claim_key: Option<String>,
-    #[serde(rename = "oidc_extra_claim_value")]
-    #[serde_as(as = "NoneAsEmptyString")]
-    pub extra_claim_value: Option<String>,
+    pub extra_scopes: Option<OneOrMany<String>>,
+    #[serde(rename = "oidc_extra_claims")]
+    pub extra_claims: Option<HashMap<String, Option<String>>>,
 }
 
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
@@ -4,6 +4,7 @@ use axum::{
|
|||||||
http::StatusCode,
|
http::StatusCode,
|
||||||
response::{IntoResponse, Response},
|
response::{IntoResponse, Response},
|
||||||
};
|
};
|
||||||
|
use fetch::HttpClientError;
|
||||||
use openidconnect::{
|
use openidconnect::{
|
||||||
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
|
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
|
||||||
StandardErrorResponse, core::CoreErrorResponseType,
|
StandardErrorResponse, core::CoreErrorResponseType,
|
||||||
@@ -11,11 +12,13 @@ use openidconnect::{
|
|||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use snafu::prelude::*;
|
use snafu::prelude::*;
|
||||||
|
|
||||||
use crate::{fetch::HttpClientError, models::auth::AuthType};
|
use crate::models::auth::AuthType;
|
||||||
|
|
||||||
#[derive(Debug, Snafu)]
|
#[derive(Debug, Snafu)]
|
||||||
#[snafu(visibility(pub(crate)))]
|
#[snafu(visibility(pub(crate)))]
|
||||||
pub enum AuthError {
|
pub enum AuthError {
|
||||||
|
#[snafu(display("Permission denied"))]
|
||||||
|
PermissionError,
|
||||||
#[snafu(display("Not support auth method"))]
|
#[snafu(display("Not support auth method"))]
|
||||||
NotSupportAuthMethod {
|
NotSupportAuthMethod {
|
||||||
supported: Vec<AuthType>,
|
supported: Vec<AuthType>,
|
||||||
@@ -25,10 +28,6 @@ pub enum AuthError {
|
|||||||
FindAuthRecordError,
|
FindAuthRecordError,
|
||||||
#[snafu(display("Invalid credentials"))]
|
#[snafu(display("Invalid credentials"))]
|
||||||
BasicInvalidCredentials,
|
BasicInvalidCredentials,
|
||||||
#[snafu(transparent)]
|
|
||||||
OidcInitError {
|
|
||||||
source: jwt_authorizer::error::InitError,
|
|
||||||
},
|
|
||||||
#[snafu(display("Invalid oidc provider meta client error: {source}"))]
|
#[snafu(display("Invalid oidc provider meta client error: {source}"))]
|
||||||
OidcProviderHttpClientError { source: HttpClientError },
|
OidcProviderHttpClientError { source: HttpClientError },
|
||||||
#[snafu(transparent)]
|
#[snafu(transparent)]
|
||||||
@@ -64,8 +63,10 @@ pub enum AuthError {
|
|||||||
OidcSignatureVerificationError { source: SignatureVerificationError },
|
OidcSignatureVerificationError { source: SignatureVerificationError },
|
||||||
#[snafu(transparent)]
|
#[snafu(transparent)]
|
||||||
OidcSigningError { source: SigningError },
|
OidcSigningError { source: SigningError },
|
||||||
|
#[snafu(display("Missing Bearer token"))]
|
||||||
|
OidcMissingBearerToken,
|
||||||
#[snafu(transparent)]
|
#[snafu(transparent)]
|
||||||
OidcJwtAuthError { source: jwt_authorizer::AuthError },
|
OidcJwtkError { source: jwtk::Error },
|
||||||
#[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
|
#[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
|
||||||
OidcExtraScopesMatchError { expected: String, found: String },
|
OidcExtraScopesMatchError { expected: String, found: String },
|
||||||
#[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
|
#[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
|
||||||
@@ -86,7 +87,7 @@ pub enum AuthError {
|
|||||||
(if column.is_empty() { "" } else { "." }),
|
(if column.is_empty() { "" } else { "." }),
|
||||||
source.message
|
source.message
|
||||||
))]
|
))]
|
||||||
GraphQLPermissionError {
|
GraphqlDynamicPermissionError {
|
||||||
#[snafu(source(false))]
|
#[snafu(source(false))]
|
||||||
source: Box<async_graphql::Error>,
|
source: Box<async_graphql::Error>,
|
||||||
field: String,
|
field: String,
|
||||||
@@ -96,13 +97,13 @@ pub enum AuthError {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl AuthError {
|
impl AuthError {
|
||||||
pub fn from_graphql_subscribe_id_guard(
|
pub fn from_graphql_dynamic_subscribe_id_guard(
|
||||||
source: async_graphql::Error,
|
source: async_graphql::Error,
|
||||||
context: &ResolverContext,
|
context: &ResolverContext,
|
||||||
field_name: &str,
|
field_name: &str,
|
||||||
column_name: &str,
|
column_name: &str,
|
||||||
) -> AuthError {
|
) -> AuthError {
|
||||||
AuthError::GraphQLPermissionError {
|
AuthError::GraphqlDynamicPermissionError {
|
||||||
source: Box::new(source),
|
source: Box::new(source),
|
||||||
field: field_name.to_string(),
|
field: field_name.to_string(),
|
||||||
column: column_name.to_string(),
|
column: column_name.to_string(),
|
||||||
|
@@ -9,7 +9,7 @@ use axum::{

 use crate::{app::AppContextTrait, auth::AuthServiceTrait};

-pub async fn header_www_authenticate_middleware(
+pub async fn auth_middleware(
     State(ctx): State<Arc<dyn AppContextTrait>>,
     request: Request,
     next: Next,
@@ -7,5 +7,5 @@ pub mod service;

 pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig};
 pub use errors::AuthError;
-pub use middleware::header_www_authenticate_middleware;
+pub use middleware::auth_middleware;
 pub use service::{AuthService, AuthServiceTrait, AuthUserInfo};
@@ -1,12 +1,20 @@
 use std::{
     collections::{HashMap, HashSet},
+    future::Future,
+    ops::Deref,
+    pin::Pin,
     sync::Arc,
 };

 use async_trait::async_trait;
-use axum::http::{HeaderValue, request::Parts};
+use axum::{
+    http,
+    http::{HeaderValue, request::Parts},
+};
+use fetch::{HttpClient, client::HttpClientError};
+use http::header::AUTHORIZATION;
 use itertools::Itertools;
-use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer};
+use jwtk::jwk::RemoteJwksVerifier;
 use moka::future::Cache;
 use openidconnect::{
     AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
@@ -24,25 +32,56 @@ use super::{
     errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
     service::{AuthServiceTrait, AuthUserInfo},
 };
-use crate::{app::AppContextTrait, errors::RError, fetch::HttpClient, models::auth::AuthType};
+use crate::{
+    app::{AppContextTrait, PROJECT_NAME},
+    errors::RecorderError,
+    models::auth::AuthType,
+};

+pub struct OidcHttpClient(pub Arc<HttpClient>);
+
+impl Deref for OidcHttpClient {
+    type Target = HttpClient;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<'c> openidconnect::AsyncHttpClient<'c> for OidcHttpClient {
+    type Error = HttpClientError;
+
+    #[cfg(target_arch = "wasm32")]
+    type Future =
+        Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + 'c>>;
+    #[cfg(not(target_arch = "wasm32"))]
+    type Future =
+        Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;
+
+    fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
+        Box::pin(async move {
+            let response = self.execute(request.try_into()?).await?;
+
+            let mut builder = http::Response::builder().status(response.status());
+
+            #[cfg(not(target_arch = "wasm32"))]
+            {
+                builder = builder.version(response.version());
+            }
+
+            for (name, value) in response.headers().iter() {
+                builder = builder.header(name, value);
+            }
+
+            builder
+                .body(response.bytes().await?.to_vec())
+                .map_err(HttpClientError::from)
+        })
+    }
+}
+
 #[derive(Deserialize, Serialize, Clone, Debug)]
 pub struct OidcAuthClaims {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub iss: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub sub: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub aud: Option<OneOrArray<String>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub exp: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub nbf: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub iat: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub jti: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
     pub scope: Option<String>,
     #[serde(flatten)]
     pub custom: HashMap<String, Value>,
@@ -52,40 +91,6 @@ impl OidcAuthClaims {
     pub fn scopes(&self) -> std::str::Split<'_, char> {
         self.scope.as_deref().unwrap_or_default().split(',')
     }
-
-    pub fn get_claim(&self, key: &str) -> Option<String> {
-        match key {
-            "iss" => self.iss.clone(),
-            "sub" => self.sub.clone(),
-            "aud" => self.aud.as_ref().map(|s| s.iter().join(",")),
-            "exp" => self.exp.clone().map(|s| s.0.to_string()),
-            "nbf" => self.nbf.clone().map(|s| s.0.to_string()),
-            "iat" => self.iat.clone().map(|s| s.0.to_string()),
-            "jti" => self.jti.clone(),
-            "scope" => self.scope.clone(),
-            key => self.custom.get(key).map(|s| s.to_string()),
-        }
-    }
-
-    pub fn has_claim(&self, key: &str) -> bool {
-        match key {
-            "iss" => self.iss.is_some(),
-            "sub" => self.sub.is_some(),
-            "aud" => self.aud.is_some(),
-            "exp" => self.exp.is_some(),
-            "nbf" => self.nbf.is_some(),
-            "iat" => self.iat.is_some(),
-            "jti" => self.jti.is_some(),
-            "scope" => self.scope.is_some(),
-            key => self.custom.contains_key(key),
-        }
-    }
-
-    pub fn contains_audience(&self, aud: &str) -> bool {
-        self.aud
-            .as_ref()
-            .is_some_and(|arr| arr.iter().any(|s| s == aud))
-    }
 }

 #[derive(Debug, Clone, Serialize)]
@@ -115,8 +120,8 @@ pub struct OidcAuthCallbackPayload {

 pub struct OidcAuthService {
     pub config: OidcAuthConfig,
-    pub api_authorizer: Authorizer<OidcAuthClaims>,
-    pub oidc_provider_client: HttpClient,
+    pub jwk_verifier: RemoteJwksVerifier,
+    pub oidc_provider_client: Arc<HttpClient>,
     pub oidc_request_cache: Cache<String, OidcAuthRequest>,
 }

@@ -125,9 +130,10 @@ impl OidcAuthService {
         &self,
         redirect_uri: &str,
     ) -> Result<OidcAuthRequest, AuthError> {
+        let oidc_provider_client = OidcHttpClient(self.oidc_provider_client.clone());
         let provider_metadata = CoreProviderMetadata::discover_async(
             IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
-            &self.oidc_provider_client,
+            &oidc_provider_client,
         )
         .await?;

@@ -201,6 +207,7 @@ impl OidcAuthService {
         &self,
         query: OidcAuthCallbackQuery,
     ) -> Result<OidcAuthCallbackPayload, AuthError> {
+        let oidc_http_client = OidcHttpClient(self.oidc_provider_client.clone());
         let csrf_token = query.state.ok_or(AuthError::OidcInvalidStateError)?;

         let code = query.code.ok_or(AuthError::OidcInvalidCodeError)?;
@@ -209,7 +216,7 @@ impl OidcAuthService {

         let provider_metadata = CoreProviderMetadata::discover_async(
             IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
-            &self.oidc_provider_client,
+            &oidc_http_client,
         )
         .await?;

@@ -225,7 +232,7 @@ impl OidcAuthService {
         let token_response = oidc_client
             .exchange_code(AuthorizationCode::new(code))?
             .set_pkce_verifier(pkce_verifier)
-            .request_async(&HttpClient::default())
+            .request_async(&oidc_http_client)
             .await?;

         let id_token = token_response
@@ -246,10 +253,10 @@ impl OidcAuthService {
             id_token.signing_key(id_token_verifier)?,
         )?;

-        if let Some(expected_access_token_hash) = claims.access_token_hash() {
-            if actual_access_token_hash != *expected_access_token_hash {
-                return Err(AuthError::OidcInvalidAccessTokenError);
-            }
+        if let Some(expected_access_token_hash) = claims.access_token_hash()
+            && actual_access_token_hash != *expected_access_token_hash
+        {
+            return Err(AuthError::OidcInvalidAccessTokenError);
         }

         Ok(OidcAuthCallbackPayload {
@@ -266,56 +273,80 @@ impl AuthServiceTrait for OidcAuthService {
         request: &mut Parts,
     ) -> Result<AuthUserInfo, AuthError> {
         let config = &self.config;
-        let token = self
-            .api_authorizer
-            .extract_token(&request.headers)
-            .ok_or(jwt_authorizer::AuthError::MissingToken())?;
+        let token = request
+            .headers
+            .get(AUTHORIZATION)
+            .and_then(|authorization| {
+                authorization
+                    .to_str()
+                    .ok()
+                    .and_then(|s| s.strip_prefix("Bearer "))
+            })
+            .ok_or(AuthError::OidcMissingBearerToken)?;

-        let token_data = self.api_authorizer.check_auth(&token).await?;
-        let claims = token_data.claims;
+        let token_data = self.jwk_verifier.verify::<OidcAuthClaims>(token).await?;
+        let claims = token_data.claims();
         let sub = if let Some(sub) = claims.sub.as_deref() {
             sub
         } else {
             return Err(AuthError::OidcSubMissingError);
         };
-        if !claims.contains_audience(&config.audience) {
+        if !claims.aud.iter().any(|aud| aud == &config.audience) {
             return Err(AuthError::OidcAudMissingError {
                 aud: config.audience.clone(),
             });
         }
+        let extra_claims = &claims.extra;
         if let Some(expected_scopes) = config.extra_scopes.as_ref() {
-            let found_scopes = claims.scopes().collect::<HashSet<_>>();
+            let found_scopes = extra_claims.scopes().collect::<HashSet<_>>();
             if !expected_scopes
                 .iter()
                 .all(|es| found_scopes.contains(es as &str))
             {
                 return Err(AuthError::OidcExtraScopesMatchError {
                     expected: expected_scopes.iter().join(","),
-                    found: claims.scope.unwrap_or_default(),
+                    found: extra_claims
+                        .scope
+                        .as_deref()
+                        .unwrap_or_default()
+                        .to_string(),
                 });
             }
         }
-        if let Some(key) = config.extra_claim_key.as_ref() {
-            if !claims.has_claim(key) {
-                return Err(AuthError::OidcExtraClaimMissingError { claim: key.clone() });
-            }
-            if let Some(value) = config.extra_claim_value.as_ref() {
-                if claims.get_claim(key).is_none_or(|v| &v != value) {
-                    return Err(AuthError::OidcExtraClaimMatchError {
-                        expected: value.clone(),
-                        found: claims.get_claim(key).unwrap_or_default().to_string(),
-                        key: key.clone(),
-                    });
+        if let Some(expected_extra_claims) = config.extra_claims.as_ref() {
+            for (expected_key, expected_value) in expected_extra_claims.iter() {
+                match (extra_claims.custom.get(expected_key), expected_value) {
+                    (found_value, Some(expected_value)) => {
+                        if let Some(Value::String(found_value)) = found_value
+                            && expected_value == found_value
+                        {
+                        } else {
+                            return Err(AuthError::OidcExtraClaimMatchError {
+                                expected: expected_value.clone(),
+                                found: found_value.map(|v| v.to_string()).unwrap_or_default(),
+                                key: expected_key.clone(),
+                            });
+                        }
+                    }
+                    (None, None) => {
+                        return Err(AuthError::OidcExtraClaimMissingError {
+                            claim: expected_key.clone(),
+                        });
+                    }
+                    _ => {}
                 }
             }
         }
         let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
-            Err(RError::DbError {
+            Err(RecorderError::DbError {
                 source: DbErr::RecordNotFound(..),
             }) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
             r => r,
         }
-        .map_err(|_| AuthError::FindAuthRecordError)?;
+        .map_err(|e| {
+            tracing::error!("Error finding auth record: {:?}", e);
+            AuthError::FindAuthRecordError
+        })?;

         Ok(AuthUserInfo {
             subscriber_auth,
@@ -324,7 +355,7 @@ impl AuthServiceTrait for OidcAuthService {
     }

     fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
-        Some(HeaderValue::from_static(r#"Bearer realm="konobangu""#))
+        Some(HeaderValue::from_str(format!("Bearer realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
     }

     fn auth_type(&self) -> AuthType {
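Note: the extra-claims rule introduced in `extract_user_info` above can be restated as a small predicate: a key mapped to a concrete value only matches a string claim with exactly that value, a key mapped to `None` merely has to be present, and the original code distinguishes the two failure modes as `OidcExtraClaimMatchError` and `OidcExtraClaimMissingError`. The sketch below is illustrative only and assumes the custom claims are `serde_json::Value`s, as in `OidcAuthClaims::custom`:

```rust
use std::collections::HashMap;

use serde_json::Value;

// Sketch of the matching rule used above (true = the claim check passes).
fn extra_claim_ok(expected_value: Option<&String>, found: Option<&Value>) -> bool {
    match (found, expected_value) {
        // An expected value only matches a string claim with the same content.
        (Some(Value::String(found)), Some(expected)) => found == expected,
        (_, Some(_)) => false,
        // With no expected value, the claim merely has to be present.
        (None, None) => false,
        (Some(_), None) => true,
    }
}

fn all_extra_claims_ok(
    expected: &HashMap<String, Option<String>>,
    custom: &HashMap<String, Value>,
) -> bool {
    expected
        .iter()
        .all(|(key, value)| extra_claim_ok(value.as_ref(), custom.get(key)))
}
```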
@@ -1,30 +1,24 @@
 use std::{sync::Arc, time::Duration};

 use async_trait::async_trait;
-use axum::{
-    extract::FromRequestParts,
-    http::request::Parts,
-    response::{IntoResponse as _, Response},
+use axum::http::request::Parts;
+use fetch::{
+    HttpClient, HttpClientConfig,
+    client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
 };
-use jwt_authorizer::{JwtAuthorizer, Validation};
+use http::header::HeaderValue;
+use jwtk::jwk::RemoteJwksVerifier;
 use moka::future::Cache;
-use reqwest::header::HeaderValue;
+use openidconnect::{IssuerUrl, core::CoreProviderMetadata};
 use snafu::prelude::*;

 use super::{
     AuthConfig,
     basic::BasicAuthService,
-    errors::{AuthError, OidcProviderHttpClientSnafu},
-    oidc::{OidcAuthClaims, OidcAuthService},
-};
-use crate::{
-    app::AppContextTrait,
-    fetch::{
-        HttpClient, HttpClientConfig,
-        client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
-    },
-    models::auth::AuthType,
+    errors::{AuthError, OidcProviderHttpClientSnafu, OidcProviderUrlSnafu},
+    oidc::{OidcAuthService, OidcHttpClient},
 };
+use crate::{app::AppContextTrait, models::auth::AuthType};

 #[derive(Clone, Debug)]
 pub struct AuthUserInfo {
@@ -32,22 +26,6 @@ pub struct AuthUserInfo {
     pub auth_type: AuthType,
 }

-impl FromRequestParts<Arc<dyn AppContextTrait>> for AuthUserInfo {
-    type Rejection = Response;
-
-    async fn from_request_parts(
-        parts: &mut Parts,
-        state: &Arc<dyn AppContextTrait>,
-    ) -> Result<Self, Self::Rejection> {
-        let auth_service = state.auth();
-
-        auth_service
-            .extract_user_info(state.as_ref(), parts)
-            .await
-            .map_err(|err| err.into_response())
-    }
-}
-
 #[async_trait]
 pub trait AuthServiceTrait {
     async fn extract_user_info(
@@ -69,26 +47,32 @@ impl AuthService {
         let result = match config {
             AuthConfig::Basic(config) => AuthService::Basic(Box::new(BasicAuthService { config })),
             AuthConfig::Oidc(config) => {
-                let validation = Validation::new()
-                    .iss(&[&config.issuer])
-                    .aud(&[&config.audience]);
-
-                let oidc_provider_client = HttpClient::from_config(HttpClientConfig {
-                    exponential_backoff_max_retries: Some(3),
-                    cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
-                    cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
-                    ..Default::default()
-                })
-                .context(OidcProviderHttpClientSnafu)?;
-
-                let api_authorizer = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer)
-                    .validation(validation)
-                    .build()
-                    .await?;
+                let oidc_provider_client = Arc::new(
+                    HttpClient::from_config(HttpClientConfig {
+                        exponential_backoff_max_retries: Some(3),
+                        cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
+                        cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
+                        ..Default::default()
+                    })
+                    .context(OidcProviderHttpClientSnafu)?,
+                );
+
+                let provider_metadata = {
+                    let client = OidcHttpClient(oidc_provider_client.clone());
+                    let issuer_url =
+                        IssuerUrl::new(config.issuer.clone()).context(OidcProviderUrlSnafu)?;
+                    CoreProviderMetadata::discover_async(issuer_url, &client).await
+                }?;
+
+                let jwk_verifier = RemoteJwksVerifier::new(
+                    provider_metadata.jwks_uri().to_string().clone(),
+                    None,
+                    Duration::from_secs(300),
+                );

                 AuthService::Oidc(Box::new(OidcAuthService {
                     config,
-                    api_authorizer,
+                    jwk_verifier,
                     oidc_provider_client,
                     oidc_request_cache: Cache::builder()
                         .time_to_live(Duration::from_mins(5))
@@ -103,6 +87,7 @@ impl AuthService {

 #[async_trait]
 impl AuthServiceTrait for AuthService {
+    #[tracing::instrument(skip(self, ctx, request))]
     async fn extract_user_info(
         &self,
         ctx: &dyn AppContextTrait,
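Note: in the `AuthService` constructor above, the `jwt_authorizer`-based validator is replaced by jwtk's `RemoteJwksVerifier`, built from the `jwks_uri` of the discovered provider metadata with a 300-second key cache. A rough sketch of how such a verifier is used, based only on the calls visible in this diff (`verify::<OidcAuthClaims>` and `claims()`); treat the exact jwtk signatures as assumptions rather than documentation:

```rust
use std::time::Duration;

use jwtk::jwk::RemoteJwksVerifier;

// `OidcAuthClaims` is the extra-claims type defined earlier in this diff and
// is assumed to be in scope.
async fn verify_bearer_token(jwks_uri: String, token: &str) -> Result<(), jwtk::Error> {
    // The verifier caches the downloaded JWKS for the given duration, so
    // repeated verifications do not refetch the keys.
    let verifier = RemoteJwksVerifier::new(jwks_uri, None, Duration::from_secs(300));
    let verified = verifier.verify::<OidcAuthClaims>(token).await?;
    let claims = verified.claims();
    // `claims.sub`, `claims.aud` and `claims.extra` are then checked as in
    // `OidcAuthService::extract_user_info`.
    let _ = (&claims.sub, &claims.aud, &claims.extra);
    Ok(())
}
```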
@@ -1,7 +1,7 @@
-use recorder::{app::AppBuilder, errors::RResult};
+use recorder::{app::AppBuilder, errors::RecorderResult};

 #[tokio::main]
-async fn main() -> RResult<()> {
+async fn main() -> RecorderResult<()> {
     let builder = AppBuilder::from_main_cli(None).await?;

     let app = builder.build().await?;
apps/recorder/src/bin/migrate_down.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
+use recorder::{app::AppBuilder, database::DatabaseService, errors::RecorderResult};
+
+#[tokio::main]
+async fn main() -> RecorderResult<()> {
+    let builder = AppBuilder::from_main_cli(None).await?;
+
+    builder.load_env().await?;
+    let mut database_config = builder.load_config().await?.database;
+    database_config.auto_migrate = false;
+
+    let database_service = DatabaseService::from_config(database_config).await?;
+
+    database_service.migrate_down().await?;
+
+    Ok(())
+}
apps/recorder/src/cache/service.rs (vendored, 4 lines changed)
@@ -1,10 +1,10 @@
 use super::CacheConfig;
-use crate::errors::RResult;
+use crate::errors::RecorderResult;

 pub struct CacheService {}

 impl CacheService {
-    pub async fn from_config(_config: CacheConfig) -> RResult<Self> {
+    pub async fn from_config(_config: CacheConfig) -> RecorderResult<Self> {
         Ok(Self {})
     }
 }
apps/recorder/src/crypto/config.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct CryptoConfig {}
apps/recorder/src/crypto/error.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
+use async_graphql::Error as AsyncGraphQLError;
+use seaography::SeaographyError;
+
+#[derive(Debug, snafu::Snafu)]
+pub enum CryptoError {
+    #[snafu(transparent)]
+    Base64DecodeError { source: base64::DecodeError },
+    #[snafu(display("CocoonError: {source:?}"), context(false))]
+    CocoonError { source: cocoon::Error },
+    #[snafu(transparent)]
+    FromUtf8Error { source: std::string::FromUtf8Error },
+    #[snafu(transparent)]
+    SerdeJsonError { source: serde_json::Error },
+}
+
+impl From<CryptoError> for SeaographyError {
+    fn from(error: CryptoError) -> Self {
+        SeaographyError::AsyncGraphQLError(AsyncGraphQLError::new(error.to_string()))
+    }
+}
apps/recorder/src/crypto/mod.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
+pub mod config;
+pub mod error;
+pub mod service;
+pub mod userpass;
+
+pub use config::CryptoConfig;
+pub use error::CryptoError;
+pub use service::CryptoService;
+pub use userpass::UserPassCredential;
apps/recorder/src/crypto/service.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
+use base64::prelude::{BASE64_URL_SAFE, *};
+use cocoon::Cocoon;
+use rand::Rng;
+use serde::{Deserialize, Serialize};
+
+use super::CryptoConfig;
+use crate::crypto::error::CryptoError;
+
+pub struct CryptoService {
+    #[allow(dead_code)]
+    config: CryptoConfig,
+}
+
+impl CryptoService {
+    pub async fn from_config(config: CryptoConfig) -> Result<Self, CryptoError> {
+        Ok(Self { config })
+    }
+
+    pub fn encrypt_string(&self, data: String) -> Result<String, CryptoError> {
+        let key = rand::rng().random::<[u8; 32]>();
+        let mut cocoon = Cocoon::new(&key);
+
+        let mut data = data.into_bytes();
+
+        let detached_prefix = cocoon.encrypt(&mut data)?;
+
+        let mut combined = Vec::with_capacity(key.len() + detached_prefix.len() + data.len());
+        combined.extend_from_slice(&key);
+        combined.extend_from_slice(&detached_prefix);
+        combined.extend_from_slice(&data);
+
+        Ok(BASE64_URL_SAFE.encode(combined))
+    }
+
+    pub fn decrypt_string(&self, data: &str) -> Result<String, CryptoError> {
+        let decoded = BASE64_URL_SAFE.decode(data)?;
+
+        let (key, remain) = decoded.split_at(32);
+        let (detached_prefix, data) = remain.split_at(60);
+        let mut data = data.to_vec();
+        let cocoon = Cocoon::new(key);
+
+        cocoon.decrypt(&mut data, detached_prefix)?;
+
+        String::from_utf8(data).map_err(CryptoError::from)
+    }
+
+    pub fn encrypt_serialize<T: Serialize>(&self, credentials: &T) -> Result<String, CryptoError> {
+        let json = serde_json::to_string(credentials)?;
+
+        self.encrypt_string(json)
+    }
+
+    pub fn decrypt_deserialize<T: for<'de> Deserialize<'de>>(
+        &self,
+        encrypted: &str,
+    ) -> Result<T, CryptoError> {
+        let data = self.decrypt_string(encrypted)?;
+
+        serde_json::from_str(&data).map_err(CryptoError::from)
+    }
+}
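Note: `CryptoService` packs key + detached prefix + ciphertext into one URL-safe base64 string (32-byte key and 60-byte prefix, as the `split_at` calls in `decrypt_string` show). A round-trip usage sketch, assuming the new `crypto` module is re-exported from the `recorder` crate root, which this diff does not show:

```rust
use recorder::crypto::{CryptoConfig, CryptoService};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let crypto = CryptoService::from_config(CryptoConfig {}).await?;

    // String round trip.
    let encrypted = crypto.encrypt_string("user:password".to_string())?;
    assert_eq!(crypto.decrypt_string(&encrypted)?, "user:password");

    // Any Serialize/Deserialize type goes through `encrypt_serialize` /
    // `decrypt_deserialize` in the same way.
    Ok(())
}
```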
apps/recorder/src/crypto/userpass.rs (new file, 19 lines)
@@ -0,0 +1,19 @@
+use std::fmt::Debug;
+
+pub struct UserPassCredential {
+    pub username: String,
+    pub password: String,
+    pub user_agent: Option<String>,
+    pub cookies: Option<String>,
+}
+
+impl Debug for UserPassCredential {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("UserPassCredential")
+            .field("username", &"[Secret]")
+            .field("password", &"[Secret]")
+            .field("cookies", &"[Secret]")
+            .field("user_agent", &self.user_agent)
+            .finish()
+    }
+}
@@ -1,20 +1,26 @@
 use std::{ops::Deref, time::Duration};

+use apalis_sql::postgres::PostgresStorage;
 use sea_orm::{
-    ConnectOptions, ConnectionTrait, Database, DatabaseBackend, DatabaseConnection, DbBackend,
-    DbErr, ExecResult, QueryResult, Statement,
+    ConnectOptions, ConnectionTrait, Database, DatabaseConnection, DbBackend, DbErr, ExecResult,
+    QueryResult, Statement,
 };
 use sea_orm_migration::MigratorTrait;

 use super::DatabaseConfig;
-use crate::{errors::RResult, migrations::Migrator};
+use crate::{errors::RecorderResult, migrations::Migrator};

 pub struct DatabaseService {
+    pub config: DatabaseConfig,
     connection: DatabaseConnection,
+    #[cfg(feature = "testcontainers")]
+    pub container:
+        Option<testcontainers::ContainerAsync<testcontainers_modules::postgres::Postgres>>,
 }

 impl DatabaseService {
-    pub async fn from_config(config: DatabaseConfig) -> RResult<Self> {
+    pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> {
+        let db_config = config.clone();
         let mut opt = ConnectOptions::new(&config.uri);
         opt.max_connections(config.max_connections)
             .min_connections(config.min_connections)
@@ -28,26 +34,52 @@ impl DatabaseService {

         let db = Database::connect(opt).await?;

-        if db.get_database_backend() == DatabaseBackend::Sqlite {
-            db.execute(Statement::from_string(
-                DatabaseBackend::Sqlite,
-                "
-                PRAGMA foreign_keys = ON;
-                PRAGMA journal_mode = WAL;
-                PRAGMA synchronous = NORMAL;
-                PRAGMA mmap_size = 134217728;
-                PRAGMA journal_size_limit = 67108864;
-                PRAGMA cache_size = 2000;
-                ",
-            ))
-            .await?;
-        }
+        // only support postgres for now
+        // if db.get_database_backend() == DatabaseBackend::Sqlite {
+        //     db.execute(Statement::from_string(
+        //         DatabaseBackend::Sqlite,
+        //         "
+        //         PRAGMA foreign_keys = ON;
+        //         PRAGMA journal_mode = WAL;
+        //         PRAGMA synchronous = NORMAL;
+        //         PRAGMA mmap_size = 134217728;
+        //         PRAGMA journal_size_limit = 67108864;
+        //         PRAGMA cache_size = 2000;
+        //         ",
+        //     ))
+        //     .await?;
+        // }
+
+        let me = Self {
+            connection: db,
+            #[cfg(feature = "testcontainers")]
+            container: None,
+            config: db_config,
+        };

         if config.auto_migrate {
-            Migrator::up(&db, None).await?;
+            me.migrate_up().await?;
         }

-        Ok(Self { connection: db })
+        Ok(me)
+    }
+
+    pub async fn migrate_up(&self) -> RecorderResult<()> {
+        {
+            let pool = &self.get_postgres_connection_pool();
+            PostgresStorage::setup(pool).await?;
+        }
+        Migrator::up(&self.connection, None).await?;
+        Ok(())
+    }
+
+    pub async fn migrate_down(&self) -> RecorderResult<()> {
+        Migrator::down(&self.connection, None).await?;
+        {
+            self.execute_unprepared(r#"DROP SCHEMA IF EXISTS apalis CASCADE"#)
+                .await?;
+        }
+        Ok(())
     }
 }

@@ -1,297 +0,0 @@
-use std::fmt::Debug;
-
-use async_trait::async_trait;
-use itertools::Itertools;
-use lazy_static::lazy_static;
-use librqbit_core::{
-    magnet::Magnet,
-    torrent_metainfo::{TorrentMetaV1Owned, torrent_from_bytes},
-};
-use quirks_path::{Path, PathBuf};
-use regex::Regex;
-use serde::{Deserialize, Serialize};
-use snafu::prelude::*;
-use url::Url;
-
-use super::{DownloaderError, QbitTorrent, QbitTorrentContent, errors::DownloadFetchSnafu};
-use crate::fetch::{HttpClientTrait, fetch_bytes};
-
-pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
-pub const MAGNET_SCHEMA: &str = "magnet";
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-pub enum TorrentFilter {
-    All,
-    Downloading,
-    Completed,
-    Paused,
-    Active,
-    Inactive,
-    Resumed,
-    Stalled,
-    StalledUploading,
-    StalledDownloading,
-    Errored,
-}
-
-lazy_static! {
-    static ref TORRENT_HASH_RE: Regex = Regex::new(r"[a-fA-F0-9]{40}").unwrap();
-    static ref TORRENT_EXT_RE: Regex = Regex::new(r"\.torrent$").unwrap();
-}
-
-#[derive(Clone, PartialEq, Eq)]
-pub enum TorrentSource {
-    MagnetUrl {
-        url: Url,
-        hash: String,
-    },
-    TorrentUrl {
-        url: Url,
-        hash: String,
-    },
-    TorrentFile {
-        torrent: Vec<u8>,
-        hash: String,
-        name: Option<String>,
-    },
-}
-
-impl TorrentSource {
-    pub async fn parse<H: HttpClientTrait>(client: &H, url: &str) -> Result<Self, DownloaderError> {
-        let url = Url::parse(url)?;
-        let source = if url.scheme() == MAGNET_SCHEMA {
-            TorrentSource::from_magnet_url(url)?
-        } else if let Some(basename) = url
-            .clone()
-            .path_segments()
-            .and_then(|mut segments| segments.next_back())
-        {
-            if let (Some(match_hash), true) = (
-                TORRENT_HASH_RE.find(basename),
-                TORRENT_EXT_RE.is_match(basename),
-            ) {
-                TorrentSource::from_torrent_url(url, match_hash.as_str().to_string())?
-            } else {
-                let contents = fetch_bytes(client, url)
-                    .await
-                    .boxed()
-                    .context(DownloadFetchSnafu)?;
-                TorrentSource::from_torrent_file(contents.to_vec(), Some(basename.to_string()))?
-            }
-        } else {
-            let contents = fetch_bytes(client, url)
-                .await
-                .boxed()
-                .context(DownloadFetchSnafu)?;
-            TorrentSource::from_torrent_file(contents.to_vec(), None)?
-        };
-        Ok(source)
-    }
-
-    pub fn from_torrent_file(file: Vec<u8>, name: Option<String>) -> Result<Self, DownloaderError> {
-        let torrent: TorrentMetaV1Owned =
-            torrent_from_bytes(&file).map_err(|_| DownloaderError::TorrentFileFormatError)?;
-        let hash = torrent.info_hash.as_string();
-        Ok(TorrentSource::TorrentFile {
-            torrent: file,
-            hash,
-            name,
-        })
-    }
-
-    pub fn from_magnet_url(url: Url) -> Result<Self, DownloaderError> {
-        if url.scheme() != MAGNET_SCHEMA {
-            Err(DownloaderError::DownloadSchemaError {
-                found: url.scheme().to_string(),
-                expected: MAGNET_SCHEMA.to_string(),
-            })
-        } else {
-            let magnet =
-                Magnet::parse(url.as_str()).map_err(|_| DownloaderError::MagnetFormatError {
-                    url: url.as_str().to_string(),
-                })?;
-
-            let hash = magnet
-                .as_id20()
-                .ok_or_else(|| DownloaderError::MagnetFormatError {
-                    url: url.as_str().to_string(),
-                })?
-                .as_string();
-            Ok(TorrentSource::MagnetUrl { url, hash })
-        }
-    }
-
-    pub fn from_torrent_url(url: Url, hash: String) -> Result<Self, DownloaderError> {
-        Ok(TorrentSource::TorrentUrl { url, hash })
-    }
-
-    pub fn hash(&self) -> &str {
-        match self {
-            TorrentSource::MagnetUrl { hash, .. } => hash,
-            TorrentSource::TorrentUrl { hash, .. } => hash,
-            TorrentSource::TorrentFile { hash, .. } => hash,
-        }
-    }
-}
-
-impl Debug for TorrentSource {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            TorrentSource::MagnetUrl { url, .. } => {
-                write!(f, "MagnetUrl {{ url: {} }}", url.as_str())
-            }
-            TorrentSource::TorrentUrl { url, .. } => {
-                write!(f, "TorrentUrl {{ url: {} }}", url.as_str())
-            }
-            TorrentSource::TorrentFile { name, hash, .. } => write!(
-                f,
-                "TorrentFile {{ name: \"{}\", hash: \"{hash}\" }}",
-                name.as_deref().unwrap_or_default()
-            ),
-        }
-    }
-}
-
-pub trait TorrentContent {
-    fn get_name(&self) -> &str;
-
-    fn get_all_size(&self) -> u64;
-
-    fn get_progress(&self) -> f64;
-
-    fn get_curr_size(&self) -> u64;
-}
-
-impl TorrentContent for QbitTorrentContent {
-    fn get_name(&self) -> &str {
-        self.name.as_str()
-    }
-
-    fn get_all_size(&self) -> u64 {
-        self.size
-    }
-
-    fn get_progress(&self) -> f64 {
-        self.progress
-    }
-
-    fn get_curr_size(&self) -> u64 {
-        u64::clamp(
-            f64::round(self.get_all_size() as f64 * self.get_progress()) as u64,
-            0,
-            self.get_all_size(),
-        )
-    }
-}
-
-#[derive(Debug, Clone)]
-pub enum Torrent {
-    Qbit {
-        torrent: QbitTorrent,
-        contents: Vec<QbitTorrentContent>,
-    },
-}
-
-impl Torrent {
-    pub fn iter_files(&self) -> impl Iterator<Item = &dyn TorrentContent> {
-        match self {
-            Torrent::Qbit { contents, .. } => {
-                contents.iter().map(|item| item as &dyn TorrentContent)
-            }
-        }
-    }
-
-    pub fn get_name(&self) -> Option<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.name.as_deref(),
-        }
-    }
-
-    pub fn get_hash(&self) -> Option<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.hash.as_deref(),
-        }
-    }
-
-    pub fn get_save_path(&self) -> Option<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.save_path.as_deref(),
-        }
-    }
-
-    pub fn get_content_path(&self) -> Option<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.content_path.as_deref(),
-        }
-    }
-
-    pub fn get_tags(&self) -> Vec<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.tags.as_deref().map_or_else(Vec::new, |s| {
-                s.split(',')
-                    .map(|s| s.trim())
-                    .filter(|s| !s.is_empty())
-                    .collect_vec()
-            }),
-        }
-    }
-
-    pub fn get_category(&self) -> Option<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.category.as_deref(),
-        }
-    }
-}
-
-#[async_trait]
-pub trait TorrentDownloader {
-    async fn get_torrents_info(
-        &self,
-        status_filter: TorrentFilter,
-        category: Option<String>,
-        tag: Option<String>,
-    ) -> Result<Vec<Torrent>, DownloaderError>;
-
-    async fn add_torrents(
-        &self,
-        source: TorrentSource,
-        save_path: String,
-        category: Option<&str>,
-    ) -> Result<(), DownloaderError>;
-
-    async fn delete_torrents(&self, hashes: Vec<String>) -> Result<(), DownloaderError>;
-
-    async fn rename_torrent_file(
-        &self,
-        hash: &str,
-        old_path: &str,
-        new_path: &str,
-    ) -> Result<(), DownloaderError>;
-
-    async fn move_torrents(
-        &self,
-        hashes: Vec<String>,
-        new_path: &str,
-    ) -> Result<(), DownloaderError>;
-
-    async fn get_torrent_path(&self, hashes: String) -> Result<Option<String>, DownloaderError>;
-
-    async fn check_connection(&self) -> Result<(), DownloaderError>;
-
-    async fn set_torrents_category(
-        &self,
-        hashes: Vec<String>,
-        category: &str,
-    ) -> Result<(), DownloaderError>;
-
-    async fn add_torrent_tags(
-        &self,
-        hashes: Vec<String>,
-        tags: Vec<String>,
-    ) -> Result<(), DownloaderError>;
-
-    async fn add_category(&self, category: &str) -> Result<(), DownloaderError>;
-
-    fn get_save_path(&self, sub_path: &Path) -> PathBuf;
-}
@@ -1,16 +0,0 @@
-pub mod core;
-pub mod errors;
-pub mod qbit;
-pub mod rqbit;
-pub mod utils;
-
-pub use core::{
-    Torrent, TorrentContent, TorrentDownloader, TorrentFilter, TorrentSource, BITTORRENT_MIME_TYPE,
-    MAGNET_SCHEMA,
-};
-
-pub use errors::DownloaderError;
-pub use qbit::{
-    QBittorrentDownloader, QBittorrentDownloaderCreation, QbitTorrent, QbitTorrentContent,
-    QbitTorrentFile, QbitTorrentFilter, QbitTorrentSource,
-};
@@ -1,719 +0,0 @@
|
|||||||
use std::{
|
|
||||||
borrow::Cow, collections::HashSet, fmt::Debug, future::Future, sync::Arc, time::Duration,
|
|
||||||
};
|
|
||||||
|
|
||||||
use async_trait::async_trait;
|
|
||||||
use futures::future::try_join_all;
|
|
||||||
pub use qbit_rs::model::{
|
|
||||||
Torrent as QbitTorrent, TorrentContent as QbitTorrentContent, TorrentFile as QbitTorrentFile,
|
|
||||||
TorrentFilter as QbitTorrentFilter, TorrentSource as QbitTorrentSource,
|
|
||||||
};
|
|
||||||
use qbit_rs::{
|
|
||||||
Qbit,
|
|
||||||
model::{AddTorrentArg, Credential, GetTorrentListArg, NonEmptyStr, SyncData},
|
|
||||||
};
|
|
||||||
use quirks_path::{Path, PathBuf};
|
|
||||||
use snafu::prelude::*;
|
|
||||||
use tokio::time::sleep;
|
|
||||||
use tracing::instrument;
|
|
||||||
use url::Url;
|
|
||||||
|
|
||||||
use super::{
|
|
||||||
DownloaderError, Torrent, TorrentDownloader, TorrentFilter, TorrentSource,
|
|
||||||
utils::path_equals_as_file_url,
|
|
||||||
};
|
|
||||||
|
|
||||||
impl From<TorrentSource> for QbitTorrentSource {
|
|
||||||
fn from(value: TorrentSource) -> Self {
|
|
||||||
match value {
|
|
||||||
TorrentSource::MagnetUrl { url, .. } => QbitTorrentSource::Urls {
|
|
||||||
urls: qbit_rs::model::Sep::from([url]),
|
|
||||||
},
|
|
||||||
TorrentSource::TorrentUrl { url, .. } => QbitTorrentSource::Urls {
|
|
||||||
urls: qbit_rs::model::Sep::from([url]),
|
|
||||||
},
|
|
||||||
TorrentSource::TorrentFile {
|
|
||||||
torrent: torrents,
|
|
||||||
name,
|
|
||||||
..
|
|
||||||
} => QbitTorrentSource::TorrentFiles {
|
|
||||||
torrents: vec![QbitTorrentFile {
|
|
||||||
filename: name.unwrap_or_default(),
|
|
||||||
data: torrents,
|
|
||||||
}],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<TorrentFilter> for QbitTorrentFilter {
|
|
||||||
fn from(val: TorrentFilter) -> Self {
|
|
||||||
match val {
|
|
||||||
TorrentFilter::All => QbitTorrentFilter::All,
|
|
||||||
TorrentFilter::Downloading => QbitTorrentFilter::Downloading,
|
|
||||||
TorrentFilter::Completed => QbitTorrentFilter::Completed,
|
|
||||||
TorrentFilter::Paused => QbitTorrentFilter::Paused,
|
|
||||||
TorrentFilter::Active => QbitTorrentFilter::Active,
|
|
||||||
TorrentFilter::Inactive => QbitTorrentFilter::Inactive,
|
|
||||||
TorrentFilter::Resumed => QbitTorrentFilter::Resumed,
|
|
||||||
TorrentFilter::Stalled => QbitTorrentFilter::Stalled,
|
|
||||||
TorrentFilter::StalledUploading => QbitTorrentFilter::StalledUploading,
|
|
||||||
TorrentFilter::StalledDownloading => QbitTorrentFilter::StalledDownloading,
|
|
||||||
TorrentFilter::Errored => QbitTorrentFilter::Errored,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct QBittorrentDownloaderCreation {
|
|
||||||
pub endpoint: String,
|
|
||||||
pub username: String,
|
|
||||||
pub password: String,
|
|
||||||
pub save_path: String,
|
|
||||||
pub subscriber_id: i32,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct QBittorrentDownloader {
|
|
||||||
pub subscriber_id: i32,
|
|
||||||
pub endpoint_url: Url,
|
|
||||||
pub client: Arc<Qbit>,
|
|
||||||
pub save_path: PathBuf,
|
|
||||||
pub wait_sync_timeout: Duration,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl QBittorrentDownloader {
|
|
||||||
pub async fn from_creation(
|
|
||||||
creation: QBittorrentDownloaderCreation,
|
|
||||||
) -> Result<Self, DownloaderError> {
|
|
||||||
let endpoint_url = Url::parse(&creation.endpoint)?;
|
|
||||||
|
|
||||||
let credential = Credential::new(creation.username, creation.password);
|
|
||||||
|
|
||||||
let client = Qbit::new(endpoint_url.clone(), credential);
|
|
||||||
|
|
||||||
client.login(false).await?;
|
|
||||||
|
|
||||||
client.sync(None).await?;
|
|
||||||
|
|
||||||
Ok(Self {
|
|
||||||
client: Arc::new(client),
|
|
||||||
endpoint_url,
|
|
||||||
subscriber_id: creation.subscriber_id,
|
|
||||||
save_path: creation.save_path.into(),
|
|
||||||
wait_sync_timeout: Duration::from_millis(10000),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(level = "debug")]
|
|
||||||
pub async fn api_version(&self) -> Result<String, DownloaderError> {
|
|
||||||
let result = self.client.get_webapi_version().await?;
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn wait_until<G, Fut, F, D, H, E>(
|
|
||||||
&self,
|
|
||||||
capture_fn: H,
|
|
||||||
fetch_data_fn: G,
|
|
||||||
mut stop_wait_fn: F,
|
|
||||||
timeout: Option<Duration>,
|
|
||||||
) -> Result<(), DownloaderError>
|
|
||||||
where
|
|
||||||
H: FnOnce() -> E,
|
|
||||||
G: Fn(Arc<Qbit>, E) -> Fut,
|
|
||||||
Fut: Future<Output = Result<D, DownloaderError>>,
|
|
||||||
F: FnMut(&D) -> bool,
|
|
||||||
E: Clone,
|
|
||||||
D: Debug + serde::Serialize,
|
|
||||||
{
|
|
||||||
let mut next_wait_ms = 32u64;
|
|
||||||
let mut all_wait_ms = 0u64;
|
|
||||||
let timeout = timeout.unwrap_or(self.wait_sync_timeout);
|
|
||||||
let env = capture_fn();
|
|
||||||
loop {
|
|
||||||
sleep(Duration::from_millis(next_wait_ms)).await;
|
|
||||||
all_wait_ms += next_wait_ms;
|
|
||||||
if all_wait_ms >= timeout.as_millis() as u64 {
|
|
||||||
// full update
|
|
||||||
let sync_data = fetch_data_fn(self.client.clone(), env.clone()).await?;
|
|
||||||
if stop_wait_fn(&sync_data) {
|
|
||||||
break;
|
|
||||||
} else {
|
|
||||||
tracing::warn!(name = "wait_until timeout", sync_data = serde_json::to_string(&sync_data).unwrap(), timeout = ?timeout);
|
|
||||||
return Err(DownloaderError::DownloadTimeoutError {
|
|
||||||
action: Cow::Borrowed("QBittorrentDownloader::wait_unit"),
|
|
||||||
timeout,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let sync_data = fetch_data_fn(self.client.clone(), env.clone()).await?;
|
|
||||||
if stop_wait_fn(&sync_data) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
next_wait_ms *= 2;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(level = "trace", skip(self, stop_wait_fn))]
|
|
||||||
pub async fn wait_torrents_until<F>(
|
|
||||||
&self,
|
|
||||||
arg: GetTorrentListArg,
|
|
||||||
stop_wait_fn: F,
|
|
||||||
timeout: Option<Duration>,
|
|
||||||
) -> Result<(), DownloaderError>
|
|
||||||
where
|
|
||||||
F: FnMut(&Vec<QbitTorrent>) -> bool,
|
|
||||||
{
|
|
||||||
self.wait_until(
|
|
||||||
|| arg,
|
|
||||||
async move |client: Arc<Qbit>,
|
|
||||||
arg: GetTorrentListArg|
|
|
||||||
-> Result<Vec<QbitTorrent>, DownloaderError> {
|
|
||||||
let data = client.get_torrent_list(arg).await?;
|
|
||||||
Ok(data)
|
|
||||||
},
|
|
||||||
stop_wait_fn,
|
|
||||||
timeout,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(level = "debug", skip(self, stop_wait_fn))]
|
|
||||||
pub async fn wait_sync_until<F: FnMut(&SyncData) -> bool>(
|
|
||||||
&self,
|
|
||||||
stop_wait_fn: F,
|
|
||||||
timeout: Option<Duration>,
|
|
||||||
) -> Result<(), DownloaderError> {
|
|
||||||
self.wait_until(
|
|
||||||
|| (),
|
|
||||||
async move |client: Arc<Qbit>, _| -> Result<SyncData, DownloaderError> {
|
|
||||||
let data = client.sync(None).await?;
|
|
||||||
Ok(data)
|
|
||||||
},
|
|
||||||
stop_wait_fn,
|
|
||||||
timeout,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(level = "debug", skip(self, stop_wait_fn))]
|
|
||||||
async fn wait_torrent_contents_until<F: FnMut(&Vec<QbitTorrentContent>) -> bool>(
|
|
||||||
&self,
|
|
||||||
hash: &str,
|
|
||||||
stop_wait_fn: F,
|
|
||||||
timeout: Option<Duration>,
|
|
||||||
) -> Result<(), DownloaderError> {
|
|
||||||
self.wait_until(
|
|
||||||
|| Arc::new(hash.to_string()),
|
|
||||||
async move |client: Arc<Qbit>,
|
|
||||||
hash_arc: Arc<String>|
|
|
||||||
-> Result<Vec<QbitTorrentContent>, DownloaderError> {
|
|
||||||
let data = client.get_torrent_contents(hash_arc.as_str(), None).await?;
|
|
||||||
Ok(data)
|
|
||||||
},
|
|
||||||
stop_wait_fn,
|
|
||||||
timeout,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl TorrentDownloader for QBittorrentDownloader {
|
|
||||||
#[instrument(level = "debug", skip(self))]
|
|
||||||
async fn get_torrents_info(
|
|
||||||
&self,
|
|
||||||
status_filter: TorrentFilter,
|
|
||||||
category: Option<String>,
|
|
||||||
tag: Option<String>,
|
|
||||||
) -> Result<Vec<Torrent>, DownloaderError> {
|
|
||||||
let arg = GetTorrentListArg {
|
|
||||||
filter: Some(status_filter.into()),
|
|
||||||
category,
|
|
||||||
tag,
|
|
||||||
..Default::default()
|
|
||||||
};
|
|
||||||
let torrent_list = self.client.get_torrent_list(arg).await?;
|
|
||||||
let torrent_contents = try_join_all(torrent_list.iter().map(|s| async {
|
|
||||||
if let Some(hash) = &s.hash {
|
|
||||||
self.client.get_torrent_contents(hash as &str, None).await
|
|
||||||
} else {
|
|
                    Ok(vec![])
                }
            }))
            .await?;
        Ok(torrent_list
            .into_iter()
            .zip(torrent_contents)
            .map(|(torrent, contents)| Torrent::Qbit { torrent, contents })
            .collect::<Vec<_>>())
    }

    #[instrument(level = "debug", skip(self))]
    async fn add_torrents(
        &self,
        source: TorrentSource,
        save_path: String,
        category: Option<&str>,
    ) -> Result<(), DownloaderError> {
        let arg = AddTorrentArg {
            source: source.clone().into(),
            savepath: Some(save_path),
            category: category.map(String::from),
            auto_torrent_management: Some(false),
            ..Default::default()
        };
        let add_result = self.client.add_torrent(arg.clone()).await;
        if let (
            Err(qbit_rs::Error::ApiError(qbit_rs::ApiError::CategoryNotFound)),
            Some(category),
        ) = (&add_result, category)
        {
            self.add_category(category).await?;
            self.client.add_torrent(arg).await?;
        } else {
            add_result?;
        }
        let source_hash = source.hash();
        self.wait_sync_until(
            |sync_data| {
                sync_data
                    .torrents
                    .as_ref()
                    .is_some_and(|t| t.contains_key(source_hash))
            },
            None,
        )
        .await?;
        Ok(())
    }

    #[instrument(level = "debug", skip(self))]
    async fn delete_torrents(&self, hashes: Vec<String>) -> Result<(), DownloaderError> {
        self.client
            .delete_torrents(hashes.clone(), Some(true))
            .await?;
        self.wait_torrents_until(
            GetTorrentListArg::builder()
                .hashes(hashes.join("|"))
                .build(),
            |torrents| -> bool { torrents.is_empty() },
            None,
        )
        .await?;
        Ok(())
    }

    #[instrument(level = "debug", skip(self))]
    async fn rename_torrent_file(
        &self,
        hash: &str,
        old_path: &str,
        new_path: &str,
    ) -> Result<(), DownloaderError> {
        self.client.rename_file(hash, old_path, new_path).await?;
        let new_path = self.save_path.join(new_path);
        let save_path = self.save_path.as_path();
        self.wait_torrent_contents_until(
            hash,
            |contents| -> bool {
                contents.iter().any(|c| {
                    path_equals_as_file_url(save_path.join(&c.name), &new_path)
                        .inspect_err(|error| {
                            tracing::warn!(name = "path_equals_as_file_url", error = ?error);
                        })
                        .unwrap_or(false)
                })
            },
            None,
        )
        .await?;
        Ok(())
    }

    #[instrument(level = "debug", skip(self))]
    async fn move_torrents(
        &self,
        hashes: Vec<String>,
        new_path: &str,
    ) -> Result<(), DownloaderError> {
        self.client
            .set_torrent_location(hashes.clone(), new_path)
            .await?;

        self.wait_torrents_until(
            GetTorrentListArg::builder()
                .hashes(hashes.join("|"))
                .build(),
            |torrents| -> bool {
                torrents.iter().flat_map(|t| t.save_path.as_ref()).any(|p| {
                    path_equals_as_file_url(p, new_path)
                        .inspect_err(|error| {
                            tracing::warn!(name = "path_equals_as_file_url", error = ?error);
                        })
                        .unwrap_or(false)
                })
            },
            None,
        )
        .await?;
        Ok(())
    }

    async fn get_torrent_path(&self, hashes: String) -> Result<Option<String>, DownloaderError> {
        let mut torrent_list = self
            .client
            .get_torrent_list(GetTorrentListArg {
                hashes: Some(hashes),
                ..Default::default()
            })
            .await?;
        let torrent = torrent_list
            .first_mut()
            .whatever_context::<_, DownloaderError>("No torrent found")?;
        Ok(torrent.save_path.take())
    }

    #[instrument(level = "debug", skip(self))]
    async fn check_connection(&self) -> Result<(), DownloaderError> {
        self.api_version().await?;
        Ok(())
    }

    #[instrument(level = "debug", skip(self))]
    async fn set_torrents_category(
        &self,
        hashes: Vec<String>,
        category: &str,
    ) -> Result<(), DownloaderError> {
        let result = self
            .client
            .set_torrent_category(hashes.clone(), category)
            .await;
        if let Err(qbit_rs::Error::ApiError(qbit_rs::ApiError::CategoryNotFound)) = &result {
            self.add_category(category).await?;
            self.client
                .set_torrent_category(hashes.clone(), category)
                .await?;
        } else {
            result?;
        }
        self.wait_torrents_until(
            GetTorrentListArg::builder()
                .hashes(hashes.join("|"))
                .build(),
            |torrents| {
                torrents
                    .iter()
                    .all(|t| t.category.as_ref().is_some_and(|c| c == category))
            },
            None,
        )
        .await?;
        Ok(())
    }

    #[instrument(level = "debug", skip(self))]
    async fn add_torrent_tags(
        &self,
        hashes: Vec<String>,
        tags: Vec<String>,
    ) -> Result<(), DownloaderError> {
        if tags.is_empty() {
            whatever!("add torrent tags can not be empty");
        }
        self.client
            .add_torrent_tags(hashes.clone(), tags.clone())
            .await?;
        let tag_sets = tags.iter().map(|s| s.as_str()).collect::<HashSet<&str>>();
        self.wait_torrents_until(
            GetTorrentListArg::builder()
                .hashes(hashes.join("|"))
                .build(),
            |torrents| {
                torrents.iter().all(|t| {
                    t.tags.as_ref().is_some_and(|t| {
                        t.split(',')
                            .map(|s| s.trim())
                            .filter(|s| !s.is_empty())
                            .collect::<HashSet<&str>>()
                            .is_superset(&tag_sets)
                    })
                })
            },
            None,
        )
        .await?;
        Ok(())
    }

    #[instrument(level = "debug", skip(self))]
    async fn add_category(&self, category: &str) -> Result<(), DownloaderError> {
        self.client
            .add_category(
                NonEmptyStr::new(category)
                    .whatever_context::<_, DownloaderError>("category can not be empty")?,
                self.save_path.as_str(),
            )
            .await?;
        self.wait_sync_until(
            |sync_data| {
                sync_data
                    .categories
                    .as_ref()
                    .is_some_and(|s| s.contains_key(category))
            },
            None,
        )
        .await?;

        Ok(())
    }

    fn get_save_path(&self, sub_path: &Path) -> PathBuf {
        self.save_path.join(sub_path)
    }
}

impl Debug for QBittorrentDownloader {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("QBittorrentDownloader")
            .field("subscriber_id", &self.subscriber_id)
            .field("client", &self.endpoint_url.as_str())
            .finish()
    }
}

#[cfg(test)]
pub mod tests {
    use itertools::Itertools;

    use super::*;
    use crate::{errors::RResult, test_utils::fetch::build_testing_http_client};

    fn get_tmp_qbit_test_folder() -> &'static str {
        if cfg!(all(windows, not(feature = "testcontainers"))) {
            "C:\\Windows\\Temp\\konobangu\\qbit"
        } else {
            "/tmp/konobangu/qbit"
        }
    }

    #[cfg(feature = "testcontainers")]
    pub async fn create_qbit_testcontainer()
    -> RResult<testcontainers::ContainerRequest<testcontainers::GenericImage>> {
        use testcontainers::{
            GenericImage,
            core::{
                ContainerPort,
                // ReuseDirective,
                WaitFor,
            },
        };
        use testcontainers_modules::testcontainers::ImageExt;

        use crate::test_utils::testcontainers::ContainerRequestEnhancedExt;

        let container = GenericImage::new("linuxserver/qbittorrent", "latest")
            .with_wait_for(WaitFor::message_on_stderr("Connection to localhost"))
            .with_env_var("WEBUI_PORT", "8080")
            .with_env_var("TZ", "Asia/Singapore")
            .with_env_var("TORRENTING_PORT", "6881")
            .with_mapped_port(6881, ContainerPort::Tcp(6881))
            .with_mapped_port(8080, ContainerPort::Tcp(8080))
            // .with_reuse(ReuseDirective::Always)
            .with_default_log_consumer()
            .with_prune_existed_label("qbit-downloader", true, true)
            .await?;

        Ok(container)
    }

    #[cfg(not(feature = "testcontainers"))]
    #[tokio::test]
    async fn test_qbittorrent_downloader() {
        test_qbittorrent_downloader_impl(None, None).await;
    }

    #[cfg(feature = "testcontainers")]
    #[tokio::test(flavor = "multi_thread")]
    async fn test_qbittorrent_downloader() -> RResult<()> {
        use testcontainers::runners::AsyncRunner;
        use tokio::io::AsyncReadExt;

        tracing_subscriber::fmt()
            .with_max_level(tracing::Level::DEBUG)
            .with_test_writer()
            .init();

        let image = create_qbit_testcontainer().await?;

        let container = image.start().await?;

        let mut logs = String::new();

        container.stdout(false).read_to_string(&mut logs).await?;

        let username = logs
            .lines()
            .find_map(|line| {
                if line.contains("The WebUI administrator username is") {
                    line.split_whitespace().last()
                } else {
                    None
                }
            })
            .expect("should have username")
            .trim();

        let password = logs
            .lines()
            .find_map(|line| {
                if line.contains("A temporary password is provided for this session") {
                    line.split_whitespace().last()
                } else {
                    None
                }
            })
            .expect("should have password")
            .trim();

        tracing::info!(username, password);

        test_qbittorrent_downloader_impl(Some(username), Some(password)).await?;

        Ok(())
    }

    async fn test_qbittorrent_downloader_impl(
        username: Option<&str>,
        password: Option<&str>,
    ) -> RResult<()> {
        let http_client = build_testing_http_client()?;
        let base_save_path = Path::new(get_tmp_qbit_test_folder());

        let mut downloader = QBittorrentDownloader::from_creation(QBittorrentDownloaderCreation {
            endpoint: "http://127.0.0.1:8080".to_string(),
            password: password.unwrap_or_default().to_string(),
            username: username.unwrap_or_default().to_string(),
            subscriber_id: 0,
            save_path: base_save_path.to_string(),
        })
        .await?;

        downloader.wait_sync_timeout = Duration::from_secs(3);

        downloader.check_connection().await?;

        downloader
            .delete_torrents(vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()])
            .await?;

        let torrent_source = TorrentSource::parse(
            &http_client,
            "https://mikanani.me/Download/20240301/47ee2d69e7f19af783ad896541a07b012676f858.torrent"
        ).await?;

        let save_path = base_save_path.join(format!(
            "test_add_torrents_{}",
            chrono::Utc::now().timestamp()
        ));

        downloader
            .add_torrents(torrent_source, save_path.to_string(), Some("bangumi"))
            .await?;

        let get_torrent = async || -> Result<Torrent, DownloaderError> {
            let torrent_infos = downloader
                .get_torrents_info(TorrentFilter::All, None, None)
                .await?;

            let result = torrent_infos
                .into_iter()
                .find(|t| (t.get_hash() == Some("47ee2d69e7f19af783ad896541a07b012676f858")))
                .whatever_context::<_, DownloaderError>("no torrent")?;

            Ok(result)
        };

        let target_torrent = get_torrent().await?;

        let files = target_torrent.iter_files().collect_vec();
        assert!(!files.is_empty());

        let first_file = files[0];
        assert_eq!(
            first_file.get_name(),
            r#"[Nekomoe kissaten&LoliHouse] Boku no Kokoro no Yabai Yatsu - 20 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#
        );

        let test_tag = format!("test_tag_{}", chrono::Utc::now().timestamp());

        downloader
            .add_torrent_tags(
                vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()],
                vec![test_tag.clone()],
            )
            .await?;

        let target_torrent = get_torrent().await?;

        assert!(target_torrent.get_tags().iter().any(|s| s == &test_tag));

        let test_category = format!("test_category_{}", chrono::Utc::now().timestamp());

        downloader
            .set_torrents_category(
                vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()],
                &test_category,
            )
            .await?;

        let target_torrent = get_torrent().await?;

        assert_eq!(Some(test_category.as_str()), target_torrent.get_category());

        let moved_save_path = base_save_path.join(format!(
            "moved_test_add_torrents_{}",
            chrono::Utc::now().timestamp()
        ));

        downloader
            .move_torrents(
                vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()],
                moved_save_path.as_str(),
            )
            .await?;

        let target_torrent = get_torrent().await?;

        let content_path = target_torrent.iter_files().next().unwrap().get_name();

        let new_content_path = &format!("new_{}", content_path);

        downloader
            .rename_torrent_file(
                "47ee2d69e7f19af783ad896541a07b012676f858",
                content_path,
                new_content_path,
            )
            .await?;

        let target_torrent = get_torrent().await?;

        let content_path = target_torrent.iter_files().next().unwrap().get_name();

        assert_eq!(content_path, new_content_path);

        downloader
            .delete_torrents(vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()])
            .await?;

        let torrent_infos1 = downloader
            .get_torrents_info(TorrentFilter::All, None, None)
            .await?;

        assert!(torrent_infos1.is_empty());

        Ok(())
    }
}
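Note that the downloader never assumes a qBittorrent Web API call has taken effect immediately: every mutation above is followed by a `wait_sync_until` / `wait_torrents_until` / `wait_torrent_contents_until` call that re-polls the client until a predicate holds. Those helpers are defined elsewhere in this file and are not shown in this hunk; the following is only a minimal sketch of that polling shape, with the helper name, signature, and timeout values being illustrative assumptions rather than the repository's actual implementation.

```rust
use std::time::Duration;

// Hypothetical sketch of a poll-until helper; not the repository's implementation.
async fn wait_until<S, Fut>(
    mut fetch_state: impl FnMut() -> Fut,
    pred: impl Fn(&S) -> bool,
    timeout: Duration,
) -> Result<(), &'static str>
where
    Fut: std::future::Future<Output = S>,
{
    let deadline = tokio::time::Instant::now() + timeout;
    loop {
        // Re-fetch the remote state and stop as soon as the predicate is satisfied.
        if pred(&fetch_state().await) {
            return Ok(());
        }
        if tokio::time::Instant::now() >= deadline {
            return Err("timed out waiting for qBittorrent state to converge");
        }
        tokio::time::sleep(Duration::from_millis(200)).await;
    }
}
```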
@@ -1 +0,0 @@

297
apps/recorder/src/errors/app_error.rs
Normal file
@@ -0,0 +1,297 @@
use std::borrow::Cow;

use axum::{
    Json,
    response::{IntoResponse, Response},
};
use fetch::{FetchError, HttpClientError, reqwest, reqwest_middleware};
use http::{HeaderMap, StatusCode};
use snafu::Snafu;

use crate::{
    auth::AuthError,
    crypto::CryptoError,
    downloader::DownloaderError,
    errors::{OptDynErr, response::StandardErrorResponse},
};

#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RecorderError {
    #[snafu(display(
        "HTTP {status} {reason}, source = {source:?}",
        status = status,
        reason = status.canonical_reason().unwrap_or("Unknown")
    ))]
    HttpResponseError {
        status: StatusCode,
        headers: Option<HeaderMap>,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(transparent)]
    ImageError { source: image::ImageError },
    #[cfg(feature = "jxl")]
    #[snafu(transparent)]
    JxlEncodeError { source: jpegxl_rs::EncodeError },
    #[snafu(transparent, context(false))]
    HttpError { source: http::Error },
    #[snafu(transparent, context(false))]
    FancyRegexError {
        #[snafu(source(from(fancy_regex::Error, Box::new)))]
        source: Box<fancy_regex::Error>,
    },
    #[snafu(transparent)]
    NetAddrParseError { source: std::net::AddrParseError },
    #[snafu(transparent)]
    RegexError { source: regex::Error },
    #[snafu(display("Invalid method"))]
    InvalidMethodError,
    #[snafu(display("Invalid header value"))]
    InvalidHeaderValueError,
    #[snafu(display("Invalid header name"))]
    InvalidHeaderNameError,
    #[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
    MissingOriginError,
    #[snafu(transparent)]
    TracingAppenderInitError {
        source: tracing_appender::rolling::InitError,
    },
    #[snafu(transparent)]
    GraphQLSchemaError {
        source: async_graphql::dynamic::SchemaError,
    },
    #[snafu(transparent)]
    AuthError { source: AuthError },
    #[snafu(transparent)]
    DownloadError { source: DownloaderError },
    #[snafu(transparent)]
    RSSError { source: rss::Error },
    #[snafu(transparent)]
    DotEnvError { source: dotenvy::Error },
    #[snafu(transparent)]
    TeraError { source: tera::Error },
    #[snafu(transparent)]
    IOError { source: std::io::Error },
    #[snafu(transparent)]
    DbError { source: sea_orm::DbErr },
    #[snafu(transparent)]
    DbSqlxError { source: sea_orm::SqlxError },
    #[snafu(transparent, context(false))]
    FigmentError {
        #[snafu(source(from(figment::Error, Box::new)))]
        source: Box<figment::Error>,
    },
    #[snafu(transparent)]
    SerdeJsonError { source: serde_json::Error },
    #[snafu(transparent)]
    ParseUrlError { source: url::ParseError },
    #[snafu(display("{source}"), context(false))]
    OpenDALError {
        #[snafu(source(from(opendal::Error, Box::new)))]
        source: Box<opendal::Error>,
    },
    #[snafu(transparent)]
    HttpClientError { source: HttpClientError },
    #[cfg(feature = "testcontainers")]
    #[snafu(transparent)]
    TestcontainersError {
        source: testcontainers::TestcontainersError,
    },
    #[snafu(display("Extract {desc} with mime error, expected {expected}, but got {found}"))]
    MimeError {
        desc: String,
        expected: String,
        found: String,
    },
    #[snafu(display("Invalid or unknown format in extracting mikan rss"))]
    MikanRssInvalidFormatError,
    #[snafu(display("Invalid field {field} in extracting mikan rss"))]
    MikanRssInvalidFieldError {
        field: Cow<'static, str>,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(display("Missing field {field} in extracting mikan meta"))]
    MikanMetaMissingFieldError {
        field: Cow<'static, str>,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(display("Model Entity {entity} not found or not belong to subscriber"))]
    ModelEntityNotFound { entity: Cow<'static, str> },
    #[snafu(transparent)]
    FetchError { source: FetchError },
    #[snafu(display("Credential3rdError: {message}, source = {source}"))]
    Credential3rdError {
        message: String,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(transparent)]
    CryptoError { source: CryptoError },
    #[snafu(transparent)]
    StringFromUtf8Error { source: std::string::FromUtf8Error },
    #[snafu(display("{message}"))]
    Whatever {
        message: String,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(display("Invalid task id: {message}"))]
    InvalidTaskId { message: String },
}

impl RecorderError {
    pub fn from_status(status: StatusCode) -> Self {
        Self::HttpResponseError {
            status,
            headers: None,
            source: None.into(),
        }
    }

    pub fn from_status_and_headers(status: StatusCode, headers: HeaderMap) -> Self {
        Self::HttpResponseError {
            status,
            headers: Some(headers),
            source: None.into(),
        }
    }

    pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
        Self::MikanMetaMissingFieldError {
            field,
            source: None.into(),
        }
    }

    pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
        Self::MikanRssInvalidFieldError {
            field,
            source: None.into(),
        }
    }

    pub fn from_mikan_rss_invalid_field_and_source(
        field: Cow<'static, str>,
        source: impl std::error::Error + Send + Sync + 'static,
    ) -> Self {
        Self::MikanRssInvalidFieldError {
            field,
            source: OptDynErr::some_boxed(source),
        }
    }

    pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
        Self::DbError {
            source: sea_orm::DbErr::RecordNotFound(detail.to_string()),
        }
    }
}

impl snafu::FromString for RecorderError {
    type Source = Box<dyn std::error::Error + Send + Sync>;

    fn without_source(message: String) -> Self {
        Self::Whatever {
            message,
            source: OptDynErr::none(),
        }
    }

    fn with_source(source: Self::Source, message: String) -> Self {
        Self::Whatever {
            message,
            source: OptDynErr::some(source),
        }
    }
}

impl From<StatusCode> for RecorderError {
    fn from(status: StatusCode) -> Self {
        Self::HttpResponseError {
            status,
            headers: None,
            source: None.into(),
        }
    }
}

impl From<(StatusCode, HeaderMap)> for RecorderError {
    fn from((status, headers): (StatusCode, HeaderMap)) -> Self {
        Self::HttpResponseError {
            status,
            headers: Some(headers),
            source: None.into(),
        }
    }
}

impl IntoResponse for RecorderError {
    fn into_response(self) -> Response {
        match self {
            Self::AuthError { source: auth_error } => auth_error.into_response(),
            Self::HttpResponseError {
                status,
                headers,
                source,
            } => {
                let message = source
                    .into_inner()
                    .map(|s| s.to_string())
                    .unwrap_or_else(|| {
                        String::from(status.canonical_reason().unwrap_or("Unknown"))
                    });
                (
                    status,
                    headers,
                    Json::<StandardErrorResponse>(StandardErrorResponse::from(message)),
                )
                    .into_response()
            }
            Self::ModelEntityNotFound { entity } => (
                StatusCode::NOT_FOUND,
                Json::<StandardErrorResponse>(StandardErrorResponse::from(entity.to_string())),
            )
                .into_response(),
            err => (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
            )
                .into_response(),
        }
    }
}

impl From<reqwest::Error> for RecorderError {
    fn from(error: reqwest::Error) -> Self {
        FetchError::from(error).into()
    }
}

impl From<reqwest_middleware::Error> for RecorderError {
    fn from(error: reqwest_middleware::Error) -> Self {
        FetchError::from(error).into()
    }
}

impl From<http::header::InvalidHeaderValue> for RecorderError {
    fn from(_error: http::header::InvalidHeaderValue) -> Self {
        Self::InvalidHeaderValueError
    }
}

impl From<http::header::InvalidHeaderName> for RecorderError {
    fn from(_error: http::header::InvalidHeaderName) -> Self {
        Self::InvalidHeaderNameError
    }
}

impl From<http::method::InvalidMethod> for RecorderError {
    fn from(_error: http::method::InvalidMethod) -> Self {
        Self::InvalidMethodError
    }
}

pub type RecorderResult<T> = Result<T, RecorderError>;
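Most variants above are `#[snafu(transparent)]`, so call sites can bubble third-party errors into `RecorderError` with plain `?`. A minimal sketch of what a caller might look like, assuming only the re-exports from `crate::errors` shown in this diff (the function itself is hypothetical, not code from the repository):

```rust
use snafu::whatever;

use crate::errors::RecorderResult;

// Hypothetical helper, not part of the diff: url::ParseError converts through the
// transparent ParseUrlError variant, so `?` is enough; whatever! produces the
// Whatever variant via the snafu::FromString impl above.
fn parse_feed_url(raw: &str) -> RecorderResult<url::Url> {
    let url = url::Url::parse(raw)?;
    if url.cannot_be_a_base() {
        whatever!("feed url {raw} cannot be a base");
    }
    Ok(url)
}
```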
@@ -1,217 +1,6 @@
-pub mod whatever;
-use std::borrow::Cow;
-
-use axum::{
-    Json,
-    response::{IntoResponse, Response},
-};
-use http::StatusCode;
-use serde::{Deserialize, Deserializer, Serialize};
-use snafu::prelude::*;
-pub use whatever::OptionWhateverAsync;
-
-use crate::{auth::AuthError, downloader::DownloaderError, fetch::HttpClientError};
-
-#[derive(Snafu, Debug)]
-#[snafu(visibility(pub(crate)))]
-pub enum RError {
-    #[snafu(transparent, context(false))]
-    FancyRegexError {
-        #[snafu(source(from(fancy_regex::Error, Box::new)))]
-        source: Box<fancy_regex::Error>,
-    },
-    #[snafu(transparent)]
-    RegexError { source: regex::Error },
-    #[snafu(transparent)]
-    InvalidMethodError { source: http::method::InvalidMethod },
-    #[snafu(transparent)]
-    InvalidHeaderNameError {
-        source: http::header::InvalidHeaderName,
-    },
-    #[snafu(transparent)]
-    TracingAppenderInitError {
-        source: tracing_appender::rolling::InitError,
-    },
-    #[snafu(transparent)]
-    GraphQLSchemaError {
-        source: async_graphql::dynamic::SchemaError,
-    },
-    #[snafu(transparent)]
-    AuthError { source: AuthError },
-    #[snafu(transparent)]
-    DownloadError { source: DownloaderError },
-    #[snafu(transparent)]
-    RSSError { source: rss::Error },
-    #[snafu(transparent)]
-    DotEnvError { source: dotenv::Error },
-    #[snafu(transparent)]
-    TeraError { source: tera::Error },
-    #[snafu(transparent)]
-    IOError { source: std::io::Error },
-    #[snafu(transparent)]
-    DbError { source: sea_orm::DbErr },
-    #[snafu(transparent)]
-    CookieParseError { source: cookie::ParseError },
-    #[snafu(transparent, context(false))]
-    FigmentError {
-        #[snafu(source(from(figment::Error, Box::new)))]
-        source: Box<figment::Error>,
-    },
-    #[snafu(transparent)]
-    SerdeJsonError { source: serde_json::Error },
-    #[snafu(transparent)]
-    ReqwestMiddlewareError { source: reqwest_middleware::Error },
-    #[snafu(transparent)]
-    ReqwestError { source: reqwest::Error },
-    #[snafu(transparent)]
-    ParseUrlError { source: url::ParseError },
-    #[snafu(display("{source}"), context(false))]
-    OpenDALError {
-        #[snafu(source(from(opendal::Error, Box::new)))]
-        source: Box<opendal::Error>,
-    },
-    #[snafu(transparent)]
-    InvalidHeaderValueError {
-        source: http::header::InvalidHeaderValue,
-    },
-    #[snafu(transparent)]
-    HttpClientError { source: HttpClientError },
-    #[cfg(all(feature = "testcontainers", test))]
-    #[snafu(transparent)]
-    TestcontainersError {
-        source: testcontainers::TestcontainersError,
-    },
-    #[snafu(display("Extract {desc} with mime error, expected {expected}, but got {found}"))]
-    MimeError {
-        desc: String,
-        expected: String,
-        found: String,
-    },
-    #[snafu(display("Invalid or unknown format in extracting mikan rss"))]
-    MikanRssInvalidFormatError,
-    #[snafu(display("Invalid field {field} in extracting mikan rss"))]
-    MikanRssInvalidFieldError {
-        field: Cow<'static, str>,
-        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
-        source: OptionWhateverAsync,
-    },
-    #[snafu(display("Missing field {field} in extracting mikan meta"))]
-    MikanMetaMissingFieldError {
-        field: Cow<'static, str>,
-        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
-        source: OptionWhateverAsync,
-    },
-    #[snafu(display("Model Entity {entity} not found"))]
-    ModelEntityNotFound { entity: Cow<'static, str> },
-    #[snafu(display("{message}"))]
-    Whatever {
-        message: String,
-        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
-        source: OptionWhateverAsync,
-    },
-}
-
-impl RError {
-    pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
-        Self::MikanMetaMissingFieldError {
-            field,
-            source: None.into(),
-        }
-    }
-
-    pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
-        Self::MikanRssInvalidFieldError {
-            field,
-            source: None.into(),
-        }
-    }
-
-    pub fn from_mikan_rss_invalid_field_and_source(
-        field: Cow<'static, str>,
-        source: impl std::error::Error + Send + Sync + 'static,
-    ) -> Self {
-        Self::MikanRssInvalidFieldError {
-            field,
-            source: OptionWhateverAsync::some_boxed(source),
-        }
-    }
-
-    pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
-        Self::DbError {
-            source: sea_orm::DbErr::RecordNotFound(detail.to_string()),
-        }
-    }
-}
-
-impl snafu::FromString for RError {
-    type Source = Box<dyn std::error::Error + Send + Sync>;
-
-    fn without_source(message: String) -> Self {
-        Self::Whatever {
-            message,
-            source: OptionWhateverAsync::none(),
-        }
-    }
-
-    fn with_source(source: Self::Source, message: String) -> Self {
-        Self::Whatever {
-            message,
-            source: OptionWhateverAsync::some(source),
-        }
-    }
-}
-
-#[derive(Serialize, Debug, Clone)]
-pub struct StandardErrorResponse<T = ()> {
-    pub success: bool,
-    pub message: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub result: Option<T>,
-}
-
-impl<T> From<String> for StandardErrorResponse<T> {
-    fn from(value: String) -> Self {
-        StandardErrorResponse {
-            success: false,
-            message: value,
-            result: None,
-        }
-    }
-}
-
-impl IntoResponse for RError {
-    fn into_response(self) -> Response {
-        match self {
-            Self::AuthError { source: auth_error } => auth_error.into_response(),
-            err => (
-                StatusCode::INTERNAL_SERVER_ERROR,
-                Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
-            )
-                .into_response(),
-        }
-    }
-}
-
-impl Serialize for RError {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
-        serializer.serialize_str(&self.to_string())
-    }
-}
-
-impl<'de> Deserialize<'de> for RError {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let s = String::deserialize(deserializer)?;
-        Ok(Self::Whatever {
-            message: s,
-            source: None.into(),
-        })
-    }
-}
-
-pub type RResult<T> = Result<T, RError>;
+pub mod app_error;
+pub mod response;
+
+pub use app_error::{RecorderError, RecorderResult};
+pub use response::StandardErrorResponse;
+pub use util::errors::OptDynErr;
19
apps/recorder/src/errors/response.rs
Normal file
@@ -0,0 +1,19 @@
use serde::Serialize;

#[derive(Serialize, Debug, Clone)]
pub struct StandardErrorResponse<T = ()> {
    pub success: bool,
    pub message: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result: Option<T>,
}

impl<T> From<String> for StandardErrorResponse<T> {
    fn from(value: String) -> Self {
        StandardErrorResponse {
            success: false,
            message: value,
            result: None,
        }
    }
}
@@ -1,3 +1,4 @@
+use chrono::{DateTime, Utc};
 use fancy_regex::Regex as FancyRegex;
 use lazy_static::lazy_static;
 use quirks_path::Path;
@@ -6,7 +7,7 @@ use serde::{Deserialize, Serialize};
 use snafu::{OptionExt, whatever};

 use crate::{
-    errors::{RError, RResult},
+    errors::app_error::{RecorderError, RecorderResult},
     extract::defs::SUBTITLE_LANG,
 };

@@ -33,6 +34,14 @@ lazy_static! {
         Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
 }

+#[derive(Clone, Debug)]
+pub struct EpisodeEnclosureMeta {
+    pub magnet_link: Option<String>,
+    pub torrent_link: Option<String>,
+    pub pub_date: Option<DateTime<Utc>>,
+    pub content_length: Option<i64>,
+}
+
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
 pub struct TorrentEpisodeMediaMeta {
     pub fansub: Option<String>,
@@ -104,11 +113,11 @@ pub fn parse_episode_media_meta_from_torrent(
     torrent_path: &Path,
     torrent_name: Option<&str>,
     season: Option<i32>,
-) -> RResult<TorrentEpisodeMediaMeta> {
+) -> RecorderResult<TorrentEpisodeMediaMeta> {
     let media_name = torrent_path
         .file_name()
-        .with_whatever_context::<_, _, RError>(|| {
-            format!("failed to get file name of {}", torrent_path)
+        .with_whatever_context::<_, _, RecorderError>(|| {
+            format!("failed to get file name of {torrent_path}")
         })?;
     let mut match_obj = None;
     for rule in TORRENT_EP_PARSE_RULES.iter() {
@@ -124,7 +133,7 @@ pub fn parse_episode_media_meta_from_torrent(
     if let Some(match_obj) = match_obj {
         let group_season_and_title = match_obj
             .get(1)
-            .whatever_context::<_, RError>("should have 1 group")?
+            .whatever_context::<_, RecorderError>("should have 1 group")?
             .as_str();
         let (fansub, season_and_title) = get_fansub(group_season_and_title);
         let (title, season) = if let Some(season) = season {
@@ -135,13 +144,13 @@ pub fn parse_episode_media_meta_from_torrent(
         };
         let episode_index = match_obj
             .get(2)
-            .whatever_context::<_, RError>("should have 2 group")?
+            .whatever_context::<_, RecorderError>("should have 2 group")?
             .as_str()
             .parse::<i32>()
             .unwrap_or(1);
         let extname = torrent_path
             .extension()
-            .map(|e| format!(".{}", e))
+            .map(|e| format!(".{e}"))
             .unwrap_or_default();
         Ok(TorrentEpisodeMediaMeta {
             fansub: fansub.map(|s| s.to_string()),
@@ -163,12 +172,12 @@ pub fn parse_episode_subtitle_meta_from_torrent(
     torrent_path: &Path,
     torrent_name: Option<&str>,
     season: Option<i32>,
-) -> RResult<TorrentEpisodeSubtitleMeta> {
+) -> RecorderResult<TorrentEpisodeSubtitleMeta> {
     let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
     let media_name = torrent_path
         .file_name()
-        .with_whatever_context::<_, _, RError>(|| {
-            format!("failed to get file name of {}", torrent_path)
+        .with_whatever_context::<_, _, RecorderError>(|| {
+            format!("failed to get file name of {torrent_path}")
         })?;

     let lang = get_subtitle_lang(media_name);
@@ -268,17 +277,17 @@ mod tests {
         )
     }

-    pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) {
-        let extname = Path::new(raw_name)
+    pub fn test_torrent_ep_parser(origin_name: &str, expected: &str) {
+        let extname = Path::new(origin_name)
             .extension()
-            .map(|e| format!(".{}", e))
+            .map(|e| format!(".{e}"))
             .unwrap_or_default()
             .to_lowercase();

         if extname == ".srt" || extname == ".ass" {
             let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
             let found_raw =
-                parse_episode_subtitle_meta_from_torrent(Path::new(raw_name), None, None);
+                parse_episode_subtitle_meta_from_torrent(Path::new(origin_name), None, None);
             let found = found_raw.as_ref().ok().cloned();

             if expected != found {
@@ -299,7 +308,8 @@ mod tests {
             assert_eq!(expected, found);
         } else {
             let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
-            let found_raw = parse_episode_media_meta_from_torrent(Path::new(raw_name), None, None);
+            let found_raw =
+                parse_episode_media_meta_from_torrent(Path::new(origin_name), None, None);
             let found = found_raw.as_ref().ok().cloned();

             if expected != found {
3
apps/recorder/src/extract/bittorrent/mod.rs
Normal file
@@ -0,0 +1,3 @@
pub mod extract;

pub use extract::*;
@@ -1,8 +1,5 @@
-use std::collections::HashMap;
-
 use fancy_regex::Regex as FancyRegex;
 use lazy_static::lazy_static;
-use maplit::hashmap;
 use regex::Regex;

 const LANG_ZH_TW: &str = "zh-tw";
@@ -34,40 +31,4 @@ lazy_static! {
             (LANG_JP, vec!["jp", "jpn", "日"]),
         ]
     };
-    pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
-    pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
-    pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
-        hashmap! {
-            "〇" => 0,
-            "一" => 1,
-            "二" => 2,
-            "三" => 3,
-            "四" => 4,
-            "五" => 5,
-            "六" => 6,
-            "七" => 7,
-            "八" => 8,
-            "九" => 9,
-            "十" => 10,
-            "廿" => 20,
-            "百" => 100,
-            "千" => 1000,
-            "零" => 0,
-            "壹" => 1,
-            "贰" => 2,
-            "叁" => 3,
-            "肆" => 4,
-            "伍" => 5,
-            "陆" => 6,
-            "柒" => 7,
-            "捌" => 8,
-            "玖" => 9,
-            "拾" => 10,
-            "念" => 20,
-            "佰" => 100,
-            "仟" => 1000,
-        }
-    };
-    pub static ref ZH_NUM_RE: Regex =
-        Regex::new(r"[〇一二三四五六七八九十廿百千零壹贰叁肆伍陆柒捌玖拾念佰仟]").unwrap();
 }
@@ -19,21 +19,19 @@ pub fn extract_background_image_src_from_style_attr(
         match prop {
             Property::BackgroundImage(images) => {
                 for img in images {
-                    if let CSSImage::Url(path) = img {
-                        if let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
+                    if let CSSImage::Url(path) = img
+                        && let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
                     {
                         return Some(url);
-                        }
                     }
                 }
             }
             Property::Background(backgrounds) => {
                 for bg in backgrounds {
-                    if let CSSImage::Url(path) = &bg.image {
-                        if let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
+                    if let CSSImage::Url(path) = &bg.image
+                        && let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
                     {
                         return Some(url);
-                        }
                     }
                 }
             }
@@ -1,7 +1,12 @@
-use axum::http::{header, request::Parts, HeaderName, HeaderValue, Uri};
+use axum::{
+    extract::FromRequestParts,
+    http::{HeaderName, HeaderValue, Uri, header, request::Parts},
+};
 use itertools::Itertools;
 use url::Url;

+use crate::errors::RecorderError;
+
 /// Fields from a "Forwarded" header per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4)
 #[derive(Debug, Clone)]
 pub struct ForwardedHeader {
@@ -101,9 +106,13 @@ pub struct ForwardedRelatedInfo {
     pub origin: Option<String>,
 }

-impl ForwardedRelatedInfo {
-    pub fn from_request_parts(request_parts: &Parts) -> ForwardedRelatedInfo {
-        let headers = &request_parts.headers;
+impl<T> FromRequestParts<T> for ForwardedRelatedInfo {
+    type Rejection = RecorderError;
+    fn from_request_parts(
+        parts: &mut Parts,
+        _state: &T,
+    ) -> impl Future<Output = Result<Self, Self::Rejection>> + Send {
+        let headers = &parts.headers;
         let forwarded = headers
             .get(header::FORWARDED)
             .and_then(|s| ForwardedHeader::try_from(s.clone()).ok());
@@ -121,11 +130,7 @@ impl ForwardedRelatedInfo {
             .and_then(|s| s.to_str().ok())
             .and_then(|s| {
                 let l = s.split(",").map(|s| s.trim().to_string()).collect_vec();
-                if l.is_empty() {
-                    None
-                } else {
-                    Some(l)
-                }
+                if l.is_empty() { None } else { Some(l) }
             });

         let host = headers
@@ -136,17 +141,19 @@ impl ForwardedRelatedInfo {
             .get(header::ORIGIN)
             .and_then(|s| s.to_str().map(String::from).ok());

-        ForwardedRelatedInfo {
+        futures::future::ready(Ok(ForwardedRelatedInfo {
             host,
             x_forwarded_for,
             x_forwarded_host,
             x_forwarded_proto,
             forwarded,
-            uri: request_parts.uri.clone(),
+            uri: parts.uri.clone(),
             origin,
-        }
+        }))
     }
+}
+
+impl ForwardedRelatedInfo {
     pub fn resolved_protocol(&self) -> Option<&str> {
         self.forwarded
             .as_ref()
@@ -165,7 +172,7 @@ impl ForwardedRelatedInfo {

     pub fn resolved_origin(&self) -> Option<Url> {
         if let (Some(protocol), Some(host)) = (self.resolved_protocol(), self.resolved_host()) {
-            let origin = format!("{}://{}", protocol, host);
+            let origin = format!("{protocol}://{host}");
             Url::parse(&origin).ok()
         } else {
             None
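Because `ForwardedRelatedInfo` now implements axum's `FromRequestParts`, handlers can accept it as an extractor instead of building it from `Parts` by hand. A rough usage sketch under that assumption (the route and handler below are illustrative, not part of the diff):

```rust
use axum::{Router, routing::get};

// Hypothetical handler: the extractor itself comes from the impl shown above,
// and resolved_origin() falls back through Forwarded/X-Forwarded-*/Host headers.
async fn whoami(info: ForwardedRelatedInfo) -> String {
    info.resolved_origin()
        .map(|origin| origin.to_string())
        .unwrap_or_else(|| "unknown origin".to_string())
}

fn router() -> Router {
    Router::new().route("/whoami", get(whoami))
}
```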
@@ -3,6 +3,5 @@ use url::Url;
 pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
     let mut image_url = base_url.join(image_src).ok()?;
     image_url.set_query(None);
-    image_url.set_fragment(None);
     Some(image_url)
 }
@@ -1,66 +1,238 @@
|
|||||||
use std::{fmt::Debug, ops::Deref};
|
use std::{fmt::Debug, ops::Deref};
|
||||||
|
|
||||||
use reqwest_middleware::ClientWithMiddleware;
|
use fetch::{HttpClient, HttpClientTrait};
|
||||||
use serde::{Deserialize, Serialize};
|
use maplit::hashmap;
|
||||||
use url::Url;
|
use scraper::{Html, Selector};
|
||||||
|
use sea_orm::{
|
||||||
use super::MikanConfig;
|
ActiveModelTrait, ActiveValue::Set, ColumnTrait, DbErr, EntityTrait, QueryFilter, TryIntoModel,
|
||||||
use crate::{
|
|
||||||
errors::RError,
|
|
||||||
fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth},
|
|
||||||
};
|
};
|
||||||
|
use url::Url;
|
||||||
|
use util::OptDynErr;
|
||||||
|
|
||||||
#[derive(Default, Clone, Deserialize, Serialize)]
|
use super::{MikanConfig, MikanCredentialForm, constants::MIKAN_ACCOUNT_MANAGE_PAGE_PATH};
|
||||||
pub struct MikanAuthSecrecy {
|
use crate::{
|
||||||
pub cookie: String,
|
app::AppContextTrait,
|
||||||
pub user_agent: Option<String>,
|
crypto::UserPassCredential,
|
||||||
}
|
errors::{RecorderError, RecorderResult},
|
||||||
|
extract::mikan::constants::{MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH},
|
||||||
impl Debug for MikanAuthSecrecy {
|
models::credential_3rd::{self, Credential3rdType},
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
};
|
||||||
f.debug_struct("MikanAuthSecrecy")
|
|
||||||
.field("cookie", &String::from("[secrecy]"))
|
|
||||||
.field("user_agent", &String::from("[secrecy]"))
|
|
||||||
.finish()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl MikanAuthSecrecy {
|
|
||||||
pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RError> {
|
|
||||||
HttpClientCookiesAuth::from_cookies(&self.cookie, url, self.user_agent)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct MikanClient {
|
pub struct MikanClient {
|
||||||
http_client: HttpClient,
|
http_client: HttpClient,
|
||||||
base_url: Url,
|
base_url: Url,
|
||||||
|
origin_url: Url,
|
||||||
|
userpass_credential: Option<UserPassCredential>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl MikanClient {
|
impl MikanClient {
|
||||||
pub async fn from_config(config: MikanConfig) -> Result<Self, RError> {
|
pub async fn from_config(config: MikanConfig) -> Result<Self, RecorderError> {
|
||||||
let http_client = HttpClient::from_config(config.http_client)?;
|
let http_client = HttpClient::from_config(config.http_client)?;
|
||||||
let base_url = config.base_url;
|
let base_url = config.base_url;
|
||||||
|
let origin_url = Url::parse(&base_url.origin().unicode_serialization())?;
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
http_client,
|
http_client,
|
||||||
base_url,
|
base_url,
|
||||||
|
origin_url,
|
||||||
|
userpass_credential: None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn fork_with_auth(&self, secrecy: Option<MikanAuthSecrecy>) -> Result<Self, RError> {
|
pub async fn has_login(&self) -> RecorderResult<bool> {
|
||||||
let mut fork = self.http_client.fork();
|
let account_manage_page_url = self.base_url.join(MIKAN_ACCOUNT_MANAGE_PAGE_PATH)?;
|
||||||
|
let res = self.http_client.get(account_manage_page_url).send().await?;
|
||||||
if let Some(secrecy) = secrecy {
|
let status = res.status();
|
||||||
let cookie_auth = secrecy.into_cookie_auth(&self.base_url)?;
|
if status.is_success() {
|
||||||
fork = fork.attach_secrecy(cookie_auth);
|
Ok(true)
|
||||||
|
} else if status.is_redirection()
|
||||||
|
&& res.headers().get("location").is_some_and(|location| {
|
||||||
|
location
|
||||||
|
.to_str()
|
||||||
|
.is_ok_and(|location_str| location_str.contains(MIKAN_LOGIN_PAGE_PATH))
|
||||||
|
})
|
||||||
|
{
|
||||||
|
Ok(false)
|
||||||
|
} else {
|
||||||
|
Err(RecorderError::Credential3rdError {
|
||||||
|
message: format!("mikan account check has login failed, status = {status}"),
|
||||||
|
source: None.into(),
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn login(&self) -> RecorderResult<()> {
|
||||||
|
let userpass_credential =
|
||||||
|
self.userpass_credential
|
||||||
|
.as_ref()
|
||||||
|
.ok_or_else(|| RecorderError::Credential3rdError {
|
||||||
|
message: "mikan login failed, credential required".to_string(),
|
||||||
|
source: None.into(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let login_page_url = {
|
||||||
|
let mut u = self.base_url.join(MIKAN_LOGIN_PAGE_PATH)?;
|
||||||
|
u.set_query(Some(MIKAN_LOGIN_PAGE_SEARCH));
|
||||||
|
u
|
||||||
|
};
|
||||||
|
|
||||||
|
let antiforgery_token = {
|
||||||
|
// access login page to get antiforgery cookie
|
||||||
|
let login_page_html = self
|
||||||
|
.http_client
|
||||||
|
.get(login_page_url.clone())
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|error| RecorderError::Credential3rdError {
|
||||||
|
message: "failed to get mikan login page".to_string(),
|
||||||
|
source: OptDynErr::some_boxed(error),
|
||||||
|
})?
|
||||||
|
.text()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let login_page_html = Html::parse_document(&login_page_html);
|
||||||
|
|
||||||
|
let antiforgery_selector =
|
||||||
|
Selector::parse("input[name='__RequestVerificationToken']").unwrap();
|
||||||
|
|
||||||
|
login_page_html
|
||||||
|
.select(&antiforgery_selector)
|
||||||
|
.next()
|
||||||
|
.and_then(|element| element.value().attr("value").map(|value| value.to_string()))
|
||||||
|
.ok_or_else(|| RecorderError::Credential3rdError {
|
||||||
|
message: "mikan login failed, failed to get antiforgery token".to_string(),
|
||||||
|
source: None.into(),
|
||||||
|
})
|
||||||
|
}?;
|
||||||
|
|
||||||
|
let login_post_form = hashmap! {
|
||||||
|
"__RequestVerificationToken".to_string() => antiforgery_token,
|
||||||
|
"UserName".to_string() => userpass_credential.username.clone(),
|
||||||
|
"Password".to_string() => userpass_credential.password.clone(),
|
||||||
|
"RememberMe".to_string() => "true".to_string(),
|
||||||
|
};
|
||||||
|
let login_post_res = self
|
||||||
|
.http_client
|
||||||
|
.post(login_page_url.clone())
|
||||||
|
.form(&login_post_form)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|err| RecorderError::Credential3rdError {
|
||||||
|
message: "mikan login failed".to_string(),
|
||||||
|
source: OptDynErr::some_boxed(err),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
if login_post_res.status().is_redirection()
|
||||||
|
&& login_post_res.headers().contains_key("location")
|
||||||
|
{
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(RecorderError::Credential3rdError {
|
||||||
|
message: "mikan login failed, no redirecting".to_string(),
|
||||||
|
source: None.into(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn submit_credential_form(
|
||||||
|
&self,
|
||||||
|
ctx: &dyn AppContextTrait,
|
||||||
|
subscriber_id: i32,
|
||||||
|
credential_form: MikanCredentialForm,
|
||||||
|
) -> RecorderResult<credential_3rd::Model> {
|
||||||
|
let db = ctx.db();
|
||||||
|
let am = credential_3rd::ActiveModel {
|
||||||
|
username: Set(Some(credential_form.username)),
|
||||||
|
password: Set(Some(credential_form.password)),
|
||||||
|
user_agent: Set(Some(credential_form.user_agent)),
|
||||||
|
credential_type: Set(Credential3rdType::Mikan),
|
||||||
|
subscriber_id: Set(subscriber_id),
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
.try_encrypt(ctx)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let credential: credential_3rd::Model = am.save(db).await?.try_into_model()?;
|
||||||
|
Ok(credential)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn sync_credential_cookies(
|
||||||
|
&self,
|
||||||
|
ctx: &dyn AppContextTrait,
|
||||||
|
credential_id: i32,
|
||||||
|
subscriber_id: i32,
|
||||||
|
) -> RecorderResult<()> {
|
||||||
|
let cookies = self.http_client.save_cookie_store_to_json()?;
|
||||||
|
if let Some(cookies) = cookies {
|
||||||
|
let am = credential_3rd::ActiveModel {
|
||||||
|
cookies: Set(Some(cookies)),
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
.try_encrypt(ctx)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
credential_3rd::Entity::update_many()
|
||||||
|
.set(am)
|
||||||
|
.filter(credential_3rd::Column::Id.eq(credential_id))
|
||||||
|
.filter(credential_3rd::Column::SubscriberId.eq(subscriber_id))
|
||||||
|
.exec(ctx.db())
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn fork_with_userpass_credential(
|
||||||
|
&self,
|
||||||
|
userpass_credential: UserPassCredential,
|
||||||
|
) -> RecorderResult<Self> {
|
||||||
|
let mut fork = self
|
||||||
|
.http_client
|
||||||
|
.fork()
|
||||||
|
.attach_cookies(userpass_credential.cookies.as_deref())?;
|
||||||
|
|
||||||
|
if let Some(user_agent) = userpass_credential.user_agent.as_ref() {
|
||||||
|
fork = fork.attach_user_agent(user_agent);
|
||||||
|
}
|
||||||
|
|
||||||
|
let userpass_credential_opt = Some(userpass_credential);
|
||||||
|
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
http_client: HttpClient::from_fork(fork)?,
|
http_client: HttpClient::from_fork(fork)?,
|
||||||
base_url: self.base_url.clone(),
|
base_url: self.base_url.clone(),
|
||||||
|
origin_url: self.origin_url.clone(),
|
||||||
|
userpass_credential: userpass_credential_opt,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn fork_with_credential_id(
|
||||||
|
&self,
|
||||||
|
ctx: &dyn AppContextTrait,
|
||||||
|
credential_id: i32,
|
||||||
|
subscriber_id: i32,
|
||||||
|
) -> RecorderResult<Self> {
|
||||||
|
let credential =
|
||||||
|
credential_3rd::Model::find_by_id_and_subscriber_id(ctx, credential_id, subscriber_id)
|
||||||
|
.await?;
|
||||||
|
if let Some(credential) = credential {
|
||||||
|
if credential.credential_type != Credential3rdType::Mikan {
|
||||||
|
return Err(RecorderError::Credential3rdError {
|
||||||
|
message: "credential is not a mikan credential".to_string(),
|
||||||
|
source: None.into(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let userpass_credential: UserPassCredential =
|
||||||
|
credential.try_into_userpass_credential(ctx)?;
|
||||||
|
|
||||||
|
self.fork_with_userpass_credential(userpass_credential)
|
||||||
|
.await
|
||||||
|
} else {
|
||||||
|
Err(RecorderError::from_db_record_not_found(
|
||||||
|
DbErr::RecordNotFound(format!("credential={credential_id} not found")),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn base_url(&self) -> &Url {
|
pub fn base_url(&self) -> &Url {
|
||||||
&self.base_url
|
&self.base_url
|
||||||
}
|
}
|
||||||
@@ -71,11 +243,102 @@ impl MikanClient {
    }
}

impl Deref for MikanClient {
    type Target = fetch::reqwest_middleware::ClientWithMiddleware; // was: ClientWithMiddleware

    fn deref(&self) -> &Self::Target {
        &self.http_client // was: self.http_client.deref()
    }
}

impl HttpClientTrait for MikanClient {}

#[cfg(test)]
mod tests {
    #![allow(unused_variables)]
    use std::{assert_matches::assert_matches, sync::Arc};

    use rstest::{fixture, rstest};
    use tracing::Level;

    use super::*;
    use crate::test_utils::{
        app::TestingAppContext,
        crypto::build_testing_crypto_service,
        database::build_testing_database_service,
        mikan::{MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential_form},
        tracing::try_init_testing_tracing,
    };

    async fn create_testing_context(
        mikan_base_url: Url,
    ) -> RecorderResult<Arc<dyn AppContextTrait>> {
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
        let db_service = build_testing_database_service(Default::default()).await?;
        let crypto_service = build_testing_crypto_service().await?;
        let ctx = TestingAppContext::builder()
            .db(db_service)
            .crypto(crypto_service)
            .mikan(mikan_client)
            .build();

        Ok(Arc::new(ctx))
    }

    #[fixture]
    fn before_each() {
        try_init_testing_tracing(Level::DEBUG);
    }

    #[rstest]
    #[tokio::test]
    async fn test_mikan_client_submit_credential_form(before_each: ()) -> RecorderResult<()> {
        let mut mikan_server = MikanMockServer::new().await?;

        let app_ctx = create_testing_context(mikan_server.base_url().clone()).await?;

        let _login_mock = mikan_server.mock_get_login_page();

        let mikan_client = app_ctx.mikan();
        let crypto_service = app_ctx.crypto();

        let credential_form = build_testing_mikan_credential_form();

        let subscriber_id = 1;

        let credential_model = mikan_client
            .submit_credential_form(app_ctx.as_ref(), subscriber_id, credential_form.clone())
            .await?;

        let expected_username = &credential_form.username;
        let expected_password = &credential_form.password;

        let found_username = crypto_service
            .decrypt_string(credential_model.username.as_deref().unwrap_or_default())?;
        let found_password = crypto_service
            .decrypt_string(credential_model.password.as_deref().unwrap_or_default())?;

        assert_eq!(&found_username, expected_username);
        assert_eq!(&found_password, expected_password);

        let has_login = mikan_client.has_login().await?;

        assert!(!has_login);

        assert_matches!(
            mikan_client.login().await,
            Err(RecorderError::Credential3rdError { .. })
        );

        let mikan_client = mikan_client
            .fork_with_credential_id(app_ctx.as_ref(), credential_model.id, subscriber_id)
            .await?;

        mikan_client.login().await?;

        let has_login = mikan_client.has_login().await?;

        assert!(has_login);

        Ok(())
    }
}
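Not part of the diff above: a minimal standalone sketch of the same forking idea using plain reqwest, since `HttpClient::fork`, `attach_cookies`, and `attach_user_agent` are this repo's own wrappers. The cookie string, user agent, and URL below are made-up placeholders; it assumes `reqwest` with the `cookies` feature plus `tokio` and `url`.

use std::sync::Arc;

use reqwest::cookie::Jar;
use url::Url;

// Illustrative sketch only: rebuild a client around a cookie jar seeded from
// previously saved cookie strings, roughly what forking with a stored
// credential does in the crate.
fn client_with_saved_cookies(
    base_url: &Url,
    saved_cookies: &[&str],
    user_agent: Option<&str>,
) -> reqwest::Result<reqwest::Client> {
    let jar = Arc::new(Jar::default());
    for cookie in saved_cookies {
        // Each entry is a "name=value; Path=/; ..." string captured earlier.
        jar.add_cookie_str(cookie, base_url);
    }

    let mut builder = reqwest::Client::builder().cookie_provider(jar);
    if let Some(ua) = user_agent {
        builder = builder.user_agent(ua);
    }
    builder.build()
}

#[tokio::main]
async fn main() -> reqwest::Result<()> {
    // Placeholder values for illustration; not taken from the repo.
    let base_url = Url::parse("https://mikanani.me/").expect("valid url");
    let client =
        client_with_saved_cookies(&base_url, &[".AspNetCore.Identity=abc123"], Some("recorder/0.1"))?;
    let resp = client.get(base_url).send().await?;
    println!("status: {}", resp.status());
    Ok(())
}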
apps/recorder/src/extract/mikan/config.rs
@@ -1,9 +1,8 @@
use fetch::HttpClientConfig; // was: use crate::fetch::HttpClientConfig;
use serde::{Deserialize, Serialize};
use url::Url;

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] // was: #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanConfig {
    pub http_client: HttpClientConfig,
    pub base_url: Url,
apps/recorder/src/extract/mikan/constants.rs
@@ -1,3 +1,20 @@
pub const MIKAN_POSTER_BUCKET_KEY: &str = "mikan_poster"; // was: pub const MIKAN_BUCKET_KEY: &str = "mikan";
pub const MIKAN_UNKNOWN_FANSUB_NAME: &str = "生肉/不明字幕";
pub const MIKAN_UNKNOWN_FANSUB_ID: &str = "202";
pub const MIKAN_LOGIN_PAGE_PATH: &str = "/Account/Login";
pub const MIKAN_LOGIN_PAGE_SEARCH: &str = "ReturnUrl=%2F";
pub const MIKAN_ACCOUNT_MANAGE_PAGE_PATH: &str = "/Account/Manage";
pub const MIKAN_SEASON_FLOW_PAGE_PATH: &str = "/Home/BangumiCoverFlow";
pub const MIKAN_BANGUMI_HOMEPAGE_PATH: &str = "/Home/Bangumi";
pub const MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH: &str = "/Home/ExpandBangumi";
pub const MIKAN_EPISODE_HOMEPAGE_PATH: &str = "/Home/Episode";
pub const MIKAN_BANGUMI_POSTER_PATH: &str = "/images/Bangumi";
pub const MIKAN_EPISODE_TORRENT_PATH: &str = "/Download";
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH: &str = "/RSS/MyBangumi";
pub const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi";
pub const MIKAN_FANSUB_HOMEPAGE_PATH: &str = "/Home/PublishGroup";
pub const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId";
pub const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid";
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY: &str = "token";
pub const MIKAN_SEASON_STR_QUERY_KEY: &str = "seasonStr";
pub const MIKAN_YEAR_QUERY_KEY: &str = "year";
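Not part of the diff: a rough sketch of how path and query-key constants like these are typically combined into a Mikan RSS URL with the `url` crate. The helper name `build_bangumi_rss_url` and the IDs are made up for illustration; the repo's real builders (for example `build_mikan_bangumi_subscription_rss_url`) are re-exported from `web.rs` in the new module layout below.

use url::Url;

const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi";
const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId";
const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid";

// Hypothetical helper, shown only to illustrate how the constants compose.
fn build_bangumi_rss_url(base: &Url, bangumi_id: &str, fansub_id: Option<&str>) -> Url {
    let mut url = base.clone();
    url.set_path(MIKAN_BANGUMI_RSS_PATH);
    url.query_pairs_mut()
        .append_pair(MIKAN_BANGUMI_ID_QUERY_KEY, bangumi_id);
    if let Some(fansub_id) = fansub_id {
        url.query_pairs_mut()
            .append_pair(MIKAN_FANSUB_ID_QUERY_KEY, fansub_id);
    }
    url
}

fn main() {
    let base = Url::parse("https://mikanani.me/").unwrap();
    let rss = build_bangumi_rss_url(&base, "3600", Some("370"));
    // Prints: https://mikanani.me/RSS/Bangumi?bangumiId=3600&subgroupid=370
    println!("{rss}");
}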
apps/recorder/src/extract/mikan/credential.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
use std::fmt::Debug;

use serde::{Deserialize, Serialize};

#[derive(Default, Clone, Deserialize, Serialize)]
pub struct MikanCredentialForm {
    pub password: String,
    pub username: String,
    pub user_agent: String,
}

impl Debug for MikanCredentialForm {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("MikanCredentialForm")
            .field("username", &String::from("[secrecy]"))
            .field("password", &String::from("[secrecy]"))
            .field("user_agent", &String::from("[secrecy]"))
            .finish()
    }
}
apps/recorder/src/extract/mikan/mod.rs
@@ -1,21 +1,41 @@
-pub mod client;
-pub mod config;
-pub mod constants;
-pub mod rss_extract;
-pub mod web_extract;
+mod client;
+mod config;
+mod constants;
+mod credential;
+mod subscription;
+mod web;

-pub use client::{MikanAuthSecrecy, MikanClient};
+pub use client::MikanClient;
 pub use config::MikanConfig;
-pub use constants::MIKAN_BUCKET_KEY;
-pub use rss_extract::{
-    MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanBangumiRssLink,
-    MikanRssChannel, MikanRssItem, MikanSubscriberAggregationRssChannel,
-    MikanSubscriberAggregationRssLink, build_mikan_bangumi_rss_link,
-    build_mikan_subscriber_aggregation_rss_link, extract_mikan_bangumi_id_from_rss_link,
-    extract_mikan_rss_channel_from_rss_link, extract_mikan_subscriber_aggregation_id_from_rss_link,
-};
-pub use web_extract::{
-    MikanBangumiMeta, MikanEpisodeMeta, build_mikan_bangumi_homepage, build_mikan_episode_homepage,
-    extract_mikan_bangumi_meta_from_bangumi_homepage,
-    extract_mikan_episode_meta_from_episode_homepage,
-};
+pub use constants::{
+    MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH,
+    MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH,
+    MIKAN_BANGUMI_RSS_PATH, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_EPISODE_TORRENT_PATH,
+    MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH,
+    MIKAN_LOGIN_PAGE_SEARCH, MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH,
+    MIKAN_SEASON_STR_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH,
+    MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_UNKNOWN_FANSUB_ID,
+    MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
+};
+pub use credential::MikanCredentialForm;
+pub use subscription::{
+    MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
+};
+pub use web::{
+    MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
+    MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanFansubHash,
+    MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
+    MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_expand_subscribed_url,
+    build_mikan_bangumi_homepage_url, build_mikan_bangumi_subscription_rss_url,
+    build_mikan_episode_homepage_url, build_mikan_season_flow_url,
+    build_mikan_subscriber_subscription_rss_url,
+    extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
+    extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
+    extract_mikan_episode_meta_from_episode_homepage_html,
+    scrape_mikan_bangumi_index_meta_from_bangumi_homepage_url,
+    scrape_mikan_bangumi_meta_from_bangumi_homepage_url,
+    scrape_mikan_bangumi_meta_list_from_season_flow_url,
+    scrape_mikan_bangumi_meta_stream_from_season_flow_url,
+    scrape_mikan_episode_meta_from_episode_homepage_url, scrape_mikan_poster_data_from_image_url,
+    scrape_mikan_poster_meta_from_image_url,
+};
Deleted: apps/recorder/src/extract/mikan/rss_extract.rs
@@ -1,424 +0,0 @@
use std::borrow::Cow;

use chrono::DateTime;
use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
use tracing::instrument;
use url::Url;

use crate::{
    downloader::core::BITTORRENT_MIME_TYPE,
    errors::{RError, RResult},
    extract::mikan::{
        MikanClient,
        web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
    },
    fetch::bytes::fetch_bytes,
};

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanRssItem {
    pub title: String,
    pub homepage: Url,
    pub url: Url,
    pub content_length: Option<u64>,
    pub mime: String,
    pub pub_date: Option<i64>,
    pub mikan_episode_id: String,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanBangumiRssChannel {
    pub name: String,
    pub url: Url,
    pub mikan_bangumi_id: String,
    pub mikan_fansub_id: String,
    pub items: Vec<MikanRssItem>,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanBangumiAggregationRssChannel {
    pub name: String,
    pub url: Url,
    pub mikan_bangumi_id: String,
    pub items: Vec<MikanRssItem>,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriberAggregationRssChannel {
    pub mikan_aggregation_id: String,
    pub url: Url,
    pub items: Vec<MikanRssItem>,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum MikanRssChannel {
    Bangumi(MikanBangumiRssChannel),
    BangumiAggregation(MikanBangumiAggregationRssChannel),
    SubscriberAggregation(MikanSubscriberAggregationRssChannel),
}

impl MikanRssChannel {
    pub fn items(&self) -> &[MikanRssItem] {
        match &self {
            Self::Bangumi(MikanBangumiRssChannel { items, .. })
            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { items, .. })
            | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { items, .. }) => {
                items
            }
        }
    }

    pub fn into_items(self) -> Vec<MikanRssItem> {
        match self {
            Self::Bangumi(MikanBangumiRssChannel { items, .. })
            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { items, .. })
            | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { items, .. }) => {
                items
            }
        }
    }

    pub fn name(&self) -> Option<&str> {
        match &self {
            Self::Bangumi(MikanBangumiRssChannel { name, .. })
            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { name, .. }) => {
                Some(name.as_str())
            }
            Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { .. }) => None,
        }
    }

    pub fn url(&self) -> &Url {
        match &self {
            Self::Bangumi(MikanBangumiRssChannel { url, .. })
            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { url, .. })
            | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { url, .. }) => url,
        }
    }
}

impl TryFrom<rss::Item> for MikanRssItem {
    type Error = RError;

    fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
        let enclosure = item
            .enclosure
            .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure")))?;

        let mime_type = enclosure.mime_type;
        if mime_type != BITTORRENT_MIME_TYPE {
            return Err(RError::MimeError {
                expected: String::from(BITTORRENT_MIME_TYPE),
                found: mime_type.to_string(),
                desc: String::from("MikanRssItem"),
            });
        }

        let title = item
            .title
            .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?;

        let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
            RError::from_mikan_rss_invalid_field_and_source(
                "enclosure_url:enclosure.link".into(),
                err,
            )
        })?;

        let homepage = item
            .link
            .and_then(|link| Url::parse(&link).ok())
            .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link")))?;

        let MikanEpisodeHomepage {
            mikan_episode_id, ..
        } = extract_mikan_episode_id_from_homepage(&homepage).ok_or_else(|| {
            RError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
        })?;

        Ok(MikanRssItem {
            title,
            homepage,
            url: enclosure_url,
            content_length: enclosure.length.parse().ok(),
            mime: mime_type,
            pub_date: item
                .pub_date
                .and_then(|s| DateTime::parse_from_rfc2822(&s).ok())
                .map(|s| s.timestamp_millis()),
            mikan_episode_id,
        })
    }
}

#[derive(Debug, Clone)]
pub struct MikanBangumiRssLink {
    pub mikan_bangumi_id: String,
    pub mikan_fansub_id: Option<String>,
}

#[derive(Debug, Clone)]
pub struct MikanSubscriberAggregationRssLink {
    pub mikan_aggregation_id: String,
}

pub fn build_mikan_bangumi_rss_link(
    mikan_base_url: impl IntoUrl,
    mikan_bangumi_id: &str,
    mikan_fansub_id: Option<&str>,
) -> RResult<Url> {
    let mut url = mikan_base_url.into_url()?;
    url.set_path("/RSS/Bangumi");
    url.query_pairs_mut()
        .append_pair("bangumiId", mikan_bangumi_id);
    if let Some(mikan_fansub_id) = mikan_fansub_id {
        url.query_pairs_mut()
            .append_pair("subgroupid", mikan_fansub_id);
    };
    Ok(url)
}

pub fn build_mikan_subscriber_aggregation_rss_link(
    mikan_base_url: &str,
    mikan_aggregation_id: &str,
) -> RResult<Url> {
    let mut url = Url::parse(mikan_base_url)?;
    url.set_path("/RSS/MyBangumi");
    url.query_pairs_mut()
        .append_pair("token", mikan_aggregation_id);
    Ok(url)
}

pub fn extract_mikan_bangumi_id_from_rss_link(url: &Url) -> Option<MikanBangumiRssLink> {
    if url.path() == "/RSS/Bangumi" {
        url.query_pairs()
            .find(|(k, _)| k == "bangumiId")
            .map(|(_, v)| MikanBangumiRssLink {
                mikan_bangumi_id: v.to_string(),
                mikan_fansub_id: url
                    .query_pairs()
                    .find(|(k, _)| k == "subgroupid")
                    .map(|(_, v)| v.to_string()),
            })
    } else {
        None
    }
}

pub fn extract_mikan_subscriber_aggregation_id_from_rss_link(
    url: &Url,
) -> Option<MikanSubscriberAggregationRssLink> {
    if url.path() == "/RSS/MyBangumi" {
        url.query_pairs().find(|(k, _)| k == "token").map(|(_, v)| {
            MikanSubscriberAggregationRssLink {
                mikan_aggregation_id: v.to_string(),
            }
        })
    } else {
        None
    }
}

#[instrument(skip_all, fields(channel_rss_link = channel_rss_link.as_str()))]
pub async fn extract_mikan_rss_channel_from_rss_link(
    http_client: &MikanClient,
    channel_rss_link: impl IntoUrl,
) -> RResult<MikanRssChannel> {
    let bytes = fetch_bytes(http_client, channel_rss_link.as_str()).await?;

    let channel = rss::Channel::read_from(&bytes[..])?;

    let channel_link = Url::parse(channel.link())?;

    if let Some(MikanBangumiRssLink {
        mikan_bangumi_id,
        mikan_fansub_id,
    }) = extract_mikan_bangumi_id_from_rss_link(&channel_link)
    {
        tracing::trace!(
            mikan_bangumi_id,
            mikan_fansub_id,
            "MikanBangumiRssLink extracting..."
        );

        let channel_name = channel.title().replace("Mikan Project - ", "");

        let items = channel
            .items
            .into_iter()
            .enumerate()
            .flat_map(|(idx, item)| {
                MikanRssItem::try_from(item).inspect_err(
                    |error| tracing::warn!(error = %error, "failed to extract rss item idx = {}", idx),
                )
            })
            .collect_vec();

        if let Some(mikan_fansub_id) = mikan_fansub_id {
            tracing::trace!(
                channel_name,
                channel_link = channel_link.as_str(),
                mikan_bangumi_id,
                mikan_fansub_id,
                "MikanBangumiRssChannel extracted"
            );

            Ok(MikanRssChannel::Bangumi(MikanBangumiRssChannel {
                name: channel_name,
                mikan_bangumi_id,
                mikan_fansub_id,
                url: channel_link,
                items,
            }))
        } else {
            tracing::trace!(
                channel_name,
                channel_link = channel_link.as_str(),
                mikan_bangumi_id,
                "MikanBangumiAggregationRssChannel extracted"
            );

            Ok(MikanRssChannel::BangumiAggregation(
                MikanBangumiAggregationRssChannel {
                    name: channel_name,
                    mikan_bangumi_id,
                    url: channel_link,
                    items,
                },
            ))
        }
    } else if let Some(MikanSubscriberAggregationRssLink {
        mikan_aggregation_id,
        ..
    }) = extract_mikan_subscriber_aggregation_id_from_rss_link(&channel_link)
    {
        tracing::trace!(
            mikan_aggregation_id,
            "MikanSubscriberAggregationRssLink extracting..."
        );

        let items = channel
            .items
            .into_iter()
            .enumerate()
            .flat_map(|(idx, item)| {
                MikanRssItem::try_from(item).inspect_err(
                    |error| tracing::warn!(error = %error, "failed to extract rss item idx = {}", idx),
                )
            })
            .collect_vec();

        tracing::trace!(
            channel_link = channel_link.as_str(),
            mikan_aggregation_id,
            "MikanSubscriberAggregationRssChannel extracted"
        );

        Ok(MikanRssChannel::SubscriberAggregation(
            MikanSubscriberAggregationRssChannel {
                mikan_aggregation_id,
                items,
                url: channel_link,
            },
        ))
    } else {
        Err(RError::MikanRssInvalidFormatError).inspect_err(|error| {
            tracing::warn!(error = %error);
        })
    }
}

#[cfg(test)]
mod tests {
    use std::assert_matches::assert_matches;

    use rstest::rstest;
    use url::Url;

    use crate::{
        downloader::core::BITTORRENT_MIME_TYPE,
        errors::RResult,
        extract::mikan::{
            MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
            extract_mikan_rss_channel_from_rss_link,
        },
        test_utils::mikan::build_testing_mikan_client,
    };

    #[rstest]
    #[tokio::test]
    async fn test_parse_mikan_rss_channel_from_rss_link() -> RResult<()> {
        let mut mikan_server = mockito::Server::new_async().await;

        let mikan_base_url = Url::parse(&mikan_server.url())?;

        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;

        {
            let bangumi_rss_url =
                mikan_base_url.join("/RSS/Bangumi?bangumiId=3141&subgroupid=370")?;
            let bangumi_rss_mock = mikan_server
                .mock("GET", bangumi_rss_url.path())
                .with_body_from_file("tests/resources/mikan/Bangumi-3141-370.rss")
                .match_query(mockito::Matcher::Any)
                .create_async()
                .await;

            let channel = extract_mikan_rss_channel_from_rss_link(&mikan_client, bangumi_rss_url)
                .await
                .expect("should get mikan channel from rss url");

            assert_matches!(
                &channel,
                MikanRssChannel::Bangumi(MikanBangumiRssChannel { .. })
            );

            assert_matches!(&channel.name(), Some("葬送的芙莉莲"));

            let items = channel.items();
            let first_sub_item = items
                .first()
                .expect("mikan subscriptions should have at least one subs");

            assert_eq!(first_sub_item.mime, BITTORRENT_MIME_TYPE);

            assert!(
                &first_sub_item
                    .homepage
                    .as_str()
                    .starts_with("https://mikanani.me/Home/Episode")
            );

            let name = first_sub_item.title.as_str();
            assert!(name.contains("葬送的芙莉莲"));

            bangumi_rss_mock.expect(1);
        }
        {
            let bangumi_rss_url = mikan_base_url.join("/RSS/Bangumi?bangumiId=3416")?;

            let bangumi_rss_mock = mikan_server
                .mock("GET", bangumi_rss_url.path())
                .match_query(mockito::Matcher::Any)
                .with_body_from_file("tests/resources/mikan/Bangumi-3416.rss")
                .create_async()
                .await;

            let channel = extract_mikan_rss_channel_from_rss_link(&mikan_client, bangumi_rss_url)
                .await
                .expect("should get mikan channel from rss url");

            assert_matches!(
                &channel,
                MikanRssChannel::BangumiAggregation(MikanBangumiAggregationRssChannel { .. })
            );

            assert_matches!(&channel.name(), Some("叹气的亡灵想隐退"));

            bangumi_rss_mock.expect(1);
        }
        Ok(())
    }
}
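For context on the `rss::Channel::read_from` calls used in the deleted module above and in the new `subscription.rs` below, here is a minimal standalone sketch of parsing a feed and filtering enclosures by MIME type. The inline XML and the `BITTORRENT_MIME_TYPE` value are assumptions for illustration only; they are not taken from the repo.

use rss::Channel;

// Assumed value mirroring the crate's BITTORRENT_MIME_TYPE constant.
const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A tiny hand-written feed standing in for a fetched Mikan RSS body.
    let xml = r#"<rss version="2.0"><channel>
        <title>Mikan Project - Example</title>
        <link>https://mikanani.me/RSS/Bangumi?bangumiId=3141&amp;subgroupid=370</link>
        <item>
            <title>Example Episode</title>
            <link>https://mikanani.me/Home/Episode/abc123</link>
            <enclosure url="https://mikanani.me/Download/abc123.torrent"
                       length="123456" type="application/x-bittorrent"/>
        </item>
    </channel></rss>"#;

    let channel = Channel::read_from(xml.as_bytes())?;
    for item in channel.items() {
        if let Some(enclosure) = item.enclosure() {
            // Keep only torrent enclosures, as the extractor does.
            if enclosure.mime_type() == BITTORRENT_MIME_TYPE {
                println!("torrent: {}", enclosure.url());
            }
        }
    }
    Ok(())
}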
apps/recorder/src/extract/mikan/subscription.rs (new file, 778 lines)
@@ -0,0 +1,778 @@
use std::{
    collections::{HashMap, HashSet},
    fmt::Debug,
    sync::Arc,
};

use async_graphql::{InputObject, SimpleObject};
use async_stream::try_stream;
use fetch::fetch_bytes;
use futures::{Stream, TryStreamExt, pin_mut, try_join};
use maplit::hashmap;
use sea_orm::{
    ColumnTrait, Condition, EntityTrait, JoinType, QueryFilter, QuerySelect, RelationTrait,
};
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, ResultExt};
use url::Url;

use super::scrape_mikan_bangumi_meta_stream_from_season_flow_url;
use crate::{
    app::AppContextTrait,
    errors::{RecorderError, RecorderResult},
    extract::{
        bittorrent::EpisodeEnclosureMeta,
        mikan::{
            MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
            MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
            MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
            build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
            scrape_mikan_episode_meta_from_episode_homepage_url,
        },
    },
    models::{
        bangumi, episodes, subscription_bangumi, subscription_episode,
        subscriptions::{self, SubscriptionTrait},
    },
};

#[tracing::instrument(err, skip(ctx, rss_item_list))]
async fn sync_mikan_feeds_from_rss_item_list(
    ctx: &dyn AppContextTrait,
    rss_item_list: Vec<MikanRssEpisodeItem>,
    subscriber_id: i32,
    subscription_id: i32,
) -> RecorderResult<()> {
    let mikan_base_url = ctx.mikan().base_url().clone();
    let (new_episode_meta_list, existed_episode_hash2id_map) = {
        let existed_episode_hash2id_map = episodes::Model::get_existed_mikan_episode_list(
            ctx,
            rss_item_list.iter().map(|s| MikanEpisodeHash {
                mikan_episode_id: s.mikan_episode_id.clone(),
            }),
            subscriber_id,
            subscription_id,
        )
        .await?
        .map(|(episode_id, hash, bangumi_id)| (hash.mikan_episode_id, (episode_id, bangumi_id)))
        .collect::<HashMap<_, _>>();

        let mut new_episode_meta_list: Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)> = vec![];

        let mikan_client = ctx.mikan();
        for to_insert_rss_item in rss_item_list.into_iter().filter(|rss_item| {
            !existed_episode_hash2id_map.contains_key(&rss_item.mikan_episode_id)
        }) {
            let episode_meta = scrape_mikan_episode_meta_from_episode_homepage_url(
                mikan_client,
                to_insert_rss_item.build_homepage_url(mikan_base_url.clone()),
            )
            .await?;
            let episode_enclosure_meta = EpisodeEnclosureMeta::from(to_insert_rss_item);
            new_episode_meta_list.push((episode_meta, episode_enclosure_meta));
        }

        (new_episode_meta_list, existed_episode_hash2id_map)
    };

    // subscribe existed but not subscribed episode and bangumi
    let (existed_episode_id_list, existed_episode_bangumi_id_set): (Vec<i32>, HashSet<i32>) =
        existed_episode_hash2id_map.into_values().unzip();

    try_join!(
        subscription_episode::Model::add_episodes_for_subscription(
            ctx,
            existed_episode_id_list.into_iter(),
            subscriber_id,
            subscription_id,
        ),
        subscription_bangumi::Model::add_bangumis_for_subscription(
            ctx,
            existed_episode_bangumi_id_set.into_iter(),
            subscriber_id,
            subscription_id,
        ),
    )?;

    let new_episode_meta_list_group_by_bangumi_hash: HashMap<
        MikanBangumiHash,
        Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)>,
    > = {
        let mut m = hashmap! {};
        for (episode_meta, episode_enclosure_meta) in new_episode_meta_list {
            let bangumi_hash = episode_meta.bangumi_hash();

            m.entry(bangumi_hash)
                .or_insert_with(Vec::new)
                .push((episode_meta, episode_enclosure_meta));
        }
        m
    };

    for (group_bangumi_hash, group_episode_meta_list) in new_episode_meta_list_group_by_bangumi_hash
    {
        let (first_episode_meta, _) = group_episode_meta_list.first().unwrap();
        let group_bangumi_model = bangumi::Model::get_or_insert_from_mikan(
            ctx,
            group_bangumi_hash,
            subscriber_id,
            subscription_id,
            async || {
                let bangumi_meta: MikanBangumiMeta = first_episode_meta.clone().into();
                let bangumi_am = bangumi::ActiveModel::from_mikan_bangumi_meta(
                    ctx,
                    bangumi_meta,
                    subscriber_id,
                    subscription_id,
                )
                .await?;
                Ok(bangumi_am)
            },
        )
        .await?;
        let group_episode_creation_list =
            group_episode_meta_list
                .into_iter()
                .map(|(episode_meta, episode_enclosure_meta)| {
                    (&group_bangumi_model, episode_meta, episode_enclosure_meta)
                });

        episodes::Model::add_mikan_episodes_for_subscription(
            ctx,
            group_episode_creation_list.into_iter(),
            subscriber_id,
            subscription_id,
        )
        .await?;
    }
    Ok(())
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriberSubscription {
    pub subscription_id: i32,
    pub mikan_subscription_token: String,
    pub subscriber_id: i32,
}

#[async_trait::async_trait]
impl SubscriptionTrait for MikanSubscriberSubscription {
    fn get_subscriber_id(&self) -> i32 {
        self.subscriber_id
    }

    fn get_subscription_id(&self) -> i32 {
        self.subscription_id
    }

    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        let rss_item_list = self.get_rss_item_list_from_source_url(ctx.as_ref()).await?;

        sync_mikan_feeds_from_rss_item_list(
            ctx.as_ref(),
            rss_item_list,
            self.get_subscriber_id(),
            self.get_subscription_id(),
        )
        .await?;

        Ok(())
    }

    async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        self.sync_feeds_incremental(ctx.clone()).await?;

        let rss_item_list = self
            .get_rss_item_list_from_subsribed_url_rss_link(ctx.as_ref())
            .await?;

        sync_mikan_feeds_from_rss_item_list(
            ctx.as_ref(),
            rss_item_list,
            self.get_subscriber_id(),
            self.get_subscription_id(),
        )
        .await
    }

    async fn sync_sources(&self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        Ok(())
    }

    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
        let source_url = Url::parse(&model.source_url)?;

        let meta = MikanSubscriberSubscriptionRssUrlMeta::from_rss_url(&source_url)
            .with_whatever_context::<_, String, RecorderError>(|| {
                format!(
                    "MikanSubscriberSubscription should extract mikan_subscription_token from \
                     source_url = {}, subscription_id = {}",
                    source_url, model.id
                )
            })?;

        Ok(Self {
            subscription_id: model.id,
            mikan_subscription_token: meta.mikan_subscription_token,
            subscriber_id: model.subscriber_id,
        })
    }
}

impl MikanSubscriberSubscription {
    #[tracing::instrument(err, skip(ctx))]
    async fn get_rss_item_list_from_source_url(
        &self,
        ctx: &dyn AppContextTrait,
    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
        let mikan_base_url = ctx.mikan().base_url().clone();
        let rss_url = build_mikan_subscriber_subscription_rss_url(
            mikan_base_url.clone(),
            &self.mikan_subscription_token,
        );
        let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;

        let channel = rss::Channel::read_from(&bytes[..])?;

        let mut result = vec![];
        for (idx, item) in channel.items.into_iter().enumerate() {
            let item = MikanRssEpisodeItem::try_from(item)
                .with_whatever_context::<_, String, RecorderError>(|_| {
                    format!("failed to extract rss item at idx {idx}")
                })?;
            result.push(item);
        }
        Ok(result)
    }

    #[tracing::instrument(err, skip(ctx))]
    async fn get_rss_item_list_from_subsribed_url_rss_link(
        &self,
        ctx: &dyn AppContextTrait,
    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
        let subscribed_bangumi_list =
            bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.subscription_id)
                .await?;

        let mut rss_item_list = vec![];
        for subscribed_bangumi in subscribed_bangumi_list {
            let rss_url = subscribed_bangumi
                .rss_link
                .with_whatever_context::<_, String, RecorderError>(|| {
                    format!(
                        "rss link is required, subscription_id = {:?}, bangumi_name = {}",
                        self.subscription_id, subscribed_bangumi.display_name
                    )
                })?;
            let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;

            let channel = rss::Channel::read_from(&bytes[..])?;

            for (idx, item) in channel.items.into_iter().enumerate() {
                let item = MikanRssEpisodeItem::try_from(item)
                    .with_whatever_context::<_, String, RecorderError>(|_| {
                        format!("failed to extract rss item at idx {idx}")
                    })?;
                rss_item_list.push(item);
            }
        }
        Ok(rss_item_list)
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSeasonSubscription {
    pub subscription_id: i32,
    pub year: i32,
    pub season_str: MikanSeasonStr,
    pub credential_id: i32,
    pub subscriber_id: i32,
}

#[async_trait::async_trait]
impl SubscriptionTrait for MikanSeasonSubscription {
    fn get_subscriber_id(&self) -> i32 {
        self.subscriber_id
    }

    fn get_subscription_id(&self) -> i32 {
        self.subscription_id
    }

    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        let rss_item_stream = self.get_rss_item_stream_from_subsribed_url_rss_link(ctx.as_ref());

        pin_mut!(rss_item_stream);

        while let Some(rss_item_chunk_list) = rss_item_stream.try_next().await? {
            sync_mikan_feeds_from_rss_item_list(
                ctx.as_ref(),
                rss_item_chunk_list,
                self.get_subscriber_id(),
                self.get_subscription_id(),
            )
            .await?;
        }

        Ok(())
    }

    async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        self.sync_sources(ctx.clone()).await?;
        self.sync_feeds_incremental(ctx).await
    }

    async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        let bangumi_meta_list = self.get_bangumi_meta_stream_from_source_url(ctx.clone());

        pin_mut!(bangumi_meta_list);

        while let Some(bangumi_meta) = bangumi_meta_list.try_next().await? {
            let bangumi_hash = bangumi_meta.bangumi_hash();
            bangumi::Model::get_or_insert_from_mikan(
                ctx.as_ref(),
                bangumi_hash,
                self.get_subscriber_id(),
                self.get_subscription_id(),
                async || {
                    let bangumi_am = bangumi::ActiveModel::from_mikan_bangumi_meta(
                        ctx.as_ref(),
                        bangumi_meta,
                        self.get_subscriber_id(),
                        self.get_subscription_id(),
                    )
                    .await?;
                    Ok(bangumi_am)
                },
            )
            .await?;
        }

        Ok(())
    }

    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
        let source_url = Url::parse(&model.source_url)?;

        let source_url_meta = MikanSeasonFlowUrlMeta::from_url(&source_url)
            .with_whatever_context::<_, String, RecorderError>(|| {
                format!(
                    "season_str and year is required when extracting MikanSeasonSubscription from \
                     source_url, source_url = {}, subscription_id = {}",
                    source_url, model.id
                )
            })?;

        let credential_id = model
            .credential_id
            .with_whatever_context::<_, String, RecorderError>(|| {
                format!(
                    "credential_id is required when extracting MikanSeasonSubscription, \
                     subscription_id = {}",
                    model.id
                )
            })?;

        Ok(Self {
            subscription_id: model.id,
            year: source_url_meta.year,
            season_str: source_url_meta.season_str,
            credential_id,
            subscriber_id: model.subscriber_id,
        })
    }
}

impl MikanSeasonSubscription {
    pub fn get_bangumi_meta_stream_from_source_url(
        &self,
        ctx: Arc<dyn AppContextTrait>,
    ) -> impl Stream<Item = RecorderResult<MikanBangumiMeta>> {
        let credential_id = self.credential_id;
        let year = self.year;
        let season_str = self.season_str;

        let mikan_base_url = ctx.mikan().base_url().clone();
        let mikan_season_flow_url = build_mikan_season_flow_url(mikan_base_url, year, season_str);

        scrape_mikan_bangumi_meta_stream_from_season_flow_url(
            ctx,
            mikan_season_flow_url,
            credential_id,
            self.get_subscriber_id(),
        )
    }

    fn get_rss_item_stream_from_subsribed_url_rss_link(
        &self,
        ctx: &dyn AppContextTrait,
    ) -> impl Stream<Item = RecorderResult<Vec<MikanRssEpisodeItem>>> {
        try_stream! {
            let db = ctx.db();

            let subscribed_bangumi_list = bangumi::Entity::find()
                .filter(
                    Condition::all()
                        .add(subscription_bangumi::Column::SubscriptionId.eq(self.subscription_id)),
                )
                .join_rev(
                    JoinType::InnerJoin,
                    subscription_bangumi::Relation::Bangumi.def(),
                )
                .all(db)
                .await?;

            for subscribed_bangumi in subscribed_bangumi_list {
                let rss_url = subscribed_bangumi
                    .rss_link
                    .with_whatever_context::<_, String, RecorderError>(|| {
                        format!(
                            "rss_link is required, subscription_id = {}, bangumi_name = {}",
                            self.subscription_id, subscribed_bangumi.display_name
                        )
                    })?;
                let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;

                let channel = rss::Channel::read_from(&bytes[..])?;

                let mut rss_item_list = vec![];

                for (idx, item) in channel.items.into_iter().enumerate() {
                    let item = MikanRssEpisodeItem::try_from(item)
                        .with_whatever_context::<_, String, RecorderError>(|_| {
                            format!("failed to extract rss item at idx {idx}")
                        })?;
                    rss_item_list.push(item);
                }

                yield rss_item_list;
            }
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)]
pub struct MikanBangumiSubscription {
    pub subscription_id: i32,
    pub mikan_bangumi_id: String,
    pub mikan_fansub_id: String,
    pub subscriber_id: i32,
}

#[async_trait::async_trait]
impl SubscriptionTrait for MikanBangumiSubscription {
    fn get_subscriber_id(&self) -> i32 {
        self.subscriber_id
    }

    fn get_subscription_id(&self) -> i32 {
        self.subscription_id
    }

    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        let rss_item_list = self.get_rss_item_list_from_source_url(ctx.as_ref()).await?;

        sync_mikan_feeds_from_rss_item_list(
            ctx.as_ref(),
            rss_item_list,
            self.get_subscriber_id(),
            self.get_subscription_id(),
        )
        .await?;

        Ok(())
    }

    async fn sync_feeds_full(&self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        self.sync_feeds_incremental(_ctx).await
    }

    async fn sync_sources(&self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        Ok(())
    }

    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
        let source_url = Url::parse(&model.source_url)?;

        let meta = MikanBangumiHash::from_rss_url(&source_url)
            .with_whatever_context::<_, String, RecorderError>(|| {
                format!(
                    "bangumi_id and fansub_id is required when extracting \
                     MikanBangumiSubscription, source_url = {}, subscription_id = {}",
                    source_url, model.id
                )
            })?;

        Ok(Self {
            subscription_id: model.id,
            mikan_bangumi_id: meta.mikan_bangumi_id,
            mikan_fansub_id: meta.mikan_fansub_id,
            subscriber_id: model.subscriber_id,
        })
    }
}

impl MikanBangumiSubscription {
    #[tracing::instrument(err, skip(ctx))]
    async fn get_rss_item_list_from_source_url(
        &self,
        ctx: &dyn AppContextTrait,
    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
        let mikan_base_url = ctx.mikan().base_url().clone();
        let rss_url = build_mikan_bangumi_subscription_rss_url(
            mikan_base_url.clone(),
            &self.mikan_bangumi_id,
            Some(&self.mikan_fansub_id),
        );
        let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;

        let channel = rss::Channel::read_from(&bytes[..])?;

        let mut result = vec![];
        for (idx, item) in channel.items.into_iter().enumerate() {
            let item = MikanRssEpisodeItem::try_from(item)
                .with_whatever_context::<_, String, RecorderError>(|_| {
                    format!("failed to extract rss item at idx {idx}")
                })?;
            result.push(item);
        }
        Ok(result)
    }
}

#[cfg(test)]
#[allow(unused_variables)]
mod tests {
    use std::sync::Arc;

    use rstest::{fixture, rstest};
    use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait};
    use tracing::Level;

    use crate::{
        app::AppContextTrait,
        errors::RecorderResult,
        extract::mikan::{
            MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
            MikanSubscriberSubscriptionRssUrlMeta,
        },
        models::{
            bangumi, episodes,
            subscriptions::{self, SubscriptionTrait},
        },
        test_utils::{
            app::{TestingAppContext, TestingAppContextPreset},
            mikan::{MikanMockServer, build_testing_mikan_credential_form},
            tracing::try_init_testing_tracing,
        },
    };

    struct TestingResources {
        pub app_ctx: Arc<dyn AppContextTrait>,
        pub mikan_server: MikanMockServer,
    }

    async fn build_testing_app_context() -> RecorderResult<TestingResources> {
        let mikan_server = MikanMockServer::new().await?;

        let mikan_base_url = mikan_server.base_url().clone();

        let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
            mikan_base_url: mikan_base_url.to_string(),
            database_config: None,
        })
        .await?;

        Ok(TestingResources {
            app_ctx,
            mikan_server,
        })
    }

    #[fixture]
    fn before_each() {
        try_init_testing_tracing(Level::DEBUG);
    }

    #[rstest]
    #[tokio::test]
    async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
        let TestingResources {
            app_ctx,
            mut mikan_server,
        } = build_testing_app_context().await?;

        let _resources_mock = mikan_server.mock_resources_with_doppel();

        let _login_mock = mikan_server.mock_get_login_page();

        let mikan_client = app_ctx.mikan();

        let subscriber_id = 1;

        let credential = mikan_client
            .submit_credential_form(
                app_ctx.as_ref(),
                subscriber_id,
                build_testing_mikan_credential_form(),
            )
            .await?;

        let subscription_am = subscriptions::ActiveModel {
            display_name: ActiveValue::Set("test subscription".to_string()),
            subscriber_id: ActiveValue::Set(subscriber_id),
            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSeason),
            source_url: ActiveValue::Set(
                MikanSeasonFlowUrlMeta {
                    year: 2025,
                    season_str: MikanSeasonStr::Spring,
                }
                .build_season_flow_url(mikan_server.base_url().clone())
                .to_string(),
            ),
            enabled: ActiveValue::Set(true),
            credential_id: ActiveValue::Set(Some(credential.id)),
            ..Default::default()
        };

        let subscription_model = subscription_am.insert(app_ctx.db()).await?;

        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;

        {
            subscription.sync_feeds_incremental(app_ctx.clone()).await?;
            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(bangumi_list.is_empty());
        }

        {
            subscription.sync_feeds_full(app_ctx.clone()).await?;
            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(!bangumi_list.is_empty());
        }

        Ok(())
    }

    #[rstest]
    #[tokio::test]
    async fn test_mikan_subscriber_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
        let TestingResources {
            app_ctx,
            mut mikan_server,
        } = build_testing_app_context().await?;

        let _resources_mock = mikan_server.mock_resources_with_doppel();

        let _login_mock = mikan_server.mock_get_login_page();

        let subscriber_id = 1;

        let subscription_am = subscriptions::ActiveModel {
            display_name: ActiveValue::Set("test subscription".to_string()),
            subscriber_id: ActiveValue::Set(subscriber_id),
            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSubscriber),
            source_url: ActiveValue::Set(
                MikanSubscriberSubscriptionRssUrlMeta {
                    mikan_subscription_token: "test".into(),
                }
                .build_rss_url(mikan_server.base_url().clone())
                .to_string(),
            ),
            enabled: ActiveValue::Set(true),
            ..Default::default()
        };

        let subscription_model = subscription_am.insert(app_ctx.db()).await?;

        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;

        let (incremental_bangumi_list, incremental_episode_list) = {
            subscription.sync_feeds_incremental(app_ctx.clone()).await?;

            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(!bangumi_list.is_empty());

            let episode_list = episodes::Entity::find().all(app_ctx.db()).await?;

            assert!(!episode_list.is_empty());

            (bangumi_list, episode_list)
        };

        let (full_bangumi_list, full_episode_list) = {
            subscription.sync_feeds_full(app_ctx.clone()).await?;

            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(!bangumi_list.is_empty());

            let episode_list = episodes::Entity::find().all(app_ctx.db()).await?;

            assert!(!episode_list.is_empty());

            (bangumi_list, episode_list)
        };

        assert_eq!(incremental_bangumi_list.len(), full_bangumi_list.len());
        assert!(incremental_episode_list.len() < full_episode_list.len());

        Ok(())
    }

    #[rstest]
    #[tokio::test]
    async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
        let TestingResources {
            app_ctx,
            mut mikan_server,
        } = build_testing_app_context().await?;

        let _resources_mock = mikan_server.mock_resources_with_doppel();

        let _login_mock = mikan_server.mock_get_login_page();

        let subscriber_id = 1;

        let subscription_am = subscriptions::ActiveModel {
            display_name: ActiveValue::Set("test subscription".to_string()),
            subscriber_id: ActiveValue::Set(subscriber_id),
            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanBangumi),
            source_url: ActiveValue::Set(
                MikanBangumiHash {
                    mikan_bangumi_id: "3600".into(),
                    mikan_fansub_id: "370".into(),
                }
                .build_rss_url(mikan_server.base_url().clone())
                .to_string(),
            ),
            enabled: ActiveValue::Set(true),
            ..Default::default()
        };

        let subscription_model = subscription_am.insert(app_ctx.db()).await?;

        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;

        {
            subscription.sync_feeds_incremental(app_ctx.clone()).await?;
            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(!bangumi_list.is_empty());
        };

        {
            subscription.sync_feeds_full(app_ctx.clone()).await?;
            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(!bangumi_list.is_empty());
        }

        Ok(())
    }
}
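Not part of the diff: `sync_mikan_feeds_from_rss_item_list` above groups newly scraped episodes by bangumi hash before inserting, so each bangumi row is created at most once per batch and its episodes go in together. A standalone sketch of that grouping step, with a plain string key standing in for `MikanBangumiHash` and `HashMap::new` in place of `maplit::hashmap!`:

use std::collections::HashMap;

// Simplified stand-in for the crate's episode metadata type.
#[derive(Debug)]
struct EpisodeMeta {
    bangumi_hash: String, // stands in for MikanBangumiHash
    title: String,
}

fn group_by_bangumi(episodes: Vec<EpisodeMeta>) -> HashMap<String, Vec<EpisodeMeta>> {
    let mut groups: HashMap<String, Vec<EpisodeMeta>> = HashMap::new();
    for episode in episodes {
        groups
            .entry(episode.bangumi_hash.clone())
            .or_insert_with(Vec::new)
            .push(episode);
    }
    groups
}

fn main() {
    let episodes = vec![
        EpisodeMeta { bangumi_hash: "3600/370".into(), title: "EP 01".into() },
        EpisodeMeta { bangumi_hash: "3600/370".into(), title: "EP 02".into() },
        EpisodeMeta { bangumi_hash: "3141/370".into(), title: "EP 01".into() },
    ];
    for (hash, group) in group_by_bangumi(episodes) {
        // One insert-or-get per bangumi, then a batch insert of its episodes.
        println!("bangumi {hash}: {} new episode(s)", group.len());
    }
}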
apps/recorder/src/extract/mikan/web.rs (new file, 1525 lines; diff suppressed because it is too large)
@@ -1,712 +0,0 @@
|
|||||||
use std::{borrow::Cow, sync::Arc};
|
|
||||||
|
|
||||||
use async_stream::try_stream;
|
|
||||||
use bytes::Bytes;
|
|
||||||
use futures::Stream;
|
|
||||||
use itertools::Itertools;
|
|
||||||
use scraper::{Html, Selector};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use tracing::instrument;
|
|
||||||
use url::Url;
|
|
||||||
|
|
||||||
use super::{
|
|
||||||
MIKAN_BUCKET_KEY, MikanAuthSecrecy, MikanBangumiRssLink, MikanClient,
|
|
||||||
extract_mikan_bangumi_id_from_rss_link,
|
|
||||||
};
|
|
||||||
use crate::{
|
|
||||||
app::AppContextTrait,
|
|
||||||
errors::{RError, RResult},
|
|
||||||
extract::{
|
|
||||||
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
|
|
||||||
media::extract_image_src_from_str,
|
|
||||||
},
|
|
||||||
fetch::{html::fetch_html, image::fetch_image},
|
|
||||||
storage::StorageContentCategory,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct MikanEpisodeMeta {
|
|
||||||
pub homepage: Url,
|
|
||||||
pub origin_poster_src: Option<Url>,
|
|
||||||
pub bangumi_title: String,
|
|
||||||
pub episode_title: String,
|
|
||||||
pub fansub: String,
|
|
||||||
pub mikan_bangumi_id: String,
|
|
||||||
pub mikan_fansub_id: String,
|
|
||||||
pub mikan_episode_id: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
|
|
||||||
pub struct MikanBangumiMeta {
|
|
||||||
pub homepage: Url,
|
|
||||||
pub origin_poster_src: Option<Url>,
|
|
||||||
pub bangumi_title: String,
|
|
||||||
pub mikan_bangumi_id: String,
|
|
||||||
pub mikan_fansub_id: Option<String>,
|
|
||||||
pub fansub: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct MikanBangumiPosterMeta {
|
|
||||||
pub origin_poster_src: Url,
|
|
||||||
pub poster_data: Option<Bytes>,
|
|
||||||
pub poster_src: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct MikanEpisodeHomepage {
|
|
||||||
pub mikan_episode_id: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct MikanBangumiHomepage {
|
|
||||||
pub mikan_bangumi_id: String,
|
|
||||||
pub mikan_fansub_id: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn build_mikan_bangumi_homepage(
    mikan_base_url: Url,
    mikan_bangumi_id: &str,
    mikan_fansub_id: Option<&str>,
) -> Url {
    let mut url = mikan_base_url;
    url.set_path(&format!("/Home/Bangumi/{mikan_bangumi_id}"));
    url.set_fragment(mikan_fansub_id);
    url
}

pub fn build_mikan_episode_homepage(mikan_base_url: Url, mikan_episode_id: &str) -> Url {
    let mut url = mikan_base_url;
    url.set_path(&format!("/Home/Episode/{mikan_episode_id}"));
    url
}

pub fn build_mikan_bangumi_expand_info_url(mikan_base_url: Url, mikan_bangumi_id: &str) -> Url {
    let mut url = mikan_base_url;
    url.set_path("/ExpandBangumi");
    url.query_pairs_mut()
        .append_pair("bangumiId", mikan_bangumi_id)
        .append_pair("showSubscribed", "true");
    url
}

pub fn extract_mikan_bangumi_id_from_homepage(url: &Url) -> Option<MikanBangumiHomepage> {
    if url.path().starts_with("/Home/Bangumi/") {
        let mikan_bangumi_id = url.path().replace("/Home/Bangumi/", "");

        Some(MikanBangumiHomepage {
            mikan_bangumi_id,
            mikan_fansub_id: url.fragment().map(String::from),
        })
    } else {
        None
    }
}

pub fn extract_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeHomepage> {
    if url.path().starts_with("/Home/Episode/") {
        let mikan_episode_id = url.path().replace("/Home/Episode/", "");
        Some(MikanEpisodeHomepage { mikan_episode_id })
    } else {
        None
    }
}

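The builders and extractors above are pure URL manipulation, so a minimal round-trip sketch may help readers of this diff. The base URL below is only an illustrative value (the real one comes from configuration), the ids are copied from the tests further down, and the `main` wrapper is not part of the removed module:

use url::Url;

fn main() {
    // Illustrative base URL, not the production value.
    let mikan_base_url = Url::parse("https://mikan.example.com").unwrap();

    // Build an episode homepage URL from an episode id ...
    let homepage = build_mikan_episode_homepage(
        mikan_base_url.clone(),
        "475184dce83ea2b82902592a5ac3343f6d54b36a",
    );
    assert_eq!(homepage.path(), "/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a");

    // ... and recover the same id from any such URL.
    let parsed = extract_mikan_episode_id_from_homepage(&homepage).unwrap();
    assert_eq!(parsed.mikan_episode_id, "475184dce83ea2b82902592a5ac3343f6d54b36a");

    // Bangumi homepages carry the optional fansub id in the URL fragment.
    let bangumi = build_mikan_bangumi_homepage(mikan_base_url, "3141", Some("370"));
    assert_eq!(bangumi.as_str(), "https://mikan.example.com/Home/Bangumi/3141#370");
}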
pub async fn extract_mikan_poster_meta_from_src(
    http_client: &MikanClient,
    origin_poster_src_url: Url,
) -> Result<MikanBangumiPosterMeta, RError> {
    let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?;
    Ok(MikanBangumiPosterMeta {
        origin_poster_src: origin_poster_src_url,
        poster_data: Some(poster_data),
        poster_src: None,
    })
}

pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
    ctx: &dyn AppContextTrait,
    origin_poster_src_url: Url,
    subscriber_id: i32,
) -> RResult<MikanBangumiPosterMeta> {
    let dal_client = ctx.storage();
    let mikan_client = ctx.mikan();
    if let Some(poster_src) = dal_client
        .exists_object(
            StorageContentCategory::Image,
            subscriber_id,
            Some(MIKAN_BUCKET_KEY),
            &origin_poster_src_url.path().replace("/images/Bangumi/", ""),
        )
        .await?
    {
        return Ok(MikanBangumiPosterMeta {
            origin_poster_src: origin_poster_src_url,
            poster_data: None,
            poster_src: Some(poster_src.to_string()),
        });
    }

    let poster_data = fetch_image(mikan_client, origin_poster_src_url.clone()).await?;

    let poster_str = dal_client
        .store_object(
            StorageContentCategory::Image,
            subscriber_id,
            Some(MIKAN_BUCKET_KEY),
            &origin_poster_src_url.path().replace("/images/Bangumi/", ""),
            poster_data.clone(),
        )
        .await?;

    Ok(MikanBangumiPosterMeta {
        origin_poster_src: origin_poster_src_url,
        poster_data: Some(poster_data),
        poster_src: Some(poster_str.to_string()),
    })
}

#[instrument(skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))]
|
|
||||||
pub async fn extract_mikan_episode_meta_from_episode_homepage(
|
|
||||||
http_client: &MikanClient,
|
|
||||||
mikan_episode_homepage_url: Url,
|
|
||||||
) -> Result<MikanEpisodeMeta, RError> {
|
|
||||||
let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?;
|
|
||||||
let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?;
|
|
||||||
|
|
||||||
let html = Html::parse_document(&content);
|
|
||||||
|
|
||||||
let bangumi_title_selector =
|
|
||||||
&Selector::parse(".bangumi-title > a[href^='/Home/Bangumi/']").unwrap();
|
|
||||||
let mikan_bangumi_id_selector =
|
|
||||||
&Selector::parse(".bangumi-title > a.mikan-rss[data-original-title='RSS']").unwrap();
|
|
||||||
let bangumi_poster_selector = &Selector::parse(".bangumi-poster").unwrap();
|
|
||||||
|
|
||||||
let bangumi_title = html
|
|
||||||
.select(bangumi_title_selector)
|
|
||||||
.next()
|
|
||||||
.map(extract_inner_text_from_element_ref)
|
|
||||||
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
|
|
||||||
.inspect_err(|error| {
|
|
||||||
tracing::warn!(error = %error);
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let MikanBangumiRssLink {
|
|
||||||
mikan_bangumi_id,
|
|
||||||
mikan_fansub_id,
|
|
||||||
..
|
|
||||||
} = html
|
|
||||||
.select(mikan_bangumi_id_selector)
|
|
||||||
.next()
|
|
||||||
.and_then(|el| el.value().attr("href"))
|
|
||||||
.and_then(|s| mikan_episode_homepage_url.join(s).ok())
|
|
||||||
.and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
|
|
||||||
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
|
|
||||||
.inspect_err(|error| tracing::error!(error = %error))?;
|
|
||||||
|
|
||||||
let mikan_fansub_id = mikan_fansub_id
|
|
||||||
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id")))
|
|
||||||
.inspect_err(|error| tracing::error!(error = %error))?;
|
|
||||||
|
|
||||||
let episode_title = html
|
|
||||||
.select(&Selector::parse("title").unwrap())
|
|
||||||
.next()
|
|
||||||
.map(extract_inner_text_from_element_ref)
|
|
||||||
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
|
|
||||||
.inspect_err(|error| {
|
|
||||||
tracing::warn!(error = %error);
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let MikanEpisodeHomepage {
|
|
||||||
mikan_episode_id, ..
|
|
||||||
} = extract_mikan_episode_id_from_homepage(&mikan_episode_homepage_url)
|
|
||||||
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id")))
|
|
||||||
.inspect_err(|error| {
|
|
||||||
tracing::warn!(error = %error);
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let fansub_name = html
|
|
||||||
.select(
|
|
||||||
&Selector::parse(".bangumi-info a.magnet-link-wrap[href^='/Home/PublishGroup/']")
|
|
||||||
.unwrap(),
|
|
||||||
)
|
|
||||||
.next()
|
|
||||||
.map(extract_inner_text_from_element_ref)
|
|
||||||
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
|
|
||||||
.inspect_err(|error| {
|
|
||||||
tracing::warn!(error = %error);
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
|
|
||||||
el.value()
|
|
||||||
.attr("data-src")
|
|
||||||
.and_then(|data_src| extract_image_src_from_str(data_src, &mikan_base_url))
|
|
||||||
.or_else(|| {
|
|
||||||
el.value().attr("style").and_then(|style| {
|
|
||||||
extract_background_image_src_from_style_attr(style, &mikan_base_url)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
tracing::trace!(
|
|
||||||
bangumi_title,
|
|
||||||
mikan_bangumi_id,
|
|
||||||
episode_title,
|
|
||||||
mikan_episode_id,
|
|
||||||
origin_poster_src = origin_poster_src.as_ref().map(|url| url.as_str()),
|
|
||||||
fansub_name,
|
|
||||||
mikan_fansub_id,
|
|
||||||
"mikan episode meta extracted"
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(MikanEpisodeMeta {
|
|
||||||
mikan_bangumi_id,
|
|
||||||
mikan_fansub_id,
|
|
||||||
bangumi_title,
|
|
||||||
episode_title,
|
|
||||||
homepage: mikan_episode_homepage_url,
|
|
||||||
origin_poster_src,
|
|
||||||
fansub: fansub_name,
|
|
||||||
mikan_episode_id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))]
|
|
||||||
pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
|
|
||||||
http_client: &MikanClient,
|
|
||||||
mikan_bangumi_homepage_url: Url,
|
|
||||||
) -> Result<MikanBangumiMeta, RError> {
|
|
||||||
let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?;
|
|
||||||
let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?;
|
|
||||||
let html = Html::parse_document(&content);
|
|
||||||
|
|
||||||
let bangumi_title_selector = &Selector::parse(".bangumi-title").unwrap();
|
|
||||||
let mikan_bangumi_id_selector =
|
|
||||||
&Selector::parse(".bangumi-title > .mikan-rss[data-original-title='RSS']").unwrap();
|
|
||||||
let bangumi_poster_selector = &Selector::parse(".bangumi-poster").unwrap();
|
|
||||||
|
|
||||||
let bangumi_title = html
|
|
||||||
.select(bangumi_title_selector)
|
|
||||||
.next()
|
|
||||||
.map(extract_inner_text_from_element_ref)
|
|
||||||
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
|
|
||||||
.inspect_err(|error| tracing::warn!(error = %error))?;
|
|
||||||
|
|
||||||
let mikan_bangumi_id = html
|
|
||||||
.select(mikan_bangumi_id_selector)
|
|
||||||
.next()
|
|
||||||
.and_then(|el| el.value().attr("href"))
|
|
||||||
.and_then(|s| mikan_bangumi_homepage_url.join(s).ok())
|
|
||||||
.and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
|
|
||||||
.map(
|
|
||||||
|MikanBangumiRssLink {
|
|
||||||
mikan_bangumi_id, ..
|
|
||||||
}| mikan_bangumi_id,
|
|
||||||
)
|
|
||||||
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
|
|
||||||
.inspect_err(|error| tracing::error!(error = %error))?;
|
|
||||||
|
|
||||||
let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
|
|
||||||
el.value()
|
|
||||||
.attr("data-src")
|
|
||||||
.and_then(|data_src| extract_image_src_from_str(data_src, &mikan_base_url))
|
|
||||||
.or_else(|| {
|
|
||||||
el.value().attr("style").and_then(|style| {
|
|
||||||
extract_background_image_src_from_style_attr(style, &mikan_base_url)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
let (mikan_fansub_id, fansub_name) = mikan_bangumi_homepage_url
|
|
||||||
.fragment()
|
|
||||||
.and_then(|id| {
|
|
||||||
html.select(
|
|
||||||
&Selector::parse(&format!("a.subgroup-name[data-anchor='#{}']", id)).unwrap(),
|
|
||||||
)
|
|
||||||
.next()
|
|
||||||
.map(extract_inner_text_from_element_ref)
|
|
||||||
.map(|fansub_name| (id.to_string(), fansub_name))
|
|
||||||
})
|
|
||||||
.unzip();
|
|
||||||
|
|
||||||
tracing::trace!(
|
|
||||||
bangumi_title,
|
|
||||||
mikan_bangumi_id,
|
|
||||||
origin_poster_src = origin_poster_src.as_ref().map(|url| url.as_str()),
|
|
||||||
fansub_name,
|
|
||||||
mikan_fansub_id,
|
|
||||||
"mikan bangumi meta extracted"
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(MikanBangumiMeta {
|
|
||||||
homepage: mikan_bangumi_homepage_url,
|
|
||||||
bangumi_title,
|
|
||||||
origin_poster_src,
|
|
||||||
mikan_bangumi_id,
|
|
||||||
fansub: fansub_name,
|
|
||||||
mikan_fansub_id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip_all, fields(my_bangumi_page_url, auth_secrecy = ?auth_secrecy, history = history.len()))]
|
|
||||||
pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
|
|
||||||
context: Arc<dyn AppContextTrait>,
|
|
||||||
my_bangumi_page_url: Url,
|
|
||||||
auth_secrecy: Option<MikanAuthSecrecy>,
|
|
||||||
history: &[Arc<RResult<MikanBangumiMeta>>],
|
|
||||||
) -> impl Stream<Item = RResult<MikanBangumiMeta>> {
|
|
||||||
try_stream! {
|
|
||||||
let http_client = &context.mikan().fork_with_auth(auth_secrecy.clone())?;
|
|
||||||
|
|
||||||
let mikan_base_url = Url::parse(&my_bangumi_page_url.origin().unicode_serialization())?;
|
|
||||||
|
|
||||||
let content = fetch_html(http_client, my_bangumi_page_url.clone()).await?;
|
|
||||||
|
|
||||||
let fansub_container_selector =
|
|
||||||
&Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap();
|
|
||||||
let fansub_title_selector = &Selector::parse(".tag-res-name[title]").unwrap();
|
|
||||||
let fansub_id_selector =
|
|
||||||
&Selector::parse(".active[data-subtitlegroupid][data-bangumiid]").unwrap();
|
|
||||||
|
|
||||||
let bangumi_items = {
|
|
||||||
let html = Html::parse_document(&content);
|
|
||||||
|
|
||||||
let bangumi_container_selector = &Selector::parse(".sk-bangumi .an-ul>li").unwrap();
|
|
||||||
let bangumi_info_selector = &Selector::parse(".an-info a.an-text").unwrap();
|
|
||||||
let bangumi_poster_selector =
|
|
||||||
&Selector::parse("span[data-src][data-bangumiid], span[data-bangumiid][style]")
|
|
||||||
.unwrap();
|
|
||||||
html.select(bangumi_container_selector)
|
|
||||||
.filter_map(|bangumi_elem| {
|
|
||||||
let title_and_href_elem =
|
|
||||||
bangumi_elem.select(bangumi_info_selector).next();
|
|
||||||
let poster_elem = bangumi_elem.select(bangumi_poster_selector).next();
|
|
||||||
if let (Some(bangumi_home_page_url), Some(bangumi_title)) = (
|
|
||||||
title_and_href_elem.and_then(|elem| elem.attr("href")),
|
|
||||||
title_and_href_elem.and_then(|elem| elem.attr("title")),
|
|
||||||
) {
|
|
||||||
let origin_poster_src = poster_elem.and_then(|ele| {
|
|
||||||
ele.attr("data-src")
|
|
||||||
.and_then(|data_src| {
|
|
||||||
extract_image_src_from_str(data_src, &mikan_base_url)
|
|
||||||
})
|
|
||||||
.or_else(|| {
|
|
||||||
ele.attr("style").and_then(|style| {
|
|
||||||
extract_background_image_src_from_style_attr(
|
|
||||||
style,
|
|
||||||
&mikan_base_url,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
});
|
|
||||||
let bangumi_title = bangumi_title.to_string();
|
|
||||||
let bangumi_home_page_url =
|
|
||||||
my_bangumi_page_url.join(bangumi_home_page_url).ok()?;
|
|
||||||
let MikanBangumiHomepage {
|
|
||||||
mikan_bangumi_id, ..
|
|
||||||
} = extract_mikan_bangumi_id_from_homepage(&bangumi_home_page_url)?;
|
|
||||||
if let Some(origin_poster_src) = origin_poster_src.as_ref() {
|
|
||||||
tracing::trace!(
|
|
||||||
origin_poster_src = origin_poster_src.as_str(),
|
|
||||||
bangumi_title,
|
|
||||||
mikan_bangumi_id,
|
|
||||||
"bangumi info extracted"
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
tracing::warn!(
|
|
||||||
bangumi_title,
|
|
||||||
mikan_bangumi_id,
|
|
||||||
"bangumi info extracted, but failed to extract poster_src"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
let bangumi_expand_info_url = build_mikan_bangumi_expand_info_url(
|
|
||||||
mikan_base_url.clone(),
|
|
||||||
&mikan_bangumi_id,
|
|
||||||
);
|
|
||||||
Some((
|
|
||||||
bangumi_title,
|
|
||||||
mikan_bangumi_id,
|
|
||||||
bangumi_expand_info_url,
|
|
||||||
origin_poster_src,
|
|
||||||
))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect_vec()
|
|
||||||
};
|
|
||||||
|
|
||||||
for (idx, (bangumi_title, mikan_bangumi_id, bangumi_expand_info_url, origin_poster_src)) in
|
|
||||||
bangumi_items.iter().enumerate()
|
|
||||||
{
|
|
||||||
|
|
||||||
if history.get(idx).is_some() {
|
|
||||||
continue;
|
|
||||||
} else if let Some((fansub_name, mikan_fansub_id)) = {
|
|
||||||
let bangumi_expand_info_content =
|
|
||||||
fetch_html(http_client, bangumi_expand_info_url.clone()).await?;
|
|
||||||
let bangumi_expand_info_fragment =
|
|
||||||
Html::parse_fragment(&bangumi_expand_info_content);
|
|
||||||
bangumi_expand_info_fragment
|
|
||||||
.select(fansub_container_selector)
|
|
||||||
.next()
|
|
||||||
.and_then(|fansub_info| {
|
|
||||||
if let (Some(fansub_name), Some(mikan_fansub_id)) = (
|
|
||||||
fansub_info
|
|
||||||
.select(fansub_title_selector)
|
|
||||||
.next()
|
|
||||||
.and_then(|ele| ele.attr("title"))
|
|
||||||
.map(String::from),
|
|
||||||
fansub_info
|
|
||||||
.select(fansub_id_selector)
|
|
||||||
.next()
|
|
||||||
.and_then(|ele| ele.attr("data-subtitlegroupid"))
|
|
||||||
.map(String::from),
|
|
||||||
) {
|
|
||||||
Some((fansub_name, mikan_fansub_id))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
})
|
|
||||||
} {
|
|
||||||
tracing::trace!(fansub_name, mikan_fansub_id, "subscribed fansub extracted");
|
|
||||||
let item = MikanBangumiMeta {
|
|
||||||
homepage: build_mikan_bangumi_homepage(
|
|
||||||
mikan_base_url.clone(),
|
|
||||||
mikan_bangumi_id,
|
|
||||||
Some(&mikan_fansub_id),
|
|
||||||
),
|
|
||||||
bangumi_title: bangumi_title.to_string(),
|
|
||||||
mikan_bangumi_id: mikan_bangumi_id.to_string(),
|
|
||||||
mikan_fansub_id: Some(mikan_fansub_id),
|
|
||||||
fansub: Some(fansub_name),
|
|
||||||
origin_poster_src: origin_poster_src.clone(),
|
|
||||||
};
|
|
||||||
yield item;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
#![allow(unused_variables)]
|
|
||||||
use futures::{TryStreamExt, pin_mut};
|
|
||||||
use http::header;
|
|
||||||
use rstest::{fixture, rstest};
|
|
||||||
use tracing::Level;
|
|
||||||
use url::Url;
|
|
||||||
use zune_image::{codecs::ImageFormat, image::Image};
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
use crate::test_utils::{
|
|
||||||
app::UnitTestAppContext, mikan::build_testing_mikan_client,
|
|
||||||
tracing::try_init_testing_tracing,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
fn before_each() {
|
|
||||||
try_init_testing_tracing(Level::INFO);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_extract_mikan_poster_from_src(before_each: ()) -> RResult<()> {
|
|
||||||
let mut mikan_server = mockito::Server::new_async().await;
|
|
||||||
let mikan_base_url = Url::parse(&mikan_server.url())?;
|
|
||||||
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
|
|
||||||
|
|
||||||
let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;
|
|
||||||
|
|
||||||
let bangumi_poster_mock = mikan_server
|
|
||||||
.mock("GET", bangumi_poster_url.path())
|
|
||||||
.with_body_from_file("tests/resources/mikan/Bangumi-202309-5ce9fed1.jpg")
|
|
||||||
.create_async()
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let bgm_poster =
|
|
||||||
extract_mikan_poster_meta_from_src(&mikan_client, bangumi_poster_url).await?;
|
|
||||||
bangumi_poster_mock.expect(1);
|
|
||||||
let u8_data = bgm_poster.poster_data.expect("should have poster data");
|
|
||||||
let image = Image::read(u8_data.to_vec(), Default::default());
|
|
||||||
assert!(
|
|
||||||
image.is_ok_and(|img| img
|
|
||||||
.metadata()
|
|
||||||
.get_image_format()
|
|
||||||
.is_some_and(|fmt| matches!(fmt, ImageFormat::JPEG))),
|
|
||||||
"should start with valid jpeg data magic number"
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_extract_mikan_episode(before_each: ()) -> RResult<()> {
|
|
||||||
let mut mikan_server = mockito::Server::new_async().await;
|
|
||||||
let mikan_base_url = Url::parse(&mikan_server.url())?;
|
|
||||||
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
|
|
||||||
|
|
||||||
let episode_homepage_url =
|
|
||||||
mikan_base_url.join("/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a")?;
|
|
||||||
|
|
||||||
let episode_homepage_mock = mikan_server
|
|
||||||
.mock("GET", episode_homepage_url.path())
|
|
||||||
.with_body_from_file(
|
|
||||||
"tests/resources/mikan/Episode-475184dce83ea2b82902592a5ac3343f6d54b36a.htm",
|
|
||||||
)
|
|
||||||
.create_async()
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let ep_meta = extract_mikan_episode_meta_from_episode_homepage(
|
|
||||||
&mikan_client,
|
|
||||||
episode_homepage_url.clone(),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
assert_eq!(ep_meta.homepage, episode_homepage_url);
|
|
||||||
assert_eq!(ep_meta.bangumi_title, "葬送的芙莉莲");
|
|
||||||
assert_eq!(
|
|
||||||
ep_meta
|
|
||||||
.origin_poster_src
|
|
||||||
.as_ref()
|
|
||||||
.map(|s| s.path().to_string()),
|
|
||||||
Some(String::from("/images/Bangumi/202309/5ce9fed1.jpg"))
|
|
||||||
);
|
|
||||||
assert_eq!(ep_meta.fansub, "LoliHouse");
|
|
||||||
assert_eq!(ep_meta.mikan_fansub_id, "370");
|
|
||||||
assert_eq!(ep_meta.mikan_bangumi_id, "3141");
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RResult<()> {
|
|
||||||
let mut mikan_server = mockito::Server::new_async().await;
|
|
||||||
let mikan_base_url = Url::parse(&mikan_server.url())?;
|
|
||||||
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
|
|
||||||
|
|
||||||
let bangumi_homepage_url = mikan_base_url.join("/Home/Bangumi/3416#370")?;
|
|
||||||
|
|
||||||
let bangumi_homepage_mock = mikan_server
|
|
||||||
.mock("GET", bangumi_homepage_url.path())
|
|
||||||
.with_body_from_file("tests/resources/mikan/Bangumi-3416-370.htm")
|
|
||||||
.create_async()
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
|
|
||||||
&mikan_client,
|
|
||||||
bangumi_homepage_url.clone(),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
assert_eq!(bgm_meta.homepage, bangumi_homepage_url);
|
|
||||||
assert_eq!(bgm_meta.bangumi_title, "叹气的亡灵想隐退");
|
|
||||||
assert_eq!(
|
|
||||||
bgm_meta
|
|
||||||
.origin_poster_src
|
|
||||||
.as_ref()
|
|
||||||
.map(|s| s.path().to_string()),
|
|
||||||
Some(String::from("/images/Bangumi/202410/480ef127.jpg"))
|
|
||||||
);
|
|
||||||
assert_eq!(bgm_meta.fansub, Some(String::from("LoliHouse")));
|
|
||||||
assert_eq!(bgm_meta.mikan_fansub_id, Some(String::from("370")));
|
|
||||||
assert_eq!(bgm_meta.mikan_bangumi_id, "3416");
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RResult<()> {
|
|
||||||
let mut mikan_server = mockito::Server::new_async().await;
|
|
||||||
|
|
||||||
let mikan_base_url = Url::parse(&mikan_server.url())?;
|
|
||||||
|
|
||||||
let my_bangumi_page_url = mikan_base_url.join("/Home/MyBangumi")?;
|
|
||||||
|
|
||||||
let context = Arc::new(
|
|
||||||
UnitTestAppContext::builder()
|
|
||||||
.mikan(build_testing_mikan_client(mikan_base_url.clone()).await?)
|
|
||||||
.build(),
|
|
||||||
);
|
|
||||||
|
|
||||||
{
|
|
||||||
let my_bangumi_without_cookie_mock = mikan_server
|
|
||||||
.mock("GET", my_bangumi_page_url.path())
|
|
||||||
.match_header(header::COOKIE, mockito::Matcher::Missing)
|
|
||||||
.with_body_from_file("tests/resources/mikan/MyBangumi-noauth.htm")
|
|
||||||
.create_async()
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
|
|
||||||
context.clone(),
|
|
||||||
my_bangumi_page_url.clone(),
|
|
||||||
None,
|
|
||||||
&[],
|
|
||||||
);
|
|
||||||
|
|
||||||
pin_mut!(bangumi_metas);
|
|
||||||
|
|
||||||
let bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
|
|
||||||
|
|
||||||
assert!(bangumi_metas.is_empty());
|
|
||||||
|
|
||||||
assert!(my_bangumi_without_cookie_mock.matched_async().await);
|
|
||||||
}
|
|
||||||
{
|
|
||||||
let my_bangumi_with_cookie_mock = mikan_server
|
|
||||||
.mock("GET", my_bangumi_page_url.path())
|
|
||||||
.match_header(
|
|
||||||
header::COOKIE,
|
|
||||||
mockito::Matcher::AllOf(vec![
|
|
||||||
mockito::Matcher::Regex(String::from(".*\\.AspNetCore\\.Antiforgery.*")),
|
|
||||||
mockito::Matcher::Regex(String::from(
|
|
||||||
".*\\.AspNetCore\\.Identity\\.Application.*",
|
|
||||||
)),
|
|
||||||
]),
|
|
||||||
)
|
|
||||||
.with_body_from_file("tests/resources/mikan/MyBangumi.htm")
|
|
||||||
.create_async()
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let expand_bangumi_mock = mikan_server
|
|
||||||
.mock("GET", "/ExpandBangumi")
|
|
||||||
.match_query(mockito::Matcher::Any)
|
|
||||||
.with_body_from_file("tests/resources/mikan/ExpandBangumi.htm")
|
|
||||||
.create_async()
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let auth_secrecy = Some(MikanAuthSecrecy {
|
|
||||||
cookie: String::from(
|
|
||||||
"mikan-announcement=1; .AspNetCore.Antiforgery.abc=abc; \
|
|
||||||
.AspNetCore.Identity.Application=abc; ",
|
|
||||||
),
|
|
||||||
user_agent: Some(String::from(
|
|
||||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like \
|
|
||||||
Gecko) Chrome/133.0.0.0 Safari/537.36 Edg/133.0.0.0",
|
|
||||||
)),
|
|
||||||
});
|
|
||||||
|
|
||||||
let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
|
|
||||||
context.clone(),
|
|
||||||
my_bangumi_page_url,
|
|
||||||
auth_secrecy,
|
|
||||||
&[],
|
|
||||||
);
|
|
||||||
pin_mut!(bangumi_metas);
|
|
||||||
let bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
|
|
||||||
|
|
||||||
assert!(!bangumi_metas.is_empty());
|
|
||||||
|
|
||||||
assert!(bangumi_metas[0].origin_poster_src.is_some());
|
|
||||||
|
|
||||||
assert!(my_bangumi_with_cookie_mock.matched_async().await);
|
|
||||||
|
|
||||||
expand_bangumi_mock.expect(bangumi_metas.len());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,7 +1,7 @@
+pub mod bittorrent;
 pub mod defs;
 pub mod html;
 pub mod http;
 pub mod media;
 pub mod mikan;
-pub mod rawname;
+pub mod origin;
-pub mod torrent;
1479 apps/recorder/src/extract/origin/mod.rs (Normal file)
File diff suppressed because it is too large
@@ -1,5 +0,0 @@
pub mod parser;

pub use parser::{
    extract_season_from_title_body, parse_episode_meta_from_raw_name, RawEpisodeMeta,
};
@@ -1,847 +0,0 @@
/**
 * @TODO: rewrite with nom
 */
use std::borrow::Cow;

use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::whatever;

use crate::{
    errors::RResult,
    extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
};

const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";

lazy_static! {
    static ref TITLE_RE: Regex = Regex::new(
        r#"(.*|\[.*])( -? \d+|\[\d+]|\[\d+.?[vV]\d]|第\d+[话話集]|\[第?\d+[话話集]]|\[\d+.?END]|[Ee][Pp]?\d+|\[\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*[话話集]\s*])(.*)"#
    ).unwrap();
    static ref EP_COLLECTION_RE: Regex = Regex::new(r#"\[?\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*合?[话話集]\s*]?"#).unwrap();
    static ref MOVIE_TITLE_RE: Regex = Regex::new(r#"(.*|\[.*])(剧场版|[Mm]ovie|电影)(.*?)$"#).unwrap();
    static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
    static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap();
    static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap();
    static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
    static ref PREFIX_RE: Regex =
        Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
    static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
    static ref MOVIE_SEASON_EXTRACT_RE: Regex = Regex::new(r"剧场版|Movie|电影").unwrap();
    static ref MAIN_TITLE_PREFIX_PROCESS_RE1: Regex = Regex::new(r"新番|月?番").unwrap();
    static ref MAIN_TITLE_PREFIX_PROCESS_RE2: Regex = Regex::new(r"[港澳台]{1,3}地区").unwrap();
    static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE: Regex = Regex::new(r"\[.+\]").unwrap();
    static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1: Regex = Regex::new(r"^.*?\[").unwrap();
    static ref SEASON_EXTRACT_SEASON_ALL_RE: Regex = Regex::new(r"S\d{1,2}|Season \d{1,2}|[第].[季期]|1st|2nd|3rd|\d{1,2}th").unwrap();
    static ref SEASON_EXTRACT_SEASON_EN_PREFIX_RE: Regex = Regex::new(r"Season|S").unwrap();
    static ref SEASON_EXTRACT_SEASON_EN_NTH_RE: Regex = Regex::new(r"1st|2nd|3rd|\d{1,2}th").unwrap();
    static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_RE: Regex = Regex::new(r"[第 ].*[季期(部分)]|部分").unwrap();
    static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE: Regex = Regex::new(r"[第季期 ]").unwrap();
    static ref NAME_EXTRACT_REMOVE_RE: Regex = Regex::new(r"[((]仅限[港澳台]{1,3}地区[))]").unwrap();
    static ref NAME_EXTRACT_SPLIT_RE: Regex = Regex::new(r"/|\s{2}|-\s{2}|\]\[").unwrap();
    static ref NAME_EXTRACT_REPLACE_ADHOC1_RE: Regex = Regex::new(r"([\p{scx=Han}\s\(\)]{5,})_([a-zA-Z]{2,})").unwrap();
    static ref NAME_JP_TEST: Regex = Regex::new(r"[\p{scx=Hira}\p{scx=Kana}]{2,}").unwrap();
    static ref NAME_ZH_TEST: Regex = Regex::new(r"[\p{scx=Han}]{2,}").unwrap();
    static ref NAME_EN_TEST: Regex = Regex::new(r"[a-zA-Z]{3,}").unwrap();
    static ref TAGS_EXTRACT_SPLIT_RE: Regex = Regex::new(r"[\[\]()()_]").unwrap();
    static ref CLEAR_SUB_RE: Regex = Regex::new(r"_MP4|_MKV").unwrap();
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
pub struct RawEpisodeMeta {
    pub name_en: Option<String>,
    pub name_en_no_season: Option<String>,
    pub name_jp: Option<String>,
    pub name_jp_no_season: Option<String>,
    pub name_zh: Option<String>,
    pub name_zh_no_season: Option<String>,
    pub season: i32,
    pub season_raw: Option<String>,
    pub episode_index: i32,
    pub subtitle: Option<String>,
    pub source: Option<String>,
    pub fansub: Option<String>,
    pub resolution: Option<String>,
}

fn extract_fansub(raw_name: &str) -> Option<&str> {
    let mut groups = EN_BRACKET_SPLIT_RE.splitn(raw_name, 3);
    groups.nth(1)
}

fn replace_ch_bracket_to_en(raw_name: &str) -> String {
    raw_name.replace('【', "[").replace('】', "]")
}

fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RResult<String> {
|
|
||||||
let raw_without_fansub = if let Some(fansub) = fansub {
|
|
||||||
let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
|
|
||||||
fan_sub_re.replace_all(title_body, "")
|
|
||||||
} else {
|
|
||||||
Cow::Borrowed(title_body)
|
|
||||||
};
|
|
||||||
let raw_with_prefix_replaced = PREFIX_RE.replace_all(&raw_without_fansub, "/");
|
|
||||||
let mut arg_group = raw_with_prefix_replaced
|
|
||||||
.split('/')
|
|
||||||
.map(|s| s.trim())
|
|
||||||
.filter(|s| !s.is_empty())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
if arg_group.len() == 1 {
|
|
||||||
arg_group = arg_group.first_mut().unwrap().split(' ').collect();
|
|
||||||
}
|
|
||||||
let mut raw = raw_without_fansub.to_string();
|
|
||||||
for arg in arg_group.iter() {
|
|
||||||
if (arg_group.len() <= 5 && MAIN_TITLE_PREFIX_PROCESS_RE1.is_match(arg))
|
|
||||||
|| (MAIN_TITLE_PREFIX_PROCESS_RE2.is_match(arg))
|
|
||||||
{
|
|
||||||
let sub = Regex::new(&format!(".{arg}."))?;
|
|
||||||
raw = sub.replace_all(&raw, "").to_string();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw) {
|
|
||||||
if m.len() as f32 > (raw.len() as f32) * 0.5 {
|
|
||||||
let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
|
|
||||||
.replace(&raw, "")
|
|
||||||
.chars()
|
|
||||||
.collect_vec();
|
|
||||||
while let Some(ch) = raw1.pop() {
|
|
||||||
if ch == ']' {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
raw = raw1.into_iter().collect();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(raw.to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
|
|
||||||
let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
|
|
||||||
let seasons = SEASON_EXTRACT_SEASON_ALL_RE
|
|
||||||
.find(&name_and_season)
|
|
||||||
.into_iter()
|
|
||||||
.map(|s| s.as_str())
|
|
||||||
.collect_vec();
|
|
||||||
|
|
||||||
if seasons.is_empty() {
|
|
||||||
return (title_body.to_string(), None, 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut season = 1;
|
|
||||||
let mut season_raw = None;
|
|
||||||
let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");
|
|
||||||
|
|
||||||
for s in seasons {
|
|
||||||
season_raw = Some(s);
|
|
||||||
if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s) {
|
|
||||||
if let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
|
|
||||||
.replace_all(m.as_str(), "")
|
|
||||||
.parse::<i32>()
|
|
||||||
{
|
|
||||||
season = s;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s) {
|
|
||||||
if let Some(s) = DIGIT_1PLUS_REG
|
|
||||||
.find(m.as_str())
|
|
||||||
.and_then(|s| s.as_str().parse::<i32>().ok())
|
|
||||||
{
|
|
||||||
season = s;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
|
|
||||||
if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
|
|
||||||
.replace(m.as_str(), "")
|
|
||||||
.parse::<i32>()
|
|
||||||
{
|
|
||||||
season = s;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
|
|
||||||
season = ZH_NUM_MAP[m.as_str()];
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
(name.to_string(), season_raw.map(|s| s.to_string()), season)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_name_from_title_body_name_section(
|
|
||||||
title_body_name_section: &str,
|
|
||||||
) -> (Option<String>, Option<String>, Option<String>) {
|
|
||||||
let mut name_en = None;
|
|
||||||
let mut name_zh = None;
|
|
||||||
let mut name_jp = None;
|
|
||||||
let replaced1 = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
|
|
||||||
let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE
|
|
||||||
.replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
|
|
||||||
let trimmed = replaced2.trim();
|
|
||||||
let mut split = NAME_EXTRACT_SPLIT_RE
|
|
||||||
.split(trimmed)
|
|
||||||
.map(|s| s.trim())
|
|
||||||
.filter(|s| !s.is_empty())
|
|
||||||
.map(|s| s.to_string())
|
|
||||||
.collect_vec();
|
|
||||||
if split.len() == 1 {
|
|
||||||
let mut split_space = split[0].split(' ').collect_vec();
|
|
||||||
let mut search_indices = vec![0];
|
|
||||||
if split_space.len() > 1 {
|
|
||||||
search_indices.push(split_space.len() - 1);
|
|
||||||
}
|
|
||||||
for i in search_indices {
|
|
||||||
if NAME_ZH_TEST.is_match(split_space[i]) {
|
|
||||||
let chs = split_space[i];
|
|
||||||
split_space.remove(i);
|
|
||||||
split = vec![chs.to_string(), split_space.join(" ")];
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for item in split {
|
|
||||||
if NAME_JP_TEST.is_match(&item) && name_jp.is_none() {
|
|
||||||
name_jp = Some(item);
|
|
||||||
} else if NAME_ZH_TEST.is_match(&item) && name_zh.is_none() {
|
|
||||||
name_zh = Some(item);
|
|
||||||
} else if NAME_EN_TEST.is_match(&item) && name_en.is_none() {
|
|
||||||
name_en = Some(item);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
(name_en, name_zh, name_jp)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
|
|
||||||
DIGIT_1PLUS_REG
|
|
||||||
.find(title_episode)?
|
|
||||||
.as_str()
|
|
||||||
.parse::<i32>()
|
|
||||||
.ok()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn clear_sub(sub: Option<String>) -> Option<String> {
|
|
||||||
sub.map(|s| CLEAR_SUB_RE.replace_all(&s, "").to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_tags_from_title_extra(
|
|
||||||
title_extra: &str,
|
|
||||||
) -> (Option<String>, Option<String>, Option<String>) {
|
|
||||||
let replaced = TAGS_EXTRACT_SPLIT_RE.replace_all(title_extra, " ");
|
|
||||||
let elements = replaced
|
|
||||||
.split(' ')
|
|
||||||
.map(|s| s.trim())
|
|
||||||
.filter(|s| !s.is_empty())
|
|
||||||
.collect_vec();
|
|
||||||
|
|
||||||
let mut sub = None;
|
|
||||||
let mut resolution = None;
|
|
||||||
let mut source = None;
|
|
||||||
for element in elements.iter() {
|
|
||||||
if SUB_RE.is_match(element) {
|
|
||||||
sub = Some(element.to_string())
|
|
||||||
} else if RESOLUTION_RE.is_match(element) {
|
|
||||||
resolution = Some(element.to_string())
|
|
||||||
} else if SOURCE_L1_RE.is_match(element) {
|
|
||||||
source = Some(element.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if source.is_none() {
|
|
||||||
for element in elements {
|
|
||||||
if SOURCE_L2_RE.is_match(element) {
|
|
||||||
source = Some(element.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
(clear_sub(sub), resolution, source)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn check_is_movie(title: &str) -> bool {
|
|
||||||
MOVIE_TITLE_RE.is_match(title)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn parse_episode_meta_from_raw_name(s: &str) -> RResult<RawEpisodeMeta> {
|
|
||||||
let raw_title = s.trim();
|
|
||||||
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
|
|
||||||
let fansub = extract_fansub(&raw_title_without_ch_brackets);
|
|
||||||
let movie_capture = check_is_movie(&raw_title_without_ch_brackets);
|
|
||||||
if let Some(title_re_match_obj) = MOVIE_TITLE_RE
|
|
||||||
.captures(&raw_title_without_ch_brackets)
|
|
||||||
.or(TITLE_RE.captures(&raw_title_without_ch_brackets))
|
|
||||||
{
|
|
||||||
let mut title_body = title_re_match_obj
|
|
||||||
.get(1)
|
|
||||||
.map(|s| s.as_str().trim())
|
|
||||||
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"))
|
|
||||||
.to_string();
|
|
||||||
let mut title_episode = title_re_match_obj
|
|
||||||
.get(2)
|
|
||||||
.map(|s| s.as_str().trim())
|
|
||||||
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
|
|
||||||
let title_extra = title_re_match_obj
|
|
||||||
.get(3)
|
|
||||||
.map(|s| s.as_str().trim())
|
|
||||||
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
|
|
||||||
|
|
||||||
if movie_capture {
|
|
||||||
title_body += title_episode;
|
|
||||||
title_episode = "";
|
|
||||||
} else if EP_COLLECTION_RE.is_match(title_episode) {
|
|
||||||
title_episode = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
let title_body = title_body_pre_process(&title_body, fansub)?;
|
|
||||||
let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
|
|
||||||
let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
|
|
||||||
let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
|
|
||||||
extract_name_from_title_body_name_section(&name_without_season);
|
|
||||||
let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1);
|
|
||||||
let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
|
|
||||||
Ok(RawEpisodeMeta {
|
|
||||||
name_en,
|
|
||||||
name_en_no_season,
|
|
||||||
name_jp,
|
|
||||||
name_jp_no_season,
|
|
||||||
name_zh,
|
|
||||||
name_zh_no_season,
|
|
||||||
season,
|
|
||||||
season_raw,
|
|
||||||
episode_index,
|
|
||||||
subtitle: sub,
|
|
||||||
source,
|
|
||||||
fansub: fansub.map(|s| s.to_string()),
|
|
||||||
resolution,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
whatever!("Can not parse episode meta from raw filename {}", raw_title)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
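Since the removed parser is exercised only through the test table below, a minimal standalone sketch of how `parse_episode_meta_from_raw_name` was called may be useful; the sample filename and expected values are copied from the test cases, and the `main` wrapper is illustrative only:

fn main() -> RResult<()> {
    // Raw torrent title, taken verbatim from the tests below.
    let raw = "[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]";

    // The parser returns a structured RawEpisodeMeta on success.
    let meta = parse_episode_meta_from_raw_name(raw)?;
    assert_eq!(meta.season, 2);
    assert_eq!(meta.episode_index, 8);
    assert_eq!(meta.fansub.as_deref(), Some("LoliHouse"));
    assert_eq!(meta.resolution.as_deref(), Some("1080p"));

    Ok(())
}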
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
|
|
||||||
use super::{RawEpisodeMeta, parse_episode_meta_from_raw_name};
|
|
||||||
|
|
||||||
fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
|
|
||||||
let expected: Option<RawEpisodeMeta> = serde_json::from_str(expected).unwrap_or_default();
|
|
||||||
let found = parse_episode_meta_from_raw_name(raw_name).ok();
|
|
||||||
|
|
||||||
if expected != found {
|
|
||||||
println!(
|
|
||||||
"expected {} and found {} are not equal",
|
|
||||||
serde_json::to_string_pretty(&expected).unwrap(),
|
|
||||||
serde_json::to_string_pretty(&found).unwrap()
|
|
||||||
)
|
|
||||||
}
|
|
||||||
assert_eq!(expected, found);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_all_parts_wrapped() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
|
|
||||||
r#"{
|
|
||||||
"name_zh": "我心里危险的东西",
|
|
||||||
"name_zh_no_season": "我心里危险的东西",
|
|
||||||
"season": 2,
|
|
||||||
"season_raw": "第二季",
|
|
||||||
"episode_index": 5,
|
|
||||||
"subtitle": "简日双语",
|
|
||||||
"source": null,
|
|
||||||
"fansub": "新Sub",
|
|
||||||
"resolution": "1080P"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_title_wrapped_by_one_square_bracket_and_season_prefix() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Boku no Kokoro no Yabai Yatsu",
|
|
||||||
"name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
|
|
||||||
"name_zh": "我内心的糟糕念头",
|
|
||||||
"name_zh_no_season": "我内心的糟糕念头",
|
|
||||||
"season": 1,
|
|
||||||
"season_raw": null,
|
|
||||||
"episode_index": 18,
|
|
||||||
"subtitle": "简日双语",
|
|
||||||
"source": null,
|
|
||||||
"fansub": "喵萌奶茶屋",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_ep_and_version() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Shin no Nakama 2nd",
|
|
||||||
"name_en_no_season": "Shin no Nakama",
|
|
||||||
"name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
|
|
||||||
"name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
|
|
||||||
"season": 2,
|
|
||||||
"season_raw": "2nd",
|
|
||||||
"episode_index": 8,
|
|
||||||
"subtitle": "简繁内封字幕",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "LoliHouse",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_en_title_only() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
|
|
||||||
r#"{
|
|
||||||
"name_en": "THE MARGINAL SERVICE",
|
|
||||||
"name_en_no_season": "THE MARGINAL SERVICE",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 8,
|
|
||||||
"subtitle": "简繁内封字幕",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "动漫国字幕组&LoliHouse",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_two_zh_title() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Nozomanu Fushi no Boukensha",
|
|
||||||
"name_en_no_season": "Nozomanu Fushi no Boukensha",
|
|
||||||
"name_zh": "事与愿违的不死冒险者",
|
|
||||||
"name_zh_no_season": "事与愿违的不死冒险者",
|
|
||||||
"season": 1,
|
|
||||||
"season_raw": null,
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "简繁内封字幕",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "LoliHouse",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_en_zh_jp_titles() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Pon no Michi",
|
|
||||||
"name_jp": "ぽんのみち",
|
|
||||||
"name_zh": "碰之道",
|
|
||||||
"name_en_no_season": "Pon no Michi",
|
|
||||||
"name_jp_no_season": "ぽんのみち",
|
|
||||||
"name_zh_no_season": "碰之道",
|
|
||||||
"season": 1,
|
|
||||||
"season_raw": null,
|
|
||||||
"episode_index": 7,
|
|
||||||
"subtitle": "简繁日内封字幕",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "喵萌奶茶屋&LoliHouse",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_nth_season() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Yowai Character Tomozakikun",
|
|
||||||
"name_en_no_season": "Yowai Character Tomozakikun",
|
|
||||||
"name_zh": "弱角友崎同学 2nd STAGE",
|
|
||||||
"name_zh_no_season": "弱角友崎同学",
|
|
||||||
"season": 2,
|
|
||||||
"season_raw": "2nd",
|
|
||||||
"episode_index": 9,
|
|
||||||
"subtitle": "CHT",
|
|
||||||
"source": "Baha",
|
|
||||||
"fansub": "ANi",
|
|
||||||
"resolution": "1080P"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_season_en_and_season_zh() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Kingdom S5",
|
|
||||||
"name_en_no_season": "Kingdom",
|
|
||||||
"name_zh": "王者天下 第五季",
|
|
||||||
"name_zh_no_season": "王者天下",
|
|
||||||
"season": 5,
|
|
||||||
"season_raw": "第五季",
|
|
||||||
"episode_index": 7,
|
|
||||||
"subtitle": "简繁外挂字幕",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "豌豆字幕组&LoliHouse",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_airota_fansub_style_case1() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Alice to Therese no Maboroshi Koujou",
|
|
||||||
"name_en_no_season": "Alice to Therese no Maboroshi Koujou",
|
|
||||||
"name_zh": "爱丽丝与特蕾丝的虚幻工厂",
|
|
||||||
"name_zh_no_season": "爱丽丝与特蕾丝的虚幻工厂",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "简繁内封",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "千夏字幕组",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_airota_fansub_style_case2() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Yuru Camp Movie",
|
|
||||||
"name_en_no_season": "Yuru Camp Movie",
|
|
||||||
"name_zh": "电影 轻旅轻营 (摇曳露营)",
|
|
||||||
"name_zh_no_season": "电影 轻旅轻营 (摇曳露营)",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "繁体",
|
|
||||||
"source": "UHDRip",
|
|
||||||
"fansub": "千夏字幕组&喵萌奶茶屋",
|
|
||||||
"resolution": "2160p"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_large_episode_style() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "New Doraemon",
|
|
||||||
"name_en_no_season": "New Doraemon",
|
|
||||||
"name_zh": "哆啦A梦新番",
|
|
||||||
"name_zh_no_season": "哆啦A梦新番",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 747,
|
|
||||||
"subtitle": "GB",
|
|
||||||
"fansub": "梦蓝字幕组",
|
|
||||||
"resolution": "1080P"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_many_square_brackets_split_title() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Yuru Camp",
|
|
||||||
"name_en_no_season": "Yuru Camp",
|
|
||||||
"name_zh": "剧场版-摇曳露营",
|
|
||||||
"name_zh_no_season": "剧场版-摇曳露营",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "简日双语",
|
|
||||||
"fansub": "MCE汉化组",
|
|
||||||
"resolution": "1080P"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_implicit_lang_title_sep() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "NieR Automata Ver1.1a",
|
|
||||||
"name_en_no_season": "NieR Automata Ver1.1a",
|
|
||||||
"name_zh": "尼尔:机械纪元",
|
|
||||||
"name_zh_no_season": "尼尔:机械纪元",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 2,
|
|
||||||
"subtitle": "简日双语",
|
|
||||||
"fansub": "织梦字幕组",
|
|
||||||
"resolution": "1080P"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_square_brackets_wrapped_and_space_split() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
|
|
||||||
r#"
|
|
||||||
{
|
|
||||||
"name_en": "Delicious in Dungeon",
|
|
||||||
"name_en_no_season": "Delicious in Dungeon",
|
|
||||||
"name_zh": "迷宫饭",
|
|
||||||
"name_zh_no_season": "迷宫饭",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 3,
|
|
||||||
"subtitle": "日语中字",
|
|
||||||
"source": "NETFLIX",
|
|
||||||
"fansub": "天月搬运组",
|
|
||||||
"resolution": "1080P"
|
|
||||||
}
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_start_with_brackets_wrapped_season_info_prefix() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Dungeon Meshi",
|
|
||||||
"name_en_no_season": "Dungeon Meshi",
|
|
||||||
"name_zh": "迷宫饭",
|
|
||||||
"name_zh_no_season": "迷宫饭",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "简日双语",
|
|
||||||
"fansub": "爱恋字幕社",
|
|
||||||
"resolution": "1080P"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_small_no_title_extra_brackets_case() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Mahou Shoujo ni Akogarete",
|
|
||||||
"name_en_no_season": "Mahou Shoujo ni Akogarete",
|
|
||||||
"name_zh": "梦想成为魔法少女 [年龄限制版]",
|
|
||||||
"name_zh_no_season": "梦想成为魔法少女 [年龄限制版]",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 9,
|
|
||||||
"subtitle": "CHT",
|
|
||||||
"source": "Baha",
|
|
||||||
"fansub": "ANi",
|
|
||||||
"resolution": "1080P"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_title_leading_space_style() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
|
|
||||||
r#"{
|
|
||||||
"name_zh": "16bit 的感动 ANOTHER LAYER",
|
|
||||||
"name_zh_no_season": "16bit 的感动 ANOTHER LAYER",
|
|
||||||
"season": 1,
|
|
||||||
"season_raw": null,
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "CHT",
|
|
||||||
"source": "Baha",
|
|
||||||
"fansub": "ANi",
|
|
||||||
"resolution": "1080P"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_title_leading_month_and_wrapped_brackets_style() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "~ Sugar Apple Fairy Tale ~",
|
|
||||||
"name_en_no_season": "~ Sugar Apple Fairy Tale ~",
|
|
||||||
"name_zh": "银砂糖师与黑妖精",
|
|
||||||
"name_zh_no_season": "银砂糖师与黑妖精",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 13,
|
|
||||||
"subtitle": "简日双语",
|
|
||||||
"fansub": "喵萌奶茶屋",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_title_leading_month_style() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4(字幕社招人内详)"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Tengoku Daimakyou",
|
|
||||||
"name_en_no_season": "Tengoku Daimakyou",
|
|
||||||
"name_zh": "天国大魔境",
|
|
||||||
"name_zh_no_season": "天国大魔境",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 5,
|
|
||||||
"subtitle": "字幕社招人内详",
|
|
||||||
"source": null,
|
|
||||||
"fansub": "极影字幕社",
|
|
||||||
"resolution": "720P"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_tokusatsu_style() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#,
|
|
||||||
r#"{
|
|
||||||
"name_jp": "仮面ライダーギーツ",
|
|
||||||
"name_jp_no_season": "仮面ライダーギーツ",
|
|
||||||
"name_zh": "假面骑士Geats",
|
|
||||||
"name_zh_no_season": "假面骑士Geats",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 33,
|
|
||||||
"source": "WEBDL",
|
|
||||||
"fansub": "MagicStar",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ep_with_multi_lang_zh_title() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对!☆PICO FEVER! / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Garupa Pico: Fever!",
|
|
||||||
"name_en_no_season": "Garupa Pico: Fever!",
|
|
||||||
"name_zh": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
|
|
||||||
"name_zh_no_season": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 26,
|
|
||||||
"subtitle": "简繁内封字幕",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "百冬练习组&LoliHouse",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_ep_collections() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[奶²&LoliHouse] 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简日内封字幕]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Kinokoinu: Mushroom Pup",
|
|
||||||
"name_en_no_season": "Kinokoinu: Mushroom Pup",
|
|
||||||
"name_zh": "蘑菇狗",
|
|
||||||
"name_zh_no_season": "蘑菇狗",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "简日内封字幕",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "奶²&LoliHouse",
|
|
||||||
"resolution": "1080p",
|
|
||||||
"name": " 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集]"
|
|
||||||
}"#,
|
|
||||||
);
|
|
||||||
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[LoliHouse] 叹气的亡灵想隐退 / Nageki no Bourei wa Intai shitai [01-13 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Nageki no Bourei wa Intai shitai",
|
|
||||||
"name_en_no_season": "Nageki no Bourei wa Intai shitai",
|
|
||||||
"name_jp": null,
|
|
||||||
"name_jp_no_season": null,
|
|
||||||
"name_zh": "叹气的亡灵想隐退",
|
|
||||||
"name_zh_no_season": "叹气的亡灵想隐退",
|
|
||||||
"season": 1,
|
|
||||||
"season_raw": null,
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "简繁内封字幕",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "LoliHouse",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
);
|
|
||||||
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[LoliHouse] 精灵幻想记 第二季 / Seirei Gensouki S2 [01-12 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "Seirei Gensouki S2",
|
|
||||||
"name_en_no_season": "Seirei Gensouki",
|
|
||||||
"name_zh": "精灵幻想记 第二季",
|
|
||||||
"name_zh_no_season": "精灵幻想记",
|
|
||||||
"season": 2,
|
|
||||||
"season_raw": "第二季",
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "简繁内封字幕",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "LoliHouse",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
);
|
|
||||||
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[喵萌奶茶屋&LoliHouse] 超自然武装当哒当 / 胆大党 / Dandadan [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简繁日内封字幕][Fin]"#,
|
|
||||||
r#" {
|
|
||||||
"name_en": "Dandadan",
|
|
||||||
"name_en_no_season": "Dandadan",
|
|
||||||
"name_zh": "超自然武装当哒当",
|
|
||||||
"name_zh_no_season": "超自然武装当哒当",
|
|
||||||
"season": 1,
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "简繁日内封字幕",
|
|
||||||
"source": "WebRip",
|
|
||||||
"fansub": "喵萌奶茶屋&LoliHouse",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: FIXME
|
|
||||||
#[test]
|
|
||||||
fn test_bad_cases() {
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
|
|
||||||
r#"{
|
|
||||||
"name_zh": "摇曳露营△剧场版",
|
|
||||||
"name_zh_no_season": "摇曳露营△剧场版",
|
|
||||||
"season": 1,
|
|
||||||
"season_raw": null,
|
|
||||||
"episode_index": 1,
|
|
||||||
"subtitle": "简繁字幕",
|
|
||||||
"source": "BDrip",
|
|
||||||
"fansub": "7³ACG x 桜都字幕组",
|
|
||||||
"resolution": "1080p"
|
|
||||||
}"#,
|
|
||||||
);
|
|
||||||
|
|
||||||
test_raw_ep_parser_case(
|
|
||||||
r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#,
|
|
||||||
r#"{
|
|
||||||
"name_en": "第二季 Komi-san wa, Komyushou Desu. S02",
|
|
||||||
"name_en_no_season": "Komi-san wa, Komyushou Desu.",
|
|
||||||
"name_zh": "古见同学有交流障碍症",
|
|
||||||
"name_zh_no_season": "古见同学有交流障碍症",
|
|
||||||
"season": 2,
|
|
||||||
"season_raw": "第二季",
|
|
||||||
"episode_index": 22,
|
|
||||||
"subtitle": "GB",
|
|
||||||
"fansub": "幻樱字幕组",
|
|
||||||
"resolution": "1920X1080"
|
|
||||||
}"#,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
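// Illustrative sketch (not part of the diff): `test_raw_ep_parser_case` is defined earlier in
// this test module and is not shown in this hunk. It is assumed to parse the raw release title
// and compare only the fields listed in the expected JSON; a minimal version under that
// assumption, using a hypothetical `parse_episode_meta_from_raw_name` entry point, could be:
fn test_raw_ep_parser_case_sketch(raw_name: &str, expected_json: &str) {
    let expected: serde_json::Value =
        serde_json::from_str(expected_json).expect("expected JSON must parse");
    let parsed = parse_episode_meta_from_raw_name(raw_name).expect("raw name should parse");
    let parsed_json = serde_json::to_value(&parsed).expect("parsed meta should serialize");
    for (field, expected_value) in expected.as_object().expect("expected JSON must be an object") {
        // Only fields present in the expected JSON are checked; omitted fields stay unconstrained.
        assert_eq!(
            parsed_json.get(field),
            Some(expected_value),
            "field `{field}` mismatch for `{raw_name}`"
        );
    }
}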
@@ -1,3 +0,0 @@
mod parser;

pub use parser::*;
@@ -1,9 +0,0 @@
pub mod core;
pub mod secrecy;

pub use core::{
    HttpClient, HttpClientCacheBackendConfig, HttpClientCachePresetConfig, HttpClientConfig,
    HttpClientError, HttpClientTrait,
};

pub use secrecy::{HttpClientCookiesAuth, HttpClientSecrecyDataTrait};
@@ -1,47 +0,0 @@
use std::sync::Arc;

use cookie::Cookie;
use reqwest::{ClientBuilder, cookie::Jar};
use url::Url;

use crate::errors::RError;

pub trait HttpClientSecrecyDataTrait {
    fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
        client_builder
    }
}

#[derive(Default)]
pub struct HttpClientCookiesAuth {
    pub cookie_jar: Arc<Jar>,
    pub user_agent: Option<String>,
}

impl HttpClientCookiesAuth {
    pub fn from_cookies(
        cookies: &str,
        url: &Url,
        user_agent: Option<String>,
    ) -> Result<Self, RError> {
        let cookie_jar = Arc::new(Jar::default());
        for cookie in Cookie::split_parse(cookies).try_collect::<Vec<_>>()? {
            cookie_jar.add_cookie_str(&cookie.to_string(), url);
        }

        Ok(Self {
            cookie_jar,
            user_agent,
        })
    }
}

impl HttpClientSecrecyDataTrait for HttpClientCookiesAuth {
    fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
        let mut client_builder = client_builder.cookie_provider(self.cookie_jar.clone());
        if let Some(ref user_agent) = self.user_agent {
            client_builder = client_builder.user_agent(user_agent);
        }
        client_builder
    }
}
@@ -1,16 +0,0 @@
pub mod bytes;
pub mod client;
pub mod core;
pub mod html;
pub mod image;
pub mod oidc;

pub use core::get_random_mobile_ua;

pub use bytes::fetch_bytes;
pub use client::{
    HttpClient, HttpClientConfig, HttpClientCookiesAuth, HttpClientError,
    HttpClientSecrecyDataTrait, HttpClientTrait,
};
pub use html::fetch_html;
pub use image::fetch_image;
@@ -1,36 +0,0 @@
use std::{future::Future, pin::Pin};

use axum::http;

use super::{HttpClient, client::HttpClientError};

impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
    type Error = HttpClientError;

    #[cfg(target_arch = "wasm32")]
    type Future = Pin<Box<dyn Future<Output = Result<HttpResponse, Self::Error>> + 'c>>;
    #[cfg(not(target_arch = "wasm32"))]
    type Future =
        Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;

    fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
        Box::pin(async move {
            let response = self.execute(request.try_into()?).await?;

            let mut builder = http::Response::builder().status(response.status());

            #[cfg(not(target_arch = "wasm32"))]
            {
                builder = builder.version(response.version());
            }

            for (name, value) in response.headers().iter() {
                builder = builder.header(name, value);
            }

            builder
                .body(response.bytes().await?.to_vec())
                .map_err(HttpClientError::from)
        })
    }
}
14
apps/recorder/src/graphql/domains/bangumi.rs
Normal file
@@ -0,0 +1,14 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::bangumi};

pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId);
}

pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<bangumi::BangumiType>();
    seaography::register_entity!(builder, bangumi);

    builder
}
168
apps/recorder/src/graphql/domains/credential_3rd.rs
Normal file
@@ -0,0 +1,168 @@
use std::sync::Arc;

use async_graphql::dynamic::{
    Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
};
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql;

use crate::{
    app::AppContextTrait,
    auth::AuthUserInfo,
    errors::RecorderError,
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
        infra::crypto::{
            register_crypto_column_input_conversion_to_schema_context,
            register_crypto_column_output_conversion_to_schema_context,
        },
    },
    models::credential_3rd,
};

#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
struct Credential3rdCheckAvailableInput {
    pub id: i32,
}

impl Credential3rdCheckAvailableInput {
    fn input_type_name() -> &'static str {
        "Credential3rdCheckAvailableInput"
    }

    fn arg_name() -> &'static str {
        "filter"
    }

    fn generate_input_object() -> InputObject {
        InputObject::new(Self::input_type_name())
            .description("The input of the credential3rdCheckAvailable query")
            .field(InputValue::new(
                Credential3rdCheckAvailableInputFieldEnum::Id.as_str(),
                TypeRef::named_nn(TypeRef::INT),
            ))
    }
}

#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct Credential3rdCheckAvailableInfo {
    pub available: bool,
}

impl Credential3rdCheckAvailableInfo {
    fn object_type_name() -> &'static str {
        "Credential3rdCheckAvailableInfo"
    }

    fn generate_output_object() -> Object {
        Object::new(Self::object_type_name())
            .description("The output of the credential3rdCheckAvailable query")
            .field(Field::new(
                Credential3rdCheckAvailableInfoFieldEnum::Available,
                TypeRef::named_nn(TypeRef::BOOLEAN),
                move |ctx| {
                    FieldFuture::new(async move {
                        let subscription_info = ctx.parent_value.try_downcast_ref::<Self>()?;
                        Ok(Some(async_graphql::Value::from(
                            subscription_info.available,
                        )))
                    })
                },
            ))
    }
}

pub fn register_credential3rd_to_schema_context(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
) {
    restrict_subscriber_for_entity::<credential_3rd::Entity>(
        context,
        &credential_3rd::Column::SubscriberId,
    );
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Cookies,
    );
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Username,
    );
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Password,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Cookies,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Username,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx,
        &credential_3rd::Column::Password,
    );
}

pub fn register_credential3rd_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.register_enumeration::<credential_3rd::Credential3rdType>();
    seaography::register_entity!(builder, credential_3rd);

    builder.schema = builder
        .schema
        .register(Credential3rdCheckAvailableInput::generate_input_object());
    builder.schema = builder
        .schema
        .register(Credential3rdCheckAvailableInfo::generate_output_object());

    builder.queries.push(
        Field::new(
            "credential3rdCheckAvailable",
            TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
            move |ctx| {
                FieldFuture::new(async move {
                    let auth_user_info = ctx.data::<AuthUserInfo>()?;
                    let input: Credential3rdCheckAvailableInput = ctx
                        .args
                        .get(Credential3rdCheckAvailableInput::arg_name())
                        .unwrap()
                        .deserialize()?;
                    let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;

                    let credential_model = credential_3rd::Model::find_by_id_and_subscriber_id(
                        app_ctx.as_ref(),
                        input.id,
                        auth_user_info.subscriber_auth.subscriber_id,
                    )
                    .await?
                    .ok_or_else(|| RecorderError::Credential3rdError {
                        message: format!("credential = {} not found", input.id),
                        source: None.into(),
                    })?;

                    let available = credential_model.check_available(app_ctx.as_ref()).await?;
                    Ok(Some(FieldValue::owned_any(
                        Credential3rdCheckAvailableInfo { available },
                    )))
                })
            },
        )
        .argument(InputValue::new(
            Credential3rdCheckAvailableInput::arg_name(),
            TypeRef::named_nn(Credential3rdCheckAvailableInput::input_type_name()),
        )),
    );

    builder
}
17
apps/recorder/src/graphql/domains/downloaders.rs
Normal file
@@ -0,0 +1,17 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloaders};

pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<downloaders::Entity>(
        context,
        &downloaders::Column::SubscriberId,
    );
}

pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<downloaders::DownloaderCategory>();
    seaography::register_entity!(builder, downloaders);

    builder
}
15
apps/recorder/src/graphql/domains/downloads.rs
Normal file
@@ -0,0 +1,15 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloads};

pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId);
}

pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<downloads::DownloadStatus>();
    builder.register_enumeration::<downloads::DownloadMime>();
    seaography::register_entity!(builder, downloads);

    builder
}
14
apps/recorder/src/graphql/domains/episodes.rs
Normal file
@@ -0,0 +1,14 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::episodes};

pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId);
}

pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<episodes::EpisodeType>();
    seaography::register_entity!(builder, episodes);

    builder
}
56
apps/recorder/src/graphql/domains/feeds.rs
Normal file
@@ -0,0 +1,56 @@
use std::sync::Arc;

use async_graphql::dynamic::ResolverContext;
use sea_orm::Value as SeaValue;
use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};

use crate::{
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
        infra::util::{get_entity_column_key, get_entity_key},
    },
    models::feeds,
};

pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<feeds::Entity>(context, &feeds::Column::SubscriberId);
    {
        let entity_column_key =
            get_entity_column_key::<feeds::Entity>(context, &feeds::Column::Token);
        let entity_key = get_entity_key::<feeds::Entity>(context);
        let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
        let entity_create_one_mutation_field_name = Arc::new(format!(
            "{}{}",
            entity_name, context.entity_create_one_mutation.mutation_suffix
        ));
        let entity_create_batch_mutation_field_name = Arc::new(format!(
            "{}{}",
            entity_name,
            context.entity_create_batch_mutation.mutation_suffix.clone()
        ));

        context.types.input_none_conversions.insert(
            entity_column_key,
            Box::new(
                move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
                    let field_name = context.field().name();
                    if field_name == entity_create_one_mutation_field_name.as_str()
                        || field_name == entity_create_batch_mutation_field_name.as_str()
                    {
                        Ok(Some(SeaValue::String(Some(Box::new(nanoid::nanoid!())))))
                    } else {
                        Ok(None)
                    }
                },
            ),
        );
    }
}

pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<feeds::FeedType>();
    builder.register_enumeration::<feeds::FeedSource>();
    seaography::register_entity!(builder, feeds);

    builder
}
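// Net effect of the `input_none_conversions` entry registered above (illustrative sketch, not
// part of the diff): when a feed is created through the generated createOne/createBatch mutations
// and no token is supplied, a server-side nanoid token is filled in. Conceptually:
fn default_feed_token_sketch(supplied_token: Option<String>) -> String {
    // Equivalent of returning `SeaValue::String(Some(Box::new(nanoid::nanoid!())))` for the two
    // create mutations and `None` (no override) for every other field access.
    supplied_token.unwrap_or_else(|| nanoid::nanoid!())
}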
12
apps/recorder/src/graphql/domains/mod.rs
Normal file
@@ -0,0 +1,12 @@
pub mod credential_3rd;

pub mod bangumi;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
pub mod feeds;
pub mod subscriber_tasks;
pub mod subscribers;
pub mod subscription_bangumi;
pub mod subscription_episode;
pub mod subscriptions;
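// How the per-domain registration functions declared by these modules are assumed to compose
// (the real call site is outside this hunk; the function below is only a sketch): every
// `register_*_to_schema_context` mutates the shared `BuilderContext` before the builder is built,
// and every `register_*_to_schema_builder` then threads the `SeaographyBuilder` through in turn.
fn register_all_domains_to_schema_builder_sketch(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder = bangumi::register_bangumi_to_schema_builder(builder);
    builder = downloaders::register_downloaders_to_schema_builder(builder);
    builder = downloads::register_downloads_to_schema_builder(builder);
    builder = episodes::register_episodes_to_schema_builder(builder);
    builder = feeds::register_feeds_to_schema_builder(builder);
    // credential_3rd, subscriber_tasks, subscribers, subscription_* and subscriptions follow the
    // same pattern (credential_3rd's context registration additionally takes the app context).
    builder
}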
151
apps/recorder/src/graphql/domains/subscriber_tasks.rs
Normal file
@@ -0,0 +1,151 @@
use std::{ops::Deref, sync::Arc};

use async_graphql::dynamic::{FieldValue, TypeRef};
use sea_orm::{
    ColumnTrait, ConnectionTrait, EntityTrait, QueryFilter, QuerySelect, QueryTrait, prelude::Expr,
    sea_query::Query,
};
use seaography::{
    Builder as SeaographyBuilder, BuilderContext, EntityDeleteMutationBuilder, EntityObjectBuilder,
    EntityQueryFieldBuilder, get_filter_conditions,
};

use crate::{
    errors::RecorderError,
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
        infra::{
            custom::generate_entity_filter_mutation_field,
            json::{convert_jsonb_output_case_for_entity, restrict_jsonb_filter_input_for_entity},
        },
    },
    models::subscriber_tasks,
    task::{ApalisJobs, ApalisSchema},
};

pub fn register_subscriber_tasks_entity_mutations(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    let context = builder.context;
    {
        let entitity_delete_mutation_builder = EntityDeleteMutationBuilder { context };
        let delete_mutation = generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
            context,
            entitity_delete_mutation_builder.type_name::<subscriber_tasks::Entity>(),
            TypeRef::named_nn(TypeRef::INT),
            Arc::new(|resolver_ctx, app_ctx, filters| {
                let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
                    resolver_ctx,
                    context,
                    filters,
                );
                Box::pin(async move {
                    let db = app_ctx.db();

                    let select_subquery = subscriber_tasks::Entity::find()
                        .select_only()
                        .column(subscriber_tasks::Column::Id)
                        .filter(filters_condition);

                    let delete_query = Query::delete()
                        .from_table((ApalisSchema::Schema, ApalisJobs::Table))
                        .and_where(
                            Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()),
                        )
                        .to_owned();

                    let db_backend = db.deref().get_database_backend();
                    let delete_statement = db_backend.build(&delete_query);

                    let result = db.execute(delete_statement).await?;

                    Ok::<_, RecorderError>(Some(FieldValue::value(result.rows_affected() as i32)))
                })
            }),
        );
        builder.mutations.push(delete_mutation);
    }
    {
        let entity_object_builder = EntityObjectBuilder { context };
        let entity_query_field = EntityQueryFieldBuilder { context };
        let entity_retry_one_mutation_name = format!(
            "{}RetryOne",
            entity_query_field.type_name::<subscriber_tasks::Entity>()
        );
        let retry_one_mutation =
            generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
                context,
                entity_retry_one_mutation_name,
                TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
                Arc::new(|resolver_ctx, app_ctx, filters| {
                    let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
                        resolver_ctx,
                        context,
                        filters,
                    );
                    Box::pin(async move {
                        let db = app_ctx.db();

                        let job_id = subscriber_tasks::Entity::find()
                            .filter(filters_condition)
                            .select_only()
                            .column(subscriber_tasks::Column::Id)
                            .into_tuple::<String>()
                            .one(db)
                            .await?
                            .ok_or_else(|| RecorderError::ModelEntityNotFound {
                                entity: "SubscriberTask".into(),
                            })?;

                        let task = app_ctx.task();
                        task.retry_subscriber_task(job_id.clone()).await?;

                        let task_model = subscriber_tasks::Entity::find()
                            .filter(subscriber_tasks::Column::Id.eq(&job_id))
                            .one(db)
                            .await?
                            .ok_or_else(|| RecorderError::ModelEntityNotFound {
                                entity: "SubscriberTask".into(),
                            })?;

                        Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
                    })
                }),
            );
        builder.mutations.push(retry_one_mutation);
    }

    builder
}

pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
        context,
        &subscriber_tasks::Column::SubscriberId,
    );
    restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
        context,
        &subscriber_tasks::Column::Job,
    );
    convert_jsonb_output_case_for_entity::<subscriber_tasks::Entity>(
        context,
        &subscriber_tasks::Column::Job,
    );
}

pub fn register_subscriber_tasks_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.register_entity::<subscriber_tasks::Entity>(
        <subscriber_tasks::RelatedEntity as sea_orm::Iterable>::iter()
            .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
            .collect(),
    );
    builder = builder.register_entity_dataloader_one_to_one(subscriber_tasks::Entity, tokio::spawn);
    builder =
        builder.register_entity_dataloader_one_to_many(subscriber_tasks::Entity, tokio::spawn);
    builder = register_subscriber_tasks_entity_mutations(builder);
    builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
    builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
    builder
}
353
apps/recorder/src/graphql/domains/subscribers.rs
Normal file
353
apps/recorder/src/graphql/domains/subscribers.rs
Normal file
@@ -0,0 +1,353 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use async_graphql::dynamic::{ObjectAccessor, ResolverContext, TypeRef, ValueAccessor};
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
use maplit::btreeset;
|
||||||
|
use sea_orm::{ColumnTrait, Condition, EntityTrait, Iterable, Value as SeaValue};
|
||||||
|
use seaography::{
|
||||||
|
Builder as SeaographyBuilder, BuilderContext, FilterInfo,
|
||||||
|
FilterOperation as SeaographqlFilterOperation, FilterType, FilterTypesMapHelper,
|
||||||
|
FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult, SeaographyError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
auth::{AuthError, AuthUserInfo},
|
||||||
|
graphql::infra::util::{get_column_key, get_entity_column_key, get_entity_key},
|
||||||
|
models::subscribers,
|
||||||
|
};
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
pub static ref SUBSCRIBER_ID_FILTER_INFO: FilterInfo = FilterInfo {
|
||||||
|
type_name: String::from("SubscriberIdFilterInput"),
|
||||||
|
base_type: TypeRef::INT.into(),
|
||||||
|
supported_operations: btreeset! { SeaographqlFilterOperation::Equals },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
fn guard_data_object_accessor_with_subscriber_id(
|
||||||
|
value: ValueAccessor<'_>,
|
||||||
|
column_name: &str,
|
||||||
|
subscriber_id: i32,
|
||||||
|
) -> async_graphql::Result<()> {
|
||||||
|
let obj = value.object()?;
|
||||||
|
|
||||||
|
let subscriber_id_value = obj.try_get(column_name)?;
|
||||||
|
|
||||||
|
let id = subscriber_id_value.i64()?;
|
||||||
|
|
||||||
|
if id == subscriber_id as i64 {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(async_graphql::Error::new("subscriber not match"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn guard_data_object_accessor_with_optional_subscriber_id(
|
||||||
|
value: ValueAccessor<'_>,
|
||||||
|
column_name: &str,
|
||||||
|
subscriber_id: i32,
|
||||||
|
) -> async_graphql::Result<()> {
|
||||||
|
if value.is_null() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
let obj = value.object()?;
|
||||||
|
|
||||||
|
if let Some(subscriber_id_value) = obj.get(column_name) {
|
||||||
|
let id = subscriber_id_value.i64()?;
|
||||||
|
if id == subscriber_id as i64 {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(async_graphql::Error::new("subscriber not match"))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn guard_entity_with_subscriber_id<T>(_context: &BuilderContext, _column: &T::Column) -> FnGuard
|
||||||
|
where
|
||||||
|
T: EntityTrait,
|
||||||
|
<T as EntityTrait>::Model: Sync,
|
||||||
|
{
|
||||||
|
Box::new(move |context: &ResolverContext| -> GuardAction {
|
||||||
|
match context.ctx.data::<AuthUserInfo>() {
|
||||||
|
Ok(_) => GuardAction::Allow,
|
||||||
|
Err(err) => GuardAction::Block(Some(err.message)),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn guard_field_with_subscriber_id<T>(context: &BuilderContext, column: &T::Column) -> FnGuard
|
||||||
|
where
|
||||||
|
T: EntityTrait,
|
||||||
|
<T as EntityTrait>::Model: Sync,
|
||||||
|
{
|
||||||
|
let entity_key = get_entity_key::<T>(context);
|
||||||
|
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
|
||||||
|
let column_key = get_column_key::<T>(context, column);
|
||||||
|
let column_name = Arc::new(context.entity_object.column_name.as_ref()(
|
||||||
|
&entity_key,
|
||||||
|
&column_key,
|
||||||
|
));
|
||||||
|
let entity_create_one_mutation_field_name = Arc::new(format!(
|
||||||
|
"{}{}",
|
||||||
|
entity_name, context.entity_create_one_mutation.mutation_suffix
|
||||||
|
));
|
||||||
|
let entity_create_one_mutation_data_field_name =
|
||||||
|
Arc::new(context.entity_create_one_mutation.data_field.clone());
|
||||||
|
let entity_create_batch_mutation_field_name = Arc::new(format!(
|
||||||
|
"{}{}",
|
||||||
|
entity_name,
|
||||||
|
context.entity_create_batch_mutation.mutation_suffix.clone()
|
||||||
|
));
|
||||||
|
let entity_create_batch_mutation_data_field_name =
|
||||||
|
Arc::new(context.entity_create_batch_mutation.data_field.clone());
|
||||||
|
let entity_update_mutation_field_name = Arc::new(format!(
|
||||||
|
"{}{}",
|
||||||
|
entity_name, context.entity_update_mutation.mutation_suffix
|
||||||
|
));
|
||||||
|
let entity_update_mutation_data_field_name =
|
||||||
|
Arc::new(context.entity_update_mutation.data_field.clone());
|
||||||
|
|
||||||
|
Box::new(move |context: &ResolverContext| -> GuardAction {
|
||||||
|
match context.ctx.data::<AuthUserInfo>() {
|
||||||
|
Ok(user_info) => {
|
||||||
|
let subscriber_id = user_info.subscriber_auth.subscriber_id;
|
||||||
|
let validation_result = match context.field().name() {
|
||||||
|
field if field == entity_create_one_mutation_field_name.as_str() => {
|
||||||
|
if let Some(data_value) = context
|
||||||
|
.args
|
||||||
|
.get(&entity_create_one_mutation_data_field_name)
|
||||||
|
{
|
||||||
|
guard_data_object_accessor_with_subscriber_id(
|
||||||
|
data_value,
|
||||||
|
&column_name,
|
||||||
|
subscriber_id,
|
||||||
|
)
|
||||||
|
.map_err(|inner_error| {
|
||||||
|
AuthError::from_graphql_dynamic_subscribe_id_guard(
|
||||||
|
inner_error,
|
||||||
|
context,
|
||||||
|
&entity_create_one_mutation_data_field_name,
|
||||||
|
&column_name,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
field if field == entity_create_batch_mutation_field_name.as_str() => {
|
||||||
|
if let Some(data_value) = context
|
||||||
|
.args
|
||||||
|
.get(&entity_create_batch_mutation_data_field_name)
|
||||||
|
{
|
||||||
|
data_value
|
||||||
|
.list()
|
||||||
|
.and_then(|data_list| {
|
||||||
|
data_list.iter().try_for_each(|data_item_value| {
|
||||||
|
guard_data_object_accessor_with_optional_subscriber_id(
|
||||||
|
data_item_value,
|
||||||
|
&column_name,
|
||||||
|
subscriber_id,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.map_err(|inner_error| {
|
||||||
|
AuthError::from_graphql_dynamic_subscribe_id_guard(
|
||||||
|
inner_error,
|
||||||
|
context,
|
||||||
|
&entity_create_batch_mutation_data_field_name,
|
||||||
|
&column_name,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
field if field == entity_update_mutation_field_name.as_str() => {
|
||||||
|
if let Some(data_value) =
|
||||||
|
context.args.get(&entity_update_mutation_data_field_name)
|
||||||
|
{
|
||||||
|
guard_data_object_accessor_with_optional_subscriber_id(
|
||||||
|
data_value,
|
||||||
|
&column_name,
|
||||||
|
subscriber_id,
|
||||||
|
)
|
||||||
|
.map_err(|inner_error| {
|
||||||
|
AuthError::from_graphql_dynamic_subscribe_id_guard(
|
||||||
|
inner_error,
|
||||||
|
context,
|
||||||
|
&entity_update_mutation_data_field_name,
|
||||||
|
&column_name,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => Ok(()),
|
||||||
|
};
|
||||||
|
match validation_result {
|
||||||
|
Ok(_) => GuardAction::Allow,
|
||||||
|
Err(err) => GuardAction::Block(Some(err.to_string())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(err) => GuardAction::Block(Some(err.message)),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn generate_subscriber_id_filter_condition<T>(
|
||||||
|
_context: &BuilderContext,
|
||||||
|
column: &T::Column,
|
||||||
|
) -> FnFilterCondition
|
||||||
|
where
|
||||||
|
T: EntityTrait,
|
||||||
|
<T as EntityTrait>::Model: Sync,
|
||||||
|
{
|
||||||
|
let column = *column;
|
||||||
|
Box::new(
|
||||||
|
move |context: &ResolverContext,
|
||||||
|
mut condition: Condition,
|
||||||
|
filter: Option<&ObjectAccessor<'_>>|
|
||||||
|
-> SeaResult<Condition> {
|
||||||
|
match context.ctx.data::<AuthUserInfo>() {
|
||||||
|
Ok(user_info) => {
|
||||||
|
let subscriber_id = user_info.subscriber_auth.subscriber_id;
|
||||||
|
|
||||||
|
if let Some(filter) = filter {
|
||||||
|
for operation in &SUBSCRIBER_ID_FILTER_INFO.supported_operations {
|
||||||
|
match operation {
|
||||||
|
SeaographqlFilterOperation::Equals => {
|
||||||
|
if let Some(value) = filter.get("eq") {
|
||||||
|
let value: i32 = value.i64()?.try_into()?;
|
||||||
|
if value != subscriber_id {
|
||||||
|
return Err(SeaographyError::AsyncGraphQLError(
|
||||||
|
async_graphql::Error::new(
|
||||||
|
"subscriber_id and auth_info does not match",
|
||||||
|
),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => unreachable!("unreachable filter operation for subscriber_id"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
condition = condition.add(column.eq(subscriber_id));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(condition)
|
||||||
|
}
|
||||||
|
Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn generate_default_subscriber_id_input_conversion<T>(
|
||||||
|
context: &BuilderContext,
|
||||||
|
_column: &T::Column,
|
||||||
|
) -> FnInputTypeNoneConversion
|
||||||
|
where
|
||||||
|
T: EntityTrait,
|
||||||
|
<T as EntityTrait>::Model: Sync,
|
||||||
|
{
|
||||||
|
let entity_key = get_entity_key::<T>(context);
|
||||||
|
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
|
||||||
|
let entity_create_one_mutation_field_name = Arc::new(format!(
|
||||||
|
"{}{}",
|
||||||
|
entity_name, context.entity_create_one_mutation.mutation_suffix
|
||||||
|
));
|
||||||
|
let entity_create_batch_mutation_field_name = Arc::new(format!(
|
||||||
|
"{}{}",
|
||||||
|
entity_name,
|
||||||
|
context.entity_create_batch_mutation.mutation_suffix.clone()
|
||||||
|
));
|
||||||
|
Box::new(
|
||||||
|
move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
|
||||||
|
let field_name = context.field().name();
|
||||||
|
if field_name == entity_create_one_mutation_field_name.as_str()
|
||||||
|
|| field_name == entity_create_batch_mutation_field_name.as_str()
|
||||||
|
{
|
||||||
|
match context.ctx.data::<AuthUserInfo>() {
|
||||||
|
Ok(user_info) => {
|
||||||
|
let subscriber_id = user_info.subscriber_auth.subscriber_id;
|
||||||
|
Ok(Some(SeaValue::Int(Some(subscriber_id))))
|
||||||
|
}
|
||||||
|
Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn restrict_subscriber_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
|
||||||
|
where
|
||||||
|
T: EntityTrait,
|
||||||
|
<T as EntityTrait>::Model: Sync,
|
||||||
|
{
|
||||||
|
let entity_key = get_entity_key::<T>(context);
|
||||||
|
let entity_column_key = get_entity_column_key::<T>(context, column);
|
||||||
|
|
||||||
|
context.guards.entity_guards.insert(
|
||||||
|
entity_key.clone(),
|
||||||
|
guard_entity_with_subscriber_id::<T>(context, column),
|
||||||
|
);
|
||||||
|
context.guards.field_guards.insert(
|
||||||
|
entity_column_key.clone(),
|
||||||
|
guard_field_with_subscriber_id::<T>(context, column),
|
||||||
|
);
|
||||||
|
context.filter_types.overwrites.insert(
|
||||||
|
entity_column_key.clone(),
|
||||||
|
Some(FilterType::Custom(
|
||||||
|
SUBSCRIBER_ID_FILTER_INFO.type_name.clone(),
|
||||||
|
)),
|
||||||
|
);
|
||||||
|
context.filter_types.condition_functions.insert(
|
||||||
|
entity_column_key.clone(),
|
||||||
|
generate_subscriber_id_filter_condition::<T>(context, column),
|
||||||
|
);
|
||||||
|
context.types.input_none_conversions.insert(
|
||||||
|
entity_column_key.clone(),
|
||||||
|
generate_default_subscriber_id_input_conversion::<T>(context, column),
|
||||||
|
);
|
||||||
|
|
||||||
|
context.entity_input.update_skips.push(entity_column_key);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
|
||||||
|
restrict_subscriber_for_entity::<subscribers::Entity>(context, &subscribers::Column::Id);
|
||||||
|
for column in subscribers::Column::iter() {
|
||||||
|
if !matches!(column, subscribers::Column::Id) {
|
||||||
|
let key = get_entity_column_key::<subscribers::Entity>(context, &column);
|
||||||
|
context.filter_types.overwrites.insert(key, None);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn register_subscribers_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
|
||||||
|
{
|
||||||
|
let filter_types_map_helper = FilterTypesMapHelper {
|
||||||
|
context: builder.context,
|
||||||
|
};
|
||||||
|
|
||||||
|
builder.schema = builder
|
||||||
|
.schema
|
||||||
|
.register(filter_types_map_helper.generate_filter_input(&SUBSCRIBER_ID_FILTER_INFO));
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
builder.register_entity::<subscribers::Entity>(
|
||||||
|
<subscribers::RelatedEntity as sea_orm::Iterable>::iter()
|
||||||
|
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
|
||||||
|
.collect(),
|
||||||
|
);
|
||||||
|
builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
|
||||||
|
builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
|
||||||
|
}
|
||||||
|
|
||||||
|
builder
|
||||||
|
}
|
||||||
20
apps/recorder/src/graphql/domains/subscription_bangumi.rs
Normal file
@@ -0,0 +1,20 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
    graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_bangumi,
};

pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
        context,
        &subscription_bangumi::Column::SubscriberId,
    );
}

pub fn register_subscription_bangumi_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    seaography::register_entity!(builder, subscription_bangumi);

    builder
}
20
apps/recorder/src/graphql/domains/subscription_episode.rs
Normal file
@@ -0,0 +1,20 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
    graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_episode,
};

pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscription_episode::Entity>(
        context,
        &subscription_episode::Column::SubscriberId,
    );
}

pub fn register_subscription_episode_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    seaography::register_entity!(builder, subscription_episode);

    builder
}
214
apps/recorder/src/graphql/domains/subscriptions.rs
Normal file
214
apps/recorder/src/graphql/domains/subscriptions.rs
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use async_graphql::dynamic::{FieldValue, TypeRef};
|
||||||
|
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
|
||||||
|
use seaography::{
|
||||||
|
Builder as SeaographyBuilder, BuilderContext, EntityObjectBuilder, EntityQueryFieldBuilder,
|
||||||
|
get_filter_conditions,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
errors::RecorderError,
|
||||||
|
graphql::{
|
||||||
|
domains::subscribers::restrict_subscriber_for_entity,
|
||||||
|
infra::custom::generate_entity_filter_mutation_field,
|
||||||
|
},
|
||||||
|
models::{
|
||||||
|
subscriber_tasks,
|
||||||
|
subscriptions::{self, SubscriptionTrait},
|
||||||
|
},
|
||||||
|
task::SubscriberTask,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) {
|
||||||
|
restrict_subscriber_for_entity::<subscriptions::Entity>(
|
||||||
|
context,
|
||||||
|
&subscriptions::Column::SubscriberId,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn register_subscriptions_to_schema_builder(
|
||||||
|
mut builder: SeaographyBuilder,
|
||||||
|
) -> SeaographyBuilder {
|
||||||
|
builder.register_enumeration::<subscriptions::SubscriptionCategory>();
|
||||||
|
seaography::register_entity!(builder, subscriptions);
|
||||||
|
|
||||||
|
let context = builder.context;
|
||||||
|
|
||||||
|
let entity_object_builder = EntityObjectBuilder { context };
|
||||||
|
let entity_query_field = EntityQueryFieldBuilder { context };
|
||||||
|
|
||||||
|
{
|
||||||
|
let sync_one_feeds_incremental_mutation_name = format!(
|
||||||
|
"{}SyncOneFeedsIncremental",
|
||||||
|
entity_query_field.type_name::<subscriptions::Entity>()
|
||||||
|
);
|
||||||
|
|
||||||
|
let sync_one_feeds_incremental_mutation = generate_entity_filter_mutation_field::<
|
||||||
|
subscriptions::Entity,
|
||||||
|
_,
|
||||||
|
_,
|
||||||
|
>(
|
||||||
|
builder.context,
|
||||||
|
sync_one_feeds_incremental_mutation_name,
|
||||||
|
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
|
||||||
|
Arc::new(|resolver_ctx, app_ctx, filters| {
|
||||||
|
let filters_condition =
|
||||||
|
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
|
||||||
|
|
||||||
|
Box::pin(async move {
|
||||||
|
let db = app_ctx.db();
|
||||||
|
|
||||||
|
let subscription_model = subscriptions::Entity::find()
|
||||||
|
.filter(filters_condition)
|
||||||
|
.one(db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| RecorderError::ModelEntityNotFound {
|
||||||
|
entity: "Subscription".into(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let subscription =
|
||||||
|
subscriptions::Subscription::try_from_model(&subscription_model)?;
|
||||||
|
|
||||||
|
let task_service = app_ctx.task();
|
||||||
|
|
||||||
|
let task_id = task_service
|
||||||
|
.add_subscriber_task(
|
||||||
|
subscription_model.subscriber_id,
|
||||||
|
SubscriberTask::SyncOneSubscriptionFeedsIncremental(
|
||||||
|
subscription.into(),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let task_model = subscriber_tasks::Entity::find()
|
||||||
|
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
|
||||||
|
.one(db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| RecorderError::ModelEntityNotFound {
|
||||||
|
entity: "SubscriberTask".into(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(Some(FieldValue::owned_any(task_model)))
|
||||||
|
})
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutations.push(sync_one_feeds_incremental_mutation);
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let sync_one_feeds_full_mutation_name = format!(
|
||||||
|
"{}SyncOneFeedsFull",
|
||||||
|
entity_query_field.type_name::<subscriptions::Entity>()
|
||||||
|
);
|
||||||
|
|
||||||
|
let sync_one_feeds_full_mutation = generate_entity_filter_mutation_field::<
|
||||||
|
subscriptions::Entity,
|
||||||
|
_,
|
||||||
|
_,
|
||||||
|
>(
|
||||||
|
builder.context,
|
||||||
|
sync_one_feeds_full_mutation_name,
|
||||||
|
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
|
||||||
|
Arc::new(|resolver_ctx, app_ctx, filters| {
|
||||||
|
let filters_condition =
|
||||||
|
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
|
||||||
|
|
||||||
|
Box::pin(async move {
|
||||||
|
let db = app_ctx.db();
|
||||||
|
|
||||||
|
let subscription_model = subscriptions::Entity::find()
|
||||||
|
.filter(filters_condition)
|
||||||
|
.one(db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| RecorderError::ModelEntityNotFound {
|
||||||
|
entity: "Subscription".into(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let subscription =
|
||||||
|
subscriptions::Subscription::try_from_model(&subscription_model)?;
|
||||||
|
|
||||||
|
let task_service = app_ctx.task();
|
||||||
|
|
||||||
|
let task_id = task_service
|
||||||
|
.add_subscriber_task(
|
||||||
|
subscription_model.subscriber_id,
|
||||||
|
SubscriberTask::SyncOneSubscriptionFeedsFull(subscription.into()),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let task_model = subscriber_tasks::Entity::find()
|
||||||
|
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
|
||||||
|
.one(db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| RecorderError::ModelEntityNotFound {
|
||||||
|
entity: "SubscriberTask".into(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(Some(FieldValue::owned_any(task_model)))
|
||||||
|
})
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutations.push(sync_one_feeds_full_mutation);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
let sync_one_sources_mutation_name = format!(
|
||||||
|
"{}SyncOneSources",
|
||||||
|
entity_query_field.type_name::<subscriptions::Entity>()
|
||||||
|
);
|
||||||
|
|
||||||
|
let sync_one_sources_mutation = generate_entity_filter_mutation_field::<
|
||||||
|
subscriptions::Entity,
|
||||||
|
_,
|
||||||
|
_,
|
||||||
|
>(
|
||||||
|
builder.context,
|
||||||
|
sync_one_sources_mutation_name,
|
||||||
|
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
|
||||||
|
Arc::new(|resolver_ctx, app_ctx, filters| {
|
||||||
|
let filters_condition =
|
||||||
|
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
|
||||||
|
|
||||||
|
Box::pin(async move {
|
||||||
|
let db = app_ctx.db();
|
||||||
|
|
||||||
|
let subscription_model = subscriptions::Entity::find()
|
||||||
|
.filter(filters_condition)
|
||||||
|
.one(db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| RecorderError::ModelEntityNotFound {
|
||||||
|
entity: "Subscription".into(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let subscription =
|
||||||
|
subscriptions::Subscription::try_from_model(&subscription_model)?;
|
||||||
|
|
||||||
|
let task_service = app_ctx.task();
|
||||||
|
|
||||||
|
let task_id = task_service
|
||||||
|
.add_subscriber_task(
|
||||||
|
subscription_model.subscriber_id,
|
||||||
|
SubscriberTask::SyncOneSubscriptionSources(subscription.into()),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let task_model = subscriber_tasks::Entity::find()
|
||||||
|
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
|
||||||
|
.one(db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| RecorderError::ModelEntityNotFound {
|
||||||
|
entity: "SubscriberTask".into(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(Some(FieldValue::owned_any(task_model)))
|
||||||
|
})
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutations.push(sync_one_sources_mutation);
|
||||||
|
}
|
||||||
|
|
||||||
|
builder
|
||||||
|
}
|
||||||
@@ -1,46 +0,0 @@
use async_graphql::dynamic::{ObjectAccessor, TypeRef};
use maplit::btreeset;
use once_cell::sync::OnceCell;
use sea_orm::{ColumnTrait, Condition, EntityTrait, Value};
use seaography::{BuilderContext, FilterInfo, FilterOperation, SeaResult};

pub static SUBSCRIBER_ID_FILTER_INFO: OnceCell<FilterInfo> = OnceCell::new();

pub fn init_custom_filter_info() {
    SUBSCRIBER_ID_FILTER_INFO.get_or_init(|| FilterInfo {
        type_name: String::from("SubscriberIdFilterInput"),
        base_type: TypeRef::INT.into(),
        supported_operations: btreeset! { FilterOperation::Equals },
    });
}

pub type FnFilterCondition =
    Box<dyn Fn(Condition, &ObjectAccessor) -> SeaResult<Condition> + Send + Sync>;

pub fn subscriber_id_condition_function<T>(
    _context: &BuilderContext,
    column: &T::Column,
) -> FnFilterCondition
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let column = *column;
    Box::new(move |mut condition, filter| {
        let subscriber_id_filter_info = SUBSCRIBER_ID_FILTER_INFO.get().unwrap();
        let operations = &subscriber_id_filter_info.supported_operations;
        for operation in operations {
            match operation {
                FilterOperation::Equals => {
                    if let Some(value) = filter.get("eq") {
                        let value: i32 = value.i64()?.try_into()?;
                        let value = Value::Int(Some(value));
                        condition = condition.add(column.eq(value));
                    }
                }
                _ => unreachable!("unreachable filter operation for subscriber_id"),
            }
        }
        Ok(condition)
    })
}
@@ -1,249 +0,0 @@
|
|||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use async_graphql::dynamic::{ResolverContext, ValueAccessor};
|
|
||||||
use sea_orm::EntityTrait;
|
|
||||||
use seaography::{BuilderContext, FnGuard, GuardAction};
|
|
||||||
|
|
||||||
use super::util::get_entity_key;
|
|
||||||
use crate::{
|
|
||||||
auth::{AuthError, AuthUserInfo},
|
|
||||||
graphql::util::get_column_key,
|
|
||||||
};
|
|
||||||
|
|
||||||
fn guard_data_object_accessor_with_subscriber_id(
|
|
||||||
value: ValueAccessor<'_>,
|
|
||||||
column_name: &str,
|
|
||||||
subscriber_id: i32,
|
|
||||||
) -> async_graphql::Result<()> {
|
|
||||||
let obj = value.object()?;
|
|
||||||
|
|
||||||
let subscriber_id_value = obj.try_get(column_name)?;
|
|
||||||
|
|
||||||
let id = subscriber_id_value.i64()?;
|
|
||||||
|
|
||||||
if id == subscriber_id as i64 {
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
Err(async_graphql::Error::new("subscriber not match"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn guard_data_object_accessor_with_optional_subscriber_id(
|
|
||||||
value: ValueAccessor<'_>,
|
|
||||||
column_name: &str,
|
|
||||||
subscriber_id: i32,
|
|
||||||
) -> async_graphql::Result<()> {
|
|
||||||
if value.is_null() {
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
let obj = value.object()?;
|
|
||||||
|
|
||||||
if let Some(subscriber_id_value) = obj.get(column_name) {
|
|
||||||
let id = subscriber_id_value.i64()?;
|
|
||||||
if id == subscriber_id as i64 {
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
Err(async_graphql::Error::new("subscriber not match"))
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn guard_filter_object_accessor_with_subscriber_id(
|
|
||||||
value: ValueAccessor<'_>,
|
|
||||||
column_name: &str,
|
|
||||||
subscriber_id: i32,
|
|
||||||
) -> async_graphql::Result<()> {
|
|
||||||
let obj = value.object()?;
|
|
||||||
let subscriber_id_filter_input_value = obj.try_get(column_name)?;
|
|
||||||
|
|
||||||
let subscriber_id_filter_input_obj = subscriber_id_filter_input_value.object()?;
|
|
||||||
|
|
||||||
let subscriber_id_value = subscriber_id_filter_input_obj.try_get("eq")?;
|
|
||||||
|
|
||||||
let id = subscriber_id_value.i64()?;
|
|
||||||
if id == subscriber_id as i64 {
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
Err(async_graphql::Error::new("subscriber not match"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn guard_entity_with_subscriber_id<T>(context: &BuilderContext, column: &T::Column) -> FnGuard
|
|
||||||
where
|
|
||||||
T: EntityTrait,
|
|
||||||
<T as EntityTrait>::Model: Sync,
|
|
||||||
{
|
|
||||||
let entity_key = get_entity_key::<T>(context);
|
|
||||||
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
|
|
||||||
let column_key = get_column_key::<T>(context, column);
|
|
||||||
let column_name = Arc::new(context.entity_object.column_name.as_ref()(
|
|
||||||
&entity_key,
|
|
||||||
&column_key,
|
|
||||||
));
|
|
||||||
let entity_create_one_mutation_field_name = Arc::new(format!(
|
|
||||||
"{}{}",
|
|
||||||
entity_name, context.entity_create_one_mutation.mutation_suffix
|
|
||||||
));
|
|
||||||
let entity_create_one_mutation_data_field_name =
|
|
||||||
Arc::new(context.entity_create_one_mutation.data_field.clone());
|
|
||||||
let entity_create_batch_mutation_field_name = Arc::new(format!(
|
|
||||||
"{}{}",
|
|
||||||
entity_name,
|
|
||||||
context.entity_create_batch_mutation.mutation_suffix.clone()
|
|
||||||
));
|
|
||||||
let entity_create_batch_mutation_data_field_name =
|
|
||||||
Arc::new(context.entity_create_batch_mutation.data_field.clone());
|
|
||||||
let entity_delete_mutation_field_name = Arc::new(format!(
|
|
||||||
"{}{}",
|
|
||||||
entity_name,
|
|
||||||
context.entity_delete_mutation.mutation_suffix.clone()
|
|
||||||
));
|
|
||||||
let entity_delete_mutation_filter_field_name =
|
|
||||||
Arc::new(context.entity_delete_mutation.filter_field.clone());
|
|
||||||
let entity_update_mutation_field_name = Arc::new(format!(
|
|
||||||
"{}{}",
|
|
||||||
entity_name, context.entity_update_mutation.mutation_suffix
|
|
||||||
));
|
|
||||||
let entity_update_mutation_filter_field_name =
|
|
||||||
Arc::new(context.entity_update_mutation.filter_field.clone());
|
|
||||||
let entity_update_mutation_data_field_name =
|
|
||||||
Arc::new(context.entity_update_mutation.data_field.clone());
|
|
||||||
let entity_query_field_name = Arc::new(entity_name);
|
|
||||||
let entity_query_filter_field_name = Arc::new(context.entity_query_field.filters.clone());
|
|
||||||
Box::new(move |context: &ResolverContext| -> GuardAction {
|
|
||||||
match context.ctx.data::<AuthUserInfo>() {
|
|
||||||
Ok(user_info) => {
|
|
||||||
let subscriber_id = user_info.subscriber_auth.subscriber_id;
|
|
||||||
let validation_result = match context.field().name() {
|
|
||||||
                    field if field == entity_create_one_mutation_field_name.as_str() => context
                        .args
                        .try_get(&entity_create_one_mutation_data_field_name)
                        .and_then(|data_value| {
                            guard_data_object_accessor_with_subscriber_id(
                                data_value,
                                &column_name,
                                subscriber_id,
                            )
                        })
                        .map_err(|inner_error| {
                            AuthError::from_graphql_subscribe_id_guard(
                                inner_error,
                                context,
                                &entity_create_one_mutation_data_field_name,
                                &column_name,
                            )
                        }),
                    field if field == entity_create_batch_mutation_field_name.as_str() => context
                        .args
                        .try_get(&entity_create_batch_mutation_data_field_name)
                        .and_then(|data_value| {
                            data_value.list().and_then(|data_list| {
                                data_list.iter().try_for_each(|data_item_value| {
                                    guard_data_object_accessor_with_subscriber_id(
                                        data_item_value,
                                        &column_name,
                                        subscriber_id,
                                    )
                                })
                            })
                        })
                        .map_err(|inner_error| {
                            AuthError::from_graphql_subscribe_id_guard(
                                inner_error,
                                context,
                                &entity_create_batch_mutation_data_field_name,
                                &column_name,
                            )
                        }),
                    field if field == entity_delete_mutation_field_name.as_str() => context
                        .args
                        .try_get(&entity_delete_mutation_filter_field_name)
                        .and_then(|filter_value| {
                            guard_filter_object_accessor_with_subscriber_id(
                                filter_value,
                                &column_name,
                                subscriber_id,
                            )
                        })
                        .map_err(|inner_error| {
                            AuthError::from_graphql_subscribe_id_guard(
                                inner_error,
                                context,
                                &entity_delete_mutation_filter_field_name,
                                &column_name,
                            )
                        }),
                    field if field == entity_update_mutation_field_name.as_str() => context
                        .args
                        .try_get(&entity_update_mutation_filter_field_name)
                        .and_then(|filter_value| {
                            guard_filter_object_accessor_with_subscriber_id(
                                filter_value,
                                &column_name,
                                subscriber_id,
                            )
                        })
                        .map_err(|inner_error| {
                            AuthError::from_graphql_subscribe_id_guard(
                                inner_error,
                                context,
                                &entity_update_mutation_filter_field_name,
                                &column_name,
                            )
                        })
                        .and_then(|_| {
                            match context.args.get(&entity_update_mutation_data_field_name) {
                                Some(data_value) => {
                                    guard_data_object_accessor_with_optional_subscriber_id(
                                        data_value,
                                        &column_name,
                                        subscriber_id,
                                    )
                                    .map_err(|inner_error| {
                                        AuthError::from_graphql_subscribe_id_guard(
                                            inner_error,
                                            context,
                                            &entity_update_mutation_data_field_name,
                                            &column_name,
                                        )
                                    })
                                }
                                None => Ok(()),
                            }
                        }),
                    field if field == entity_query_field_name.as_str() => context
                        .args
                        .try_get(&entity_query_filter_field_name)
                        .and_then(|filter_value| {
                            guard_filter_object_accessor_with_subscriber_id(
                                filter_value,
                                &column_name,
                                subscriber_id,
                            )
                        })
                        .map_err(|inner_error| {
                            AuthError::from_graphql_subscribe_id_guard(
                                inner_error,
                                context,
                                &entity_query_filter_field_name,
                                &column_name,
                            )
                        }),
                    field => Err(AuthError::from_graphql_subscribe_id_guard(
                        async_graphql::Error::new("unsupported graphql field"),
                        context,
                        field,
                        "",
                    )),
                };

                match validation_result {
                    Ok(_) => GuardAction::Allow,
                    Err(err) => GuardAction::Block(Some(err.to_string())),
                }
            }
            Err(err) => GuardAction::Block(Some(err.message)),
        }
    })
}
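For context, the match above is the body of a per-entity guard closure: it maps the resolved GraphQL field name to a subscriber-id check and returns a GuardAction. Guards of this shape are stored by entity object name in builder_context.guards.entity_guards and invoked before each resolver (see custom.rs later in this diff). A minimal registration sketch, assuming the guard map accepts a boxed Fn(&ResolverContext) -> GuardAction and using illustrative names (the subscribers entity and build_subscriber_id_guard helper are not part of this diff):

    // Hypothetical registration sketch: key the guard by the entity's GraphQL object
    // name so it runs before every query/mutation resolver for that entity.
    let entity_object_builder = EntityObjectBuilder { context: builder_context };
    let object_name = entity_object_builder.type_name::<subscribers::Entity>();
    builder_context.guards.entity_guards.insert(
        object_name,
        build_subscriber_id_guard::<subscribers::Entity>(builder_context),
    );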
69  apps/recorder/src/graphql/infra/crypto.rs  Normal file
@@ -0,0 +1,69 @@
use std::sync::Arc;

use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::{EntityTrait, Value as SeaValue};
use seaography::{BuilderContext, SeaResult};

use crate::{
    app::AppContextTrait,
    graphql::infra::util::{get_column_key, get_entity_key},
};

pub fn register_crypto_column_input_conversion_to_schema_context<T>(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
    column: &T::Column,
) where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let column_name = get_column_key::<T>(context, column);
    let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
    let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);

    context.types.input_conversions.insert(
        format!("{entity_name}.{column_name}"),
        Box::new(
            move |_resolve_context: &ResolverContext<'_>,
                  value: &ValueAccessor|
                  -> SeaResult<sea_orm::Value> {
                let source = value.string()?;
                let encrypted = ctx.crypto().encrypt_string(source.into())?;
                Ok(encrypted.into())
            },
        ),
    );
}

pub fn register_crypto_column_output_conversion_to_schema_context<T>(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
    column: &T::Column,
) where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let column_name = get_column_key::<T>(context, column);
    let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
    let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);

    context.types.output_conversions.insert(
        format!("{entity_name}.{column_name}"),
        Box::new(
            move |value: &sea_orm::Value| -> SeaResult<async_graphql::Value> {
                if let SeaValue::String(s) = value {
                    if let Some(s) = s {
                        let decrypted = ctx.crypto().decrypt_string(s)?;
                        Ok(async_graphql::Value::String(decrypted))
                    } else {
                        Ok(async_graphql::Value::Null)
                    }
                } else {
                    Err(async_graphql::Error::new("crypto column must be string column").into())
                }
            },
        ),
    );
}
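Both registration helpers are meant to be called while the seaography BuilderContext is being assembled, before the dynamic schema is built. A minimal usage sketch based on the signatures above, with hypothetical names (credential_3rd entity, Password column, app_ctx, builder_context) that are not part of this diff:

    // Hypothetical call site: encrypt the column on the way into the database and
    // decrypt it on the way out, so GraphQL clients only ever see plaintext.
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        &mut builder_context,
        app_ctx.clone(),
        &credential_3rd::Column::Password,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        &mut builder_context,
        app_ctx.clone(),
        &credential_3rd::Column::Password,
    );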
76  apps/recorder/src/graphql/infra/custom.rs  Normal file
@@ -0,0 +1,76 @@
use std::{pin::Pin, sync::Arc};

use async_graphql::dynamic::{
    Field, FieldFuture, FieldValue, InputValue, ResolverContext, TypeRef, ValueAccessor,
};
use sea_orm::EntityTrait;
use seaography::{BuilderContext, EntityObjectBuilder, FilterInputBuilder, GuardAction};

use crate::{app::AppContextTrait, errors::RecorderResult};

pub type FilterMutationFn = Arc<
    dyn for<'a> Fn(
            &ResolverContext<'a>,
            Arc<dyn AppContextTrait>,
            Option<ValueAccessor<'_>>,
        ) -> Pin<
            Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>,
        > + Send
        + Sync,
>;

pub fn generate_entity_filter_mutation_field<T, N, R>(
    builder_context: &'static BuilderContext,
    field_name: N,
    type_ref: R,
    mutation_fn: FilterMutationFn,
) -> Field
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
    N: Into<String>,
    R: Into<TypeRef>,
{
    let entity_filter_input_builder = FilterInputBuilder {
        context: builder_context,
    };
    let entity_object_builder = EntityObjectBuilder {
        context: builder_context,
    };
    let object_name: String = entity_object_builder.type_name::<T>();

    let context = builder_context;

    let guard = builder_context.guards.entity_guards.get(&object_name);

    Field::new(field_name, type_ref, move |ctx| {
        let mutation_fn = mutation_fn.clone();
        FieldFuture::new(async move {
            let guard_flag = if let Some(guard) = guard {
                (*guard)(&ctx)
            } else {
                GuardAction::Allow
            };

            if let GuardAction::Block(reason) = guard_flag {
                return Err::<Option<_>, async_graphql::Error>(async_graphql::Error::new(
                    reason.unwrap_or("Entity guard triggered.".into()),
                ));
            }

            let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;

            let filters = ctx.args.get(&context.entity_delete_mutation.filter_field);

            let result = mutation_fn(&ctx, app_ctx.clone(), filters)
                .await
                .map_err(async_graphql::Error::new_with_source)?;

            Ok(result)
        })
    })
    .argument(InputValue::new(
        &context.entity_delete_mutation.filter_field,
        TypeRef::named(entity_filter_input_builder.type_name(&object_name)),
    ))
}
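A usage sketch for the helper above, with hypothetical names (a subscriptions entity module and subscription_delete_mutation_fn) that are not part of this diff; it reuses the imports from custom.rs and shows one way to satisfy the FilterMutationFn alias with a plain function, which coerces into the trait object more reliably than an inline closure:

    // Hypothetical call site: expose a custom delete-style mutation that receives the
    // already-parsed filter argument and returns how many rows were affected.
    fn subscription_delete_mutation_fn<'a>(
        _ctx: &ResolverContext<'a>,
        _app_ctx: Arc<dyn AppContextTrait>,
        _filters: Option<ValueAccessor<'_>>,
    ) -> Pin<Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>> {
        Box::pin(async move {
            // ... run the real delete here and compute the affected row count ...
            Ok(Some(FieldValue::value(0)))
        })
    }

    let field = generate_entity_filter_mutation_field::<subscriptions::Entity, _, _>(
        builder_context,
        "subscriptionDelete",
        TypeRef::named_nn(TypeRef::INT),
        Arc::new(subscription_delete_mutation_fn),
    );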
1380  apps/recorder/src/graphql/infra/json.rs  Normal file (diff suppressed because it is too large)
Some files were not shown because too many files have changed in this diff.