Compare commits
b2f327d48f...master
39 Commits

| SHA1 |
|---|
| 9fd3ae6563 |
| cde3361458 |
| f055011b86 |
| 16429a44b4 |
| fe0b7e88e6 |
| 28dd9da6ac |
| 02c16a2972 |
| 324427513c |
| c12b9b360a |
| cc06142050 |
| 6726cafff4 |
| 35312ea1ff |
| 721eee9c88 |
| 421f9d0293 |
| 7eb4e41708 |
| a2254bbe80 |
| 1b5bdadf10 |
| 882b29d7a1 |
| c60f6f511e |
| 07955286f1 |
| 258eeddc74 |
| b09e9e6aaa |
| 0df371adb7 |
| 8144986a48 |
| d2aab7369d |
| 946d4e8c2c |
| 0b5f25a263 |
| c669d66969 |
| 082e08e7f4 |
| a3fd03d32a |
| 5645645c5f |
| ac7d1efb8d |
| a676061b3e |
| 1c34cebbde |
| 22a2ce0559 |
| 313b1bf1ba |
| 66413f92e3 |
| 0fcbc6bbe9 |
| f1d8318500 |
@@ -3,4 +3,3 @@ recorder-playground = "run -p recorder --example playground -- --environment de
 
 [build]
 rustflags = ["-Zthreads=8", "-Zshare-generics=y"]
-# rustflags = ["-Zthreads=8"]
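Both surviving rustflags are unstable `-Z` options, so this `[build]` section presumes a nightly toolchain: `-Zthreads=8` enables the parallel rustc frontend, and `-Zshare-generics=y` lets crates in the workspace reuse each other's monomorphized generics instead of re-instantiating them. The deleted line was a commented-out older variant of the same setting.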
104 .vscode/settings.json vendored
@@ -1,56 +1,52 @@
 {
   "npm.packageManager": "pnpm",
-  "rust-analyzer.showUnlinkedFileNotification": false,
   "[javascript]": {
     "editor.defaultFormatter": "vscode.typescript-language-features",
     "editor.formatOnSave": true
   },
   "[json]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[jsonc]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[typescript]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[typescriptreact]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "editor.codeActionsOnSave": {
     "quickfix.biome": "explicit",
     "source.organizeImports.biome": "explicit"
   },
   "emmet.showExpandedAbbreviation": "never",
   "prettier.enable": false,
   "typescript.tsdk": "node_modules/typescript/lib",
-  "rust-analyzer.cargo.features": [
-    "testcontainers",
-    "playground"
-  ],
+  "rust-analyzer.showUnlinkedFileNotification": false,
   "sqltools.connections": [
     {
       "previewLimit": 50,
       "server": "localhost",
       "port": 5432,
       "driver": "PostgreSQL",
       "name": "konobangu-dev",
       "database": "konobangu",
       "username": "konobangu"
-    },
-    {
-      "previewLimit": 50,
-      "server": "localhost",
-      "port": 32770,
-      "askForPassword": true,
-      "driver": "PostgreSQL",
-      "name": "docker-pgsql",
-      "database": "konobangu",
-      "username": "konobangu"
     }
-  ]
+  ],
+  "rust-analyzer.cargo.features": "all",
+  "rust-analyzer.testExplorer": true
+  // https://github.com/rust-lang/rust/issues/141540
+  // "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
+  // "rust-analyzer.check.extraEnv": {
+  //   "CARGO_TARGET_DIR": "target/rust-analyzer"
+  // },
+  // "rust-analyzer.cargo.extraEnv": {
+  //   "CARGO_TARGET_DIR": "target/analyzer"
+  // }
 }
112 .vscode/tasks.json vendored Normal file
@@ -0,0 +1,112 @@
+{
+  "version": "2.0.0",
+  "tasks": [
+    {
+      "label": "dev-all",
+      "dependsOn": [
+        "dev-webui",
+        "dev-recorder",
+        "dev-proxy",
+        "dev-codegen-wait",
+        "dev-deps",
+      ],
+      "dependsOrder": "parallel",
+      "group": {
+        "kind": "build",
+        "isDefault": false,
+      },
+      "presentation": {
+        "group": "new-group",
+        "echo": true,
+        "reveal": "always",
+        "panel": "shared",
+        "clear": false
+      }
+    },
+    {
+      "label": "dev-webui",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-webui"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "always",
+        "focus": true,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-deps",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-deps"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-codegen-wait",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-codegen-wait"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-recorder",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-recorder"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-proxy",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-proxy",
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    }
+  ]
+}
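`dev-all` is a composite task: its `dependsOn` list with `"dependsOrder": "parallel"` starts the five `just` recipes at once, each flagged `isBackground` with an empty `problemMatcher` so VS Code does not block waiting for them to exit, and each in its own dedicated terminal panel. Only `dev-webui` reveals and focuses its panel; the other four stay hidden unless opened manually.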
3267 Cargo.lock generated
File diff suppressed because it is too large
75 Cargo.toml
@@ -1,23 +1,53 @@
+# cargo-features = ["codegen-backend"]
+
 [workspace]
 members = [
   "packages/testing-torrents",
   "packages/util",
+  "packages/util-derive",
   "packages/fetch",
   "packages/downloader",
   "apps/recorder",
+  "apps/proxy",
 ]
 resolver = "2"
 
+[profile.dev]
+debug = 0
+# https://github.com/rust-lang/rust/issues/141540
+incremental = false
+# [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
+# codegen-backend = "cranelift"
+
 [workspace.dependencies]
+testing-torrents = { path = "./packages/testing-torrents" }
+util = { path = "./packages/util" }
+util-derive = { path = "./packages/util-derive" }
+fetch = { path = "./packages/fetch" }
+downloader = { path = "./packages/downloader" }
+recorder = { path = "./apps/recorder" }
+
+reqwest = { version = "0.12.20", features = [
+  "charset",
+  "http2",
+  "json",
+  "macos-system-configuration",
+  "cookies",
+] }
 moka = "0.12"
 futures = "0.3"
 quirks_path = "0.1"
 snafu = { version = "0.8", features = ["futures"] }
 testcontainers = { version = "0.24" }
-testcontainers-modules = { version = "0.12" }
+testcontainers-modules = { version = "0.12.1" }
 testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
 serde = { version = "1", features = ["derive"] }
-tokio = { version = "1", features = ["macros", "fs", "rt-multi-thread"] }
+tokio = { version = "1.45.1", features = [
+  "macros",
+  "fs",
+  "rt-multi-thread",
+  "signal",
+] }
 serde_json = "1"
 async-trait = "0.1"
 tracing = "0.1"
@@ -30,22 +60,31 @@ serde_with = "3"
 regex = "1.11"
 lazy_static = "1.5"
 axum = { version = "0.8.3", features = ["macros"] }
-reqwest = { version = "0.12", default-features = false, features = [
-  "charset",
-  "http2",
-  "json",
-  "macos-system-configuration",
-  "rustls-tls",
-  "cookies",
-] }
 tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
-axum-extra = "0.10"
-testing-torrents = { path = "./packages/testing-torrents" }
-util = { path = "./packages/util" }
-fetch = { path = "./packages/fetch" }
-downloader = { path = "./packages/downloader" }
+axum-extra = { version = "0.10", features = ["typed-header"] }
+mockito = { version = "1.6.1" }
+convert_case = "0.8"
+color-eyre = "0.6.5"
+inquire = "0.7.5"
+image = "0.25.6"
+uuid = { version = "1.6.0", features = ["v4"] }
+maplit = "1.0.2"
+once_cell = "1.20.2"
+rand = "0.9.1"
+rust_decimal = "1.37.2"
+base64 = "0.22.1"
+nom = "8.0.0"
+percent-encoding = "2.3.1"
+num-traits = "0.2.19"
+http = "1.2.0"
+async-stream = "0.3.6"
+serde_variant = "0.1.3"
+tracing-appender = "0.2.3"
+clap = "4.5.40"
+ipnetwork = "0.21.1"
+typed-builder = "0.21.0"
+nanoid = "0.4.0"
+webp = "0.3.0"
 
 [patch.crates-io]
-jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
-seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "10ba248" }
+seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }
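Three moves in this manifest: the internal path dependencies are consolidated at the head of `[workspace.dependencies]` (with `util-derive` and `recorder` newly added) so member crates can pull them in via `workspace = true`; `reqwest` is pinned at 0.12.20 and no longer lists the `rustls-tls` feature it previously carried; and the new `[profile.dev]` turns off debug info and incremental compilation, the latter citing rust-lang/rust#141540.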
@@ -6,13 +6,14 @@
     "build": "email build",
     "dev": "email dev --port 5003",
     "export": "email export",
-    "clean": "git clean -xdf .cache .turbo dist node_modules",
+    "clean": "git clean -xdf .cache dist node_modules",
     "typecheck": "tsc --noEmit --emitDeclarationOnly false"
   },
   "dependencies": {
-    "@react-email/components": "0.0.31",
+    "@react-email/components": "^0.0.42",
     "react": "^19.0.0",
-    "react-email": "3.0.4"
+    "react-email": "^4.0.16",
+    "@konobangu/email": "workspace:*"
   },
   "devDependencies": {
     "@types/react": "19.0.1"
@@ -2,8 +2,12 @@
   "extends": "../../tsconfig.base.json",
   "compilerOptions": {
     "composite": true,
-    "jsx": "react-jsx"
+    "jsx": "react-jsx",
+    "jsxImportSource": "react",
+    "module": "ESNext",
+    "moduleResolution": "bundler"
   },
+  "references": [{ "path": "../../packages/email" }],
   "include": ["**/*.ts", "**/*.tsx"],
   "exclude": ["node_modules"]
 }
1 apps/proxy/.whistle/rules/files/1.mikan_doppel Normal file
@@ -0,0 +1 @@
+^https://mikanani.me/*** http://127.0.0.1:5005/$1 excludeFilter://^**/***.svg excludeFilter://^**/***.css excludeFilter://^**/***.js
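The rule forwards every mikanani.me request to the local doppel server while `$1` preserves the captured path, so a request such as `https://mikanani.me/Home/Classic/1` (a hypothetical example URL) would be answered by `http://127.0.0.1:5005/Home/Classic/1`; the three `excludeFilter` patterns let `.svg`, `.css`, and `.js` assets bypass the rewrite.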
@@ -1 +1 @@
-{"filesOrder":["konobangu"],"selectedList":["konobangu"],"disabledDefalutRules":true,"defalutRules":""}
+{"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""}
19 apps/proxy/Cargo.toml Normal file
@@ -0,0 +1,19 @@
+[package]
+name = "proxy"
+version = "0.1.0"
+edition = "2024"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+[lib]
+name = "proxy"
+path = "src/lib.rs"
+
+[[bin]]
+name = "mikan_doppel"
+path = "src/bin/mikan_doppel.rs"
+
+[dependencies]
+recorder = { workspace = true }
+tokio = { workspace = true }
+tracing-subscriber = { workspace = true }
+tracing = { workspace = true }
@@ -3,13 +3,13 @@
   "version": "0.1.0",
   "private": true,
   "scripts": {
-    "start": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
-    "dev": "pnpm run start"
+    "whistle": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
+    "mikan_doppel": "cargo run -p proxy --bin mikan_doppel",
+    "dev": "npm-run-all -p mikan_doppel whistle"
   },
   "keywords": [],
   "license": "MIT",
   "devDependencies": {
-    "cross-env": "^7.0.3",
-    "whistle": "^2.9.93"
+    "whistle": "^2.9.99"
   }
 }
22 apps/proxy/src/bin/mikan_doppel.rs Normal file
@@ -0,0 +1,22 @@
+use std::time::Duration;
+
+use recorder::{errors::RecorderResult, test_utils::mikan::MikanMockServer};
+use tracing::Level;
+
+#[allow(unused_variables)]
+#[tokio::main]
+async fn main() -> RecorderResult<()> {
+    tracing_subscriber::fmt()
+        .with_max_level(Level::DEBUG)
+        .init();
+
+    let mut mikan_server = MikanMockServer::new_with_port(5005).await.unwrap();
+
+    let resources_mock = mikan_server.mock_resources_with_doppel();
+
+    let login_mock = mikan_server.mock_get_login_page();
+
+    loop {
+        tokio::time::sleep(Duration::from_secs(1)).await;
+    }
+}
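This binary is the server side of the whistle rule above: it boots the recorder crate's `MikanMockServer` on port 5005, installs the doppel resource and login-page mocks, and then idles in a one-second sleep loop so the mocks stay alive while the proxy package's `dev` script (`npm-run-all -p mikan_doppel whistle`) keeps it running alongside whistle on port 8899.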
@@ -11,3 +11,7 @@ BASIC_PASSWORD = "konobangu"
 # OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
 # OIDC_EXTRA_CLAIM_KEY = ""
 # OIDC_EXTRA_CLAIM_VALUE = ""
+# MIKAN_PROXY = ""
+# MIKAN_PROXY_AUTH_HEADER = ""
+# MIKAN_NO_PROXY = ""
+# MIKAN_PROXY_ACCEPT_INVALID_CERTS = "true"
17 apps/recorder/.env.dev Normal file
@@ -0,0 +1,17 @@
+HOST="konobangu.com"
+DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
+STORAGE_DATA_DIR = "./data"
+AUTH_TYPE = "basic" # or oidc
+BASIC_USER = "konobangu"
+BASIC_PASSWORD = "konobangu"
+# OIDC_ISSUER="https://auth.logto.io/oidc"
+# OIDC_AUDIENCE = "https://konobangu.com/api"
+# OIDC_CLIENT_ID = "client_id"
+# OIDC_CLIENT_SECRET = "client_secret" # optional
+# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
+# OIDC_EXTRA_CLAIM_KEY = ""
+# OIDC_EXTRA_CLAIM_VALUE = ""
+MIKAN_PROXY = "http://127.0.0.1:8899"
+# MIKAN_PROXY_AUTH_HEADER = ""
+# MIKAN_NO_PROXY = ""
+MIKAN_PROXY_ACCEPT_INVALID_CERTS = true
2 apps/recorder/.gitignore vendored
@@ -27,3 +27,5 @@ node_modules
 dist/
 temp/*
 !temp/.gitkeep
+tests/resources/mikan/classic_episodes/*/*
+!tests/resources/mikan/classic_episodes/parquet/tiny.parquet
@@ -2,8 +2,20 @@
 name = "recorder"
 version = "0.1.0"
 edition = "2024"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[features]
+default = ["jxl"]
+playground = ["dep:inquire", "dep:color-eyre", "dep:polars"]
+testcontainers = [
+  "dep:testcontainers",
+  "dep:testcontainers-modules",
+  "dep:testcontainers-ext",
+  "downloader/testcontainers",
+  "testcontainers-modules/postgres",
+]
+jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
+
 [lib]
 name = "recorder"
 path = "src/lib.rs"
@@ -13,18 +25,32 @@ name = "recorder_cli"
 path = "src/bin/main.rs"
 required-features = []
 
-[features]
-default = []
-playground = ["dep:mockito"]
-testcontainers = [
-  "dep:testcontainers",
-  "dep:testcontainers-modules",
-  "dep:testcontainers-ext",
-  "downloader/testcontainers",
-  "testcontainers-modules/postgres",
-]
+[[example]]
+name = "mikan_collect_classic_eps"
+path = "examples/mikan_collect_classic_eps.rs"
+required-features = ["playground"]
+
+[[example]]
+name = "mikan_doppel_season_subscription"
+path = "examples/mikan_doppel_season_subscription.rs"
+required-features = ["playground"]
+
+[[example]]
+name = "mikan_doppel_subscriber_subscription"
+path = "examples/mikan_doppel_subscriber_subscription.rs"
+required-features = ["playground"]
+
+[[example]]
+name = "playground"
+path = "examples/playground.rs"
+required-features = ["playground"]
 
 [dependencies]
+downloader = { workspace = true }
+util = { workspace = true }
+util-derive = { workspace = true }
+fetch = { workspace = true }
+
 serde = { workspace = true }
 tokio = { workspace = true }
 serde_json = { workspace = true }
@@ -49,30 +75,45 @@ serde_with = { workspace = true }
 moka = { workspace = true }
 chrono = { workspace = true }
 tracing-subscriber = { workspace = true }
+mockito = { workspace = true }
+color-eyre = { workspace = true, optional = true }
+inquire = { workspace = true, optional = true }
+convert_case = { workspace = true }
+image = { workspace = true }
+uuid = { workspace = true }
+maplit = { workspace = true }
+once_cell = { workspace = true }
+rand = { workspace = true }
+rust_decimal = { workspace = true }
+base64 = { workspace = true }
+nom = { workspace = true }
+percent-encoding = { workspace = true }
+num-traits = { workspace = true }
+http = { workspace = true }
+async-stream = { workspace = true }
+serde_variant = { workspace = true }
+tracing-appender = { workspace = true }
+clap = { workspace = true }
+ipnetwork = { workspace = true }
+typed-builder = { workspace = true }
+nanoid = { workspace = true }
+webp = { workspace = true }
+
 sea-orm = { version = "1.1", features = [
   "sqlx-sqlite",
   "sqlx-postgres",
-  "runtime-tokio-rustls",
+  "runtime-tokio",
   "macros",
   "debug-print",
 ] }
 figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
-uuid = { version = "1.6.0", features = ["v4"] }
-sea-orm-migration = { version = "1.1", features = ["runtime-tokio-rustls"] }
+sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
 rss = "2"
 fancy-regex = "0.14"
-maplit = "1.0.2"
 lightningcss = "1.0.0-alpha.66"
 html-escape = "0.2.13"
 opendal = { version = "0.53", features = ["default", "services-fs"] }
-zune-image = "0.4.15"
-once_cell = "1.20.2"
-scraper = "0.23"
-
-jwt-authorizer = "0.15.0"
-log = "0.4"
+scraper = "0.23.1"
 async-graphql = { version = "7", features = ["dynamic-schema"] }
 async-graphql-axum = "7"
 seaography = { version = "1.1", features = [
@@ -83,8 +124,8 @@ seaography = { version = "1.1", features = [
   "with-decimal",
   "with-bigdecimal",
   "with-postgres-array",
+  "with-json-as-scalar",
 ] }
-base64 = "0.22.1"
 tower = "0.5.2"
 tower-http = { version = "0.6", features = [
   "trace",
@@ -97,31 +138,32 @@ tower-http = { version = "0.6", features = [
   "compression-full",
 ] }
 tera = "1.20.0"
-openidconnect = { version = "4", features = ["rustls-tls"] }
-dotenv = "0.15.0"
-http = "1.2.0"
-async-stream = "0.3.6"
-serde_variant = "0.1.3"
-tracing-appender = "0.2.3"
-clap = "4.5.31"
-ipnetwork = "0.21.1"
-typed-builder = "0.21.0"
-serde_yaml = "0.9.34"
+openidconnect = { version = "4" }
+dotenvy = "0.15.7"
+jpegxl-rs = { version = "0.11.2", optional = true }
+jpegxl-sys = { version = "0.11.2", optional = true }
 apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
 apalis-sql = { version = "0.7", features = ["postgres"] }
 cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
-rand = "0.9.1"
-rust_decimal = "1.37.1"
 reqwest_cookie_store = "0.8.0"
-mockito = { version = "1.6.1", optional = true }
-downloader = { workspace = true }
-util = { workspace = true }
-fetch = { workspace = true }
-nanoid = "0.4.0"
+jwtk = "0.4.0"
+mime_guess = "2.0.5"
+icu_properties = "2.0.1"
+icu = "2.0.0"
+tracing-tree = "0.4.0"
+num_cpus = "1.17.0"
+headers-accept = "0.1.4"
+polars = { version = "0.49.1", features = [
+  "parquet",
+  "lazy",
+  "diagonal_concat",
+], optional = true }
 
 [dev-dependencies]
+inquire = { workspace = true }
+color-eyre = { workspace = true }
 serial_test = "3"
-insta = { version = "1", features = ["redactions", "yaml", "filters"] }
+insta = { version = "1", features = ["redactions", "toml", "filters"] }
 rstest = "0.25"
 ctor = "0.4.0"
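All four `[[example]]` targets require the `playground` feature, which is what gates the optional `inquire`, `color-eyre`, and `polars` dependencies; this matches the `recorder-playground` alias at the top of the compare (which passes `--example playground`) and the switch of `rust-analyzer.cargo.features` to `"all"`, which keeps the feature-gated examples analyzable without enumerating features by hand.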
584 apps/recorder/examples/mikan_collect_classic_eps.rs Normal file
@@ -0,0 +1,584 @@
+use std::collections::HashSet;
+
+use chrono::{DateTime, Duration, FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
+use fetch::{HttpClientConfig, fetch_html};
+use itertools::Itertools;
+use lazy_static::lazy_static;
+use nom::{
+    IResult, Parser,
+    branch::alt,
+    bytes::complete::{tag, take, take_till1},
+    character::complete::space1,
+    combinator::map,
+};
+use recorder::{
+    errors::{RecorderError, RecorderResult},
+    extract::{
+        html::extract_inner_text_from_element_ref,
+        mikan::{MikanClient, MikanConfig, MikanEpisodeHash, MikanFansubHash},
+    },
+};
+use regex::Regex;
+use scraper::{ElementRef, Html, Selector};
+use snafu::FromString;
+use url::Url;
+
+lazy_static! {
+    static ref TEST_FOLDER: std::path::PathBuf =
+        if cfg!(any(test, debug_assertions, feature = "playground")) {
+            std::path::PathBuf::from(format!(
+                "{}/tests/resources/mikan/classic_episodes",
+                env!("CARGO_MANIFEST_DIR")
+            ))
+        } else {
+            std::path::PathBuf::from("tests/resources/mikan/classic_episodes")
+        };
+}
+
+lazy_static! {
+    static ref TOTAL_PAGE_REGEX: Regex =
+        Regex::new(r#"\$\(\'\.classic-view-pagination2\'\)\.bootpag\(\{\s*total:\s*(\d+)"#)
+            .unwrap();
+}
+
+pub struct MikanClassicEpisodeTableRow {
+    pub id: i32,
+    pub publish_at: DateTime<Utc>,
+    pub mikan_fansub_id: Option<String>,
+    pub fansub_name: Option<String>,
+    pub mikan_episode_id: String,
+    pub original_name: String,
+    pub magnet_link: Option<String>,
+    pub file_size: Option<String>,
+    pub torrent_link: Option<String>,
+}
+
+impl MikanClassicEpisodeTableRow {
+    fn timezone() -> FixedOffset {
+        FixedOffset::east_opt(8 * 3600).unwrap()
+    }
+
+    fn fixed_date_parser(input: &str) -> IResult<&str, NaiveDate> {
+        alt((
+            map(tag("今天"), move |_| {
+                Utc::now().with_timezone(&Self::timezone()).date_naive()
+            }),
+            map(tag("昨天"), move |_| {
+                Utc::now().with_timezone(&Self::timezone()).date_naive() - Duration::days(1)
+            }),
+        ))
+        .parse(input)
+    }
+
+    fn formatted_date_parser(input: &str) -> IResult<&str, NaiveDate> {
+        let (remain, date_str) = take_till1(|c: char| c.is_whitespace()).parse(input)?;
+        let date = NaiveDate::parse_from_str(date_str, "%Y/%m/%d").map_err(|_| {
+            nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
+        })?;
+        Ok((remain, date))
+    }
+
+    fn date_parser(input: &str) -> IResult<&str, NaiveDate> {
+        alt((Self::fixed_date_parser, Self::formatted_date_parser)).parse(input)
+    }
+
+    fn time_parser(input: &str) -> IResult<&str, NaiveTime> {
+        let (remain, time_str) = take(5usize).parse(input)?;
+        let time = NaiveTime::parse_from_str(time_str, "%H:%M").map_err(|_| {
+            nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
+        })?;
+        Ok((remain, time))
+    }
+
+    fn extract_publish_at(text: &str) -> Option<DateTime<Utc>> {
+        let (_, (date, _, time)) = (Self::date_parser, space1, Self::time_parser)
+            .parse(text)
+            .ok()?;
+        let local_dt = Self::timezone()
+            .from_local_datetime(&date.and_time(time))
+            .single()?;
+        Some(local_dt.with_timezone(&Utc))
+    }
+
+    pub fn from_element_ref(
+        row: ElementRef<'_>,
+        rev_id: i32,
+        idx: i32,
+        mikan_base_url: &Url,
+    ) -> RecorderResult<Self> {
+        let publish_at_selector = &Selector::parse("td:nth-of-type(1)").unwrap();
+        let fansub_selector = &Selector::parse("td:nth-of-type(2) > a").unwrap();
+        let original_name_selector =
+            &Selector::parse("td:nth-of-type(3) > a:nth-of-type(1)").unwrap();
+        let magnet_link_selector =
+            &Selector::parse("td:nth-of-type(3) > a:nth-of-type(2)").unwrap();
+        let file_size_selector = &Selector::parse("td:nth-of-type(4)").unwrap();
+        let torrent_link_selector = &Selector::parse("td:nth-of-type(5) > a").unwrap();
+
+        let publish_at = row
+            .select(publish_at_selector)
+            .next()
+            .map(extract_inner_text_from_element_ref)
+            .and_then(|e| Self::extract_publish_at(&e));
+
+        let (mikan_fansub_hash, fansub_name) = row
+            .select(fansub_selector)
+            .next()
+            .and_then(|e| {
+                e.attr("href")
+                    .and_then(|s| mikan_base_url.join(s).ok())
+                    .and_then(|u| MikanFansubHash::from_homepage_url(&u))
+                    .map(|h| (h, extract_inner_text_from_element_ref(e)))
+            })
+            .unzip();
+
+        let (mikan_episode_hash, original_name) = row
+            .select(original_name_selector)
+            .next()
+            .and_then(|el| {
+                el.attr("href")
+                    .and_then(|s| mikan_base_url.join(s).ok())
+                    .and_then(|u| MikanEpisodeHash::from_homepage_url(&u))
+                    .map(|h| (h, extract_inner_text_from_element_ref(el)))
+            })
+            .unzip();
+
+        let magnet_link = row
+            .select(magnet_link_selector)
+            .next()
+            .and_then(|el| el.attr("data-clipboard-text"));
+
+        let file_size = row
+            .select(file_size_selector)
+            .next()
+            .map(extract_inner_text_from_element_ref);
+
+        let torrent_link = row
+            .select(torrent_link_selector)
+            .next()
+            .and_then(|el| el.attr("href"));
+
+        if let (Some(mikan_episode_hash), Some(original_name), Some(publish_at)) = (
+            mikan_episode_hash.as_ref(),
+            original_name.as_ref(),
+            publish_at.as_ref(),
+        ) {
+            Ok(Self {
+                id: rev_id * 1000 + idx,
+                publish_at: *publish_at,
+                mikan_fansub_id: mikan_fansub_hash.map(|h| h.mikan_fansub_id.clone()),
+                fansub_name,
+                mikan_episode_id: mikan_episode_hash.mikan_episode_id.clone(),
+                original_name: original_name.clone(),
+                magnet_link: magnet_link.map(|s| s.to_string()),
+                file_size: file_size.map(|s| s.to_string()),
+                torrent_link: torrent_link.map(|s| s.to_string()),
+            })
+        } else {
+            let mut missing_fields = vec![];
+            if mikan_episode_hash.is_none() {
+                missing_fields.push("mikan_episode_id");
+            }
+            if original_name.is_none() {
+                missing_fields.push("original_name");
+            }
+            if publish_at.is_none() {
+                missing_fields.push("publish_at");
+            }
+            Err(RecorderError::without_source(format!(
+                "Failed to parse episode table row, missing fields: {missing_fields:?}, row \
+                 index: {idx}"
+            )))
+        }
+    }
+}
+
+pub struct MikanClassicEpisodeTablePage {
+    pub page: i32,
+    pub total: i32,
+    pub html: String,
+    pub rows: Vec<MikanClassicEpisodeTableRow>,
+}
+
+impl MikanClassicEpisodeTablePage {
+    pub fn from_html(
+        html: String,
+        mikan_base_url: &Url,
+        page: i32,
+        updated_info: Option<(i32, i32)>,
+    ) -> RecorderResult<Self> {
+        let tr_selector = &Selector::parse("tbody tr").unwrap();
+        let doc = Html::parse_document(&html);
+        if let Some(mut total) = TOTAL_PAGE_REGEX
+            .captures(&html)
+            .and_then(|c| c.get(1))
+            .and_then(|s| s.as_str().parse::<i32>().ok())
+        {
+            if let Some((_, update_total)) = updated_info {
+                total = update_total;
+            }
+
+            let rev_id = total - page;
+            let rows = doc
+                .select(tr_selector)
+                .rev()
+                .enumerate()
+                .map(|(idx, tr)| {
+                    MikanClassicEpisodeTableRow::from_element_ref(
+                        tr,
+                        rev_id,
+                        idx as i32,
+                        mikan_base_url,
+                    )
+                })
+                .collect::<RecorderResult<Vec<_>>>()?;
+            Ok(Self {
+                page,
+                total,
+                html,
+                rows,
+            })
+        } else {
+            Err(RecorderError::without_source(
+                "Failed to parse pagination meta and rows".into(),
+            ))
+        }
+    }
+
+    pub fn save_to_files(&self) -> RecorderResult<()> {
+        use polars::prelude::*;
+
+        let rev_id = self.total - self.page;
+        let parquet_path = TEST_FOLDER.join(format!("parquet/rev_{rev_id}.parquet"));
+        let csv_path = TEST_FOLDER.join(format!("csv/rev_{rev_id}.csv"));
+        let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
+
+        std::fs::write(html_path, self.html.clone())?;
+
+        let mut id_vec = Vec::new();
+        let mut publish_at_vec = Vec::new();
+        let mut mikan_fansub_id_vec = Vec::new();
+        let mut fansub_name_vec = Vec::new();
+        let mut mikan_episode_id_vec = Vec::new();
+        let mut original_name_vec = Vec::new();
+        let mut magnet_link_vec = Vec::new();
+        let mut file_size_vec = Vec::new();
+        let mut torrent_link_vec = Vec::new();
+
+        for row in &self.rows {
+            id_vec.push(row.id);
+            publish_at_vec.push(row.publish_at.to_rfc3339());
+            mikan_fansub_id_vec.push(row.mikan_fansub_id.clone());
+            fansub_name_vec.push(row.fansub_name.clone());
+            mikan_episode_id_vec.push(row.mikan_episode_id.clone());
+            original_name_vec.push(row.original_name.clone());
+            magnet_link_vec.push(row.magnet_link.clone());
+            file_size_vec.push(row.file_size.clone());
+            torrent_link_vec.push(row.torrent_link.clone());
+        }
+
+        let df = df![
+            "id" => id_vec,
+            "publish_at_timestamp" => publish_at_vec,
+            "mikan_fansub_id" => mikan_fansub_id_vec,
+            "fansub_name" => fansub_name_vec,
+            "mikan_episode_id" => mikan_episode_id_vec,
+            "original_name" => original_name_vec,
+            "magnet_link" => magnet_link_vec,
+            "file_size" => file_size_vec,
+            "torrent_link" => torrent_link_vec,
+        ]
+        .map_err(|e| {
+            let message = format!("Failed to create DataFrame: {e}");
+            RecorderError::with_source(Box::new(e), message)
+        })?;
+
+        let mut parquet_file = std::fs::File::create(&parquet_path)?;
+
+        ParquetWriter::new(&mut parquet_file)
+            .finish(&mut df.clone())
+            .map_err(|e| {
+                let message = format!("Failed to write parquet file: {e}");
+                RecorderError::with_source(Box::new(e), message)
+            })?;
+
+        let mut csv_file = std::fs::File::create(&csv_path)?;
+
+        CsvWriter::new(&mut csv_file)
+            .include_header(true)
+            .with_quote_style(QuoteStyle::Always)
+            .finish(&mut df.clone())
+            .map_err(|e| {
+                let message = format!("Failed to write csv file: {e}");
+                RecorderError::with_source(Box::new(e), message)
+            })?;
+
+        println!(
+            "[{}/{}] Saved {} rows to rev_{}.{{parquet,html,csv}}",
+            self.page,
+            self.total,
+            self.rows.len(),
+            rev_id
+        );
+
+        Ok(())
+    }
+
+    pub fn waiting_rev_ids(total: i32) -> RecorderResult<Vec<i32>> {
+        let dir = TEST_FOLDER.join("csv");
+
+        let files = std::fs::read_dir(dir)?;
+
+        let rev_ids = files
+            .filter_map(|f| f.ok())
+            .filter_map(|f| {
+                f.path().file_stem().and_then(|s| {
+                    s.to_str().and_then(|s| {
+                        if s.starts_with("rev_") {
+                            s.replace("rev_", "").parse::<i32>().ok()
+                        } else {
+                            None
+                        }
+                    })
+                })
+            })
+            .collect::<HashSet<_>>();
+
+        Ok((0..total)
+            .filter(|rev_id| !rev_ids.contains(rev_id))
+            .collect::<Vec<_>>())
+    }
+}
+
+async fn scrape_mikan_classic_episode_table_page(
+    mikan_client: &MikanClient,
+    page: i32,
+    updated_info: Option<(i32, i32)>,
+) -> RecorderResult<MikanClassicEpisodeTablePage> {
+    let mikan_base_url = mikan_client.base_url();
+    let url = mikan_base_url.join(&format!("/Home/Classic/{page}"))?;
+
+    if let Some((rev_id, update_total)) = updated_info.as_ref() {
+        let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
+        if html_path.exists() {
+            let html = std::fs::read_to_string(&html_path)?;
+            println!("[{page}/{update_total}] html exists, skipping fetch");
+            return MikanClassicEpisodeTablePage::from_html(
+                html,
+                mikan_base_url,
+                page,
+                updated_info,
+            );
+        }
+    }
+
+    let total = if let Some((_, update_total)) = updated_info.as_ref() {
+        update_total.to_string()
+    } else {
+        "Unknown".to_string()
+    };
+
+    println!("[{page}/{total}] fetching html...");
+
+    let html = fetch_html(mikan_client, url).await?;
+
+    println!("[{page}/{total}] fetched html done");
+
+    std::fs::write(TEST_FOLDER.join("html/temp.html"), html.clone())?;
+
+    MikanClassicEpisodeTablePage::from_html(html, mikan_base_url, page, updated_info)
+}
+
+async fn scrape_mikan_classic_episode_table_page_from_rev_id(
+    mikan_client: &MikanClient,
+    total: i32,
+    rev_idx: i32,
+) -> RecorderResult<MikanClassicEpisodeTablePage> {
+    let page = total - rev_idx;
+
+    scrape_mikan_classic_episode_table_page(mikan_client, page, Some((rev_idx, total))).await
+}
+
+async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()> {
+    use polars::prelude::*;
+
+    let dir = TEST_FOLDER.join("parquet");
+    let files = std::fs::read_dir(dir)?;
+
+    let parquet_paths = files
+        .filter_map(|f| f.ok())
+        .filter_map(|f| {
+            let path = f.path();
+            if let Some(ext) = path.extension()
+                && ext == "parquet"
+                && path
+                    .file_stem()
+                    .is_some_and(|f| f.to_string_lossy().starts_with("rev_"))
+            {
+                Some(path)
+            } else {
+                None
+            }
+        })
+        .collect::<Vec<_>>();
+
+    if parquet_paths.is_empty() {
+        return Err(RecorderError::without_source(
+            "No parquet files found to merge".into(),
+        ));
+    }
+
+    println!("Found {} parquet files to merge", parquet_paths.len());
+
+    // Read and merge all parquet files
+    let mut all_dfs = Vec::new();
+    for path in &parquet_paths {
+        println!("Reading {path:?}");
+        let file = std::fs::File::open(path)?;
+        let df = ParquetReader::new(file).finish().map_err(|e| {
+            let message = format!("Failed to read parquet file {path:?}: {e}");
+            RecorderError::with_source(Box::new(e), message)
+        })?;
+        all_dfs.push(df);
+    }
+
+    let lazy_frames: Vec<LazyFrame> = all_dfs.into_iter().map(|df| df.lazy()).collect();
+
+    let merged_df = concat_lf_diagonal(&lazy_frames, UnionArgs::default())
+        .map_err(|e| {
+            let message = format!("Failed to concat DataFrames: {e}");
+            RecorderError::with_source(Box::new(e), message)
+        })?
+        .sort(
+            ["publish_at_timestamp"],
+            SortMultipleOptions::default().with_order_descending(true),
+        )
+        .unique(
+            Some(vec![
+                "mikan_fansub_id".to_string(),
+                "mikan_episode_id".to_string(),
+            ]),
+            UniqueKeepStrategy::First,
+        )
+        .collect()
+        .map_err(|e| {
+            let message = format!("Failed to collect lazy DataFrame: {e}");
+            RecorderError::with_source(Box::new(e), message)
+        })?;
+
+    fn select_columns_and_write(
+        merged_df: DataFrame,
+        name: &str,
+        columns: &[&str],
+    ) -> RecorderResult<()> {
+        let result_df = merged_df
+            .lazy()
+            .sort(["publish_at_timestamp"], SortMultipleOptions::default())
+            .select(columns.iter().map(|c| col(*c)).collect_vec())
+            .collect()
+            .map_err(|e| {
+                let message = format!("Failed to sort and select columns: {e}");
+                RecorderError::with_source(Box::new(e), message)
+            })?;
+
+        let output_path = TEST_FOLDER.join(format!("parquet/{name}.parquet"));
+        let mut output_file = std::fs::File::create(&output_path)?;
+
+        ParquetWriter::new(&mut output_file)
+            .set_parallel(true)
+            .with_compression(ParquetCompression::Zstd(Some(
+                ZstdLevel::try_new(22).unwrap(),
+            )))
+            .finish(&mut result_df.clone())
+            .map_err(|e| {
+                let message = format!("Failed to write merged parquet file: {e}");
+                RecorderError::with_source(Box::new(e), message)
+            })?;
+
+        println!("Merged {} rows into {output_path:?}", result_df.height());
+        Ok(())
+    }
+
+    select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
+    // select_columns_and_write(
+    //     merged_df.clone(),
+    //     "lite",
+    //     &[
+    //         "mikan_fansub_id",
+    //         "fansub_name",
+    //         "mikan_episode_id",
+    //         "original_name",
+    //     ],
+    // )?;
+    // select_columns_and_write(
+    //     merged_df,
+    //     "full",
+    //     &[
+    //         "id",
+    //         "publish_at_timestamp",
+    //         "mikan_fansub_id",
+    //         "fansub_name",
+    //         "mikan_episode_id",
+    //         "original_name",
+    //         "magnet_link",
+    //         "file_size",
+    //         "torrent_link",
+    //     ],
+    // )?;
+
+    Ok(())
+}
+
+#[tokio::main]
+async fn main() -> RecorderResult<()> {
+    std::fs::create_dir_all(TEST_FOLDER.join("html"))?;
+    std::fs::create_dir_all(TEST_FOLDER.join("parquet"))?;
+    std::fs::create_dir_all(TEST_FOLDER.join("csv"))?;
+
+    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
+        http_client: HttpClientConfig {
+            exponential_backoff_max_retries: Some(3),
+            leaky_bucket_max_tokens: Some(2),
+            leaky_bucket_initial_tokens: Some(1),
+            leaky_bucket_refill_tokens: Some(1),
+            leaky_bucket_refill_interval: Some(std::time::Duration::from_millis(1000)),
+            user_agent: Some(
+                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
+                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
+                    .to_string(),
+            ),
+            ..Default::default()
+        },
+        base_url: Url::parse("https://mikanani.me")?,
+    })
+    .await?;
+
+    let first_page_and_pagination_info =
+        scrape_mikan_classic_episode_table_page(&mikan_scrape_client, 1, None).await?;
+
+    let total_page = first_page_and_pagination_info.total;
+
+    first_page_and_pagination_info.save_to_files()?;
+
+    let next_rev_ids = MikanClassicEpisodeTablePage::waiting_rev_ids(total_page)?;
+
+    for todo_rev_id in next_rev_ids {
+        let page = scrape_mikan_classic_episode_table_page_from_rev_id(
+            &mikan_scrape_client,
+            total_page,
+            todo_rev_id,
+        )
+        .await?;
+
+        page.save_to_files()?;
+    }
+
+    // Merge all parquet files
+    println!("\nMerging all parquet files...");
+
+    merge_mikan_classic_episodes_and_strip_columns().await?;
+
+    println!("Merge completed!");
+
+    Ok(())
+}
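The collector is deliberately resumable: each listing page is persisted as `rev_{total - page}.{html,parquet,csv}`, `waiting_rev_ids` diffs the `csv` directory against `0..total` to find pages still to fetch, and an existing `rev_*.html` short-circuits the network call. The final merge diagonally concatenates the per-page frames, sorts by publish time descending so `unique` on `(mikan_fansub_id, mikan_episode_id)` keeps the newest row, and writes the zstd-compressed `tiny.parquet` that the new `.gitignore` rule whitelists.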
249
apps/recorder/examples/mikan_doppel_season_subscription.rs
Normal file
249
apps/recorder/examples/mikan_doppel_season_subscription.rs
Normal file
@@ -0,0 +1,249 @@
|
|||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
|
use color_eyre::{Result, eyre::OptionExt};
|
||||||
|
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
|
||||||
|
use inquire::{Password, Text, validator::Validation};
|
||||||
|
use recorder::{
|
||||||
|
crypto::UserPassCredential,
|
||||||
|
extract::mikan::{
|
||||||
|
MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
|
||||||
|
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
|
||||||
|
extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
|
||||||
|
},
|
||||||
|
test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
|
||||||
|
};
|
||||||
|
use scraper::Html;
|
||||||
|
use tokio::fs;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> Result<()> {
|
||||||
|
tracing_subscriber::fmt()
|
||||||
|
.with_max_level(tracing::Level::INFO)
|
||||||
|
.init();
|
||||||
|
std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;
|
||||||
|
|
||||||
|
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
|
||||||
|
http_client: HttpClientConfig {
|
||||||
|
exponential_backoff_max_retries: Some(3),
|
||||||
|
leaky_bucket_max_tokens: Some(2),
|
||||||
|
leaky_bucket_initial_tokens: Some(0),
|
||||||
|
leaky_bucket_refill_tokens: Some(1),
|
||||||
|
leaky_bucket_refill_interval: Some(Duration::from_millis(1000)),
|
||||||
|
user_agent: Some(
|
||||||
|
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
|
||||||
|
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
|
||||||
|
.to_string(),
|
||||||
|
),
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
base_url: Url::parse("https://mikanani.me")?,
|
||||||
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let username_validator = |input: &str| {
|
||||||
|
if input.trim().is_empty() {
|
||||||
|
Ok(Validation::Invalid("Username cannot be empty".into()))
|
||||||
|
} else {
|
||||||
|
Ok(Validation::Valid)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let password_validator = |input: &str| {
|
||||||
|
if input.trim().is_empty() {
|
||||||
|
Ok(Validation::Invalid("Password cannot be empty".into()))
|
||||||
|
} else {
|
||||||
|
Ok(Validation::Valid)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let username = Text::new("Please enter your mikan username:")
|
||||||
|
.with_validator(username_validator)
|
||||||
|
.prompt()?;
|
||||||
|
let password = Password::new("Please enter your mikan password:")
|
||||||
|
.without_confirmation()
|
||||||
|
.with_display_mode(inquire::PasswordDisplayMode::Masked)
|
||||||
|
.with_validator(password_validator)
|
||||||
|
.prompt()?;
|
||||||
|
|
||||||
|
let mikan_scrape_client = mikan_scrape_client
|
||||||
|
.fork_with_userpass_credential(UserPassCredential {
|
||||||
|
username,
|
||||||
|
password,
|
||||||
|
user_agent: None,
|
||||||
|
cookies: None,
|
||||||
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
tracing::info!("Checking if logged in...");
|
||||||
|
if !mikan_scrape_client.has_login().await? {
|
||||||
|
tracing::info!("Logging in to mikan...");
|
||||||
|
mikan_scrape_client.login().await?;
|
||||||
|
tracing::info!("Logged in to mikan");
|
||||||
|
}
|
||||||
|
|
||||||
|
let mikan_base_url = mikan_scrape_client.base_url().clone();
|
||||||
|
tracing::info!("Scraping season subscription...");
|
||||||
|
let season_subscription =
|
||||||
|
fs::read("tests/resources/mikan/BangumiCoverFlow-2025-spring.html").await?;
|
||||||
|
let html = Html::parse_fragment(String::from_utf8(season_subscription)?.as_str());
|
||||||
|
let bangumi_index_list =
|
||||||
|
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(&html, &mikan_base_url);
|
||||||
|
|
||||||
|
for bangumi_index in bangumi_index_list {
|
||||||
|
let bangumi_meta = {
|
||||||
|
let bangumi_expand_subscribed_url = build_mikan_bangumi_expand_subscribed_url(
|
||||||
|
mikan_base_url.clone(),
|
||||||
|
bangumi_index.mikan_bangumi_id.as_ref(),
|
||||||
|
);
|
||||||
|
let bangumi_expand_subscribed_doppel_path =
|
||||||
|
MikanDoppelPath::new(bangumi_expand_subscribed_url.clone());
|
||||||
|
tracing::info!(
|
||||||
|
bangumi_title = bangumi_index.bangumi_title,
|
||||||
|
"Scraping bangumi expand subscribed..."
|
||||||
|
);
|
||||||
|
let bangumi_expand_subscribed_data =
|
||||||
|
if !bangumi_expand_subscribed_doppel_path.exists_any() {
|
||||||
|
let bangumi_expand_subscribed_data =
|
||||||
|
fetch_html(&mikan_scrape_client, bangumi_expand_subscribed_url).await?;
|
||||||
|
bangumi_expand_subscribed_doppel_path.write(&bangumi_expand_subscribed_data)?;
|
||||||
|
                tracing::info!(
                    bangumi_title = bangumi_index.bangumi_title,
                    "Bangumi expand subscribed saved"
                );
                bangumi_expand_subscribed_data
            } else {
                tracing::info!(
                    bangumi_title = bangumi_index.bangumi_title,
                    "Bangumi expand subscribed already exists"
                );
                String::from_utf8(bangumi_expand_subscribed_doppel_path.read()?)?
            };

            let html = Html::parse_fragment(&bangumi_expand_subscribed_data);
            extract_mikan_bangumi_meta_from_expand_subscribed_fragment(
                &html,
                bangumi_index.clone(),
                mikan_base_url.clone(),
            )
            .ok_or_eyre(format!(
                "Failed to extract bangumi meta from expand subscribed fragment: {:?}",
                bangumi_index.bangumi_title
            ))
        }?;
        {
            if let Some(poster_url) = bangumi_meta.origin_poster_src.as_ref() {
                let poster_doppel_path = MikanDoppelPath::new(poster_url.clone());
                tracing::info!(
                    title = bangumi_meta.bangumi_title,
                    "Scraping bangumi poster..."
                );
                if !poster_doppel_path.exists_any() {
                    let poster_data = fetch_image(&mikan_scrape_client, poster_url.clone()).await?;
                    poster_doppel_path.write(&poster_data)?;
                    tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi poster saved");
                } else {
                    tracing::info!(
                        title = bangumi_meta.bangumi_title,
                        "Bangumi poster already exists"
                    );
                }
            }
        }
        {
            let bangumi_homepage_url = bangumi_meta
                .bangumi_hash()
                .build_homepage_url(mikan_base_url.clone());
            let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
            tracing::info!(
                title = bangumi_meta.bangumi_title,
                "Scraping bangumi homepage..."
            );
            if !bangumi_homepage_doppel_path.exists_any() {
                let bangumi_homepage_data =
                    fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
                bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
                tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi homepage saved");
            } else {
                tracing::info!(
                    title = bangumi_meta.bangumi_title,
                    "Bangumi homepage already exists"
                );
            }
        }
        let rss_items = {
            let bangumi_rss_url = bangumi_meta
                .bangumi_hash()
                .build_rss_url(mikan_base_url.clone());
            let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
            tracing::info!(
                title = bangumi_meta.bangumi_title,
                "Scraping bangumi rss..."
            );
            let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
                let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
                bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
                tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi rss saved");
                bangumi_rss_data
            } else {
                tracing::info!(
                    title = bangumi_meta.bangumi_title,
                    "Bangumi rss already exists"
                );
                String::from_utf8(bangumi_rss_doppel_path.read()?)?
            };
            let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
            rss_items
                .into_iter()
                .map(MikanRssEpisodeItem::try_from)
                .collect::<Result<Vec<_>, _>>()
        }?;
        for rss_item in rss_items {
            {
                let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
                let episode_homepage_doppel_path =
                    MikanDoppelPath::new(episode_homepage_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode...");
                if !episode_homepage_doppel_path.exists_any() {
                    let episode_homepage_data =
                        fetch_html(&mikan_scrape_client, episode_homepage_url).await?;
                    episode_homepage_doppel_path.write(&episode_homepage_data)?;
                    tracing::info!(title = rss_item.title, "Episode saved");
                } else {
                    tracing::info!(title = rss_item.title, "Episode already exists");
                };
            }
            {
                let episode_torrent_url = rss_item.torrent_link;
                let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode torrent...");
                if !episode_torrent_doppel_path.exists_any() {
                    match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                        Ok(episode_torrent_data) => {
                            episode_torrent_doppel_path.write(&episode_torrent_data)?;
                            tracing::info!(title = rss_item.title, "Episode torrent saved");
                        }
                        Err(e) => {
                            if let FetchError::ReqwestError { source } = &e
                                && source
                                    .status()
                                    .is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
                            {
                                tracing::warn!(
                                    title = rss_item.title,
                                    "Episode torrent not found, maybe deleted since new version"
                                );
                                episode_torrent_doppel_path
                                    .write_meta(MikanDoppelMeta { status: 404 })?;
                            } else {
                                Err(e)?;
                            }
                        }
                    }
                } else {
                    tracing::info!(title = rss_item.title, "Episode torrent already exists");
                }
            }
        }
    }
    tracing::info!("Scraping season subscription done");
    Ok(())
}
apps/recorder/examples/mikan_doppel_subscriber_subscription.rs (new file, 215 lines)
@@ -0,0 +1,215 @@
use std::time::Duration;

use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use recorder::{
    errors::RecorderResult,
    extract::mikan::{
        MikanClient, MikanConfig, MikanRssEpisodeItem,
        extract_mikan_episode_meta_from_episode_homepage_html,
    },
    test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
};
use scraper::Html;
use tokio::fs;
use url::Url;

#[tokio::main]
async fn main() -> RecorderResult<()> {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();
    std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;

    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(0),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(Duration::from_millis(500)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;

    let mikan_base_url = mikan_scrape_client.base_url().clone();
    tracing::info!("Scraping subscriber subscription...");
    let subscriber_subscription =
        fs::read("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
    let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
    let rss_items: Vec<MikanRssEpisodeItem> = channel
        .items
        .into_iter()
        .map(MikanRssEpisodeItem::try_from)
        .collect::<Result<Vec<_>, _>>()?;
    for rss_item in rss_items {
        let episode_homepage_meta = {
            tracing::info!(title = rss_item.title, "Scraping episode homepage...");
            let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
            let episode_homepage_doppel_path = MikanDoppelPath::new(episode_homepage_url.clone());
            let episode_homepage_data = if !episode_homepage_doppel_path.exists_any() {
                let episode_homepage_data =
                    fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
                episode_homepage_doppel_path.write(&episode_homepage_data)?;
                tracing::info!(title = rss_item.title, "Episode homepage saved");
                episode_homepage_data
            } else {
                tracing::info!(title = rss_item.title, "Episode homepage already exists");
                String::from_utf8(episode_homepage_doppel_path.read()?)?
            };
            let html = Html::parse_document(&episode_homepage_data);
            extract_mikan_episode_meta_from_episode_homepage_html(
                &html,
                mikan_base_url.clone(),
                episode_homepage_url,
            )
        }?;

        {
            let episode_torrent_url = rss_item.torrent_link;
            let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
            tracing::info!(title = rss_item.title, "Scraping episode torrent...");
            if !episode_torrent_doppel_path.exists_any() {
                match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                    Ok(episode_torrent_data) => {
                        episode_torrent_doppel_path.write(&episode_torrent_data)?;
                        tracing::info!(title = rss_item.title, "Episode torrent saved");
                    }
                    Err(e) => {
                        if let FetchError::ReqwestError { source } = &e
                            && source
                                .status()
                                .is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
                        {
                            tracing::warn!(
                                title = rss_item.title,
                                "Episode torrent not found, maybe deleted since new version"
                            );
                            episode_torrent_doppel_path
                                .write_meta(MikanDoppelMeta { status: 404 })?;
                        } else {
                            Err(e)?;
                        }
                    }
                }

                tracing::info!(title = rss_item.title, "Episode torrent saved");
            } else {
                tracing::info!(title = rss_item.title, "Episode torrent already exists");
            }
        }
        {
            if let Some(episode_poster_url) = episode_homepage_meta.origin_poster_src.as_ref() {
                let episode_poster_doppel_path = MikanDoppelPath::new(episode_poster_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode poster...");
                if !episode_poster_doppel_path.exists_any() {
                    let episode_poster_data =
                        fetch_image(&mikan_scrape_client, episode_poster_url.clone()).await?;
                    episode_poster_doppel_path.write(&episode_poster_data)?;
                    tracing::info!(title = rss_item.title, "Episode poster saved");
                } else {
                    tracing::info!(title = rss_item.title, "Episode poster already exists");
                }
            }
        }

        {
            let bangumi_homepage_url = episode_homepage_meta
                .bangumi_hash()
                .build_homepage_url(mikan_base_url.clone());
            let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
            tracing::info!(title = rss_item.title, "Scraping bangumi homepage...");
            if !bangumi_homepage_doppel_path.exists_any() {
                let bangumi_homepage_data =
                    fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
                bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
                tracing::info!(title = rss_item.title, "Bangumi homepage saved");
            } else {
                tracing::info!(title = rss_item.title, "Bangumi homepage already exists");
            };
        }
        {
            let bangumi_rss_url = episode_homepage_meta
                .bangumi_hash()
                .build_rss_url(mikan_base_url.clone());
            let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
            tracing::info!(title = rss_item.title, "Scraping bangumi rss...");
            let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
                let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
                bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
                tracing::info!(title = rss_item.title, "Bangumi rss saved");
                bangumi_rss_data
            } else {
                tracing::info!(title = rss_item.title, "Bangumi rss already exists");
                String::from_utf8(bangumi_rss_doppel_path.read()?)?
            };

            let channel = rss::Channel::read_from(bangumi_rss_data.as_bytes())?;
            let rss_items: Vec<MikanRssEpisodeItem> = channel
                .items
                .into_iter()
                .map(MikanRssEpisodeItem::try_from)
                .collect::<Result<Vec<_>, _>>()?;
            for rss_item in rss_items {
                {
                    tracing::info!(title = rss_item.title, "Scraping episode homepage...");
                    let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
                    let episode_homepage_doppel_path =
                        MikanDoppelPath::new(episode_homepage_url.clone());
                    if !episode_homepage_doppel_path.exists_any() {
                        let episode_homepage_data =
                            fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
                        episode_homepage_doppel_path.write(&episode_homepage_data)?;
                        tracing::info!(title = rss_item.title, "Episode homepage saved");
                    } else {
                        tracing::info!(title = rss_item.title, "Episode homepage already exists");
                    };
                };

                {
                    let episode_torrent_url = rss_item.torrent_link;
                    let episode_torrent_doppel_path =
                        MikanDoppelPath::new(episode_torrent_url.clone());
                    tracing::info!(title = rss_item.title, "Scraping episode torrent...");
                    if !episode_torrent_doppel_path.exists_any() {
                        match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                            Ok(episode_torrent_data) => {
                                episode_torrent_doppel_path.write(&episode_torrent_data)?;
                                tracing::info!(title = rss_item.title, "Episode torrent saved");
                            }
                            Err(e) => {
                                if let FetchError::ReqwestError { source } = &e
                                    && source.status().is_some_and(|status| {
                                        status == reqwest::StatusCode::NOT_FOUND
                                    })
                                {
                                    tracing::warn!(
                                        title = rss_item.title,
                                        "Episode torrent not found, maybe deleted since new \
                                         version"
                                    );
                                    episode_torrent_doppel_path
                                        .write_meta(MikanDoppelMeta { status: 404 })?;
                                } else {
                                    Err(e)?;
                                }
                            }
                        }

                        tracing::info!(title = rss_item.title, "Episode torrent saved");
                    } else {
                        tracing::info!(title = rss_item.title, "Episode torrent already exists");
                    }
                }
            }
        }
    }
    tracing::info!("Scraping subscriber subscription done");
    Ok(())
}
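Note: both doppel examples above lean on the same cache-or-fetch shape around `MikanDoppelPath`: check `exists_any()`, fetch and `write` on a miss, `read` on a hit, with `write_meta` recording a 404 so a permanent miss is not retried forever. A minimal, self-contained sketch of that pattern; `Store` is a hypothetical stand-in for `MikanDoppelPath`, and synchronous I/O replaces the async fetchers:

```rust
use std::{fs, path::PathBuf};

/// Hypothetical stand-in for MikanDoppelPath: a file-backed cache slot.
struct Store(PathBuf);

impl Store {
    fn exists(&self) -> bool {
        self.0.exists()
    }
    fn read(&self) -> std::io::Result<Vec<u8>> {
        fs::read(&self.0)
    }
    fn write(&self, data: &[u8]) -> std::io::Result<()> {
        if let Some(parent) = self.0.parent() {
            fs::create_dir_all(parent)?;
        }
        fs::write(&self.0, data)
    }
}

/// Return cached bytes, or compute them once and persist them for the next run.
fn cached_or_fetch(store: &Store, fetch: impl FnOnce() -> Vec<u8>) -> std::io::Result<Vec<u8>> {
    if store.exists() {
        store.read()
    } else {
        let data = fetch();
        store.write(&data)?;
        Ok(data)
    }
}

fn main() -> std::io::Result<()> {
    let store = Store(PathBuf::from("/tmp/doppel/demo.html"));
    // A second invocation would hit the cache and skip the "fetch".
    let data = cached_or_fetch(&store, || b"<html></html>".to_vec())?;
    assert!(!data.is_empty());
    Ok(())
}
```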
@@ -26,25 +26,25 @@ host = '{{ get_env(name="HOST", default="localhost") }}'
 enable = true

 # Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
-[server.middleware.request_id]
+[server.middlewares.request_id]
 enable = true

-[server.middleware.logger]
+[server.middlewares.logger]
 enable = true

 # when your code is panicked, the request still returns 500 status code.
-[server.middleware.catch_panic]
+[server.middlewares.catch_panic]
 enable = true

 # Timeout for incoming requests middleware. Requests that take more time than the configured timeout will be cut off and a 408 status code will be returned.
-[server.middleware.timeout_request]
+[server.middlewares.timeout_request]
 enable = false
 # Duration time in milliseconds.
 timeout = 5000

 # Set the value of the [`Access-Control-Allow-Origin`][mdn] header
 # allow_origins:
-#   - https://loco.rs
+#   - https://konobangu.com
 # Set the value of the [`Access-Control-Allow-Headers`][mdn] header
 # allow_headers:
 #   - Content-Type
@@ -53,7 +53,10 @@ timeout = 5000
 #   - POST
 # Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
 # max_age: 3600
-[server.middleware.cors]
+[server.middlewares.cors]
+enable = true
+
+[server.middlewares.compression]
 enable = true

 # Database Configuration
@@ -86,6 +89,14 @@ leaky_bucket_initial_tokens = 1
 leaky_bucket_refill_tokens = 1
 leaky_bucket_refill_interval = 500

+
+[mikan.http_client.proxy]
+server = '{{ get_env(name="MIKAN_PROXY", default = "") }}'
+auth_header = '{{ get_env(name="MIKAN_PROXY_AUTH_HEADER", default = "") }}'
+no_proxy = '{{ get_env(name="MIKAN_NO_PROXY", default = "") }}'
+accept_invalid_certs = '{{ get_env(name="MIKAN_PROXY_ACCEPT_INVALID_CERTS", default = "false") }}'
+
+
 [auth]
 auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
 basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
@@ -21,6 +21,9 @@ pub struct MainCliArgs {
     /// Explicit environment
     #[arg(short, long)]
     environment: Option<Environment>,
+
+    #[arg(long)]
+    graceful_shutdown: Option<bool>,
 }

 pub struct AppBuilder {
@@ -28,6 +31,7 @@ pub struct AppBuilder {
     config_file: Option<String>,
     working_dir: String,
     environment: Environment,
+    pub graceful_shutdown: bool,
 }

 impl AppBuilder {
@@ -61,7 +65,8 @@ impl AppBuilder {
         builder = builder
             .config_file(args.config_file)
             .dotenv_file(args.dotenv_file)
-            .environment(environment);
+            .environment(environment)
+            .graceful_shutdown(args.graceful_shutdown.unwrap_or(true));

         Ok(builder)
     }
@@ -118,6 +123,12 @@ impl AppBuilder {
         ret
     }

+    pub fn graceful_shutdown(self, graceful_shutdown: bool) -> Self {
+        let mut ret = self;
+        ret.graceful_shutdown = graceful_shutdown;
+        ret
+    }
+
     pub fn dotenv_file(self, dotenv_file: Option<String>) -> Self {
         let mut ret = self;
         ret.dotenv_file = dotenv_file;
@@ -141,6 +152,7 @@ impl Default for AppBuilder {
             dotenv_file: None,
             config_file: None,
             working_dir: String::from("."),
+            graceful_shutdown: true,
         }
     }
 }
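Note: the flag is declared as `Option<bool>` rather than `bool` so that an omitted `--graceful-shutdown` falls through to the builder default via `unwrap_or(true)`. A standalone sketch of that clap pattern (generic illustration, not code from this repo):

```rust
// Requires clap with the "derive" feature.
use clap::Parser;

#[derive(Parser, Debug)]
struct Args {
    /// `--graceful-shutdown false` opts out; omitting the flag keeps the default.
    #[arg(long)]
    graceful_shutdown: Option<bool>,
}

fn main() {
    let args = Args::parse();
    // Absent flag -> None -> default to true, mirroring the builder wiring above.
    let graceful_shutdown = args.graceful_shutdown.unwrap_or(true);
    println!("graceful_shutdown = {graceful_shutdown}");
}
```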
@@ -11,6 +11,11 @@ leaky_bucket_initial_tokens = 0
 leaky_bucket_refill_tokens = 1
 leaky_bucket_refill_interval = 500

+
+[mikan.http_client.proxy]
+
+[mikan.http_client.proxy.headers]
+
 [graphql]
 depth_limit = inf
 complexity_limit = inf
@@ -22,3 +27,5 @@ complexity_limit = inf
 [task]

 [message]
+
+[media]
@@ -11,8 +11,8 @@ use super::env::Environment;
 use crate::{
     auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
     errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
-    logger::LoggerConfig, message::MessageConfig, storage::StorageConfig, task::TaskConfig,
-    web::WebServerConfig,
+    logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
+    task::TaskConfig, web::WebServerConfig,
 };

 const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
@@ -27,6 +27,7 @@ pub struct AppConfig {
     pub mikan: MikanConfig,
     pub crypto: CryptoConfig,
     pub graphql: GraphQLConfig,
+    pub media: MediaConfig,
     pub logger: LoggerConfig,
     pub database: DatabaseConfig,
     pub task: TaskConfig,
@@ -110,12 +111,12 @@ impl AppConfig {
             for f in try_filenames.iter() {
                 let p = try_dotenv_file_or_dir_path.join(f);
                 if p.exists() && p.is_file() {
-                    dotenv::from_path(p)?;
+                    dotenvy::from_path(p)?;
                     break;
                 }
             }
         } else if try_dotenv_file_or_dir_path.is_file() {
-            dotenv::from_path(try_dotenv_file_or_dir_path)?;
+            dotenvy::from_path(try_dotenv_file_or_dir_path)?;
             break;
         }
     }
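Note: `dotenv` is unmaintained and `dotenvy` is its actively maintained fork with a compatible API, so the swap here is a crate rename plus the matching `DotEnvError` source-type change further down in the error enum. A sketch of the unchanged call shape:

```rust
// Same call shape before and after the swap; only the crate and error type differ.
fn load_env(path: &std::path::Path) -> Result<(), dotenvy::Error> {
    dotenvy::from_path(path) // was: dotenv::from_path(path)
}
```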
@@ -4,16 +4,9 @@ use tokio::sync::OnceCell;

 use super::{Environment, config::AppConfig};
 use crate::{
-    auth::AuthService,
-    cache::CacheService,
-    crypto::CryptoService,
-    database::DatabaseService,
-    errors::RecorderResult,
-    extract::mikan::MikanClient,
-    graphql::GraphQLService,
-    logger::LoggerService,
-    message::MessageService,
-    storage::{StorageService, StorageServiceTrait},
+    auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
+    errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
+    logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
     task::TaskService,
 };

@@ -25,12 +18,13 @@ pub trait AppContextTrait: Send + Sync + Debug {
     fn mikan(&self) -> &MikanClient;
     fn auth(&self) -> &AuthService;
     fn graphql(&self) -> &GraphQLService;
-    fn storage(&self) -> &dyn StorageServiceTrait;
+    fn storage(&self) -> &StorageService;
     fn working_dir(&self) -> &String;
     fn environment(&self) -> &Environment;
     fn crypto(&self) -> &CryptoService;
     fn task(&self) -> &TaskService;
     fn message(&self) -> &MessageService;
+    fn media(&self) -> &MediaService;
 }

 pub struct AppContext {
@@ -40,13 +34,14 @@ pub struct AppContext {
     cache: CacheService,
     mikan: MikanClient,
     auth: AuthService,
-    graphql: GraphQLService,
     storage: StorageService,
     crypto: CryptoService,
     working_dir: String,
     environment: Environment,
     message: MessageService,
+    media: MediaService,
     task: OnceCell<TaskService>,
+    graphql: OnceCell<GraphQLService>,
 }

 impl AppContext {
@@ -65,7 +60,7 @@ impl AppContext {
         let auth = AuthService::from_conf(config.auth).await?;
         let mikan = MikanClient::from_config(config.mikan).await?;
         let crypto = CryptoService::from_config(config.crypto).await?;
-        let graphql = GraphQLService::from_config_and_database(config.graphql, db.clone()).await?;
+        let media = MediaService::from_config(config.media).await?;

         let ctx = Arc::new(AppContext {
             config: config_cloned,
@@ -77,10 +72,11 @@ impl AppContext {
             storage,
             mikan,
             working_dir: working_dir.to_string(),
-            graphql,
             crypto,
             message,
+            media,
             task: OnceCell::new(),
+            graphql: OnceCell::new(),
         });

         ctx.task
@@ -89,6 +85,12 @@ impl AppContext {
             })
             .await?;
+
+        ctx.graphql
+            .get_or_try_init(async || {
+                GraphQLService::from_config_and_ctx(config.graphql, ctx.clone()).await
+            })
+            .await?;
+
         Ok(ctx)
     }
 }
@@ -119,9 +121,9 @@ impl AppContextTrait for AppContext {
         &self.auth
     }
     fn graphql(&self) -> &GraphQLService {
-        &self.graphql
+        self.graphql.get().expect("graphql should be set")
     }
-    fn storage(&self) -> &dyn StorageServiceTrait {
+    fn storage(&self) -> &StorageService {
         &self.storage
     }
     fn working_dir(&self) -> &String {
@@ -139,4 +141,7 @@ impl AppContextTrait for AppContext {
     fn message(&self) -> &MessageService {
         &self.message
     }
+    fn media(&self) -> &MediaService {
+        &self.media
+    }
 }
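Note: `GraphQLService` previously needed only the database handle; now it needs the finished `Arc<AppContext>`, which cannot exist before the struct is built. Moving the field into a `tokio::sync::OnceCell` breaks that construction cycle with a two-phase init, at the cost of the `expect("graphql should be set")` in the accessor. A minimal sketch of the pattern, with stand-in types rather than project code:

```rust
use std::sync::Arc;
use tokio::sync::OnceCell;

#[derive(Debug)]
struct Service; // stand-in for GraphQLService

#[derive(Debug)]
struct Ctx {
    service: OnceCell<Service>, // empty during construction
}

#[tokio::main]
async fn main() {
    // Phase 1: build the Arc with the cell still empty.
    let ctx = Arc::new(Ctx { service: OnceCell::new() });
    // Phase 2: initialize; the closure could now capture `ctx` itself.
    ctx.service.get_or_init(|| async { Service }).await;
    assert!(ctx.service.get().is_some());
}
```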
@@ -1,17 +1,20 @@
 use std::{net::SocketAddr, sync::Arc};

 use axum::Router;
-use tokio::signal;
+use tokio::{net::TcpSocket, signal};
+use tracing::instrument;

 use super::{builder::AppBuilder, context::AppContextTrait};
 use crate::{
-    errors::RecorderResult,
+    errors::{RecorderError, RecorderResult},
     web::{
         controller::{self, core::ControllerTrait},
         middleware::default_middleware_stack,
     },
 };

+pub const PROJECT_NAME: &str = "konobangu";
+
 pub struct App {
     pub context: Arc<dyn AppContextTrait>,
     pub builder: AppBuilder,
@@ -22,43 +25,104 @@ impl App {
         AppBuilder::default()
     }

+    #[instrument(err, skip(self))]
     pub async fn serve(&self) -> RecorderResult<()> {
         let context = &self.context;
         let config = context.config();
-        let listener = tokio::net::TcpListener::bind(&format!(
-            "{}:{}",
-            config.server.binding, config.server.port
-        ))
-        .await?;
+
+        let listener = {
+            let addr: SocketAddr =
+                format!("{}:{}", config.server.binding, config.server.port).parse()?;
+
+            let socket = if addr.is_ipv4() {
+                TcpSocket::new_v4()
+            } else {
+                TcpSocket::new_v6()
+            }?;
+
+            socket.set_reuseaddr(true)?;
+
+            #[cfg(all(unix, not(target_os = "solaris")))]
+            if let Err(e) = socket.set_reuseport(true) {
+                tracing::warn!("Failed to set SO_REUSEPORT: {}", e);
+            }
+
+            socket.bind(addr)?;
+            socket.listen(1024)
+        }?;
+
         let mut router = Router::<Arc<dyn AppContextTrait>>::new();

-        let (graphql_c, oidc_c, metadata_c) = futures::try_join!(
+        let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
             controller::graphql::create(context.clone()),
             controller::oidc::create(context.clone()),
-            controller::metadata::create(context.clone())
+            controller::metadata::create(context.clone()),
+            controller::r#static::create(context.clone()),
+            controller::feeds::create(context.clone()),
         )?;

-        for c in [graphql_c, oidc_c, metadata_c] {
+        for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
             router = c.apply_to(router);
         }

         let middlewares = default_middleware_stack(context.clone());
         for mid in middlewares {
-            router = mid.apply(router)?;
-            tracing::info!(name = mid.name(), "+middleware");
+            if mid.is_enabled() {
+                router = mid.apply(router)?;
+                tracing::info!(name = mid.name(), "+middleware");
+            }
         }

         let router = router
             .with_state(context.clone())
             .into_make_service_with_connect_info::<SocketAddr>();

-        axum::serve(listener, router)
-            .with_graceful_shutdown(async move {
-                Self::shutdown_signal().await;
-                tracing::info!("shutting down...");
-            })
-            .await?;
+        let task = context.task();
+
+        let graceful_shutdown = self.builder.graceful_shutdown;
+
+        tokio::try_join!(
+            async {
+                let axum_serve = axum::serve(listener, router);
+
+                if graceful_shutdown {
+                    axum_serve
+                        .with_graceful_shutdown(async move {
+                            Self::shutdown_signal().await;
+                            tracing::info!("axum shutting down...");
+                        })
+                        .await?;
+                } else {
+                    axum_serve.await?;
+                }
+
+                Ok::<(), RecorderError>(())
+            },
+            async {
+                {
+                    let monitor = task.setup_monitor().await?;
+                    if graceful_shutdown {
+                        monitor
+                            .run_with_signal(async move {
+                                Self::shutdown_signal().await;
+                                tracing::info!("apalis shutting down...");
+                                Ok(())
+                            })
+                            .await?;
+                    } else {
+                        monitor.run().await?;
+                    }
+                }
+
+                Ok::<(), RecorderError>(())
+            },
+            async {
+                let listener = task.setup_listener().await?;
+                listener.listen().await?;
+
+                Ok::<(), RecorderError>(())
+            }
+        )?;
+
         Ok(())
     }
@@ -90,7 +154,7 @@ impl App {
         #[cfg(not(unix))]
         let terminate = std::future::pending::<()>();

-        #[cfg(all(not(unix), debug_assertions))]
+        #[cfg(not(all(unix, debug_assertions)))]
         let quit = std::future::pending::<()>();

         tokio::select! {
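Note: the switch from `TcpListener::bind` to a hand-built `TcpSocket` exists to set socket options before binding: `SO_REUSEADDR` avoids rebind failures from lingering `TIME_WAIT` sockets across restarts, and `SO_REUSEPORT` (best-effort, unix-only here) allows multiple acceptors on one port. A condensed restatement of the diff's listener setup, with comments:

```rust
use tokio::net::{TcpListener, TcpSocket};

fn listen(addr: std::net::SocketAddr) -> std::io::Result<TcpListener> {
    let socket = if addr.is_ipv4() {
        TcpSocket::new_v4()
    } else {
        TcpSocket::new_v6()
    }?;
    socket.set_reuseaddr(true)?; // fast restarts: tolerate TIME_WAIT leftovers
    #[cfg(all(unix, not(target_os = "solaris")))]
    if let Err(e) = socket.set_reuseport(true) {
        eprintln!("failed to set SO_REUSEPORT: {e}"); // tolerated, not fatal
    }
    socket.bind(addr)?;
    socket.listen(1024) // backlog of pending, not-yet-accepted connections
}
```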
@@ -4,7 +4,7 @@ pub mod context;
 pub mod core;
 pub mod env;

-pub use core::App;
+pub use core::{App, PROJECT_NAME};

 pub use builder::AppBuilder;
 pub use config::AppConfig;
@@ -9,7 +9,7 @@ use super::{
     service::{AuthServiceTrait, AuthUserInfo},
 };
 use crate::{
-    app::AppContextTrait,
+    app::{AppContextTrait, PROJECT_NAME},
     models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
 };

@@ -86,7 +86,7 @@ impl AuthServiceTrait for BasicAuthService {
     }

     fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
-        Some(HeaderValue::from_static(r#"Basic realm="konobangu""#))
+        Some(HeaderValue::from_str(format!("Basic realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
     }

     fn auth_type(&self) -> AuthType {
@@ -1,6 +1,8 @@
-use jwt_authorizer::OneOrArray;
+use std::collections::HashMap;
+
+use jwtk::OneOrMany;
 use serde::{Deserialize, Serialize};
-use serde_with::{NoneAsEmptyString, serde_as};
+use serde_with::serde_as;

 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct BasicAuthConfig {
@@ -22,13 +24,9 @@ pub struct OidcAuthConfig {
     #[serde(rename = "oidc_client_secret")]
     pub client_secret: String,
     #[serde(rename = "oidc_extra_scopes")]
-    pub extra_scopes: Option<OneOrArray<String>>,
-    #[serde_as(as = "NoneAsEmptyString")]
-    #[serde(rename = "oidc_extra_claim_key")]
-    pub extra_claim_key: Option<String>,
-    #[serde(rename = "oidc_extra_claim_value")]
-    #[serde_as(as = "NoneAsEmptyString")]
-    pub extra_claim_value: Option<String>,
+    pub extra_scopes: Option<OneOrMany<String>>,
+    #[serde(rename = "oidc_extra_claims")]
+    pub extra_claims: Option<HashMap<String, Option<String>>>,
 }

 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
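Note: my reading of the new shape: `extra_claims` maps a claim key to an optional expected value, where `Some(value)` must match the claim's string value exactly and `None` only requires the key to be present. The enforcement lives in the OIDC hunk below; a self-contained sketch of the matching rule:

```rust
use std::collections::HashMap;

/// `Some(want)`: claim must exist and equal `want`; `None`: claim must merely exist.
fn check(claims: &HashMap<String, String>, expected: &HashMap<String, Option<String>>) -> bool {
    expected.iter().all(|(key, want)| match (claims.get(key), want) {
        (Some(found), Some(want)) => found == want,
        (Some(_), None) => true,
        (None, _) => false,
    })
}

fn main() {
    let claims = HashMap::from([("tenant".to_string(), "konobangu".to_string())]);
    let expected: HashMap<String, Option<String>> =
        HashMap::from([("tenant".to_string(), None)]);
    assert!(check(&claims, &expected));
}
```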
@@ -11,13 +11,14 @@ use openidconnect::{
 };
 use serde::{Deserialize, Serialize};
 use snafu::prelude::*;
-use util::OptDynErr;

 use crate::models::auth::AuthType;

 #[derive(Debug, Snafu)]
 #[snafu(visibility(pub(crate)))]
 pub enum AuthError {
+    #[snafu(display("Permission denied"))]
+    PermissionError,
     #[snafu(display("Not support auth method"))]
     NotSupportAuthMethod {
         supported: Vec<AuthType>,
@@ -27,10 +28,6 @@ pub enum AuthError {
     FindAuthRecordError,
     #[snafu(display("Invalid credentials"))]
     BasicInvalidCredentials,
-    #[snafu(transparent)]
-    OidcInitError {
-        source: jwt_authorizer::error::InitError,
-    },
     #[snafu(display("Invalid oidc provider meta client error: {source}"))]
     OidcProviderHttpClientError { source: HttpClientError },
     #[snafu(transparent)]
@@ -66,8 +63,10 @@ pub enum AuthError {
     OidcSignatureVerificationError { source: SignatureVerificationError },
     #[snafu(transparent)]
     OidcSigningError { source: SigningError },
+    #[snafu(display("Missing Bearer token"))]
+    OidcMissingBearerToken,
     #[snafu(transparent)]
-    OidcJwtAuthError { source: jwt_authorizer::AuthError },
+    OidcJwtkError { source: jwtk::Error },
     #[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
     OidcExtraScopesMatchError { expected: String, found: String },
     #[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
@@ -95,12 +94,6 @@ pub enum AuthError {
         column: String,
         context_path: String,
     },
-    #[snafu(display("GraphQL permission denied since {field}"))]
-    GraphqlStaticPermissionError {
-        #[snafu(source)]
-        source: OptDynErr,
-        field: String,
-    },
 }

 impl AuthError {
@@ -12,8 +12,9 @@ use axum::{
     http::{HeaderValue, request::Parts},
 };
 use fetch::{HttpClient, client::HttpClientError};
+use http::header::AUTHORIZATION;
 use itertools::Itertools;
-use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer};
+use jwtk::jwk::RemoteJwksVerifier;
 use moka::future::Cache;
 use openidconnect::{
     AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
@@ -31,7 +32,11 @@ use super::{
     errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
     service::{AuthServiceTrait, AuthUserInfo},
 };
-use crate::{app::AppContextTrait, errors::RecorderError, models::auth::AuthType};
+use crate::{
+    app::{AppContextTrait, PROJECT_NAME},
+    errors::RecorderError,
+    models::auth::AuthType,
+};

 pub struct OidcHttpClient(pub Arc<HttpClient>);

@@ -77,21 +82,6 @@ impl<'c> openidconnect::AsyncHttpClient<'c> for OidcHttpClient {

 #[derive(Deserialize, Serialize, Clone, Debug)]
 pub struct OidcAuthClaims {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub iss: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub sub: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub aud: Option<OneOrArray<String>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub exp: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub nbf: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub iat: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub jti: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
     pub scope: Option<String>,
     #[serde(flatten)]
     pub custom: HashMap<String, Value>,
@@ -101,40 +91,6 @@ impl OidcAuthClaims {
     pub fn scopes(&self) -> std::str::Split<'_, char> {
         self.scope.as_deref().unwrap_or_default().split(',')
     }
-
-    pub fn get_claim(&self, key: &str) -> Option<String> {
-        match key {
-            "iss" => self.iss.clone(),
-            "sub" => self.sub.clone(),
-            "aud" => self.aud.as_ref().map(|s| s.iter().join(",")),
-            "exp" => self.exp.clone().map(|s| s.0.to_string()),
-            "nbf" => self.nbf.clone().map(|s| s.0.to_string()),
-            "iat" => self.iat.clone().map(|s| s.0.to_string()),
-            "jti" => self.jti.clone(),
-            "scope" => self.scope.clone(),
-            key => self.custom.get(key).map(|s| s.to_string()),
-        }
-    }
-
-    pub fn has_claim(&self, key: &str) -> bool {
-        match key {
-            "iss" => self.iss.is_some(),
-            "sub" => self.sub.is_some(),
-            "aud" => self.aud.is_some(),
-            "exp" => self.exp.is_some(),
-            "nbf" => self.nbf.is_some(),
-            "iat" => self.iat.is_some(),
-            "jti" => self.jti.is_some(),
-            "scope" => self.scope.is_some(),
-            key => self.custom.contains_key(key),
-        }
-    }
-
-    pub fn contains_audience(&self, aud: &str) -> bool {
-        self.aud
-            .as_ref()
-            .is_some_and(|arr| arr.iter().any(|s| s == aud))
-    }
 }

 #[derive(Debug, Clone, Serialize)]
@@ -164,7 +120,7 @@ pub struct OidcAuthCallbackPayload {

 pub struct OidcAuthService {
     pub config: OidcAuthConfig,
-    pub api_authorizer: Authorizer<OidcAuthClaims>,
+    pub jwk_verifier: RemoteJwksVerifier,
     pub oidc_provider_client: Arc<HttpClient>,
     pub oidc_request_cache: Cache<String, OidcAuthRequest>,
 }
@@ -317,47 +273,68 @@ impl AuthServiceTrait for OidcAuthService {
         request: &mut Parts,
     ) -> Result<AuthUserInfo, AuthError> {
         let config = &self.config;
-        let token = self
-            .api_authorizer
-            .extract_token(&request.headers)
-            .ok_or(jwt_authorizer::AuthError::MissingToken())?;
+        let token = request
+            .headers
+            .get(AUTHORIZATION)
+            .and_then(|authorization| {
+                authorization
+                    .to_str()
+                    .ok()
+                    .and_then(|s| s.strip_prefix("Bearer "))
+            })
+            .ok_or(AuthError::OidcMissingBearerToken)?;

-        let token_data = self.api_authorizer.check_auth(&token).await?;
-        let claims = token_data.claims;
+        let token_data = self.jwk_verifier.verify::<OidcAuthClaims>(token).await?;
+        let claims = token_data.claims();
         let sub = if let Some(sub) = claims.sub.as_deref() {
             sub
         } else {
             return Err(AuthError::OidcSubMissingError);
         };
-        if !claims.contains_audience(&config.audience) {
+        if !claims.aud.iter().any(|aud| aud == &config.audience) {
             return Err(AuthError::OidcAudMissingError {
                 aud: config.audience.clone(),
             });
         }
+        let extra_claims = &claims.extra;
         if let Some(expected_scopes) = config.extra_scopes.as_ref() {
-            let found_scopes = claims.scopes().collect::<HashSet<_>>();
+            let found_scopes = extra_claims.scopes().collect::<HashSet<_>>();
             if !expected_scopes
                 .iter()
                 .all(|es| found_scopes.contains(es as &str))
             {
                 return Err(AuthError::OidcExtraScopesMatchError {
                     expected: expected_scopes.iter().join(","),
-                    found: claims.scope.unwrap_or_default(),
+                    found: extra_claims
+                        .scope
+                        .as_deref()
+                        .unwrap_or_default()
+                        .to_string(),
                 });
             }
         }
-        if let Some(key) = config.extra_claim_key.as_ref() {
-            if !claims.has_claim(key) {
-                return Err(AuthError::OidcExtraClaimMissingError { claim: key.clone() });
-            }
-            if let Some(value) = config.extra_claim_value.as_ref()
-                && claims.get_claim(key).is_none_or(|v| &v != value)
-            {
-                return Err(AuthError::OidcExtraClaimMatchError {
-                    expected: value.clone(),
-                    found: claims.get_claim(key).unwrap_or_default().to_string(),
-                    key: key.clone(),
-                });
-            }
+        if let Some(expected_extra_claims) = config.extra_claims.as_ref() {
+            for (expected_key, expected_value) in expected_extra_claims.iter() {
+                match (extra_claims.custom.get(expected_key), expected_value) {
+                    (found_value, Some(expected_value)) => {
+                        if let Some(Value::String(found_value)) = found_value
+                            && expected_value == found_value
+                        {
+                        } else {
+                            return Err(AuthError::OidcExtraClaimMatchError {
+                                expected: expected_value.clone(),
+                                found: found_value.map(|v| v.to_string()).unwrap_or_default(),
+                                key: expected_key.clone(),
+                            });
+                        }
+                    }
+                    (None, None) => {
+                        return Err(AuthError::OidcExtraClaimMissingError {
+                            claim: expected_key.clone(),
+                        });
+                    }
+                    _ => {}
+                }
+            }
         }
         let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
@@ -378,7 +355,7 @@ impl AuthServiceTrait for OidcAuthService {
     }

     fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
-        Some(HeaderValue::from_static(r#"Bearer realm="konobangu""#))
+        Some(HeaderValue::from_str(format!("Bearer realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
     }

     fn auth_type(&self) -> AuthType {
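Note: `jwt_authorizer` used to extract the token; with `jwtk` the handler now pulls the `Authorization` header apart by hand. The extraction is exactly as strict as `strip_prefix("Bearer ")` (case-sensitive, single space). Isolated as a function:

```rust
use http::{HeaderMap, header::AUTHORIZATION};

/// Return the raw Bearer token, if the header is present and well-formed.
fn bearer_token(headers: &HeaderMap) -> Option<&str> {
    headers
        .get(AUTHORIZATION)?
        .to_str()
        .ok()?
        .strip_prefix("Bearer ")
}

fn main() {
    let mut headers = HeaderMap::new();
    headers.insert(AUTHORIZATION, "Bearer abc123".parse().unwrap());
    assert_eq!(bearer_token(&headers), Some("abc123"));
}
```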
@@ -1,25 +1,22 @@
 use std::{sync::Arc, time::Duration};

 use async_trait::async_trait;
-use axum::{
-    extract::FromRequestParts,
-    http::request::Parts,
-    response::{IntoResponse as _, Response},
-};
+use axum::http::request::Parts;
 use fetch::{
     HttpClient, HttpClientConfig,
     client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
 };
 use http::header::HeaderValue;
-use jwt_authorizer::{JwtAuthorizer, Validation};
+use jwtk::jwk::RemoteJwksVerifier;
 use moka::future::Cache;
+use openidconnect::{IssuerUrl, core::CoreProviderMetadata};
 use snafu::prelude::*;

 use super::{
     AuthConfig,
     basic::BasicAuthService,
-    errors::{AuthError, OidcProviderHttpClientSnafu},
-    oidc::{OidcAuthClaims, OidcAuthService},
+    errors::{AuthError, OidcProviderHttpClientSnafu, OidcProviderUrlSnafu},
+    oidc::{OidcAuthService, OidcHttpClient},
 };
 use crate::{app::AppContextTrait, models::auth::AuthType};

@@ -29,22 +26,6 @@ pub struct AuthUserInfo {
     pub auth_type: AuthType,
 }

-impl FromRequestParts<Arc<dyn AppContextTrait>> for AuthUserInfo {
-    type Rejection = Response;
-
-    async fn from_request_parts(
-        parts: &mut Parts,
-        state: &Arc<dyn AppContextTrait>,
-    ) -> Result<Self, Self::Rejection> {
-        let auth_service = state.auth();
-
-        auth_service
-            .extract_user_info(state.as_ref(), parts)
-            .await
-            .map_err(|err| err.into_response())
-    }
-}
-
 #[async_trait]
 pub trait AuthServiceTrait {
     async fn extract_user_info(
@@ -66,27 +47,33 @@ impl AuthService {
         let result = match config {
             AuthConfig::Basic(config) => AuthService::Basic(Box::new(BasicAuthService { config })),
             AuthConfig::Oidc(config) => {
-                let validation = Validation::new()
-                    .iss(&[&config.issuer])
-                    .aud(&[&config.audience]);
+                let oidc_provider_client = Arc::new(
+                    HttpClient::from_config(HttpClientConfig {
+                        exponential_backoff_max_retries: Some(3),
+                        cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
+                        cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
+                        ..Default::default()
+                    })
+                    .context(OidcProviderHttpClientSnafu)?,
+                );

-                let oidc_provider_client = HttpClient::from_config(HttpClientConfig {
-                    exponential_backoff_max_retries: Some(3),
-                    cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
-                    cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
-                    ..Default::default()
-                })
-                .context(OidcProviderHttpClientSnafu)?;
+                let provider_metadata = {
+                    let client = OidcHttpClient(oidc_provider_client.clone());
+                    let issuer_url =
+                        IssuerUrl::new(config.issuer.clone()).context(OidcProviderUrlSnafu)?;
+                    CoreProviderMetadata::discover_async(issuer_url, &client).await
+                }?;

-                let api_authorizer = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer)
-                    .validation(validation)
-                    .build()
-                    .await?;
+                let jwk_verifier = RemoteJwksVerifier::new(
+                    provider_metadata.jwks_uri().to_string().clone(),
+                    None,
+                    Duration::from_secs(300),
+                );

                 AuthService::Oidc(Box::new(OidcAuthService {
                     config,
-                    api_authorizer,
-                    oidc_provider_client: Arc::new(oidc_provider_client),
+                    jwk_verifier,
+                    oidc_provider_client,
                     oidc_request_cache: Cache::builder()
                         .time_to_live(Duration::from_mins(5))
                         .name("oidc_request_cache")
@@ -100,6 +87,7 @@ impl AuthService {

 #[async_trait]
 impl AuthServiceTrait for AuthService {
+    #[tracing::instrument(skip(self, ctx, request))]
     async fn extract_user_info(
         &self,
         ctx: &dyn AppContextTrait,
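Note: the new construction discovers the provider metadata once through the OIDC HTTP client, then hands only the JWKS URI to `RemoteJwksVerifier`, which re-fetches and caches the signing keys on its own schedule (300 s here). Reduced to its core, following the signature used in the diff:

```rust
use std::time::Duration;

use jwtk::jwk::RemoteJwksVerifier;

/// The verifier fetches JWKS from `jwks_uri` and caches keys for 300 s between
/// refreshes; `None` lets jwtk build its own HTTP client internally.
fn make_verifier(jwks_uri: String) -> RemoteJwksVerifier {
    RemoteJwksVerifier::new(jwks_uri, None, Duration::from_secs(300))
}
```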
@@ -1,3 +1,6 @@
+use async_graphql::Error as AsyncGraphQLError;
+use seaography::SeaographyError;
+
 #[derive(Debug, snafu::Snafu)]
 pub enum CryptoError {
     #[snafu(transparent)]
@@ -9,3 +12,9 @@ pub enum CryptoError {
     #[snafu(transparent)]
     SerdeJsonError { source: serde_json::Error },
 }
+
+impl From<CryptoError> for SeaographyError {
+    fn from(error: CryptoError) -> Self {
+        SeaographyError::AsyncGraphQLError(AsyncGraphQLError::new(error.to_string()))
+    }
+}
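Note: the `From` impl above flattens a `CryptoError` into a GraphQL error message so resolver code can use `?` directly. A sketch of the intended call site; the `CryptoError` import path is assumed for illustration:

```rust
use recorder::crypto::CryptoError; // path assumed, for illustration only
use seaography::SeaographyError;

fn resolver_step(input: Result<String, CryptoError>) -> Result<String, SeaographyError> {
    Ok(input?) // CryptoError -> SeaographyError via the new From impl
}
```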
@@ -10,19 +10,17 @@ use sea_orm_migration::MigratorTrait;
 use super::DatabaseConfig;
 use crate::{errors::RecorderResult, migrations::Migrator};

-pub trait DatabaseServiceConnectionTrait {
-    fn get_database_connection(&self) -> &DatabaseConnection;
-}
-
 pub struct DatabaseService {
+    pub config: DatabaseConfig,
     connection: DatabaseConnection,
-    #[cfg(all(any(test, feature = "playground"), feature = "testcontainers"))]
+    #[cfg(feature = "testcontainers")]
     pub container:
         Option<testcontainers::ContainerAsync<testcontainers_modules::postgres::Postgres>>,
 }

 impl DatabaseService {
     pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> {
+        let db_config = config.clone();
         let mut opt = ConnectOptions::new(&config.uri);
         opt.max_connections(config.max_connections)
             .min_connections(config.min_connections)
@@ -54,8 +52,9 @@ impl DatabaseService {

         let me = Self {
             connection: db,
-            #[cfg(all(any(test, feature = "playground"), feature = "testcontainers"))]
+            #[cfg(feature = "testcontainers")]
             container: None,
+            config: db_config,
         };

         if config.auto_migrate {
@@ -5,8 +5,7 @@ use axum::{
     response::{IntoResponse, Response},
 };
 use fetch::{FetchError, HttpClientError, reqwest, reqwest_middleware};
-use http::StatusCode;
-use serde::{Deserialize, Deserializer, Serialize};
+use http::{HeaderMap, StatusCode};
 use snafu::Snafu;
 
 use crate::{
@@ -19,19 +18,41 @@ use crate::{
 #[derive(Snafu, Debug)]
 #[snafu(visibility(pub(crate)))]
 pub enum RecorderError {
+    #[snafu(display(
+        "HTTP {status} {reason}, source = {source:?}",
+        status = status,
+        reason = status.canonical_reason().unwrap_or("Unknown")
+    ))]
+    HttpResponseError {
+        status: StatusCode,
+        headers: Option<HeaderMap>,
+        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
+        source: OptDynErr,
+    },
+    #[snafu(transparent)]
+    ImageError { source: image::ImageError },
+    #[cfg(feature = "jxl")]
+    #[snafu(transparent)]
+    JxlEncodeError { source: jpegxl_rs::EncodeError },
+    #[snafu(transparent, context(false))]
+    HttpError { source: http::Error },
     #[snafu(transparent, context(false))]
     FancyRegexError {
         #[snafu(source(from(fancy_regex::Error, Box::new)))]
         source: Box<fancy_regex::Error>,
     },
     #[snafu(transparent)]
+    NetAddrParseError { source: std::net::AddrParseError },
+    #[snafu(transparent)]
     RegexError { source: regex::Error },
-    #[snafu(transparent)]
-    InvalidMethodError { source: http::method::InvalidMethod },
-    #[snafu(transparent)]
-    InvalidHeaderNameError {
-        source: http::header::InvalidHeaderName,
-    },
+    #[snafu(display("Invalid method"))]
+    InvalidMethodError,
+    #[snafu(display("Invalid header value"))]
+    InvalidHeaderValueError,
+    #[snafu(display("Invalid header name"))]
+    InvalidHeaderNameError,
+    #[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
+    MissingOriginError,
     #[snafu(transparent)]
     TracingAppenderInitError {
         source: tracing_appender::rolling::InitError,
@@ -47,7 +68,7 @@ pub enum RecorderError {
     #[snafu(transparent)]
     RSSError { source: rss::Error },
     #[snafu(transparent)]
-    DotEnvError { source: dotenv::Error },
+    DotEnvError { source: dotenvy::Error },
     #[snafu(transparent)]
     TeraError { source: tera::Error },
     #[snafu(transparent)]
@@ -71,12 +92,8 @@ pub enum RecorderError {
         source: Box<opendal::Error>,
     },
     #[snafu(transparent)]
-    InvalidHeaderValueError {
-        source: http::header::InvalidHeaderValue,
-    },
-    #[snafu(transparent)]
     HttpClientError { source: HttpClientError },
-    #[cfg(all(any(test, feature = "playground"), feature = "testcontainers"))]
+    #[cfg(feature = "testcontainers")]
     #[snafu(transparent)]
     TestcontainersError {
         source: testcontainers::TestcontainersError,
@@ -101,11 +118,11 @@ pub enum RecorderError {
         #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
         source: OptDynErr,
     },
-    #[snafu(display("Model Entity {entity} not found"))]
+    #[snafu(display("Model Entity {entity} not found or not belong to subscriber"))]
     ModelEntityNotFound { entity: Cow<'static, str> },
     #[snafu(transparent)]
     FetchError { source: FetchError },
-    #[snafu(display("Credential3rdError: {source}"))]
+    #[snafu(display("Credential3rdError: {message}, source = {source}"))]
     Credential3rdError {
         message: String,
         #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
@@ -113,15 +130,35 @@ pub enum RecorderError {
     },
     #[snafu(transparent)]
     CryptoError { source: CryptoError },
+    #[snafu(transparent)]
+    StringFromUtf8Error { source: std::string::FromUtf8Error },
     #[snafu(display("{message}"))]
     Whatever {
         message: String,
         #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
         source: OptDynErr,
     },
+    #[snafu(display("Invalid task id: {message}"))]
+    InvalidTaskId { message: String },
 }
 
 impl RecorderError {
+    pub fn from_status(status: StatusCode) -> Self {
+        Self::HttpResponseError {
+            status,
+            headers: None,
+            source: None.into(),
+        }
+    }
+
+    pub fn from_status_and_headers(status: StatusCode, headers: HeaderMap) -> Self {
+        Self::HttpResponseError {
+            status,
+            headers: Some(headers),
+            source: None.into(),
+        }
+    }
+
     pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
         Self::MikanMetaMissingFieldError {
             field,
@@ -171,10 +208,53 @@ impl snafu::FromString for RecorderError {
     }
 }
 
+impl From<StatusCode> for RecorderError {
+    fn from(status: StatusCode) -> Self {
+        Self::HttpResponseError {
+            status,
+            headers: None,
+            source: None.into(),
+        }
+    }
+}
+
+impl From<(StatusCode, HeaderMap)> for RecorderError {
+    fn from((status, headers): (StatusCode, HeaderMap)) -> Self {
+        Self::HttpResponseError {
+            status,
+            headers: Some(headers),
+            source: None.into(),
+        }
+    }
+}
+
 impl IntoResponse for RecorderError {
     fn into_response(self) -> Response {
         match self {
             Self::AuthError { source: auth_error } => auth_error.into_response(),
+            Self::HttpResponseError {
+                status,
+                headers,
+                source,
+            } => {
+                let message = source
+                    .into_inner()
+                    .map(|s| s.to_string())
+                    .unwrap_or_else(|| {
+                        String::from(status.canonical_reason().unwrap_or("Unknown"))
+                    });
+                (
+                    status,
+                    headers,
+                    Json::<StandardErrorResponse>(StandardErrorResponse::from(message)),
+                )
+                    .into_response()
+            }
+            Self::ModelEntityNotFound { entity } => (
+                StatusCode::NOT_FOUND,
+                Json::<StandardErrorResponse>(StandardErrorResponse::from(entity.to_string())),
+            )
+                .into_response(),
             err => (
                 StatusCode::INTERNAL_SERVER_ERROR,
                 Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
@@ -184,28 +264,6 @@ impl IntoResponse for RecorderError {
     }
 }
 
-impl Serialize for RecorderError {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
-        serializer.serialize_str(&self.to_string())
-    }
-}
-
-impl<'de> Deserialize<'de> for RecorderError {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let s = String::deserialize(deserializer)?;
-        Ok(Self::Whatever {
-            message: s,
-            source: None.into(),
-        })
-    }
-}
-
 impl From<reqwest::Error> for RecorderError {
     fn from(error: reqwest::Error) -> Self {
         FetchError::from(error).into()
@@ -218,4 +276,22 @@ impl From<reqwest_middleware::Error> for RecorderError {
     }
 }
 
+impl From<http::header::InvalidHeaderValue> for RecorderError {
+    fn from(_error: http::header::InvalidHeaderValue) -> Self {
+        Self::InvalidHeaderValueError
+    }
+}
+
+impl From<http::header::InvalidHeaderName> for RecorderError {
+    fn from(_error: http::header::InvalidHeaderName) -> Self {
+        Self::InvalidHeaderNameError
+    }
+}
+
+impl From<http::method::InvalidMethod> for RecorderError {
+    fn from(_error: http::method::InvalidMethod) -> Self {
+        Self::InvalidMethodError
+    }
+}
+
 pub type RecorderResult<T> = Result<T, RecorderError>;
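The new `HttpResponseError` variant replaces the hand-rolled `Serialize`/`Deserialize` impls and can be built either through the helper constructors or the `From` impls added above; a minimal sketch of hypothetical call sites (neither function below exists in the diff):

```rust
use http::{HeaderMap, StatusCode};

// Hypothetical helpers illustrating the two construction paths.
fn plain_error() -> RecorderError {
    // No headers attached.
    RecorderError::from_status(StatusCode::FORBIDDEN)
}

fn error_with_headers(headers: HeaderMap) -> RecorderError {
    // The tuple `From` impl carries upstream response headers along,
    // so `IntoResponse` can replay them to the client.
    RecorderError::from((StatusCode::FORBIDDEN, headers))
}
```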
@@ -1,3 +1,4 @@
+use chrono::{DateTime, Utc};
 use fancy_regex::Regex as FancyRegex;
 use lazy_static::lazy_static;
 use quirks_path::Path;
@@ -33,6 +34,14 @@ lazy_static! {
         Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
 }
 
+#[derive(Clone, Debug)]
+pub struct EpisodeEnclosureMeta {
+    pub magnet_link: Option<String>,
+    pub torrent_link: Option<String>,
+    pub pub_date: Option<DateTime<Utc>>,
+    pub content_length: Option<i64>,
+}
+
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
 pub struct TorrentEpisodeMediaMeta {
     pub fansub: Option<String>,
@@ -268,8 +277,8 @@ mod tests {
         )
     }
 
-    pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) {
-        let extname = Path::new(raw_name)
+    pub fn test_torrent_ep_parser(origin_name: &str, expected: &str) {
+        let extname = Path::new(origin_name)
             .extension()
             .map(|e| format!(".{e}"))
             .unwrap_or_default()
@@ -278,7 +287,7 @@ mod tests {
         if extname == ".srt" || extname == ".ass" {
             let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
             let found_raw =
-                parse_episode_subtitle_meta_from_torrent(Path::new(raw_name), None, None);
+                parse_episode_subtitle_meta_from_torrent(Path::new(origin_name), None, None);
             let found = found_raw.as_ref().ok().cloned();
 
             if expected != found {
@@ -299,7 +308,8 @@ mod tests {
             assert_eq!(expected, found);
         } else {
             let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
-            let found_raw = parse_episode_media_meta_from_torrent(Path::new(raw_name), None, None);
+            let found_raw =
+                parse_episode_media_meta_from_torrent(Path::new(origin_name), None, None);
             let found = found_raw.as_ref().ok().cloned();
 
             if expected != found {
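`EpisodeEnclosureMeta` is the new carrier for the enclosure fields of an RSS item (magnet/torrent links, publish date, size); downstream it is produced via `EpisodeEnclosureMeta::from(rss_item)`. A construction sketch with made-up values:

```rust
use chrono::Utc;

// Made-up values; real instances are converted from a MikanRssEpisodeItem.
let enclosure = EpisodeEnclosureMeta {
    magnet_link: Some("magnet:?xt=urn:btih:0123".into()),
    torrent_link: Some("https://mikanani.me/Download/example.torrent".into()),
    pub_date: Some(Utc::now()),
    content_length: Some(665_000_000),
};
```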
@@ -1,8 +1,5 @@
-use std::collections::HashMap;
-
 use fancy_regex::Regex as FancyRegex;
 use lazy_static::lazy_static;
-use maplit::hashmap;
 use regex::Regex;
 
 const LANG_ZH_TW: &str = "zh-tw";
@@ -34,40 +31,4 @@ lazy_static! {
         (LANG_JP, vec!["jp", "jpn", "日"]),
         ]
     };
-    pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
-    pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
-    pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
-        hashmap! {
-            "〇" => 0,
-            "一" => 1,
-            "二" => 2,
-            "三" => 3,
-            "四" => 4,
-            "五" => 5,
-            "六" => 6,
-            "七" => 7,
-            "八" => 8,
-            "九" => 9,
-            "十" => 10,
-            "廿" => 20,
-            "百" => 100,
-            "千" => 1000,
-            "零" => 0,
-            "壹" => 1,
-            "贰" => 2,
-            "叁" => 3,
-            "肆" => 4,
-            "伍" => 5,
-            "陆" => 6,
-            "柒" => 7,
-            "捌" => 8,
-            "玖" => 9,
-            "拾" => 10,
-            "念" => 20,
-            "佰" => 100,
-            "仟" => 1000,
-        }
-    };
-    pub static ref ZH_NUM_RE: Regex =
-        Regex::new(r"[〇一二三四五六七八九十廿百千零壹贰叁肆伍陆柒捌玖拾念佰仟]").unwrap();
 }

@@ -1,7 +1,12 @@
-use axum::http::{HeaderName, HeaderValue, Uri, header, request::Parts};
+use axum::{
+    extract::FromRequestParts,
+    http::{HeaderName, HeaderValue, Uri, header, request::Parts},
+};
 use itertools::Itertools;
 use url::Url;
 
+use crate::errors::RecorderError;
+
 /// Fields from a "Forwarded" header per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4)
 #[derive(Debug, Clone)]
 pub struct ForwardedHeader {
@@ -101,9 +106,13 @@ pub struct ForwardedRelatedInfo {
     pub origin: Option<String>,
 }
 
-impl ForwardedRelatedInfo {
-    pub fn from_request_parts(request_parts: &Parts) -> ForwardedRelatedInfo {
-        let headers = &request_parts.headers;
+impl<T> FromRequestParts<T> for ForwardedRelatedInfo {
+    type Rejection = RecorderError;
+    fn from_request_parts(
+        parts: &mut Parts,
+        _state: &T,
+    ) -> impl Future<Output = Result<Self, Self::Rejection>> + Send {
+        let headers = &parts.headers;
         let forwarded = headers
             .get(header::FORWARDED)
             .and_then(|s| ForwardedHeader::try_from(s.clone()).ok());
@@ -132,17 +141,19 @@ impl ForwardedRelatedInfo {
             .get(header::ORIGIN)
             .and_then(|s| s.to_str().map(String::from).ok());
 
-        ForwardedRelatedInfo {
+        futures::future::ready(Ok(ForwardedRelatedInfo {
             host,
             x_forwarded_for,
             x_forwarded_host,
             x_forwarded_proto,
             forwarded,
-            uri: request_parts.uri.clone(),
+            uri: parts.uri.clone(),
             origin,
-        }
+        }))
     }
+}
+
+impl ForwardedRelatedInfo {
     pub fn resolved_protocol(&self) -> Option<&str> {
         self.forwarded
             .as_ref()
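Because `ForwardedRelatedInfo` now implements axum's `FromRequestParts` (for any state type, rejecting with `RecorderError`), it can be pulled directly out of a handler signature instead of being built by hand; a minimal sketch with a hypothetical handler and route:

```rust
use axum::{Router, routing::get};

// Hypothetical handler: axum runs the extractor and converts the
// RecorderError rejection into a response automatically.
async fn whoami(info: ForwardedRelatedInfo) -> String {
    format!("protocol = {:?}", info.resolved_protocol())
}

fn router() -> Router {
    Router::new().route("/whoami", get(whoami))
}
```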
@@ -2,10 +2,6 @@ use url::Url;
 
 pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
     let mut image_url = base_url.join(image_src).ok()?;
-    if let Some((_, value)) = image_url.query_pairs().find(|(key, _)| key == "webp") {
-        image_url.set_query(Some(&format!("webp={value}")));
-    } else {
-        image_url.set_query(None);
-    }
+    image_url.set_query(None);
     Some(image_url)
 }
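The helper now strips the query string unconditionally instead of special-casing a `webp` parameter; a quick behavioral sketch (the poster path is made up):

```rust
use url::Url;

let base = Url::parse("https://mikanani.me/").unwrap();
// `?width=400&webp=1` is dropped entirely after this change.
let src = extract_image_src_from_str("/images/Bangumi/202504/poster.jpg?width=400&webp=1", &base);
assert_eq!(
    src.unwrap().as_str(),
    "https://mikanani.me/images/Bangumi/202504/poster.jpg"
);
```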
@@ -1,7 +1,8 @@
-use std::{fmt::Debug, ops::Deref, sync::Arc};
+use std::{fmt::Debug, ops::Deref};
 
 use fetch::{HttpClient, HttpClientTrait};
 use maplit::hashmap;
+use scraper::{Html, Selector};
 use sea_orm::{
     ActiveModelTrait, ActiveValue::Set, ColumnTrait, DbErr, EntityTrait, QueryFilter, TryIntoModel,
 };
@@ -68,50 +69,44 @@ impl MikanClient {
             message: "mikan login failed, credential required".to_string(),
             source: None.into(),
         })?;
 
         let login_page_url = {
             let mut u = self.base_url.join(MIKAN_LOGIN_PAGE_PATH)?;
             u.set_query(Some(MIKAN_LOGIN_PAGE_SEARCH));
             u
         };
 
-        // access login page to get antiforgery cookie
-        self.http_client
-            .get(login_page_url.clone())
-            .send()
-            .await
-            .map_err(|error| RecorderError::Credential3rdError {
-                message: "failed to get mikan login page".to_string(),
-                source: OptDynErr::some_boxed(error),
-            })?;
-
-        let antiforgery_cookie = {
-            let cookie_store_lock = self.http_client.cookie_store.clone().ok_or_else(|| {
-                RecorderError::Credential3rdError {
-                    message: "failed to get cookie store".to_string(),
-                    source: None.into(),
-                }
-            })?;
-            let cookie_store =
-                cookie_store_lock
-                    .read()
-                    .map_err(|_| RecorderError::Credential3rdError {
-                        message: "failed to read cookie store".to_string(),
-                        source: None.into(),
-                    })?;
-
-            cookie_store
-                .matches(&login_page_url)
-                .iter()
-                .find(|cookie| cookie.name().starts_with(".AspNetCore.Antiforgery."))
-                .map(|cookie| cookie.value().to_string())
-        }
-        .ok_or_else(|| RecorderError::Credential3rdError {
-            message: "mikan login failed, failed to get antiforgery cookie".to_string(),
-            source: None.into(),
-        })?;
+        let antiforgery_token = {
+            // access login page to get antiforgery cookie
+            let login_page_html = self
+                .http_client
+                .get(login_page_url.clone())
+                .send()
+                .await
+                .map_err(|error| RecorderError::Credential3rdError {
+                    message: "failed to get mikan login page".to_string(),
+                    source: OptDynErr::some_boxed(error),
+                })?
+                .text()
+                .await?;
+
+            let login_page_html = Html::parse_document(&login_page_html);
+
+            let antiforgery_selector =
+                Selector::parse("input[name='__RequestVerificationToken']").unwrap();
+
+            login_page_html
+                .select(&antiforgery_selector)
+                .next()
+                .and_then(|element| element.value().attr("value").map(|value| value.to_string()))
+                .ok_or_else(|| RecorderError::Credential3rdError {
+                    message: "mikan login failed, failed to get antiforgery token".to_string(),
+                    source: None.into(),
+                })
+        }?;
 
         let login_post_form = hashmap! {
-            "__RequestVerificationToken".to_string() => antiforgery_cookie,
+            "__RequestVerificationToken".to_string() => antiforgery_token,
             "UserName".to_string() => userpass_credential.username.clone(),
             "Password".to_string() => userpass_credential.password.clone(),
             "RememberMe".to_string() => "true".to_string(),
@@ -141,7 +136,7 @@ impl MikanClient {
 
     pub async fn submit_credential_form(
         &self,
-        ctx: Arc<dyn AppContextTrait>,
+        ctx: &dyn AppContextTrait,
         subscriber_id: i32,
         credential_form: MikanCredentialForm,
     ) -> RecorderResult<credential_3rd::Model> {
@@ -154,7 +149,7 @@ impl MikanClient {
             subscriber_id: Set(subscriber_id),
             ..Default::default()
         }
-        .try_encrypt(ctx.clone())
+        .try_encrypt(ctx)
         .await?;
 
         let credential: credential_3rd::Model = am.save(db).await?.try_into_model()?;
@@ -163,8 +158,9 @@ impl MikanClient {
 
     pub async fn sync_credential_cookies(
         &self,
-        ctx: Arc<dyn AppContextTrait>,
+        ctx: &dyn AppContextTrait,
         credential_id: i32,
+        subscriber_id: i32,
     ) -> RecorderResult<()> {
         let cookies = self.http_client.save_cookie_store_to_json()?;
         if let Some(cookies) = cookies {
@@ -172,26 +168,51 @@ impl MikanClient {
                 cookies: Set(Some(cookies)),
                 ..Default::default()
             }
-            .try_encrypt(ctx.clone())
+            .try_encrypt(ctx)
             .await?;
 
             credential_3rd::Entity::update_many()
                 .set(am)
                 .filter(credential_3rd::Column::Id.eq(credential_id))
+                .filter(credential_3rd::Column::SubscriberId.eq(subscriber_id))
                 .exec(ctx.db())
                 .await?;
         }
         Ok(())
     }
 
-    pub async fn fork_with_credential(
+    pub async fn fork_with_userpass_credential(
         &self,
-        ctx: Arc<dyn AppContextTrait>,
-        credential_id: i32,
+        userpass_credential: UserPassCredential,
     ) -> RecorderResult<Self> {
-        let mut fork = self.http_client.fork();
+        let mut fork = self
+            .http_client
+            .fork()
+            .attach_cookies(userpass_credential.cookies.as_deref())?;
 
-        let credential = credential_3rd::Model::find_by_id(ctx.clone(), credential_id).await?;
+        if let Some(user_agent) = userpass_credential.user_agent.as_ref() {
+            fork = fork.attach_user_agent(user_agent);
+        }
+
+        let userpass_credential_opt = Some(userpass_credential);
+
+        Ok(Self {
+            http_client: HttpClient::from_fork(fork)?,
+            base_url: self.base_url.clone(),
+            origin_url: self.origin_url.clone(),
+            userpass_credential: userpass_credential_opt,
+        })
+    }
+
+    pub async fn fork_with_credential_id(
+        &self,
+        ctx: &dyn AppContextTrait,
+        credential_id: i32,
+        subscriber_id: i32,
+    ) -> RecorderResult<Self> {
+        let credential =
+            credential_3rd::Model::find_by_id_and_subscriber_id(ctx, credential_id, subscriber_id)
+                .await?;
         if let Some(credential) = credential {
             if credential.credential_type != Credential3rdType::Mikan {
                 return Err(RecorderError::Credential3rdError {
@@ -203,20 +224,8 @@ impl MikanClient {
             let userpass_credential: UserPassCredential =
                 credential.try_into_userpass_credential(ctx)?;
 
-            fork = fork.attach_cookies(userpass_credential.cookies.as_deref())?;
-
-            if let Some(user_agent) = userpass_credential.user_agent.as_ref() {
-                fork = fork.attach_user_agent(user_agent);
-            }
-
-            let userpass_credential_opt = Some(userpass_credential);
-
-            Ok(Self {
-                http_client: HttpClient::from_fork(fork)?,
-                base_url: self.base_url.clone(),
-                origin_url: self.origin_url.clone(),
-                userpass_credential: userpass_credential_opt,
-            })
+            self.fork_with_userpass_credential(userpass_credential)
+                .await
         } else {
             Err(RecorderError::from_db_record_not_found(
                 DbErr::RecordNotFound(format!("credential={credential_id} not found")),
@@ -246,7 +255,7 @@ impl HttpClientTrait for MikanClient {}
 #[cfg(test)]
 mod tests {
     #![allow(unused_variables)]
-    use std::assert_matches::assert_matches;
+    use std::{assert_matches::assert_matches, sync::Arc};
 
     use rstest::{fixture, rstest};
     use tracing::Level;
@@ -294,8 +303,10 @@ mod tests {
 
         let credential_form = build_testing_mikan_credential_form();
 
+        let subscriber_id = 1;
+
         let credential_model = mikan_client
-            .submit_credential_form(app_ctx.clone(), 1, credential_form.clone())
+            .submit_credential_form(app_ctx.as_ref(), subscriber_id, credential_form.clone())
             .await?;
 
         let expected_username = &credential_form.username;
@@ -319,7 +330,7 @@ mod tests {
         );
 
         let mikan_client = mikan_client
-            .fork_with_credential(app_ctx.clone(), credential_model.id)
+            .fork_with_credential_id(app_ctx.as_ref(), credential_model.id, subscriber_id)
             .await?;
 
         mikan_client.login().await?;
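The login flow now reads the antiforgery token from the hidden form input on the login page with `scraper`, rather than digging it out of the cookie store. A standalone sketch of the same extraction; the HTML fragment is made up:

```rust
use scraper::{Html, Selector};

// Made-up login-page fragment; Mikan embeds the token as a hidden input.
let html = Html::parse_document(
    r#"<form><input name="__RequestVerificationToken" value="tok123"></form>"#,
);
let selector = Selector::parse("input[name='__RequestVerificationToken']").unwrap();
let token = html
    .select(&selector)
    .next()
    .and_then(|el| el.value().attr("value"))
    .expect("token should be present");
assert_eq!(token, "tok123");
```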
@@ -2,7 +2,7 @@ use fetch::HttpClientConfig;
 use serde::{Deserialize, Serialize};
 use url::Url;
 
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct MikanConfig {
     pub http_client: HttpClientConfig,
     pub base_url: Url,

@@ -5,4 +5,16 @@ pub const MIKAN_LOGIN_PAGE_PATH: &str = "/Account/Login";
 pub const MIKAN_LOGIN_PAGE_SEARCH: &str = "ReturnUrl=%2F";
 pub const MIKAN_ACCOUNT_MANAGE_PAGE_PATH: &str = "/Account/Manage";
 pub const MIKAN_SEASON_FLOW_PAGE_PATH: &str = "/Home/BangumiCoverFlow";
+pub const MIKAN_BANGUMI_HOMEPAGE_PATH: &str = "/Home/Bangumi";
 pub const MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH: &str = "/Home/ExpandBangumi";
+pub const MIKAN_EPISODE_HOMEPAGE_PATH: &str = "/Home/Episode";
+pub const MIKAN_BANGUMI_POSTER_PATH: &str = "/images/Bangumi";
+pub const MIKAN_EPISODE_TORRENT_PATH: &str = "/Download";
+pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH: &str = "/RSS/MyBangumi";
+pub const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi";
+pub const MIKAN_FANSUB_HOMEPAGE_PATH: &str = "/Home/PublishGroup";
+pub const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId";
+pub const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid";
+pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY: &str = "token";
+pub const MIKAN_SEASON_STR_QUERY_KEY: &str = "seasonStr";
+pub const MIKAN_YEAR_QUERY_KEY: &str = "year";
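These constants centralize the Mikan paths and query keys that the URL builders assemble; a minimal sketch of building a bangumi subscription RSS URL from them (the IDs come from the old test fixtures):

```rust
use url::Url;

let base = Url::parse("https://mikanani.me/").unwrap();
let mut rss_url = base.join(MIKAN_BANGUMI_RSS_PATH).unwrap();
rss_url
    .query_pairs_mut()
    .append_pair(MIKAN_BANGUMI_ID_QUERY_KEY, "3141")
    .append_pair(MIKAN_FANSUB_ID_QUERY_KEY, "370");
assert_eq!(
    rss_url.as_str(),
    "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370"
);
```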
@@ -9,8 +9,13 @@ pub use client::MikanClient;
 pub use config::MikanConfig;
 pub use constants::{
     MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH,
-    MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH, MIKAN_POSTER_BUCKET_KEY,
-    MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_UNKNOWN_FANSUB_ID, MIKAN_UNKNOWN_FANSUB_NAME,
+    MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH,
+    MIKAN_BANGUMI_RSS_PATH, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_EPISODE_TORRENT_PATH,
+    MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH,
+    MIKAN_LOGIN_PAGE_SEARCH, MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH,
+    MIKAN_SEASON_STR_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH,
+    MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_UNKNOWN_FANSUB_ID,
+    MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
 };
 pub use credential::MikanCredentialForm;
 pub use subscription::{
@@ -18,14 +23,16 @@ pub use subscription::{
 };
 pub use web::{
     MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
-    MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssItem,
-    MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionRssUrlMeta,
-    build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url,
-    build_mikan_bangumi_subscription_rss_url, build_mikan_episode_homepage_url,
-    build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
+    MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanFansubHash,
+    MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
+    MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_expand_subscribed_url,
+    build_mikan_bangumi_homepage_url, build_mikan_bangumi_subscription_rss_url,
+    build_mikan_episode_homepage_url, build_mikan_season_flow_url,
+    build_mikan_subscriber_subscription_rss_url,
     extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
     extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
     extract_mikan_episode_meta_from_episode_homepage_html,
+    scrape_mikan_bangumi_index_meta_from_bangumi_homepage_url,
     scrape_mikan_bangumi_meta_from_bangumi_homepage_url,
     scrape_mikan_bangumi_meta_list_from_season_flow_url,
     scrape_mikan_bangumi_meta_stream_from_season_flow_url,

@@ -5,6 +5,7 @@ use std::{
 };
 
 use async_graphql::{InputObject, SimpleObject};
+use async_stream::try_stream;
 use fetch::fetch_bytes;
 use futures::{Stream, TryStreamExt, pin_mut, try_join};
 use maplit::hashmap;
@@ -19,12 +20,15 @@ use super::scrape_mikan_bangumi_meta_stream_from_season_flow_url;
 use crate::{
     app::AppContextTrait,
     errors::{RecorderError, RecorderResult},
-    extract::mikan::{
-        MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssItem,
-        MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionRssUrlMeta,
-        build_mikan_bangumi_subscription_rss_url, build_mikan_season_flow_url,
-        build_mikan_subscriber_subscription_rss_url,
-        scrape_mikan_episode_meta_from_episode_homepage_url,
+    extract::{
+        bittorrent::EpisodeEnclosureMeta,
+        mikan::{
+            MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
+            MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
+            MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
+            build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
+            scrape_mikan_episode_meta_from_episode_homepage_url,
+        },
     },
     models::{
         bangumi, episodes, subscription_bangumi, subscription_episode,
@@ -35,10 +39,11 @@ use crate::{
 #[tracing::instrument(err, skip(ctx, rss_item_list))]
 async fn sync_mikan_feeds_from_rss_item_list(
     ctx: &dyn AppContextTrait,
-    rss_item_list: Vec<MikanRssItem>,
+    rss_item_list: Vec<MikanRssEpisodeItem>,
     subscriber_id: i32,
     subscription_id: i32,
 ) -> RecorderResult<()> {
+    let mikan_base_url = ctx.mikan().base_url().clone();
     let (new_episode_meta_list, existed_episode_hash2id_map) = {
         let existed_episode_hash2id_map = episodes::Model::get_existed_mikan_episode_list(
             ctx,
@@ -52,7 +57,7 @@ async fn sync_mikan_feeds_from_rss_item_list(
         .map(|(episode_id, hash, bangumi_id)| (hash.mikan_episode_id, (episode_id, bangumi_id)))
         .collect::<HashMap<_, _>>();
 
-        let mut new_episode_meta_list: Vec<MikanEpisodeMeta> = vec![];
+        let mut new_episode_meta_list: Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)> = vec![];
 
         let mikan_client = ctx.mikan();
         for to_insert_rss_item in rss_item_list.into_iter().filter(|rss_item| {
@@ -60,10 +65,11 @@ async fn sync_mikan_feeds_from_rss_item_list(
         }) {
             let episode_meta = scrape_mikan_episode_meta_from_episode_homepage_url(
                 mikan_client,
-                to_insert_rss_item.homepage,
+                to_insert_rss_item.build_homepage_url(mikan_base_url.clone()),
             )
             .await?;
-            new_episode_meta_list.push(episode_meta);
+            let episode_enclosure_meta = EpisodeEnclosureMeta::from(to_insert_rss_item);
+            new_episode_meta_list.push((episode_meta, episode_enclosure_meta));
         }
 
         (new_episode_meta_list, existed_episode_hash2id_map)
@@ -90,22 +96,22 @@ async fn sync_mikan_feeds_from_rss_item_list(
 
     let new_episode_meta_list_group_by_bangumi_hash: HashMap<
         MikanBangumiHash,
-        Vec<MikanEpisodeMeta>,
+        Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)>,
     > = {
         let mut m = hashmap! {};
-        for episode_meta in new_episode_meta_list {
+        for (episode_meta, episode_enclosure_meta) in new_episode_meta_list {
             let bangumi_hash = episode_meta.bangumi_hash();
 
             m.entry(bangumi_hash)
                 .or_insert_with(Vec::new)
-                .push(episode_meta);
+                .push((episode_meta, episode_enclosure_meta));
         }
         m
     };
 
     for (group_bangumi_hash, group_episode_meta_list) in new_episode_meta_list_group_by_bangumi_hash
     {
-        let first_episode_meta = group_episode_meta_list.first().unwrap();
+        let (first_episode_meta, _) = group_episode_meta_list.first().unwrap();
         let group_bangumi_model = bangumi::Model::get_or_insert_from_mikan(
             ctx,
             group_bangumi_hash,
@@ -124,9 +130,12 @@ async fn sync_mikan_feeds_from_rss_item_list(
             },
         )
         .await?;
-        let group_episode_creation_list = group_episode_meta_list
-            .into_iter()
-            .map(|episode_meta| (&group_bangumi_model, episode_meta));
+        let group_episode_creation_list =
+            group_episode_meta_list
+                .into_iter()
+                .map(|(episode_meta, episode_enclosure_meta)| {
+                    (&group_bangumi_model, episode_meta, episode_enclosure_meta)
+                });
 
         episodes::Model::add_mikan_episodes_for_subscription(
             ctx,
@@ -141,7 +150,7 @@ async fn sync_mikan_feeds_from_rss_item_list(
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub struct MikanSubscriberSubscription {
-    pub id: i32,
+    pub subscription_id: i32,
     pub mikan_subscription_token: String,
     pub subscriber_id: i32,
 }
@@ -153,7 +162,7 @@ impl SubscriptionTrait for MikanSubscriberSubscription {
     }
 
     fn get_subscription_id(&self) -> i32 {
-        self.id
+        self.subscription_id
     }
 
     async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
@@ -203,7 +212,7 @@ impl SubscriptionTrait for MikanSubscriberSubscription {
         })?;
 
         Ok(Self {
-            id: model.id,
+            subscription_id: model.id,
             mikan_subscription_token: meta.mikan_subscription_token,
             subscriber_id: model.subscriber_id,
         })
@@ -215,7 +224,7 @@ impl MikanSubscriberSubscription {
     async fn get_rss_item_list_from_source_url(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssItem>> {
+    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
         let mikan_base_url = ctx.mikan().base_url().clone();
         let rss_url = build_mikan_subscriber_subscription_rss_url(
             mikan_base_url.clone(),
@@ -227,7 +236,7 @@ impl MikanSubscriberSubscription {
 
         let mut result = vec![];
         for (idx, item) in channel.items.into_iter().enumerate() {
-            let item = MikanRssItem::try_from(item)
+            let item = MikanRssEpisodeItem::try_from(item)
                 .with_whatever_context::<_, String, RecorderError>(|_| {
                     format!("failed to extract rss item at idx {idx}")
                 })?;
@@ -240,9 +249,10 @@ impl MikanSubscriberSubscription {
     async fn get_rss_item_list_from_subsribed_url_rss_link(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssItem>> {
+    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
         let subscribed_bangumi_list =
-            bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.id).await?;
+            bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.subscription_id)
+                .await?;
 
         let mut rss_item_list = vec![];
         for subscribed_bangumi in subscribed_bangumi_list {
@@ -251,7 +261,7 @@ impl MikanSubscriberSubscription {
                 .with_whatever_context::<_, String, RecorderError>(|| {
                     format!(
                         "rss link is required, subscription_id = {:?}, bangumi_name = {}",
-                        self.id, subscribed_bangumi.display_name
+                        self.subscription_id, subscribed_bangumi.display_name
                     )
                 })?;
             let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
@@ -259,7 +269,7 @@ impl MikanSubscriberSubscription {
             let channel = rss::Channel::read_from(&bytes[..])?;
 
             for (idx, item) in channel.items.into_iter().enumerate() {
-                let item = MikanRssItem::try_from(item)
+                let item = MikanRssEpisodeItem::try_from(item)
                     .with_whatever_context::<_, String, RecorderError>(|_| {
                         format!("failed to extract rss item at idx {idx}")
                     })?;
@@ -270,9 +280,9 @@ impl MikanSubscriberSubscription {
     }
 }
 
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)]
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub struct MikanSeasonSubscription {
-    pub id: i32,
+    pub subscription_id: i32,
     pub year: i32,
     pub season_str: MikanSeasonStr,
     pub credential_id: i32,
@@ -286,21 +296,23 @@ impl SubscriptionTrait for MikanSeasonSubscription {
     }
 
     fn get_subscription_id(&self) -> i32 {
-        self.id
+        self.subscription_id
     }
 
     async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        let rss_item_list = self
-            .get_rss_item_list_from_subsribed_url_rss_link(ctx.as_ref())
-            .await?;
-
-        sync_mikan_feeds_from_rss_item_list(
-            ctx.as_ref(),
-            rss_item_list,
-            self.get_subscriber_id(),
-            self.get_subscription_id(),
-        )
-        .await?;
+        let rss_item_stream = self.get_rss_item_stream_from_subsribed_url_rss_link(ctx.as_ref());
+
+        pin_mut!(rss_item_stream);
+
+        while let Some(rss_item_chunk_list) = rss_item_stream.try_next().await? {
+            sync_mikan_feeds_from_rss_item_list(
+                ctx.as_ref(),
+                rss_item_chunk_list,
+                self.get_subscriber_id(),
+                self.get_subscription_id(),
+            )
+            .await?;
+        }
 
         Ok(())
     }
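Season sync now consumes the per-bangumi RSS feeds as a stream of chunks (`pin_mut!` plus `try_next`) so each chunk is persisted as soon as it is fetched, instead of buffering every feed into one `Vec` first. A self-contained sketch of the same `async_stream` pattern; the item and error types are simplified placeholders:

```rust
use async_stream::try_stream;
use futures::{Stream, TryStreamExt, pin_mut};

// Placeholder chunk producer standing in for the per-bangumi RSS fetches.
fn chunk_stream() -> impl Stream<Item = Result<Vec<u32>, std::io::Error>> {
    try_stream! {
        for chunk in [vec![1, 2], vec![3]] {
            // Each `yield` hands one chunk to the consumer immediately.
            yield chunk;
        }
    }
}

#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
    let stream = chunk_stream();
    pin_mut!(stream); // `try_next` requires a pinned stream.
    while let Some(chunk) = stream.try_next().await? {
        println!("processing chunk of {} items", chunk.len());
    }
    Ok(())
}
```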
@@ -362,7 +374,7 @@ impl SubscriptionTrait for MikanSeasonSubscription {
         })?;
 
         Ok(Self {
-            id: model.id,
+            subscription_id: model.id,
            year: source_url_meta.year,
             season_str: source_url_meta.season_str,
             credential_id,
@@ -387,54 +399,63 @@ impl MikanSeasonSubscription {
             ctx,
             mikan_season_flow_url,
             credential_id,
+            self.get_subscriber_id(),
         )
     }
 
-    #[tracing::instrument(err, skip(ctx))]
-    async fn get_rss_item_list_from_subsribed_url_rss_link(
-        &self,
-        ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssItem>> {
-        let db = ctx.db();
-
-        let subscribed_bangumi_list = bangumi::Entity::find()
-            .filter(Condition::all().add(subscription_bangumi::Column::SubscriptionId.eq(self.id)))
-            .join_rev(
-                JoinType::InnerJoin,
-                subscription_bangumi::Relation::Bangumi.def(),
-            )
-            .all(db)
-            .await?;
-
-        let mut rss_item_list = vec![];
-        for subscribed_bangumi in subscribed_bangumi_list {
-            let rss_url = subscribed_bangumi
-                .rss_link
-                .with_whatever_context::<_, String, RecorderError>(|| {
-                    format!(
-                        "rss_link is required, subscription_id = {}, bangumi_name = {}",
-                        self.id, subscribed_bangumi.display_name
-                    )
-                })?;
-            let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
-
-            let channel = rss::Channel::read_from(&bytes[..])?;
-
-            for (idx, item) in channel.items.into_iter().enumerate() {
-                let item = MikanRssItem::try_from(item)
-                    .with_whatever_context::<_, String, RecorderError>(|_| {
-                        format!("failed to extract rss item at idx {idx}")
-                    })?;
-                rss_item_list.push(item);
-            }
-        }
-        Ok(rss_item_list)
-    }
+    fn get_rss_item_stream_from_subsribed_url_rss_link(
+        &self,
+        ctx: &dyn AppContextTrait,
+    ) -> impl Stream<Item = RecorderResult<Vec<MikanRssEpisodeItem>>> {
+        try_stream! {
+            let db = ctx.db();
+
+            let subscribed_bangumi_list = bangumi::Entity::find()
+                .filter(
+                    Condition::all()
+                        .add(subscription_bangumi::Column::SubscriptionId.eq(self.subscription_id)),
+                )
+                .join_rev(
+                    JoinType::InnerJoin,
+                    subscription_bangumi::Relation::Bangumi.def(),
+                )
+                .all(db)
+                .await?;
+
+            for subscribed_bangumi in subscribed_bangumi_list {
+                let rss_url = subscribed_bangumi
+                    .rss_link
+                    .with_whatever_context::<_, String, RecorderError>(|| {
+                        format!(
+                            "rss_link is required, subscription_id = {}, bangumi_name = {}",
+                            self.subscription_id, subscribed_bangumi.display_name
+                        )
+                    })?;
+                let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
+
+                let channel = rss::Channel::read_from(&bytes[..])?;
+
+                let mut rss_item_list = vec![];
+
+                for (idx, item) in channel.items.into_iter().enumerate() {
+                    let item = MikanRssEpisodeItem::try_from(item)
+                        .with_whatever_context::<_, String, RecorderError>(|_| {
+                            format!("failed to extract rss item at idx {idx}")
+                        })?;
+                    rss_item_list.push(item);
+                }
+
+                yield rss_item_list;
+            }
+        }
+    }
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)]
 pub struct MikanBangumiSubscription {
-    pub id: i32,
+    pub subscription_id: i32,
     pub mikan_bangumi_id: String,
     pub mikan_fansub_id: String,
     pub subscriber_id: i32,
@@ -447,7 +468,7 @@ impl SubscriptionTrait for MikanBangumiSubscription {
     }
 
     fn get_subscription_id(&self) -> i32 {
-        self.id
+        self.subscription_id
     }
 
     async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
@@ -485,7 +506,7 @@ impl SubscriptionTrait for MikanBangumiSubscription {
         })?;
 
         Ok(Self {
-            id: model.id,
+            subscription_id: model.id,
             mikan_bangumi_id: meta.mikan_bangumi_id,
             mikan_fansub_id: meta.mikan_fansub_id,
             subscriber_id: model.subscriber_id,
@@ -498,7 +519,7 @@ impl MikanBangumiSubscription {
     async fn get_rss_item_list_from_source_url(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssItem>> {
+    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
         let mikan_base_url = ctx.mikan().base_url().clone();
         let rss_url = build_mikan_bangumi_subscription_rss_url(
             mikan_base_url.clone(),
@@ -511,7 +532,7 @@ impl MikanBangumiSubscription {
 
         let mut result = vec![];
         for (idx, item) in channel.items.into_iter().enumerate() {
-            let item = MikanRssItem::try_from(item)
+            let item = MikanRssEpisodeItem::try_from(item)
                 .with_whatever_context::<_, String, RecorderError>(|_| {
                     format!("failed to extract rss item at idx {idx}")
                 })?;
@@ -521,106 +542,237 @@ impl MikanBangumiSubscription {
         }
     }
 }
 
-// #[cfg(test)]
-// mod tests {
-//     use std::assert_matches::assert_matches;
-
-//     use downloader::bittorrent::BITTORRENT_MIME_TYPE;
-//     use rstest::rstest;
-//     use url::Url;
-
-//     use crate::{
-//         errors::RecorderResult,
-//         extract::mikan::{
-//             MikanBangumiIndexRssChannel, MikanBangumiRssChannel,
-//             MikanRssChannel, build_mikan_bangumi_subscription_rss_url,
-//             extract_mikan_rss_channel_from_rss_link, },
-//         test_utils::mikan::build_testing_mikan_client,
-//     };
-
-//     #[rstest]
-//     #[tokio::test]
-//     async fn test_parse_mikan_rss_channel_from_rss_link() ->
-//     RecorderResult<()> { let mut mikan_server =
-//     mockito::Server::new_async().await;
-
-//         let mikan_base_url = Url::parse(&mikan_server.url())?;
-
-//         let mikan_client =
-//         build_testing_mikan_client(mikan_base_url.clone()).await?;
-
-//         {
-//             let bangumi_rss_url = build_mikan_bangumi_subscription_rss_url(
-//                 mikan_base_url.clone(),
-//                 "3141",
-//                 Some("370"),
-//             );
-
-//             let bangumi_rss_mock = mikan_server
-//                 .mock("GET", bangumi_rss_url.path())
-//                 .with_body_from_file("tests/resources/mikan/Bangumi-3141-370.rss")
-//                 .match_query(mockito::Matcher::Any)
-//                 .create_async()
-//                 .await;
-
-//             let channel =
-//             scrape_mikan_rss_channel_from_rss_link(&mikan_client, bangumi_rss_url)
-//                 .await
-//                 .expect("should get mikan channel from rss url");
-
-//             assert_matches!(
-//                 &channel,
-//                 MikanRssChannel::Bangumi(MikanBangumiRssChannel { .. })
-//             );
-
-//             assert_matches!(&channel.name(), Some("葬送的芙莉莲"));
-
-//             let items = channel.items();
-//             let first_sub_item = items
-//                 .first()
-//                 .expect("mikan subscriptions should have at least one subs");
-
-//             assert_eq!(first_sub_item.mime, BITTORRENT_MIME_TYPE);
-
-//             assert!(
-//                 &first_sub_item
-//                     .homepage
-//                     .as_str()
-//                     .starts_with("https://mikanani.me/Home/Episode")
-//             );
-
-//             let name = first_sub_item.title.as_str();
-//             assert!(name.contains("葬送的芙莉莲"));
-
-//             bangumi_rss_mock.expect(1);
-//         }
-//         {
-//             let bangumi_rss_url =
-//             mikan_base_url.join("/RSS/Bangumi?bangumiId=3416")?;
-
-//             let bangumi_rss_mock = mikan_server
-//                 .mock("GET", bangumi_rss_url.path())
-//                 .match_query(mockito::Matcher::Any)
-//                 .with_body_from_file("tests/resources/mikan/Bangumi-3416.rss")
-//                 .create_async()
-//                 .await;
-
-//             let channel =
-//             scrape_mikan_rss_channel_from_rss_link(&mikan_client, bangumi_rss_url)
-//                 .await
-//                 .expect("should get mikan channel from rss url");
-
-//             assert_matches!(
-//                 &channel,
-//                 MikanRssChannel::BangumiIndex(MikanBangumiIndexRssChannel {
-//                 .. }) );
-
-//             assert_matches!(&channel.name(), Some("叹气的亡灵想隐退"));
-
-//             bangumi_rss_mock.expect(1);
-//         }
-//         Ok(())
-//     }
-// }
+#[cfg(test)]
+#[allow(unused_variables)]
+mod tests {
+    use std::sync::Arc;
+
+    use rstest::{fixture, rstest};
+    use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait};
+    use tracing::Level;
+
+    use crate::{
+        app::AppContextTrait,
+        errors::RecorderResult,
+        extract::mikan::{
+            MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
+            MikanSubscriberSubscriptionRssUrlMeta,
+        },
+        models::{
+            bangumi, episodes,
+            subscriptions::{self, SubscriptionTrait},
+        },
+        test_utils::{
+            app::{TestingAppContext, TestingAppContextPreset},
+            mikan::{MikanMockServer, build_testing_mikan_credential_form},
+            tracing::try_init_testing_tracing,
+        },
+    };
+
+    struct TestingResources {
+        pub app_ctx: Arc<dyn AppContextTrait>,
+        pub mikan_server: MikanMockServer,
+    }
+
+    async fn build_testing_app_context() -> RecorderResult<TestingResources> {
+        let mikan_server = MikanMockServer::new().await?;
+
+        let mikan_base_url = mikan_server.base_url().clone();
+
+        let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
+            mikan_base_url: mikan_base_url.to_string(),
+            database_config: None,
+        })
+        .await?;
+
+        Ok(TestingResources {
+            app_ctx,
+            mikan_server,
+        })
+    }
+
+    #[fixture]
+    fn before_each() {
+        try_init_testing_tracing(Level::DEBUG);
+    }
+
+    #[rstest]
+    #[tokio::test]
+    async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
+        let TestingResources {
+            app_ctx,
+            mut mikan_server,
+        } = build_testing_app_context().await?;
+
+        let _resources_mock = mikan_server.mock_resources_with_doppel();
+
+        let _login_mock = mikan_server.mock_get_login_page();
+
+        let mikan_client = app_ctx.mikan();
+
+        let subscriber_id = 1;
+
+        let credential = mikan_client
+            .submit_credential_form(
+                app_ctx.as_ref(),
+                subscriber_id,
+                build_testing_mikan_credential_form(),
+            )
+            .await?;
+
+        let subscription_am = subscriptions::ActiveModel {
+            display_name: ActiveValue::Set("test subscription".to_string()),
+            subscriber_id: ActiveValue::Set(subscriber_id),
+            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSeason),
+            source_url: ActiveValue::Set(
+                MikanSeasonFlowUrlMeta {
+                    year: 2025,
+                    season_str: MikanSeasonStr::Spring,
+                }
+                .build_season_flow_url(mikan_server.base_url().clone())
+                .to_string(),
+            ),
+            enabled: ActiveValue::Set(true),
+            credential_id: ActiveValue::Set(Some(credential.id)),
+            ..Default::default()
+        };
+
+        let subscription_model = subscription_am.insert(app_ctx.db()).await?;
+
+        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;
+
+        {
+            subscription.sync_feeds_incremental(app_ctx.clone()).await?;
+            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
+
+            assert!(bangumi_list.is_empty());
+        }
+
+        {
+            subscription.sync_feeds_full(app_ctx.clone()).await?;
+            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
+
+            assert!(!bangumi_list.is_empty());
+        }
+
+        Ok(())
+    }
+
+    #[rstest]
+    #[tokio::test]
+    async fn test_mikan_subscriber_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
+        let TestingResources {
+            app_ctx,
+            mut mikan_server,
+        } = build_testing_app_context().await?;
+
+        let _resources_mock = mikan_server.mock_resources_with_doppel();
+
+        let _login_mock = mikan_server.mock_get_login_page();
+
+        let subscriber_id = 1;
+
+        let subscription_am = subscriptions::ActiveModel {
+            display_name: ActiveValue::Set("test subscription".to_string()),
+            subscriber_id: ActiveValue::Set(subscriber_id),
+            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSubscriber),
+            source_url: ActiveValue::Set(
+                MikanSubscriberSubscriptionRssUrlMeta {
+                    mikan_subscription_token: "test".into(),
+                }
+                .build_rss_url(mikan_server.base_url().clone())
+                .to_string(),
+            ),
+            enabled: ActiveValue::Set(true),
+            ..Default::default()
+        };
+
+        let subscription_model = subscription_am.insert(app_ctx.db()).await?;
+
+        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;
+
+        let (incremental_bangumi_list, incremental_episode_list) = {
+            subscription.sync_feeds_incremental(app_ctx.clone()).await?;
+
+            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
+
+            assert!(!bangumi_list.is_empty());
+
+            let episode_list = episodes::Entity::find().all(app_ctx.db()).await?;
+
+            assert!(!episode_list.is_empty());
+
+            (bangumi_list, episode_list)
+        };
+
+        let (full_bangumi_list, full_episode_list) = {
+            subscription.sync_feeds_full(app_ctx.clone()).await?;
|
||||||
|
|
||||||
|
let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
|
||||||
|
|
||||||
|
assert!(!bangumi_list.is_empty());
|
||||||
|
|
||||||
|
let episode_list = episodes::Entity::find().all(app_ctx.db()).await?;
|
||||||
|
|
||||||
|
assert!(!episode_list.is_empty());
|
||||||
|
|
||||||
|
(bangumi_list, episode_list)
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(incremental_bangumi_list.len(), full_bangumi_list.len());
|
||||||
|
assert!(incremental_episode_list.len() < full_episode_list.len());
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
|
||||||
|
let TestingResources {
|
||||||
|
app_ctx,
|
||||||
|
mut mikan_server,
|
||||||
|
} = build_testing_app_context().await?;
|
||||||
|
|
||||||
|
let _resources_mock = mikan_server.mock_resources_with_doppel();
|
||||||
|
|
||||||
|
let _login_mock = mikan_server.mock_get_login_page();
|
||||||
|
|
||||||
|
let subscriber_id = 1;
|
||||||
|
|
||||||
|
let subscription_am = subscriptions::ActiveModel {
|
||||||
|
display_name: ActiveValue::Set("test subscription".to_string()),
|
||||||
|
subscriber_id: ActiveValue::Set(subscriber_id),
|
||||||
|
category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanBangumi),
|
||||||
|
source_url: ActiveValue::Set(
|
||||||
|
MikanBangumiHash {
|
||||||
|
mikan_bangumi_id: "3600".into(),
|
||||||
|
mikan_fansub_id: "370".into(),
|
||||||
|
}
|
||||||
|
.build_rss_url(mikan_server.base_url().clone())
|
||||||
|
.to_string(),
|
||||||
|
),
|
||||||
|
enabled: ActiveValue::Set(true),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let subscription_model = subscription_am.insert(app_ctx.db()).await?;
|
||||||
|
|
||||||
|
let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;
|
||||||
|
|
||||||
|
{
|
||||||
|
subscription.sync_feeds_incremental(app_ctx.clone()).await?;
|
||||||
|
let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
|
||||||
|
|
||||||
|
assert!(!bangumi_list.is_empty());
|
||||||
|
};
|
||||||
|
|
||||||
|
{
|
||||||
|
subscription.sync_feeds_full(app_ctx.clone()).await?;
|
||||||
|
let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
|
||||||
|
|
||||||
|
assert!(!bangumi_list.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
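The three added tests share one skeleton; a condensed sketch of that shape (all names taken from the code above, only the subscription category and source-URL builder vary between the tests):

#[rstest]
#[tokio::test]
async fn sync_feeds_test_skeleton(before_each: ()) -> RecorderResult<()> {
    let TestingResources { app_ctx, mut mikan_server } = build_testing_app_context().await?;
    // Mock the remote Mikan endpoints before any scraping happens.
    let _resources_mock = mikan_server.mock_resources_with_doppel();
    let _login_mock = mikan_server.mock_get_login_page();
    // Insert a subscriptions::ActiveModel, wrap it with try_from_model, then
    // compare the rows produced by sync_feeds_incremental against
    // sync_feeds_full on the bangumi/episodes entities.
    Ok(())
}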
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
+pub mod bittorrent;
 pub mod defs;
 pub mod html;
 pub mod http;
 pub mod media;
 pub mod mikan;
-pub mod rawname;
-pub mod bittorrent;
+pub mod origin;
apps/recorder/src/extract/origin/mod.rs (new file, 1479 lines)
File diff suppressed because it is too large
@@ -1,5 +0,0 @@
pub mod parser;

pub use parser::{
    extract_season_from_title_body, parse_episode_meta_from_raw_name, RawEpisodeMeta,
};
@@ -1,845 +0,0 @@
|
|||||||
/**
|
|
||||||
* @TODO: rewrite with nom
|
|
||||||
*/
|
|
||||||
use std::borrow::Cow;
|
|
||||||
|
|
||||||
use itertools::Itertools;
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
use regex::Regex;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use snafu::whatever;
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
errors::RecorderResult,
|
|
||||||
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
|
|
||||||
};
|
|
||||||
|
|
||||||
const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref TITLE_RE: Regex = Regex::new(
|
|
||||||
r#"(.*|\[.*])( -? \d+|\[\d+]|\[\d+.?[vV]\d]|第\d+[话話集]|\[第?\d+[话話集]]|\[\d+.?END]|[Ee][Pp]?\d+|\[\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*[话話集]\s*])(.*)"#
|
|
||||||
).unwrap();
|
|
||||||
static ref EP_COLLECTION_RE:Regex = Regex::new(r#"\[?\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*合?[话話集]\s*]?"#).unwrap();
|
|
||||||
static ref MOVIE_TITLE_RE:Regex = Regex::new(r#"(.*|\[.*])(剧场版|[Mm]ovie|电影)(.*?)$"#).unwrap();
|
|
||||||
static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
|
|
||||||
static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap();
|
|
||||||
static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap();
|
|
||||||
static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
|
|
||||||
static ref PREFIX_RE: Regex =
|
|
||||||
Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
|
|
||||||
static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
|
|
||||||
static ref MOVIE_SEASON_EXTRACT_RE: Regex = Regex::new(r"剧场版|Movie|电影").unwrap();
|
|
||||||
static ref MAIN_TITLE_PREFIX_PROCESS_RE1: Regex = Regex::new(r"新番|月?番").unwrap();
|
|
||||||
static ref MAIN_TITLE_PREFIX_PROCESS_RE2: Regex = Regex::new(r"[港澳台]{1,3}地区").unwrap();
|
|
||||||
static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE: Regex = Regex::new(r"\[.+\]").unwrap();
|
|
||||||
static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1: Regex = Regex::new(r"^.*?\[").unwrap();
|
|
||||||
static ref SEASON_EXTRACT_SEASON_ALL_RE: Regex = Regex::new(r"S\d{1,2}|Season \d{1,2}|[第].[季期]|1st|2nd|3rd|\d{1,2}th").unwrap();
|
|
||||||
static ref SEASON_EXTRACT_SEASON_EN_PREFIX_RE: Regex = Regex::new(r"Season|S").unwrap();
|
|
||||||
static ref SEASON_EXTRACT_SEASON_EN_NTH_RE: Regex = Regex::new(r"1st|2nd|3rd|\d{1,2}th").unwrap();
|
|
||||||
static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_RE: Regex = Regex::new(r"[第 ].*[季期(部分)]|部分").unwrap();
|
|
||||||
static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE: Regex = Regex::new(r"[第季期 ]").unwrap();
|
|
||||||
static ref NAME_EXTRACT_REMOVE_RE: Regex = Regex::new(r"[((]仅限[港澳台]{1,3}地区[))]").unwrap();
|
|
||||||
static ref NAME_EXTRACT_SPLIT_RE: Regex = Regex::new(r"/|\s{2}|-\s{2}|\]\[").unwrap();
|
|
||||||
static ref NAME_EXTRACT_REPLACE_ADHOC1_RE: Regex = Regex::new(r"([\p{scx=Han}\s\(\)]{5,})_([a-zA-Z]{2,})").unwrap();
|
|
||||||
static ref NAME_JP_TEST: Regex = Regex::new(r"[\p{scx=Hira}\p{scx=Kana}]{2,}").unwrap();
|
|
||||||
static ref NAME_ZH_TEST: Regex = Regex::new(r"[\p{scx=Han}]{2,}").unwrap();
|
|
||||||
static ref NAME_EN_TEST: Regex = Regex::new(r"[a-zA-Z]{3,}").unwrap();
|
|
||||||
static ref TAGS_EXTRACT_SPLIT_RE: Regex = Regex::new(r"[\[\]()()_]").unwrap();
|
|
||||||
static ref CLEAR_SUB_RE: Regex = Regex::new(r"_MP4|_MKV").unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
|
|
||||||
pub struct RawEpisodeMeta {
|
|
||||||
pub name_en: Option<String>,
|
|
||||||
pub name_en_no_season: Option<String>,
|
|
||||||
pub name_jp: Option<String>,
|
|
||||||
pub name_jp_no_season: Option<String>,
|
|
||||||
pub name_zh: Option<String>,
|
|
||||||
pub name_zh_no_season: Option<String>,
|
|
||||||
pub season: i32,
|
|
||||||
pub season_raw: Option<String>,
|
|
||||||
pub episode_index: i32,
|
|
||||||
pub subtitle: Option<String>,
|
|
||||||
pub source: Option<String>,
|
|
||||||
pub fansub: Option<String>,
|
|
||||||
pub resolution: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_fansub(raw_name: &str) -> Option<&str> {
|
|
||||||
let mut groups = EN_BRACKET_SPLIT_RE.splitn(raw_name, 3);
|
|
||||||
groups.nth(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn replace_ch_bracket_to_en(raw_name: &str) -> String {
|
|
||||||
raw_name.replace('【', "[").replace('】', "]")
|
|
||||||
}
|
|
||||||
|
|
||||||
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RecorderResult<String> {
|
|
||||||
let raw_without_fansub = if let Some(fansub) = fansub {
|
|
||||||
let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
|
|
||||||
fan_sub_re.replace_all(title_body, "")
|
|
||||||
} else {
|
|
||||||
Cow::Borrowed(title_body)
|
|
||||||
};
|
|
||||||
let raw_with_prefix_replaced = PREFIX_RE.replace_all(&raw_without_fansub, "/");
|
|
||||||
let mut arg_group = raw_with_prefix_replaced
|
|
||||||
.split('/')
|
|
||||||
.map(|s| s.trim())
|
|
||||||
.filter(|s| !s.is_empty())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
if arg_group.len() == 1 {
|
|
||||||
arg_group = arg_group.first_mut().unwrap().split(' ').collect();
|
|
||||||
}
|
|
||||||
let mut raw = raw_without_fansub.to_string();
|
|
||||||
for arg in arg_group.iter() {
|
|
||||||
if (arg_group.len() <= 5 && MAIN_TITLE_PREFIX_PROCESS_RE1.is_match(arg))
|
|
||||||
|| (MAIN_TITLE_PREFIX_PROCESS_RE2.is_match(arg))
|
|
||||||
{
|
|
||||||
let sub = Regex::new(&format!(".{arg}."))?;
|
|
||||||
raw = sub.replace_all(&raw, "").to_string();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw)
|
|
||||||
&& m.len() as f32 > (raw.len() as f32) * 0.5
|
|
||||||
{
|
|
||||||
let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
|
|
||||||
.replace(&raw, "")
|
|
||||||
.chars()
|
|
||||||
.collect_vec();
|
|
||||||
while let Some(ch) = raw1.pop() {
|
|
||||||
if ch == ']' {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
raw = raw1.into_iter().collect();
|
|
||||||
}
|
|
||||||
Ok(raw.to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
|
|
||||||
let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
|
|
||||||
let seasons = SEASON_EXTRACT_SEASON_ALL_RE
|
|
||||||
.find(&name_and_season)
|
|
||||||
.into_iter()
|
|
||||||
.map(|s| s.as_str())
|
|
||||||
.collect_vec();
|
|
||||||
|
|
||||||
if seasons.is_empty() {
|
|
||||||
return (title_body.to_string(), None, 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut season = 1;
|
|
||||||
let mut season_raw = None;
|
|
||||||
let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");
|
|
||||||
|
|
||||||
for s in seasons {
|
|
||||||
season_raw = Some(s);
|
|
||||||
if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s)
|
|
||||||
&& let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
|
|
||||||
.replace_all(m.as_str(), "")
|
|
||||||
.parse::<i32>()
|
|
||||||
{
|
|
||||||
season = s;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s)
|
|
||||||
&& let Some(s) = DIGIT_1PLUS_REG
|
|
||||||
.find(m.as_str())
|
|
||||||
.and_then(|s| s.as_str().parse::<i32>().ok())
|
|
||||||
{
|
|
||||||
season = s;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
|
|
||||||
if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
|
|
||||||
.replace(m.as_str(), "")
|
|
||||||
.parse::<i32>()
|
|
||||||
{
|
|
||||||
season = s;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
|
|
||||||
season = ZH_NUM_MAP[m.as_str()];
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
(name.to_string(), season_raw.map(|s| s.to_string()), season)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_name_from_title_body_name_section(
|
|
||||||
title_body_name_section: &str,
|
|
||||||
) -> (Option<String>, Option<String>, Option<String>) {
|
|
||||||
let mut name_en = None;
|
|
||||||
let mut name_zh = None;
|
|
||||||
let mut name_jp = None;
|
|
||||||
let replaced1 = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
|
|
||||||
let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE
|
|
||||||
.replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
|
|
||||||
let trimmed = replaced2.trim();
|
|
||||||
let mut split = NAME_EXTRACT_SPLIT_RE
|
|
||||||
.split(trimmed)
|
|
||||||
.map(|s| s.trim())
|
|
||||||
.filter(|s| !s.is_empty())
|
|
||||||
.map(|s| s.to_string())
|
|
||||||
.collect_vec();
|
|
||||||
if split.len() == 1 {
|
|
||||||
let mut split_space = split[0].split(' ').collect_vec();
|
|
||||||
let mut search_indices = vec![0];
|
|
||||||
if split_space.len() > 1 {
|
|
||||||
search_indices.push(split_space.len() - 1);
|
|
||||||
}
|
|
||||||
for i in search_indices {
|
|
||||||
if NAME_ZH_TEST.is_match(split_space[i]) {
|
|
||||||
let chs = split_space[i];
|
|
||||||
split_space.remove(i);
|
|
||||||
split = vec![chs.to_string(), split_space.join(" ")];
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for item in split {
|
|
||||||
if NAME_JP_TEST.is_match(&item) && name_jp.is_none() {
|
|
||||||
name_jp = Some(item);
|
|
||||||
} else if NAME_ZH_TEST.is_match(&item) && name_zh.is_none() {
|
|
||||||
name_zh = Some(item);
|
|
||||||
} else if NAME_EN_TEST.is_match(&item) && name_en.is_none() {
|
|
||||||
name_en = Some(item);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
(name_en, name_zh, name_jp)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
|
|
||||||
DIGIT_1PLUS_REG
|
|
||||||
.find(title_episode)?
|
|
||||||
.as_str()
|
|
||||||
.parse::<i32>()
|
|
||||||
.ok()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn clear_sub(sub: Option<String>) -> Option<String> {
|
|
||||||
sub.map(|s| CLEAR_SUB_RE.replace_all(&s, "").to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_tags_from_title_extra(
|
|
||||||
title_extra: &str,
|
|
||||||
) -> (Option<String>, Option<String>, Option<String>) {
|
|
||||||
let replaced = TAGS_EXTRACT_SPLIT_RE.replace_all(title_extra, " ");
|
|
||||||
let elements = replaced
|
|
||||||
.split(' ')
|
|
||||||
.map(|s| s.trim())
|
|
||||||
.filter(|s| !s.is_empty())
|
|
||||||
.collect_vec();
|
|
||||||
|
|
||||||
let mut sub = None;
|
|
||||||
let mut resolution = None;
|
|
||||||
let mut source = None;
|
|
||||||
for element in elements.iter() {
|
|
||||||
if SUB_RE.is_match(element) {
|
|
||||||
sub = Some(element.to_string())
|
|
||||||
} else if RESOLUTION_RE.is_match(element) {
|
|
||||||
resolution = Some(element.to_string())
|
|
||||||
} else if SOURCE_L1_RE.is_match(element) {
|
|
||||||
source = Some(element.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if source.is_none() {
|
|
||||||
for element in elements {
|
|
||||||
if SOURCE_L2_RE.is_match(element) {
|
|
||||||
source = Some(element.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
(clear_sub(sub), resolution, source)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn check_is_movie(title: &str) -> bool {
|
|
||||||
MOVIE_TITLE_RE.is_match(title)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn parse_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
|
|
||||||
let raw_title = s.trim();
|
|
||||||
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
|
|
||||||
let fansub = extract_fansub(&raw_title_without_ch_brackets);
|
|
||||||
let movie_capture = check_is_movie(&raw_title_without_ch_brackets);
|
|
||||||
if let Some(title_re_match_obj) = MOVIE_TITLE_RE
|
|
||||||
.captures(&raw_title_without_ch_brackets)
|
|
||||||
.or(TITLE_RE.captures(&raw_title_without_ch_brackets))
|
|
||||||
{
|
|
||||||
let mut title_body = title_re_match_obj
|
|
||||||
.get(1)
|
|
||||||
.map(|s| s.as_str().trim())
|
|
||||||
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"))
|
|
||||||
.to_string();
|
|
||||||
let mut title_episode = title_re_match_obj
|
|
||||||
.get(2)
|
|
||||||
.map(|s| s.as_str().trim())
|
|
||||||
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
|
|
||||||
let title_extra = title_re_match_obj
|
|
||||||
.get(3)
|
|
||||||
.map(|s| s.as_str().trim())
|
|
||||||
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
|
|
||||||
|
|
||||||
if movie_capture {
|
|
||||||
title_body += title_episode;
|
|
||||||
title_episode = "";
|
|
||||||
} else if EP_COLLECTION_RE.is_match(title_episode) {
|
|
||||||
title_episode = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
let title_body = title_body_pre_process(&title_body, fansub)?;
|
|
||||||
let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
|
|
||||||
let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
|
|
||||||
let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
|
|
||||||
extract_name_from_title_body_name_section(&name_without_season);
|
|
||||||
let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1);
|
|
||||||
let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
|
|
||||||
Ok(RawEpisodeMeta {
|
|
||||||
name_en,
|
|
||||||
name_en_no_season,
|
|
||||||
name_jp,
|
|
||||||
name_jp_no_season,
|
|
||||||
name_zh,
|
|
||||||
name_zh_no_season,
|
|
||||||
season,
|
|
||||||
season_raw,
|
|
||||||
episode_index,
|
|
||||||
subtitle: sub,
|
|
||||||
source,
|
|
||||||
fansub: fansub.map(|s| s.to_string()),
|
|
||||||
resolution,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
whatever!("Can not parse episode meta from raw filename {}", raw_title)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {

    use super::{RawEpisodeMeta, parse_episode_meta_from_raw_name};

    fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
        let expected: Option<RawEpisodeMeta> = serde_json::from_str(expected).unwrap_or_default();
        let found = parse_episode_meta_from_raw_name(raw_name).ok();

        if expected != found {
            println!(
                "expected {} and found {} are not equal",
                serde_json::to_string_pretty(&expected).unwrap(),
                serde_json::to_string_pretty(&found).unwrap()
            )
        }
        assert_eq!(expected, found);
    }

    #[test]
    fn test_parse_ep_with_all_parts_wrapped() {
        test_raw_ep_parser_case(
            r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
            r#"{
                "name_zh": "我心里危险的东西",
                "name_zh_no_season": "我心里危险的东西",
                "season": 2,
                "season_raw": "第二季",
                "episode_index": 5,
                "subtitle": "简日双语",
                "source": null,
                "fansub": "新Sub",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_title_wrapped_by_one_square_bracket_and_season_prefix() {
        test_raw_ep_parser_case(
            r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
            r#"{
                "name_en": "Boku no Kokoro no Yabai Yatsu",
                "name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
                "name_zh": "我内心的糟糕念头",
                "name_zh_no_season": "我内心的糟糕念头",
                "season": 1,
                "season_raw": null,
                "episode_index": 18,
                "subtitle": "简日双语",
                "source": null,
                "fansub": "喵萌奶茶屋",
                "resolution": "1080p"
            }"#,
        );
    }

    #[test]
    fn test_parse_ep_with_ep_and_version() {
        test_raw_ep_parser_case(
            r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
            r#"{
                "name_en": "Shin no Nakama 2nd",
                "name_en_no_season": "Shin no Nakama",
                "name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
                "name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
                "season": 2,
                "season_raw": "2nd",
                "episode_index": 8,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_en_title_only() {
        test_raw_ep_parser_case(
            r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
            r#"{
                "name_en": "THE MARGINAL SERVICE",
                "name_en_no_season": "THE MARGINAL SERVICE",
                "season": 1,
                "episode_index": 8,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "动漫国字幕组&LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_two_zh_title() {
        test_raw_ep_parser_case(
            r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
            r#"{
                "name_en": "Nozomanu Fushi no Boukensha",
                "name_en_no_season": "Nozomanu Fushi no Boukensha",
                "name_zh": "事与愿违的不死冒险者",
                "name_zh_no_season": "事与愿违的不死冒险者",
                "season": 1,
                "season_raw": null,
                "episode_index": 1,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_en_zh_jp_titles() {
        test_raw_ep_parser_case(
            r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
            r#"{
                "name_en": "Pon no Michi",
                "name_jp": "ぽんのみち",
                "name_zh": "碰之道",
                "name_en_no_season": "Pon no Michi",
                "name_jp_no_season": "ぽんのみち",
                "name_zh_no_season": "碰之道",
                "season": 1,
                "season_raw": null,
                "episode_index": 7,
                "subtitle": "简繁日内封字幕",
                "source": "WebRip",
                "fansub": "喵萌奶茶屋&LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_nth_season() {
        test_raw_ep_parser_case(
            r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
            r#"{
                "name_en": "Yowai Character Tomozakikun",
                "name_en_no_season": "Yowai Character Tomozakikun",
                "name_zh": "弱角友崎同学 2nd STAGE",
                "name_zh_no_season": "弱角友崎同学",
                "season": 2,
                "season_raw": "2nd",
                "episode_index": 9,
                "subtitle": "CHT",
                "source": "Baha",
                "fansub": "ANi",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_season_en_and_season_zh() {
        test_raw_ep_parser_case(
            r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
            r#"{
                "name_en": "Kingdom S5",
                "name_en_no_season": "Kingdom",
                "name_zh": "王者天下 第五季",
                "name_zh_no_season": "王者天下",
                "season": 5,
                "season_raw": "第五季",
                "episode_index": 7,
                "subtitle": "简繁外挂字幕",
                "source": "WebRip",
                "fansub": "豌豆字幕组&LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_airota_fansub_style_case1() {
        test_raw_ep_parser_case(
            r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
            r#"{
                "name_en": "Alice to Therese no Maboroshi Koujou",
                "name_en_no_season": "Alice to Therese no Maboroshi Koujou",
                "name_zh": "爱丽丝与特蕾丝的虚幻工厂",
                "name_zh_no_season": "爱丽丝与特蕾丝的虚幻工厂",
                "season": 1,
                "episode_index": 1,
                "subtitle": "简繁内封",
                "source": "WebRip",
                "fansub": "千夏字幕组",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_airota_fansub_style_case2() {
        test_raw_ep_parser_case(
            r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
            r#"{
                "name_en": "Yuru Camp Movie",
                "name_en_no_season": "Yuru Camp Movie",
                "name_zh": "电影 轻旅轻营 (摇曳露营)",
                "name_zh_no_season": "电影 轻旅轻营 (摇曳露营)",
                "season": 1,
                "episode_index": 1,
                "subtitle": "繁体",
                "source": "UHDRip",
                "fansub": "千夏字幕组&喵萌奶茶屋",
                "resolution": "2160p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_large_episode_style() {
        test_raw_ep_parser_case(
            r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#,
            r#"{
                "name_en": "New Doraemon",
                "name_en_no_season": "New Doraemon",
                "name_zh": "哆啦A梦新番",
                "name_zh_no_season": "哆啦A梦新番",
                "season": 1,
                "episode_index": 747,
                "subtitle": "GB",
                "fansub": "梦蓝字幕组",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_many_square_brackets_split_title() {
        test_raw_ep_parser_case(
            r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
            r#"{
                "name_en": "Yuru Camp",
                "name_en_no_season": "Yuru Camp",
                "name_zh": "剧场版-摇曳露营",
                "name_zh_no_season": "剧场版-摇曳露营",
                "season": 1,
                "episode_index": 1,
                "subtitle": "简日双语",
                "fansub": "MCE汉化组",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_implicit_lang_title_sep() {
        test_raw_ep_parser_case(
            r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#,
            r#"{
                "name_en": "NieR Automata Ver1.1a",
                "name_en_no_season": "NieR Automata Ver1.1a",
                "name_zh": "尼尔:机械纪元",
                "name_zh_no_season": "尼尔:机械纪元",
                "season": 1,
                "episode_index": 2,
                "subtitle": "简日双语",
                "fansub": "织梦字幕组",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_square_brackets_wrapped_and_space_split() {
        test_raw_ep_parser_case(
            r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
            r#"{
                "name_en": "Delicious in Dungeon",
                "name_en_no_season": "Delicious in Dungeon",
                "name_zh": "迷宫饭",
                "name_zh_no_season": "迷宫饭",
                "season": 1,
                "episode_index": 3,
                "subtitle": "日语中字",
                "source": "NETFLIX",
                "fansub": "天月搬运组",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_start_with_brackets_wrapped_season_info_prefix() {
        test_raw_ep_parser_case(
            r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
            r#"{
                "name_en": "Dungeon Meshi",
                "name_en_no_season": "Dungeon Meshi",
                "name_zh": "迷宫饭",
                "name_zh_no_season": "迷宫饭",
                "season": 1,
                "episode_index": 1,
                "subtitle": "简日双语",
                "fansub": "爱恋字幕社",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_small_no_title_extra_brackets_case() {
        test_raw_ep_parser_case(
            r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
            r#"{
                "name_en": "Mahou Shoujo ni Akogarete",
                "name_en_no_season": "Mahou Shoujo ni Akogarete",
                "name_zh": "梦想成为魔法少女 [年龄限制版]",
                "name_zh_no_season": "梦想成为魔法少女 [年龄限制版]",
                "season": 1,
                "episode_index": 9,
                "subtitle": "CHT",
                "source": "Baha",
                "fansub": "ANi",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_title_leading_space_style() {
        test_raw_ep_parser_case(
            r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
            r#"{
                "name_zh": "16bit 的感动 ANOTHER LAYER",
                "name_zh_no_season": "16bit 的感动 ANOTHER LAYER",
                "season": 1,
                "season_raw": null,
                "episode_index": 1,
                "subtitle": "CHT",
                "source": "Baha",
                "fansub": "ANi",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_title_leading_month_and_wrapped_brackets_style() {
        test_raw_ep_parser_case(
            r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#,
            r#"{
                "name_en": "~ Sugar Apple Fairy Tale ~",
                "name_en_no_season": "~ Sugar Apple Fairy Tale ~",
                "name_zh": "银砂糖师与黑妖精",
                "name_zh_no_season": "银砂糖师与黑妖精",
                "season": 1,
                "episode_index": 13,
                "subtitle": "简日双语",
                "fansub": "喵萌奶茶屋",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_title_leading_month_style() {
        test_raw_ep_parser_case(
            r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4(字幕社招人内详)"#,
            r#"{
                "name_en": "Tengoku Daimakyou",
                "name_en_no_season": "Tengoku Daimakyou",
                "name_zh": "天国大魔境",
                "name_zh_no_season": "天国大魔境",
                "season": 1,
                "episode_index": 5,
                "subtitle": "字幕社招人内详",
                "source": null,
                "fansub": "极影字幕社",
                "resolution": "720P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_tokusatsu_style() {
        test_raw_ep_parser_case(
            r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#,
            r#"{
                "name_jp": "仮面ライダーギーツ",
                "name_jp_no_season": "仮面ライダーギーツ",
                "name_zh": "假面骑士Geats",
                "name_zh_no_season": "假面骑士Geats",
                "season": 1,
                "episode_index": 33,
                "source": "WEBDL",
                "fansub": "MagicStar",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_multi_lang_zh_title() {
        test_raw_ep_parser_case(
            r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对!☆PICO FEVER! / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#,
            r#"{
                "name_en": "Garupa Pico: Fever!",
                "name_en_no_season": "Garupa Pico: Fever!",
                "name_zh": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
                "name_zh_no_season": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
                "season": 1,
                "episode_index": 26,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "百冬练习组&LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_ep_collections() {
        test_raw_ep_parser_case(
            r#"[奶²&LoliHouse] 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简日内封字幕]"#,
            r#"{
                "name_en": "Kinokoinu: Mushroom Pup",
                "name_en_no_season": "Kinokoinu: Mushroom Pup",
                "name_zh": "蘑菇狗",
                "name_zh_no_season": "蘑菇狗",
                "season": 1,
                "episode_index": 1,
                "subtitle": "简日内封字幕",
                "source": "WebRip",
                "fansub": "奶²&LoliHouse",
                "resolution": "1080p",
                "name": " 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集]"
            }"#,
        );

        test_raw_ep_parser_case(
            r#"[LoliHouse] 叹气的亡灵想隐退 / Nageki no Bourei wa Intai shitai [01-13 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
            r#"{
                "name_en": "Nageki no Bourei wa Intai shitai",
                "name_en_no_season": "Nageki no Bourei wa Intai shitai",
                "name_jp": null,
                "name_jp_no_season": null,
                "name_zh": "叹气的亡灵想隐退",
                "name_zh_no_season": "叹气的亡灵想隐退",
                "season": 1,
                "season_raw": null,
                "episode_index": 1,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "LoliHouse",
                "resolution": "1080p"
            }"#,
        );

        test_raw_ep_parser_case(
            r#"[LoliHouse] 精灵幻想记 第二季 / Seirei Gensouki S2 [01-12 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
            r#"{
                "name_en": "Seirei Gensouki S2",
                "name_en_no_season": "Seirei Gensouki",
                "name_zh": "精灵幻想记 第二季",
                "name_zh_no_season": "精灵幻想记",
                "season": 2,
                "season_raw": "第二季",
                "episode_index": 1,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "LoliHouse",
                "resolution": "1080p"
            }"#,
        );

        test_raw_ep_parser_case(
            r#"[喵萌奶茶屋&LoliHouse] 超自然武装当哒当 / 胆大党 / Dandadan [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简繁日内封字幕][Fin]"#,
            r#"{
                "name_en": "Dandadan",
                "name_en_no_season": "Dandadan",
                "name_zh": "超自然武装当哒当",
                "name_zh_no_season": "超自然武装当哒当",
                "season": 1,
                "episode_index": 1,
                "subtitle": "简繁日内封字幕",
                "source": "WebRip",
                "fansub": "喵萌奶茶屋&LoliHouse",
                "resolution": "1080p"
            }"#,
        );
    }

    // TODO: FIXME
    #[test]
    fn test_bad_cases() {
        test_raw_ep_parser_case(
            r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
            r#"{
                "name_zh": "摇曳露营△剧场版",
                "name_zh_no_season": "摇曳露营△剧场版",
                "season": 1,
                "season_raw": null,
                "episode_index": 1,
                "subtitle": "简繁字幕",
                "source": "BDrip",
                "fansub": "7³ACG x 桜都字幕组",
                "resolution": "1080p"
            }"#,
        );

        test_raw_ep_parser_case(
            r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#,
            r#"{
                "name_en": "第二季 Komi-san wa, Komyushou Desu. S02",
                "name_en_no_season": "Komi-san wa, Komyushou Desu.",
                "name_zh": "古见同学有交流障碍症",
                "name_zh_no_season": "古见同学有交流障碍症",
                "season": 2,
                "season_raw": "第二季",
                "episode_index": 22,
                "subtitle": "GB",
                "fansub": "幻樱字幕组",
                "resolution": "1920X1080"
            }"#,
        );
    }
}
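A minimal usage sketch of the parser this diff deletes, grounded in the tests above; the import path reflects the pre-diff module layout (`extract::rawname` re-exported these items), and the `expect` unwrapping is a simplification:

use crate::extract::rawname::{RawEpisodeMeta, parse_episode_meta_from_raw_name};

fn demo() {
    // Title taken from test_parse_ep_with_two_zh_title above.
    let meta: RawEpisodeMeta = parse_episode_meta_from_raw_name(
        "[LoliHouse] 事与愿违的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
    )
    .expect("title should parse");
    assert_eq!(meta.episode_index, 1);
    assert_eq!(meta.resolution.as_deref(), Some("1080p"));
    assert_eq!(meta.fansub.as_deref(), Some("LoliHouse"));
}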
apps/recorder/src/graphql/domains/bangumi.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::bangumi};

pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId);
}

pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<bangumi::BangumiType>();
    seaography::register_entity!(builder, bangumi);

    builder
}
apps/recorder/src/graphql/domains/credential_3rd.rs (new file, 168 lines)
@@ -0,0 +1,168 @@
use std::sync::Arc;

use async_graphql::dynamic::{
    Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
};
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql;

use crate::{
    app::AppContextTrait,
    auth::AuthUserInfo,
    errors::RecorderError,
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
        infra::crypto::{
            register_crypto_column_input_conversion_to_schema_context,
            register_crypto_column_output_conversion_to_schema_context,
        },
    },
    models::credential_3rd,
};

#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
struct Credential3rdCheckAvailableInput {
    pub id: i32,
}

impl Credential3rdCheckAvailableInput {
    fn input_type_name() -> &'static str {
        "Credential3rdCheckAvailableInput"
    }

    fn arg_name() -> &'static str {
        "filter"
    }

    fn generate_input_object() -> InputObject {
        InputObject::new(Self::input_type_name())
            .description("The input of the credential3rdCheckAvailable query")
            .field(InputValue::new(
                Credential3rdCheckAvailableInputFieldEnum::Id.as_str(),
                TypeRef::named_nn(TypeRef::INT),
            ))
    }
}

#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct Credential3rdCheckAvailableInfo {
    pub available: bool,
}

impl Credential3rdCheckAvailableInfo {
    fn object_type_name() -> &'static str {
        "Credential3rdCheckAvailableInfo"
    }

    fn generate_output_object() -> Object {
        Object::new(Self::object_type_name())
            .description("The output of the credential3rdCheckAvailable query")
            .field(Field::new(
                Credential3rdCheckAvailableInfoFieldEnum::Available,
                TypeRef::named_nn(TypeRef::BOOLEAN),
                move |ctx| {
                    FieldFuture::new(async move {
                        let subscription_info = ctx.parent_value.try_downcast_ref::<Self>()?;
                        Ok(Some(async_graphql::Value::from(
                            subscription_info.available,
                        )))
                    })
                },
            ))
    }
}

pub fn register_credential3rd_to_schema_context(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
) {
    restrict_subscriber_for_entity::<credential_3rd::Entity>(
        context,
        &credential_3rd::Column::SubscriberId,
    );
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Cookies,
    );
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Username,
    );
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Password,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Cookies,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Username,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx,
        &credential_3rd::Column::Password,
    );
}

pub fn register_credential3rd_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.register_enumeration::<credential_3rd::Credential3rdType>();
    seaography::register_entity!(builder, credential_3rd);

    builder.schema = builder
        .schema
        .register(Credential3rdCheckAvailableInput::generate_input_object());
    builder.schema = builder
        .schema
        .register(Credential3rdCheckAvailableInfo::generate_output_object());

    builder.queries.push(
        Field::new(
            "credential3rdCheckAvailable",
            TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
            move |ctx| {
                FieldFuture::new(async move {
                    let auth_user_info = ctx.data::<AuthUserInfo>()?;
                    let input: Credential3rdCheckAvailableInput = ctx
                        .args
                        .get(Credential3rdCheckAvailableInput::arg_name())
                        .unwrap()
                        .deserialize()?;
                    let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;

                    let credential_model = credential_3rd::Model::find_by_id_and_subscriber_id(
                        app_ctx.as_ref(),
                        input.id,
                        auth_user_info.subscriber_auth.subscriber_id,
                    )
                    .await?
                    .ok_or_else(|| RecorderError::Credential3rdError {
                        message: format!("credential = {} not found", input.id),
                        source: None.into(),
                    })?;

                    let available = credential_model.check_available(app_ctx.as_ref()).await?;
                    Ok(Some(FieldValue::owned_any(
                        Credential3rdCheckAvailableInfo { available },
                    )))
                })
            },
        )
        .argument(InputValue::new(
            Credential3rdCheckAvailableInput::arg_name(),
            TypeRef::named_nn(Credential3rdCheckAvailableInput::input_type_name()),
        )),
    );

    builder
}
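A hedged sketch of exercising the custom query through the built dynamic schema; only the field, argument, and output names come from the file above, while the `schema` value and the injection of `AuthUserInfo` and the app context are assumed to happen elsewhere in the crate:

// Hedged sketch, not from the diff: async_graphql's dynamic Schema accepts a
// query string directly via Into<Request>.
let response = schema
    .execute(r#"query { credential3rdCheckAvailable(filter: { id: 1 }) { available } }"#)
    .await;
assert!(response.errors.is_empty());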
apps/recorder/src/graphql/domains/downloaders.rs (new file, 17 lines)
@@ -0,0 +1,17 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloaders};

pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<downloaders::Entity>(
        context,
        &downloaders::Column::SubscriberId,
    );
}

pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<downloaders::DownloaderCategory>();
    seaography::register_entity!(builder, downloaders);

    builder
}
apps/recorder/src/graphql/domains/downloads.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloads};

pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId);
}

pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<downloads::DownloadStatus>();
    builder.register_enumeration::<downloads::DownloadMime>();
    seaography::register_entity!(builder, downloads);

    builder
}
apps/recorder/src/graphql/domains/episodes.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::episodes};

pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId);
}

pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<episodes::EpisodeType>();
    seaography::register_entity!(builder, episodes);

    builder
}
apps/recorder/src/graphql/domains/feeds.rs (new file, 56 lines)
@@ -0,0 +1,56 @@
use std::sync::Arc;

use async_graphql::dynamic::ResolverContext;
use sea_orm::Value as SeaValue;
use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};

use crate::{
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
        infra::util::{get_entity_column_key, get_entity_key},
    },
    models::feeds,
};

pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<feeds::Entity>(context, &feeds::Column::SubscriberId);
    {
        let entity_column_key =
            get_entity_column_key::<feeds::Entity>(context, &feeds::Column::Token);
        let entity_key = get_entity_key::<feeds::Entity>(context);
        let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
        let entity_create_one_mutation_field_name = Arc::new(format!(
            "{}{}",
            entity_name, context.entity_create_one_mutation.mutation_suffix
        ));
        let entity_create_batch_mutation_field_name = Arc::new(format!(
            "{}{}",
            entity_name,
            context.entity_create_batch_mutation.mutation_suffix.clone()
        ));

        context.types.input_none_conversions.insert(
            entity_column_key,
            Box::new(
                move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
                    let field_name = context.field().name();
                    if field_name == entity_create_one_mutation_field_name.as_str()
                        || field_name == entity_create_batch_mutation_field_name.as_str()
                    {
                        Ok(Some(SeaValue::String(Some(Box::new(nanoid::nanoid!())))))
                    } else {
                        Ok(None)
                    }
                },
            ),
        );
    }
}

pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<feeds::FeedType>();
    builder.register_enumeration::<feeds::FeedSource>();
    seaography::register_entity!(builder, feeds);

    builder
}
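The `input_none_conversions` hook above means clients may omit `token` on the feed create mutations and a default is injected server-side; a hedged illustration of the value it fills in:

// Hedged illustration, not from the diff: when the resolved field is the
// create-one or create-batch mutation and `token` was not supplied, the hook
// returns a fresh nanoid (a 21-character URL-safe id by default in the
// nanoid crate).
let token = nanoid::nanoid!();
assert_eq!(token.len(), 21);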
apps/recorder/src/graphql/domains/mod.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
pub mod credential_3rd;

pub mod bangumi;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
pub mod feeds;
pub mod subscriber_tasks;
pub mod subscribers;
pub mod subscription_bangumi;
pub mod subscription_episode;
pub mod subscriptions;
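The call site that wires these modules together is not part of this diff; a hypothetical composition sketch consistent with the two-function pattern each domain file exposes:

// Hypothetical wiring sketch (assumed, not shown in this diff): context hooks
// are installed first, then each domain registers its entities and
// enumerations on the seaography builder.
use crate::graphql::domains::{bangumi, episodes};

fn configure_context(context: &mut seaography::BuilderContext) {
    bangumi::register_bangumi_to_schema_context(context);
    episodes::register_episodes_to_schema_context(context);
    // ... the remaining domains follow the same pattern.
}

fn register_entities(mut builder: seaography::Builder) -> seaography::Builder {
    builder = bangumi::register_bangumi_to_schema_builder(builder);
    builder = episodes::register_episodes_to_schema_builder(builder);
    builder
}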
151
apps/recorder/src/graphql/domains/subscriber_tasks.rs
Normal file
151
apps/recorder/src/graphql/domains/subscriber_tasks.rs
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
use std::{ops::Deref, sync::Arc};
|
||||||
|
|
||||||
|
use async_graphql::dynamic::{FieldValue, TypeRef};
|
||||||
|
use sea_orm::{
|
||||||
|
ColumnTrait, ConnectionTrait, EntityTrait, QueryFilter, QuerySelect, QueryTrait, prelude::Expr,
|
||||||
|
sea_query::Query,
|
||||||
|
};
|
||||||
|
use seaography::{
|
||||||
|
Builder as SeaographyBuilder, BuilderContext, EntityDeleteMutationBuilder, EntityObjectBuilder,
|
||||||
|
EntityQueryFieldBuilder, get_filter_conditions,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
errors::RecorderError,
|
||||||
|
graphql::{
|
||||||
|
domains::subscribers::restrict_subscriber_for_entity,
|
||||||
|
infra::{
|
||||||
|
custom::generate_entity_filter_mutation_field,
|
||||||
|
json::{convert_jsonb_output_case_for_entity, restrict_jsonb_filter_input_for_entity},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
models::subscriber_tasks,
|
||||||
|
task::{ApalisJobs, ApalisSchema},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn register_subscriber_tasks_entity_mutations(
|
||||||
|
mut builder: SeaographyBuilder,
|
||||||
|
) -> SeaographyBuilder {
|
||||||
|
let context = builder.context;
|
||||||
|
{
|
||||||
|
let entitity_delete_mutation_builder = EntityDeleteMutationBuilder { context };
|
||||||
|
let delete_mutation = generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
|
||||||
|
context,
|
||||||
|
entitity_delete_mutation_builder.type_name::<subscriber_tasks::Entity>(),
|
||||||
|
TypeRef::named_nn(TypeRef::INT),
|
||||||
|
            Arc::new(|resolver_ctx, app_ctx, filters| {
                let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
                    resolver_ctx,
                    context,
                    filters,
                );
                Box::pin(async move {
                    let db = app_ctx.db();

                    let select_subquery = subscriber_tasks::Entity::find()
                        .select_only()
                        .column(subscriber_tasks::Column::Id)
                        .filter(filters_condition);

                    let delete_query = Query::delete()
                        .from_table((ApalisSchema::Schema, ApalisJobs::Table))
                        .and_where(
                            Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()),
                        )
                        .to_owned();

                    let db_backend = db.deref().get_database_backend();
                    let delete_statement = db_backend.build(&delete_query);

                    let result = db.execute(delete_statement).await?;

                    Ok::<_, RecorderError>(Some(FieldValue::value(result.rows_affected() as i32)))
                })
            }),
        );
        builder.mutations.push(delete_mutation);
    }
    {
        let entity_object_builder = EntityObjectBuilder { context };
        let entity_query_field = EntityQueryFieldBuilder { context };
        let entity_retry_one_mutation_name = format!(
            "{}RetryOne",
            entity_query_field.type_name::<subscriber_tasks::Entity>()
        );
        let retry_one_mutation =
            generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
                context,
                entity_retry_one_mutation_name,
                TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
                Arc::new(|resolver_ctx, app_ctx, filters| {
                    let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
                        resolver_ctx,
                        context,
                        filters,
                    );
                    Box::pin(async move {
                        let db = app_ctx.db();

                        let job_id = subscriber_tasks::Entity::find()
                            .filter(filters_condition)
                            .select_only()
                            .column(subscriber_tasks::Column::Id)
                            .into_tuple::<String>()
                            .one(db)
                            .await?
                            .ok_or_else(|| RecorderError::ModelEntityNotFound {
                                entity: "SubscriberTask".into(),
                            })?;

                        let task = app_ctx.task();
                        task.retry_subscriber_task(job_id.clone()).await?;

                        let task_model = subscriber_tasks::Entity::find()
                            .filter(subscriber_tasks::Column::Id.eq(&job_id))
                            .one(db)
                            .await?
                            .ok_or_else(|| RecorderError::ModelEntityNotFound {
                                entity: "SubscriberTask".into(),
                            })?;

                        Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
                    })
                }),
            );
        builder.mutations.push(retry_one_mutation);
    }

    builder
}

pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
        context,
        &subscriber_tasks::Column::SubscriberId,
    );
    restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
        context,
        &subscriber_tasks::Column::Job,
    );
    convert_jsonb_output_case_for_entity::<subscriber_tasks::Entity>(
        context,
        &subscriber_tasks::Column::Job,
    );
}

pub fn register_subscriber_tasks_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.register_entity::<subscriber_tasks::Entity>(
        <subscriber_tasks::RelatedEntity as sea_orm::Iterable>::iter()
            .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
            .collect(),
    );
    builder = builder.register_entity_dataloader_one_to_one(subscriber_tasks::Entity, tokio::spawn);
    builder =
        builder.register_entity_dataloader_one_to_many(subscriber_tasks::Entity, tokio::spawn);
    builder = register_subscriber_tasks_entity_mutations(builder);
    builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
    builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
    builder
}
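For context, a minimal sketch of how the two register_* functions above are combined when the schema is assembled; the call order mirrors schema.rs later in this diff, while the surrounding setup (database handle, OnceCell) is an assumption here, not code from this changeset.

// Sketch only: the BuilderContext must be fully configured before the
// Seaography builder borrows it, which is why the *_to_schema_context call
// precedes Builder::new; schema.rs keeps the context 'static via a OnceCell.
static CONTEXT: OnceCell<BuilderContext> = OnceCell::new();

let context = CONTEXT.get_or_init(|| {
    let mut context = BuilderContext::default();
    register_subscriber_tasks_to_schema_context(&mut context);
    context
});

let mut builder = seaography::Builder::new(context, database.clone());
builder = register_subscriber_tasks_to_schema_builder(builder);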
@@ -1,14 +1,29 @@
 use std::sync::Arc;
 
-use async_graphql::dynamic::{ResolverContext, ValueAccessor};
-use sea_orm::EntityTrait;
-use seaography::{BuilderContext, FnGuard, GuardAction};
+use async_graphql::dynamic::{ObjectAccessor, ResolverContext, TypeRef, ValueAccessor};
+use lazy_static::lazy_static;
+use maplit::btreeset;
+use sea_orm::{ColumnTrait, Condition, EntityTrait, Iterable, Value as SeaValue};
+use seaography::{
+    Builder as SeaographyBuilder, BuilderContext, FilterInfo,
+    FilterOperation as SeaographqlFilterOperation, FilterType, FilterTypesMapHelper,
+    FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult, SeaographyError,
+};
 
 use crate::{
     auth::{AuthError, AuthUserInfo},
-    graphql::infra::util::{get_column_key, get_entity_key},
+    graphql::infra::util::{get_column_key, get_entity_column_key, get_entity_key},
+    models::subscribers,
 };
+
+lazy_static! {
+    pub static ref SUBSCRIBER_ID_FILTER_INFO: FilterInfo = FilterInfo {
+        type_name: String::from("SubscriberIdFilterInput"),
+        base_type: TypeRef::INT.into(),
+        supported_operations: btreeset! { SeaographqlFilterOperation::Equals },
+    };
+}
 
 fn guard_data_object_accessor_with_subscriber_id(
     value: ValueAccessor<'_>,
     column_name: &str,
@@ -181,3 +196,158 @@ where
         }
     })
 }
+
+pub fn generate_subscriber_id_filter_condition<T>(
+    _context: &BuilderContext,
+    column: &T::Column,
+) -> FnFilterCondition
+where
+    T: EntityTrait,
+    <T as EntityTrait>::Model: Sync,
+{
+    let column = *column;
+    Box::new(
+        move |context: &ResolverContext,
+              mut condition: Condition,
+              filter: Option<&ObjectAccessor<'_>>|
+              -> SeaResult<Condition> {
+            match context.ctx.data::<AuthUserInfo>() {
+                Ok(user_info) => {
+                    let subscriber_id = user_info.subscriber_auth.subscriber_id;
+
+                    if let Some(filter) = filter {
+                        for operation in &SUBSCRIBER_ID_FILTER_INFO.supported_operations {
+                            match operation {
+                                SeaographqlFilterOperation::Equals => {
+                                    if let Some(value) = filter.get("eq") {
+                                        let value: i32 = value.i64()?.try_into()?;
+                                        if value != subscriber_id {
+                                            return Err(SeaographyError::AsyncGraphQLError(
+                                                async_graphql::Error::new(
+                                                    "subscriber_id and auth_info do not match",
+                                                ),
+                                            ));
+                                        }
+                                    }
+                                }
+                                _ => unreachable!("unreachable filter operation for subscriber_id"),
+                            }
+                        }
+                    } else {
+                        condition = condition.add(column.eq(subscriber_id));
+                    }
+
+                    Ok(condition)
+                }
+                Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
+            }
+        },
+    )
+}
+
+pub fn generate_default_subscriber_id_input_conversion<T>(
+    context: &BuilderContext,
+    _column: &T::Column,
+) -> FnInputTypeNoneConversion
+where
+    T: EntityTrait,
+    <T as EntityTrait>::Model: Sync,
+{
+    let entity_key = get_entity_key::<T>(context);
+    let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
+    let entity_create_one_mutation_field_name = Arc::new(format!(
+        "{}{}",
+        entity_name, context.entity_create_one_mutation.mutation_suffix
+    ));
+    let entity_create_batch_mutation_field_name = Arc::new(format!(
+        "{}{}",
+        entity_name,
+        context.entity_create_batch_mutation.mutation_suffix.clone()
+    ));
+    Box::new(
+        move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
+            let field_name = context.field().name();
+            if field_name == entity_create_one_mutation_field_name.as_str()
+                || field_name == entity_create_batch_mutation_field_name.as_str()
+            {
+                match context.ctx.data::<AuthUserInfo>() {
+                    Ok(user_info) => {
+                        let subscriber_id = user_info.subscriber_auth.subscriber_id;
+                        Ok(Some(SeaValue::Int(Some(subscriber_id))))
+                    }
+                    Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
+                }
+            } else {
+                Ok(None)
+            }
+        },
+    )
+}
+
+pub fn restrict_subscriber_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
+where
+    T: EntityTrait,
+    <T as EntityTrait>::Model: Sync,
+{
+    let entity_key = get_entity_key::<T>(context);
+    let entity_column_key = get_entity_column_key::<T>(context, column);
+
+    context.guards.entity_guards.insert(
+        entity_key.clone(),
+        guard_entity_with_subscriber_id::<T>(context, column),
+    );
+    context.guards.field_guards.insert(
+        entity_column_key.clone(),
+        guard_field_with_subscriber_id::<T>(context, column),
+    );
+    context.filter_types.overwrites.insert(
+        entity_column_key.clone(),
+        Some(FilterType::Custom(
+            SUBSCRIBER_ID_FILTER_INFO.type_name.clone(),
+        )),
+    );
+    context.filter_types.condition_functions.insert(
+        entity_column_key.clone(),
+        generate_subscriber_id_filter_condition::<T>(context, column),
+    );
+    context.types.input_none_conversions.insert(
+        entity_column_key.clone(),
+        generate_default_subscriber_id_input_conversion::<T>(context, column),
+    );
+
+    context.entity_input.update_skips.push(entity_column_key);
+}
+
+pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
+    restrict_subscriber_for_entity::<subscribers::Entity>(context, &subscribers::Column::Id);
+    for column in subscribers::Column::iter() {
+        if !matches!(column, subscribers::Column::Id) {
+            let key = get_entity_column_key::<subscribers::Entity>(context, &column);
+            context.filter_types.overwrites.insert(key, None);
+        }
+    }
+}
+
+pub fn register_subscribers_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
+    {
+        let filter_types_map_helper = FilterTypesMapHelper {
+            context: builder.context,
+        };
+
+        builder.schema = builder
+            .schema
+            .register(filter_types_map_helper.generate_filter_input(&SUBSCRIBER_ID_FILTER_INFO));
+    }
+
+    {
+        builder.register_entity::<subscribers::Entity>(
+            <subscribers::RelatedEntity as sea_orm::Iterable>::iter()
+                .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
+                .collect(),
+        );
+        builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
+        builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
+    }
+
+    builder
+}
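restrict_subscriber_for_entity is the single entry point the other domain modules call (subscription_bangumi.rs and subscription_episode.rs below are typical). A hypothetical registration for a made-up subscriber-owned entity, to show the intended call shape:

// Hypothetical entity (`my_items` does not exist in this repo): one call wires
// up the entity guard, the field guard, the SubscriberIdFilterInput overwrite,
// the filter condition function, and the create-mutation default for the
// subscriber_id column.
pub fn register_my_items_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<my_items::Entity>(context, &my_items::Column::SubscriberId);
}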
apps/recorder/src/graphql/domains/subscription_bangumi.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
    graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_bangumi,
};

pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
        context,
        &subscription_bangumi::Column::SubscriberId,
    );
}

pub fn register_subscription_bangumi_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    seaography::register_entity!(builder, subscription_bangumi);

    builder
}
apps/recorder/src/graphql/domains/subscription_episode.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
    graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_episode,
};

pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscription_episode::Entity>(
        context,
        &subscription_episode::Column::SubscriberId,
    );
}

pub fn register_subscription_episode_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    seaography::register_entity!(builder, subscription_episode);

    builder
}
apps/recorder/src/graphql/domains/subscriptions.rs (new file, 214 lines)
@@ -0,0 +1,214 @@
use std::sync::Arc;

use async_graphql::dynamic::{FieldValue, TypeRef};
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
use seaography::{
    Builder as SeaographyBuilder, BuilderContext, EntityObjectBuilder, EntityQueryFieldBuilder,
    get_filter_conditions,
};

use crate::{
    errors::RecorderError,
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
        infra::custom::generate_entity_filter_mutation_field,
    },
    models::{
        subscriber_tasks,
        subscriptions::{self, SubscriptionTrait},
    },
    task::SubscriberTask,
};

pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscriptions::Entity>(
        context,
        &subscriptions::Column::SubscriberId,
    );
}

pub fn register_subscriptions_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.register_enumeration::<subscriptions::SubscriptionCategory>();
    seaography::register_entity!(builder, subscriptions);

    let context = builder.context;

    let entity_object_builder = EntityObjectBuilder { context };
    let entity_query_field = EntityQueryFieldBuilder { context };

    {
        let sync_one_feeds_incremental_mutation_name = format!(
            "{}SyncOneFeedsIncremental",
            entity_query_field.type_name::<subscriptions::Entity>()
        );

        let sync_one_feeds_incremental_mutation = generate_entity_filter_mutation_field::<
            subscriptions::Entity,
            _,
            _,
        >(
            builder.context,
            sync_one_feeds_incremental_mutation_name,
            TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
            Arc::new(|resolver_ctx, app_ctx, filters| {
                let filters_condition =
                    get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);

                Box::pin(async move {
                    let db = app_ctx.db();

                    let subscription_model = subscriptions::Entity::find()
                        .filter(filters_condition)
                        .one(db)
                        .await?
                        .ok_or_else(|| RecorderError::ModelEntityNotFound {
                            entity: "Subscription".into(),
                        })?;

                    let subscription =
                        subscriptions::Subscription::try_from_model(&subscription_model)?;

                    let task_service = app_ctx.task();

                    let task_id = task_service
                        .add_subscriber_task(
                            subscription_model.subscriber_id,
                            SubscriberTask::SyncOneSubscriptionFeedsIncremental(
                                subscription.into(),
                            ),
                        )
                        .await?;

                    let task_model = subscriber_tasks::Entity::find()
                        .filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
                        .one(db)
                        .await?
                        .ok_or_else(|| RecorderError::ModelEntityNotFound {
                            entity: "SubscriberTask".into(),
                        })?;

                    Ok(Some(FieldValue::owned_any(task_model)))
                })
            }),
        );

        builder.mutations.push(sync_one_feeds_incremental_mutation);
    }
    {
        let sync_one_feeds_full_mutation_name = format!(
            "{}SyncOneFeedsFull",
            entity_query_field.type_name::<subscriptions::Entity>()
        );

        let sync_one_feeds_full_mutation = generate_entity_filter_mutation_field::<
            subscriptions::Entity,
            _,
            _,
        >(
            builder.context,
            sync_one_feeds_full_mutation_name,
            TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
            Arc::new(|resolver_ctx, app_ctx, filters| {
                let filters_condition =
                    get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);

                Box::pin(async move {
                    let db = app_ctx.db();

                    let subscription_model = subscriptions::Entity::find()
                        .filter(filters_condition)
                        .one(db)
                        .await?
                        .ok_or_else(|| RecorderError::ModelEntityNotFound {
                            entity: "Subscription".into(),
                        })?;

                    let subscription =
                        subscriptions::Subscription::try_from_model(&subscription_model)?;

                    let task_service = app_ctx.task();

                    let task_id = task_service
                        .add_subscriber_task(
                            subscription_model.subscriber_id,
                            SubscriberTask::SyncOneSubscriptionFeedsFull(subscription.into()),
                        )
                        .await?;

                    let task_model = subscriber_tasks::Entity::find()
                        .filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
                        .one(db)
                        .await?
                        .ok_or_else(|| RecorderError::ModelEntityNotFound {
                            entity: "SubscriberTask".into(),
                        })?;

                    Ok(Some(FieldValue::owned_any(task_model)))
                })
            }),
        );

        builder.mutations.push(sync_one_feeds_full_mutation);
    }

    {
        let sync_one_sources_mutation_name = format!(
            "{}SyncOneSources",
            entity_query_field.type_name::<subscriptions::Entity>()
        );

        let sync_one_sources_mutation = generate_entity_filter_mutation_field::<
            subscriptions::Entity,
            _,
            _,
        >(
            builder.context,
            sync_one_sources_mutation_name,
            TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
            Arc::new(|resolver_ctx, app_ctx, filters| {
                let filters_condition =
                    get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);

                Box::pin(async move {
                    let db = app_ctx.db();

                    let subscription_model = subscriptions::Entity::find()
                        .filter(filters_condition)
                        .one(db)
                        .await?
                        .ok_or_else(|| RecorderError::ModelEntityNotFound {
                            entity: "Subscription".into(),
                        })?;

                    let subscription =
                        subscriptions::Subscription::try_from_model(&subscription_model)?;

                    let task_service = app_ctx.task();

                    let task_id = task_service
                        .add_subscriber_task(
                            subscription_model.subscriber_id,
                            SubscriberTask::SyncOneSubscriptionSources(subscription.into()),
                        )
                        .await?;

                    let task_model = subscriber_tasks::Entity::find()
                        .filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
                        .one(db)
                        .await?
                        .ok_or_else(|| RecorderError::ModelEntityNotFound {
                            entity: "SubscriberTask".into(),
                        })?;

                    Ok(Some(FieldValue::owned_any(task_model)))
                })
            }),
        );

        builder.mutations.push(sync_one_sources_mutation);
    }

    builder
}
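The three mutation blocks above are deliberately parallel: find the subscription by filter, build a SubscriberTask variant, enqueue it, then return the persisted task model. A possible shared helper, sketched here as a refactoring idea under the types visible in this file rather than code from this changeset:

// Sketch only: factors out the common fetch/dispatch/reload flow; imports
// (Arc, Condition, QueryFilter, etc.) match the use block at the top of the
// file, and RecorderResult<T> is assumed to be Result<T, RecorderError>.
async fn dispatch_subscription_task(
    app_ctx: Arc<dyn AppContextTrait>,
    filters_condition: Condition,
    to_task: impl FnOnce(subscriptions::Subscription) -> SubscriberTask,
) -> RecorderResult<subscriber_tasks::Model> {
    let db = app_ctx.db();
    let subscription_model = subscriptions::Entity::find()
        .filter(filters_condition)
        .one(db)
        .await?
        .ok_or_else(|| RecorderError::ModelEntityNotFound {
            entity: "Subscription".into(),
        })?;
    let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;
    let task_id = app_ctx
        .task()
        .add_subscriber_task(subscription_model.subscriber_id, to_task(subscription))
        .await?;
    subscriber_tasks::Entity::find()
        .filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
        .one(db)
        .await?
        .ok_or_else(|| RecorderError::ModelEntityNotFound {
            entity: "SubscriberTask".into(),
        })
}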
apps/recorder/src/graphql/infra/crypto.rs (new file, 69 lines)
@@ -0,0 +1,69 @@
use std::sync::Arc;

use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::{EntityTrait, Value as SeaValue};
use seaography::{BuilderContext, SeaResult};

use crate::{
    app::AppContextTrait,
    graphql::infra::util::{get_column_key, get_entity_key},
};

pub fn register_crypto_column_input_conversion_to_schema_context<T>(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
    column: &T::Column,
) where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let column_key = get_column_key::<T>(context, column);
    let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
    let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_key);

    context.types.input_conversions.insert(
        format!("{entity_name}.{column_name}"),
        Box::new(
            move |_resolve_context: &ResolverContext<'_>,
                  value: &ValueAccessor|
                  -> SeaResult<sea_orm::Value> {
                let source = value.string()?;
                let encrypted = ctx.crypto().encrypt_string(source.into())?;
                Ok(encrypted.into())
            },
        ),
    );
}

pub fn register_crypto_column_output_conversion_to_schema_context<T>(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
    column: &T::Column,
) where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let column_key = get_column_key::<T>(context, column);
    let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
    let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_key);

    context.types.output_conversions.insert(
        format!("{entity_name}.{column_name}"),
        Box::new(
            move |value: &sea_orm::Value| -> SeaResult<async_graphql::Value> {
                if let SeaValue::String(s) = value {
                    if let Some(s) = s {
                        let decrypted = ctx.crypto().decrypt_string(s)?;
                        Ok(async_graphql::Value::String(decrypted))
                    } else {
                        Ok(async_graphql::Value::Null)
                    }
                } else {
                    Err(async_graphql::Error::new("crypto column must be a string column").into())
                }
            },
        ),
    );
}
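Both registrations are keyed by "{EntityName}.{columnName}", so they apply per column: input values are encrypted before they reach the database, output values are decrypted before they reach the client, and the ciphertext never leaves the server. A hypothetical call site (the entity and column names here are assumptions; the likely consumer in this changeset is register_credential3rd_to_schema_context, whose body is not shown):

// Assumed example: wire both directions for a sensitive column.
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
    &mut context,
    app_ctx.clone(),
    &credential_3rd::Column::Password, // hypothetical column
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
    &mut context,
    app_ctx.clone(),
    &credential_3rd::Column::Password, // hypothetical column
);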
apps/recorder/src/graphql/infra/custom.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
use std::{pin::Pin, sync::Arc};

use async_graphql::dynamic::{
    Field, FieldFuture, FieldValue, InputValue, ResolverContext, TypeRef, ValueAccessor,
};
use sea_orm::EntityTrait;
use seaography::{BuilderContext, EntityObjectBuilder, FilterInputBuilder, GuardAction};

use crate::{app::AppContextTrait, errors::RecorderResult};

pub type FilterMutationFn = Arc<
    dyn for<'a> Fn(
            &ResolverContext<'a>,
            Arc<dyn AppContextTrait>,
            Option<ValueAccessor<'_>>,
        ) -> Pin<
            Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>,
        > + Send
        + Sync,
>;

pub fn generate_entity_filter_mutation_field<T, N, R>(
    builder_context: &'static BuilderContext,
    field_name: N,
    type_ref: R,
    mutation_fn: FilterMutationFn,
) -> Field
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
    N: Into<String>,
    R: Into<TypeRef>,
{
    let entity_filter_input_builder = FilterInputBuilder {
        context: builder_context,
    };
    let entity_object_builder = EntityObjectBuilder {
        context: builder_context,
    };
    let object_name: String = entity_object_builder.type_name::<T>();

    let context = builder_context;

    let guard = builder_context.guards.entity_guards.get(&object_name);

    Field::new(field_name, type_ref, move |ctx| {
        let mutation_fn = mutation_fn.clone();
        FieldFuture::new(async move {
            let guard_flag = if let Some(guard) = guard {
                (*guard)(&ctx)
            } else {
                GuardAction::Allow
            };

            if let GuardAction::Block(reason) = guard_flag {
                return Err::<Option<_>, async_graphql::Error>(async_graphql::Error::new(
                    reason.unwrap_or("Entity guard triggered.".into()),
                ));
            }

            let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;

            let filters = ctx.args.get(&context.entity_delete_mutation.filter_field);

            let result = mutation_fn(&ctx, app_ctx.clone(), filters)
                .await
                .map_err(async_graphql::Error::new_with_source)?;

            Ok(result)
        })
    })
    .argument(InputValue::new(
        &context.entity_delete_mutation.filter_field,
        TypeRef::named(entity_filter_input_builder.type_name(&object_name)),
    ))
}
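A minimal caller sketch mirroring the subscriber_tasks and subscriptions call sites above (the field name and stub body are made up): note that the helper reuses entity_delete_mutation.filter_field, so every custom mutation accepts the same filter input type as the generated delete mutation for that entity.

// Stub resolver: accepts the entity's filter input and returns a constant.
let noop_mutation = generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
    context,
    "subscriberTasksNoop".to_string(), // hypothetical field name
    TypeRef::named_nn(TypeRef::INT),
    Arc::new(|_resolver_ctx, _app_ctx, _filters| {
        Box::pin(async move { Ok::<_, RecorderError>(Some(FieldValue::value(0))) })
    }),
);
builder.mutations.push(noop_mutation);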
@@ -1,23 +0,0 @@
mod json;
mod subscriber;

use std::borrow::Cow;

use async_graphql::dynamic::TypeRef;
pub use json::{JSONB_FILTER_INFO, jsonb_filter_condition_function};
use maplit::btreeset;
use seaography::{FilterInfo, FilterOperation as SeaographqlFilterOperation};
pub use subscriber::{SUBSCRIBER_ID_FILTER_INFO, subscriber_id_condition_function};

pub fn init_custom_filter_info() {
    SUBSCRIBER_ID_FILTER_INFO.get_or_init(|| FilterInfo {
        type_name: String::from("SubscriberIdFilterInput"),
        base_type: TypeRef::INT.into(),
        supported_operations: btreeset! { SeaographqlFilterOperation::Equals },
    });
    JSONB_FILTER_INFO.get_or_init(|| FilterInfo {
        type_name: String::from("JsonbFilterInput"),
        base_type: TypeRef::Named(Cow::Borrowed("serde_json::Value")).to_string(),
        supported_operations: btreeset! { SeaographqlFilterOperation::Equals },
    });
}
@@ -1,39 +0,0 @@
use async_graphql::dynamic::ObjectAccessor;
use once_cell::sync::OnceCell;
use sea_orm::{ColumnTrait, Condition, EntityTrait};
use seaography::{
    BuilderContext, FilterInfo, FilterOperation as SeaographqlFilterOperation, SeaResult,
};

pub static SUBSCRIBER_ID_FILTER_INFO: OnceCell<FilterInfo> = OnceCell::new();

pub type FnFilterCondition =
    Box<dyn Fn(Condition, &ObjectAccessor) -> SeaResult<Condition> + Send + Sync>;

pub fn subscriber_id_condition_function<T>(
    _context: &BuilderContext,
    column: &T::Column,
) -> FnFilterCondition
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let column = *column;
    Box::new(move |mut condition, filter| {
        let subscriber_id_filter_info = SUBSCRIBER_ID_FILTER_INFO.get().unwrap();
        let operations = &subscriber_id_filter_info.supported_operations;
        for operation in operations {
            match operation {
                SeaographqlFilterOperation::Equals => {
                    if let Some(value) = filter.get("eq") {
                        let value: i32 = value.i64()?.try_into()?;
                        let value = sea_orm::Value::Int(Some(value));
                        condition = condition.add(column.eq(value));
                    }
                }
                _ => unreachable!("unreachable filter operation for subscriber_id"),
            }
        }
        Ok(condition)
    })
}
File diff suppressed because it is too large
@@ -1,6 +1,4 @@
-pub mod filter;
-pub mod guard;
-pub mod order;
-pub mod pagination;
-pub mod transformer;
+pub mod crypto;
+pub mod custom;
+pub mod json;
 pub mod util;
@@ -1,36 +0,0 @@
use async_graphql::{InputObject, SimpleObject};

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, InputObject)]
pub struct CursorInput {
    pub cursor: Option<String>,
    pub limit: u64,
}

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, InputObject)]
pub struct PageInput {
    pub page: u64,
    pub limit: u64,
}

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, InputObject)]
pub struct OffsetInput {
    pub offset: u64,
    pub limit: u64,
}

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, InputObject)]
pub struct PaginationInput {
    pub cursor: Option<CursorInput>,
    pub page: Option<PageInput>,
    pub offset: Option<OffsetInput>,
}

pub type PageInfo = async_graphql::connection::PageInfo;

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, SimpleObject)]
pub struct PaginationInfo {
    pub pages: u64,
    pub current: u64,
    pub offset: u64,
    pub total: u64,
}
@@ -1,83 +0,0 @@
use std::{collections::BTreeMap, sync::Arc};

use async_graphql::dynamic::ResolverContext;
use sea_orm::{ColumnTrait, Condition, EntityTrait, Value};
use seaography::{BuilderContext, FnFilterConditionsTransformer, FnMutationInputObjectTransformer};

use super::util::{get_column_key, get_entity_key};
use crate::auth::AuthUserInfo;

pub fn filter_condition_transformer<T>(
    _context: &BuilderContext,
    column: &T::Column,
) -> FnFilterConditionsTransformer
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let column = *column;
    Box::new(
        move |context: &ResolverContext, condition: Condition| -> Condition {
            match context.ctx.data::<AuthUserInfo>() {
                Ok(user_info) => {
                    let subscriber_id = user_info.subscriber_auth.subscriber_id;
                    condition.add(column.eq(subscriber_id))
                }
                Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
            }
        },
    )
}

pub fn mutation_input_object_transformer<T>(
    context: &BuilderContext,
    column: &T::Column,
) -> FnMutationInputObjectTransformer
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
    let column_key = get_column_key::<T>(context, column);
    let column_name = Arc::new(context.entity_object.column_name.as_ref()(
        &entity_key,
        &column_key,
    ));
    let entity_create_one_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name, context.entity_create_one_mutation.mutation_suffix
    ));
    let entity_create_batch_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name,
        context.entity_create_batch_mutation.mutation_suffix.clone()
    ));
    Box::new(
        move |context: &ResolverContext,
              mut input: BTreeMap<String, Value>|
              -> BTreeMap<String, Value> {
            let field_name = context.field().name();
            if field_name == entity_create_one_mutation_field_name.as_str()
                || field_name == entity_create_batch_mutation_field_name.as_str()
            {
                match context.ctx.data::<AuthUserInfo>() {
                    Ok(user_info) => {
                        let subscriber_id = user_info.subscriber_auth.subscriber_id;
                        let value = input.get_mut(column_name.as_str());
                        if value.is_none() {
                            input.insert(
                                column_name.as_str().to_string(),
                                Value::Int(Some(subscriber_id)),
                            );
                        }
                        input
                    }
                    Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
                }
            } else {
                input
            }
        },
    )
}
@@ -1,9 +1,9 @@
 pub mod config;
+pub mod domains;
 pub mod infra;
-pub mod schema_root;
+mod schema;
 pub mod service;
-pub mod views;
 
 pub use config::GraphQLConfig;
-pub use schema_root::schema;
+pub use schema::build_schema;
 pub use service::GraphQLService;
apps/recorder/src/graphql/schema.rs (new file, 113 lines)
@@ -0,0 +1,113 @@
use std::sync::Arc;

use async_graphql::dynamic::*;
use once_cell::sync::OnceCell;
use seaography::{Builder, BuilderContext};

use crate::{
    app::AppContextTrait,
    graphql::{
        domains::{
            bangumi::{register_bangumi_to_schema_builder, register_bangumi_to_schema_context},
            credential_3rd::{
                register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context,
            },
            downloaders::{
                register_downloaders_to_schema_builder, register_downloaders_to_schema_context,
            },
            downloads::{
                register_downloads_to_schema_builder, register_downloads_to_schema_context,
            },
            episodes::{register_episodes_to_schema_builder, register_episodes_to_schema_context},
            feeds::{register_feeds_to_schema_builder, register_feeds_to_schema_context},
            subscriber_tasks::{
                register_subscriber_tasks_to_schema_builder,
                register_subscriber_tasks_to_schema_context,
            },
            subscribers::{
                register_subscribers_to_schema_builder, register_subscribers_to_schema_context,
            },
            subscription_bangumi::{
                register_subscription_bangumi_to_schema_builder,
                register_subscription_bangumi_to_schema_context,
            },
            subscription_episode::{
                register_subscription_episode_to_schema_builder,
                register_subscription_episode_to_schema_context,
            },
            subscriptions::{
                register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context,
            },
        },
        infra::json::register_jsonb_input_filter_to_schema_builder,
    },
};

pub static CONTEXT: OnceCell<BuilderContext> = OnceCell::new();

pub fn build_schema(
    app_ctx: Arc<dyn AppContextTrait>,
    depth: Option<usize>,
    complexity: Option<usize>,
) -> Result<Schema, SchemaError> {
    let database = app_ctx.db().as_ref().clone();

    let context = CONTEXT.get_or_init(|| {
        let mut context = BuilderContext::default();

        {
            // domains
            register_feeds_to_schema_context(&mut context);
            register_subscribers_to_schema_context(&mut context);
            register_subscriptions_to_schema_context(&mut context);
            register_subscriber_tasks_to_schema_context(&mut context);
            register_credential3rd_to_schema_context(&mut context, app_ctx.clone());
            register_downloaders_to_schema_context(&mut context);
            register_downloads_to_schema_context(&mut context);
            register_episodes_to_schema_context(&mut context);
            register_subscription_bangumi_to_schema_context(&mut context);
            register_subscription_episode_to_schema_context(&mut context);
            register_bangumi_to_schema_context(&mut context);
        }
        context
    });

    let mut builder = Builder::new(context, database.clone());

    {
        // infra
        builder = register_jsonb_input_filter_to_schema_builder(builder);
    }
    {
        // domains
        builder = register_subscribers_to_schema_builder(builder);
        builder = register_feeds_to_schema_builder(builder);
        builder = register_episodes_to_schema_builder(builder);
        builder = register_subscription_bangumi_to_schema_builder(builder);
        builder = register_subscription_episode_to_schema_builder(builder);
        builder = register_downloaders_to_schema_builder(builder);
        builder = register_downloads_to_schema_builder(builder);
        builder = register_subscriptions_to_schema_builder(builder);
        builder = register_credential3rd_to_schema_builder(builder);
        builder = register_subscriber_tasks_to_schema_builder(builder);
        builder = register_bangumi_to_schema_builder(builder);
    }

    let schema = builder.schema_builder();

    let schema = if let Some(depth) = depth {
        schema.limit_depth(depth)
    } else {
        schema
    };
    let schema = if let Some(complexity) = complexity {
        schema.limit_complexity(complexity)
    } else {
        schema
    };
    schema
        .data(database)
        .data(app_ctx)
        .finish()
        .inspect_err(|e| tracing::error!(e = ?e))
}
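A short sketch of exercising build_schema, for example from an async test or the playground; constructing app_ctx is app-specific and elided here, and the depth/complexity values are arbitrary:

// Introspection smoke test (sketch): both limits are optional.
let schema = build_schema(app_ctx.clone(), Some(32), Some(1024)).expect("schema should build");
let response = schema.execute("{ __schema { queryType { name } } }").await;
assert!(response.errors.is_empty());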
@@ -1,212 +0,0 @@
use async_graphql::dynamic::*;
use once_cell::sync::OnceCell;
use sea_orm::{DatabaseConnection, EntityTrait, Iterable};
use seaography::{Builder, BuilderContext, FilterType, FilterTypesMapHelper};

use crate::graphql::infra::{
    filter::{
        JSONB_FILTER_INFO, SUBSCRIBER_ID_FILTER_INFO, init_custom_filter_info,
        subscriber_id_condition_function,
    },
    guard::{guard_entity_with_subscriber_id, guard_field_with_subscriber_id},
    transformer::{filter_condition_transformer, mutation_input_object_transformer},
    util::{get_entity_column_key, get_entity_key},
};

pub static CONTEXT: OnceCell<BuilderContext> = OnceCell::new();

fn restrict_filter_input_for_entity<T>(
    context: &mut BuilderContext,
    column: &T::Column,
    filter_type: Option<FilterType>,
) where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let key = get_entity_column_key::<T>(context, column);
    context.filter_types.overwrites.insert(key, filter_type);
}

fn restrict_jsonb_filter_input_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_column_key = get_entity_column_key::<T>(context, column);
    context.filter_types.overwrites.insert(
        entity_column_key.clone(),
        Some(FilterType::Custom(
            JSONB_FILTER_INFO.get().unwrap().type_name.clone(),
        )),
    );
}

fn restrict_subscriber_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let entity_column_key = get_entity_column_key::<T>(context, column);
    context.guards.entity_guards.insert(
        entity_key.clone(),
        guard_entity_with_subscriber_id::<T>(context, column),
    );
    context.guards.field_guards.insert(
        entity_column_key.clone(),
        guard_field_with_subscriber_id::<T>(context, column),
    );
    context.filter_types.overwrites.insert(
        entity_column_key.clone(),
        Some(FilterType::Custom(
            SUBSCRIBER_ID_FILTER_INFO.get().unwrap().type_name.clone(),
        )),
    );
    context.filter_types.condition_functions.insert(
        entity_column_key.clone(),
        subscriber_id_condition_function::<T>(context, column),
    );
    context.transformers.filter_conditions_transformers.insert(
        entity_key.clone(),
        filter_condition_transformer::<T>(context, column),
    );
    context
        .transformers
        .mutation_input_object_transformers
        .insert(
            entity_key,
            mutation_input_object_transformer::<T>(context, column),
        );
    context
        .entity_input
        .insert_skips
        .push(entity_column_key.clone());
    context.entity_input.update_skips.push(entity_column_key);
}

pub fn schema(
    database: DatabaseConnection,
    depth: Option<usize>,
    complexity: Option<usize>,
) -> Result<Schema, SchemaError> {
    use crate::models::*;
    init_custom_filter_info();
    let context = CONTEXT.get_or_init(|| {
        let mut context = BuilderContext::default();

        context.pagination_input.type_name = "SeaographyPaginationInput".to_string();
        context.pagination_info_object.type_name = "SeaographyPaginationInfo".to_string();
        context.cursor_input.type_name = "SeaographyCursorInput".to_string();
        context.offset_input.type_name = "SeaographyOffsetInput".to_string();
        context.page_input.type_name = "SeaographyPageInput".to_string();
        context.page_info_object.type_name = "SeaographyPageInfo".to_string();

        restrict_subscriber_for_entity::<bangumi::Entity>(
            &mut context,
            &bangumi::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<downloaders::Entity>(
            &mut context,
            &downloaders::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<downloads::Entity>(
            &mut context,
            &downloads::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<episodes::Entity>(
            &mut context,
            &episodes::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<subscriptions::Entity>(
            &mut context,
            &subscriptions::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<subscribers::Entity>(
            &mut context,
            &subscribers::Column::Id,
        );
        restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
            &mut context,
            &subscription_bangumi::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<subscription_episode::Entity>(
            &mut context,
            &subscription_episode::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
            &mut context,
            &subscriber_tasks::Column::SubscriberId,
        );
        restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
            &mut context,
            &subscriber_tasks::Column::Job,
        );
        for column in subscribers::Column::iter() {
            if !matches!(column, subscribers::Column::Id) {
                restrict_filter_input_for_entity::<subscribers::Entity>(
                    &mut context,
                    &column,
                    None,
                );
            }
        }
        context
    });
    let mut builder = Builder::new(context, database.clone());

    {
        let filter_types_map_helper = FilterTypesMapHelper { context };

        builder.schema = builder.schema.register(
            filter_types_map_helper.generate_filter_input(SUBSCRIBER_ID_FILTER_INFO.get().unwrap()),
        );
    }

    {
        builder.register_entity::<subscribers::Entity>(
            <subscribers::RelatedEntity as sea_orm::Iterable>::iter()
                .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
                .collect(),
        );
        builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
        builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
    }

    seaography::register_entities!(
        builder,
        [
            bangumi,
            downloaders,
            downloads,
            episodes,
            subscription_bangumi,
            subscription_episode,
            subscriptions,
            subscriber_tasks,
        ]
    );

    {
        builder.register_enumeration::<downloads::DownloadStatus>();
        builder.register_enumeration::<subscriptions::SubscriptionCategory>();
        builder.register_enumeration::<downloaders::DownloaderCategory>();
        builder.register_enumeration::<downloads::DownloadMime>();
    }

    let schema = builder.schema_builder();

    let schema = if let Some(depth) = depth {
        schema.limit_depth(depth)
    } else {
        schema
    };
    let schema = if let Some(complexity) = complexity {
        schema.limit_complexity(complexity)
    } else {
        schema
    };
    schema
        .data(database)
        .finish()
        .inspect_err(|e| tracing::error!(e = ?e))
}
@@ -1,8 +1,9 @@
-use async_graphql::dynamic::Schema;
-use sea_orm::DatabaseConnection;
+use std::sync::Arc;
 
-use super::{config::GraphQLConfig, schema_root};
-use crate::errors::RecorderResult;
+use async_graphql::dynamic::Schema;
+
+use super::{build_schema, config::GraphQLConfig};
+use crate::{app::AppContextTrait, errors::RecorderResult};
 
 #[derive(Debug)]
 pub struct GraphQLService {
@@ -10,12 +11,12 @@ pub struct GraphQLService {
 }
 
 impl GraphQLService {
-    pub async fn from_config_and_database(
+    pub async fn from_config_and_ctx(
         config: GraphQLConfig,
-        db: DatabaseConnection,
+        ctx: Arc<dyn AppContextTrait>,
     ) -> RecorderResult<Self> {
-        let schema = schema_root::schema(
-            db,
+        let schema = build_schema(
+            ctx,
             config.depth_limit.and_then(|l| l.into()),
             config.complexity_limit.and_then(|l| l.into()),
         )?;
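Call sites change accordingly; a sketch of the new shape (the `config.graphql` field name and surrounding setup are assumptions, not shown in this diff):

// Before: GraphQLService::from_config_and_database(config.graphql.clone(), db).await?
let graphql_service =
    GraphQLService::from_config_and_ctx(config.graphql.clone(), app_ctx.clone()).await?;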
@@ -1,2 +0,0 @@
mod subscription;
mod task;
@@ -1,124 +0,0 @@
use std::sync::Arc;

use async_graphql::{Context, InputObject, Object, Result as GraphQLResult, SimpleObject};

use crate::{
    app::AppContextTrait,
    auth::AuthUserInfo,
    models::subscriptions::{self, SubscriptionTrait},
    task::SubscriberTaskPayload,
};

pub struct SubscriptionMutation;

#[derive(InputObject)]
struct SyncOneSubscriptionFilterInput {
    pub subscription_id: i32,
}

#[derive(SimpleObject)]
struct SyncOneSubscriptionTaskOutput {
    pub task_id: String,
}

#[Object]
impl SubscriptionMutation {
    async fn sync_one_subscription_feeds_incremental(
        &self,
        ctx: &Context<'_>,
        input: SyncOneSubscriptionFilterInput,
    ) -> GraphQLResult<SyncOneSubscriptionTaskOutput> {
        let auth_user_info = ctx.data::<AuthUserInfo>()?;

        let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
        let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;

        let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
            app_ctx.as_ref(),
            input.subscription_id,
            subscriber_id,
        )
        .await?;

        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;

        let task_service = app_ctx.task();

        let task_id = task_service
            .add_subscriber_task(
                auth_user_info.subscriber_auth.subscriber_id,
                SubscriberTaskPayload::SyncOneSubscriptionFeedsIncremental(subscription.into()),
            )
            .await?;

        Ok(SyncOneSubscriptionTaskOutput {
            task_id: task_id.to_string(),
        })
    }

    async fn sync_one_subscription_feeds_full(
        &self,
        ctx: &Context<'_>,
        input: SyncOneSubscriptionFilterInput,
    ) -> GraphQLResult<SyncOneSubscriptionTaskOutput> {
        let auth_user_info = ctx.data::<AuthUserInfo>()?;

        let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
        let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;

        let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
            app_ctx.as_ref(),
            input.subscription_id,
            subscriber_id,
        )
        .await?;

        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;

        let task_service = app_ctx.task();

        let task_id = task_service
            .add_subscriber_task(
                auth_user_info.subscriber_auth.subscriber_id,
                SubscriberTaskPayload::SyncOneSubscriptionFeedsFull(subscription.into()),
            )
            .await?;

        Ok(SyncOneSubscriptionTaskOutput {
            task_id: task_id.to_string(),
        })
    }

    async fn sync_one_subscription_sources(
        &self,
        ctx: &Context<'_>,
        input: SyncOneSubscriptionFilterInput,
    ) -> GraphQLResult<SyncOneSubscriptionTaskOutput> {
        let auth_user_info = ctx.data::<AuthUserInfo>()?;

        let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
        let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;

        let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
            app_ctx.as_ref(),
            input.subscription_id,
            subscriber_id,
        )
        .await?;

        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;

        let task_service = app_ctx.task();

        let task_id = task_service
            .add_subscriber_task(
                auth_user_info.subscriber_auth.subscriber_id,
                SubscriberTaskPayload::SyncOneSubscriptionSources(subscription.into()),
            )
            .await?;

        Ok(SyncOneSubscriptionTaskOutput {
            task_id: task_id.to_string(),
        })
    }
}
@@ -1,27 +0,0 @@
use std::sync::Arc;

use async_graphql::{Context, InputObject, Object, Result as GraphQLResult};

use crate::{app::AppContextTrait, auth::AuthUserInfo};

struct TaskQuery;

#[derive(InputObject)]
struct SubscriberTasksFilterInput {
    pub subscription_id: Option<i32>,
    pub task_id: Option<String>,
    pub task_type: Option<String>,
}

#[Object]
impl TaskQuery {
    async fn subscriber_tasks(&self, ctx: &Context<'_>) -> GraphQLResult<Vec<String>> {
        let auth_user_info = ctx.data::<AuthUserInfo>()?;
        let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
        let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;

        let task_service = app_ctx.task();

        todo!()
    }
}
@@ -21,11 +21,12 @@ pub mod errors;
 pub mod extract;
 pub mod graphql;
 pub mod logger;
+pub mod media;
 pub mod message;
 pub mod migrations;
 pub mod models;
 pub mod storage;
 pub mod task;
-#[cfg(any(test, feature = "playground"))]
 pub mod test_utils;
+pub mod utils;
 pub mod web;
@@ -5,4 +5,4 @@ pub mod service;
 pub use core::{LogFormat, LogLevel, LogRotation};
 
 pub use config::{LoggerConfig, LoggerFileAppender};
-pub use service::LoggerService;
+pub use service::{LoggerService, MODULE_WHITELIST};
@@ -13,7 +13,7 @@ use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
 use crate::errors::RecorderResult;
 
 // Function to initialize the logger based on the provided configuration
-const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];
+pub const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sea_orm", "sea_query"];
 
 // Keep nonblocking file appender work guard
 static NONBLOCKING_WORK_GUARD_KEEP: OnceLock<WorkerGuard> = OnceLock::new();
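Making MODULE_WHITELIST public lets other modules or binaries reuse the same directive set. A sketch of how such a whitelist is commonly folded into a tracing-subscriber EnvFilter; this helper is an assumption, not code from this repo:

// Assumed helper: one "module=debug" directive per whitelisted module,
// layered on top of a base level such as "info".
use tracing_subscriber::filter::EnvFilter;

fn whitelist_filter(base_level: &str) -> EnvFilter {
    MODULE_WHITELIST
        .iter()
        .fold(EnvFilter::new(base_level), |filter, module| {
            filter.add_directive(format!("{module}=debug").parse().expect("valid directive"))
        })
}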
105
apps/recorder/src/media/config.rs
Normal file
105
apps/recorder/src/media/config.rs
Normal file
@@ -0,0 +1,105 @@
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum AutoOptimizeImageFormat {
    #[serde(rename = "image/webp")]
    Webp,
    #[serde(rename = "image/avif")]
    Avif,
    #[serde(rename = "image/jxl")]
    Jxl,
}

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeWebpOptions {
    pub quality: Option<f32>,
}

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeAvifOptions {
    pub quality: Option<u8>,
    pub speed: Option<u8>,
    pub threads: Option<u8>,
}

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeJxlOptions {
    pub quality: Option<f32>,
    pub speed: Option<u8>,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(tag = "mime_type")]
pub enum EncodeImageOptions {
    #[serde(rename = "image/webp")]
    Webp(EncodeWebpOptions),
    #[serde(rename = "image/avif")]
    Avif(EncodeAvifOptions),
    #[serde(rename = "image/jxl")]
    Jxl(EncodeJxlOptions),
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MediaConfig {
    #[serde(default = "default_webp_quality")]
    pub webp_quality: f32,
    #[serde(default = "default_avif_quality")]
    pub avif_quality: u8,
    #[serde(default = "default_avif_speed")]
    pub avif_speed: u8,
    #[serde(default = "default_avif_threads")]
    pub avif_threads: u8,
    #[serde(default = "default_jxl_quality")]
    pub jxl_quality: f32,
    #[serde(default = "default_jxl_speed")]
    pub jxl_speed: u8,
    #[serde(default = "default_auto_optimize_formats")]
    pub auto_optimize_formats: Vec<AutoOptimizeImageFormat>,
}

impl Default for MediaConfig {
    fn default() -> Self {
        Self {
            webp_quality: default_webp_quality(),
            avif_quality: default_avif_quality(),
            avif_speed: default_avif_speed(),
            avif_threads: default_avif_threads(),
            jxl_quality: default_jxl_quality(),
            jxl_speed: default_jxl_speed(),
            auto_optimize_formats: default_auto_optimize_formats(),
        }
    }
}

fn default_webp_quality() -> f32 {
    80.0
}

fn default_avif_quality() -> u8 {
    80
}

fn default_avif_speed() -> u8 {
    6
}

fn default_avif_threads() -> u8 {
    1
}

fn default_jxl_quality() -> f32 {
    80.0
}

fn default_jxl_speed() -> u8 {
    7
}

fn default_auto_optimize_formats() -> Vec<AutoOptimizeImageFormat> {
    vec![
        AutoOptimizeImageFormat::Webp,
        // AutoOptimizeImageFormat::Avif, // too slow
        #[cfg(feature = "jxl")]
        AutoOptimizeImageFormat::Jxl,
    ]
}
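A quick sanity check of how these serde defaults behave (illustrative only, not part of the change set; assumes serde_json is available and that MediaConfig is re-exported as recorder::media::MediaConfig):

use recorder::media::MediaConfig; // assumed re-export path

fn main() {
    // Deserializing an empty object exercises every #[serde(default = ...)] hook.
    let cfg: MediaConfig = serde_json::from_str("{}").expect("defaults should fill in");
    assert_eq!(cfg.webp_quality, 80.0);
    assert_eq!(cfg.avif_speed, 6);

    // Any field can still be overridden individually.
    let cfg: MediaConfig = serde_json::from_str(r#"{"jxl_speed": 3}"#).unwrap();
    assert_eq!(cfg.jxl_speed, 3);
}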
apps/recorder/src/media/mod.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
mod config;
mod service;

pub use config::{
    AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
    EncodeWebpOptions, MediaConfig,
};
pub use service::MediaService;
apps/recorder/src/media/service.rs (new file, 199 lines)
@@ -0,0 +1,199 @@
use std::io::Cursor;

use bytes::Bytes;
use image::{GenericImageView, ImageEncoder, ImageReader, codecs::avif::AvifEncoder};
use quirks_path::Path;
use snafu::ResultExt;

use crate::{
    errors::{RecorderError, RecorderResult},
    media::{EncodeAvifOptions, EncodeJxlOptions, EncodeWebpOptions, MediaConfig},
};

#[derive(Debug)]
pub struct MediaService {
    pub config: MediaConfig,
}

impl MediaService {
    pub async fn from_config(config: MediaConfig) -> RecorderResult<Self> {
        Ok(Self { config })
    }

    pub fn is_legacy_image_format(&self, ext: &str) -> bool {
        matches!(ext, "jpeg" | "jpg" | "png")
    }

    pub async fn optimize_image_to_webp(
        &self,
        path: impl AsRef<Path>,
        data: impl Into<Bytes>,
        options: Option<EncodeWebpOptions>,
    ) -> RecorderResult<Bytes> {
        let quality = options
            .and_then(|o| o.quality)
            .unwrap_or(self.config.webp_quality);

        let data = data.into();

        tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
            let cursor = Cursor::new(data);
            let image_reader = ImageReader::new(cursor).with_guessed_format()?;

            let img = image_reader.decode()?;

            let (width, height) = (img.width(), img.height());
            let color = img.color();

            let webp_data = if color.has_alpha() {
                let rgba_image = img.into_rgba8();
                let encoder = webp::Encoder::from_rgba(&rgba_image, width, height);
                encoder.encode(quality)
            } else {
                let rgb_image = img.into_rgb8();
                let encoder = webp::Encoder::from_rgb(&rgb_image, width, height);
                encoder.encode(quality)
            };

            Ok(Bytes::from(webp_data.to_vec()))
        })
        .await
        .with_whatever_context::<_, String, RecorderError>(|_| {
            format!(
                "failed to spawn blocking task to optimize legacy image to webp: {}",
                path.as_ref().display()
            )
        })?
    }

    pub async fn optimize_image_to_avif(
        &self,
        path: impl AsRef<Path>,
        data: Bytes,
        options: Option<EncodeAvifOptions>,
    ) -> RecorderResult<Bytes> {
        let quality = options
            .as_ref()
            .and_then(|o| o.quality)
            .unwrap_or(self.config.avif_quality);
        let speed = options
            .as_ref()
            .and_then(|o| o.speed)
            .unwrap_or(self.config.avif_speed);
        let threads = options
            .as_ref()
            .and_then(|o| o.threads)
            .unwrap_or(self.config.avif_threads);

        tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
            let mut buf = vec![];

            {
                let cursor = Cursor::new(data);
                let image_reader = ImageReader::new(cursor).with_guessed_format()?;

                let img = image_reader.decode()?;

                let (width, height) = img.dimensions();
                let color_type = img.color();
                let encoder = AvifEncoder::new_with_speed_quality(&mut buf, speed, quality)
                    .with_num_threads(Some(threads as usize));

                encoder.write_image(img.as_bytes(), width, height, color_type.into())?;
            }

            Ok(Bytes::from(buf))
        })
        .await
        .with_whatever_context::<_, String, RecorderError>(|_| {
            format!(
                "failed to spawn blocking task to optimize legacy image to avif: {}",
                path.as_ref().display()
            )
        })?
    }

    #[cfg(feature = "jxl")]
    pub async fn optimize_image_to_jxl(
        &self,
        path: impl AsRef<Path>,
        data: Bytes,
        options: Option<EncodeJxlOptions>,
    ) -> RecorderResult<Bytes> {
        let quality = options
            .as_ref()
            .and_then(|o| o.quality)
            .unwrap_or(self.config.jxl_quality);
        let speed = options
            .as_ref()
            .and_then(|o| o.speed)
            .unwrap_or(self.config.jxl_speed);
        tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
            use jpegxl_rs::encode::{ColorEncoding, EncoderResult, EncoderSpeed};
            let cursor = Cursor::new(data);
            let image_reader = ImageReader::new(cursor).with_guessed_format()?;

            let image = image_reader.decode()?;
            let (width, height) = image.dimensions();

            let color = image.color();
            let has_alpha = color.has_alpha();
            let libjxl_speed = match speed {
                0 | 1 => EncoderSpeed::Lightning,
                2 => EncoderSpeed::Thunder,
                3 => EncoderSpeed::Falcon,
                4 => EncoderSpeed::Cheetah,
                5 => EncoderSpeed::Hare,
                6 => EncoderSpeed::Wombat,
                7 => EncoderSpeed::Squirrel,
                8 => EncoderSpeed::Kitten,
                _ => EncoderSpeed::Tortoise,
            };

            let mut encoder_builder = jpegxl_rs::encoder_builder()
                .lossless(false)
                .has_alpha(has_alpha)
                .color_encoding(ColorEncoding::Srgb)
                .speed(libjxl_speed)
                .jpeg_quality(quality)
                .build()?;

            let buffer: EncoderResult<u8> = if has_alpha {
                let sample = image.into_rgba8();
                encoder_builder.encode(&sample, width, height)?
            } else {
                let sample = image.into_rgb8();
                encoder_builder.encode(&sample, width, height)?
            };

            Ok(Bytes::from(buffer.data))
        })
        .await
        .with_whatever_context::<_, String, RecorderError>(|_| {
            format!(
                "failed to spawn blocking task to optimize legacy image to jxl: {}",
                path.as_ref().display()
            )
        })?
    }

    #[cfg(not(feature = "jxl"))]
    pub async fn optimize_image_to_jxl(
        &self,
        _path: impl AsRef<Path>,
        _data: Bytes,
        _options: Option<EncodeJxlOptions>,
    ) -> RecorderResult<Bytes> {
        Err(RecorderError::Whatever {
            message: "jxl feature is not enabled".to_string(),
            source: None.into(),
        })
    }
}
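A minimal usage sketch for the new service (assumptions: a tokio runtime, a local poster.jpg fixture, and that RecorderError converts into Box<dyn Error>; none of this is in the diff):

use recorder::media::{MediaConfig, MediaService};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let service = MediaService::from_config(MediaConfig::default()).await?;

    let data = std::fs::read("poster.jpg")?;
    if service.is_legacy_image_format("jpg") {
        // Decode/encode happens on tokio's blocking pool via spawn_blocking.
        let webp = service.optimize_image_to_webp("poster.jpg", data, None).await?;
        std::fs::write("poster.webp", &webp)?;
    }
    Ok(())
}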
@@ -43,7 +43,7 @@ pub enum Bangumi {
     MikanBangumiId,
     DisplayName,
     SubscriberId,
-    RawName,
+    OriginName,
     Season,
     SeasonRaw,
     Fansub,
@@ -51,9 +51,13 @@ pub enum Bangumi {
     Filter,
     RssLink,
     PosterLink,
+    OriginPosterLink,
+    /**
+     * @deprecated
+     */
     SavePath,
     Homepage,
-    Extra,
+    BangumiType,
 }

 #[derive(DeriveIden)]
@@ -70,22 +74,30 @@ pub enum Episodes {
     Table,
     Id,
     MikanEpisodeId,
-    RawName,
+    OriginName,
     DisplayName,
     BangumiId,
     SubscriberId,
     DownloadId,
+    /**
+     * @deprecated
+     */
     SavePath,
     Resolution,
     Season,
     SeasonRaw,
     Fansub,
     PosterLink,
+    OriginPosterLink,
     EpisodeIndex,
     Homepage,
     Subtitle,
     Source,
-    Extra,
+    EpisodeType,
+    EnclosureTorrentLink,
+    EnclosureMagnetLink,
+    EnclosurePubDate,
+    EnclosureContentLength,
 }

 #[derive(DeriveIden)]
@@ -101,7 +113,7 @@ pub enum SubscriptionEpisode {
 pub enum Downloads {
     Table,
     Id,
-    RawName,
+    OriginName,
     DisplayName,
     SubscriberId,
     DownloaderId,
@@ -148,6 +160,17 @@ pub enum Credential3rd {
     UserAgent,
 }

+#[derive(DeriveIden)]
+pub enum Feeds {
+    Table,
+    Id,
+    Token,
+    FeedType,
+    FeedSource,
+    SubscriberId,
+    SubscriptionId,
+}
+
 macro_rules! create_postgres_enum_for_active_enum {
     ($manager: expr, $active_enum: expr, $($enum_value:expr),+) => {
         {
@@ -96,7 +96,7 @@ impl MigrationTrait for Migration {
     .col(text_null(Bangumi::MikanBangumiId))
     .col(integer(Bangumi::SubscriberId))
     .col(text(Bangumi::DisplayName))
-    .col(text(Bangumi::RawName))
+    .col(text(Bangumi::OriginName))
     .col(integer(Bangumi::Season))
     .col(text_null(Bangumi::SeasonRaw))
     .col(text_null(Bangumi::Fansub))
@@ -104,9 +104,9 @@ impl MigrationTrait for Migration {
     .col(json_binary_null(Bangumi::Filter))
     .col(text_null(Bangumi::RssLink))
     .col(text_null(Bangumi::PosterLink))
+    .col(text_null(Bangumi::OriginPosterLink))
     .col(text_null(Bangumi::SavePath))
     .col(text_null(Bangumi::Homepage))
-    .col(json_binary_null(Bangumi::Extra))
     .foreign_key(
         ForeignKey::create()
             .name("fk_bangumi_subscriber_id")
@@ -209,7 +209,7 @@ impl MigrationTrait for Migration {
     .create_index(
         Index::create()
             .if_not_exists()
-            .name("index_subscription_bangumi_subscriber_id")
+            .name("idx_subscription_bangumi_subscriber_id")
             .table(SubscriptionBangumi::Table)
             .col(SubscriptionBangumi::SubscriberId)
             .to_owned(),
@@ -221,7 +221,7 @@ impl MigrationTrait for Migration {
     table_auto_z(Episodes::Table)
         .col(pk_auto(Episodes::Id))
         .col(text_null(Episodes::MikanEpisodeId))
-        .col(text(Episodes::RawName))
+        .col(text(Episodes::OriginName))
         .col(text(Episodes::DisplayName))
         .col(integer(Episodes::BangumiId))
         .col(integer(Episodes::SubscriberId))
@@ -231,11 +231,11 @@ impl MigrationTrait for Migration {
     .col(text_null(Episodes::SeasonRaw))
     .col(text_null(Episodes::Fansub))
     .col(text_null(Episodes::PosterLink))
+    .col(text_null(Episodes::OriginPosterLink))
     .col(integer(Episodes::EpisodeIndex))
     .col(text_null(Episodes::Homepage))
     .col(text_null(Episodes::Subtitle))
     .col(text_null(Episodes::Source))
-    .col(json_binary_null(Episodes::Extra))
     .foreign_key(
         ForeignKey::create()
             .name("fk_episodes_bangumi_id")
@@ -252,6 +252,15 @@ impl MigrationTrait for Migration {
     .on_update(ForeignKeyAction::Cascade)
     .on_delete(ForeignKeyAction::Cascade),
 )
+.index(
+    Index::create()
+        .if_not_exists()
+        .name("idx_episodes_mikan_episode_id_subscriber_id")
+        .table(Episodes::Table)
+        .col(Episodes::MikanEpisodeId)
+        .col(Episodes::SubscriberId)
+        .unique(),
+)
 .to_owned(),
 )
 .await?;
@@ -267,19 +276,6 @@ impl MigrationTrait for Migration {
     )
     .await?;

-    manager
-        .create_index(
-            Index::create()
-                .if_not_exists()
-                .name("idx_episodes_bangumi_id_mikan_episode_id")
-                .table(Episodes::Table)
-                .col(Episodes::BangumiId)
-                .col(Episodes::MikanEpisodeId)
-                .unique()
-                .to_owned(),
-        )
-        .await?;
-
     manager
         .create_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt)
         .await?;
@@ -338,7 +334,7 @@ impl MigrationTrait for Migration {
     .create_index(
         Index::create()
             .if_not_exists()
-            .name("index_subscription_episode_subscriber_id")
+            .name("idx_subscription_episode_subscriber_id")
             .table(SubscriptionEpisode::Table)
             .col(SubscriptionEpisode::SubscriberId)
             .to_owned(),
@@ -353,7 +349,7 @@ impl MigrationTrait for Migration {
     .drop_index(
         Index::drop()
             .if_exists()
-            .name("index_subscription_episode_subscriber_id")
+            .name("idx_subscription_episode_subscriber_id")
             .table(SubscriptionBangumi::Table)
             .to_owned(),
     )
@@ -380,7 +376,7 @@ impl MigrationTrait for Migration {
     .drop_index(
         Index::drop()
             .if_exists()
-            .name("index_subscription_bangumi_subscriber_id")
+            .name("idx_subscription_bangumi_subscriber_id")
             .table(SubscriptionBangumi::Table)
             .to_owned(),
     )
@@ -80,7 +80,7 @@ impl MigrationTrait for Migration {
     .create_table(
         table_auto_z(Downloads::Table)
             .col(pk_auto(Downloads::Id))
-            .col(string(Downloads::RawName))
+            .col(string(Downloads::OriginName))
             .col(string(Downloads::DisplayName))
             .col(integer(Downloads::SubscriberId))
             .col(integer(Downloads::DownloaderId))
@@ -95,8 +95,8 @@ impl MigrationTrait for Migration {
     DownloadMimeEnum,
     DownloadMime::iden_values(),
 ))
-.col(big_unsigned(Downloads::AllSize))
-.col(big_unsigned(Downloads::CurrSize))
+.col(big_integer(Downloads::AllSize))
+.col(big_integer(Downloads::CurrSize))
 .col(text(Downloads::Url))
 .col(text_null(Downloads::Homepage))
 .col(text_null(Downloads::SavePath))
@@ -28,7 +28,11 @@ impl MigrationTrait for Migration {
     table_auto_z(Credential3rd::Table)
         .col(pk_auto(Credential3rd::Id))
         .col(integer(Credential3rd::SubscriberId))
-        .col(string(Credential3rd::CredentialType))
+        .col(enumeration(
+            Credential3rd::CredentialType,
+            Credential3rdTypeEnum,
+            Credential3rdType::iden_values(),
+        ))
         .col(string_null(Credential3rd::Cookies))
         .col(string_null(Credential3rd::Username))
         .col(string_null(Credential3rd::Password))
@@ -12,13 +12,13 @@ impl MigrationTrait for Migration {
     let db = manager.get_connection();

     db.execute_unprepared(&format!(
-        r#"CREATE VIEW IF NOT EXISTS subscriber_task AS
+        r#"CREATE OR REPLACE VIEW subscriber_tasks AS
 SELECT
     job,
-    task_type,
+    job_type,
     status,
-    (job->'subscriber_id')::integer AS subscriber_id,
-    (job->'task_type')::text AS task_type,
+    (job ->> 'subscriber_id'::text)::integer AS subscriber_id,
+    job ->> 'task_type'::text AS task_type,
     id,
     attempts,
     max_attempts,
@@ -29,7 +29,7 @@ SELECT
     done_at,
     priority
 FROM apalis.jobs
-WHERE job_type = {SUBSCRIBER_TASK_APALIS_NAME}
+WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
 AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
 AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
 ))
@@ -37,10 +37,10 @@ AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,

     db.execute_unprepared(&format!(
         r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscriber_id
-ON apalis.jobs ((job -> 'subscriber_id'))
-WHERE job_type = {SUBSCRIBER_TASK_APALIS_NAME}
+ON apalis.jobs (((job -> 'subscriber_id')::integer))
+WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
 AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
 AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
     ))
     .await?;
@@ -56,7 +56,7 @@ AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
     )
     .await?;

-    db.execute_unprepared("DROP VIEW IF EXISTS subscriber_task")
+    db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
         .await?;

     Ok(())
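A sketch of reading the renamed view through sea-orm's raw-SQL escape hatch (not part of the diff; the 'Pending' status literal is an assumption about apalis job states):

use sea_orm::{ConnectionTrait, DatabaseBackend, DatabaseConnection, Statement};

async fn pending_task_ids(
    db: &DatabaseConnection,
    subscriber_id: i32,
) -> Result<Vec<sea_orm::QueryResult>, sea_orm::DbErr> {
    db.query_all(Statement::from_sql_and_values(
        DatabaseBackend::Postgres,
        // subscriber_id is now exposed as a proper integer column, so it binds directly.
        "SELECT id FROM subscriber_tasks WHERE subscriber_id = $1 AND status = 'Pending'",
        [subscriber_id.into()],
    ))
    .await
}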
apps/recorder/src/migrations/m20250622_015618_feeds.rs (new file, 95 lines)
@@ -0,0 +1,95 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};

use crate::{
    migrations::defs::{
        CustomSchemaManagerExt, Feeds, GeneralIds, Subscribers, Subscriptions, table_auto_z,
    },
    models::feeds::{FeedSource, FeedSourceEnum, FeedType, FeedTypeEnum},
};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        create_postgres_enum_for_active_enum!(manager, FeedTypeEnum, FeedType::Rss).await?;
        create_postgres_enum_for_active_enum!(
            manager,
            FeedSourceEnum,
            FeedSource::SubscriptionEpisode
        )
        .await?;

        manager
            .create_table(
                table_auto_z(Feeds::Table)
                    .col(pk_auto(Feeds::Id))
                    .col(text(Feeds::Token))
                    .col(enumeration(
                        Feeds::FeedType,
                        FeedTypeEnum,
                        FeedType::iden_values(),
                    ))
                    .col(
                        enumeration(Feeds::FeedSource, FeedSourceEnum, FeedSource::iden_values())
                            .not_null(),
                    )
                    .col(integer_null(Feeds::SubscriberId))
                    .col(integer_null(Feeds::SubscriptionId))
                    .index(
                        Index::create()
                            .if_not_exists()
                            .name("idx_feeds_token")
                            .table(Feeds::Table)
                            .col(Feeds::Token)
                            .unique(),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_feeds_subscriber_id")
                            .from(Feeds::Table, Feeds::SubscriberId)
                            .to(Subscribers::Table, Subscribers::Id)
                            .on_update(ForeignKeyAction::Cascade)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_feeds_subscription_id")
                            .from(Feeds::Table, Feeds::SubscriptionId)
                            .to(Subscriptions::Table, Subscriptions::Id)
                            .on_update(ForeignKeyAction::Cascade)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_postgres_auto_update_ts_trigger_for_col(Feeds::Table, GeneralIds::UpdatedAt)
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_postgres_auto_update_ts_trigger_for_col(Feeds::Table, GeneralIds::UpdatedAt)
            .await?;

        manager
            .drop_table(Table::drop().if_exists().table(Feeds::Table).to_owned())
            .await?;

        manager
            .drop_postgres_enum_for_active_enum(FeedTypeEnum)
            .await?;

        manager
            .drop_postgres_enum_for_active_enum(FeedSourceEnum)
            .await?;

        Ok(())
    }
}
apps/recorder/src/migrations/m20250622_020819_bangumi_and_episode_type.rs (new file, 133 lines; filename inferred from the module list below)
@@ -0,0 +1,133 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};

use crate::{
    migrations::defs::{Bangumi, CustomSchemaManagerExt, Episodes},
    models::{
        bangumi::{BangumiType, BangumiTypeEnum},
        episodes::{EpisodeType, EpisodeTypeEnum},
    },
};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan).await?;

        {
            create_postgres_enum_for_active_enum!(manager, BangumiTypeEnum, BangumiType::Mikan)
                .await?;
            manager
                .alter_table(
                    Table::alter()
                        .table(Bangumi::Table)
                        .add_column_if_not_exists(enumeration_null(
                            Bangumi::BangumiType,
                            BangumiTypeEnum,
                            BangumiType::iden_values(),
                        ))
                        .drop_column(Bangumi::SavePath)
                        .to_owned(),
                )
                .await?;

            manager
                .exec_stmt(
                    UpdateStatement::new()
                        .table(Bangumi::Table)
                        .value(
                            Bangumi::BangumiType,
                            BangumiType::Mikan.as_enum(BangumiTypeEnum),
                        )
                        .and_where(Expr::col(Bangumi::BangumiType).is_null())
                        .and_where(Expr::col(Bangumi::MikanBangumiId).is_not_null())
                        .to_owned(),
                )
                .await?;

            manager
                .alter_table(
                    Table::alter()
                        .table(Bangumi::Table)
                        .modify_column(enumeration(
                            Bangumi::BangumiType,
                            BangumiTypeEnum,
                            BangumiType::iden_values(),
                        ))
                        .to_owned(),
                )
                .await?;
        }

        {
            create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan)
                .await?;

            manager
                .alter_table(
                    Table::alter()
                        .table(Episodes::Table)
                        .add_column_if_not_exists(enumeration_null(
                            Episodes::EpisodeType,
                            EpisodeTypeEnum,
                            EpisodeType::enum_type_name(),
                        ))
                        .add_column_if_not_exists(text_null(Episodes::EnclosureMagnetLink))
                        .add_column_if_not_exists(text_null(Episodes::EnclosureTorrentLink))
                        .add_column_if_not_exists(timestamp_with_time_zone_null(
                            Episodes::EnclosurePubDate,
                        ))
                        .add_column_if_not_exists(big_integer_null(
                            Episodes::EnclosureContentLength,
                        ))
                        .drop_column(Episodes::SavePath)
                        .to_owned(),
                )
                .await?;

            manager
                .exec_stmt(
                    UpdateStatement::new()
                        .table(Episodes::Table)
                        .value(
                            Episodes::EpisodeType,
                            EpisodeType::Mikan.as_enum(EpisodeTypeEnum),
                        )
                        .and_where(Expr::col(Episodes::EpisodeType).is_null())
                        .and_where(Expr::col(Episodes::MikanEpisodeId).is_not_null())
                        .to_owned(),
                )
                .await?;

            manager
                .alter_table(
                    Table::alter()
                        .table(Episodes::Table)
                        .modify_column(enumeration(
                            Episodes::EpisodeType,
                            EpisodeTypeEnum,
                            EpisodeType::enum_type_name(),
                        ))
                        .to_owned(),
                )
                .await?;
        }

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_postgres_enum_for_active_enum(BangumiTypeEnum)
            .await?;

        manager
            .drop_postgres_enum_for_active_enum(EpisodeTypeEnum)
            .await?;

        Ok(())
    }
}
@@ -8,6 +8,8 @@ pub mod m20240224_082543_add_downloads;
 pub mod m20241231_000001_auth;
 pub mod m20250501_021523_credential_3rd;
 pub mod m20250520_021135_subscriber_tasks;
+pub mod m20250622_015618_feeds;
+pub mod m20250622_020819_bangumi_and_episode_type;

 pub struct Migrator;
@@ -20,6 +22,8 @@ impl MigratorTrait for Migrator {
     Box::new(m20241231_000001_auth::Migration),
     Box::new(m20250501_021523_credential_3rd::Migration),
     Box::new(m20250520_021135_subscriber_tasks::Migration),
+    Box::new(m20250622_015618_feeds::Migration),
+    Box::new(m20250622_020819_bangumi_and_episode_type::Migration),
 ]
 }
 }
@@ -99,7 +99,9 @@ impl Model {
     ..Default::default()
 };

-let new_item: Model = new_item.save(&txn).await?.try_into()?;
+let new_item: Model = new_item.insert(&txn).await?;
+
+txn.commit().await?;

 Ok(new_item)
 }
@@ -17,7 +17,7 @@ use crate::{
     MikanBangumiHash, MikanBangumiMeta, build_mikan_bangumi_subscription_rss_url,
     scrape_mikan_poster_meta_from_image_url,
 },
-rawname::parse_episode_meta_from_raw_name,
+origin::{BangumiComps, OriginCompTrait},
 },
 };
@@ -29,19 +29,14 @@ pub struct BangumiFilter {
     pub group: Option<Vec<String>>,
 }

-#[derive(
-    Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
-)]
-pub struct BangumiExtra {
-    pub name_zh: Option<String>,
-    pub s_name_zh: Option<String>,
-    pub name_en: Option<String>,
-    pub s_name_en: Option<String>,
-    pub name_jp: Option<String>,
-    pub s_name_jp: Option<String>,
-}
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
+#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "bangumi_type")]
+pub enum BangumiType {
+    #[sea_orm(string_value = "mikan")]
+    Mikan,
+}

-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "bangumi")]
 pub struct Model {
     #[sea_orm(default_expr = "Expr::current_timestamp()")]
@@ -51,9 +46,10 @@ pub struct Model {
     #[sea_orm(primary_key)]
     pub id: i32,
     pub mikan_bangumi_id: Option<String>,
+    pub bangumi_type: BangumiType,
     pub subscriber_id: i32,
     pub display_name: String,
-    pub raw_name: String,
+    pub origin_name: String,
     pub season: i32,
     pub season_raw: Option<String>,
     pub fansub: Option<String>,
@@ -61,9 +57,8 @@ pub struct Model {
     pub filter: Option<BangumiFilter>,
     pub rss_link: Option<String>,
     pub poster_link: Option<String>,
-    pub save_path: Option<String>,
+    pub origin_poster_link: Option<String>,
     pub homepage: Option<String>,
-    pub extra: Option<BangumiExtra>,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -133,10 +128,13 @@ impl ActiveModel {
     _subscription_id: i32,
 ) -> RecorderResult<Self> {
     let mikan_client = ctx.mikan();
-    let storage_service = ctx.storage();
     let mikan_base_url = mikan_client.base_url();
-    let rawname_meta = parse_episode_meta_from_raw_name(&meta.bangumi_title)?;
+    let season_comp = BangumiComps::parse_comp(&meta.bangumi_title)
+        .ok()
+        .map(|(_, s)| s)
+        .and_then(|s| s.season);
+    let season_index = season_comp.as_ref().map(|s| s.num).unwrap_or(1);
+    let season_raw = season_comp.map(|s| s.source.to_string());

     let rss_url = build_mikan_bangumi_subscription_rss_url(
         mikan_base_url.clone(),
@@ -144,14 +142,9 @@ impl ActiveModel {
     Some(&meta.mikan_fansub_id),
 );

-let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src {
-    let poster_meta = scrape_mikan_poster_meta_from_image_url(
-        mikan_client,
-        storage_service,
-        origin_poster_src,
-        subscriber_id,
-    )
-    .await?;
+let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src.clone() {
+    let poster_meta =
+        scrape_mikan_poster_meta_from_image_url(ctx, origin_poster_src).await?;
     poster_meta.poster_src
 } else {
     None
@@ -162,21 +155,15 @@ impl ActiveModel {
     mikan_fansub_id: ActiveValue::Set(Some(meta.mikan_fansub_id)),
     subscriber_id: ActiveValue::Set(subscriber_id),
     display_name: ActiveValue::Set(meta.bangumi_title.clone()),
-    raw_name: ActiveValue::Set(meta.bangumi_title),
-    season: ActiveValue::Set(rawname_meta.season),
-    season_raw: ActiveValue::Set(rawname_meta.season_raw),
+    origin_name: ActiveValue::Set(meta.bangumi_title),
+    season: ActiveValue::Set(season_index),
+    season_raw: ActiveValue::Set(season_raw),
     fansub: ActiveValue::Set(Some(meta.fansub)),
     poster_link: ActiveValue::Set(poster_link),
+    origin_poster_link: ActiveValue::Set(meta.origin_poster_src.map(|src| src.to_string())),
     homepage: ActiveValue::Set(Some(meta.homepage.to_string())),
     rss_link: ActiveValue::Set(Some(rss_url.to_string())),
-    extra: ActiveValue::Set(Some(BangumiExtra {
-        name_zh: rawname_meta.name_zh,
-        name_en: rawname_meta.name_en,
-        name_jp: rawname_meta.name_jp,
-        s_name_en: rawname_meta.name_en_no_season,
-        s_name_jp: rawname_meta.name_jp_no_season,
-        s_name_zh: rawname_meta.name_zh_no_season,
-    })),
+    bangumi_type: ActiveValue::Set(BangumiType::Mikan),
     ..Default::default()
 })
 }
@@ -218,15 +205,16 @@ impl Model {
     Expr::col((
         subscription_bangumi_alias.clone(),
         subscription_bangumi::Column::SubscriptionId,
-    )),
+    ))
+    .is_not_null(),
     "is_subscribed",
 )
 .join_as_rev(
     JoinType::LeftJoin,
     subscription_bangumi::Relation::Bangumi
         .def()
-        .on_condition(move |_left, right| {
-            Expr::col((right, subscription_bangumi::Column::SubscriptionId))
+        .on_condition(move |left, _right| {
+            Expr::col((left, subscription_bangumi::Column::SubscriptionId))
                 .eq(subscription_id)
                 .into_condition()
         }),
@@ -249,9 +237,10 @@ impl Model {
     Column::SubscriberId,
 ])
 .update_columns([
-    Column::RawName,
+    Column::OriginName,
     Column::Fansub,
     Column::PosterLink,
+    Column::OriginPosterLink,
     Column::Season,
     Column::SeasonRaw,
     Column::RssLink,
@@ -269,8 +258,15 @@ impl Model {
     subscriber_id: ActiveValue::Set(subscriber_id),
     ..Default::default()
 })
-.on_conflict_do_nothing()
-.exec(db)
+.on_conflict(
+    OnConflict::columns([
+        subscription_bangumi::Column::SubscriptionId,
+        subscription_bangumi::Column::BangumiId,
+    ])
+    .do_nothing()
+    .to_owned(),
+)
+.exec_without_returning(db)
 .await?;
 }
 Ok(new_bangumi_model)
@@ -1,5 +1,3 @@
-use std::sync::Arc;
-
 use async_trait::async_trait;
 use sea_orm::{ActiveValue, prelude::*};
 use serde::{Deserialize, Serialize};
@@ -18,6 +16,7 @@ use crate::{
     db_type = "Enum",
     enum_name = "credential_3rd_type"
 )]
+#[serde(rename_all = "snake_case")]
 pub enum Credential3rdType {
     #[sea_orm(string_value = "mikan")]
     Mikan,
@@ -66,11 +65,19 @@ impl Related<super::subscriptions::Entity> for Entity {
     }
 }

+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
+pub enum RelatedEntity {
+    #[sea_orm(entity = "super::subscribers::Entity")]
+    Subscriber,
+    #[sea_orm(entity = "super::subscriptions::Entity")]
+    Subscription,
+}
+
 #[async_trait]
 impl ActiveModelBehavior for ActiveModel {}

 impl ActiveModel {
-    pub async fn try_encrypt(mut self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Self> {
+    pub async fn try_encrypt(mut self, ctx: &dyn AppContextTrait) -> RecorderResult<Self> {
         let crypto = ctx.crypto();

         if let ActiveValue::Set(Some(username)) = self.username {
@@ -93,19 +100,24 @@ impl ActiveModel {
 }

 impl Model {
-    pub async fn find_by_id(
-        ctx: Arc<dyn AppContextTrait>,
+    pub async fn find_by_id_and_subscriber_id(
+        ctx: &dyn AppContextTrait,
         id: i32,
+        subscriber_id: i32,
     ) -> RecorderResult<Option<Self>> {
         let db = ctx.db();
-        let credential = Entity::find_by_id(id).one(db).await?;
+        let credential = Entity::find()
+            .filter(Column::Id.eq(id))
+            .filter(Column::SubscriberId.eq(subscriber_id))
+            .one(db)
+            .await?;

         Ok(credential)
     }

     pub fn try_into_userpass_credential(
         self,
-        ctx: Arc<dyn AppContextTrait>,
+        ctx: &dyn AppContextTrait,
     ) -> RecorderResult<UserPassCredential> {
         let crypto = ctx.crypto();
         let username_enc = self
|
|||||||
user_agent: self.user_agent,
|
user_agent: self.user_agent,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn check_available(self, ctx: &dyn AppContextTrait) -> RecorderResult<bool> {
|
||||||
|
let credential_id = self.id;
|
||||||
|
let subscriber_id = self.subscriber_id;
|
||||||
|
match self.credential_type {
|
||||||
|
Credential3rdType::Mikan => {
|
||||||
|
let mikan_client = {
|
||||||
|
let userpass_credential: UserPassCredential =
|
||||||
|
self.try_into_userpass_credential(ctx)?;
|
||||||
|
ctx.mikan()
|
||||||
|
.fork_with_userpass_credential(userpass_credential)
|
||||||
|
.await?
|
||||||
|
};
|
||||||
|
let mut has_login = mikan_client.has_login().await?;
|
||||||
|
if !has_login {
|
||||||
|
mikan_client.login().await?;
|
||||||
|
has_login = true;
|
||||||
|
}
|
||||||
|
if has_login {
|
||||||
|
mikan_client
|
||||||
|
.sync_credential_cookies(ctx, credential_id, subscriber_id)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
Ok(has_login)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
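A hedged sketch of wiring the new check into caller code (context plumbing elided; the import paths follow the crate layout seen elsewhere in this diff but are assumptions):

use recorder::{app::AppContextTrait, errors::RecorderResult, models::credential_3rd};

async fn verify_credential(
    ctx: &dyn AppContextTrait,
    id: i32,
    subscriber_id: i32,
) -> RecorderResult<bool> {
    match credential_3rd::Model::find_by_id_and_subscriber_id(ctx, id, subscriber_id).await? {
        // check_available consumes the model: it decrypts the stored credentials,
        // forks a Mikan client with them, logs in if needed, then syncs cookies back.
        Some(credential) => credential.check_available(ctx).await,
        None => Ok(false),
    }
}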
@@ -44,7 +44,7 @@ pub struct Model {
     pub updated_at: DateTimeUtc,
     #[sea_orm(primary_key)]
     pub id: i32,
-    pub raw_name: String,
+    pub origin_name: String,
     pub display_name: String,
     pub downloader_id: i32,
     pub episode_id: i32,
@@ -52,8 +52,8 @@ pub struct Model {
     pub status: DownloadStatus,
     pub mime: DownloadMime,
     pub url: String,
-    pub all_size: Option<u64>,
-    pub curr_size: Option<u64>,
+    pub all_size: Option<i64>,
+    pub curr_size: Option<i64>,
     pub homepage: Option<String>,
     pub save_path: Option<String>,
 }
@@ -1,7 +1,6 @@
 use async_trait::async_trait;
 use sea_orm::{
-    ActiveValue, FromJsonQueryResult, IntoSimpleExpr, QuerySelect, entity::prelude::*,
-    sea_query::OnConflict,
+    ActiveValue, IntoSimpleExpr, QuerySelect, entity::prelude::*, sea_query::OnConflict,
 };
 use serde::{Deserialize, Serialize};
@@ -10,19 +9,17 @@ use crate::{
     app::AppContextTrait,
     errors::RecorderResult,
     extract::{
+        bittorrent::EpisodeEnclosureMeta,
         mikan::{MikanEpisodeHash, MikanEpisodeMeta, build_mikan_episode_homepage_url},
-        rawname::parse_episode_meta_from_raw_name,
+        origin::{OriginCompTrait, OriginNameRoot},
     },
 };

-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, Default)]
-pub struct EpisodeExtra {
-    pub name_zh: Option<String>,
-    pub s_name_zh: Option<String>,
-    pub name_en: Option<String>,
-    pub s_name_en: Option<String>,
-    pub name_jp: Option<String>,
-    pub s_name_jp: Option<String>,
-}
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
+#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "episode_type")]
+pub enum EpisodeType {
+    #[sea_orm(string_value = "mikan")]
+    Mikan,
+}

 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
@@ -36,21 +33,25 @@ pub struct Model {
     pub id: i32,
     #[sea_orm(indexed)]
     pub mikan_episode_id: Option<String>,
-    pub raw_name: String,
+    pub enclosure_torrent_link: Option<String>,
+    pub enclosure_magnet_link: Option<String>,
+    pub enclosure_pub_date: Option<DateTimeUtc>,
+    pub enclosure_content_length: Option<i64>,
+    pub episode_type: EpisodeType,
+    pub origin_name: String,
     pub display_name: String,
     pub bangumi_id: i32,
     pub subscriber_id: i32,
-    pub save_path: Option<String>,
     pub resolution: Option<String>,
     pub season: i32,
     pub season_raw: Option<String>,
     pub fansub: Option<String>,
     pub poster_link: Option<String>,
+    pub origin_poster_link: Option<String>,
     pub episode_index: i32,
     pub homepage: Option<String>,
     pub subtitle: Option<String>,
     pub source: Option<String>,
-    pub extra: EpisodeExtra,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -133,44 +134,61 @@ impl ActiveModel {
     ctx: &dyn AppContextTrait,
     bangumi: &bangumi::Model,
     episode: MikanEpisodeMeta,
+    enclosure_meta: EpisodeEnclosureMeta,
 ) -> RecorderResult<Self> {
     let mikan_base_url = ctx.mikan().base_url().clone();
-    let rawname_meta = parse_episode_meta_from_raw_name(&episode.episode_title)?;
+    let episode_extention_meta = OriginNameRoot::parse_comp(&episode.episode_title)
+        .inspect_err(|err| {
+            tracing::error!(
+                err = ?err,
+                episode_title = ?episode.episode_title,
+                "Failed to parse episode extension meta from episode title, skip"
+            );
+        })
+        .map(|(_, e)| e.into_meta())
+        .ok();
     let homepage = build_mikan_episode_homepage_url(mikan_base_url, &episode.mikan_episode_id);

-    Ok(Self {
+    let mut episode_active_model = Self {
         mikan_episode_id: ActiveValue::Set(Some(episode.mikan_episode_id)),
-        raw_name: ActiveValue::Set(episode.episode_title.clone()),
+        origin_name: ActiveValue::Set(episode.episode_title.clone()),
         display_name: ActiveValue::Set(episode.episode_title.clone()),
         bangumi_id: ActiveValue::Set(bangumi.id),
         subscriber_id: ActiveValue::Set(bangumi.subscriber_id),
-        resolution: ActiveValue::Set(rawname_meta.resolution),
-        season: ActiveValue::Set(if rawname_meta.season > 0 {
-            rawname_meta.season
-        } else {
-            bangumi.season
-        }),
-        season_raw: ActiveValue::Set(
-            rawname_meta
-                .season_raw
-                .or_else(|| bangumi.season_raw.clone()),
-        ),
-        fansub: ActiveValue::Set(rawname_meta.fansub.or_else(|| bangumi.fansub.clone())),
-        poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
-        episode_index: ActiveValue::Set(rawname_meta.episode_index),
         homepage: ActiveValue::Set(Some(homepage.to_string())),
-        subtitle: ActiveValue::Set(rawname_meta.subtitle),
-        source: ActiveValue::Set(rawname_meta.source),
-        extra: ActiveValue::Set(EpisodeExtra {
-            name_zh: rawname_meta.name_zh,
-            name_en: rawname_meta.name_en,
-            name_jp: rawname_meta.name_jp,
-            s_name_en: rawname_meta.name_en_no_season,
-            s_name_jp: rawname_meta.name_jp_no_season,
-            s_name_zh: rawname_meta.name_zh_no_season,
-        }),
+        season_raw: ActiveValue::Set(bangumi.season_raw.clone()),
+        season: ActiveValue::Set(bangumi.season),
+        fansub: ActiveValue::Set(bangumi.fansub.clone()),
+        poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
+        origin_poster_link: ActiveValue::Set(bangumi.origin_poster_link.clone()),
+        episode_index: ActiveValue::Set(0),
+        enclosure_torrent_link: ActiveValue::Set(enclosure_meta.torrent_link),
+        enclosure_magnet_link: ActiveValue::Set(enclosure_meta.magnet_link),
+        enclosure_pub_date: ActiveValue::Set(enclosure_meta.pub_date),
+        enclosure_content_length: ActiveValue::Set(enclosure_meta.content_length),
+        episode_type: ActiveValue::Set(EpisodeType::Mikan),
         ..Default::default()
-    })
+    };
+
+    if let Some(episode_extention_meta) = episode_extention_meta {
+        episode_active_model.episode_index =
+            ActiveValue::Set(episode_extention_meta.episode_index);
+        episode_active_model.subtitle = ActiveValue::Set(episode_extention_meta.subtitle);
+        episode_active_model.source = ActiveValue::Set(episode_extention_meta.source);
+        episode_active_model.resolution = ActiveValue::Set(episode_extention_meta.resolution);
+        if episode_extention_meta.season > 0 {
+            episode_active_model.season = ActiveValue::Set(episode_extention_meta.season);
+        }
+        if episode_extention_meta.season_raw.is_some() {
+            episode_active_model.season_raw =
+                ActiveValue::Set(episode_extention_meta.season_raw);
+        }
+        if episode_extention_meta.fansub.is_some() {
+            episode_active_model.fansub = ActiveValue::Set(episode_extention_meta.fansub);
+        }
+    }
+
+    Ok(episode_active_model)
 }
 }
@@ -216,21 +234,46 @@ impl Model {

 pub async fn add_mikan_episodes_for_subscription(
     ctx: &dyn AppContextTrait,
-    creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta)>,
+    creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta, EpisodeEnclosureMeta)>,
     subscriber_id: i32,
     subscription_id: i32,
 ) -> RecorderResult<()> {
     let db = ctx.db();
     let new_episode_active_modes: Vec<ActiveModel> = creations
-        .map(|(bangumi, episode_meta)| {
-            ActiveModel::from_mikan_bangumi_and_episode_meta(ctx, bangumi, episode_meta)
+        .map(|(bangumi, episode_meta, enclosure_meta)| {
+            ActiveModel::from_mikan_bangumi_and_episode_meta(
+                ctx,
+                bangumi,
+                episode_meta,
+                enclosure_meta,
+            )
         })
         .collect::<Result<_, _>>()?;

+    if new_episode_active_modes.is_empty() {
+        return Ok(());
+    }
+
     let new_episode_ids = Entity::insert_many(new_episode_active_modes)
         .on_conflict(
             OnConflict::columns([Column::MikanEpisodeId, Column::SubscriberId])
-                .update_columns([Column::RawName, Column::PosterLink, Column::Homepage])
+                .update_columns([
+                    Column::OriginName,
+                    Column::PosterLink,
+                    Column::OriginPosterLink,
+                    Column::Homepage,
+                    Column::EnclosureContentLength,
+                    Column::EnclosurePubDate,
+                    Column::EnclosureTorrentLink,
+                    Column::EnclosureMagnetLink,
+                    Column::EpisodeIndex,
+                    Column::Subtitle,
+                    Column::Source,
+                    Column::Resolution,
+                    Column::Season,
+                    Column::SeasonRaw,
+                    Column::Fansub,
+                ])
                 .to_owned(),
         )
         .exec_with_returning_columns(db, [Column::Id])
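Note the conflict target above pairs with the unique index idx_episodes_mikan_episode_id_subscriber_id created earlier in this change set; a minimal sketch of the clause in isolation (column names as in the model, column subset trimmed for brevity):

use recorder::models::episodes::Column;
use sea_orm::sea_query::OnConflict;

fn episode_upsert_clause() -> OnConflict {
    // Rows that collide on (mikan_episode_id, subscriber_id) are updated in place
    // rather than duplicated or rejected.
    OnConflict::columns([Column::MikanEpisodeId, Column::SubscriberId])
        .update_columns([Column::OriginName, Column::EpisodeIndex, Column::Homepage])
        .to_owned()
}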
apps/recorder/src/models/feeds/mod.rs (new file, 133 lines)
@@ -0,0 +1,133 @@
mod registry;
mod rss;
mod subscription_episodes_feed;

use ::rss::Channel;
use async_trait::async_trait;
pub use registry::Feed;
pub use rss::{RssFeedItemTrait, RssFeedTrait};
use sea_orm::{ActiveValue, DeriveEntityModel, entity::prelude::*};
use serde::{Deserialize, Serialize};
pub use subscription_episodes_feed::SubscriptionEpisodesFeed;
use url::Url;

use crate::{
    app::AppContextTrait,
    errors::{RecorderError, RecorderResult},
};

#[derive(
    Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "feed_type")]
#[serde(rename_all = "snake_case")]
pub enum FeedType {
    #[sea_orm(string_value = "rss")]
    Rss,
}

#[derive(
    Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "feed_source")]
#[serde(rename_all = "snake_case")]
pub enum FeedSource {
    #[sea_orm(string_value = "subscription_episode")]
    SubscriptionEpisode,
}

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "feeds")]
pub struct Model {
    #[sea_orm(default_expr = "Expr::current_timestamp()")]
    pub created_at: DateTimeUtc,
    #[sea_orm(default_expr = "Expr::current_timestamp()")]
    pub updated_at: DateTimeUtc,
    #[sea_orm(primary_key)]
    pub id: i32,
    #[sea_orm(indexed)]
    pub token: String,
    #[sea_orm(indexed)]
    pub feed_type: FeedType,
    #[sea_orm(indexed)]
    pub feed_source: FeedSource,
    pub subscriber_id: Option<i32>,
    pub subscription_id: Option<i32>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::subscriptions::Entity",
        from = "Column::SubscriptionId",
        to = "super::subscriptions::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Subscription,
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Subscriber,
}

impl Related<super::subscriptions::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscription.def()
    }
}

impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
    #[sea_orm(entity = "super::subscribers::Entity")]
    Subscriber,
    #[sea_orm(entity = "super::subscriptions::Entity")]
    Subscription,
}

#[async_trait]
impl ActiveModelBehavior for ActiveModel {
    async fn before_save<C>(mut self, _db: &C, insert: bool) -> Result<Self, DbErr>
    where
        C: ConnectionTrait,
    {
        if insert && let ActiveValue::NotSet = self.token {
            let token = nanoid::nanoid!(10);
            self.token = ActiveValue::Set(token);
        }
        Ok(self)
    }
}

impl Model {
    pub async fn find_rss_feed_by_token(
        ctx: &dyn AppContextTrait,
        token: &str,
        api_base: &Url,
    ) -> RecorderResult<Channel> {
        let db = ctx.db();

        let feed_model = Entity::find()
            .filter(Column::Token.eq(token))
            .filter(Column::FeedType.eq(FeedType::Rss))
            .one(db)
            .await?
            .ok_or(RecorderError::ModelEntityNotFound {
                entity: "Feed".into(),
            })?;

        let feed = Feed::from_model(ctx, feed_model).await?;

        feed.into_rss_channel(ctx, api_base)
    }
}
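Note that `before_save` fills `token` with a ten-character nanoid on insert, so callers never set it explicitly; the token is all that is needed to read a feed back. A hedged usage sketch of `find_rss_feed_by_token`, assuming this file's imports; the handler name, context value, and base URL are assumptions, not part of this diff:

// Hedged sketch: resolving a feed token into serialized RSS XML.
// `ctx` is assumed to be some concrete `AppContextTrait` implementation,
// and the URL literal is illustrative only.
async fn serve_rss(ctx: &dyn AppContextTrait, token: &str) -> RecorderResult<String> {
    let api_base = Url::parse("https://konobangu.example/api/").expect("static URL is valid");
    let channel = Model::find_rss_feed_by_token(ctx, token, &api_base).await?;
    // rss::Channel implements Display, rendering the channel as RSS XML.
    Ok(channel.to_string())
}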
65 apps/recorder/src/models/feeds/registry.rs Normal file
@@ -0,0 +1,65 @@
use rss::Channel;
use sea_orm::{ColumnTrait, EntityTrait, JoinType, QueryFilter, QuerySelect, RelationTrait};
use url::Url;

use crate::{
    app::AppContextTrait,
    errors::{RecorderError, RecorderResult},
    models::{
        episodes,
        feeds::{self, FeedSource, RssFeedTrait, SubscriptionEpisodesFeed},
        subscription_episode, subscriptions,
    },
};

pub enum Feed {
    SubscriptionEpisodes(SubscriptionEpisodesFeed),
}

impl Feed {
    pub async fn from_model(ctx: &dyn AppContextTrait, m: feeds::Model) -> RecorderResult<Self> {
        match m.feed_source {
            FeedSource::SubscriptionEpisode => {
                let db = ctx.db();
                let (subscription, episodes) = if let Some(subscription_id) = m.subscription_id
                    && let Some(subscription) = subscriptions::Entity::find()
                        .filter(subscriptions::Column::Id.eq(subscription_id))
                        .one(db)
                        .await?
                {
                    let episodes = episodes::Entity::find()
                        .join(
                            JoinType::InnerJoin,
                            episodes::Relation::SubscriptionEpisode.def(),
                        )
                        .join(
                            JoinType::InnerJoin,
                            subscription_episode::Relation::Subscription.def(),
                        )
                        .filter(subscriptions::Column::Id.eq(subscription_id))
                        .all(db)
                        .await?;
                    (subscription, episodes)
                } else {
                    return Err(RecorderError::ModelEntityNotFound {
                        entity: "Subscription".into(),
                    });
                };

                Ok(Feed::SubscriptionEpisodes(
                    SubscriptionEpisodesFeed::from_model(m, subscription, episodes),
                ))
            }
        }
    }

    pub fn into_rss_channel(
        self,
        ctx: &dyn AppContextTrait,
        api_base: &Url,
    ) -> RecorderResult<Channel> {
        match self {
            Self::SubscriptionEpisodes(feed) => feed.into_channel(ctx, api_base),
        }
    }
}
142 apps/recorder/src/models/feeds/rss.rs Normal file
@@ -0,0 +1,142 @@
use std::borrow::Cow;

use chrono::{DateTime, Utc};
use downloader::bittorrent::BITTORRENT_MIME_TYPE;
use maplit::btreemap;
use rss::{
    Channel, ChannelBuilder, EnclosureBuilder, GuidBuilder, Item, ItemBuilder,
    extension::{ExtensionBuilder, ExtensionMap},
};
use url::Url;

use crate::{
    app::AppContextTrait,
    errors::{RecorderError, RecorderResult},
};

pub trait RssFeedItemTrait: Sized {
    fn get_guid_value(&self) -> Cow<'_, str>;
    fn get_title(&self) -> Cow<'_, str>;
    fn get_description(&self) -> Cow<'_, str>;
    fn get_link(&self, ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>>;
    fn get_enclosure_mime(&self) -> Option<Cow<'_, str>>;
    fn get_enclosure_link(&self, ctx: &dyn AppContextTrait, api_base: &Url)
    -> Option<Cow<'_, str>>;
    fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>>;
    fn get_enclosure_content_length(&self) -> Option<i64>;
    fn into_item(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Item> {
        let enclosure_mime_type =
            self.get_enclosure_mime()
                .ok_or_else(|| RecorderError::MikanRssInvalidFieldError {
                    field: "enclosure_mime_type".into(),
                    source: None.into(),
                })?;
        let enclosure_link = self.get_enclosure_link(ctx, api_base).ok_or_else(|| {
            RecorderError::MikanRssInvalidFieldError {
                field: "enclosure_link".into(),
                source: None.into(),
            }
        })?;
        let enclosure_content_length = self.get_enclosure_content_length().ok_or_else(|| {
            RecorderError::MikanRssInvalidFieldError {
                field: "enclosure_content_length".into(),
                source: None.into(),
            }
        })?;
        let enclosure_pub_date = self.get_enclosure_pub_date();
        let link = self.get_link(ctx, api_base).ok_or_else(|| {
            RecorderError::MikanRssInvalidFieldError {
                field: "link".into(),
                source: None.into(),
            }
        })?;

        let mut extensions = ExtensionMap::default();
        if enclosure_mime_type == BITTORRENT_MIME_TYPE {
            extensions.insert("torrent".to_string(), {
                let mut map = btreemap! {
                    "link".to_string() => vec![
                        ExtensionBuilder::default().name(
                            "link"
                        ).value(enclosure_link.to_string()).build()
                    ],
                    "contentLength".to_string() => vec![
                        ExtensionBuilder::default().name(
                            "contentLength"
                        ).value(enclosure_content_length.to_string()).build()
                    ],
                };
                if let Some(pub_date) = enclosure_pub_date {
                    map.insert(
                        "pubDate".to_string(),
                        vec![
                            ExtensionBuilder::default()
                                .name("pubDate")
                                .value(pub_date.to_rfc3339())
                                .build(),
                        ],
                    );
                }
                map
            });
        };

        let enclosure = EnclosureBuilder::default()
            .mime_type(enclosure_mime_type)
            .url(enclosure_link.to_string())
            .length(enclosure_content_length.to_string())
            .build();

        let guid = GuidBuilder::default()
            .value(self.get_guid_value())
            .permalink(false)
            .build();

        let item = ItemBuilder::default()
            .guid(guid)
            .title(self.get_title().to_string())
            .description(self.get_description().to_string())
            .link(link.to_string())
            .enclosure(enclosure)
            .extensions(extensions)
            .build();

        Ok(item)
    }
}

pub trait RssFeedTrait: Sized {
    type Item: RssFeedItemTrait;

    fn get_description(&self) -> Cow<'_, str>;

    fn get_title(&self) -> Cow<'_, str>;

    fn get_link(&self, ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>>;

    fn items(&self) -> impl Iterator<Item = &Self::Item>;

    fn into_items(self) -> impl Iterator<Item = Self::Item>;

    fn into_channel(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Channel> {
        let link = self.get_link(ctx, api_base).ok_or_else(|| {
            RecorderError::MikanRssInvalidFieldError {
                field: "link".into(),
                source: None.into(),
            }
        })?;

        let channel = ChannelBuilder::default()
            .title(self.get_title())
            .link(link.to_string())
            .description(self.get_description())
            .items({
                self.into_items()
                    .map(|item| item.into_item(ctx, api_base))
                    .collect::<RecorderResult<Vec<_>>>()?
            })
            .build();

        Ok(channel)
    }
}
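The accessor methods above form the per-item contract; `into_item` and `into_channel` are blanket implementations built on top of it, so a feed type only supplies field lookups. A hedged sketch of a minimal implementor, assuming the same imports as this file (`StaticItem` is hypothetical and exists only to show which accessors `into_item` consumes when emitting an `<item>` with a BitTorrent enclosure):

// Hedged sketch: the smallest type that satisfies `RssFeedItemTrait`.
struct StaticItem {
    title: String,
    torrent_link: String,
}

impl RssFeedItemTrait for StaticItem {
    fn get_guid_value(&self) -> Cow<'_, str> {
        Cow::Borrowed(&self.title)
    }
    fn get_title(&self) -> Cow<'_, str> {
        Cow::Borrowed(&self.title)
    }
    fn get_description(&self) -> Cow<'_, str> {
        Cow::Borrowed(&self.title)
    }
    fn get_link(&self, _ctx: &dyn AppContextTrait, _api_base: &Url) -> Option<Cow<'_, str>> {
        Some(Cow::Borrowed(&self.torrent_link))
    }
    fn get_enclosure_mime(&self) -> Option<Cow<'_, str>> {
        // Returning the BitTorrent MIME type opts the item into the
        // "torrent" extension block built by `into_item`.
        Some(Cow::Borrowed(BITTORRENT_MIME_TYPE))
    }
    fn get_enclosure_link(
        &self,
        _ctx: &dyn AppContextTrait,
        _api_base: &Url,
    ) -> Option<Cow<'_, str>> {
        Some(Cow::Borrowed(&self.torrent_link))
    }
    fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>> {
        None // omitting the date skips the optional pubDate extension entry
    }
    fn get_enclosure_content_length(&self) -> Option<i64> {
        Some(1024) // illustrative byte count; None would make `into_item` error
    }
}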
114 apps/recorder/src/models/feeds/subscription_episodes_feed.rs Normal file
@@ -0,0 +1,114 @@
use std::borrow::Cow;

use chrono::{DateTime, Utc};
use downloader::bittorrent::BITTORRENT_MIME_TYPE;
use url::Url;

use crate::{
    app::{AppContextTrait, PROJECT_NAME},
    models::{
        episodes,
        feeds::{
            self,
            rss::{RssFeedItemTrait, RssFeedTrait},
        },
        subscriptions,
    },
    web::controller,
};

pub struct SubscriptionEpisodesFeed {
    pub feed: feeds::Model,
    pub subscription: subscriptions::Model,
    pub episodes: Vec<episodes::Model>,
}

impl SubscriptionEpisodesFeed {
    pub fn from_model(
        feed: feeds::Model,
        subscription: subscriptions::Model,
        episodes: Vec<episodes::Model>,
    ) -> Self {
        Self {
            feed,
            subscription,
            episodes,
        }
    }
}

impl RssFeedItemTrait for episodes::Model {
    fn get_guid_value(&self) -> Cow<'_, str> {
        Cow::Owned(format!("{PROJECT_NAME}:episode:{}", self.id))
    }

    fn get_title(&self) -> Cow<'_, str> {
        Cow::Borrowed(&self.display_name)
    }

    fn get_description(&self) -> Cow<'_, str> {
        Cow::Borrowed(&self.display_name)
    }

    fn get_link(&self, _ctx: &dyn AppContextTrait, _api_base: &Url) -> Option<Cow<'_, str>> {
        self.homepage.as_deref().map(Cow::Borrowed)
    }

    fn get_enclosure_mime(&self) -> Option<Cow<'_, str>> {
        if self.enclosure_torrent_link.is_some() {
            Some(Cow::Borrowed(BITTORRENT_MIME_TYPE))
        } else {
            None
        }
    }

    fn get_enclosure_link(
        &self,
        _ctx: &dyn AppContextTrait,
        _api_base: &Url,
    ) -> Option<Cow<'_, str>> {
        self.enclosure_torrent_link.as_deref().map(Cow::Borrowed)
    }

    fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>> {
        self.enclosure_pub_date
    }

    fn get_enclosure_content_length(&self) -> Option<i64> {
        self.enclosure_content_length
    }
}

impl RssFeedTrait for SubscriptionEpisodesFeed {
    type Item = episodes::Model;

    fn get_description(&self) -> Cow<'_, str> {
        Cow::Owned(format!(
            "{PROJECT_NAME} - episodes of subscription {}",
            self.subscription.id
        ))
    }

    fn get_title(&self) -> Cow<'_, str> {
        Cow::Owned(format!("{PROJECT_NAME} - subscription episodes"))
    }

    fn get_link(&self, _ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>> {
        let api_base = api_base
            .join(&format!(
                "{}/{}",
                controller::feeds::CONTROLLER_PREFIX,
                self.feed.token
            ))
            .ok()?;
        Some(Cow::Owned(api_base.to_string()))
    }

    fn items(&self) -> impl Iterator<Item = &Self::Item> {
        self.episodes.iter()
    }

    fn into_items(self) -> impl Iterator<Item = Self::Item> {
        self.episodes.into_iter()
    }
}
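`get_link` builds the public feed URL with `Url::join`, whose result depends on whether the base ends in a slash: without one, the base's last path segment is replaced rather than appended, so callers are presumably expected to pass a trailing-slash `api_base`. A small standalone sketch of that behavior (the host and path literals are illustrative; "feeds/rss" is not necessarily the actual `CONTROLLER_PREFIX`):

// Hedged sketch of url::Url::join resolution, with illustrative values.
use url::Url;

fn main() {
    // With a trailing slash, the relative path is appended under /api/.
    let base = Url::parse("https://konobangu.example/api/").unwrap();
    assert_eq!(
        base.join("feeds/rss/some-token").unwrap().as_str(),
        "https://konobangu.example/api/feeds/rss/some-token"
    );
    // Without one, "api" is treated as a document and gets replaced.
    let bare = Url::parse("https://konobangu.example/api").unwrap();
    assert_eq!(
        bare.join("feeds/rss/some-token").unwrap().as_str(),
        "https://konobangu.example/feeds/rss/some-token"
    );
}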
@@ -4,6 +4,7 @@ pub mod credential_3rd;
 pub mod downloaders;
 pub mod downloads;
 pub mod episodes;
+pub mod feeds;
 pub mod query;
 pub mod subscriber_tasks;
 pub mod subscribers;
Some files were not shown because too many files have changed in this diff.