Compare commits
34 Commits
a7f52fe0eb ... 1b5bdadf10

| SHA1 |
|---|
| 1b5bdadf10 |
| 882b29d7a1 |
| c60f6f511e |
| 07955286f1 |
| 258eeddc74 |
| b09e9e6aaa |
| 0df371adb7 |
| 8144986a48 |
| d2aab7369d |
| 946d4e8c2c |
| 0b5f25a263 |
| c669d66969 |
| 082e08e7f4 |
| a3fd03d32a |
| 5645645c5f |
| ac7d1efb8d |
| a676061b3e |
| 1c34cebbde |
| 22a2ce0559 |
| 313b1bf1ba |
| 66413f92e3 |
| 0fcbc6bbe9 |
| f1d8318500 |
| b2f327d48f |
| b772937354 |
| a3b9543d0e |
| d0a423df9f |
| 8600bf216a |
| bf270e4e87 |
| 760cb2344e |
| ed2c1038e6 |
| d4bdc677a9 |
| 9d58d961bd |
| 791b75b3af |
.vscode/settings.json (vendored, 92 lines changed)

@@ -1,45 +1,51 @@
 {
   "npm.packageManager": "pnpm",
-  "rust-analyzer.showUnlinkedFileNotification": false,
   "[javascript]": {
     "editor.defaultFormatter": "vscode.typescript-language-features",
     "editor.formatOnSave": true
   },
   "[json]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[jsonc]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[typescript]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[typescriptreact]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "editor.codeActionsOnSave": {
     "quickfix.biome": "explicit",
     "source.organizeImports.biome": "explicit"
   },
   "emmet.showExpandedAbbreviation": "never",
   "prettier.enable": false,
   "typescript.tsdk": "node_modules/typescript/lib",
+  "rust-analyzer.showUnlinkedFileNotification": false,
+  "rust-analyzer.cargo.features": [
+    "testcontainers"
+  ],
   "sqltools.connections": [
     {
       "previewLimit": 50,
       "server": "localhost",
       "port": 5432,
       "driver": "PostgreSQL",
       "name": "konobangu-dev",
       "database": "konobangu",
       "username": "konobangu"
     }
-  ],
-  "rust-analyzer.cargo.features": "all",
-  // https://github.com/rust-lang/rust/issues/141540
-  // "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
+  ]
+  // "rust-analyzer.check.extraEnv": {
+  //   "CARGO_TARGET_DIR": "target/rust-analyzer"
+  // },
+  // "rust-analyzer.cargo.extraEnv": {
+  //   "CARGO_TARGET_DIR": "target/analyzer"
+  // }
 }
.vscode/tasks.json (vendored, new file, 112 lines)

@@ -0,0 +1,112 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "dev-all",
      "dependsOn": [
        "dev-webui",
        "dev-recorder",
        "dev-proxy",
        "dev-codegen-wait",
        "dev-deps",
      ],
      "dependsOrder": "parallel",
      "group": {
        "kind": "build",
        "isDefault": false,
      },
      "presentation": {
        "group": "new-group",
        "echo": true,
        "reveal": "always",
        "panel": "shared",
        "clear": false
      }
    },
    {
      "label": "dev-webui",
      "type": "shell",
      "command": "just",
      "args": [
        "dev-webui"
      ],
      "isBackground": true,
      "problemMatcher": [],
      "presentation": {
        "panel": "dedicated",
        "reveal": "always",
        "focus": true,
        "showReuseMessage": true,
        "clear": true,
      }
    },
    {
      "label": "dev-deps",
      "type": "shell",
      "command": "just",
      "args": [
        "dev-deps"
      ],
      "isBackground": true,
      "problemMatcher": [],
      "presentation": {
        "panel": "dedicated",
        "reveal": "never",
        "focus": false,
        "showReuseMessage": true,
        "clear": true,
      }
    },
    {
      "label": "dev-codegen-wait",
      "type": "shell",
      "command": "just",
      "args": [
        "dev-codegen-wait"
      ],
      "isBackground": true,
      "problemMatcher": [],
      "presentation": {
        "panel": "dedicated",
        "reveal": "never",
        "focus": false,
        "showReuseMessage": true,
        "clear": true,
      }
    },
    {
      "label": "dev-recorder",
      "type": "shell",
      "command": "just",
      "args": [
        "dev-recorder"
      ],
      "isBackground": true,
      "problemMatcher": [],
      "presentation": {
        "panel": "dedicated",
        "reveal": "never",
        "focus": false,
        "showReuseMessage": true,
        "clear": true,
      }
    },
    {
      "label": "dev-proxy",
      "type": "shell",
      "command": "just",
      "args": [
        "dev-proxy",
      ],
      "isBackground": true,
      "problemMatcher": [],
      "presentation": {
        "panel": "dedicated",
        "reveal": "never",
        "focus": false,
        "showReuseMessage": true,
        "clear": true,
      }
    }
  ]
}
Cargo.lock (generated, 1591 lines changed)

File diff suppressed because it is too large.
Cargo.toml (57 lines changed)

@@ -1,23 +1,54 @@
+# cargo-features = ["codegen-backend"]
+
 [workspace]
 members = [
   "packages/testing-torrents",
   "packages/util",
+  "packages/util-derive",
   "packages/fetch",
   "packages/downloader",
   "apps/recorder",
+  "apps/proxy",
 ]
 resolver = "2"
+
+[profile.dev]
+debug = 0
+# https://github.com/rust-lang/rust/issues/141540
+incremental = false
+# [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
+# codegen-backend = "cranelift"

 [workspace.dependencies]
+testing-torrents = { path = "./packages/testing-torrents" }
+util = { path = "./packages/util" }
+util-derive = { path = "./packages/util-derive" }
+fetch = { path = "./packages/fetch" }
+downloader = { path = "./packages/downloader" }
+recorder = { path = "./apps/recorder" }
+proxy = { path = "./apps/proxy" }
+
+reqwest = { version = "0.12.20", features = [
+  "charset",
+  "http2",
+  "json",
+  "macos-system-configuration",
+  "cookies",
+] }
 moka = "0.12"
 futures = "0.3"
 quirks_path = "0.1"
 snafu = { version = "0.8", features = ["futures"] }
-testcontainers = { version = "0.23.3" }
-testcontainers-modules = { version = "0.11.4" }
+testcontainers = { version = "0.24" }
+testcontainers-modules = { version = "0.12.1" }
 testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
 serde = { version = "1", features = ["derive"] }
-tokio = { version = "1", features = ["macros", "fs", "rt-multi-thread"] }
+tokio = { version = "1.45.1", features = [
+  "macros",
+  "fs",
+  "rt-multi-thread",
+  "signal",
+] }
 serde_json = "1"
 async-trait = "0.1"
 tracing = "0.1"

@@ -30,22 +61,12 @@ serde_with = "3"
 regex = "1.11"
 lazy_static = "1.5"
 axum = { version = "0.8.3", features = ["macros"] }
-reqwest = { version = "0.12", default-features = false, features = [
-  "charset",
-  "http2",
-  "json",
-  "macos-system-configuration",
-  "rustls-tls",
-  "cookies",
-] }
 tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
 axum-extra = "0.10"
-testing-torrents = { path = "./packages/testing-torrents" }
-util = { path = "./packages/util" }
-fetch = { path = "./packages/fetch" }
-downloader = { path = "./packages/downloader" }
+mockito = { version = "1.6.1" }
+convert_case = "0.8"
+color-eyre = "0.6.5"
+inquire = "0.7.5"

 [patch.crates-io]
-jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
-seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "10ba248" }
+seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }
@@ -6,13 +6,14 @@
     "build": "email build",
     "dev": "email dev --port 5003",
     "export": "email export",
-    "clean": "git clean -xdf .cache .turbo dist node_modules",
+    "clean": "git clean -xdf .cache dist node_modules",
     "typecheck": "tsc --noEmit --emitDeclarationOnly false"
   },
   "dependencies": {
-    "@react-email/components": "0.0.31",
+    "@react-email/components": "^0.0.42",
     "react": "^19.0.0",
-    "react-email": "3.0.4"
+    "react-email": "^4.0.16",
+    "@konobangu/email": "workspace:*"
   },
   "devDependencies": {
     "@types/react": "19.0.1"
@@ -2,8 +2,12 @@
   "extends": "../../tsconfig.base.json",
   "compilerOptions": {
     "composite": true,
-    "jsx": "react-jsx"
+    "jsx": "react-jsx",
+    "jsxImportSource": "react",
+    "module": "ESNext",
+    "moduleResolution": "bundler"
   },
+  "references": [{ "path": "../../packages/email" }],
   "include": ["**/*.ts", "**/*.tsx"],
   "exclude": ["node_modules"]
 }
apps/proxy/.whistle/rules/files/1.mikan_doppel (new file, 1 line)

@@ -0,0 +1 @@
^https://mikanani.me/*** http://127.0.0.1:5005/$1
@@ -1 +1 @@
-{"filesOrder":["konobangu"],"selectedList":["konobangu"],"disabledDefalutRules":true,"defalutRules":""}
+{"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""}
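The new whistle rule forwards every `https://mikanani.me/...` request to the local doppel server on port 5005. As a rough illustration of the rewrite it performs (a sketch, not code from this repository):

```rust
// Illustrative only: the string transformation equivalent of the rule
// `^https://mikanani.me/*** http://127.0.0.1:5005/$1`.
fn rewrite_to_doppel(url: &str) -> Option<String> {
    url.strip_prefix("https://mikanani.me/")
        .map(|rest| format!("http://127.0.0.1:5005/{rest}"))
}

fn main() {
    assert_eq!(
        rewrite_to_doppel("https://mikanani.me/RSS/MyBangumi?token=test").as_deref(),
        Some("http://127.0.0.1:5005/RSS/MyBangumi?token=test")
    );
}
```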
apps/proxy/Cargo.toml (new file, 19 lines)

@@ -0,0 +1,19 @@
[package]
name = "proxy"
version = "0.1.0"
edition = "2024"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "proxy"
path = "src/lib.rs"

[[bin]]
name = "mikan_doppel"
path = "src/bin/mikan_doppel.rs"

[dependencies]
recorder = { workspace = true }
tokio = { workspace = true }
tracing-subscriber = { workspace = true }
tracing = { workspace = true }
@@ -3,13 +3,13 @@
   "version": "0.1.0",
   "private": true,
   "scripts": {
-    "start": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
-    "dev": "pnpm run start"
+    "whistle": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
+    "mikan_doppel": "cargo run -p proxy --bin mikan_doppel",
+    "dev": "npm-run-all -p mikan_doppel whistle"
   },
   "keywords": [],
   "license": "MIT",
   "devDependencies": {
-    "cross-env": "^7.0.3",
-    "whistle": "^2.9.93"
+    "whistle": "^2.9.99"
   }
 }
apps/proxy/src/bin/mikan_doppel.rs (new file, 22 lines)

@@ -0,0 +1,22 @@
use std::time::Duration;

use recorder::{errors::RecorderResult, test_utils::mikan::MikanMockServer};
use tracing::Level;

#[allow(unused_variables)]
#[tokio::main]
async fn main() -> RecorderResult<()> {
    tracing_subscriber::fmt()
        .with_max_level(Level::DEBUG)
        .init();

    let mut mikan_server = MikanMockServer::new_with_port(5005).await.unwrap();

    let resources_mock = mikan_server.mock_resources_with_doppel();

    let login_mock = mikan_server.mock_get_login_page();

    loop {
        tokio::time::sleep(Duration::from_secs(1)).await;
    }
}
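The binary parks the runtime in an endless one-second sleep loop. A possible alternative, sketched here under the assumption that the `signal` feature added to the workspace `tokio` dependency in this change is available, would be to wait for Ctrl-C instead:

```rust
// Sketch: a Ctrl-C-aware replacement for the endless sleep loop, so the
// doppel server can be stopped cleanly instead of being killed.
async fn park_until_shutdown() -> std::io::Result<()> {
    tokio::signal::ctrl_c().await?;
    tracing::info!("mikan_doppel shutting down...");
    Ok(())
}
```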
@@ -11,3 +11,7 @@ BASIC_PASSWORD = "konobangu"
 # OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
 # OIDC_EXTRA_CLAIM_KEY = ""
 # OIDC_EXTRA_CLAIM_VALUE = ""
+# MIKAN_PROXY = ""
+# MIKAN_PROXY_AUTH_HEADER = ""
+# MIKAN_NO_PROXY = ""
+# MIKAN_PROXY_ACCEPT_INVALID_CERTS = "true"
apps/recorder/.env.dev (new file, 17 lines)

@@ -0,0 +1,17 @@
HOST="konobangu.com"
DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
STORAGE_DATA_DIR = "./data"
AUTH_TYPE = "basic" # or oidc
BASIC_USER = "konobangu"
BASIC_PASSWORD = "konobangu"
# OIDC_ISSUER="https://auth.logto.io/oidc"
# OIDC_AUDIENCE = "https://konobangu.com/api"
# OIDC_CLIENT_ID = "client_id"
# OIDC_CLIENT_SECRET = "client_secret" # optional
# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# OIDC_EXTRA_CLAIM_KEY = ""
# OIDC_EXTRA_CLAIM_VALUE = ""
MIKAN_PROXY = "http://127.0.0.1:8899"
# MIKAN_PROXY_AUTH_HEADER = ""
# MIKAN_NO_PROXY = ""
MIKAN_PROXY_ACCEPT_INVALID_CERTS = true
@@ -15,6 +15,7 @@ required-features = []

 [features]
 default = []
+playground = ["dep:inquire", "dep:color-eyre"]
 testcontainers = [
   "dep:testcontainers",
   "dep:testcontainers-modules",

@@ -24,6 +25,11 @@ testcontainers = [
 ]

 [dependencies]
+downloader = { workspace = true }
+util = { workspace = true }
+util-derive = { workspace = true }
+fetch = { workspace = true }
+
 serde = { workspace = true }
 tokio = { workspace = true }
 serde_json = { workspace = true }

@@ -48,18 +54,18 @@ serde_with = { workspace = true }
 moka = { workspace = true }
 chrono = { workspace = true }
 tracing-subscriber = { workspace = true }
+mockito = { workspace = true }
+
 sea-orm = { version = "1.1", features = [
   "sqlx-sqlite",
   "sqlx-postgres",
-  "runtime-tokio-rustls",
+  "runtime-tokio",
   "macros",
   "debug-print",
 ] }
 figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
 uuid = { version = "1.6.0", features = ["v4"] }
-sea-orm-migration = { version = "1.1", features = ["runtime-tokio-rustls"] }
+sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
 rss = "2"
 fancy-regex = "0.14"
 maplit = "1.0.2"

@@ -70,9 +76,8 @@ zune-image = "0.4.15"
 once_cell = "1.20.2"
 scraper = "0.23"
-
-jwt-authorizer = "0.15.0"
 log = "0.4"
-async-graphql = { version = "7", features = [] }
+async-graphql = { version = "7", features = ["dynamic-schema"] }
 async-graphql-axum = "7"
 seaography = { version = "1.1", features = [
   "with-json",

@@ -82,6 +87,7 @@ seaography = { version = "1.1", features = [
   "with-decimal",
   "with-bigdecimal",
   "with-postgres-array",
+  "with-json-as-scalar",
 ] }
 base64 = "0.22.1"
 tower = "0.5.2"

@@ -96,8 +102,8 @@ tower-http = { version = "0.6", features = [
   "compression-full",
 ] }
 tera = "1.20.0"
-openidconnect = { version = "4", features = ["rustls-tls"] }
-dotenv = "0.15.0"
+openidconnect = { version = "4" }
+dotenvy = "0.15.7"
 http = "1.2.0"
 async-stream = "0.3.6"
 serde_variant = "0.1.3"

@@ -105,20 +111,28 @@ tracing-appender = "0.2.3"
 clap = "4.5.31"
 ipnetwork = "0.21.1"
 typed-builder = "0.21.0"
-serde_yaml = "0.9.34"
-apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
+apalis = { version = "0.7", features = [
+  "limit",
+  "tracing",
+  "catch-panic",
+  "retry",
+] }
 apalis-sql = { version = "0.7", features = ["postgres"] }
 cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
 rand = "0.9.1"
+rust_decimal = "1.37.1"
 reqwest_cookie_store = "0.8.0"
+nanoid = "0.4.0"
+jwtk = "0.4.0"
+color-eyre = { workspace = true, optional = true }
+inquire = { workspace = true, optional = true }
+percent-encoding = "2.3.1"
-
-downloader = { workspace = true }
-util = { workspace = true }
-fetch = { workspace = true }
-

 [dev-dependencies]
 serial_test = "3"
-insta = { version = "1", features = ["redactions", "yaml", "filters"] }
-mockito = "1.6.1"
+insta = { version = "1", features = ["redactions", "toml", "filters"] }
 rstest = "0.25"
 ctor = "0.4.0"
+inquire = { workspace = true }
+color-eyre = { workspace = true }
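The new `playground` feature makes `inquire` and `color-eyre` optional dependencies, compiled only when the feature is enabled. A minimal sketch of how such feature-gated dependencies are typically consumed (illustrative, not code from the crate):

```rust
// Sketch: this function only exists when built with `--features playground`,
// so `inquire` and `color-eyre` stay out of normal builds entirely.
#[cfg(feature = "playground")]
fn prompt_username() -> color_eyre::Result<String> {
    Ok(inquire::Text::new("mikan username:").prompt()?)
}
```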
apps/recorder/examples/mikan_doppel_season_subscription.rs (new file, 249 lines)

@@ -0,0 +1,249 @@
use std::time::Duration;

use color_eyre::{Result, eyre::OptionExt};
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use inquire::{Password, Text, validator::Validation};
use recorder::{
    crypto::UserPassCredential,
    extract::mikan::{
        MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
        extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
        extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
    },
    test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
};
use scraper::Html;
use tokio::fs;
use url::Url;

#[tokio::main]
async fn main() -> Result<()> {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();
    std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;

    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(0),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(Duration::from_millis(1000)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;

    let username_validator = |input: &str| {
        if input.trim().is_empty() {
            Ok(Validation::Invalid("Username cannot be empty".into()))
        } else {
            Ok(Validation::Valid)
        }
    };
    let password_validator = |input: &str| {
        if input.trim().is_empty() {
            Ok(Validation::Invalid("Password cannot be empty".into()))
        } else {
            Ok(Validation::Valid)
        }
    };
    let username = Text::new("Please enter your mikan username:")
        .with_validator(username_validator)
        .prompt()?;
    let password = Password::new("Please enter your mikan password:")
        .without_confirmation()
        .with_display_mode(inquire::PasswordDisplayMode::Masked)
        .with_validator(password_validator)
        .prompt()?;

    let mikan_scrape_client = mikan_scrape_client
        .fork_with_userpass_credential(UserPassCredential {
            username,
            password,
            user_agent: None,
            cookies: None,
        })
        .await?;

    tracing::info!("Checking if logged in...");
    if !mikan_scrape_client.has_login().await? {
        tracing::info!("Logging in to mikan...");
        mikan_scrape_client.login().await?;
        tracing::info!("Logged in to mikan");
    }

    let mikan_base_url = mikan_scrape_client.base_url().clone();
    tracing::info!("Scraping season subscription...");
    let season_subscription =
        fs::read("tests/resources/mikan/BangumiCoverFlow-2025-spring.html").await?;
    let html = Html::parse_fragment(String::from_utf8(season_subscription)?.as_str());
    let bangumi_index_list =
        extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(&html, &mikan_base_url);

    for bangumi_index in bangumi_index_list {
        let bangumi_meta = {
            let bangumi_expand_subscribed_url = build_mikan_bangumi_expand_subscribed_url(
                mikan_base_url.clone(),
                bangumi_index.mikan_bangumi_id.as_ref(),
            );
            let bangumi_expand_subscribed_doppel_path =
                MikanDoppelPath::new(bangumi_expand_subscribed_url.clone());
            tracing::info!(
                bangumi_title = bangumi_index.bangumi_title,
                "Scraping bangumi expand subscribed..."
            );
            let bangumi_expand_subscribed_data =
                if !bangumi_expand_subscribed_doppel_path.exists_any() {
                    let bangumi_expand_subscribed_data =
                        fetch_html(&mikan_scrape_client, bangumi_expand_subscribed_url).await?;
                    bangumi_expand_subscribed_doppel_path.write(&bangumi_expand_subscribed_data)?;
                    tracing::info!(
                        bangumi_title = bangumi_index.bangumi_title,
                        "Bangumi expand subscribed saved"
                    );
                    bangumi_expand_subscribed_data
                } else {
                    tracing::info!(
                        bangumi_title = bangumi_index.bangumi_title,
                        "Bangumi expand subscribed already exists"
                    );
                    String::from_utf8(bangumi_expand_subscribed_doppel_path.read()?)?
                };

            let html = Html::parse_fragment(&bangumi_expand_subscribed_data);
            extract_mikan_bangumi_meta_from_expand_subscribed_fragment(
                &html,
                bangumi_index.clone(),
                mikan_base_url.clone(),
            )
            .ok_or_eyre(format!(
                "Failed to extract bangumi meta from expand subscribed fragment: {:?}",
                bangumi_index.bangumi_title
            ))
        }?;
        {
            if let Some(poster_url) = bangumi_meta.origin_poster_src.as_ref() {
                let poster_doppel_path = MikanDoppelPath::new(poster_url.clone());
                tracing::info!(
                    title = bangumi_meta.bangumi_title,
                    "Scraping bangumi poster..."
                );
                if !poster_doppel_path.exists_any() {
                    let poster_data = fetch_image(&mikan_scrape_client, poster_url.clone()).await?;
                    poster_doppel_path.write(&poster_data)?;
                    tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi poster saved");
                } else {
                    tracing::info!(
                        title = bangumi_meta.bangumi_title,
                        "Bangumi poster already exists"
                    );
                }
            }
        }
        {
            let bangumi_homepage_url = bangumi_meta
                .bangumi_hash()
                .build_homepage_url(mikan_base_url.clone());
            let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
            tracing::info!(
                title = bangumi_meta.bangumi_title,
                "Scraping bangumi homepage..."
            );
            if !bangumi_homepage_doppel_path.exists_any() {
                let bangumi_homepage_data =
                    fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
                bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
                tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi homepage saved");
            } else {
                tracing::info!(
                    title = bangumi_meta.bangumi_title,
                    "Bangumi homepage already exists"
                );
            }
        }
        let rss_items = {
            let bangumi_rss_url = bangumi_meta
                .bangumi_hash()
                .build_rss_url(mikan_base_url.clone());
            let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
            tracing::info!(
                title = bangumi_meta.bangumi_title,
                "Scraping bangumi rss..."
            );
            let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
                let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
                bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
                tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi rss saved");
                bangumi_rss_data
            } else {
                tracing::info!(
                    title = bangumi_meta.bangumi_title,
                    "Bangumi rss already exists"
                );
                String::from_utf8(bangumi_rss_doppel_path.read()?)?
            };
            let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
            rss_items
                .into_iter()
                .map(MikanRssEpisodeItem::try_from)
                .collect::<Result<Vec<_>, _>>()
        }?;
        for rss_item in rss_items {
            {
                let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
                let episode_homepage_doppel_path =
                    MikanDoppelPath::new(episode_homepage_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode...");
                if !episode_homepage_doppel_path.exists_any() {
                    let episode_homepage_data =
                        fetch_html(&mikan_scrape_client, episode_homepage_url).await?;
                    episode_homepage_doppel_path.write(&episode_homepage_data)?;
                    tracing::info!(title = rss_item.title, "Episode saved");
                } else {
                    tracing::info!(title = rss_item.title, "Episode already exists");
                };
            }
            {
                let episode_torrent_url = rss_item.url;
                let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode torrent...");
                if !episode_torrent_doppel_path.exists_any() {
                    match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                        Ok(episode_torrent_data) => {
                            episode_torrent_doppel_path.write(&episode_torrent_data)?;
                            tracing::info!(title = rss_item.title, "Episode torrent saved");
                        }
                        Err(e) => {
                            if let FetchError::ReqwestError { source } = &e
                                && source
                                    .status()
                                    .is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
                            {
                                tracing::warn!(
                                    title = rss_item.title,
                                    "Episode torrent not found, maybe deleted since new version"
                                );
                                episode_torrent_doppel_path
                                    .write_meta(MikanDoppelMeta { status: 404 })?;
                            } else {
                                Err(e)?;
                            }
                        }
                    }
                } else {
                    tracing::info!(title = rss_item.title, "Episode torrent already exists");
                }
            }
        }
    }
    tracing::info!("Scraping season subscription done");
    Ok(())
}
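Both doppel examples repeat the same pattern around `MikanDoppelPath`: reuse the on-disk copy when it exists, otherwise fetch and persist it. A hedged sketch of that pattern factored into a helper; the closure-based signature is an assumption, not an API from the repository:

```rust
use std::future::Future;

use recorder::test_utils::mikan::MikanDoppelPath;

// Sketch of the caching dance the examples repeat: read the doppel copy
// when present, otherwise run the network fallback and persist its output.
async fn fetch_or_doppel<Fut>(
    doppel: &MikanDoppelPath,     // on-disk cache location
    fetch: impl FnOnce() -> Fut,  // network fallback, hypothetical glue
) -> color_eyre::Result<String>
where
    Fut: Future<Output = color_eyre::Result<String>>,
{
    if doppel.exists_any() {
        Ok(String::from_utf8(doppel.read()?)?)
    } else {
        let data = fetch().await?;
        doppel.write(&data)?;
        Ok(data)
    }
}
```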
apps/recorder/examples/mikan_doppel_subscriber_subscription.rs (new file, 215 lines)

@@ -0,0 +1,215 @@
use std::time::Duration;

use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use recorder::{
    errors::RecorderResult,
    extract::mikan::{
        MikanClient, MikanConfig, MikanRssEpisodeItem,
        extract_mikan_episode_meta_from_episode_homepage_html,
    },
    test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
};
use scraper::Html;
use tokio::fs;
use url::Url;

#[tokio::main]
async fn main() -> RecorderResult<()> {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();
    std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;

    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(0),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(Duration::from_millis(500)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;

    let mikan_base_url = mikan_scrape_client.base_url().clone();
    tracing::info!("Scraping subscriber subscription...");
    let subscriber_subscription =
        fs::read("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
    let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
    let rss_items: Vec<MikanRssEpisodeItem> = channel
        .items
        .into_iter()
        .map(MikanRssEpisodeItem::try_from)
        .collect::<Result<Vec<_>, _>>()?;
    for rss_item in rss_items {
        let episode_homepage_meta = {
            tracing::info!(title = rss_item.title, "Scraping episode homepage...");
            let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
            let episode_homepage_doppel_path = MikanDoppelPath::new(episode_homepage_url.clone());
            let episode_homepage_data = if !episode_homepage_doppel_path.exists_any() {
                let episode_homepage_data =
                    fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
                episode_homepage_doppel_path.write(&episode_homepage_data)?;
                tracing::info!(title = rss_item.title, "Episode homepage saved");
                episode_homepage_data
            } else {
                tracing::info!(title = rss_item.title, "Episode homepage already exists");
                String::from_utf8(episode_homepage_doppel_path.read()?)?
            };
            let html = Html::parse_document(&episode_homepage_data);
            extract_mikan_episode_meta_from_episode_homepage_html(
                &html,
                mikan_base_url.clone(),
                episode_homepage_url,
            )
        }?;

        {
            let episode_torrent_url = rss_item.url;
            let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
            tracing::info!(title = rss_item.title, "Scraping episode torrent...");
            if !episode_torrent_doppel_path.exists_any() {
                match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                    Ok(episode_torrent_data) => {
                        episode_torrent_doppel_path.write(&episode_torrent_data)?;
                        tracing::info!(title = rss_item.title, "Episode torrent saved");
                    }
                    Err(e) => {
                        if let FetchError::ReqwestError { source } = &e
                            && source
                                .status()
                                .is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
                        {
                            tracing::warn!(
                                title = rss_item.title,
                                "Episode torrent not found, maybe deleted since new version"
                            );
                            episode_torrent_doppel_path
                                .write_meta(MikanDoppelMeta { status: 404 })?;
                        } else {
                            Err(e)?;
                        }
                    }
                }

                tracing::info!(title = rss_item.title, "Episode torrent saved");
            } else {
                tracing::info!(title = rss_item.title, "Episode torrent already exists");
            }
        }
        {
            if let Some(episode_poster_url) = episode_homepage_meta.origin_poster_src.as_ref() {
                let episode_poster_doppel_path = MikanDoppelPath::new(episode_poster_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode poster...");
                if !episode_poster_doppel_path.exists_any() {
                    let episode_poster_data =
                        fetch_image(&mikan_scrape_client, episode_poster_url.clone()).await?;
                    episode_poster_doppel_path.write(&episode_poster_data)?;
                    tracing::info!(title = rss_item.title, "Episode poster saved");
                } else {
                    tracing::info!(title = rss_item.title, "Episode poster already exists");
                }
            }
        }

        {
            let bangumi_homepage_url = episode_homepage_meta
                .bangumi_hash()
                .build_homepage_url(mikan_base_url.clone());
            let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
            tracing::info!(title = rss_item.title, "Scraping bangumi homepage...");
            if !bangumi_homepage_doppel_path.exists_any() {
                let bangumi_homepage_data =
                    fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
                bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
                tracing::info!(title = rss_item.title, "Bangumi homepage saved");
            } else {
                tracing::info!(title = rss_item.title, "Bangumi homepage already exists");
            };
        }
        {
            let bangumi_rss_url = episode_homepage_meta
                .bangumi_hash()
                .build_rss_url(mikan_base_url.clone());
            let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
            tracing::info!(title = rss_item.title, "Scraping bangumi rss...");
            let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
                let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
                bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
                tracing::info!(title = rss_item.title, "Bangumi rss saved");
                bangumi_rss_data
            } else {
                tracing::info!(title = rss_item.title, "Bangumi rss already exists");
                String::from_utf8(bangumi_rss_doppel_path.read()?)?
            };

            let channel = rss::Channel::read_from(bangumi_rss_data.as_bytes())?;
            let rss_items: Vec<MikanRssEpisodeItem> = channel
                .items
                .into_iter()
                .map(MikanRssEpisodeItem::try_from)
                .collect::<Result<Vec<_>, _>>()?;
            for rss_item in rss_items {
                {
                    tracing::info!(title = rss_item.title, "Scraping episode homepage...");
                    let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
                    let episode_homepage_doppel_path =
                        MikanDoppelPath::new(episode_homepage_url.clone());
                    if !episode_homepage_doppel_path.exists_any() {
                        let episode_homepage_data =
                            fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
                        episode_homepage_doppel_path.write(&episode_homepage_data)?;
                        tracing::info!(title = rss_item.title, "Episode homepage saved");
                    } else {
                        tracing::info!(title = rss_item.title, "Episode homepage already exists");
                    };
                };

                {
                    let episode_torrent_url = rss_item.url;
                    let episode_torrent_doppel_path =
                        MikanDoppelPath::new(episode_torrent_url.clone());
                    tracing::info!(title = rss_item.title, "Scraping episode torrent...");
                    if !episode_torrent_doppel_path.exists_any() {
                        match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                            Ok(episode_torrent_data) => {
                                episode_torrent_doppel_path.write(&episode_torrent_data)?;
                                tracing::info!(title = rss_item.title, "Episode torrent saved");
                            }
                            Err(e) => {
                                if let FetchError::ReqwestError { source } = &e
                                    && source.status().is_some_and(|status| {
                                        status == reqwest::StatusCode::NOT_FOUND
                                    })
                                {
                                    tracing::warn!(
                                        title = rss_item.title,
                                        "Episode torrent not found, maybe deleted since new \
                                         version"
                                    );
                                    episode_torrent_doppel_path
                                        .write_meta(MikanDoppelMeta { status: 404 })?;
                                } else {
                                    Err(e)?;
                                }
                            }
                        }

                        tracing::info!(title = rss_item.title, "Episode torrent saved");
                    } else {
                        tracing::info!(title = rss_item.title, "Episode torrent already exists");
                    }
                }
            }
        }
    }
    tracing::info!("Scraping subscriber subscription done");
    Ok(())
}
@@ -1,56 +1,33 @@
-use recorder::errors::RecorderResult;
-// #![allow(unused_imports)]
-// use recorder::{
-//     app::{AppContext, AppContextTrait},
-//     errors::RecorderResult,
-//     migrations::Migrator,
-//     models::{
-//         subscribers::SEED_SUBSCRIBER,
-//         subscriptions::{self, SubscriptionCreateFromRssDto},
-//     },
-// };
-// use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
-// use sea_orm_migration::MigratorTrait;
-
-// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RecorderResult<()> {
-//     let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
-//     // let rss_link =
-//     //     "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
-//     let subscription = if let Some(subscription) =
-//         subscriptions::Entity::find()
-//             .filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
-//             .one(ctx.db())
-//             .await?
-//     {
-//         subscription
-//     } else {
-//         subscriptions::Model::add_subscription(
-//             ctx,
-//             subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
-//                 rss_link: rss_link.to_string(),
-//                 display_name: String::from("Mikan Project - 我的番组"),
-//                 enabled: Some(true),
-//             }),
-//             1,
-//         )
-//         .await?
-//     };
-
-//     subscription.pull_subscription(ctx).await?;
-
-//     Ok(())
-// }
-
-// #[tokio::main]
-// async fn main() -> RecorderResult<()> {
-//     pull_mikan_bangumi_rss(&ctx).await?;
-
-//     Ok(())
-// }
-
+#![feature(duration_constructors_lite)]
+use std::{sync::Arc, time::Duration};
+
+use apalis_sql::postgres::PostgresStorage;
+use recorder::{
+    app::AppContextTrait,
+    errors::RecorderResult,
+    test_utils::{
+        app::TestingAppContext,
+        database::{TestingDatabaseServiceConfig, build_testing_database_service},
+    },
+};
+
 #[tokio::main]
 async fn main() -> RecorderResult<()> {
+    let app_ctx = {
+        let db_service = build_testing_database_service(TestingDatabaseServiceConfig {
+            auto_migrate: false,
+        })
+        .await?;
+        Arc::new(TestingAppContext::builder().db(db_service).build())
+    };
+
+    let db = app_ctx.db();
+
+    PostgresStorage::setup(db.get_postgres_connection_pool()).await?;
+
+    dbg!(db.get_postgres_connection_pool().connect_options());
+
+    tokio::time::sleep(Duration::from_hours(1)).await;
+
     Ok(())
 }
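`Duration::from_hours` is gated behind the unstable `duration_constructors_lite` feature, which is why the rewrite adds the `#![feature(...)]` attribute. On stable Rust the equivalent wait can be spelled directly in seconds:

```rust
use std::time::Duration;

fn main() {
    // Stable equivalent of the nightly-only `Duration::from_hours(1)`.
    let one_hour = Duration::from_secs(60 * 60);
    assert_eq!(one_hour.as_secs(), 3600);
}
```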
@@ -86,6 +86,14 @@ leaky_bucket_initial_tokens = 1
 leaky_bucket_refill_tokens = 1
 leaky_bucket_refill_interval = 500
+
+[mikan.http_client.proxy]
+server = '{{ get_env(name="MIKAN_PROXY", default = "") }}'
+auth_header = '{{ get_env(name="MIKAN_PROXY_AUTH_HEADER", default = "") }}'
+no_proxy = '{{ get_env(name="MIKAN_NO_PROXY", default = "") }}'
+accept_invalid_certs = '{{ get_env(name="MIKAN_PROXY_ACCEPT_INVALID_CERTS", default = "false") }}'
+

 [auth]
 auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
 basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
@@ -67,19 +67,9 @@ impl AppBuilder {
     }

     pub async fn build(self) -> RecorderResult<App> {
-        AppConfig::load_dotenv(
-            &self.environment,
-            &self.working_dir,
-            self.dotenv_file.as_deref(),
-        )
-        .await?;
+        self.load_env().await?;

-        let config = AppConfig::load_config(
-            &self.environment,
-            &self.working_dir,
-            self.config_file.as_deref(),
-        )
-        .await?;
+        let config = self.load_config().await?;

         let app_context =
             AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?;

@@ -90,6 +80,26 @@ impl AppBuilder {
         })
     }

+    pub async fn load_env(&self) -> RecorderResult<()> {
+        AppConfig::load_dotenv(
+            &self.environment,
+            &self.working_dir,
+            self.dotenv_file.as_deref(),
+        )
+        .await?;
+        Ok(())
+    }
+
+    pub async fn load_config(&self) -> RecorderResult<AppConfig> {
+        let config = AppConfig::load_config(
+            &self.environment,
+            &self.working_dir,
+            self.config_file.as_deref(),
+        )
+        .await?;
+        Ok(config)
+    }
+
     pub fn working_dir(self, working_dir: String) -> Self {
         let mut ret = self;
         ret.working_dir = working_dir;
|
|||||||
leaky_bucket_refill_tokens = 1
|
leaky_bucket_refill_tokens = 1
|
||||||
leaky_bucket_refill_interval = 500
|
leaky_bucket_refill_interval = 500
|
||||||
|
|
||||||
|
[mikan.http_client.proxy]
|
||||||
|
|
||||||
|
[mikan.http_client.proxy.headers]
|
||||||
|
|
||||||
[graphql]
|
[graphql]
|
||||||
depth_limit = inf
|
depth_limit = inf
|
||||||
complexity_limit = inf
|
complexity_limit = inf
|
||||||
@ -20,3 +24,5 @@ complexity_limit = inf
|
|||||||
[crypto]
|
[crypto]
|
||||||
|
|
||||||
[task]
|
[task]
|
||||||
|
|
||||||
|
[message]
|
||||||
|
|||||||
@@ -11,7 +11,8 @@ use super::env::Environment;
 use crate::{
     auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
     errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
-    logger::LoggerConfig, storage::StorageConfig, tasks::TaskConfig, web::WebServerConfig,
+    logger::LoggerConfig, message::MessageConfig, storage::StorageConfig, task::TaskConfig,
+    web::WebServerConfig,
 };

 const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");

@@ -28,7 +29,8 @@ pub struct AppConfig {
     pub graphql: GraphQLConfig,
     pub logger: LoggerConfig,
     pub database: DatabaseConfig,
-    pub tasks: TaskConfig,
+    pub task: TaskConfig,
+    pub message: MessageConfig,
 }

 impl AppConfig {
@@ -108,12 +110,12 @@ impl AppConfig {
             for f in try_filenames.iter() {
                 let p = try_dotenv_file_or_dir_path.join(f);
                 if p.exists() && p.is_file() {
-                    dotenv::from_path(p)?;
+                    dotenvy::from_path(p)?;
                     break;
                 }
             }
         } else if try_dotenv_file_or_dir_path.is_file() {
-            dotenv::from_path(try_dotenv_file_or_dir_path)?;
+            dotenvy::from_path(try_dotenv_file_or_dir_path)?;
             break;
         }
     }
@@ -12,8 +12,9 @@ use crate::{
     extract::mikan::MikanClient,
     graphql::GraphQLService,
     logger::LoggerService,
+    message::MessageService,
     storage::{StorageService, StorageServiceTrait},
-    tasks::TaskService,
+    task::TaskService,
 };

 pub trait AppContextTrait: Send + Sync + Debug {

@@ -29,6 +30,7 @@ pub trait AppContextTrait: Send + Sync + Debug {
     fn environment(&self) -> &Environment;
     fn crypto(&self) -> &CryptoService;
     fn task(&self) -> &TaskService;
+    fn message(&self) -> &MessageService;
 }

 pub struct AppContext {

@@ -38,12 +40,13 @@ pub struct AppContext {
     cache: CacheService,
     mikan: MikanClient,
     auth: AuthService,
-    graphql: GraphQLService,
     storage: StorageService,
     crypto: CryptoService,
     working_dir: String,
     environment: Environment,
+    message: MessageService,
     task: OnceCell<TaskService>,
+    graphql: OnceCell<GraphQLService>,
 }

 impl AppContext {

@@ -58,10 +61,10 @@ impl AppContext {
         let cache = CacheService::from_config(config.cache).await?;
         let db = DatabaseService::from_config(config.database).await?;
         let storage = StorageService::from_config(config.storage).await?;
+        let message = MessageService::from_config(config.message).await?;
         let auth = AuthService::from_conf(config.auth).await?;
         let mikan = MikanClient::from_config(config.mikan).await?;
         let crypto = CryptoService::from_config(config.crypto).await?;
-        let graphql = GraphQLService::from_config_and_database(config.graphql, db.clone()).await?;

         let ctx = Arc::new(AppContext {
             config: config_cloned,

@@ -73,14 +76,21 @@ impl AppContext {
             storage,
             mikan,
             working_dir: working_dir.to_string(),
-            graphql,
             crypto,
+            message,
             task: OnceCell::new(),
+            graphql: OnceCell::new(),
         });

         ctx.task
             .get_or_try_init(async || {
-                TaskService::from_config_and_ctx(config.tasks, ctx.clone()).await
+                TaskService::from_config_and_ctx(config.task, ctx.clone()).await
+            })
+            .await?;
+
+        ctx.graphql
+            .get_or_try_init(async || {
+                GraphQLService::from_config_and_ctx(config.graphql, ctx.clone()).await
             })
             .await?;

@@ -114,7 +124,7 @@ impl AppContextTrait for AppContext {
         &self.auth
     }
     fn graphql(&self) -> &GraphQLService {
-        &self.graphql
+        self.graphql.get().expect("graphql should be set")
     }
     fn storage(&self) -> &dyn StorageServiceTrait {
         &self.storage

@@ -131,4 +141,7 @@ impl AppContextTrait for AppContext {
     fn task(&self) -> &TaskService {
         self.task.get().expect("task should be set")
     }
+    fn message(&self) -> &MessageService {
+        &self.message
+    }
 }
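`GraphQLService` now joins `TaskService` in a `OnceCell`: both need an `Arc<dyn AppContextTrait>` that in turn owns them, so they are initialized in a second phase once the `Arc` exists. A stripped-down sketch of the pattern, with hypothetical names:

```rust
use std::sync::Arc;

use tokio::sync::OnceCell;

// Minimal shape of the two-phase init in AppContext: the service needs an
// Arc of its owner, so the owner holds an empty OnceCell that is filled
// right after the Arc is created.
struct Ctx {
    service: OnceCell<Service>,
}

struct Service {
    ctx: Arc<Ctx>,
}

#[tokio::main]
async fn main() {
    let ctx = Arc::new(Ctx { service: OnceCell::new() });
    ctx.service
        .get_or_init(|| async { Service { ctx: ctx.clone() } })
        .await;
    assert!(ctx.service.get().is_some());
}
```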
@@ -1,11 +1,12 @@
 use std::{net::SocketAddr, sync::Arc};
 
 use axum::Router;
-use tokio::signal;
+use tokio::{net::TcpSocket, signal};
+use tracing::instrument;
 
 use super::{builder::AppBuilder, context::AppContextTrait};
 use crate::{
-    errors::RecorderResult,
+    errors::{RecorderError, RecorderResult},
     web::{
         controller::{self, core::ControllerTrait},
         middleware::default_middleware_stack,
@@ -22,14 +23,31 @@ impl App {
         AppBuilder::default()
     }
 
+    #[instrument(err, skip(self))]
     pub async fn serve(&self) -> RecorderResult<()> {
         let context = &self.context;
         let config = context.config();
-        let listener = tokio::net::TcpListener::bind(&format!(
-            "{}:{}",
-            config.server.binding, config.server.port
-        ))
-        .await?;
+        let listener = {
+            let addr: SocketAddr =
+                format!("{}:{}", config.server.binding, config.server.port).parse()?;
+
+            let socket = if addr.is_ipv4() {
+                TcpSocket::new_v4()
+            } else {
+                TcpSocket::new_v6()
+            }?;
+
+            socket.set_reuseaddr(true)?;
+
+            #[cfg(all(unix, not(target_os = "solaris")))]
+            if let Err(e) = socket.set_reuseport(true) {
+                tracing::warn!("Failed to set SO_REUSEPORT: {}", e);
+            }
+
+            socket.bind(addr)?;
+            socket.listen(1024)
+        }?;
 
         let mut router = Router::<Arc<dyn AppContextTrait>>::new();
 
@@ -53,12 +71,38 @@ impl App {
             .with_state(context.clone())
             .into_make_service_with_connect_info::<SocketAddr>();
 
-        axum::serve(listener, router)
-            .with_graceful_shutdown(async move {
-                Self::shutdown_signal().await;
-                tracing::info!("shutting down...");
-            })
-            .await?;
+        let task = context.task();
+
+        tokio::try_join!(
+            async {
+                axum::serve(listener, router)
+                    .with_graceful_shutdown(async move {
+                        Self::shutdown_signal().await;
+                        tracing::info!("axum shutting down...");
+                    })
+                    .await?;
+                Ok::<(), RecorderError>(())
+            },
+            async {
+                let monitor = task.setup_monitor().await?;
+
+                monitor
+                    .run_with_signal(async move {
+                        Self::shutdown_signal().await;
+                        tracing::info!("apalis shutting down...");
+                        Ok(())
+                    })
+                    .await?;
+
+                Ok::<(), RecorderError>(())
+            },
+            async {
+                let listener = task.setup_listener().await?;
+                listener.listen().await?;
+
+                Ok::<(), RecorderError>(())
+            }
+        )?;
 
         Ok(())
     }
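
serve() now races the axum server, the apalis monitor, and the task listener, so a failure or shutdown signal in any one of them tears the whole call down. A stripped-down sketch of the same tokio::try_join! shape, with std::io::Error standing in for RecorderError:

    use tokio::signal;

    // Two long-running loops that both stop on ctrl-c.
    async fn run_a() -> Result<(), std::io::Error> {
        signal::ctrl_c().await?;
        println!("a shutting down...");
        Ok(())
    }

    async fn run_b() -> Result<(), std::io::Error> {
        signal::ctrl_c().await?;
        println!("b shutting down...");
        Ok(())
    }

    #[tokio::main]
    async fn main() -> Result<(), std::io::Error> {
        // try_join! drives both futures concurrently and fails fast if either errors.
        tokio::try_join!(run_a(), run_b())?;
        Ok(())
    }
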
@@ -1,6 +1,8 @@
-use jwt_authorizer::OneOrArray;
+use std::collections::HashMap;
+
+use jwtk::OneOrMany;
 use serde::{Deserialize, Serialize};
-use serde_with::{NoneAsEmptyString, serde_as};
+use serde_with::serde_as;
 
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct BasicAuthConfig {
@@ -22,13 +24,9 @@ pub struct OidcAuthConfig {
     #[serde(rename = "oidc_client_secret")]
     pub client_secret: String,
     #[serde(rename = "oidc_extra_scopes")]
-    pub extra_scopes: Option<OneOrArray<String>>,
-    #[serde_as(as = "NoneAsEmptyString")]
-    #[serde(rename = "oidc_extra_claim_key")]
-    pub extra_claim_key: Option<String>,
-    #[serde(rename = "oidc_extra_claim_value")]
-    #[serde_as(as = "NoneAsEmptyString")]
-    pub extra_claim_value: Option<String>,
+    pub extra_scopes: Option<OneOrMany<String>>,
+    #[serde(rename = "oidc_extra_claims")]
+    pub extra_claims: Option<HashMap<String, Option<String>>>,
 }
 
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
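
The new oidc_extra_claims map folds the old single key/value pair into one field that can express several checks at once: a Some(value) entry pins a claim to a value, a None entry only requires the claim to be present. A small illustration of that encoding; the keys and values are made up:

    use std::collections::HashMap;

    fn main() {
        let mut extra_claims: HashMap<String, Option<String>> = HashMap::new();
        extra_claims.insert("groups".into(), Some("admin".into()));   // must equal "admin"
        extra_claims.insert("email_verified".into(), None);           // must merely exist

        for (key, expected) in &extra_claims {
            match expected {
                Some(value) => println!("claim {key} must equal {value}"),
                None => println!("claim {key} must be present"),
            }
        }
    }
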
@@ -11,6 +11,7 @@ use openidconnect::{
 };
 use serde::{Deserialize, Serialize};
 use snafu::prelude::*;
+use util::OptDynErr;
 
 use crate::models::auth::AuthType;
 
@@ -26,10 +27,6 @@ pub enum AuthError {
     FindAuthRecordError,
     #[snafu(display("Invalid credentials"))]
     BasicInvalidCredentials,
-    #[snafu(transparent)]
-    OidcInitError {
-        source: jwt_authorizer::error::InitError,
-    },
     #[snafu(display("Invalid oidc provider meta client error: {source}"))]
     OidcProviderHttpClientError { source: HttpClientError },
     #[snafu(transparent)]
@@ -65,8 +62,10 @@ pub enum AuthError {
     OidcSignatureVerificationError { source: SignatureVerificationError },
     #[snafu(transparent)]
     OidcSigningError { source: SigningError },
+    #[snafu(display("Missing Bearer token"))]
+    OidcMissingBearerToken,
     #[snafu(transparent)]
-    OidcJwtAuthError { source: jwt_authorizer::AuthError },
+    OidcJwtkError { source: jwtk::Error },
     #[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
     OidcExtraScopesMatchError { expected: String, found: String },
     #[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
@@ -87,23 +86,29 @@ pub enum AuthError {
         (if column.is_empty() { "" } else { "." }),
         source.message
     ))]
-    GraphQLPermissionError {
+    GraphqlDynamicPermissionError {
         #[snafu(source(false))]
         source: Box<async_graphql::Error>,
         field: String,
         column: String,
         context_path: String,
     },
+    #[snafu(display("GraphQL permission denied since {field}"))]
+    GraphqlStaticPermissionError {
+        #[snafu(source)]
+        source: OptDynErr,
+        field: String,
+    },
 }
 
 impl AuthError {
-    pub fn from_graphql_subscribe_id_guard(
+    pub fn from_graphql_dynamic_subscribe_id_guard(
         source: async_graphql::Error,
         context: &ResolverContext,
         field_name: &str,
         column_name: &str,
     ) -> AuthError {
-        AuthError::GraphQLPermissionError {
+        AuthError::GraphqlDynamicPermissionError {
             source: Box::new(source),
             field: field_name.to_string(),
             column: column_name.to_string(),
@@ -12,8 +12,9 @@ use axum::{
     http::{HeaderValue, request::Parts},
 };
 use fetch::{HttpClient, client::HttpClientError};
+use http::header::AUTHORIZATION;
 use itertools::Itertools;
-use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer};
+use jwtk::jwk::RemoteJwksVerifier;
 use moka::future::Cache;
 use openidconnect::{
     AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
@@ -77,21 +78,6 @@ impl<'c> openidconnect::AsyncHttpClient<'c> for OidcHttpClient {
 
 #[derive(Deserialize, Serialize, Clone, Debug)]
 pub struct OidcAuthClaims {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub iss: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub sub: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub aud: Option<OneOrArray<String>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub exp: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub nbf: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub iat: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub jti: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
     pub scope: Option<String>,
     #[serde(flatten)]
     pub custom: HashMap<String, Value>,
@@ -101,40 +87,6 @@ impl OidcAuthClaims {
     pub fn scopes(&self) -> std::str::Split<'_, char> {
         self.scope.as_deref().unwrap_or_default().split(',')
     }
-
-    pub fn get_claim(&self, key: &str) -> Option<String> {
-        match key {
-            "iss" => self.iss.clone(),
-            "sub" => self.sub.clone(),
-            "aud" => self.aud.as_ref().map(|s| s.iter().join(",")),
-            "exp" => self.exp.clone().map(|s| s.0.to_string()),
-            "nbf" => self.nbf.clone().map(|s| s.0.to_string()),
-            "iat" => self.iat.clone().map(|s| s.0.to_string()),
-            "jti" => self.jti.clone(),
-            "scope" => self.scope.clone(),
-            key => self.custom.get(key).map(|s| s.to_string()),
-        }
-    }
-
-    pub fn has_claim(&self, key: &str) -> bool {
-        match key {
-            "iss" => self.iss.is_some(),
-            "sub" => self.sub.is_some(),
-            "aud" => self.aud.is_some(),
-            "exp" => self.exp.is_some(),
-            "nbf" => self.nbf.is_some(),
-            "iat" => self.iat.is_some(),
-            "jti" => self.jti.is_some(),
-            "scope" => self.scope.is_some(),
-            key => self.custom.contains_key(key),
-        }
-    }
-
-    pub fn contains_audience(&self, aud: &str) -> bool {
-        self.aud
-            .as_ref()
-            .is_some_and(|arr| arr.iter().any(|s| s == aud))
-    }
 }
 
 #[derive(Debug, Clone, Serialize)]
@@ -164,7 +116,7 @@ pub struct OidcAuthCallbackPayload {
 
 pub struct OidcAuthService {
     pub config: OidcAuthConfig,
-    pub api_authorizer: Authorizer<OidcAuthClaims>,
+    pub jwk_verifier: RemoteJwksVerifier,
     pub oidc_provider_client: Arc<HttpClient>,
     pub oidc_request_cache: Cache<String, OidcAuthRequest>,
 }
@@ -317,47 +269,68 @@ impl AuthServiceTrait for OidcAuthService {
         request: &mut Parts,
     ) -> Result<AuthUserInfo, AuthError> {
         let config = &self.config;
-        let token = self
-            .api_authorizer
-            .extract_token(&request.headers)
-            .ok_or(jwt_authorizer::AuthError::MissingToken())?;
+        let token = request
+            .headers
+            .get(AUTHORIZATION)
+            .and_then(|authorization| {
+                authorization
+                    .to_str()
+                    .ok()
+                    .and_then(|s| s.strip_prefix("Bearer "))
+            })
+            .ok_or(AuthError::OidcMissingBearerToken)?;
 
-        let token_data = self.api_authorizer.check_auth(&token).await?;
-        let claims = token_data.claims;
+        let token_data = self.jwk_verifier.verify::<OidcAuthClaims>(token).await?;
+        let claims = token_data.claims();
         let sub = if let Some(sub) = claims.sub.as_deref() {
             sub
         } else {
             return Err(AuthError::OidcSubMissingError);
         };
-        if !claims.contains_audience(&config.audience) {
+        if !claims.aud.iter().any(|aud| aud == &config.audience) {
             return Err(AuthError::OidcAudMissingError {
                 aud: config.audience.clone(),
             });
         }
+        let extra_claims = &claims.extra;
         if let Some(expected_scopes) = config.extra_scopes.as_ref() {
-            let found_scopes = claims.scopes().collect::<HashSet<_>>();
+            let found_scopes = extra_claims.scopes().collect::<HashSet<_>>();
             if !expected_scopes
                 .iter()
                 .all(|es| found_scopes.contains(es as &str))
             {
                 return Err(AuthError::OidcExtraScopesMatchError {
                     expected: expected_scopes.iter().join(","),
-                    found: claims.scope.unwrap_or_default(),
+                    found: extra_claims
+                        .scope
+                        .as_deref()
+                        .unwrap_or_default()
+                        .to_string(),
                 });
             }
         }
-        if let Some(key) = config.extra_claim_key.as_ref() {
-            if !claims.has_claim(key) {
-                return Err(AuthError::OidcExtraClaimMissingError { claim: key.clone() });
-            }
-            if let Some(value) = config.extra_claim_value.as_ref()
-                && claims.get_claim(key).is_none_or(|v| &v != value)
-            {
-                return Err(AuthError::OidcExtraClaimMatchError {
-                    expected: value.clone(),
-                    found: claims.get_claim(key).unwrap_or_default().to_string(),
-                    key: key.clone(),
-                });
+        if let Some(expected_extra_claims) = config.extra_claims.as_ref() {
+            for (expected_key, expected_value) in expected_extra_claims.iter() {
+                match (extra_claims.custom.get(expected_key), expected_value) {
+                    (found_value, Some(expected_value)) => {
+                        if let Some(Value::String(found_value)) = found_value
+                            && expected_value == found_value
                        {
+                        } else {
+                            return Err(AuthError::OidcExtraClaimMatchError {
+                                expected: expected_value.clone(),
+                                found: found_value.map(|v| v.to_string()).unwrap_or_default(),
+                                key: expected_key.clone(),
+                            });
+                        }
+                    }
+                    (None, None) => {
+                        return Err(AuthError::OidcExtraClaimMissingError {
+                            claim: expected_key.clone(),
+                        });
+                    }
+                    _ => {}
+                }
+            }
         }
         let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
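
With jwt_authorizer gone, the Bearer token is now pulled out of the Authorization header by hand before verification. The same step in isolation, using only the http crate:

    use http::{HeaderMap, header::AUTHORIZATION};

    // Standalone version of the header-to-token step; error handling simplified to Option.
    fn bearer_token(headers: &HeaderMap) -> Option<&str> {
        headers
            .get(AUTHORIZATION)
            .and_then(|value| value.to_str().ok())
            .and_then(|s| s.strip_prefix("Bearer "))
    }

    fn main() {
        let mut headers = HeaderMap::new();
        headers.insert(AUTHORIZATION, "Bearer abc.def.ghi".parse().unwrap());
        assert_eq!(bearer_token(&headers), Some("abc.def.ghi"));
    }
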
@@ -1,25 +1,22 @@
 use std::{sync::Arc, time::Duration};
 
 use async_trait::async_trait;
-use axum::{
-    extract::FromRequestParts,
-    http::request::Parts,
-    response::{IntoResponse as _, Response},
-};
+use axum::http::request::Parts;
 use fetch::{
     HttpClient, HttpClientConfig,
     client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
 };
 use http::header::HeaderValue;
-use jwt_authorizer::{JwtAuthorizer, Validation};
+use jwtk::jwk::RemoteJwksVerifier;
 use moka::future::Cache;
+use openidconnect::{IssuerUrl, core::CoreProviderMetadata};
 use snafu::prelude::*;
 
 use super::{
     AuthConfig,
     basic::BasicAuthService,
-    errors::{AuthError, OidcProviderHttpClientSnafu},
-    oidc::{OidcAuthClaims, OidcAuthService},
+    errors::{AuthError, OidcProviderHttpClientSnafu, OidcProviderUrlSnafu},
+    oidc::{OidcAuthService, OidcHttpClient},
 };
 use crate::{app::AppContextTrait, models::auth::AuthType};
 
@@ -29,22 +26,6 @@ pub struct AuthUserInfo {
     pub auth_type: AuthType,
 }
 
-impl FromRequestParts<Arc<dyn AppContextTrait>> for AuthUserInfo {
-    type Rejection = Response;
-
-    async fn from_request_parts(
-        parts: &mut Parts,
-        state: &Arc<dyn AppContextTrait>,
-    ) -> Result<Self, Self::Rejection> {
-        let auth_service = state.auth();
-
-        auth_service
-            .extract_user_info(state.as_ref(), parts)
-            .await
-            .map_err(|err| err.into_response())
-    }
-}
-
 #[async_trait]
 pub trait AuthServiceTrait {
     async fn extract_user_info(
@@ -66,27 +47,33 @@ impl AuthService {
         let result = match config {
             AuthConfig::Basic(config) => AuthService::Basic(Box::new(BasicAuthService { config })),
             AuthConfig::Oidc(config) => {
-                let validation = Validation::new()
-                    .iss(&[&config.issuer])
-                    .aud(&[&config.audience]);
-
-                let oidc_provider_client = HttpClient::from_config(HttpClientConfig {
-                    exponential_backoff_max_retries: Some(3),
-                    cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
-                    cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
-                    ..Default::default()
-                })
-                .context(OidcProviderHttpClientSnafu)?;
+                let oidc_provider_client = Arc::new(
+                    HttpClient::from_config(HttpClientConfig {
+                        exponential_backoff_max_retries: Some(3),
+                        cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
+                        cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
+                        ..Default::default()
+                    })
+                    .context(OidcProviderHttpClientSnafu)?,
+                );
+
+                let provider_metadata = {
+                    let client = OidcHttpClient(oidc_provider_client.clone());
+                    let issuer_url =
+                        IssuerUrl::new(config.issuer.clone()).context(OidcProviderUrlSnafu)?;
+                    CoreProviderMetadata::discover_async(issuer_url, &client).await
+                }?;
 
-                let api_authorizer = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer)
-                    .validation(validation)
-                    .build()
-                    .await?;
+                let jwk_verifier = RemoteJwksVerifier::new(
+                    provider_metadata.jwks_uri().to_string().clone(),
+                    None,
+                    Duration::from_secs(300),
+                );
 
                 AuthService::Oidc(Box::new(OidcAuthService {
                     config,
-                    api_authorizer,
-                    oidc_provider_client: Arc::new(oidc_provider_client),
+                    jwk_verifier,
+                    oidc_provider_client,
                     oidc_request_cache: Cache::builder()
                         .time_to_live(Duration::from_mins(5))
                         .name("oidc_request_cache")
@@ -100,6 +87,7 @@ impl AuthService {
 
 #[async_trait]
 impl AuthServiceTrait for AuthService {
+    #[tracing::instrument(skip(self, ctx, request))]
     async fn extract_user_info(
         &self,
         ctx: &dyn AppContextTrait,
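
The verifier side switches to jwtk's RemoteJwksVerifier, built from the JWKS URI discovered through openidconnect metadata. A sketch of standalone usage, assuming jwtk keeps the constructor and verify signatures used in the hunk above; the issuer URL and token are placeholders:

    use std::time::Duration;
    use jwtk::jwk::RemoteJwksVerifier;

    #[tokio::main]
    async fn main() -> Result<(), jwtk::Error> {
        let verifier = RemoteJwksVerifier::new(
            "https://issuer.example.com/.well-known/jwks.json".into(),
            None,                     // default HTTP client
            Duration::from_secs(300), // JWKS cache TTL, matching the diff
        );

        // A real JWT from the issuer would go here; this placeholder will fail verification.
        let token = "...";
        let data = verifier
            .verify::<serde_json::Map<String, serde_json::Value>>(token)
            .await?;
        println!("sub = {:?}", data.claims().sub);
        Ok(())
    }
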
16 apps/recorder/src/bin/migrate_down.rs Normal file
@@ -0,0 +1,16 @@
+use recorder::{app::AppBuilder, database::DatabaseService, errors::RecorderResult};
+
+#[tokio::main]
+async fn main() -> RecorderResult<()> {
+    let builder = AppBuilder::from_main_cli(None).await?;
+
+    builder.load_env().await?;
+    let mut database_config = builder.load_config().await?.database;
+    database_config.auto_migrate = false;
+
+    let database_service = DatabaseService::from_config(database_config).await?;
+
+    database_service.migrate_down().await?;
+
+    Ok(())
+}
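
Assuming the package is named `recorder`, as its own import suggests, the rollback binary above would be invoked with `cargo run -p recorder --bin migrate_down`. It loads the normal configuration but forces `auto_migrate = false`, so connecting does not re-apply migrations just before tearing them down.
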
@@ -1,3 +1,6 @@
+use async_graphql::Error as AsyncGraphQLError;
+use seaography::SeaographyError;
+
 #[derive(Debug, snafu::Snafu)]
 pub enum CryptoError {
     #[snafu(transparent)]
@@ -9,3 +12,9 @@ pub enum CryptoError {
     #[snafu(transparent)]
     SerdeJsonError { source: serde_json::Error },
 }
+
+impl From<CryptoError> for SeaographyError {
+    fn from(error: CryptoError) -> Self {
+        SeaographyError::AsyncGraphQLError(AsyncGraphQLError::new(error.to_string()))
+    }
+}
@@ -16,7 +16,7 @@ impl CryptoService {
         Ok(Self { config })
     }
 
-    pub fn encrypt_data(&self, data: String) -> Result<String, CryptoError> {
+    pub fn encrypt_string(&self, data: String) -> Result<String, CryptoError> {
         let key = rand::rng().random::<[u8; 32]>();
         let mut cocoon = Cocoon::new(&key);
 
@@ -32,7 +32,7 @@ impl CryptoService {
         Ok(BASE64_URL_SAFE.encode(combined))
     }
 
-    pub fn decrypt_data(&self, data: &str) -> Result<String, CryptoError> {
+    pub fn decrypt_string(&self, data: &str) -> Result<String, CryptoError> {
         let decoded = BASE64_URL_SAFE.decode(data)?;
 
         let (key, remain) = decoded.split_at(32);
@@ -45,20 +45,17 @@ impl CryptoService {
         String::from_utf8(data).map_err(CryptoError::from)
     }
 
-    pub fn encrypt_credentials<T: Serialize>(
-        &self,
-        credentials: &T,
-    ) -> Result<String, CryptoError> {
+    pub fn encrypt_serialize<T: Serialize>(&self, credentials: &T) -> Result<String, CryptoError> {
         let json = serde_json::to_string(credentials)?;
 
-        self.encrypt_data(json)
+        self.encrypt_string(json)
     }
 
-    pub fn decrypt_credentials<T: for<'de> Deserialize<'de>>(
+    pub fn decrypt_deserialize<T: for<'de> Deserialize<'de>>(
         &self,
         encrypted: &str,
     ) -> Result<T, CryptoError> {
-        let data = self.decrypt_data(encrypted)?;
+        let data = self.decrypt_string(encrypted)?;
 
         serde_json::from_str(&data).map_err(CryptoError::from)
     }
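
The renamed encrypt_string/decrypt_string pair keeps the container format visible in the surrounding code: a fresh 32-byte key seals the payload with cocoon, and base64-url(key || ciphertext) is what gets stored, so decryption can split the key back off the front. A self-contained round trip of that scheme, with error mapping simplified:

    use base64::{Engine as _, engine::general_purpose::URL_SAFE};
    use cocoon::Cocoon;
    use rand::Rng;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Seal the payload with a throwaway key, as in encrypt_string.
        let key = rand::rng().random::<[u8; 32]>();
        let mut cocoon = Cocoon::new(&key);
        let sealed = cocoon.wrap(b"secret").map_err(|e| format!("{e:?}"))?;

        // Store base64_url(key || ciphertext).
        let mut combined = key.to_vec();
        combined.extend(sealed);
        let encoded = URL_SAFE.encode(&combined);

        // Decrypt: split the 32-byte key back off, as in decrypt_string.
        let decoded = URL_SAFE.decode(&encoded)?;
        let (key, sealed) = decoded.split_at(32);
        let data = Cocoon::new(key).unwrap(sealed).map_err(|e| format!("{e:?}"))?;
        assert_eq!(data, b"secret");
        Ok(())
    }
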
@@ -1,5 +1,6 @@
 use std::{ops::Deref, time::Duration};
 
+use apalis_sql::postgres::PostgresStorage;
 use sea_orm::{
     ConnectOptions, ConnectionTrait, Database, DatabaseConnection, DbBackend, DbErr, ExecResult,
     QueryResult, Statement,
@@ -9,19 +10,17 @@ use sea_orm_migration::MigratorTrait;
 use super::DatabaseConfig;
 use crate::{errors::RecorderResult, migrations::Migrator};
 
-pub trait DatabaseServiceConnectionTrait {
-    fn get_database_connection(&self) -> &DatabaseConnection;
-}
-
 pub struct DatabaseService {
+    pub config: DatabaseConfig,
     connection: DatabaseConnection,
-    #[cfg(all(test, feature = "testcontainers"))]
+    #[cfg(feature = "testcontainers")]
     pub container:
         Option<testcontainers::ContainerAsync<testcontainers_modules::postgres::Postgres>>,
 }
 
 impl DatabaseService {
     pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> {
+        let db_config = config.clone();
         let mut opt = ConnectOptions::new(&config.uri);
         opt.max_connections(config.max_connections)
             .min_connections(config.min_connections)
@@ -51,15 +50,36 @@ impl DatabaseService {
         // .await?;
         // }
 
+        let me = Self {
+            connection: db,
+            #[cfg(feature = "testcontainers")]
+            container: None,
+            config: db_config,
+        };
+
         if config.auto_migrate {
-            Migrator::up(&db, None).await?;
+            me.migrate_up().await?;
         }
 
-        Ok(Self {
-            connection: db,
-            #[cfg(all(test, feature = "testcontainers"))]
-            container: None,
-        })
+        Ok(me)
+    }
+
+    pub async fn migrate_up(&self) -> RecorderResult<()> {
+        {
+            let pool = &self.get_postgres_connection_pool();
+            PostgresStorage::setup(pool).await?;
+        }
+        Migrator::up(&self.connection, None).await?;
+        Ok(())
+    }
+
+    pub async fn migrate_down(&self) -> RecorderResult<()> {
+        Migrator::down(&self.connection, None).await?;
+        {
+            self.execute_unprepared(r#"DROP SCHEMA IF EXISTS apalis CASCADE"#)
+                .await?;
+        }
+        Ok(())
     }
 }
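
migrate_down also has to drop the apalis schema that PostgresStorage::setup created, which the sea_orm migrator does not manage, hence the raw DDL. The same call in isolation; the connection string is a placeholder:

    use sea_orm::{ConnectionTrait, Database};

    #[tokio::main]
    async fn main() -> Result<(), sea_orm::DbErr> {
        // Placeholder DSN; point this at a real database to run it.
        let db = Database::connect("postgres://user:pass@localhost:5432/db").await?;
        // Raw DDL: the apalis job tables live outside the migrator's control.
        db.execute_unprepared("DROP SCHEMA IF EXISTS apalis CASCADE").await?;
        Ok(())
    }
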
@@ -25,6 +25,8 @@ pub enum RecorderError {
         source: Box<fancy_regex::Error>,
     },
     #[snafu(transparent)]
+    NetAddrParseError { source: std::net::AddrParseError },
+    #[snafu(transparent)]
     RegexError { source: regex::Error },
     #[snafu(transparent)]
     InvalidMethodError { source: http::method::InvalidMethod },
@@ -47,13 +49,15 @@ pub enum RecorderError {
     #[snafu(transparent)]
     RSSError { source: rss::Error },
     #[snafu(transparent)]
-    DotEnvError { source: dotenv::Error },
+    DotEnvError { source: dotenvy::Error },
     #[snafu(transparent)]
     TeraError { source: tera::Error },
     #[snafu(transparent)]
     IOError { source: std::io::Error },
     #[snafu(transparent)]
     DbError { source: sea_orm::DbErr },
+    #[snafu(transparent)]
+    DbSqlxError { source: sea_orm::SqlxError },
     #[snafu(transparent, context(false))]
     FigmentError {
         #[snafu(source(from(figment::Error, Box::new)))]
@@ -74,7 +78,7 @@ pub enum RecorderError {
     },
     #[snafu(transparent)]
     HttpClientError { source: HttpClientError },
-    #[cfg(all(feature = "testcontainers", test))]
+    #[cfg(feature = "testcontainers")]
     #[snafu(transparent)]
     TestcontainersError {
         source: testcontainers::TestcontainersError,
@@ -103,7 +107,7 @@ pub enum RecorderError {
     ModelEntityNotFound { entity: Cow<'static, str> },
     #[snafu(transparent)]
     FetchError { source: FetchError },
-    #[snafu(display("Credential3rdError: {source}"))]
+    #[snafu(display("Credential3rdError: {message}, source = {source}"))]
     Credential3rdError {
         message: String,
         #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
@@ -111,6 +115,8 @@ pub enum RecorderError {
     },
     #[snafu(transparent)]
     CryptoError { source: CryptoError },
+    #[snafu(transparent)]
+    StringFromUtf8Error { source: std::string::FromUtf8Error },
     #[snafu(display("{message}"))]
     Whatever {
         message: String,
@@ -1,7 +1,8 @@
-use std::{fmt::Debug, ops::Deref, sync::Arc};
+use std::{fmt::Debug, ops::Deref};
 
 use fetch::{HttpClient, HttpClientTrait};
 use maplit::hashmap;
+use scraper::{Html, Selector};
 use sea_orm::{
     ActiveModelTrait, ActiveValue::Set, ColumnTrait, DbErr, EntityTrait, QueryFilter, TryIntoModel,
 };
@@ -68,50 +69,44 @@ impl MikanClient {
                 message: "mikan login failed, credential required".to_string(),
                 source: None.into(),
             })?;
 
         let login_page_url = {
             let mut u = self.base_url.join(MIKAN_LOGIN_PAGE_PATH)?;
             u.set_query(Some(MIKAN_LOGIN_PAGE_SEARCH));
             u
         };
 
-        // access login page to get antiforgery cookie
-        self.http_client
-            .get(login_page_url.clone())
-            .send()
-            .await
-            .map_err(|error| RecorderError::Credential3rdError {
-                message: "failed to get mikan login page".to_string(),
-                source: OptDynErr::some_boxed(error),
-            })?;
+        let antiforgery_token = {
+            // access login page to get antiforgery cookie
+            let login_page_html = self
+                .http_client
+                .get(login_page_url.clone())
+                .send()
+                .await
+                .map_err(|error| RecorderError::Credential3rdError {
+                    message: "failed to get mikan login page".to_string(),
+                    source: OptDynErr::some_boxed(error),
+                })?
+                .text()
+                .await?;
 
-        let antiforgery_cookie = {
-            let cookie_store_lock = self.http_client.cookie_store.clone().ok_or_else(|| {
-                RecorderError::Credential3rdError {
-                    message: "failed to get cookie store".to_string(),
+            let login_page_html = Html::parse_document(&login_page_html);
+
+            let antiforgery_selector =
+                Selector::parse("input[name='__RequestVerificationToken']").unwrap();
+
+            login_page_html
+                .select(&antiforgery_selector)
+                .next()
+                .and_then(|element| element.value().attr("value").map(|value| value.to_string()))
+                .ok_or_else(|| RecorderError::Credential3rdError {
+                    message: "mikan login failed, failed to get antiforgery token".to_string(),
                     source: None.into(),
-                }
-            })?;
-            let cookie_store =
-                cookie_store_lock
-                    .read()
-                    .map_err(|_| RecorderError::Credential3rdError {
-                        message: "failed to read cookie store".to_string(),
-                        source: None.into(),
-                    })?;
-
-            cookie_store
-                .matches(&login_page_url)
-                .iter()
-                .find(|cookie| cookie.name().starts_with(".AspNetCore.Antiforgery."))
-                .map(|cookie| cookie.value().to_string())
-        }
-        .ok_or_else(|| RecorderError::Credential3rdError {
-            message: "mikan login failed, failed to get antiforgery cookie".to_string(),
-            source: None.into(),
-        })?;
+                })
+        }?;
 
         let login_post_form = hashmap! {
-            "__RequestVerificationToken".to_string() => antiforgery_cookie,
+            "__RequestVerificationToken".to_string() => antiforgery_token,
             "UserName".to_string() => userpass_credential.username.clone(),
             "Password".to_string() => userpass_credential.password.clone(),
             "RememberMe".to_string() => "true".to_string(),
@@ -141,7 +136,7 @@ impl MikanClient {
 
     pub async fn submit_credential_form(
         &self,
-        ctx: Arc<dyn AppContextTrait>,
+        ctx: &dyn AppContextTrait,
         subscriber_id: i32,
         credential_form: MikanCredentialForm,
     ) -> RecorderResult<credential_3rd::Model> {
@@ -154,7 +149,7 @@ impl MikanClient {
             subscriber_id: Set(subscriber_id),
             ..Default::default()
         }
-        .try_encrypt(ctx.clone())
+        .try_encrypt(ctx)
         .await?;
 
         let credential: credential_3rd::Model = am.save(db).await?.try_into_model()?;
@@ -163,8 +158,9 @@ impl MikanClient {
 
     pub async fn sync_credential_cookies(
         &self,
-        ctx: Arc<dyn AppContextTrait>,
+        ctx: &dyn AppContextTrait,
         credential_id: i32,
+        subscriber_id: i32,
     ) -> RecorderResult<()> {
         let cookies = self.http_client.save_cookie_store_to_json()?;
         if let Some(cookies) = cookies {
@@ -172,26 +168,51 @@ impl MikanClient {
                 cookies: Set(Some(cookies)),
                 ..Default::default()
             }
-            .try_encrypt(ctx.clone())
+            .try_encrypt(ctx)
            .await?;
 
             credential_3rd::Entity::update_many()
                 .set(am)
                 .filter(credential_3rd::Column::Id.eq(credential_id))
+                .filter(credential_3rd::Column::SubscriberId.eq(subscriber_id))
                 .exec(ctx.db())
                 .await?;
         }
         Ok(())
     }
 
-    pub async fn fork_with_credential(
+    pub async fn fork_with_userpass_credential(
         &self,
-        ctx: Arc<dyn AppContextTrait>,
-        credential_id: i32,
+        userpass_credential: UserPassCredential,
     ) -> RecorderResult<Self> {
-        let mut fork = self.http_client.fork();
+        let mut fork = self
+            .http_client
+            .fork()
+            .attach_cookies(userpass_credential.cookies.as_deref())?;
 
-        let credential = credential_3rd::Model::find_by_id(ctx.clone(), credential_id).await?;
+        if let Some(user_agent) = userpass_credential.user_agent.as_ref() {
+            fork = fork.attach_user_agent(user_agent);
+        }
+
+        let userpass_credential_opt = Some(userpass_credential);
+
+        Ok(Self {
+            http_client: HttpClient::from_fork(fork)?,
+            base_url: self.base_url.clone(),
+            origin_url: self.origin_url.clone(),
+            userpass_credential: userpass_credential_opt,
+        })
+    }
+
+    pub async fn fork_with_credential_id(
+        &self,
+        ctx: &dyn AppContextTrait,
+        credential_id: i32,
+        subscriber_id: i32,
+    ) -> RecorderResult<Self> {
+        let credential =
+            credential_3rd::Model::find_by_id_and_subscriber_id(ctx, credential_id, subscriber_id)
+                .await?;
         if let Some(credential) = credential {
             if credential.credential_type != Credential3rdType::Mikan {
                 return Err(RecorderError::Credential3rdError {
@@ -203,20 +224,8 @@ impl MikanClient {
             let userpass_credential: UserPassCredential =
                 credential.try_into_userpass_credential(ctx)?;
 
-            fork = fork.attach_cookies(userpass_credential.cookies.as_deref())?;
-
-            if let Some(user_agent) = userpass_credential.user_agent.as_ref() {
-                fork = fork.attach_user_agent(user_agent);
-            }
-
-            let userpass_credential_opt = Some(userpass_credential);
-
-            Ok(Self {
-                http_client: HttpClient::from_fork(fork)?,
-                base_url: self.base_url.clone(),
-                origin_url: self.origin_url.clone(),
-                userpass_credential: userpass_credential_opt,
-            })
+            self.fork_with_userpass_credential(userpass_credential)
+                .await
         } else {
             Err(RecorderError::from_db_record_not_found(
                 DbErr::RecordNotFound(format!("credential={credential_id} not found")),
@@ -242,3 +251,94 @@ impl Deref for MikanClient {
     }
 
 impl HttpClientTrait for MikanClient {}
+
+#[cfg(test)]
+mod tests {
+    #![allow(unused_variables)]
+    use std::{assert_matches::assert_matches, sync::Arc};
+
+    use rstest::{fixture, rstest};
+    use tracing::Level;
+
+    use super::*;
+    use crate::test_utils::{
+        app::TestingAppContext,
+        crypto::build_testing_crypto_service,
+        database::build_testing_database_service,
+        mikan::{MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential_form},
+        tracing::try_init_testing_tracing,
+    };
+
+    async fn create_testing_context(
+        mikan_base_url: Url,
+    ) -> RecorderResult<Arc<dyn AppContextTrait>> {
+        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
+        let db_service = build_testing_database_service(Default::default()).await?;
+        let crypto_service = build_testing_crypto_service().await?;
+        let ctx = TestingAppContext::builder()
+            .db(db_service)
+            .crypto(crypto_service)
+            .mikan(mikan_client)
+            .build();
+
+        Ok(Arc::new(ctx))
+    }
+
+    #[fixture]
+    fn before_each() {
+        try_init_testing_tracing(Level::DEBUG);
+    }
+
+    #[rstest]
+    #[tokio::test]
+    async fn test_mikan_client_submit_credential_form(before_each: ()) -> RecorderResult<()> {
+        let mut mikan_server = MikanMockServer::new().await?;
+
+        let app_ctx = create_testing_context(mikan_server.base_url().clone()).await?;
+
+        let _login_mock = mikan_server.mock_get_login_page();
+
+        let mikan_client = app_ctx.mikan();
+        let crypto_service = app_ctx.crypto();
+
+        let credential_form = build_testing_mikan_credential_form();
+
+        let subscriber_id = 1;
+
+        let credential_model = mikan_client
+            .submit_credential_form(app_ctx.as_ref(), subscriber_id, credential_form.clone())
+            .await?;
+
+        let expected_username = &credential_form.username;
+        let expected_password = &credential_form.password;
+
+        let found_username = crypto_service
+            .decrypt_string(credential_model.username.as_deref().unwrap_or_default())?;
+        let found_password = crypto_service
+            .decrypt_string(credential_model.password.as_deref().unwrap_or_default())?;
+
+        assert_eq!(&found_username, expected_username);
+        assert_eq!(&found_password, expected_password);
+
+        let has_login = mikan_client.has_login().await?;
+
+        assert!(!has_login);
+
+        assert_matches!(
+            mikan_client.login().await,
+            Err(RecorderError::Credential3rdError { .. })
+        );
+
+        let mikan_client = mikan_client
+            .fork_with_credential_id(app_ctx.as_ref(), credential_model.id, subscriber_id)
+            .await?;
+
+        mikan_client.login().await?;
+
+        let has_login = mikan_client.has_login().await?;
+
+        assert!(has_login);
+
+        Ok(())
+    }
+}
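
Login now scrapes the antiforgery token out of the login page markup instead of reading it back from the cookie store. The extraction step on its own, with an inline HTML snippet standing in for the real page:

    use scraper::{Html, Selector};

    fn main() {
        let html = r#"<form><input name="__RequestVerificationToken" value="tok123"></form>"#;
        let doc = Html::parse_document(html);
        // Same selector as the diff: the hidden antiforgery input by name.
        let selector = Selector::parse("input[name='__RequestVerificationToken']").unwrap();
        let token = doc
            .select(&selector)
            .next()
            .and_then(|el| el.value().attr("value"))
            .map(str::to_string);
        assert_eq!(token.as_deref(), Some("tok123"));
    }
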
@@ -2,7 +2,7 @@ use fetch::HttpClientConfig;
 use serde::{Deserialize, Serialize};
 use url::Url;
 
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct MikanConfig {
     pub http_client: HttpClientConfig,
     pub base_url: Url,
@@ -5,4 +5,15 @@ pub const MIKAN_LOGIN_PAGE_PATH: &str = "/Account/Login";
 pub const MIKAN_LOGIN_PAGE_SEARCH: &str = "ReturnUrl=%2F";
 pub const MIKAN_ACCOUNT_MANAGE_PAGE_PATH: &str = "/Account/Manage";
 pub const MIKAN_SEASON_FLOW_PAGE_PATH: &str = "/Home/BangumiCoverFlow";
+pub const MIKAN_BANGUMI_HOMEPAGE_PATH: &str = "/Home/Bangumi";
 pub const MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH: &str = "/Home/ExpandBangumi";
+pub const MIKAN_EPISODE_HOMEPAGE_PATH: &str = "/Home/Episode";
+pub const MIKAN_BANGUMI_POSTER_PATH: &str = "/images/Bangumi";
+pub const MIKAN_EPISODE_TORRENT_PATH: &str = "/Download";
+pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH: &str = "/RSS/MyBangumi";
+pub const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi";
+pub const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId";
+pub const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid";
+pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY: &str = "token";
+pub const MIKAN_SEASON_STR_QUERY_KEY: &str = "seasonStr";
+pub const MIKAN_YEAR_QUERY_KEY: &str = "year";
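
The new constants make the RSS URL shapes explicit instead of scattering string literals through the builders. How they compose with the url crate; the base URL and ids below are illustrative placeholders:

    use url::Url;

    fn main() -> Result<(), url::ParseError> {
        let base = Url::parse("https://mikanani.me")?;
        let mut rss = base.join("/RSS/Bangumi")?; // MIKAN_BANGUMI_RSS_PATH
        rss.query_pairs_mut()
            .append_pair("bangumiId", "3141") // MIKAN_BANGUMI_ID_QUERY_KEY
            .append_pair("subgroupid", "370"); // MIKAN_FANSUB_ID_QUERY_KEY
        assert_eq!(
            rss.as_str(),
            "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370"
        );
        Ok(())
    }
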
@@ -2,33 +2,38 @@ mod client;
 mod config;
 mod constants;
 mod credential;
-mod rss;
+mod subscription;
 mod web;
 
 pub use client::MikanClient;
 pub use config::MikanConfig;
 pub use constants::{
     MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH,
-    MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH, MIKAN_POSTER_BUCKET_KEY,
-    MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_UNKNOWN_FANSUB_ID, MIKAN_UNKNOWN_FANSUB_NAME,
+    MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH,
+    MIKAN_BANGUMI_RSS_PATH, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_EPISODE_TORRENT_PATH,
+    MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH,
+    MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
+    MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
+    MIKAN_UNKNOWN_FANSUB_ID, MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
 };
 pub use credential::MikanCredentialForm;
-pub use rss::{
-    MikanBangumiIndexRssChannel, MikanBangumiRssChannel, MikanBangumiRssUrlMeta, MikanRssChannel,
-    MikanRssItem, MikanSubscriberAggregationRssUrlMeta, MikanSubscriberStreamRssChannel,
-    build_mikan_bangumi_rss_url, build_mikan_subscriber_aggregation_rss_url,
-    extract_mikan_bangumi_id_from_rss_url, extract_mikan_rss_channel_from_rss_link,
-    extract_mikan_subscriber_aggregation_id_from_rss_link,
+pub use subscription::{
+    MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
 };
 pub use web::{
-    MikanBangumiHomepageUrlMeta, MikanBangumiIndexHomepageUrlMeta, MikanBangumiIndexMeta,
-    MikanBangumiMeta, MikanBangumiPosterMeta, MikanEpisodeHomepageUrlMeta, MikanEpisodeMeta,
-    MikanSeasonFlowUrlMeta, MikanSeasonStr, build_mikan_bangumi_expand_subscribed_url,
-    build_mikan_bangumi_homepage_url, build_mikan_episode_homepage_url,
-    build_mikan_season_flow_url, extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
+    MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
+    MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssEpisodeItem,
+    MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionRssUrlMeta,
+    build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url,
+    build_mikan_bangumi_subscription_rss_url, build_mikan_episode_homepage_url,
+    build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
+    extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
+    extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
     extract_mikan_episode_meta_from_episode_homepage_html,
+    scrape_mikan_bangumi_index_meta_from_bangumi_homepage_url,
     scrape_mikan_bangumi_meta_from_bangumi_homepage_url,
     scrape_mikan_bangumi_meta_list_from_season_flow_url,
+    scrape_mikan_bangumi_meta_stream_from_season_flow_url,
     scrape_mikan_episode_meta_from_episode_homepage_url, scrape_mikan_poster_data_from_image_url,
     scrape_mikan_poster_meta_from_image_url,
 };
@ -1,414 +0,0 @@
|
|||||||
use std::borrow::Cow;
|
|
||||||
|
|
||||||
use chrono::DateTime;
|
|
||||||
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
|
|
||||||
use fetch::{FetchError, IntoUrl, bytes::fetch_bytes};
|
|
||||||
use itertools::Itertools;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use tracing::instrument;
|
|
||||||
use url::Url;
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
errors::app_error::{RecorderError, RecorderResult},
|
|
||||||
extract::mikan::{MikanClient, MikanEpisodeHomepageUrlMeta},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
|
||||||
pub struct MikanRssItem {
|
|
||||||
pub title: String,
|
|
||||||
pub homepage: Url,
|
|
||||||
pub url: Url,
|
|
||||||
pub content_length: Option<u64>,
|
|
||||||
pub mime: String,
|
|
||||||
pub pub_date: Option<i64>,
|
|
||||||
pub mikan_episode_id: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
|
||||||
pub struct MikanBangumiRssChannel {
|
|
||||||
pub name: String,
|
|
||||||
pub url: Url,
|
|
||||||
pub mikan_bangumi_id: String,
|
|
||||||
pub mikan_fansub_id: String,
|
|
||||||
pub items: Vec<MikanRssItem>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
|
||||||
pub struct MikanBangumiIndexRssChannel {
|
|
||||||
pub name: String,
|
|
||||||
pub url: Url,
|
|
||||||
pub mikan_bangumi_id: String,
|
|
||||||
pub items: Vec<MikanRssItem>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
|
||||||
pub struct MikanSubscriberStreamRssChannel {
|
|
||||||
pub mikan_aggregation_id: String,
|
|
||||||
pub url: Url,
|
|
||||||
pub items: Vec<MikanRssItem>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
|
||||||
pub enum MikanRssChannel {
|
|
||||||
Bangumi(MikanBangumiRssChannel),
|
|
||||||
BangumiIndex(MikanBangumiIndexRssChannel),
|
|
||||||
SubscriberStream(MikanSubscriberStreamRssChannel),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl MikanRssChannel {
|
|
||||||
pub fn items(&self) -> &[MikanRssItem] {
|
|
||||||
match &self {
|
|
||||||
Self::Bangumi(MikanBangumiRssChannel { items, .. })
|
|
||||||
| Self::BangumiIndex(MikanBangumiIndexRssChannel { items, .. })
|
|
||||||
| Self::SubscriberStream(MikanSubscriberStreamRssChannel { items, .. }) => items,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn into_items(self) -> Vec<MikanRssItem> {
|
|
||||||
match self {
|
|
||||||
Self::Bangumi(MikanBangumiRssChannel { items, .. })
|
|
||||||
| Self::BangumiIndex(MikanBangumiIndexRssChannel { items, .. })
|
|
||||||
| Self::SubscriberStream(MikanSubscriberStreamRssChannel { items, .. }) => items,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn name(&self) -> Option<&str> {
|
|
||||||
match &self {
|
|
||||||
Self::Bangumi(MikanBangumiRssChannel { name, .. })
|
|
||||||
| Self::BangumiIndex(MikanBangumiIndexRssChannel { name, .. }) => Some(name.as_str()),
|
|
||||||
Self::SubscriberStream(MikanSubscriberStreamRssChannel { .. }) => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn url(&self) -> &Url {
|
|
||||||
match &self {
|
|
||||||
Self::Bangumi(MikanBangumiRssChannel { url, .. })
|
|
||||||
| Self::BangumiIndex(MikanBangumiIndexRssChannel { url, .. })
|
|
||||||
| Self::SubscriberStream(MikanSubscriberStreamRssChannel { url, .. }) => url,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<rss::Item> for MikanRssItem {
|
|
||||||
type Error = RecorderError;
|
|
||||||
|
|
||||||
fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
|
|
||||||
let enclosure = item.enclosure.ok_or_else(|| {
|
|
||||||
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure"))
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let mime_type = enclosure.mime_type;
|
|
||||||
if mime_type != BITTORRENT_MIME_TYPE {
|
|
||||||
return Err(RecorderError::MimeError {
|
|
||||||
expected: String::from(BITTORRENT_MIME_TYPE),
|
|
||||||
found: mime_type.to_string(),
|
|
||||||
desc: String::from("MikanRssItem"),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
let title = item.title.ok_or_else(|| {
|
|
||||||
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title"))
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
|
|
||||||
RecorderError::from_mikan_rss_invalid_field_and_source(
|
|
||||||
"enclosure_url:enclosure.link".into(),
|
|
||||||
err,
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let homepage = item
|
|
||||||
.link
|
|
||||||
.and_then(|link| Url::parse(&link).ok())
|
|
||||||
.ok_or_else(|| {
|
|
||||||
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link"))
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let MikanEpisodeHomepageUrlMeta {
|
|
||||||
mikan_episode_id, ..
|
|
||||||
} = MikanEpisodeHomepageUrlMeta::parse_url(&homepage).ok_or_else(|| {
|
|
||||||
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
|
|
||||||
})?;
|
|
||||||
|
|
||||||
Ok(MikanRssItem {
|
|
||||||
title,
|
|
||||||
homepage,
|
|
||||||
url: enclosure_url,
|
|
||||||
content_length: enclosure.length.parse().ok(),
|
|
||||||
mime: mime_type,
|
|
||||||
pub_date: item
|
|
||||||
.pub_date
|
|
||||||
.and_then(|s| DateTime::parse_from_rfc2822(&s).ok())
|
|
||||||
.map(|s| s.timestamp_millis()),
|
|
||||||
mikan_episode_id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct MikanBangumiRssUrlMeta {
|
|
||||||
pub mikan_bangumi_id: String,
|
|
||||||
pub mikan_fansub_id: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct MikanSubscriberAggregationRssUrlMeta {
|
|
||||||
pub mikan_aggregation_id: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn build_mikan_bangumi_rss_url(
|
|
||||||
mikan_base_url: impl IntoUrl,
|
|
||||||
mikan_bangumi_id: &str,
|
|
||||||
mikan_fansub_id: Option<&str>,
|
|
||||||
) -> RecorderResult<Url> {
|
|
||||||
let mut url = mikan_base_url.into_url().map_err(FetchError::from)?;
|
|
||||||
url.set_path("/RSS/Bangumi");
|
|
||||||
url.query_pairs_mut()
|
|
||||||
.append_pair("bangumiId", mikan_bangumi_id);
|
|
||||||
if let Some(mikan_fansub_id) = mikan_fansub_id {
|
|
||||||
url.query_pairs_mut()
|
|
||||||
.append_pair("subgroupid", mikan_fansub_id);
|
|
||||||
};
|
|
||||||
Ok(url)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn build_mikan_subscriber_aggregation_rss_url(
|
|
||||||
mikan_base_url: &str,
|
|
||||||
mikan_aggregation_id: &str,
|
|
||||||
) -> RecorderResult<Url> {
|
|
||||||
let mut url = Url::parse(mikan_base_url)?;
|
|
||||||
url.set_path("/RSS/MyBangumi");
|
|
||||||
url.query_pairs_mut()
|
|
||||||
.append_pair("token", mikan_aggregation_id);
|
|
||||||
Ok(url)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn extract_mikan_bangumi_id_from_rss_url(url: &Url) -> Option<MikanBangumiRssUrlMeta> {
    if url.path() == "/RSS/Bangumi" {
        url.query_pairs()
            .find(|(k, _)| k == "bangumiId")
            .map(|(_, v)| MikanBangumiRssUrlMeta {
                mikan_bangumi_id: v.to_string(),
                mikan_fansub_id: url
                    .query_pairs()
                    .find(|(k, _)| k == "subgroupid")
                    .map(|(_, v)| v.to_string()),
            })
    } else {
        None
    }
}

pub fn extract_mikan_subscriber_aggregation_id_from_rss_link(
    url: &Url,
) -> Option<MikanSubscriberAggregationRssUrlMeta> {
    if url.path() == "/RSS/MyBangumi" {
        url.query_pairs().find(|(k, _)| k == "token").map(|(_, v)| {
            MikanSubscriberAggregationRssUrlMeta {
                mikan_aggregation_id: v.to_string(),
            }
        })
    } else {
        None
    }
}

#[instrument(skip_all, fields(channel_rss_link = channel_rss_link.as_str()))]
pub async fn extract_mikan_rss_channel_from_rss_link(
    http_client: &MikanClient,
    channel_rss_link: impl IntoUrl,
) -> RecorderResult<MikanRssChannel> {
    let bytes = fetch_bytes(http_client, channel_rss_link.as_str()).await?;

    let channel = rss::Channel::read_from(&bytes[..])?;

    let channel_link = Url::parse(channel.link())?;

    if let Some(MikanBangumiRssUrlMeta {
        mikan_bangumi_id,
        mikan_fansub_id,
    }) = extract_mikan_bangumi_id_from_rss_url(&channel_link)
    {
        tracing::trace!(
            mikan_bangumi_id,
            mikan_fansub_id,
            "MikanBangumiRssLink extracting..."
        );

        let channel_name = channel.title().replace("Mikan Project - ", "");

        let items = channel
            .items
            .into_iter()
            .enumerate()
            .flat_map(|(idx, item)| {
                MikanRssItem::try_from(item).inspect_err(
                    |error| tracing::warn!(error = %error, "failed to extract rss item idx = {}", idx),
                )
            })
            .collect_vec();

        if let Some(mikan_fansub_id) = mikan_fansub_id {
            tracing::trace!(
                channel_name,
                channel_link = channel_link.as_str(),
                mikan_bangumi_id,
                mikan_fansub_id,
                "MikanBangumiRssChannel extracted"
            );

            Ok(MikanRssChannel::Bangumi(MikanBangumiRssChannel {
                name: channel_name,
                mikan_bangumi_id,
                mikan_fansub_id,
                url: channel_link,
                items,
            }))
        } else {
            tracing::trace!(
                channel_name,
                channel_link = channel_link.as_str(),
                mikan_bangumi_id,
                "MikanBangumiIndexRssChannel extracted"
            );

            Ok(MikanRssChannel::BangumiIndex(MikanBangumiIndexRssChannel {
                name: channel_name,
                mikan_bangumi_id,
                url: channel_link,
                items,
            }))
        }
    } else if let Some(MikanSubscriberAggregationRssUrlMeta {
        mikan_aggregation_id,
        ..
    }) = extract_mikan_subscriber_aggregation_id_from_rss_link(&channel_link)
    {
        tracing::trace!(
            mikan_aggregation_id,
            "MikanSubscriberAggregationRssLink extracting..."
        );

        let items = channel
            .items
            .into_iter()
            .enumerate()
            .flat_map(|(idx, item)| {
                MikanRssItem::try_from(item).inspect_err(
                    |error| tracing::warn!(error = %error, "failed to extract rss item idx = {}", idx),
                )
            })
            .collect_vec();

        tracing::trace!(
            channel_link = channel_link.as_str(),
            mikan_aggregation_id,
            "MikanSubscriberAggregationRssChannel extracted"
        );

        Ok(MikanRssChannel::SubscriberStream(
            MikanSubscriberStreamRssChannel {
                mikan_aggregation_id,
                items,
                url: channel_link,
            },
        ))
    } else {
        Err(RecorderError::MikanRssInvalidFormatError).inspect_err(|error| {
            tracing::warn!(error = %error);
        })
    }
}

#[cfg(test)]
mod tests {
    use std::assert_matches::assert_matches;

    use downloader::bittorrent::BITTORRENT_MIME_TYPE;
    use rstest::rstest;
    use url::Url;

    use crate::{
        errors::RecorderResult,
        extract::mikan::{
            MikanBangumiIndexRssChannel, MikanBangumiRssChannel, MikanRssChannel,
            extract_mikan_rss_channel_from_rss_link,
        },
        test_utils::mikan::build_testing_mikan_client,
    };

    #[rstest]
    #[tokio::test]
    async fn test_parse_mikan_rss_channel_from_rss_link() -> RecorderResult<()> {
        let mut mikan_server = mockito::Server::new_async().await;

        let mikan_base_url = Url::parse(&mikan_server.url())?;

        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;

        {
            let bangumi_rss_url =
                mikan_base_url.join("/RSS/Bangumi?bangumiId=3141&subgroupid=370")?;
            let bangumi_rss_mock = mikan_server
                .mock("GET", bangumi_rss_url.path())
                .with_body_from_file("tests/resources/mikan/Bangumi-3141-370.rss")
                .match_query(mockito::Matcher::Any)
                .create_async()
                .await;

            let channel = extract_mikan_rss_channel_from_rss_link(&mikan_client, bangumi_rss_url)
                .await
                .expect("should get mikan channel from rss url");

            assert_matches!(
                &channel,
                MikanRssChannel::Bangumi(MikanBangumiRssChannel { .. })
            );

            assert_matches!(&channel.name(), Some("葬送的芙莉莲"));

            let items = channel.items();
            let first_sub_item = items
                .first()
                .expect("mikan subscriptions should have at least one subs");

            assert_eq!(first_sub_item.mime, BITTORRENT_MIME_TYPE);

            assert!(
                first_sub_item
                    .homepage
                    .as_str()
                    .starts_with("https://mikanani.me/Home/Episode")
            );

            let name = first_sub_item.title.as_str();
            assert!(name.contains("葬送的芙莉莲"));

            bangumi_rss_mock.expect(1);
        }
        {
            let bangumi_rss_url = mikan_base_url.join("/RSS/Bangumi?bangumiId=3416")?;

            let bangumi_rss_mock = mikan_server
                .mock("GET", bangumi_rss_url.path())
                .match_query(mockito::Matcher::Any)
                .with_body_from_file("tests/resources/mikan/Bangumi-3416.rss")
                .create_async()
                .await;

            let channel = extract_mikan_rss_channel_from_rss_link(&mikan_client, bangumi_rss_url)
                .await
                .expect("should get mikan channel from rss url");

            assert_matches!(
                &channel,
                MikanRssChannel::BangumiIndex(MikanBangumiIndexRssChannel { .. })
            );

            assert_matches!(&channel.name(), Some("叹气的亡灵想隐退"));

            bangumi_rss_mock.expect(1);
        }
        Ok(())
    }
}
777
apps/recorder/src/extract/mikan/subscription.rs
Normal file
@ -0,0 +1,777 @@
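//! Mikan subscription sources. Three kinds are modeled below: a
//! subscriber-token RSS feed (`MikanSubscriberSubscription`), a season flow
//! scraped with a credential (`MikanSeasonSubscription`), and a single-bangumi
//! feed (`MikanBangumiSubscription`). All of them funnel their RSS items into
//! `sync_mikan_feeds_from_rss_item_list`, which scrapes metadata for unseen
//! episodes and links already-known ones to the subscription.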
use std::{
    collections::{HashMap, HashSet},
    fmt::Debug,
    sync::Arc,
};

use async_graphql::{InputObject, SimpleObject};
use fetch::fetch_bytes;
use futures::{Stream, TryStreamExt, pin_mut, try_join};
use maplit::hashmap;
use sea_orm::{
    ColumnTrait, Condition, EntityTrait, JoinType, QueryFilter, QuerySelect, RelationTrait,
};
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, ResultExt};
use url::Url;

use super::scrape_mikan_bangumi_meta_stream_from_season_flow_url;
use crate::{
    app::AppContextTrait,
    errors::{RecorderError, RecorderResult},
    extract::mikan::{
        MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
        MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
        MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
        build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
        scrape_mikan_episode_meta_from_episode_homepage_url,
    },
    models::{
        bangumi, episodes, subscription_bangumi, subscription_episode,
        subscriptions::{self, SubscriptionTrait},
    },
};

#[tracing::instrument(err, skip(ctx, rss_item_list))]
async fn sync_mikan_feeds_from_rss_item_list(
    ctx: &dyn AppContextTrait,
    rss_item_list: Vec<MikanRssEpisodeItem>,
    subscriber_id: i32,
    subscription_id: i32,
) -> RecorderResult<()> {
    let mikan_base_url = ctx.mikan().base_url().clone();
    let (new_episode_meta_list, existed_episode_hash2id_map) = {
        let existed_episode_hash2id_map = episodes::Model::get_existed_mikan_episode_list(
            ctx,
            rss_item_list.iter().map(|s| MikanEpisodeHash {
                mikan_episode_id: s.mikan_episode_id.clone(),
            }),
            subscriber_id,
            subscription_id,
        )
        .await?
        .map(|(episode_id, hash, bangumi_id)| (hash.mikan_episode_id, (episode_id, bangumi_id)))
        .collect::<HashMap<_, _>>();

        let mut new_episode_meta_list: Vec<MikanEpisodeMeta> = vec![];

        let mikan_client = ctx.mikan();
        for to_insert_rss_item in rss_item_list.into_iter().filter(|rss_item| {
            !existed_episode_hash2id_map.contains_key(&rss_item.mikan_episode_id)
        }) {
            let episode_meta = scrape_mikan_episode_meta_from_episode_homepage_url(
                mikan_client,
                to_insert_rss_item.build_homepage_url(mikan_base_url.clone()),
            )
            .await?;
            new_episode_meta_list.push(episode_meta);
        }

        (new_episode_meta_list, existed_episode_hash2id_map)
    };

    // Subscribe episodes and bangumi that already exist but are not yet linked
    // to this subscription.
    let (existed_episode_id_list, existed_episode_bangumi_id_set): (Vec<i32>, HashSet<i32>) =
        existed_episode_hash2id_map.into_values().unzip();

    try_join!(
        subscription_episode::Model::add_episodes_for_subscription(
            ctx,
            existed_episode_id_list.into_iter(),
            subscriber_id,
            subscription_id,
        ),
        subscription_bangumi::Model::add_bangumis_for_subscription(
            ctx,
            existed_episode_bangumi_id_set.into_iter(),
            subscriber_id,
            subscription_id,
        ),
    )?;

    let new_episode_meta_list_group_by_bangumi_hash: HashMap<
        MikanBangumiHash,
        Vec<MikanEpisodeMeta>,
    > = {
        let mut m = hashmap! {};
        for episode_meta in new_episode_meta_list {
            let bangumi_hash = episode_meta.bangumi_hash();

            m.entry(bangumi_hash)
                .or_insert_with(Vec::new)
                .push(episode_meta);
        }
        m
    };

    for (group_bangumi_hash, group_episode_meta_list) in new_episode_meta_list_group_by_bangumi_hash
    {
        let first_episode_meta = group_episode_meta_list.first().unwrap();
        let group_bangumi_model = bangumi::Model::get_or_insert_from_mikan(
            ctx,
            group_bangumi_hash,
            subscriber_id,
            subscription_id,
            async || {
                let bangumi_meta: MikanBangumiMeta = first_episode_meta.clone().into();
                let bangumi_am = bangumi::ActiveModel::from_mikan_bangumi_meta(
                    ctx,
                    bangumi_meta,
                    subscriber_id,
                    subscription_id,
                )
                .await?;
                Ok(bangumi_am)
            },
        )
        .await?;
        let group_episode_creation_list = group_episode_meta_list
            .into_iter()
            .map(|episode_meta| (&group_bangumi_model, episode_meta));

        episodes::Model::add_mikan_episodes_for_subscription(
            ctx,
            group_episode_creation_list.into_iter(),
            subscriber_id,
            subscription_id,
        )
        .await?;
    }
    Ok(())
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriberSubscription {
    pub subscription_id: i32,
    pub mikan_subscription_token: String,
    pub subscriber_id: i32,
}

#[async_trait::async_trait]
impl SubscriptionTrait for MikanSubscriberSubscription {
    fn get_subscriber_id(&self) -> i32 {
        self.subscriber_id
    }

    fn get_subscription_id(&self) -> i32 {
        self.subscription_id
    }

    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        let rss_item_list = self.get_rss_item_list_from_source_url(ctx.as_ref()).await?;

        sync_mikan_feeds_from_rss_item_list(
            ctx.as_ref(),
            rss_item_list,
            self.get_subscriber_id(),
            self.get_subscription_id(),
        )
        .await?;

        Ok(())
    }

    async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        self.sync_feeds_incremental(ctx.clone()).await?;

        let rss_item_list = self
            .get_rss_item_list_from_subscribed_url_rss_link(ctx.as_ref())
            .await?;

        sync_mikan_feeds_from_rss_item_list(
            ctx.as_ref(),
            rss_item_list,
            self.get_subscriber_id(),
            self.get_subscription_id(),
        )
        .await
    }

    async fn sync_sources(&self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        Ok(())
    }

    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
        let source_url = Url::parse(&model.source_url)?;

        let meta = MikanSubscriberSubscriptionRssUrlMeta::from_rss_url(&source_url)
            .with_whatever_context::<_, String, RecorderError>(|| {
                format!(
                    "MikanSubscriberSubscription should extract mikan_subscription_token from \
                     source_url = {}, subscription_id = {}",
                    source_url, model.id
                )
            })?;

        Ok(Self {
            subscription_id: model.id,
            mikan_subscription_token: meta.mikan_subscription_token,
            subscriber_id: model.subscriber_id,
        })
    }
}

impl MikanSubscriberSubscription {
    #[tracing::instrument(err, skip(ctx))]
    async fn get_rss_item_list_from_source_url(
        &self,
        ctx: &dyn AppContextTrait,
    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
        let mikan_base_url = ctx.mikan().base_url().clone();
        let rss_url = build_mikan_subscriber_subscription_rss_url(
            mikan_base_url.clone(),
            &self.mikan_subscription_token,
        );
        let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;

        let channel = rss::Channel::read_from(&bytes[..])?;

        let mut result = vec![];
        for (idx, item) in channel.items.into_iter().enumerate() {
            let item = MikanRssEpisodeItem::try_from(item)
                .with_whatever_context::<_, String, RecorderError>(|_| {
                    format!("failed to extract rss item at idx {idx}")
                })?;
            result.push(item);
        }
        Ok(result)
    }

    #[tracing::instrument(err, skip(ctx))]
    async fn get_rss_item_list_from_subscribed_url_rss_link(
        &self,
        ctx: &dyn AppContextTrait,
    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
        let subscribed_bangumi_list =
            bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.subscription_id)
                .await?;

        let mut rss_item_list = vec![];
        for subscribed_bangumi in subscribed_bangumi_list {
            let rss_url = subscribed_bangumi
                .rss_link
                .with_whatever_context::<_, String, RecorderError>(|| {
                    format!(
                        "rss link is required, subscription_id = {:?}, bangumi_name = {}",
                        self.subscription_id, subscribed_bangumi.display_name
                    )
                })?;
            let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;

            let channel = rss::Channel::read_from(&bytes[..])?;

            for (idx, item) in channel.items.into_iter().enumerate() {
                let item = MikanRssEpisodeItem::try_from(item)
                    .with_whatever_context::<_, String, RecorderError>(|_| {
                        format!("failed to extract rss item at idx {idx}")
                    })?;
                rss_item_list.push(item);
            }
        }
        Ok(rss_item_list)
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)]
pub struct MikanSeasonSubscription {
    pub subscription_id: i32,
    pub year: i32,
    pub season_str: MikanSeasonStr,
    pub credential_id: i32,
    pub subscriber_id: i32,
}

#[async_trait::async_trait]
impl SubscriptionTrait for MikanSeasonSubscription {
    fn get_subscriber_id(&self) -> i32 {
        self.subscriber_id
    }

    fn get_subscription_id(&self) -> i32 {
        self.subscription_id
    }

    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        let rss_item_list = self
            .get_rss_item_list_from_subscribed_url_rss_link(ctx.as_ref())
            .await?;

        sync_mikan_feeds_from_rss_item_list(
            ctx.as_ref(),
            rss_item_list,
            self.get_subscriber_id(),
            self.get_subscription_id(),
        )
        .await?;

        Ok(())
    }

    async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        self.sync_sources(ctx.clone()).await?;
        self.sync_feeds_incremental(ctx).await
    }

    async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        let bangumi_meta_list = self.get_bangumi_meta_stream_from_source_url(ctx.clone());

        pin_mut!(bangumi_meta_list);

        while let Some(bangumi_meta) = bangumi_meta_list.try_next().await? {
            let bangumi_hash = bangumi_meta.bangumi_hash();
            bangumi::Model::get_or_insert_from_mikan(
                ctx.as_ref(),
                bangumi_hash,
                self.get_subscriber_id(),
                self.get_subscription_id(),
                async || {
                    let bangumi_am = bangumi::ActiveModel::from_mikan_bangumi_meta(
                        ctx.as_ref(),
                        bangumi_meta,
                        self.get_subscriber_id(),
                        self.get_subscription_id(),
                    )
                    .await?;
                    Ok(bangumi_am)
                },
            )
            .await?;
        }

        Ok(())
    }

    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
        let source_url = Url::parse(&model.source_url)?;

        let source_url_meta = MikanSeasonFlowUrlMeta::from_url(&source_url)
            .with_whatever_context::<_, String, RecorderError>(|| {
                format!(
                    "season_str and year are required when extracting MikanSeasonSubscription \
                     from source_url, source_url = {}, subscription_id = {}",
                    source_url, model.id
                )
            })?;

        let credential_id = model
            .credential_id
            .with_whatever_context::<_, String, RecorderError>(|| {
                format!(
                    "credential_id is required when extracting MikanSeasonSubscription, \
                     subscription_id = {}",
                    model.id
                )
            })?;

        Ok(Self {
            subscription_id: model.id,
            year: source_url_meta.year,
            season_str: source_url_meta.season_str,
            credential_id,
            subscriber_id: model.subscriber_id,
        })
    }
}

impl MikanSeasonSubscription {
    pub fn get_bangumi_meta_stream_from_source_url(
        &self,
        ctx: Arc<dyn AppContextTrait>,
    ) -> impl Stream<Item = RecorderResult<MikanBangumiMeta>> {
        let credential_id = self.credential_id;
        let year = self.year;
        let season_str = self.season_str;

        let mikan_base_url = ctx.mikan().base_url().clone();
        let mikan_season_flow_url = build_mikan_season_flow_url(mikan_base_url, year, season_str);

        scrape_mikan_bangumi_meta_stream_from_season_flow_url(
            ctx,
            mikan_season_flow_url,
            credential_id,
            self.get_subscriber_id(),
        )
    }

    #[tracing::instrument(err, skip(ctx))]
    async fn get_rss_item_list_from_subscribed_url_rss_link(
        &self,
        ctx: &dyn AppContextTrait,
    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
        let db = ctx.db();

        let subscribed_bangumi_list = bangumi::Entity::find()
            .filter(
                Condition::all()
                    .add(subscription_bangumi::Column::SubscriptionId.eq(self.subscription_id)),
            )
            .join_rev(
                JoinType::InnerJoin,
                subscription_bangumi::Relation::Bangumi.def(),
            )
            .all(db)
            .await?;

        let mut rss_item_list = vec![];
        for subscribed_bangumi in subscribed_bangumi_list {
            let rss_url = subscribed_bangumi
                .rss_link
                .with_whatever_context::<_, String, RecorderError>(|| {
                    format!(
                        "rss_link is required, subscription_id = {}, bangumi_name = {}",
                        self.subscription_id, subscribed_bangumi.display_name
                    )
                })?;
            let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;

            let channel = rss::Channel::read_from(&bytes[..])?;

            for (idx, item) in channel.items.into_iter().enumerate() {
                let item = MikanRssEpisodeItem::try_from(item)
                    .with_whatever_context::<_, String, RecorderError>(|_| {
                        format!("failed to extract rss item at idx {idx}")
                    })?;
                rss_item_list.push(item);
            }
        }
        Ok(rss_item_list)
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)]
pub struct MikanBangumiSubscription {
    pub subscription_id: i32,
    pub mikan_bangumi_id: String,
    pub mikan_fansub_id: String,
    pub subscriber_id: i32,
}

#[async_trait::async_trait]
impl SubscriptionTrait for MikanBangumiSubscription {
    fn get_subscriber_id(&self) -> i32 {
        self.subscriber_id
    }

    fn get_subscription_id(&self) -> i32 {
        self.subscription_id
    }

    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        let rss_item_list = self.get_rss_item_list_from_source_url(ctx.as_ref()).await?;

        sync_mikan_feeds_from_rss_item_list(
            ctx.as_ref(),
            rss_item_list,
            self.get_subscriber_id(),
            self.get_subscription_id(),
        )
        .await?;

        Ok(())
    }

    async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        self.sync_feeds_incremental(ctx).await
    }

    async fn sync_sources(&self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        Ok(())
    }

    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
        let source_url = Url::parse(&model.source_url)?;

        let meta = MikanBangumiHash::from_rss_url(&source_url)
            .with_whatever_context::<_, String, RecorderError>(|| {
                format!(
                    "bangumi_id and fansub_id are required when extracting \
                     MikanBangumiSubscription, source_url = {}, subscription_id = {}",
                    source_url, model.id
                )
            })?;

        Ok(Self {
            subscription_id: model.id,
            mikan_bangumi_id: meta.mikan_bangumi_id,
            mikan_fansub_id: meta.mikan_fansub_id,
            subscriber_id: model.subscriber_id,
        })
    }
}

impl MikanBangumiSubscription {
    #[tracing::instrument(err, skip(ctx))]
    async fn get_rss_item_list_from_source_url(
        &self,
        ctx: &dyn AppContextTrait,
    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
        let mikan_base_url = ctx.mikan().base_url().clone();
        let rss_url = build_mikan_bangumi_subscription_rss_url(
            mikan_base_url.clone(),
            &self.mikan_bangumi_id,
            Some(&self.mikan_fansub_id),
        );
        let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;

        let channel = rss::Channel::read_from(&bytes[..])?;

        let mut result = vec![];
        for (idx, item) in channel.items.into_iter().enumerate() {
            let item = MikanRssEpisodeItem::try_from(item)
                .with_whatever_context::<_, String, RecorderError>(|_| {
                    format!("failed to extract rss item at idx {idx}")
                })?;
            result.push(item);
        }
        Ok(result)
    }
}

#[cfg(test)]
#[allow(unused_variables)]
mod tests {
    use std::sync::Arc;

    use rstest::{fixture, rstest};
    use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait};
    use tracing::Level;

    use crate::{
        app::AppContextTrait,
        errors::RecorderResult,
        extract::mikan::{
            MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
            MikanSubscriberSubscriptionRssUrlMeta,
        },
        models::{
            bangumi, episodes,
            subscriptions::{self, SubscriptionTrait},
        },
        test_utils::{
            app::TestingAppContext,
            crypto::build_testing_crypto_service,
            database::build_testing_database_service,
            mikan::{
                MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential_form,
            },
            storage::build_testing_storage_service,
            tracing::try_init_testing_tracing,
        },
    };

    struct TestingResources {
        pub app_ctx: Arc<dyn AppContextTrait>,
        pub mikan_server: MikanMockServer,
    }

    async fn build_testing_app_context() -> RecorderResult<TestingResources> {
        let mikan_server = MikanMockServer::new().await?;

        let mikan_base_url = mikan_server.base_url().clone();

        let app_ctx = {
            let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
            let db_service = build_testing_database_service(Default::default()).await?;
            let crypto_service = build_testing_crypto_service().await?;
            let storage_service = build_testing_storage_service().await?;
            let app_ctx = TestingAppContext::builder()
                .mikan(mikan_client)
                .db(db_service)
                .crypto(crypto_service)
                .storage(storage_service)
                .build();

            Arc::new(app_ctx)
        };

        Ok(TestingResources {
            app_ctx,
            mikan_server,
        })
    }

    #[fixture]
    fn before_each() {
        try_init_testing_tracing(Level::DEBUG);
    }

    #[rstest]
    #[tokio::test]
    async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
        let TestingResources {
            app_ctx,
            mut mikan_server,
        } = build_testing_app_context().await?;

        let _resources_mock = mikan_server.mock_resources_with_doppel();

        let _login_mock = mikan_server.mock_get_login_page();

        let mikan_client = app_ctx.mikan();

        let subscriber_id = 1;

        let credential = mikan_client
            .submit_credential_form(
                app_ctx.as_ref(),
                subscriber_id,
                build_testing_mikan_credential_form(),
            )
            .await?;

        let subscription_am = subscriptions::ActiveModel {
            display_name: ActiveValue::Set("test subscription".to_string()),
            subscriber_id: ActiveValue::Set(subscriber_id),
            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSeason),
            source_url: ActiveValue::Set(
                MikanSeasonFlowUrlMeta {
                    year: 2025,
                    season_str: MikanSeasonStr::Spring,
                }
                .build_season_flow_url(mikan_server.base_url().clone())
                .to_string(),
            ),
            enabled: ActiveValue::Set(true),
            credential_id: ActiveValue::Set(Some(credential.id)),
            ..Default::default()
        };

        let subscription_model = subscription_am.insert(app_ctx.db()).await?;

        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;

        {
            subscription.sync_feeds_incremental(app_ctx.clone()).await?;
            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(bangumi_list.is_empty());
        }

        {
            subscription.sync_feeds_full(app_ctx.clone()).await?;
            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(!bangumi_list.is_empty());
        }

        Ok(())
    }

    #[rstest]
    #[tokio::test]
    async fn test_mikan_subscriber_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
        let TestingResources {
            app_ctx,
            mut mikan_server,
        } = build_testing_app_context().await?;

        let _resources_mock = mikan_server.mock_resources_with_doppel();

        let _login_mock = mikan_server.mock_get_login_page();

        let subscriber_id = 1;

        let subscription_am = subscriptions::ActiveModel {
            display_name: ActiveValue::Set("test subscription".to_string()),
            subscriber_id: ActiveValue::Set(subscriber_id),
            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSubscriber),
            source_url: ActiveValue::Set(
                MikanSubscriberSubscriptionRssUrlMeta {
                    mikan_subscription_token: "test".into(),
                }
                .build_rss_url(mikan_server.base_url().clone())
                .to_string(),
            ),
            enabled: ActiveValue::Set(true),
            ..Default::default()
        };

        let subscription_model = subscription_am.insert(app_ctx.db()).await?;

        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;

        let (incremental_bangumi_list, incremental_episode_list) = {
            subscription.sync_feeds_incremental(app_ctx.clone()).await?;

            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(!bangumi_list.is_empty());

            let episode_list = episodes::Entity::find().all(app_ctx.db()).await?;

            assert!(!episode_list.is_empty());

            (bangumi_list, episode_list)
        };

        let (full_bangumi_list, full_episode_list) = {
            subscription.sync_feeds_full(app_ctx.clone()).await?;

            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(!bangumi_list.is_empty());

            let episode_list = episodes::Entity::find().all(app_ctx.db()).await?;

            assert!(!episode_list.is_empty());

            (bangumi_list, episode_list)
        };

        assert_eq!(incremental_bangumi_list.len(), full_bangumi_list.len());
        assert!(incremental_episode_list.len() < full_episode_list.len());

        Ok(())
    }

    #[rstest]
    #[tokio::test]
    async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
        let TestingResources {
            app_ctx,
            mut mikan_server,
        } = build_testing_app_context().await?;

        let _resources_mock = mikan_server.mock_resources_with_doppel();

        let _login_mock = mikan_server.mock_get_login_page();

        let subscriber_id = 1;

        let subscription_am = subscriptions::ActiveModel {
            display_name: ActiveValue::Set("test subscription".to_string()),
            subscriber_id: ActiveValue::Set(subscriber_id),
            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanBangumi),
            source_url: ActiveValue::Set(
                MikanBangumiHash {
                    mikan_bangumi_id: "3600".into(),
                    mikan_fansub_id: "370".into(),
                }
                .build_rss_url(mikan_server.base_url().clone())
                .to_string(),
            ),
            enabled: ActiveValue::Set(true),
            ..Default::default()
        };

        let subscription_model = subscription_am.insert(app_ctx.db()).await?;

        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;

        {
            subscription.sync_feeds_incremental(app_ctx.clone()).await?;
            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(!bangumi_list.is_empty());
        }

        {
            subscription.sync_feeds_full(app_ctx.clone()).await?;
            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;

            assert!(!bangumi_list.is_empty());
        }

        Ok(())
    }
}
File diff suppressed because it is too large
@ -1,5 +1,5 @@
 pub mod parser;
 
 pub use parser::{
-    extract_season_from_title_body, parse_episode_meta_from_raw_name, RawEpisodeMeta,
+    RawEpisodeMeta, extract_episode_meta_from_raw_name, extract_season_from_title_body,
 };
@ -261,7 +261,7 @@ pub fn check_is_movie(title: &str) -> bool {
     MOVIE_TITLE_RE.is_match(title)
 }
 
-pub fn parse_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
+pub fn extract_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
     let raw_title = s.trim();
     let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
     let fansub = extract_fansub(&raw_title_without_ch_brackets);
@ -321,11 +321,11 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
 #[cfg(test)]
 mod tests {
 
-    use super::{RawEpisodeMeta, parse_episode_meta_from_raw_name};
+    use super::{RawEpisodeMeta, extract_episode_meta_from_raw_name};
 
     fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
         let expected: Option<RawEpisodeMeta> = serde_json::from_str(expected).unwrap_or_default();
-        let found = parse_episode_meta_from_raw_name(raw_name).ok();
+        let found = extract_episode_meta_from_raw_name(raw_name).ok();
 
         if expected != found {
             println!(
115
apps/recorder/src/graphql/domains/credential_3rd.rs
Normal file
@ -0,0 +1,115 @@
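//! Registers the `credential3rdCheckAvailable` GraphQL query: it resolves a
//! third-party credential by id, scoped to the authenticated subscriber, and
//! returns whether the credential is still usable.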
use std::sync::Arc;

use async_graphql::dynamic::{
    Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
};
use seaography::Builder as SeaographyBuilder;
use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql;

use crate::{
    app::AppContextTrait, auth::AuthUserInfo, errors::RecorderError, models::credential_3rd,
};

#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
struct Credential3rdCheckAvailableInput {
    pub id: i32,
}

impl Credential3rdCheckAvailableInput {
    fn input_type_name() -> &'static str {
        "Credential3rdCheckAvailableInput"
    }

    fn arg_name() -> &'static str {
        "filter"
    }

    fn generate_input_object() -> InputObject {
        InputObject::new(Self::input_type_name())
            .description("The input of the credential3rdCheckAvailable query")
            .field(InputValue::new(
                Credential3rdCheckAvailableInputFieldEnum::Id.as_str(),
                TypeRef::named_nn(TypeRef::INT),
            ))
    }
}

#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct Credential3rdCheckAvailableInfo {
    pub available: bool,
}

impl Credential3rdCheckAvailableInfo {
    fn object_type_name() -> &'static str {
        "Credential3rdCheckAvailableInfo"
    }

    fn generate_output_object() -> Object {
        Object::new(Self::object_type_name())
            .description("The output of the credential3rdCheckAvailable query")
            .field(Field::new(
                Credential3rdCheckAvailableInfoFieldEnum::Available,
                TypeRef::named_nn(TypeRef::BOOLEAN),
                move |ctx| {
                    FieldFuture::new(async move {
                        let subscription_info = ctx.parent_value.try_downcast_ref::<Self>()?;
                        Ok(Some(async_graphql::Value::from(
                            subscription_info.available,
                        )))
                    })
                },
            ))
    }
}

pub fn register_credential3rd_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.schema = builder
        .schema
        .register(Credential3rdCheckAvailableInput::generate_input_object());
    builder.schema = builder
        .schema
        .register(Credential3rdCheckAvailableInfo::generate_output_object());

    builder.queries.push(
        Field::new(
            "credential3rdCheckAvailable",
            TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
            move |ctx| {
                FieldFuture::new(async move {
                    let auth_user_info = ctx.data::<AuthUserInfo>()?;
                    let input: Credential3rdCheckAvailableInput = ctx
                        .args
                        .get(Credential3rdCheckAvailableInput::arg_name())
                        .unwrap()
                        .deserialize()?;
                    let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;

                    let credential_model = credential_3rd::Model::find_by_id_and_subscriber_id(
                        app_ctx.as_ref(),
                        input.id,
                        auth_user_info.subscriber_auth.subscriber_id,
                    )
                    .await?
                    .ok_or_else(|| RecorderError::Credential3rdError {
                        message: format!("credential = {} not found", input.id),
                        source: None.into(),
                    })?;

                    let available = credential_model.check_available(app_ctx.as_ref()).await?;
                    Ok(Some(FieldValue::owned_any(
                        Credential3rdCheckAvailableInfo { available },
                    )))
                })
            },
        )
        .argument(InputValue::new(
            Credential3rdCheckAvailableInput::arg_name(),
            TypeRef::named_nn(Credential3rdCheckAvailableInput::input_type_name()),
        )),
    );

    builder
}
106
apps/recorder/src/graphql/domains/crypto.rs
Normal file
@ -0,0 +1,106 @@
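//! Transparent column encryption for the GraphQL layer: seaography
//! input/output conversions that encrypt credential columns on write and
//! decrypt them on read through the app crypto service.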
use std::sync::Arc;

use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::{EntityTrait, Value as SeaValue};
use seaography::{BuilderContext, SeaResult};

use crate::{
    app::AppContextTrait,
    graphql::infra::util::{get_column_key, get_entity_key},
    models::credential_3rd,
};

fn register_crypto_column_input_conversion_to_schema_context<T>(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
    column: &T::Column,
) where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let column_name = get_column_key::<T>(context, column);
    let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
    let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);

    context.types.input_conversions.insert(
        format!("{entity_name}.{column_name}"),
        Box::new(
            move |_resolve_context: &ResolverContext<'_>,
                  value: &ValueAccessor|
                  -> SeaResult<sea_orm::Value> {
                let source = value.string()?;
                let encrypted = ctx.crypto().encrypt_string(source.into())?;
                Ok(encrypted.into())
            },
        ),
    );
}

fn register_crypto_column_output_conversion_to_schema_context<T>(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
    column: &T::Column,
) where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let column_name = get_column_key::<T>(context, column);
    let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
    let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);

    context.types.output_conversions.insert(
        format!("{entity_name}.{column_name}"),
        Box::new(
            move |value: &sea_orm::Value| -> SeaResult<async_graphql::Value> {
                if let SeaValue::String(s) = value {
                    if let Some(s) = s {
                        let decrypted = ctx.crypto().decrypt_string(s)?;
                        Ok(async_graphql::Value::String(decrypted))
                    } else {
                        Ok(async_graphql::Value::Null)
                    }
                } else {
                    Err(async_graphql::Error::new("crypto column must be string column").into())
                }
            },
        ),
    );
}

pub fn register_crypto_to_schema_context(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
) {
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Cookies,
    );
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Username,
    );
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Password,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Cookies,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Username,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx,
        &credential_3rd::Column::Password,
    );
}
5
apps/recorder/src/graphql/domains/mod.rs
Normal file
@ -0,0 +1,5 @@
pub mod credential_3rd;
pub mod crypto;
pub mod subscriber_tasks;
pub mod subscribers;
pub mod subscriptions;
19
apps/recorder/src/graphql/domains/subscriber_tasks.rs
Normal file
@ -0,0 +1,19 @@
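//! Schema hooks for subscriber tasks: restrict jsonb filtering on the `job`
//! column and expose the `SubscriberTaskType` enumeration.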
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
    graphql::infra::json::restrict_jsonb_filter_input_for_entity, models::subscriber_tasks,
};

pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
    restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
        context,
        &subscriber_tasks::Column::Job,
    );
}

pub fn register_subscriber_tasks_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
    builder
}
353
apps/recorder/src/graphql/domains/subscribers.rs
Normal file
@ -0,0 +1,353 @@
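//! Row-level isolation by subscriber: entity and field guards, a custom
//! `SubscriberIdFilterInput` filter type, and default input conversions that
//! pin every query and mutation to the authenticated subscriber id.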
use std::sync::Arc;

use async_graphql::dynamic::{ObjectAccessor, ResolverContext, TypeRef, ValueAccessor};
use lazy_static::lazy_static;
use maplit::btreeset;
use sea_orm::{ColumnTrait, Condition, EntityTrait, Iterable, Value as SeaValue};
use seaography::{
    Builder as SeaographyBuilder, BuilderContext, FilterInfo,
    FilterOperation as SeaographqlFilterOperation, FilterType, FilterTypesMapHelper,
    FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult, SeaographyError,
};

use crate::{
    auth::{AuthError, AuthUserInfo},
    graphql::infra::util::{get_column_key, get_entity_column_key, get_entity_key},
    models::subscribers,
};

lazy_static! {
    pub static ref SUBSCRIBER_ID_FILTER_INFO: FilterInfo = FilterInfo {
        type_name: String::from("SubscriberIdFilterInput"),
        base_type: TypeRef::INT.into(),
        supported_operations: btreeset! { SeaographqlFilterOperation::Equals },
    };
}

fn guard_data_object_accessor_with_subscriber_id(
    value: ValueAccessor<'_>,
    column_name: &str,
    subscriber_id: i32,
) -> async_graphql::Result<()> {
    let obj = value.object()?;

    let subscriber_id_value = obj.try_get(column_name)?;

    let id = subscriber_id_value.i64()?;

    if id == subscriber_id as i64 {
        Ok(())
    } else {
        Err(async_graphql::Error::new("subscriber not match"))
    }
}

fn guard_data_object_accessor_with_optional_subscriber_id(
    value: ValueAccessor<'_>,
    column_name: &str,
    subscriber_id: i32,
) -> async_graphql::Result<()> {
    if value.is_null() {
        return Ok(());
    }
    let obj = value.object()?;

    if let Some(subscriber_id_value) = obj.get(column_name) {
        let id = subscriber_id_value.i64()?;
        if id == subscriber_id as i64 {
            Ok(())
        } else {
            Err(async_graphql::Error::new("subscriber not match"))
        }
    } else {
        Ok(())
    }
}

pub fn guard_entity_with_subscriber_id<T>(_context: &BuilderContext, _column: &T::Column) -> FnGuard
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    Box::new(move |context: &ResolverContext| -> GuardAction {
        match context.ctx.data::<AuthUserInfo>() {
            Ok(_) => GuardAction::Allow,
            Err(err) => GuardAction::Block(Some(err.message)),
        }
    })
}

pub fn guard_field_with_subscriber_id<T>(context: &BuilderContext, column: &T::Column) -> FnGuard
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
    let column_key = get_column_key::<T>(context, column);
    let column_name = Arc::new(context.entity_object.column_name.as_ref()(
        &entity_key,
        &column_key,
    ));
    let entity_create_one_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name, context.entity_create_one_mutation.mutation_suffix
    ));
    let entity_create_one_mutation_data_field_name =
        Arc::new(context.entity_create_one_mutation.data_field.clone());
    let entity_create_batch_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name,
        context.entity_create_batch_mutation.mutation_suffix.clone()
    ));
    let entity_create_batch_mutation_data_field_name =
        Arc::new(context.entity_create_batch_mutation.data_field.clone());
    let entity_update_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name, context.entity_update_mutation.mutation_suffix
    ));
    let entity_update_mutation_data_field_name =
        Arc::new(context.entity_update_mutation.data_field.clone());

    Box::new(move |context: &ResolverContext| -> GuardAction {
        match context.ctx.data::<AuthUserInfo>() {
            Ok(user_info) => {
                let subscriber_id = user_info.subscriber_auth.subscriber_id;
                let validation_result = match context.field().name() {
                    field if field == entity_create_one_mutation_field_name.as_str() => {
                        if let Some(data_value) = context
                            .args
                            .get(&entity_create_one_mutation_data_field_name)
                        {
                            guard_data_object_accessor_with_subscriber_id(
                                data_value,
                                &column_name,
                                subscriber_id,
                            )
                            .map_err(|inner_error| {
                                AuthError::from_graphql_dynamic_subscribe_id_guard(
                                    inner_error,
                                    context,
                                    &entity_create_one_mutation_data_field_name,
                                    &column_name,
                                )
                            })
                        } else {
                            Ok(())
                        }
                    }
                    field if field == entity_create_batch_mutation_field_name.as_str() => {
                        if let Some(data_value) = context
                            .args
                            .get(&entity_create_batch_mutation_data_field_name)
                        {
                            data_value
                                .list()
                                .and_then(|data_list| {
                                    data_list.iter().try_for_each(|data_item_value| {
                                        guard_data_object_accessor_with_optional_subscriber_id(
                                            data_item_value,
                                            &column_name,
                                            subscriber_id,
                                        )
                                    })
                                })
                                .map_err(|inner_error| {
                                    AuthError::from_graphql_dynamic_subscribe_id_guard(
                                        inner_error,
                                        context,
                                        &entity_create_batch_mutation_data_field_name,
                                        &column_name,
                                    )
                                })
                        } else {
                            Ok(())
                        }
                    }
                    field if field == entity_update_mutation_field_name.as_str() => {
                        if let Some(data_value) =
                            context.args.get(&entity_update_mutation_data_field_name)
                        {
                            guard_data_object_accessor_with_optional_subscriber_id(
                                data_value,
                                &column_name,
                                subscriber_id,
                            )
                            .map_err(|inner_error| {
                                AuthError::from_graphql_dynamic_subscribe_id_guard(
                                    inner_error,
                                    context,
                                    &entity_update_mutation_data_field_name,
                                    &column_name,
                                )
                            })
                        } else {
                            Ok(())
                        }
                    }
                    _ => Ok(()),
                };
                match validation_result {
                    Ok(_) => GuardAction::Allow,
                    Err(err) => GuardAction::Block(Some(err.to_string())),
                }
            }
            Err(err) => GuardAction::Block(Some(err.message)),
        }
    })
}

pub fn generate_subscriber_id_filter_condition<T>(
    _context: &BuilderContext,
    column: &T::Column,
) -> FnFilterCondition
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let column = *column;
    Box::new(
        move |context: &ResolverContext,
              mut condition: Condition,
              filter: Option<&ObjectAccessor<'_>>|
              -> SeaResult<Condition> {
            match context.ctx.data::<AuthUserInfo>() {
                Ok(user_info) => {
                    let subscriber_id = user_info.subscriber_auth.subscriber_id;

                    if let Some(filter) = filter {
                        for operation in &SUBSCRIBER_ID_FILTER_INFO.supported_operations {
                            match operation {
                                SeaographqlFilterOperation::Equals => {
                                    if let Some(value) = filter.get("eq") {
                                        let value: i32 = value.i64()?.try_into()?;
                                        if value != subscriber_id {
                                            return Err(SeaographyError::AsyncGraphQLError(
                                                async_graphql::Error::new(
                                                    "subscriber_id and auth_info does not match",
                                                ),
                                            ));
                                        }
                                    }
                                }
                                _ => unreachable!("unreachable filter operation for subscriber_id"),
                            }
                        }
                    } else {
                        condition = condition.add(column.eq(subscriber_id));
                    }

                    Ok(condition)
                }
                Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
            }
        },
    )
}

pub fn generate_default_subscriber_id_input_conversion<T>(
    context: &BuilderContext,
    _column: &T::Column,
) -> FnInputTypeNoneConversion
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
    let entity_create_one_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name, context.entity_create_one_mutation.mutation_suffix
    ));
    let entity_create_batch_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name,
        context.entity_create_batch_mutation.mutation_suffix.clone()
    ));
    Box::new(
        move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
            let field_name = context.field().name();
            tracing::warn!("field_name: {:?}", field_name);
            if field_name == entity_create_one_mutation_field_name.as_str()
                || field_name == entity_create_batch_mutation_field_name.as_str()
            {
                match context.ctx.data::<AuthUserInfo>() {
                    Ok(user_info) => {
                        let subscriber_id = user_info.subscriber_auth.subscriber_id;
                        Ok(Some(SeaValue::Int(Some(subscriber_id))))
                    }
                    Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
                }
            } else {
                Ok(None)
            }
        },
    )
}

pub fn restrict_subscriber_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let entity_column_key = get_entity_column_key::<T>(context, column);

    context.guards.entity_guards.insert(
        entity_key.clone(),
        guard_entity_with_subscriber_id::<T>(context, column),
    );
    context.guards.field_guards.insert(
        entity_column_key.clone(),
        guard_field_with_subscriber_id::<T>(context, column),
    );
    context.filter_types.overwrites.insert(
        entity_column_key.clone(),
        Some(FilterType::Custom(
            SUBSCRIBER_ID_FILTER_INFO.type_name.clone(),
        )),
    );
    context.filter_types.condition_functions.insert(
        entity_column_key.clone(),
        generate_subscriber_id_filter_condition::<T>(context, column),
    );
    context.types.input_none_conversions.insert(
        entity_column_key.clone(),
        generate_default_subscriber_id_input_conversion::<T>(context, column),
    );

    context.entity_input.update_skips.push(entity_column_key);
}

pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
    for column in subscribers::Column::iter() {
        if !matches!(column, subscribers::Column::Id) {
            let key = get_entity_column_key::<subscribers::Entity>(context, &column);
            context.filter_types.overwrites.insert(key, None);
        }
    }
}

pub fn register_subscribers_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    {
        let filter_types_map_helper = FilterTypesMapHelper {
            context: builder.context,
        };
|
|
||||||
|
builder.schema = builder
|
||||||
|
.schema
|
||||||
|
.register(filter_types_map_helper.generate_filter_input(&SUBSCRIBER_ID_FILTER_INFO));
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
builder.register_entity::<subscribers::Entity>(
|
||||||
|
<subscribers::RelatedEntity as sea_orm::Iterable>::iter()
|
||||||
|
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
|
||||||
|
.collect(),
|
||||||
|
);
|
||||||
|
builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
|
||||||
|
builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
|
||||||
|
}
|
||||||
|
|
||||||
|
builder
|
||||||
|
}
|
||||||
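Note: `restrict_subscriber_for_entity` above is the single opt-in point for tenant isolation: it installs the entity and field guards, swaps the column's filter for the custom `SubscriberIdFilterInput`, injects the authenticated subscriber id when a create mutation omits it, and skips the column on updates. A minimal sketch of wiring one entity in, assuming a models module like the one registered later in `schema.rs`:

    // Sketch: opting one entity into subscriber isolation.
    // `subscriptions::Entity` and its `SubscriberId` column come from the models crate.
    let mut context = BuilderContext::default();
    restrict_subscriber_for_entity::<subscriptions::Entity>(
        &mut context,
        &subscriptions::Column::SubscriberId,
    );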
226 apps/recorder/src/graphql/domains/subscriptions.rs (new file)
@ -0,0 +1,226 @@
use std::sync::Arc;

use async_graphql::dynamic::{
    Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
};
use seaography::Builder as SeaographyBuilder;
use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql;

use crate::{
    app::AppContextTrait,
    auth::AuthUserInfo,
    models::subscriptions::{self, SubscriptionTrait},
    task::SubscriberTask,
};

#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
struct SyncOneSubscriptionFilterInput {
    pub id: i32,
}

impl SyncOneSubscriptionFilterInput {
    fn input_type_name() -> &'static str {
        "SyncOneSubscriptionFilterInput"
    }

    fn arg_name() -> &'static str {
        "filter"
    }

    fn generate_input_object() -> InputObject {
        InputObject::new(Self::input_type_name())
            .description("The input of the subscriptionSyncOne series of mutations")
            .field(InputValue::new(
                SyncOneSubscriptionFilterInputFieldEnum::Id.as_str(),
                TypeRef::named_nn(TypeRef::INT),
            ))
    }
}

#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct SyncOneSubscriptionInfo {
    pub task_id: String,
}

impl SyncOneSubscriptionInfo {
    fn object_type_name() -> &'static str {
        "SyncOneSubscriptionInfo"
    }

    fn generate_output_object() -> Object {
        Object::new(Self::object_type_name())
            .description("The output of the subscriptionSyncOne series of mutations")
            .field(Field::new(
                SyncOneSubscriptionInfoFieldEnum::TaskId,
                TypeRef::named_nn(TypeRef::STRING),
                move |ctx| {
                    FieldFuture::new(async move {
                        let subscription_info = ctx.parent_value.try_downcast_ref::<Self>()?;
                        Ok(Some(async_graphql::Value::from(
                            subscription_info.task_id.as_str(),
                        )))
                    })
                },
            ))
    }
}

pub fn register_subscriptions_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.schema = builder
        .schema
        .register(SyncOneSubscriptionFilterInput::generate_input_object());
    builder.schema = builder
        .schema
        .register(SyncOneSubscriptionInfo::generate_output_object());

    builder.mutations.push(
        Field::new(
            "subscriptionSyncOneFeedsIncremental",
            TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
            move |ctx| {
                FieldFuture::new(async move {
                    let auth_user_info = ctx.data::<AuthUserInfo>()?;

                    let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
                    let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;

                    let filter_input: SyncOneSubscriptionFilterInput = ctx
                        .args
                        .get(SyncOneSubscriptionFilterInput::arg_name())
                        .unwrap()
                        .deserialize()?;

                    let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
                        app_ctx.as_ref(),
                        filter_input.id,
                        subscriber_id,
                    )
                    .await?;

                    let subscription =
                        subscriptions::Subscription::try_from_model(&subscription_model)?;

                    let task_service = app_ctx.task();

                    let task_id = task_service
                        .add_subscriber_task(
                            auth_user_info.subscriber_auth.subscriber_id,
                            SubscriberTask::SyncOneSubscriptionFeedsIncremental(
                                subscription.into(),
                            ),
                        )
                        .await?;

                    Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
                        task_id: task_id.to_string(),
                    })))
                })
            },
        )
        .argument(InputValue::new(
            SyncOneSubscriptionFilterInput::arg_name(),
            TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
        )),
    );

    builder.mutations.push(
        Field::new(
            "subscriptionSyncOneFeedsFull",
            TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
            move |ctx| {
                FieldFuture::new(async move {
                    let auth_user_info = ctx.data::<AuthUserInfo>()?;

                    let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
                    let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;

                    let filter_input: SyncOneSubscriptionFilterInput = ctx
                        .args
                        .get(SyncOneSubscriptionFilterInput::arg_name())
                        .unwrap()
                        .deserialize()?;

                    let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
                        app_ctx.as_ref(),
                        filter_input.id,
                        subscriber_id,
                    )
                    .await?;

                    let subscription =
                        subscriptions::Subscription::try_from_model(&subscription_model)?;

                    let task_service = app_ctx.task();

                    let task_id = task_service
                        .add_subscriber_task(
                            auth_user_info.subscriber_auth.subscriber_id,
                            SubscriberTask::SyncOneSubscriptionFeedsFull(subscription.into()),
                        )
                        .await?;

                    Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
                        task_id: task_id.to_string(),
                    })))
                })
            },
        )
        .argument(InputValue::new(
            SyncOneSubscriptionFilterInput::arg_name(),
            TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
        )),
    );

    builder.mutations.push(
        Field::new(
            "subscriptionSyncOneSources",
            TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
            move |ctx| {
                FieldFuture::new(async move {
                    let auth_user_info = ctx.data::<AuthUserInfo>()?;
                    let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;

                    let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;

                    let filter_input: SyncOneSubscriptionFilterInput = ctx
                        .args
                        .get(SyncOneSubscriptionFilterInput::arg_name())
                        .unwrap()
                        .deserialize()?;

                    let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
                        app_ctx.as_ref(),
                        filter_input.id,
                        subscriber_id,
                    )
                    .await?;

                    let subscription =
                        subscriptions::Subscription::try_from_model(&subscription_model)?;

                    let task_service = app_ctx.task();

                    let task_id = task_service
                        .add_subscriber_task(
                            auth_user_info.subscriber_auth.subscriber_id,
                            SubscriberTask::SyncOneSubscriptionSources(subscription.into()),
                        )
                        .await?;

                    Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
                        task_id: task_id.to_string(),
                    })))
                })
            },
        )
        .argument(InputValue::new(
            SyncOneSubscriptionFilterInput::arg_name(),
            TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
        )),
    );

    builder
}
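Note: the three `subscriptionSyncOne*` mutations are identical except for the `SubscriberTask` variant they enqueue: each resolves the authenticated subscriber, loads the subscription by `(id, subscriber_id)`, and returns the apalis task id. A sketch of exercising one of them against the built schema; the request must already carry `AuthUserInfo` and the app context as data, and output field casing such as `taskId` is an assumption about what the `DynamicGraphql` derive generates:

    // `schema` is the async_graphql::dynamic::Schema produced by build_schema;
    // the id value is a placeholder.
    let response = schema
        .execute(r#"mutation { subscriptionSyncOneFeedsIncremental(filter: { id: 1 }) { taskId } }"#)
        .await;
    assert!(response.errors.is_empty());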
@ -1,46 +0,0 @@ (deleted file; by its contents, the old graphql::filter module)
use async_graphql::dynamic::{ObjectAccessor, TypeRef};
use maplit::btreeset;
use once_cell::sync::OnceCell;
use sea_orm::{ColumnTrait, Condition, EntityTrait, Value};
use seaography::{BuilderContext, FilterInfo, FilterOperation, SeaResult};

pub static SUBSCRIBER_ID_FILTER_INFO: OnceCell<FilterInfo> = OnceCell::new();

pub fn init_custom_filter_info() {
    SUBSCRIBER_ID_FILTER_INFO.get_or_init(|| FilterInfo {
        type_name: String::from("SubscriberIdFilterInput"),
        base_type: TypeRef::INT.into(),
        supported_operations: btreeset! { FilterOperation::Equals },
    });
}

pub type FnFilterCondition =
    Box<dyn Fn(Condition, &ObjectAccessor) -> SeaResult<Condition> + Send + Sync>;

pub fn subscriber_id_condition_function<T>(
    _context: &BuilderContext,
    column: &T::Column,
) -> FnFilterCondition
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let column = *column;
    Box::new(move |mut condition, filter| {
        let subscriber_id_filter_info = SUBSCRIBER_ID_FILTER_INFO.get().unwrap();
        let operations = &subscriber_id_filter_info.supported_operations;
        for operation in operations {
            match operation {
                FilterOperation::Equals => {
                    if let Some(value) = filter.get("eq") {
                        let value: i32 = value.i64()?.try_into()?;
                        let value = Value::Int(Some(value));
                        condition = condition.add(column.eq(value));
                    }
                }
                _ => unreachable!("unreachable filter operation for subscriber_id"),
            }
        }
        Ok(condition)
    })
}
@ -1,181 +0,0 @@ (deleted file; by its contents, the old graphql::guard module)
use std::sync::Arc;

use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::EntityTrait;
use seaography::{BuilderContext, FnGuard, GuardAction};

use super::util::{get_column_key, get_entity_key};
use crate::auth::{AuthError, AuthUserInfo};

fn guard_data_object_accessor_with_subscriber_id(
    value: ValueAccessor<'_>,
    column_name: &str,
    subscriber_id: i32,
) -> async_graphql::Result<()> {
    let obj = value.object()?;

    let subscriber_id_value = obj.try_get(column_name)?;

    let id = subscriber_id_value.i64()?;

    if id == subscriber_id as i64 {
        Ok(())
    } else {
        Err(async_graphql::Error::new("subscriber not match"))
    }
}

fn guard_data_object_accessor_with_optional_subscriber_id(
    value: ValueAccessor<'_>,
    column_name: &str,
    subscriber_id: i32,
) -> async_graphql::Result<()> {
    if value.is_null() {
        return Ok(());
    }
    let obj = value.object()?;

    if let Some(subscriber_id_value) = obj.get(column_name) {
        let id = subscriber_id_value.i64()?;
        if id == subscriber_id as i64 {
            Ok(())
        } else {
            Err(async_graphql::Error::new("subscriber not match"))
        }
    } else {
        Ok(())
    }
}

pub fn guard_entity_with_subscriber_id<T>(_context: &BuilderContext, _column: &T::Column) -> FnGuard
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    Box::new(move |context: &ResolverContext| -> GuardAction {
        match context.ctx.data::<AuthUserInfo>() {
            Ok(_) => GuardAction::Allow,
            Err(err) => GuardAction::Block(Some(err.message)),
        }
    })
}

pub fn guard_field_with_subscriber_id<T>(context: &BuilderContext, column: &T::Column) -> FnGuard
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
    let column_key = get_column_key::<T>(context, column);
    let column_name = Arc::new(context.entity_object.column_name.as_ref()(
        &entity_key,
        &column_key,
    ));
    let entity_create_one_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name, context.entity_create_one_mutation.mutation_suffix
    ));
    let entity_create_one_mutation_data_field_name =
        Arc::new(context.entity_create_one_mutation.data_field.clone());
    let entity_create_batch_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name,
        context.entity_create_batch_mutation.mutation_suffix.clone()
    ));
    let entity_create_batch_mutation_data_field_name =
        Arc::new(context.entity_create_batch_mutation.data_field.clone());
    let entity_update_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name, context.entity_update_mutation.mutation_suffix
    ));
    let entity_update_mutation_data_field_name =
        Arc::new(context.entity_update_mutation.data_field.clone());

    Box::new(move |context: &ResolverContext| -> GuardAction {
        match context.ctx.data::<AuthUserInfo>() {
            Ok(user_info) => {
                let subscriber_id = user_info.subscriber_auth.subscriber_id;
                let validation_result = match context.field().name() {
                    field if field == entity_create_one_mutation_field_name.as_str() => {
                        if let Some(data_value) = context
                            .args
                            .get(&entity_create_one_mutation_data_field_name)
                        {
                            guard_data_object_accessor_with_subscriber_id(
                                data_value,
                                &column_name,
                                subscriber_id,
                            )
                            .map_err(|inner_error| {
                                AuthError::from_graphql_subscribe_id_guard(
                                    inner_error,
                                    context,
                                    &entity_create_one_mutation_data_field_name,
                                    &column_name,
                                )
                            })
                        } else {
                            Ok(())
                        }
                    }
                    field if field == entity_create_batch_mutation_field_name.as_str() => {
                        if let Some(data_value) = context
                            .args
                            .get(&entity_create_batch_mutation_data_field_name)
                        {
                            data_value
                                .list()
                                .and_then(|data_list| {
                                    data_list.iter().try_for_each(|data_item_value| {
                                        guard_data_object_accessor_with_optional_subscriber_id(
                                            data_item_value,
                                            &column_name,
                                            subscriber_id,
                                        )
                                    })
                                })
                                .map_err(|inner_error| {
                                    AuthError::from_graphql_subscribe_id_guard(
                                        inner_error,
                                        context,
                                        &entity_create_batch_mutation_data_field_name,
                                        &column_name,
                                    )
                                })
                        } else {
                            Ok(())
                        }
                    }
                    field if field == entity_update_mutation_field_name.as_str() => {
                        if let Some(data_value) =
                            context.args.get(&entity_update_mutation_data_field_name)
                        {
                            guard_data_object_accessor_with_optional_subscriber_id(
                                data_value,
                                &column_name,
                                subscriber_id,
                            )
                            .map_err(|inner_error| {
                                AuthError::from_graphql_subscribe_id_guard(
                                    inner_error,
                                    context,
                                    &entity_update_mutation_data_field_name,
                                    &column_name,
                                )
                            })
                        } else {
                            Ok(())
                        }
                    }
                    _ => Ok(()),
                };
                match validation_result {
                    Ok(_) => GuardAction::Allow,
                    Err(err) => GuardAction::Block(Some(err.to_string())),
                }
            }
            Err(err) => GuardAction::Block(Some(err.message)),
        }
    })
}
1317 apps/recorder/src/graphql/infra/json.rs (new file)
File diff suppressed because it is too large
2 apps/recorder/src/graphql/infra/mod.rs (new file)
@ -0,0 +1,2 @@
pub mod json;
pub mod util;
@ -1,12 +1,9 @@
 pub mod config;
-pub mod filter;
-pub mod guard;
-pub mod schema_root;
+pub mod domains;
+pub mod infra;
+mod schema;
 pub mod service;
-pub mod subscriptions;
-pub mod transformer;
-pub mod util;
 
 pub use config::GraphQLConfig;
-pub use schema_root::schema;
+pub use schema::build_schema;
 pub use service::GraphQLService;
149 apps/recorder/src/graphql/schema.rs (new file)
@ -0,0 +1,149 @@
use std::sync::Arc;

use async_graphql::dynamic::*;
use once_cell::sync::OnceCell;
use seaography::{Builder, BuilderContext};

use crate::{
    app::AppContextTrait,
    graphql::{
        domains::{
            credential_3rd::register_credential3rd_to_schema_builder,
            crypto::register_crypto_to_schema_context,
            subscriber_tasks::{
                register_subscriber_tasks_to_schema_builder,
                register_subscriber_tasks_to_schema_context,
            },
            subscribers::{
                register_subscribers_to_schema_builder, register_subscribers_to_schema_context,
                restrict_subscriber_for_entity,
            },
            subscriptions::register_subscriptions_to_schema_builder,
        },
        infra::json::register_jsonb_input_filter_to_schema_builder,
    },
};

pub static CONTEXT: OnceCell<BuilderContext> = OnceCell::new();

pub fn build_schema(
    app_ctx: Arc<dyn AppContextTrait>,
    depth: Option<usize>,
    complexity: Option<usize>,
) -> Result<Schema, SchemaError> {
    use crate::models::*;
    let database = app_ctx.db().as_ref().clone();

    let context = CONTEXT.get_or_init(|| {
        let mut context = BuilderContext::default();

        {
            // domains
            register_subscribers_to_schema_context(&mut context);

            {
                restrict_subscriber_for_entity::<bangumi::Entity>(
                    &mut context,
                    &bangumi::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<downloaders::Entity>(
                    &mut context,
                    &downloaders::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<downloads::Entity>(
                    &mut context,
                    &downloads::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<episodes::Entity>(
                    &mut context,
                    &episodes::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<subscriptions::Entity>(
                    &mut context,
                    &subscriptions::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<subscribers::Entity>(
                    &mut context,
                    &subscribers::Column::Id,
                );
                restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
                    &mut context,
                    &subscription_bangumi::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<subscription_episode::Entity>(
                    &mut context,
                    &subscription_episode::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
                    &mut context,
                    &subscriber_tasks::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<credential_3rd::Entity>(
                    &mut context,
                    &credential_3rd::Column::SubscriberId,
                );
            }

            register_crypto_to_schema_context(&mut context, app_ctx.clone());
            register_subscriber_tasks_to_schema_context(&mut context);
        }
        context
    });

    let mut builder = Builder::new(context, database.clone());

    {
        // infra
        builder = register_jsonb_input_filter_to_schema_builder(builder);
    }
    {
        // domains
        builder = register_subscribers_to_schema_builder(builder);

        seaography::register_entities!(
            builder,
            [
                bangumi,
                downloaders,
                downloads,
                episodes,
                subscription_bangumi,
                subscription_episode,
                subscriptions,
                subscriber_tasks,
                credential_3rd
            ]
        );

        {
            builder.register_enumeration::<downloads::DownloadStatus>();
            builder.register_enumeration::<subscriptions::SubscriptionCategory>();
            builder.register_enumeration::<downloaders::DownloaderCategory>();
            builder.register_enumeration::<downloads::DownloadMime>();
            builder.register_enumeration::<credential_3rd::Credential3rdType>();
            builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
        }

        builder = register_subscriptions_to_schema_builder(builder);
        builder = register_credential3rd_to_schema_builder(builder);
        builder = register_subscriber_tasks_to_schema_builder(builder);
    }

    let schema = builder.schema_builder();

    let schema = if let Some(depth) = depth {
        schema.limit_depth(depth)
    } else {
        schema
    };
    let schema = if let Some(complexity) = complexity {
        schema.limit_complexity(complexity)
    } else {
        schema
    };
    schema
        .data(database)
        .data(app_ctx)
        .finish()
        .inspect_err(|e| tracing::error!(e = ?e))
}
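Note: `build_schema` memoizes the `BuilderContext` in the `CONTEXT` `OnceCell`, so the per-entity restrictions are computed once per process, while the seaography `Builder` and the finished schema are rebuilt on each call. A sketch of how the service layer consumes it, mirroring the `GraphQLService::from_config_and_ctx` change further down (the limit values are placeholders):

    // depth/complexity limits are optional; None disables the corresponding limit.
    let schema = build_schema(app_ctx.clone(), Some(10), Some(1000))?;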
@ -1,181 +0,0 @@ (deleted file; by its contents, the old graphql::schema_root module)
use async_graphql::dynamic::*;
use once_cell::sync::OnceCell;
use sea_orm::{DatabaseConnection, EntityTrait, Iterable};
use seaography::{Builder, BuilderContext, FilterType, FilterTypesMapHelper};

use super::transformer::{filter_condition_transformer, mutation_input_object_transformer};
use crate::graphql::{
    filter::{
        SUBSCRIBER_ID_FILTER_INFO, init_custom_filter_info, subscriber_id_condition_function,
    },
    guard::{guard_entity_with_subscriber_id, guard_field_with_subscriber_id},
    util::{get_entity_column_key, get_entity_key},
};

pub static CONTEXT: OnceCell<BuilderContext> = OnceCell::new();

fn restrict_filter_input_for_entity<T>(
    context: &mut BuilderContext,
    column: &T::Column,
    filter_type: Option<FilterType>,
) where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let key = get_entity_column_key::<T>(context, column);
    context.filter_types.overwrites.insert(key, filter_type);
}

fn restrict_subscriber_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let entity_column_key = get_entity_column_key::<T>(context, column);
    context.guards.entity_guards.insert(
        entity_key.clone(),
        guard_entity_with_subscriber_id::<T>(context, column),
    );
    context.guards.field_guards.insert(
        entity_column_key.clone(),
        guard_field_with_subscriber_id::<T>(context, column),
    );
    context.filter_types.overwrites.insert(
        entity_column_key.clone(),
        Some(FilterType::Custom(
            SUBSCRIBER_ID_FILTER_INFO.get().unwrap().type_name.clone(),
        )),
    );
    context.filter_types.condition_functions.insert(
        entity_column_key.clone(),
        subscriber_id_condition_function::<T>(context, column),
    );
    context.transformers.filter_conditions_transformers.insert(
        entity_key.clone(),
        filter_condition_transformer::<T>(context, column),
    );
    context
        .transformers
        .mutation_input_object_transformers
        .insert(
            entity_key,
            mutation_input_object_transformer::<T>(context, column),
        );
    context
        .entity_input
        .insert_skips
        .push(entity_column_key.clone());
    context.entity_input.update_skips.push(entity_column_key);
}

pub fn schema(
    database: DatabaseConnection,
    depth: Option<usize>,
    complexity: Option<usize>,
) -> Result<Schema, SchemaError> {
    use crate::models::*;
    init_custom_filter_info();
    let context = CONTEXT.get_or_init(|| {
        let mut context = BuilderContext::default();

        restrict_subscriber_for_entity::<bangumi::Entity>(
            &mut context,
            &bangumi::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<downloaders::Entity>(
            &mut context,
            &downloaders::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<downloads::Entity>(
            &mut context,
            &downloads::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<episodes::Entity>(
            &mut context,
            &episodes::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<subscriptions::Entity>(
            &mut context,
            &subscriptions::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<subscribers::Entity>(
            &mut context,
            &subscribers::Column::Id,
        );
        restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
            &mut context,
            &subscription_bangumi::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<subscription_episode::Entity>(
            &mut context,
            &subscription_episode::Column::SubscriberId,
        );
        for column in subscribers::Column::iter() {
            if !matches!(column, subscribers::Column::Id) {
                restrict_filter_input_for_entity::<subscribers::Entity>(
                    &mut context,
                    &column,
                    None,
                );
            }
        }
        context
    });
    let mut builder = Builder::new(context, database.clone());

    {
        let filter_types_map_helper = FilterTypesMapHelper { context };

        builder.schema = builder.schema.register(
            filter_types_map_helper.generate_filter_input(SUBSCRIBER_ID_FILTER_INFO.get().unwrap()),
        );
    }

    {
        builder.register_entity::<subscribers::Entity>(
            <subscribers::RelatedEntity as sea_orm::Iterable>::iter()
                .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
                .collect(),
        );
        builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
        builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
    }

    seaography::register_entities!(
        builder,
        [
            bangumi,
            downloaders,
            downloads,
            episodes,
            subscription_bangumi,
            subscription_episode,
            subscriptions
        ]
    );

    {
        builder.register_enumeration::<downloads::DownloadStatus>();
        builder.register_enumeration::<subscriptions::SubscriptionCategory>();
        builder.register_enumeration::<downloaders::DownloaderCategory>();
        builder.register_enumeration::<downloads::DownloadMime>();
    }

    let schema = builder.schema_builder();

    let schema = if let Some(depth) = depth {
        schema.limit_depth(depth)
    } else {
        schema
    };
    let schema = if let Some(complexity) = complexity {
        schema.limit_complexity(complexity)
    } else {
        schema
    };
    schema
        .data(database)
        .finish()
        .inspect_err(|e| tracing::error!(e = ?e))
}
@ -1,8 +1,9 @@
-use async_graphql::dynamic::Schema;
-use sea_orm::DatabaseConnection;
+use std::sync::Arc;
 
-use super::{config::GraphQLConfig, schema_root};
-use crate::errors::RecorderResult;
+use async_graphql::dynamic::Schema;
+
+use super::{build_schema, config::GraphQLConfig};
+use crate::{app::AppContextTrait, errors::RecorderResult};
 
 #[derive(Debug)]
 pub struct GraphQLService {
@ -10,12 +11,12 @@ pub struct GraphQLService {
 }
 
 impl GraphQLService {
-    pub async fn from_config_and_database(
+    pub async fn from_config_and_ctx(
         config: GraphQLConfig,
-        db: DatabaseConnection,
+        ctx: Arc<dyn AppContextTrait>,
     ) -> RecorderResult<Self> {
-        let schema = schema_root::schema(
-            db,
+        let schema = build_schema(
+            ctx,
             config.depth_limit.and_then(|l| l.into()),
             config.complexity_limit.and_then(|l| l.into()),
         )?;
@ -1,83 +0,0 @@ (deleted file; by its contents, the old graphql::transformer module)
use std::{collections::BTreeMap, sync::Arc};

use async_graphql::dynamic::ResolverContext;
use sea_orm::{ColumnTrait, Condition, EntityTrait, Value};
use seaography::{BuilderContext, FnFilterConditionsTransformer, FnMutationInputObjectTransformer};

use super::util::{get_column_key, get_entity_key};
use crate::auth::AuthUserInfo;

pub fn filter_condition_transformer<T>(
    _context: &BuilderContext,
    column: &T::Column,
) -> FnFilterConditionsTransformer
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let column = *column;
    Box::new(
        move |context: &ResolverContext, condition: Condition| -> Condition {
            match context.ctx.data::<AuthUserInfo>() {
                Ok(user_info) => {
                    let subscriber_id = user_info.subscriber_auth.subscriber_id;
                    condition.add(column.eq(subscriber_id))
                }
                Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
            }
        },
    )
}

pub fn mutation_input_object_transformer<T>(
    context: &BuilderContext,
    column: &T::Column,
) -> FnMutationInputObjectTransformer
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
    let column_key = get_column_key::<T>(context, column);
    let column_name = Arc::new(context.entity_object.column_name.as_ref()(
        &entity_key,
        &column_key,
    ));
    let entity_create_one_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name, context.entity_create_one_mutation.mutation_suffix
    ));
    let entity_create_batch_mutation_field_name = Arc::new(format!(
        "{}{}",
        entity_name,
        context.entity_create_batch_mutation.mutation_suffix.clone()
    ));
    Box::new(
        move |context: &ResolverContext,
              mut input: BTreeMap<String, Value>|
              -> BTreeMap<String, Value> {
            let field_name = context.field().name();
            if field_name == entity_create_one_mutation_field_name.as_str()
                || field_name == entity_create_batch_mutation_field_name.as_str()
            {
                match context.ctx.data::<AuthUserInfo>() {
                    Ok(user_info) => {
                        let subscriber_id = user_info.subscriber_auth.subscriber_id;
                        let value = input.get_mut(column_name.as_str());
                        if value.is_none() {
                            input.insert(
                                column_name.as_str().to_string(),
                                Value::Int(Some(subscriber_id)),
                            );
                        }
                        input
                    }
                    Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
                }
            } else {
                input
            }
        },
    )
}
@ -9,6 +9,7 @@
     associated_type_defaults,
     let_chains
 )]
+#![allow(clippy::enum_variant_names)]
 pub use downloader;
 
 pub mod app;
@ -20,10 +21,10 @@ pub mod errors;
 pub mod extract;
 pub mod graphql;
 pub mod logger;
+pub mod message;
 pub mod migrations;
 pub mod models;
 pub mod storage;
-pub mod tasks;
+pub mod task;
-#[cfg(test)]
 pub mod test_utils;
 pub mod web;
@ -5,4 +5,4 @@ pub mod service;
 pub use core::{LogFormat, LogLevel, LogRotation};
 
 pub use config::{LoggerConfig, LoggerFileAppender};
-pub use service::LoggerService;
+pub use service::{LoggerService, MODULE_WHITELIST};
@ -13,7 +13,7 @@ use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
 use crate::errors::RecorderResult;
 
 // Function to initialize the logger based on the provided configuration
-const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];
+pub const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sea_orm", "sea_query"];
 
 // Keep nonblocking file appender work guard
 static NONBLOCKING_WORK_GUARD_KEEP: OnceLock<WorkerGuard> = OnceLock::new();
4 apps/recorder/src/message/config.rs (new file)
@ -0,0 +1,4 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MessageConfig {}
5 apps/recorder/src/message/mod.rs (new file)
@ -0,0 +1,5 @@
mod config;
mod service;

pub use config::MessageConfig;
pub use service::MessageService;
12 apps/recorder/src/message/service.rs (new file)
@ -0,0 +1,12 @@
use super::MessageConfig;
use crate::errors::RecorderResult;

pub struct MessageService {
    pub config: MessageConfig,
}

impl MessageService {
    pub async fn from_config(config: MessageConfig) -> RecorderResult<Self> {
        Ok(Self { config })
    }
}
@ -52,9 +52,7 @@ pub enum Bangumi {
     RssLink,
     PosterLink,
     SavePath,
-    Deleted,
     Homepage,
-    Extra,
 }
 
 #[derive(DeriveIden)]
@ -85,7 +83,6 @@ pub enum Episodes {
     EpisodeIndex,
     Homepage,
     Subtitle,
-    Deleted,
     Source,
     Extra,
 }

@ -50,7 +50,9 @@ impl MigrationTrait for Migration {
         create_postgres_enum_for_active_enum!(
             manager,
             subscriptions::SubscriptionCategoryEnum,
-            subscriptions::SubscriptionCategory::Mikan,
+            subscriptions::SubscriptionCategory::MikanSubscriber,
+            subscriptions::SubscriptionCategory::MikanBangumi,
+            subscriptions::SubscriptionCategory::MikanSeason,
             subscriptions::SubscriptionCategory::Manual
         )
         .await?;
@ -103,9 +105,7 @@ impl MigrationTrait for Migration {
                     .col(text_null(Bangumi::RssLink))
                     .col(text_null(Bangumi::PosterLink))
                     .col(text_null(Bangumi::SavePath))
-                    .col(boolean(Bangumi::Deleted).default(false))
                     .col(text_null(Bangumi::Homepage))
-                    .col(json_binary_null(Bangumi::Extra))
                     .foreign_key(
                         ForeignKey::create()
                             .name("fk_bangumi_subscriber_id")
@ -180,6 +180,17 @@ impl MigrationTrait for Migration {
                             .on_update(ForeignKeyAction::Cascade)
                             .on_delete(ForeignKeyAction::Cascade),
                     )
+                    .foreign_key(
+                        ForeignKey::create()
+                            .name("fk_subscription_bangumi_subscriber_id")
+                            .from(
+                                SubscriptionBangumi::Table,
+                                SubscriptionBangumi::SubscriberId,
+                            )
+                            .to(Subscribers::Table, Subscribers::Id)
+                            .on_update(ForeignKeyAction::Cascade)
+                            .on_delete(ForeignKeyAction::Cascade),
+                    )
                     .index(
                         Index::create()
                             .if_not_exists()
@ -197,7 +208,7 @@ impl MigrationTrait for Migration {
             .create_index(
                 Index::create()
                     .if_not_exists()
-                    .name("index_subscription_bangumi_subscriber_id")
+                    .name("idx_subscription_bangumi_subscriber_id")
                     .table(SubscriptionBangumi::Table)
                     .col(SubscriptionBangumi::SubscriberId)
                     .to_owned(),
@ -222,9 +233,7 @@ impl MigrationTrait for Migration {
                     .col(integer(Episodes::EpisodeIndex))
                     .col(text_null(Episodes::Homepage))
                     .col(text_null(Episodes::Subtitle))
-                    .col(boolean(Episodes::Deleted).default(false))
                     .col(text_null(Episodes::Source))
-                    .col(json_binary_null(Episodes::Extra))
                     .foreign_key(
                         ForeignKey::create()
                             .name("fk_episodes_bangumi_id")
@ -241,6 +250,15 @@ impl MigrationTrait for Migration {
                             .on_update(ForeignKeyAction::Cascade)
                             .on_delete(ForeignKeyAction::Cascade),
                     )
+                    .index(
+                        Index::create()
+                            .if_not_exists()
+                            .name("idx_episodes_mikan_episode_id_subscriber_id")
+                            .table(Episodes::Table)
+                            .col(Episodes::MikanEpisodeId)
+                            .col(Episodes::SubscriberId)
+                            .unique(),
+                    )
                     .to_owned(),
             )
             .await?;
@ -256,19 +274,6 @@ impl MigrationTrait for Migration {
             )
             .await?;
 
-        manager
-            .create_index(
-                Index::create()
-                    .if_not_exists()
-                    .name("idx_episodes_bangumi_id_mikan_episode_id")
-                    .table(Episodes::Table)
-                    .col(Episodes::BangumiId)
-                    .col(Episodes::MikanEpisodeId)
-                    .unique()
-                    .to_owned(),
-            )
-            .await?;
-
         manager
             .create_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt)
             .await?;
@ -299,6 +304,17 @@ impl MigrationTrait for Migration {
                             .on_update(ForeignKeyAction::Cascade)
                             .on_delete(ForeignKeyAction::Cascade),
                     )
+                    .foreign_key(
+                        ForeignKey::create()
+                            .name("fk_subscription_episode_subscriber_id")
+                            .from(
+                                SubscriptionEpisode::Table,
+                                SubscriptionEpisode::SubscriberId,
+                            )
+                            .to(Subscribers::Table, Subscribers::Id)
+                            .on_update(ForeignKeyAction::Cascade)
+                            .on_delete(ForeignKeyAction::Cascade),
+                    )
                     .index(
                         Index::create()
                             .if_not_exists()
@ -316,7 +332,7 @@ impl MigrationTrait for Migration {
             .create_index(
                 Index::create()
                     .if_not_exists()
-                    .name("index_subscription_episode_subscriber_id")
+                    .name("idx_subscription_episode_subscriber_id")
                     .table(SubscriptionEpisode::Table)
                     .col(SubscriptionEpisode::SubscriberId)
                     .to_owned(),
@ -331,14 +347,19 @@ impl MigrationTrait for Migration {
             .drop_index(
                 Index::drop()
                     .if_exists()
-                    .name("index_subscription_episode_subscriber_id")
+                    .name("idx_subscription_episode_subscriber_id")
                     .table(SubscriptionBangumi::Table)
                     .to_owned(),
             )
             .await?;
 
         manager
-            .drop_table(Table::drop().table(SubscriptionEpisode::Table).to_owned())
+            .drop_table(
+                Table::drop()
+                    .if_exists()
+                    .table(SubscriptionEpisode::Table)
+                    .to_owned(),
+            )
             .await?;
 
         manager
@ -346,21 +367,26 @@ impl MigrationTrait for Migration {
             .await?;
 
         manager
-            .drop_table(Table::drop().table(Episodes::Table).to_owned())
+            .drop_table(Table::drop().if_exists().table(Episodes::Table).to_owned())
             .await?;
 
         manager
             .drop_index(
                 Index::drop()
                     .if_exists()
-                    .name("index_subscription_bangumi_subscriber_id")
+                    .name("idx_subscription_bangumi_subscriber_id")
                     .table(SubscriptionBangumi::Table)
                     .to_owned(),
             )
             .await?;
 
         manager
-            .drop_table(Table::drop().table(SubscriptionBangumi::Table).to_owned())
+            .drop_table(
+                Table::drop()
+                    .if_exists()
+                    .table(SubscriptionBangumi::Table)
+                    .to_owned(),
+            )
             .await?;
 
         manager
@ -368,7 +394,7 @@ impl MigrationTrait for Migration {
             .await?;
 
         manager
-            .drop_table(Table::drop().table(Bangumi::Table).to_owned())
+            .drop_table(Table::drop().if_exists().table(Bangumi::Table).to_owned())
             .await?;
 
         manager
@ -379,7 +405,12 @@ impl MigrationTrait for Migration {
             .await?;
 
         manager
-            .drop_table(Table::drop().table(Subscriptions::Table).to_owned())
+            .drop_table(
+                Table::drop()
+                    .if_exists()
+                    .table(Subscriptions::Table)
+                    .to_owned(),
+            )
             .await?;
 
         manager
@ -387,7 +418,12 @@ impl MigrationTrait for Migration {
             .await?;
 
         manager
-            .drop_table(Table::drop().table(Subscribers::Table).to_owned())
+            .drop_table(
+                Table::drop()
+                    .if_exists()
+                    .table(Subscribers::Table)
+                    .to_owned(),
+            )
             .await?;
 
         manager
@ -88,7 +88,7 @@ impl MigrationTrait for Migration {
                     .col(enumeration(
                         Downloads::Status,
                         DownloadStatusEnum,
-                        DownloadMime::iden_values(),
+                        DownloadStatus::iden_values(),
                     ))
                     .col(enumeration(
                         Downloads::Mime,
@ -158,7 +158,7 @@ impl MigrationTrait for Migration {
             .await?;
 
         manager
-            .drop_table(Table::drop().table(Downloads::Table).to_owned())
+            .drop_table(Table::drop().if_exists().table(Downloads::Table).to_owned())
             .await?;
 
         manager
@ -174,7 +174,12 @@ impl MigrationTrait for Migration {
             .await?;
 
         manager
-            .drop_table(Table::drop().table(Downloaders::Table).to_owned())
+            .drop_table(
+                Table::drop()
+                    .if_exists()
+                    .table(Downloaders::Table)
+                    .to_owned(),
+            )
             .await?;
 
         manager
@ -1,103 +0,0 @@ (deleted migration; it created the Downloaders table and the Subscribers::DownloaderId column)
use sea_orm_migration::{prelude::*, schema::*};

use super::defs::table_auto_z;
use crate::{
    migrations::defs::{CustomSchemaManagerExt, Downloaders, GeneralIds, Subscribers},
    models::downloaders::{DownloaderCategory, DownloaderCategoryEnum},
};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        create_postgres_enum_for_active_enum!(
            manager,
            DownloaderCategoryEnum,
            DownloaderCategory::QBittorrent
        )
        .await?;

        manager
            .create_table(
                table_auto_z(Downloaders::Table)
                    .col(pk_auto(Downloaders::Id))
                    .col(text(Downloaders::Endpoint))
                    .col(string_null(Downloaders::Username))
                    .col(string_null(Downloaders::Password))
                    .col(enumeration(
                        Downloaders::Category,
                        DownloaderCategoryEnum,
                        DownloaderCategory::iden_values(),
                    ))
                    .col(text(Downloaders::SavePath))
                    .col(integer(Downloaders::SubscriberId))
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_downloader_subscriber_id")
                            .from_tbl(Downloaders::Table)
                            .from_col(Downloaders::SubscriberId)
                            .to_tbl(Subscribers::Table)
                            .to_col(Subscribers::Id)
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Restrict),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_postgres_auto_update_ts_trigger_for_col(
                Downloaders::Table,
                GeneralIds::UpdatedAt,
            )
            .await?;

        manager
            .alter_table(
                Table::alter()
                    .table(Subscribers::Table)
                    .add_column_if_not_exists(integer_null(Subscribers::DownloaderId))
                    .add_foreign_key(
                        TableForeignKey::new()
                            .name("fk_subscribers_downloader_id")
                            .from_tbl(Subscribers::Table)
                            .from_col(Subscribers::DownloaderId)
                            .to_tbl(Downloaders::Table)
                            .to_col(Downloaders::Id)
                            .on_delete(ForeignKeyAction::SetNull)
                            .on_update(ForeignKeyAction::Restrict),
                    )
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Subscribers::Table)
                    .drop_foreign_key(Alias::new("fk_subscribers_downloader_id"))
                    .drop_column(Subscribers::DownloaderId)
                    .to_owned(),
            )
            .await?;

        manager
            .drop_postgres_auto_update_ts_trigger_for_col(Downloaders::Table, GeneralIds::UpdatedAt)
            .await?;

        manager
            .drop_table(Table::drop().table(Downloaders::Table).to_owned())
            .await?;

        manager
            .drop_postgres_enum_for_active_enum(DownloaderCategoryEnum)
            .await?;

        Ok(())
    }
}
@ -52,6 +52,7 @@ impl MigrationTrait for Migration {
         manager
             .create_index(
                 Index::create()
+                    .if_not_exists()
                     .name("idx_auth_pid_auth_type")
                     .unique()
                     .table(Auth::Table)
@ -102,7 +103,7 @@ impl MigrationTrait for Migration {
             .await?;
 
         manager
-            .drop_table(Table::drop().table(Auth::Table).to_owned())
+            .drop_table(Table::drop().if_exists().table(Auth::Table).to_owned())
             .await?;
 
         manager

@@ -28,7 +28,11 @@ impl MigrationTrait for Migration {
         table_auto_z(Credential3rd::Table)
             .col(pk_auto(Credential3rd::Id))
             .col(integer(Credential3rd::SubscriberId))
-            .col(string(Credential3rd::CredentialType))
+            .col(enumeration(
+                Credential3rd::CredentialType,
+                Credential3rdTypeEnum,
+                Credential3rdType::iden_values(),
+            ))
             .col(string_null(Credential3rd::Cookies))
             .col(string_null(Credential3rd::Username))
             .col(string_null(Credential3rd::Password))
@@ -48,6 +52,7 @@ impl MigrationTrait for Migration {
         manager
             .create_index(
                 Index::create()
+                    .if_not_exists()
                     .name("idx_credential_3rd_credential_type")
                     .table(Credential3rd::Table)
                     .col(Credential3rd::CredentialType)
@@ -95,7 +100,19 @@ impl MigrationTrait for Migration {
             .await?;

         manager
-            .drop_table(Table::drop().table(Credential3rd::Table).to_owned())
+            .drop_postgres_auto_update_ts_trigger_for_col(
+                Credential3rd::Table,
+                GeneralIds::UpdatedAt,
+            )
+            .await?;
+
+        manager
+            .drop_table(
+                Table::drop()
+                    .if_exists()
+                    .table(Credential3rd::Table)
+                    .to_owned(),
+            )
             .await?;

         manager
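Note: the `if_not_exists` / `if_exists` guards added above make both directions of this migration re-runnable against a partially migrated database. A minimal sketch of the same idempotent-drop pattern for an arbitrary table (the table name is a placeholder, not one from this codebase):

    // Idempotent drop: succeeds whether or not the table still exists.
    manager
        .drop_table(
            Table::drop()
                .if_exists()
                .table(Alias::new("example_table"))
                .to_owned(),
        )
        .await?;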
@@ -0,0 +1,64 @@
+use async_trait::async_trait;
+use sea_orm_migration::prelude::*;
+
+use crate::task::SUBSCRIBER_TASK_APALIS_NAME;
+
+#[derive(DeriveMigrationName)]
+pub struct Migration;
+
+#[async_trait]
+impl MigrationTrait for Migration {
+    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
+        let db = manager.get_connection();
+
+        db.execute_unprepared(&format!(
+            r#"CREATE OR REPLACE VIEW subscriber_tasks AS
+            SELECT
+                job,
+                job_type,
+                status,
+                (job ->> 'subscriber_id'::text)::integer AS subscriber_id,
+                job ->> 'task_type'::text AS task_type,
+                id,
+                attempts,
+                max_attempts,
+                run_at,
+                last_error,
+                lock_at,
+                lock_by,
+                done_at,
+                priority
+            FROM apalis.jobs
+            WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
+                AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
+                AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
+        ))
+        .await?;
+
+        db.execute_unprepared(&format!(
+            r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscriber_id
+            ON apalis.jobs (((job -> 'subscriber_id')::integer))
+            WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
+                AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
+                AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
+        ))
+        .await?;
+
+        Ok(())
+    }
+
+    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
+        let db = manager.get_connection();
+
+        db.execute_unprepared(
+            r#"DROP INDEX IF EXISTS idx_apalis_jobs_subscriber_id
+            ON apalis.jobs"#,
+        )
+        .await?;
+
+        db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
+            .await?;
+
+        Ok(())
+    }
+}
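The migration above exposes apalis job rows as a typed `subscriber_tasks` view and backs the common lookup with a partial expression index whose predicate mirrors the view's WHERE clause. A quick smoke-test sketch for the view, assuming a Postgres connection handle `db` is in scope (the binding is an assumption, not code from this changeset):

    use sea_orm::{ConnectionTrait, DbBackend, Statement};

    // Count one subscriber's tasks through the view created by the migration.
    let stmt = Statement::from_sql_and_values(
        DbBackend::Postgres,
        "SELECT COUNT(*) FROM subscriber_tasks WHERE subscriber_id = $1",
        [1i32.into()],
    );
    let row = db.query_one(stmt).await?;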
@@ -5,9 +5,9 @@ pub use sea_orm_migration::prelude::*;
 pub mod defs;
 pub mod m20220101_000001_init;
 pub mod m20240224_082543_add_downloads;
-pub mod m20240225_060853_subscriber_add_downloader;
 pub mod m20241231_000001_auth;
 pub mod m20250501_021523_credential_3rd;
+pub mod m20250520_021135_subscriber_tasks;

 pub struct Migrator;

@@ -17,9 +17,9 @@ impl MigratorTrait for Migrator {
         vec![
             Box::new(m20220101_000001_init::Migration),
             Box::new(m20240224_082543_add_downloads::Migration),
-            Box::new(m20240225_060853_subscriber_add_downloader::Migration),
             Box::new(m20241231_000001_auth::Migration),
             Box::new(m20250501_021523_credential_3rd::Migration),
+            Box::new(m20250520_021135_subscriber_tasks::Migration),
         ]
     }
 }
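A new migration has to appear twice: as a `pub mod` declaration and as an entry in `Migrator::migrations()`, whose vector order is the apply order. Applying the chain is then the usual one-liner (a sketch; the connection binding `db` is an assumption):

    use sea_orm_migration::MigratorTrait;

    // Runs every pending migration in the order listed above.
    Migrator::up(&db, None).await?;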
@@ -1,10 +1,25 @@
 use async_graphql::SimpleObject;
 use async_trait::async_trait;
-use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::OnConflict};
+use sea_orm::{
+    ActiveValue, Condition, FromJsonQueryResult, FromQueryResult, IntoSimpleExpr, JoinType,
+    QuerySelect,
+    entity::prelude::*,
+    sea_query::{Alias, IntoCondition, OnConflict},
+};
 use serde::{Deserialize, Serialize};

 use super::subscription_bangumi;
-use crate::{app::AppContextTrait, errors::RecorderResult};
+use crate::{
+    app::AppContextTrait,
+    errors::RecorderResult,
+    extract::{
+        mikan::{
+            MikanBangumiHash, MikanBangumiMeta, build_mikan_bangumi_subscription_rss_url,
+            scrape_mikan_poster_meta_from_image_url,
+        },
+        rawname::extract_season_from_title_body,
+    },
+};

 #[derive(
     Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
@@ -14,18 +29,6 @@ pub struct BangumiFilter {
     pub group: Option<Vec<String>>,
 }

-#[derive(
-    Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
-)]
-pub struct BangumiExtra {
-    pub name_zh: Option<String>,
-    pub s_name_zh: Option<String>,
-    pub name_en: Option<String>,
-    pub s_name_en: Option<String>,
-    pub name_jp: Option<String>,
-    pub s_name_jp: Option<String>,
-}
-
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
 #[sea_orm(table_name = "bangumi")]
 pub struct Model {
@@ -47,10 +50,7 @@ pub struct Model {
     pub rss_link: Option<String>,
     pub poster_link: Option<String>,
     pub save_path: Option<String>,
-    #[sea_orm(default = "false")]
-    pub deleted: bool,
     pub homepage: Option<String>,
-    pub extra: Option<BangumiExtra>,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -111,38 +111,116 @@ pub enum RelatedEntity {
     SubscriptionBangumi,
 }

+impl ActiveModel {
+    #[tracing::instrument(err, skip_all, fields(mikan_bangumi_id = %meta.mikan_bangumi_id, mikan_fansub_id = %meta.mikan_fansub_id, subscriber_id = %subscriber_id))]
+    pub async fn from_mikan_bangumi_meta(
+        ctx: &dyn AppContextTrait,
+        meta: MikanBangumiMeta,
+        subscriber_id: i32,
+        _subscription_id: i32,
+    ) -> RecorderResult<Self> {
+        let mikan_client = ctx.mikan();
+        let storage_service = ctx.storage();
+        let mikan_base_url = mikan_client.base_url();
+        let (_, season_raw, season_index) = extract_season_from_title_body(&meta.bangumi_title);
+
+        let rss_url = build_mikan_bangumi_subscription_rss_url(
+            mikan_base_url.clone(),
+            &meta.mikan_bangumi_id,
+            Some(&meta.mikan_fansub_id),
+        );
+
+        let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src {
+            let poster_meta = scrape_mikan_poster_meta_from_image_url(
+                mikan_client,
+                storage_service,
+                origin_poster_src,
+                subscriber_id,
+            )
+            .await?;
+            poster_meta.poster_src
+        } else {
+            None
+        };
+
+        Ok(Self {
+            mikan_bangumi_id: ActiveValue::Set(Some(meta.mikan_bangumi_id)),
+            mikan_fansub_id: ActiveValue::Set(Some(meta.mikan_fansub_id)),
+            subscriber_id: ActiveValue::Set(subscriber_id),
+            display_name: ActiveValue::Set(meta.bangumi_title.clone()),
+            raw_name: ActiveValue::Set(meta.bangumi_title),
+            season: ActiveValue::Set(season_index),
+            season_raw: ActiveValue::Set(season_raw),
+            fansub: ActiveValue::Set(Some(meta.fansub)),
+            poster_link: ActiveValue::Set(poster_link),
+            homepage: ActiveValue::Set(Some(meta.homepage.to_string())),
+            rss_link: ActiveValue::Set(Some(rss_url.to_string())),
+            ..Default::default()
+        })
+    }
+}
+
+#[async_trait]
+impl ActiveModelBehavior for ActiveModel {}
+
 impl Model {
     pub async fn get_or_insert_from_mikan<F>(
         ctx: &dyn AppContextTrait,
+        hash: MikanBangumiHash,
         subscriber_id: i32,
         subscription_id: i32,
-        mikan_bangumi_id: String,
-        mikan_fansub_id: String,
-        f: F,
-    ) -> RecorderResult<Model>
+        create_bangumi_fn: F,
+    ) -> RecorderResult<Self>
     where
-        F: AsyncFnOnce(&mut ActiveModel) -> RecorderResult<()>,
+        F: AsyncFnOnce() -> RecorderResult<ActiveModel>,
     {
+        #[derive(FromQueryResult)]
+        struct ModelWithIsSubscribed {
+            #[sea_orm(nested)]
+            bangumi: Model,
+            is_subscribed: bool,
+        }
+
         let db = ctx.db();
-        if let Some(existed) = Entity::find()
+
+        let subscription_bangumi_alias = Alias::new("sb");
+        let mut is_subscribed = false;
+        let new_bangumi_model = if let Some(existed) = Entity::find()
             .filter(
-                Column::MikanBangumiId
-                    .eq(Some(mikan_bangumi_id.clone()))
-                    .and(Column::MikanFansubId.eq(Some(mikan_fansub_id.clone()))),
+                Condition::all()
+                    .add(Column::MikanBangumiId.eq(Some(hash.mikan_bangumi_id)))
+                    .add(Column::MikanFansubId.eq(Some(hash.mikan_fansub_id)))
+                    .add(Column::SubscriberId.eq(subscriber_id)),
             )
+            .column_as(
+                Expr::col((
+                    subscription_bangumi_alias.clone(),
+                    subscription_bangumi::Column::SubscriptionId,
+                ))
+                .is_not_null(),
+                "is_subscribed",
+            )
+            .join_as_rev(
+                JoinType::LeftJoin,
+                subscription_bangumi::Relation::Bangumi
+                    .def()
+                    .on_condition(move |left, _right| {
+                        Expr::col((left, subscription_bangumi::Column::SubscriptionId))
+                            .eq(subscription_id)
+                            .into_condition()
+                    }),
+                subscription_bangumi_alias.clone(),
+            )
+            .into_model::<ModelWithIsSubscribed>()
             .one(db)
             .await?
         {
-            Ok(existed)
+            is_subscribed = existed.is_subscribed;
+            existed.bangumi
         } else {
-            let mut bgm = ActiveModel {
-                mikan_bangumi_id: ActiveValue::Set(Some(mikan_bangumi_id)),
-                mikan_fansub_id: ActiveValue::Set(Some(mikan_fansub_id)),
-                subscriber_id: ActiveValue::Set(subscriber_id),
-                ..Default::default()
-            };
-            f(&mut bgm).await?;
-            let bgm = Entity::insert(bgm)
+            let new_bangumi_active_model = create_bangumi_fn().await?;
+
+            Entity::insert(new_bangumi_active_model)
                 .on_conflict(
                     OnConflict::columns([
                         Column::MikanBangumiId,
@@ -151,28 +229,96 @@ impl Model {
                    ])
                    .update_columns([
                        Column::RawName,
-                        Column::Extra,
                        Column::Fansub,
                        Column::PosterLink,
                        Column::Season,
                        Column::SeasonRaw,
+                        Column::RssLink,
+                        Column::Homepage,
                    ])
                    .to_owned(),
                )
                .exec_with_returning(db)
-                .await?;
+                .await?
+        };
+
+        if !is_subscribed {
             subscription_bangumi::Entity::insert(subscription_bangumi::ActiveModel {
                 subscription_id: ActiveValue::Set(subscription_id),
-                bangumi_id: ActiveValue::Set(bgm.id),
+                bangumi_id: ActiveValue::Set(new_bangumi_model.id),
+                subscriber_id: ActiveValue::Set(subscriber_id),
                 ..Default::default()
             })
-            .on_conflict_do_nothing()
-            .exec(db)
+            .on_conflict(
+                OnConflict::columns([
+                    subscription_bangumi::Column::SubscriptionId,
+                    subscription_bangumi::Column::BangumiId,
+                ])
+                .do_nothing()
+                .to_owned(),
+            )
+            .exec_without_returning(db)
             .await?;
-            Ok(bgm)
         }
+
+        Ok(new_bangumi_model)
+    }
+
+    pub async fn get_existed_mikan_bangumi_list(
+        ctx: &dyn AppContextTrait,
+        hashes: impl Iterator<Item = MikanBangumiHash>,
+        subscriber_id: i32,
+        _subscription_id: i32,
+    ) -> RecorderResult<impl Iterator<Item = (i32, MikanBangumiHash)>> {
+        Ok(Entity::find()
+            .select_only()
+            .column(Column::Id)
+            .column(Column::MikanBangumiId)
+            .column(Column::MikanFansubId)
+            .filter(
+                Expr::tuple([
+                    Column::MikanBangumiId.into_simple_expr(),
+                    Column::MikanFansubId.into_simple_expr(),
+                    Column::SubscriberId.into_simple_expr(),
+                ])
+                .in_tuples(hashes.map(|hash| {
+                    (
+                        hash.mikan_bangumi_id.clone(),
+                        hash.mikan_fansub_id.clone(),
+                        subscriber_id,
+                    )
+                })),
+            )
+            .into_tuple::<(i32, String, String)>()
+            .all(ctx.db())
+            .await?
+            .into_iter()
+            .map(|(bangumi_id, mikan_bangumi_id, mikan_fansub_id)| {
+                (
+                    bangumi_id,
+                    MikanBangumiHash {
+                        mikan_bangumi_id,
+                        mikan_fansub_id,
+                    },
+                )
+            }))
+    }
+
+    pub async fn get_subsribed_bangumi_list_from_subscription(
+        ctx: &dyn AppContextTrait,
+        subscription_id: i32,
+    ) -> RecorderResult<Vec<Self>> {
+        let db = ctx.db();
+        let bangumi_list = Entity::find()
+            .filter(
+                Condition::all()
+                    .add(subscription_bangumi::Column::SubscriptionId.eq(subscription_id)),
+            )
+            .join_rev(
+                JoinType::InnerJoin,
+                subscription_bangumi::Relation::Bangumi.def(),
+            )
+            .all(db)
+            .await?;
+
+        Ok(bangumi_list)
     }
 }
-
-#[async_trait]
-impl ActiveModelBehavior for ActiveModel {}
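Note the shape change in `get_or_insert_from_mikan`: the caller now passes an async factory that builds the whole `ActiveModel`, and the factory only runs on the insert path, so scraping is skipped when the bangumi already exists for `(hash, subscriber_id)`. A minimal call-site sketch (the `ctx`, `hash`, and `meta` bindings are assumptions):

    // Fetch-or-insert; `from_mikan_bangumi_meta` is only awaited when no
    // matching row exists, so the poster scrape happens at most once.
    let bangumi_model = bangumi::Model::get_or_insert_from_mikan(
        ctx,
        hash,
        subscriber_id,
        subscription_id,
        async || {
            bangumi::ActiveModel::from_mikan_bangumi_meta(ctx, meta, subscriber_id, subscription_id)
                .await
        },
    )
    .await?;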

@@ -1,5 +1,3 @@
-use std::sync::Arc;
-
 use async_trait::async_trait;
 use sea_orm::{ActiveValue, prelude::*};
 use serde::{Deserialize, Serialize};
@@ -18,6 +16,7 @@ use crate::{
     db_type = "Enum",
     enum_name = "credential_3rd_type"
 )]
+#[serde(rename_all = "snake_case")]
 pub enum Credential3rdType {
     #[sea_orm(string_value = "mikan")]
     Mikan,
@@ -66,25 +65,33 @@ impl Related<super::subscriptions::Entity> for Entity {
     }
 }

+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
+pub enum RelatedEntity {
+    #[sea_orm(entity = "super::subscribers::Entity")]
+    Subscriber,
+    #[sea_orm(entity = "super::subscriptions::Entity")]
+    Subscription,
+}
+
 #[async_trait]
 impl ActiveModelBehavior for ActiveModel {}

 impl ActiveModel {
-    pub async fn try_encrypt(mut self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Self> {
+    pub async fn try_encrypt(mut self, ctx: &dyn AppContextTrait) -> RecorderResult<Self> {
         let crypto = ctx.crypto();

         if let ActiveValue::Set(Some(username)) = self.username {
-            let username_enc = crypto.encrypt_credentials(&username)?;
+            let username_enc = crypto.encrypt_string(username)?;
             self.username = ActiveValue::Set(Some(username_enc));
         }

         if let ActiveValue::Set(Some(password)) = self.password {
-            let password_enc = crypto.encrypt_credentials(&password)?;
+            let password_enc = crypto.encrypt_string(password)?;
             self.password = ActiveValue::Set(Some(password_enc));
         }

         if let ActiveValue::Set(Some(cookies)) = self.cookies {
-            let cookies_enc = crypto.encrypt_credentials(&cookies)?;
+            let cookies_enc = crypto.encrypt_string(cookies)?;
             self.cookies = ActiveValue::Set(Some(cookies_enc));
         }

@@ -93,19 +100,24 @@ impl ActiveModel {
 }

 impl Model {
-    pub async fn find_by_id(
-        ctx: Arc<dyn AppContextTrait>,
+    pub async fn find_by_id_and_subscriber_id(
+        ctx: &dyn AppContextTrait,
         id: i32,
+        subscriber_id: i32,
     ) -> RecorderResult<Option<Self>> {
         let db = ctx.db();
-        let credential = Entity::find_by_id(id).one(db).await?;
+        let credential = Entity::find()
+            .filter(Column::Id.eq(id))
+            .filter(Column::SubscriberId.eq(subscriber_id))
+            .one(db)
+            .await?;
+
         Ok(credential)
     }

     pub fn try_into_userpass_credential(
         self,
-        ctx: Arc<dyn AppContextTrait>,
+        ctx: &dyn AppContextTrait,
     ) -> RecorderResult<UserPassCredential> {
         let crypto = ctx.crypto();
         let username_enc = self
@@ -115,7 +127,7 @@ impl Model {
                 source: None.into(),
             })?;

-        let username: String = crypto.decrypt_credentials(&username_enc)?;
+        let username: String = crypto.decrypt_string(&username_enc)?;

         let password_enc = self
             .password
@@ -124,10 +136,10 @@ impl Model {
                 source: None.into(),
             })?;

-        let password: String = crypto.decrypt_credentials(&password_enc)?;
+        let password: String = crypto.decrypt_string(&password_enc)?;

         let cookies: Option<String> = if let Some(cookies_enc) = self.cookies {
-            let cookies = crypto.decrypt_credentials(&cookies_enc)?;
+            let cookies = crypto.decrypt_string(&cookies_enc)?;
             Some(cookies)
         } else {
             None
@@ -140,4 +152,31 @@ impl Model {
             user_agent: self.user_agent,
         })
     }
+
+    pub async fn check_available(self, ctx: &dyn AppContextTrait) -> RecorderResult<bool> {
+        let credential_id = self.id;
+        let subscriber_id = self.subscriber_id;
+        match self.credential_type {
+            Credential3rdType::Mikan => {
+                let mikan_client = {
+                    let userpass_credential: UserPassCredential =
+                        self.try_into_userpass_credential(ctx)?;
+                    ctx.mikan()
+                        .fork_with_userpass_credential(userpass_credential)
+                        .await?
+                };
+                let mut has_login = mikan_client.has_login().await?;
+                if !has_login {
+                    mikan_client.login().await?;
+                    has_login = true;
+                }
+                if has_login {
+                    mikan_client
+                        .sync_credential_cookies(ctx, credential_id, subscriber_id)
+                        .await?;
+                }
+                Ok(has_login)
+            }
+        }
+    }
 }
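A sketch of how the encryption helpers and the new availability check might compose at a call site; the plaintext bindings and `ctx` are assumptions, not code from this changeset, and the partially populated model relies on defaults for the remaining columns:

    use sea_orm::{ActiveModelTrait, ActiveValue};

    // Encrypt secret fields in place, persist, then verify the credential
    // can actually log in to Mikan (which also syncs refreshed cookies).
    let encrypted = credential_3rd::ActiveModel {
        credential_type: ActiveValue::Set(Credential3rdType::Mikan),
        username: ActiveValue::Set(Some(username)),
        password: ActiveValue::Set(Some(password)),
        ..Default::default()
    }
    .try_encrypt(ctx)
    .await?;
    let model = encrypted.insert(ctx.db()).await?;
    let available = model.check_available(ctx).await?;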
@@ -1,7 +1,7 @@
-use std::sync::Arc;
-
 use async_trait::async_trait;
-use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::OnConflict};
+use sea_orm::{
+    ActiveValue, IntoSimpleExpr, QuerySelect, entity::prelude::*, sea_query::OnConflict,
+};
 use serde::{Deserialize, Serialize};

 use super::{bangumi, query::InsertManyReturningExt, subscription_episode};
@@ -9,21 +9,11 @@ use crate::{
     app::AppContextTrait,
     errors::RecorderResult,
     extract::{
-        mikan::{MikanEpisodeMeta, build_mikan_episode_homepage_url},
-        rawname::parse_episode_meta_from_raw_name,
+        mikan::{MikanEpisodeHash, MikanEpisodeMeta, build_mikan_episode_homepage_url},
+        rawname::extract_episode_meta_from_raw_name,
     },
 };

-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, Default)]
-pub struct EpisodeExtra {
-    pub name_zh: Option<String>,
-    pub s_name_zh: Option<String>,
-    pub name_en: Option<String>,
-    pub s_name_en: Option<String>,
-    pub name_jp: Option<String>,
-    pub s_name_jp: Option<String>,
-}
-
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "episodes")]
 pub struct Model {
@@ -48,10 +38,7 @@ pub struct Model {
     pub episode_index: i32,
     pub homepage: Option<String>,
     pub subtitle: Option<String>,
-    #[sea_orm(default = "false")]
-    pub deleted: bool,
     pub source: Option<String>,
-    pub extra: EpisodeExtra,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -128,34 +115,123 @@ pub enum RelatedEntity {
     SubscriptionEpisode,
 }

-#[derive(Clone, Debug, PartialEq)]
-pub struct MikanEpsiodeCreation {
-    pub episode: MikanEpisodeMeta,
-    pub bangumi: Arc<bangumi::Model>,
+impl ActiveModel {
+    #[tracing::instrument(err, skip(ctx), fields(bangumi_id = ?bangumi.id, mikan_episode_id = ?episode.mikan_episode_id))]
+    pub fn from_mikan_bangumi_and_episode_meta(
+        ctx: &dyn AppContextTrait,
+        bangumi: &bangumi::Model,
+        episode: MikanEpisodeMeta,
+    ) -> RecorderResult<Self> {
+        let mikan_base_url = ctx.mikan().base_url().clone();
+        let episode_extention_meta = extract_episode_meta_from_raw_name(&episode.episode_title)
+            .inspect_err(|err| {
+                tracing::error!(
+                    err = ?err,
+                    episode_title = ?episode.episode_title,
+                    "Failed to parse episode extension meta from episode title, skip"
+                );
+            })
+            .ok();
+        let homepage = build_mikan_episode_homepage_url(mikan_base_url, &episode.mikan_episode_id);
+
+        let mut episode_active_model = Self {
+            mikan_episode_id: ActiveValue::Set(Some(episode.mikan_episode_id)),
+            raw_name: ActiveValue::Set(episode.episode_title.clone()),
+            display_name: ActiveValue::Set(episode.episode_title.clone()),
+            bangumi_id: ActiveValue::Set(bangumi.id),
+            subscriber_id: ActiveValue::Set(bangumi.subscriber_id),
+            homepage: ActiveValue::Set(Some(homepage.to_string())),
+            season_raw: ActiveValue::Set(bangumi.season_raw.clone()),
+            season: ActiveValue::Set(bangumi.season),
+            fansub: ActiveValue::Set(bangumi.fansub.clone()),
+            poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
+            episode_index: ActiveValue::Set(0),
+            ..Default::default()
+        };
+
+        if let Some(episode_extention_meta) = episode_extention_meta {
+            episode_active_model.episode_index =
+                ActiveValue::Set(episode_extention_meta.episode_index);
+            episode_active_model.subtitle = ActiveValue::Set(episode_extention_meta.subtitle);
+            episode_active_model.source = ActiveValue::Set(episode_extention_meta.source);
+            episode_active_model.resolution = ActiveValue::Set(episode_extention_meta.resolution);
+            if episode_extention_meta.season > 0 {
+                episode_active_model.season = ActiveValue::Set(episode_extention_meta.season);
+            }
+            if episode_extention_meta.season_raw.is_some() {
+                episode_active_model.season_raw =
+                    ActiveValue::Set(episode_extention_meta.season_raw);
+            }
+            if episode_extention_meta.fansub.is_some() {
+                episode_active_model.fansub = ActiveValue::Set(episode_extention_meta.fansub);
+            }
+        }
+
+        Ok(episode_active_model)
+    }
 }

+#[async_trait]
+impl ActiveModelBehavior for ActiveModel {}
+
 impl Model {
-    pub async fn add_episodes(
+    pub async fn get_existed_mikan_episode_list(
         ctx: &dyn AppContextTrait,
+        ids: impl Iterator<Item = MikanEpisodeHash>,
+        subscriber_id: i32,
+        _subscription_id: i32,
+    ) -> RecorderResult<impl Iterator<Item = (i32, MikanEpisodeHash, i32)>> {
+        let db = ctx.db();
+
+        Ok(Entity::find()
+            .select_only()
+            .column(Column::Id)
+            .column(Column::MikanEpisodeId)
+            .column(Column::BangumiId)
+            .filter(
+                Expr::tuple([
+                    Column::MikanEpisodeId.into_simple_expr(),
+                    Column::SubscriberId.into_simple_expr(),
+                ])
+                .in_tuples(
+                    ids.into_iter()
+                        .map(|id| (id.mikan_episode_id, subscriber_id)),
+                ),
+            )
+            .into_tuple::<(i32, String, i32)>()
+            .all(db)
+            .await?
+            .into_iter()
+            .map(|(episode_id, mikan_episode_id, bangumi_id)| {
+                (
+                    episode_id,
+                    MikanEpisodeHash { mikan_episode_id },
+                    bangumi_id,
+                )
+            }))
+    }
+
+    pub async fn add_mikan_episodes_for_subscription(
+        ctx: &dyn AppContextTrait,
+        creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta)>,
         subscriber_id: i32,
         subscription_id: i32,
-        creations: impl IntoIterator<Item = MikanEpsiodeCreation>,
     ) -> RecorderResult<()> {
         let db = ctx.db();
-        let new_episode_active_modes = creations
-            .into_iter()
-            .map(|cr| ActiveModel::from_mikan_episode_meta(ctx, cr))
-            .inspect(|result| {
-                if let Err(e) = result {
-                    tracing::warn!("Failed to create episode: {:?}", e);
-                }
+        let new_episode_active_modes: Vec<ActiveModel> = creations
+            .map(|(bangumi, episode_meta)| {
+                ActiveModel::from_mikan_bangumi_and_episode_meta(ctx, bangumi, episode_meta)
             })
-            .flatten();
+            .collect::<Result<_, _>>()?;

-        let inserted_episodes = Entity::insert_many(new_episode_active_modes)
+        if new_episode_active_modes.is_empty() {
+            return Ok(());
+        }
+
+        let new_episode_ids = Entity::insert_many(new_episode_active_modes)
             .on_conflict(
-                OnConflict::columns([Column::BangumiId, Column::MikanEpisodeId])
-                    .do_nothing()
+                OnConflict::columns([Column::MikanEpisodeId, Column::SubscriberId])
+                    .update_columns([Column::RawName, Column::PosterLink, Column::Homepage])
                     .to_owned(),
             )
             .exec_with_returning_columns(db, [Column::Id])
@@ -163,79 +239,14 @@ impl Model {
             .into_iter()
             .flat_map(|r| r.try_get_many_by_index::<i32>());

-        let insert_subscription_episode_links = inserted_episodes.into_iter().map(|episode_id| {
-            subscription_episode::ActiveModel::from_subscription_and_episode(
-                subscriber_id,
-                subscription_id,
-                episode_id,
-            )
-        });
-
-        subscription_episode::Entity::insert_many(insert_subscription_episode_links)
-            .on_conflict(
-                OnConflict::columns([
-                    subscription_episode::Column::SubscriptionId,
-                    subscription_episode::Column::EpisodeId,
-                ])
-                .do_nothing()
-                .to_owned(),
-            )
-            .exec(db)
-            .await?;
+        subscription_episode::Model::add_episodes_for_subscription(
+            ctx,
+            new_episode_ids,
+            subscriber_id,
+            subscription_id,
+        )
+        .await?;

         Ok(())
     }
 }
-
-impl ActiveModel {
-    pub fn from_mikan_episode_meta(
-        ctx: &dyn AppContextTrait,
-        creation: MikanEpsiodeCreation,
-    ) -> RecorderResult<Self> {
-        let item = creation.episode;
-        let bgm = creation.bangumi;
-        let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title)
-            .inspect_err(|e| {
-                tracing::warn!("Failed to parse episode meta: {:?}", e);
-            })
-            .ok()
-            .unwrap_or_default();
-        let homepage = build_mikan_episode_homepage_url(
-            ctx.mikan().base_url().clone(),
-            &item.mikan_episode_id,
-        );
-
-        Ok(Self {
-            mikan_episode_id: ActiveValue::Set(Some(item.mikan_episode_id)),
-            raw_name: ActiveValue::Set(item.episode_title.clone()),
-            display_name: ActiveValue::Set(item.episode_title.clone()),
-            bangumi_id: ActiveValue::Set(bgm.id),
-            subscriber_id: ActiveValue::Set(bgm.subscriber_id),
-            resolution: ActiveValue::Set(raw_meta.resolution),
-            season: ActiveValue::Set(if raw_meta.season > 0 {
-                raw_meta.season
-            } else {
-                bgm.season
-            }),
-            season_raw: ActiveValue::Set(raw_meta.season_raw.or_else(|| bgm.season_raw.clone())),
-            fansub: ActiveValue::Set(raw_meta.fansub.or_else(|| bgm.fansub.clone())),
-            poster_link: ActiveValue::Set(bgm.poster_link.clone()),
-            episode_index: ActiveValue::Set(raw_meta.episode_index),
-            homepage: ActiveValue::Set(Some(homepage.to_string())),
-            subtitle: ActiveValue::Set(raw_meta.subtitle),
-            source: ActiveValue::Set(raw_meta.source),
-            extra: ActiveValue::Set(EpisodeExtra {
-                name_zh: raw_meta.name_zh,
-                name_en: raw_meta.name_en,
-                name_jp: raw_meta.name_jp,
-                s_name_en: raw_meta.name_en_no_season,
-                s_name_jp: raw_meta.name_jp_no_season,
-                s_name_zh: raw_meta.name_zh_no_season,
-            }),
-            ..Default::default()
-        })
-    }
-}
-
-#[async_trait]
-impl ActiveModelBehavior for ActiveModel {}
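The `Expr::tuple([...]).in_tuples(...)` filter above compiles to a composite `WHERE (mikan_episode_id, subscriber_id) IN ((...), ...)` predicate, so one round trip classifies an entire RSS page into known and new episodes. A hedged sketch of the intended sync flow (the surrounding bindings are assumptions):

    // Split feed items into already-stored episodes and genuinely new ones,
    // then upsert the new ones and (re)link everything to the subscription.
    let existed: Vec<(i32, MikanEpisodeHash, i32)> =
        episodes::Model::get_existed_mikan_episode_list(
            ctx,
            feed_hashes.iter().cloned(),
            subscriber_id,
            subscription_id,
        )
        .await?
        .collect();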
@@ -5,9 +5,8 @@ pub mod downloaders;
 pub mod downloads;
 pub mod episodes;
 pub mod query;
+pub mod subscriber_tasks;
 pub mod subscribers;
 pub mod subscription_bangumi;
 pub mod subscription_episode;
 pub mod subscriptions;
-pub mod task_stream_item;
-pub mod tasks;

@@ -1,32 +1,9 @@
 use async_trait::async_trait;
 use sea_orm::{
-    prelude::Expr,
-    sea_query::{Alias, IntoColumnRef, IntoTableRef, Query, SelectStatement},
     ActiveModelTrait, ColumnTrait, ConnectionTrait, DbErr, EntityTrait, Insert, IntoActiveModel,
-    Iterable, QueryResult, QueryTrait, SelectModel, SelectorRaw, Value,
+    Iterable, QueryResult, QueryTrait, SelectModel, SelectorRaw, sea_query::Query,
 };

-pub fn filter_values_in<
-    I: IntoIterator<Item = T>,
-    T: Into<Value>,
-    R: IntoTableRef,
-    C: IntoColumnRef + Copy,
->(
-    tbl_ref: R,
-    col_ref: C,
-    values: I,
-) -> SelectStatement {
-    Query::select()
-        .expr(Expr::col((Alias::new("t"), Alias::new("column1"))))
-        .from_values(values, Alias::new("t"))
-        .left_join(
-            tbl_ref,
-            Expr::col((Alias::new("t"), Alias::new("column1"))).equals(col_ref),
-        )
-        .and_where(Expr::col(col_ref).is_not_null())
-        .to_owned()
-}
-
 #[async_trait]
 pub trait InsertManyReturningExt<A>: Sized
 where

70 apps/recorder/src/models/subscriber_tasks.rs Normal file
@@ -0,0 +1,70 @@
+use async_trait::async_trait;
+use sea_orm::entity::prelude::*;
+
+pub use crate::task::{
+    SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant,
+    SubscriberTaskTypeVariantIter,
+};
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)]
+#[sea_orm(rs_type = "String", db_type = "Text")]
+pub enum SubscriberTaskStatus {
+    #[sea_orm(string_value = "Pending")]
+    Pending,
+    #[sea_orm(string_value = "Scheduled")]
+    Scheduled,
+    #[sea_orm(string_value = "Running")]
+    Running,
+    #[sea_orm(string_value = "Done")]
+    Done,
+    #[sea_orm(string_value = "Failed")]
+    Failed,
+    #[sea_orm(string_value = "Killed")]
+    Killed,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "subscriber_tasks")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub id: String,
+    pub subscriber_id: i32,
+    pub job: SubscriberTask,
+    pub task_type: SubscriberTaskType,
+    pub status: SubscriberTaskStatus,
+    pub attempts: i32,
+    pub max_attempts: i32,
+    pub run_at: DateTimeUtc,
+    pub last_error: Option<String>,
+    pub lock_at: Option<DateTimeUtc>,
+    pub lock_by: Option<String>,
+    pub done_at: Option<DateTimeUtc>,
+    pub priority: i32,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+    #[sea_orm(
+        belongs_to = "super::subscribers::Entity",
+        from = "Column::SubscriberId",
+        to = "super::subscribers::Column::Id",
+        on_update = "Cascade",
+        on_delete = "Cascade"
+    )]
+    Subscriber,
+}
+
+impl Related<super::subscribers::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Subscriber.def()
+    }
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
+pub enum RelatedEntity {
+    #[sea_orm(entity = "super::subscribers::Entity")]
+    Subscriber,
+}
+
+#[async_trait]
+impl ActiveModelBehavior for ActiveModel {}
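Because this entity maps the `subscriber_tasks` Postgres view created by the migration above rather than a base table, it behaves as a read-only projection of `apalis.jobs`; writes should keep going through the task queue. A filtered read sketch (the `ctx` and `subscriber_id` bindings are assumptions):

    use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, QueryOrder};

    // List a subscriber's still-running tasks, newest first.
    let running = subscriber_tasks::Entity::find()
        .filter(subscriber_tasks::Column::SubscriberId.eq(subscriber_id))
        .filter(subscriber_tasks::Column::Status.eq(subscriber_tasks::SubscriberTaskStatus::Running))
        .order_by_desc(subscriber_tasks::Column::RunAt)
        .all(ctx.db())
        .await?;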
@@ -1,4 +1,3 @@
-use async_graphql::SimpleObject;
 use async_trait::async_trait;
 use sea_orm::{ActiveValue, FromJsonQueryResult, TransactionTrait, entity::prelude::*};
 use serde::{Deserialize, Serialize};
@@ -10,14 +9,12 @@ use crate::{

 pub const SEED_SUBSCRIBER: &str = "konobangu";

-#[derive(
-    Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
-)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
 pub struct SubscriberBangumiConfig {
     pub leading_group_tag: Option<bool>,
 }

-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "subscribers")]
 pub struct Model {
     #[sea_orm(default_expr = "Expr::current_timestamp()")]
@@ -42,6 +39,8 @@ pub enum Relation {
     Episode,
     #[sea_orm(has_many = "super::auth::Entity")]
     Auth,
+    #[sea_orm(has_many = "super::credential_3rd::Entity")]
+    Credential3rd,
 }

 impl Related<super::subscriptions::Entity> for Entity {
@@ -74,6 +73,12 @@ impl Related<super::auth::Entity> for Entity {
     }
 }

+impl Related<super::credential_3rd::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Credential3rd.def()
+    }
+}
+
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
 pub enum RelatedEntity {
     #[sea_orm(entity = "super::subscriptions::Entity")]
@@ -84,6 +89,8 @@ pub enum RelatedEntity {
     Bangumi,
     #[sea_orm(entity = "super::episodes::Entity")]
     Episode,
+    #[sea_orm(entity = "super::credential_3rd::Entity")]
+    Credential3rd,
 }

 #[derive(Debug, Deserialize, Serialize)]
@@ -106,7 +113,7 @@ impl Model {
         let subscriber = Entity::find_by_id(id)
             .one(db)
             .await?
-            .ok_or_else(|| RecorderError::from_db_record_not_found("subscriptions::find_by_id"))?;
+            .ok_or_else(|| RecorderError::from_db_record_not_found("subscribers::find_by_id"))?;
         Ok(subscriber)
     }

@@ -1,7 +1,9 @@
 use async_trait::async_trait;
-use sea_orm::{ActiveValue, entity::prelude::*};
+use sea_orm::{ActiveValue, entity::prelude::*, sea_query::OnConflict};
 use serde::{Deserialize, Serialize};

+use crate::{app::AppContextTrait, errors::RecorderResult};
+
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "subscription_bangumi")]
 pub struct Model {
@@ -30,6 +32,14 @@ pub enum Relation {
         on_delete = "Cascade"
     )]
     Bangumi,
+    #[sea_orm(
+        belongs_to = "super::subscribers::Entity",
+        from = "Column::SubscriberId",
+        to = "super::subscribers::Column::Id",
+        on_update = "Cascade",
+        on_delete = "Cascade"
+    )]
+    Subscriber,
 }

 impl Related<super::subscriptions::Entity> for Entity {
@@ -44,12 +54,20 @@ impl Related<super::bangumi::Entity> for Entity {
     }
 }

+impl Related<super::subscribers::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Subscriber.def()
+    }
+}
+
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
 pub enum RelatedEntity {
     #[sea_orm(entity = "super::subscriptions::Entity")]
     Subscription,
     #[sea_orm(entity = "super::bangumi::Entity")]
     Bangumi,
+    #[sea_orm(entity = "super::subscribers::Entity")]
+    Subscriber,
 }

 #[async_trait]
@@ -69,3 +87,39 @@ impl ActiveModel {
         }
     }
 }
+
+impl Model {
+    pub async fn add_bangumis_for_subscription(
+        ctx: &dyn AppContextTrait,
+        bangumi_ids: impl Iterator<Item = i32>,
+        subscriber_id: i32,
+        subscription_id: i32,
+    ) -> RecorderResult<()> {
+        let db = ctx.db();
+
+        let active_models = bangumi_ids
+            .map(|bangumi_id| {
+                ActiveModel::from_subscription_and_bangumi(
+                    subscriber_id,
+                    subscription_id,
+                    bangumi_id,
+                )
+            })
+            .collect::<Vec<_>>();
+
+        if active_models.is_empty() {
+            return Ok(());
+        }
+
+        Entity::insert_many(active_models)
+            .on_conflict(
+                OnConflict::columns([Column::SubscriptionId, Column::BangumiId])
+                    .do_nothing()
+                    .to_owned(),
+            )
+            .exec_without_returning(db)
+            .await?;
+
+        Ok(())
+    }
+}
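The bulk linker above is written to be idempotent: duplicate `(subscription_id, bangumi_id)` pairs are swallowed by `ON CONFLICT ... DO NOTHING`, and `exec_without_returning` skips a pointless RETURNING round trip. Usage sketch (bindings assumed):

    // Safe to call on every sync; re-linking existing bangumi is a no-op.
    subscription_bangumi::Model::add_bangumis_for_subscription(
        ctx,
        new_bangumi_ids.into_iter(),
        subscriber_id,
        subscription_id,
    )
    .await?;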
|||||||
@ -1,7 +1,9 @@
|
|||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use sea_orm::{ActiveValue, entity::prelude::*};
|
use sea_orm::{ActiveValue, entity::prelude::*, sea_query::OnConflict};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use crate::{app::AppContextTrait, errors::RecorderResult};
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
#[sea_orm(table_name = "subscription_episode")]
|
#[sea_orm(table_name = "subscription_episode")]
|
||||||
pub struct Model {
|
pub struct Model {
|
||||||
@ -30,6 +32,14 @@ pub enum Relation {
|
|||||||
on_delete = "Cascade"
|
on_delete = "Cascade"
|
||||||
)]
|
)]
|
||||||
Episode,
|
Episode,
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::subscribers::Entity",
|
||||||
|
from = "Column::SubscriberId",
|
||||||
|
to = "super::subscribers::Column::Id",
|
||||||
|
on_update = "Cascade",
|
||||||
|
on_delete = "Cascade"
|
||||||
|
)]
|
||||||
|
Subscriber,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Related<super::subscriptions::Entity> for Entity {
|
impl Related<super::subscriptions::Entity> for Entity {
|
||||||
@ -44,28 +54,56 @@ impl Related<super::episodes::Entity> for Entity {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Related<super::subscribers::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::Subscriber.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
|
||||||
pub enum RelatedEntity {
|
pub enum RelatedEntity {
|
||||||
#[sea_orm(entity = "super::subscriptions::Entity")]
|
#[sea_orm(entity = "super::subscriptions::Entity")]
|
||||||
Subscription,
|
Subscription,
|
||||||
#[sea_orm(entity = "super::episodes::Entity")]
|
#[sea_orm(entity = "super::episodes::Entity")]
|
||||||
Episode,
|
Episode,
|
||||||
|
#[sea_orm(entity = "super::subscribers::Entity")]
|
||||||
|
Subscriber,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
impl ActiveModelBehavior for ActiveModel {}
|
impl ActiveModelBehavior for ActiveModel {}
|
||||||
|
|
||||||
impl ActiveModel {
|
impl Model {
|
||||||
pub fn from_subscription_and_episode(
|
pub async fn add_episodes_for_subscription(
|
||||||
|
ctx: &dyn AppContextTrait,
|
||||||
|
episode_ids: impl Iterator<Item = i32>,
|
||||||
subscriber_id: i32,
|
subscriber_id: i32,
|
||||||
subscription_id: i32,
|
subscription_id: i32,
|
||||||
episode_id: i32,
|
) -> RecorderResult<()> {
|
||||||
) -> Self {
|
let db = ctx.db();
|
||||||
Self {
|
|
||||||
subscriber_id: ActiveValue::Set(subscriber_id),
|
let active_models = episode_ids
|
||||||
subscription_id: ActiveValue::Set(subscription_id),
|
.map(|episode_id| ActiveModel {
|
||||||
episode_id: ActiveValue::Set(episode_id),
|
episode_id: ActiveValue::Set(episode_id),
|
||||||
..Default::default()
|
subscription_id: ActiveValue::Set(subscription_id),
|
||||||
|
subscriber_id: ActiveValue::Set(subscriber_id),
|
||||||
|
..Default::default()
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
if active_models.is_empty() {
|
||||||
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Entity::insert_many(active_models)
|
||||||
|
.on_conflict(
|
||||||
|
OnConflict::columns([Column::SubscriptionId, Column::EpisodeId])
|
||||||
|
.do_nothing()
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.exec_without_returning(db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
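Same pattern as the bangumi link table: construction moves from a single-row `ActiveModel` helper to a batched, conflict-tolerant `Model::add_episodes_for_subscription`, which is exactly what `episodes::Model::add_mikan_episodes_for_subscription` now calls after its upsert. E.g. (a sketch, with assumed bindings):

    // Link freshly upserted episode ids to the subscription in one statement.
    subscription_episode::Model::add_episodes_for_subscription(
        ctx,
        new_episode_ids,
        subscriber_id,
        subscription_id,
    )
    .await?;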
|||||||
@ -1,25 +1,15 @@
|
|||||||
use std::{collections::HashSet, sync::Arc};
|
use std::{fmt::Debug, sync::Arc};
|
||||||
|
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use itertools::Itertools;
|
use sea_orm::entity::prelude::*;
|
||||||
use sea_orm::{ActiveValue, entity::prelude::*};
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
use super::{bangumi, episodes, query::filter_values_in};
|
|
||||||
use crate::{
|
use crate::{
|
||||||
app::AppContextTrait,
|
app::AppContextTrait,
|
||||||
errors::RecorderResult,
|
errors::{RecorderError, RecorderResult},
|
||||||
extract::{
|
extract::mikan::{
|
||||||
mikan::{
|
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
|
||||||
MikanBangumiPosterMeta, build_mikan_bangumi_homepage_url, build_mikan_bangumi_rss_url,
|
|
||||||
extract_mikan_rss_channel_from_rss_link,
|
|
||||||
scrape_mikan_bangumi_meta_from_bangumi_homepage_url,
|
|
||||||
scrape_mikan_episode_meta_from_episode_homepage_url,
|
|
||||||
scrape_mikan_poster_meta_from_image_url,
|
|
||||||
},
|
|
||||||
rawname::extract_season_from_title_body,
|
|
||||||
},
|
},
|
||||||
models::episodes::MikanEpsiodeCreation,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(
|
#[derive(
|
||||||
@ -32,8 +22,12 @@ use crate::{
|
|||||||
)]
|
)]
|
||||||
#[serde(rename_all = "snake_case")]
|
#[serde(rename_all = "snake_case")]
|
||||||
pub enum SubscriptionCategory {
|
pub enum SubscriptionCategory {
|
||||||
#[sea_orm(string_value = "mikan")]
|
#[sea_orm(string_value = "mikan_subscriber")]
|
||||||
Mikan,
|
MikanSubscriber,
|
||||||
|
#[sea_orm(string_value = "mikan_season")]
|
||||||
|
MikanSeason,
|
||||||
|
#[sea_orm(string_value = "mikan_bangumi")]
|
||||||
|
MikanBangumi,
|
||||||
#[sea_orm(string_value = "manual")]
|
#[sea_orm(string_value = "manual")]
|
||||||
Manual,
|
Manual,
|
||||||
}
|
}
|
||||||
@ -151,59 +145,12 @@ pub enum RelatedEntity {
|
|||||||
Credential3rd,
|
Credential3rd,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
|
||||||
pub struct SubscriptionCreateFromRssDto {
|
|
||||||
pub rss_link: String,
|
|
||||||
pub display_name: String,
|
|
||||||
pub enabled: Option<bool>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
|
||||||
#[serde(tag = "category")]
|
|
||||||
pub enum SubscriptionCreateDto {
|
|
||||||
Mikan(SubscriptionCreateFromRssDto),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
impl ActiveModelBehavior for ActiveModel {}
|
impl ActiveModelBehavior for ActiveModel {}
|
||||||
|
|
||||||
impl ActiveModel {
|
impl ActiveModel {}
|
||||||
pub fn from_create_dto(create_dto: SubscriptionCreateDto, subscriber_id: i32) -> Self {
|
|
||||||
match create_dto {
|
|
||||||
SubscriptionCreateDto::Mikan(create_dto) => {
|
|
||||||
Self::from_rss_create_dto(SubscriptionCategory::Mikan, create_dto, subscriber_id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_rss_create_dto(
|
|
||||||
category: SubscriptionCategory,
|
|
||||||
create_dto: SubscriptionCreateFromRssDto,
|
|
||||||
subscriber_id: i32,
|
|
||||||
) -> Self {
|
|
||||||
Self {
|
|
||||||
display_name: ActiveValue::Set(create_dto.display_name),
|
|
||||||
enabled: ActiveValue::Set(create_dto.enabled.unwrap_or(false)),
|
|
||||||
subscriber_id: ActiveValue::Set(subscriber_id),
|
|
||||||
category: ActiveValue::Set(category),
|
|
||||||
source_url: ActiveValue::Set(create_dto.rss_link),
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Model {
|
impl Model {
|
||||||
pub async fn add_subscription(
|
|
||||||
ctx: &dyn AppContextTrait,
|
|
||||||
create_dto: SubscriptionCreateDto,
|
|
||||||
subscriber_id: i32,
|
|
||||||
) -> RecorderResult<Self> {
|
|
||||||
let db = ctx.db();
|
|
||||||
let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id);
|
|
||||||
|
|
||||||
Ok(subscription.insert(db).await?)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn toggle_with_ids(
|
pub async fn toggle_with_ids(
|
||||||
ctx: &dyn AppContextTrait,
|
ctx: &dyn AppContextTrait,
|
||||||
ids: impl Iterator<Item = i32>,
|
ids: impl Iterator<Item = i32>,
|
||||||
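With the DTO constructors gone, `ActiveModel` no longer builds subscriptions directly; creation is expected to flow through the typed `Subscription` enum introduced in the next hunk, whose `#[serde(tag = "category")]` layout matches the widened `SubscriptionCategory` string values. A round-trip sketch for the tagged representation (only the unit variant is shown, since it needs no payload):

    // Internally tagged serde layout: unit variants serialize to just the tag.
    let sub = Subscription::Manual;
    let json = serde_json::to_value(&sub)?;
    assert_eq!(json, serde_json::json!({ "category": "manual" }));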
@@ -230,127 +177,141 @@ impl Model {
         Ok(())
     }

-    pub async fn pull_subscription(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
-        match &self.category {
-            SubscriptionCategory::Mikan => {
-                let mikan_client = ctx.mikan();
-                let channel =
-                    extract_mikan_rss_channel_from_rss_link(mikan_client, &self.source_url).await?;
-
-                let items = channel.into_items();
-
-                let db = ctx.db();
-                let items = items.into_iter().collect_vec();
-
-                let mut stmt = filter_values_in(
-                    episodes::Entity,
-                    episodes::Column::MikanEpisodeId,
-                    items
-                        .iter()
-                        .map(|s| Value::from(s.mikan_episode_id.clone())),
-                );
-                stmt.and_where(Expr::col(episodes::Column::SubscriberId).eq(self.subscriber_id));
-
-                let builder = &db.get_database_backend();
-                let old_rss_item_mikan_episode_ids_set = db
-                    .query_all(builder.build(&stmt))
-                    .await?
-                    .into_iter()
-                    .flat_map(|qs| qs.try_get_by_index(0))
-                    .collect::<HashSet<String>>();
-
-                let new_rss_items = items
-                    .into_iter()
-                    .filter(|item| {
-                        !old_rss_item_mikan_episode_ids_set.contains(&item.mikan_episode_id)
-                    })
-                    .collect_vec();
-
-                let mut new_metas = vec![];
-                for new_rss_item in new_rss_items.iter() {
-                    new_metas.push(
-                        scrape_mikan_episode_meta_from_episode_homepage_url(
-                            mikan_client,
-                            new_rss_item.homepage.clone(),
-                        )
-                        .await?,
-                    );
-                }
-
-                let new_mikan_bangumi_groups = new_metas
-                    .into_iter()
-                    .into_group_map_by(|s| (s.mikan_bangumi_id.clone(), s.mikan_fansub_id.clone()));
-
-                for ((mikan_bangumi_id, mikan_fansub_id), new_ep_metas) in new_mikan_bangumi_groups
-                {
-                    let mikan_base_url = ctx.mikan().base_url();
-                    let bgm_homepage = build_mikan_bangumi_homepage_url(
-                        mikan_base_url.clone(),
-                        &mikan_bangumi_id,
-                        Some(&mikan_fansub_id),
-                    );
-                    let bgm_rss_link = build_mikan_bangumi_rss_url(
-                        mikan_base_url.clone(),
-                        &mikan_bangumi_id,
-                        Some(&mikan_fansub_id),
-                    )?;
-                    let bgm = Arc::new(
-                        bangumi::Model::get_or_insert_from_mikan(
-                            ctx,
-                            self.subscriber_id,
-                            self.id,
-                            mikan_bangumi_id.to_string(),
-                            mikan_fansub_id.to_string(),
-                            async |am| -> RecorderResult<()> {
-                                let bgm_meta = scrape_mikan_bangumi_meta_from_bangumi_homepage_url(
-                                    mikan_client,
-                                    bgm_homepage.clone(),
-                                )
-                                .await?;
-                                let bgm_name = bgm_meta.bangumi_title;
-                                let (_, bgm_season_raw, bgm_season) =
-                                    extract_season_from_title_body(&bgm_name);
-                                am.raw_name = ActiveValue::Set(bgm_name.clone());
-                                am.display_name = ActiveValue::Set(bgm_name);
-                                am.season = ActiveValue::Set(bgm_season);
-                                am.season_raw = ActiveValue::Set(bgm_season_raw);
-                                am.rss_link = ActiveValue::Set(Some(bgm_rss_link.to_string()));
-                                am.homepage = ActiveValue::Set(Some(bgm_homepage.to_string()));
-                                am.fansub = ActiveValue::Set(Some(bgm_meta.fansub));
-                                if let Some(origin_poster_src) = bgm_meta.origin_poster_src
-                                    && let MikanBangumiPosterMeta {
-                                        poster_src: Some(poster_src),
-                                        ..
-                                    } = scrape_mikan_poster_meta_from_image_url(
-                                        mikan_client,
-                                        ctx.storage(),
-                                        origin_poster_src,
-                                        self.subscriber_id,
-                                    )
-                                    .await?
-                                {
-                                    am.poster_link = ActiveValue::Set(Some(poster_src))
-                                }
-                                Ok(())
-                            },
-                        )
-                        .await?,
-                    );
-                    episodes::Model::add_episodes(
-                        ctx,
-                        self.subscriber_id,
-                        self.id,
-                        new_ep_metas.into_iter().map(|item| MikanEpsiodeCreation {
-                            episode: item,
-                            bangumi: bgm.clone(),
-                        }),
-                    )
-                    .await?;
-                }
-                Ok(())
-            }
-            _ => todo!(),
+    pub async fn find_by_id_and_subscriber_id(
+        ctx: &dyn AppContextTrait,
+        subscriber_id: i32,
+        subscription_id: i32,
+    ) -> RecorderResult<Self> {
+        let db = ctx.db();
+        let subscription_model = Entity::find_by_id(subscription_id)
+            .one(db)
+            .await?
+            .ok_or_else(|| RecorderError::DbError {
+                source: DbErr::RecordNotFound(format!(
+                    "Subscription id {subscription_id} not found or not belong to subscriber \
+                     {subscriber_id}",
+                )),
+            })?;
+
+        if subscription_model.subscriber_id != subscriber_id {
+            Err(RecorderError::DbError {
+                source: DbErr::RecordNotFound(format!(
+                    "Subscription id {subscription_id} not found or not belong to subscriber \
+                     {subscriber_id}",
+                )),
+            })?;
+        }
+
+        Ok(subscription_model)
+    }
+}
+
+#[async_trait]
+pub trait SubscriptionTrait: Sized + Debug {
+    fn get_subscriber_id(&self) -> i32;
+
+    fn get_subscription_id(&self) -> i32;
+
+    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
+
+    async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
+
+    async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
+
+    fn try_from_model(model: &Model) -> RecorderResult<Self>;
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(tag = "category")]
+pub enum Subscription {
+    #[serde(rename = "mikan_subscriber")]
+    MikanSubscriber(MikanSubscriberSubscription),
+    #[serde(rename = "mikan_season")]
+    MikanSeason(MikanSeasonSubscription),
+    #[serde(rename = "mikan_bangumi")]
+    MikanBangumi(MikanBangumiSubscription),
+    #[serde(rename = "manual")]
+    Manual,
+}
+
+impl Subscription {
+    pub fn category(&self) -> SubscriptionCategory {
+        match self {
+            Self::MikanSubscriber(_) => SubscriptionCategory::MikanSubscriber,
+            Self::MikanSeason(_) => SubscriptionCategory::MikanSeason,
+            Self::MikanBangumi(_) => SubscriptionCategory::MikanBangumi,
+            Self::Manual => SubscriptionCategory::Manual,
         }
     }
 }
+
+#[async_trait]
+impl SubscriptionTrait for Subscription {
+    fn get_subscriber_id(&self) -> i32 {
+        match self {
+            Self::MikanSubscriber(subscription) => subscription.get_subscriber_id(),
+            Self::MikanSeason(subscription) => subscription.get_subscriber_id(),
|
||||||
|
Self::MikanBangumi(subscription) => subscription.get_subscriber_id(),
|
||||||
|
Self::Manual => unreachable!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_subscription_id(&self) -> i32 {
|
||||||
|
match self {
|
||||||
|
Self::MikanSubscriber(subscription) => subscription.get_subscription_id(),
|
||||||
|
Self::MikanSeason(subscription) => subscription.get_subscription_id(),
|
||||||
|
Self::MikanBangumi(subscription) => subscription.get_subscription_id(),
|
||||||
|
Self::Manual => unreachable!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||||
|
match self {
|
||||||
|
Self::MikanSubscriber(subscription) => subscription.sync_feeds_incremental(ctx).await,
|
||||||
|
Self::MikanSeason(subscription) => subscription.sync_feeds_incremental(ctx).await,
|
||||||
|
Self::MikanBangumi(subscription) => subscription.sync_feeds_incremental(ctx).await,
|
||||||
|
Self::Manual => Ok(()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||||
|
match self {
|
||||||
|
Self::MikanSubscriber(subscription) => subscription.sync_feeds_full(ctx).await,
|
||||||
|
Self::MikanSeason(subscription) => subscription.sync_feeds_full(ctx).await,
|
||||||
|
Self::MikanBangumi(subscription) => subscription.sync_feeds_full(ctx).await,
|
||||||
|
Self::Manual => Ok(()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||||
|
match self {
|
||||||
|
Self::MikanSubscriber(subscription) => subscription.sync_sources(ctx).await,
|
||||||
|
Self::MikanSeason(subscription) => subscription.sync_sources(ctx).await,
|
||||||
|
Self::MikanBangumi(subscription) => subscription.sync_sources(ctx).await,
|
||||||
|
Self::Manual => Ok(()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn try_from_model(model: &Model) -> RecorderResult<Self> {
|
||||||
|
match model.category {
|
||||||
|
SubscriptionCategory::MikanSubscriber => {
|
||||||
|
MikanSubscriberSubscription::try_from_model(model).map(Self::MikanSubscriber)
|
||||||
|
}
|
||||||
|
SubscriptionCategory::MikanSeason => {
|
||||||
|
MikanSeasonSubscription::try_from_model(model).map(Self::MikanSeason)
|
||||||
|
}
|
||||||
|
SubscriptionCategory::MikanBangumi => {
|
||||||
|
MikanBangumiSubscription::try_from_model(model).map(Self::MikanBangumi)
|
||||||
|
}
|
||||||
|
SubscriptionCategory::Manual => Ok(Self::Manual),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TryFrom<&Model> for Subscription {
|
||||||
|
type Error = RecorderError;
|
||||||
|
|
||||||
|
fn try_from(model: &Model) -> Result<Self, Self::Error> {
|
||||||
|
Self::try_from_model(model)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
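Since `Subscription` is internally tagged with `category`, its JSON form carries the variant name alongside the payload fields, matching the `SubscriptionCategory` stored on the model. A self-contained sketch of the same serde pattern (the `*Like` types are stand-ins, not the crate's):

use serde::{Deserialize, Serialize};

// Stand-in payload; the real crate uses MikanSubscriberSubscription etc.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
struct MikanSubscriberLike {
    subscriber_id: i32,
    subscription_id: i32,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "category")]
enum SubscriptionLike {
    #[serde(rename = "mikan_subscriber")]
    MikanSubscriber(MikanSubscriberLike),
    #[serde(rename = "manual")]
    Manual,
}

fn main() -> serde_json::Result<()> {
    let sub = SubscriptionLike::MikanSubscriber(MikanSubscriberLike {
        subscriber_id: 1,
        subscription_id: 42,
    });
    let json = serde_json::to_string(&sub)?;
    // The tag is inlined next to the payload fields:
    // {"category":"mikan_subscriber","subscriber_id":1,"subscription_id":42}
    assert_eq!(serde_json::from_str::<SubscriptionLike>(&json)?, sub);
    Ok(())
}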
@ -1,62 +0,0 @@ (deleted file)
use async_trait::async_trait;
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_status")]
#[serde(rename_all = "snake_case")]
pub enum TaskStatus {
    #[sea_orm(string_value = "r")]
    Running,
    #[sea_orm(string_value = "s")]
    Success,
    #[sea_orm(string_value = "f")]
    Failed,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "tasks")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub task_id: i32,
    pub subscriber_id: i32,
    pub item: serde_json::Value,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Subscriber,
    #[sea_orm(
        belongs_to = "super::tasks::Entity",
        from = "Column::TaskId",
        to = "super::tasks::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Task,
}

impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}

impl Related<super::tasks::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Task.def()
    }
}

#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
@ -1,95 +0,0 @@ (deleted file)
use async_trait::async_trait;
use sea_orm::{QuerySelect, entity::prelude::*};
use serde::{Deserialize, Serialize};

use crate::{app::AppContextTrait, errors::RecorderResult};

#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_status")]
#[serde(rename_all = "snake_case")]
pub enum TaskStatus {
    #[sea_orm(string_value = "p")]
    Pending,
    #[sea_orm(string_value = "r")]
    Running,
    #[sea_orm(string_value = "s")]
    Success,
    #[sea_orm(string_value = "f")]
    Failed,
}

#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_status")]
#[serde(rename_all = "snake_case")]
pub enum TaskMode {
    #[sea_orm(string_value = "stream")]
    Stream,
    #[sea_orm(string_value = "future")]
    Future,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "tasks")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub subscriber_id: i32,
    pub task_mode: TaskMode,
    pub task_status: TaskStatus,
    pub task_type: String,
    pub state_data: serde_json::Value,
    pub request_data: serde_json::Value,
    pub error_data: serde_json::Value,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::task_stream_item::Entity")]
    StreamItem,
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Subscriber,
}

impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}

impl Related<super::task_stream_item::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::StreamItem.def()
    }
}

impl Model {
    pub async fn find_stream_task_by_id(
        ctx: &dyn AppContextTrait,
        task_id: i32,
    ) -> RecorderResult<Option<(Model, Vec<super::task_stream_item::Model>)>> {
        let db = ctx.db();
        let res = Entity::find()
            .filter(Column::Id.eq(task_id))
            .filter(Column::TaskMode.eq(TaskMode::Stream))
            .find_with_related(super::task_stream_item::Entity)
            .limit(1)
            .all(db)
            .await?
            .pop();

        Ok(res)
    }
}

#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
33 apps/recorder/src/task/core.rs Normal file
@ -0,0 +1,33 @@
use std::sync::Arc;

use futures::Stream;
use serde::{Serialize, de::DeserializeOwned};

use crate::{app::AppContextTrait, errors::RecorderResult};

pub const SUBSCRIBER_TASK_APALIS_NAME: &str = "subscriber_task";

#[async_trait::async_trait]
pub trait SubscriberAsyncTaskTrait: Serialize + DeserializeOwned + Sized {
    async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;

    async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        self.run_async(ctx).await?;

        Ok(())
    }
}

#[async_trait::async_trait]
pub trait SubscriberStreamTaskTrait: Serialize + DeserializeOwned + Sized {
    type Yield: Serialize + DeserializeOwned + Send;

    fn run_stream(
        self,
        ctx: Arc<dyn AppContextTrait>,
    ) -> impl Stream<Item = RecorderResult<Self::Yield>> + Send;

    async fn run(self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        unimplemented!()
    }
}
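`SubscriberAsyncTaskTrait` gives one-shot tasks a default `run` that awaits `run_async`, while `SubscriberStreamTaskTrait` is for tasks that emit intermediate, serializable items. A self-contained sketch of the stream shape (the `CountTask` type is made up, and the `ctx` parameter and error type are elided):

use futures::{Stream, StreamExt, stream};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct CountTask {
    up_to: u32,
}

impl CountTask {
    // Mirrors the trait's run_stream shape: consume self, return a stream
    // of serializable items.
    fn run_stream(self) -> impl Stream<Item = u32> {
        stream::iter(0..self.up_to)
    }
}

#[tokio::main]
async fn main() {
    let mut s = Box::pin(CountTask { up_to: 3 }.run_stream());
    while let Some(item) = s.next().await {
        println!("yielded {item}"); // yielded 0, 1, 2
    }
}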
14 apps/recorder/src/task/mod.rs Normal file
@ -0,0 +1,14 @@
mod config;
mod core;
mod registry;
mod service;

pub use core::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberAsyncTaskTrait, SubscriberStreamTaskTrait};

pub use config::TaskConfig;
pub use registry::{
    SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant,
    SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
    SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask,
};
pub use service::TaskService;
99 apps/recorder/src/task/registry/mod.rs Normal file
@ -0,0 +1,99 @@
mod subscription;
use std::sync::Arc;

use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
pub use subscription::{
    SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
    SyncOneSubscriptionSourcesTask,
};

use super::SubscriberAsyncTaskTrait;
use crate::{
    app::AppContextTrait,
    errors::{RecorderError, RecorderResult},
    models::subscriptions::SubscriptionTrait,
};

#[derive(
    Clone,
    Debug,
    Serialize,
    Deserialize,
    PartialEq,
    Eq,
    Copy,
    DeriveActiveEnum,
    DeriveDisplay,
    EnumIter,
)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum SubscriberTaskType {
    #[serde(rename = "sync_one_subscription_feeds_incremental")]
    #[sea_orm(string_value = "sync_one_subscription_feeds_incremental")]
    SyncOneSubscriptionFeedsIncremental,
    #[serde(rename = "sync_one_subscription_feeds_full")]
    #[sea_orm(string_value = "sync_one_subscription_feeds_full")]
    SyncOneSubscriptionFeedsFull,
    #[serde(rename = "sync_one_subscription_sources")]
    #[sea_orm(string_value = "sync_one_subscription_sources")]
    SyncOneSubscriptionSources,
}

impl TryFrom<&SubscriberTask> for serde_json::Value {
    type Error = RecorderError;

    fn try_from(value: &SubscriberTask) -> Result<Self, Self::Error> {
        let json_value = serde_json::to_value(value)?;
        Ok(match json_value {
            serde_json::Value::Object(mut map) => {
                map.remove("task_type");
                serde_json::Value::Object(map)
            }
            _ => {
                unreachable!("subscriber task must be an json object");
            }
        })
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, FromJsonQueryResult)]
#[serde(tag = "task_type")]
pub enum SubscriberTask {
    #[serde(rename = "sync_one_subscription_feeds_incremental")]
    SyncOneSubscriptionFeedsIncremental(SyncOneSubscriptionFeedsIncrementalTask),
    #[serde(rename = "sync_one_subscription_feeds_full")]
    SyncOneSubscriptionFeedsFull(SyncOneSubscriptionFeedsFullTask),
    #[serde(rename = "sync_one_subscription_sources")]
    SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTask),
}

impl SubscriberTask {
    pub fn get_subscriber_id(&self) -> i32 {
        match self {
            Self::SyncOneSubscriptionFeedsIncremental(task) => task.0.get_subscriber_id(),
            Self::SyncOneSubscriptionFeedsFull(task) => task.0.get_subscriber_id(),
            Self::SyncOneSubscriptionSources(task) => task.0.get_subscriber_id(),
        }
    }

    pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        match self {
            Self::SyncOneSubscriptionFeedsIncremental(task) => task.run(ctx).await,
            Self::SyncOneSubscriptionFeedsFull(task) => task.run(ctx).await,
            Self::SyncOneSubscriptionSources(task) => task.run(ctx).await,
        }
    }

    pub fn task_type(&self) -> SubscriberTaskType {
        match self {
            Self::SyncOneSubscriptionFeedsIncremental(_) => {
                SubscriberTaskType::SyncOneSubscriptionFeedsIncremental
            }
            Self::SyncOneSubscriptionFeedsFull(_) => {
                SubscriberTaskType::SyncOneSubscriptionFeedsFull
            }
            Self::SyncOneSubscriptionSources(_) => SubscriberTaskType::SyncOneSubscriptionSources,
        }
    }
}
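Since `SubscriberTask` is tagged with `task_type`, serializing it yields one flat object, and the `TryFrom` impl above strips the tag so the remainder can be stored as request data next to a separate task-type column. A self-contained sketch of that strip step:

use serde::{Deserialize, Serialize};
use serde_json::Value;

#[derive(Serialize, Deserialize)]
#[serde(tag = "task_type")]
enum TaskLike {
    #[serde(rename = "sync_feeds")]
    SyncFeeds { subscription_id: i32 },
}

fn main() -> serde_json::Result<()> {
    let task = TaskLike::SyncFeeds { subscription_id: 7 };
    let mut value = serde_json::to_value(&task)?;
    // {"task_type":"sync_feeds","subscription_id":7}
    if let Value::Object(map) = &mut value {
        map.remove("task_type"); // the same move as the TryFrom impl above
    }
    assert_eq!(value, serde_json::json!({ "subscription_id": 7 }));
    Ok(())
}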
62 apps/recorder/src/task/registry/subscription.rs Normal file
@ -0,0 +1,62 @@
use std::sync::Arc;

use sea_orm::prelude::*;
use serde::{Deserialize, Serialize};

use crate::{
    app::AppContextTrait,
    errors::RecorderResult,
    models::subscriptions::{self, SubscriptionTrait},
    task::SubscriberAsyncTaskTrait,
};

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionFeedsIncrementalTask(pub subscriptions::Subscription);

impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsIncrementalTask {
    fn from(subscription: subscriptions::Subscription) -> Self {
        Self(subscription)
    }
}

#[async_trait::async_trait]
impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsIncrementalTask {
    async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        self.0.sync_feeds_incremental(ctx).await?;
        Ok(())
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionFeedsFullTask(pub subscriptions::Subscription);

impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsFullTask {
    fn from(subscription: subscriptions::Subscription) -> Self {
        Self(subscription)
    }
}

#[async_trait::async_trait]
impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
    async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        self.0.sync_feeds_full(ctx).await?;
        Ok(())
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionSourcesTask(pub subscriptions::Subscription);

#[async_trait::async_trait]
impl SubscriberAsyncTaskTrait for SyncOneSubscriptionSourcesTask {
    async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        self.0.sync_sources(ctx).await?;
        Ok(())
    }
}

impl From<subscriptions::Subscription> for SyncOneSubscriptionSourcesTask {
    fn from(subscription: subscriptions::Subscription) -> Self {
        Self(subscription)
    }
}
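Each registry task is a newtype over `subscriptions::Subscription`, so construction is a plain `From` conversion and `run_async` just delegates to the matching sync method. A self-contained sketch of the pattern with stand-in types:

// Stand-ins for the crate's Subscription and task types.
#[derive(Clone, Debug)]
struct SubscriptionLike {
    subscriber_id: i32,
}

impl SubscriptionLike {
    async fn sync_feeds_incremental(&self) {
        println!("syncing feeds for subscriber {}", self.subscriber_id);
    }
}

// Newtype wrapper, mirroring SyncOneSubscriptionFeedsIncrementalTask(pub Subscription).
struct SyncTask(pub SubscriptionLike);

impl From<SubscriptionLike> for SyncTask {
    fn from(subscription: SubscriptionLike) -> Self {
        Self(subscription)
    }
}

impl SyncTask {
    // Mirrors run_async: consume the task, delegate to the wrapped subscription.
    async fn run_async(self) {
        self.0.sync_feeds_incremental().await;
    }
}

#[tokio::main]
async fn main() {
    let task = SyncTask::from(SubscriptionLike { subscriber_id: 1 });
    task.run_async().await;
}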
90 apps/recorder/src/task/service.rs Normal file
@ -0,0 +1,90 @@
use std::{ops::Deref, sync::Arc};

use apalis::prelude::*;
use apalis_sql::{
    Config,
    context::SqlContext,
    postgres::{PgListen, PostgresStorage},
};
use tokio::sync::RwLock;

use crate::{
    app::AppContextTrait,
    errors::RecorderResult,
    task::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberTask, TaskConfig},
};

pub struct TaskService {
    pub config: TaskConfig,
    ctx: Arc<dyn AppContextTrait>,
    subscriber_task_storage: Arc<RwLock<PostgresStorage<SubscriberTask>>>,
}

impl TaskService {
    pub async fn from_config_and_ctx(
        config: TaskConfig,
        ctx: Arc<dyn AppContextTrait>,
    ) -> RecorderResult<Self> {
        let pool = ctx.db().get_postgres_connection_pool().clone();
        let storage_config = Config::new(SUBSCRIBER_TASK_APALIS_NAME);
        let subscriber_task_storage = PostgresStorage::new_with_config(pool, storage_config);

        Ok(Self {
            config,
            ctx,
            subscriber_task_storage: Arc::new(RwLock::new(subscriber_task_storage)),
        })
    }

    async fn run_subscriber_task(
        job: SubscriberTask,
        data: Data<Arc<dyn AppContextTrait>>,
    ) -> RecorderResult<()> {
        let ctx = data.deref().clone();

        job.run(ctx).await
    }

    pub async fn add_subscriber_task(
        &self,
        _subscriber_id: i32,
        subscriber_task: SubscriberTask,
    ) -> RecorderResult<TaskId> {
        let task_id = {
            let mut storage = self.subscriber_task_storage.write().await;
            let sql_context = {
                let mut c = SqlContext::default();
                c.set_max_attempts(1);
                c
            };
            let request = Request::new_with_ctx(subscriber_task, sql_context);
            storage.push_request(request).await?.task_id
        };

        Ok(task_id)
    }

    pub async fn setup_monitor(&self) -> RecorderResult<Monitor> {
        let monitor = Monitor::new();
        let worker = WorkerBuilder::new(SUBSCRIBER_TASK_APALIS_NAME)
            .catch_panic()
            .enable_tracing()
            .data(self.ctx.clone())
            .backend(self.subscriber_task_storage.read().await.clone())
            .build_fn(Self::run_subscriber_task);

        Ok(monitor.register(worker))
    }

    pub async fn setup_listener(&self) -> RecorderResult<PgListen> {
        let pool = self.ctx.db().get_postgres_connection_pool().clone();
        let mut subscriber_task_listener = PgListen::new(pool).await?;

        {
            let mut subscriber_task_storage = self.subscriber_task_storage.write().await;
            subscriber_task_listener.subscribe_with(&mut subscriber_task_storage);
        }

        Ok(subscriber_task_listener)
    }
}
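A hedged usage sketch for the service above, as a call site might look inside the recorder crate (the `ctx` handle, `subscription_model`, and error plumbing are assumed; only calls shown in this diff are used):

// Sketch only, not a verbatim call site from this PR.
async fn enqueue_incremental_sync(
    ctx: std::sync::Arc<dyn AppContextTrait>,
    subscription_model: &subscriptions::Model,
) -> RecorderResult<()> {
    // TryFrom<&Model> picks the right Subscription variant by category.
    let subscription = subscriptions::Subscription::try_from(subscription_model)?;
    let subscriber_id = subscription.get_subscriber_id();

    // Wrap it in a task variant and push it; apalis persists the request in
    // Postgres, and the worker built in setup_monitor() later picks it up.
    let task = SubscriberTask::SyncOneSubscriptionFeedsIncremental(subscription.into());
    let task_id = ctx.task().add_subscriber_task(subscriber_id, task).await?;
    tracing::info!(?task_id, "subscriber task enqueued");
    Ok(())
}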
@ -1,87 +0,0 @@ (deleted file)
use std::{ops::Deref, sync::Arc};

use apalis::prelude::*;
use apalis_sql::postgres::PostgresStorage;
use serde::{Deserialize, Serialize};

use crate::{
    app::AppContextTrait,
    errors::RecorderResult,
    extract::mikan::{
        MikanBangumiMeta, MikanSeasonStr, build_mikan_season_flow_url,
        scrape_mikan_bangumi_meta_list_from_season_flow_url,
    },
};

const TASK_NAME: &str = "mikan_extract_season_subscription";

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExtractMikanSeasonSubscriptionTask {
    pub task_id: i32,
    pub year: i32,
    pub season_str: MikanSeasonStr,
    pub credential_id: i32,
    pub subscription_id: i32,
    pub subscriber_id: i32,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExtractMikanSeasonSubscriptionTaskResult {
    pub task_id: i32,
    pub year: i32,
    pub season_str: MikanSeasonStr,
    pub credential_id: i32,
    pub subscription_id: i32,
    pub subscriber_id: i32,
    pub bangumi_meta_list: Vec<MikanBangumiMeta>,
}

pub async fn extract_mikan_season_subscription(
    job: ExtractMikanSeasonSubscriptionTask,
    data: Data<Arc<dyn AppContextTrait>>,
) -> RecorderResult<GoTo<ExtractMikanSeasonSubscriptionTaskResult>> {
    let ctx = data.deref();

    let mikan_client = ctx.mikan();
    let mikan_base_url = mikan_client.base_url();

    let mikan_season_flow_url =
        build_mikan_season_flow_url(mikan_base_url.clone(), job.year, job.season_str);

    let bangumi_meta_list = scrape_mikan_bangumi_meta_list_from_season_flow_url(
        mikan_client,
        ctx.clone(),
        mikan_season_flow_url,
        job.credential_id,
    )
    .await?;

    Ok(GoTo::Done(ExtractMikanSeasonSubscriptionTaskResult {
        bangumi_meta_list,
        credential_id: job.credential_id,
        season_str: job.season_str,
        subscriber_id: job.subscriber_id,
        subscription_id: job.subscription_id,
        task_id: job.task_id,
        year: job.year,
    }))
}

pub fn register_extract_mikan_season_subscription_task(
    monitor: Monitor,
    ctx: Arc<dyn AppContextTrait>,
) -> RecorderResult<(Monitor, PostgresStorage<StepRequest<serde_json::Value>>)> {
    let pool = ctx.db().get_postgres_connection_pool().clone();
    let storage = PostgresStorage::new(pool);

    let steps = StepBuilder::new().step_fn(extract_mikan_season_subscription);

    let worker = WorkerBuilder::new(TASK_NAME)
        .catch_panic()
        .enable_tracing()
        .data(ctx)
        .backend(storage.clone())
        .build_stepped(steps);

    Ok((monitor.register(worker), storage))
}
@ -1,5 +0,0 @@ (deleted file)
mod extract_season_subscription;

pub use extract_season_subscription::{
    ExtractMikanSeasonSubscriptionTask, register_extract_mikan_season_subscription_task,
};
@ -1,6 +0,0 @@ (deleted file)
pub mod config;
pub mod mikan;
pub mod service;

pub use config::TaskConfig;
pub use service::TaskService;
@ -1,41 +0,0 @@ (deleted file)
use std::{fmt::Debug, sync::Arc};

use apalis::prelude::*;
use apalis_sql::postgres::PostgresStorage;
use tokio::sync::Mutex;

use super::{TaskConfig, mikan::register_extract_mikan_season_subscription_task};
use crate::{app::AppContextTrait, errors::RecorderResult};

pub struct TaskService {
    config: TaskConfig,
    #[allow(dead_code)]
    monitor: Arc<Mutex<Monitor>>,
    pub extract_mikan_season_subscription_task_storage:
        PostgresStorage<StepRequest<serde_json::Value>>,
}

impl TaskService {
    pub async fn from_config_and_ctx(
        config: TaskConfig,
        ctx: Arc<dyn AppContextTrait>,
    ) -> RecorderResult<Self> {
        let monitor = Monitor::new();
        let (monitor, extract_mikan_season_subscription_task_storage) =
            register_extract_mikan_season_subscription_task(monitor, ctx.clone())?;

        Ok(Self {
            config,
            monitor: Arc::new(Mutex::new(monitor)),
            extract_mikan_season_subscription_task_storage,
        })
    }
}

impl Debug for TaskService {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("TaskService")
            .field("config", &self.config)
            .finish()
    }
}
@ -1,12 +1,13 @@
-use std::fmt::Debug;
+use std::{fmt::Debug, sync::Arc};

+use once_cell::sync::OnceCell;
 use typed_builder::TypedBuilder;

-use crate::app::AppContextTrait;
+use crate::{app::AppContextTrait, test_utils::storage::TestingStorageService};

 #[derive(TypedBuilder)]
 #[builder(field_defaults(default, setter(strip_option)))]
-pub struct UnitTestAppContext {
+pub struct TestingAppContext {
     logger: Option<crate::logger::LoggerService>,
     db: Option<crate::database::DatabaseService>,
     config: Option<crate::app::AppConfig>,
@ -14,22 +15,30 @@ pub struct UnitTestAppContext {
     mikan: Option<crate::extract::mikan::MikanClient>,
     auth: Option<crate::auth::AuthService>,
     graphql: Option<crate::graphql::GraphQLService>,
-    storage: Option<crate::storage::StorageService>,
+    storage: Option<TestingStorageService>,
     crypto: Option<crate::crypto::CryptoService>,
-    tasks: Option<crate::tasks::TaskService>,
+    #[builder(default = Arc::new(OnceCell::new()), setter(!strip_option))]
+    task: Arc<OnceCell<crate::task::TaskService>>,
+    message: Option<crate::message::MessageService>,
     #[builder(default = Some(String::from(env!("CARGO_MANIFEST_DIR"))))]
     working_dir: Option<String>,
     #[builder(default = crate::app::Environment::Testing, setter(!strip_option))]
     environment: crate::app::Environment,
 }

-impl Debug for UnitTestAppContext {
+impl TestingAppContext {
+    pub fn set_task(&self, task: crate::task::TaskService) {
+        self.task.get_or_init(|| task);
+    }
+}
+
+impl Debug for TestingAppContext {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "UnitTestAppContext")
     }
 }

-impl AppContextTrait for UnitTestAppContext {
+impl AppContextTrait for TestingAppContext {
     fn logger(&self) -> &crate::logger::LoggerService {
         self.logger.as_ref().expect("should set logger")
     }
@ -74,7 +83,11 @@ impl AppContextTrait for UnitTestAppContext {
         self.crypto.as_ref().expect("should set crypto")
     }

-    fn task(&self) -> &crate::tasks::TaskService {
-        self.tasks.as_ref().expect("should set tasks")
+    fn task(&self) -> &crate::task::TaskService {
+        self.task.get().expect("should set task")
+    }
+
+    fn message(&self) -> &crate::message::MessageService {
+        self.message.as_ref().expect("should set message")
     }
 }
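Because `task` is now an `Arc<OnceCell<...>>`, a test can build the context first and inject the `TaskService` afterwards, which breaks the circular dependency (the service itself wants an `Arc` of the context). A hedged sketch of that two-step setup inside an async test, using helpers from this diff:

let app_ctx = std::sync::Arc::new(
    TestingAppContext::builder()
        .db(build_testing_database_service(TestingDatabaseServiceConfig::default()).await?)
        .build(),
);
let task_service = build_testing_task_service(app_ctx.clone()).await?;
app_ctx.set_task(task_service); // fills the OnceCell exactly once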
@ -3,8 +3,20 @@ use crate::{
     errors::RecorderResult,
 };

+pub struct TestingDatabaseServiceConfig {
+    pub auto_migrate: bool,
+}
+
+impl Default for TestingDatabaseServiceConfig {
+    fn default() -> Self {
+        Self { auto_migrate: true }
+    }
+}
+
 #[cfg(feature = "testcontainers")]
-pub async fn build_testing_database_service() -> RecorderResult<DatabaseService> {
+pub async fn build_testing_database_service(
+    config: TestingDatabaseServiceConfig,
+) -> RecorderResult<DatabaseService> {
     use testcontainers::{ImageExt, runners::AsyncRunner};
     use testcontainers_ext::{ImageDefaultLogConsumerExt, ImagePruneExistedLabelExt};
     use testcontainers_modules::postgres::Postgres;
@ -34,7 +46,7 @@ pub async fn build_testing_database_service() -> RecorderResult<DatabaseService>
         connect_timeout: 5000,
         idle_timeout: 10000,
         acquire_timeout: None,
-        auto_migrate: true,
+        auto_migrate: config.auto_migrate,
     })
     .await?;
     db_service.container = Some(container);
@ -43,7 +55,9 @@ pub async fn build_testing_database_service() -> RecorderResult<DatabaseService>
 }

 #[cfg(not(feature = "testcontainers"))]
-pub async fn build_testing_database_service() -> RecorderResult<DatabaseService> {
+pub async fn build_testing_database_service(
+    config: TestingDatabaseServiceConfig,
+) -> RecorderResult<DatabaseService> {
     let db_service = DatabaseService::from_config(DatabaseConfig {
         uri: String::from("postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"),
         enable_logging: true,
@ -52,7 +66,7 @@ pub async fn build_testing_database_service() -> RecorderResult<DatabaseService>
         connect_timeout: 5000,
         idle_timeout: 10000,
         acquire_timeout: None,
-        auto_migrate: true,
+        auto_migrate: config.auto_migrate,
     })
     .await?;
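`build_testing_database_service` now takes a config so tests that manage their own schema can opt out of migrations. For example (a sketch; the default keeps the old always-migrate behavior):

// Default: auto-migrate, same as before this change.
let db = build_testing_database_service(TestingDatabaseServiceConfig::default()).await?;

// Opt out when a test drives migrations itself.
let db_no_migrate = build_testing_database_service(TestingDatabaseServiceConfig {
    auto_migrate: false,
})
.await?;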
@ -1,14 +1,25 @@
-use std::collections::HashMap;
+use std::{
+    collections::HashMap,
+    ops::{Deref, DerefMut},
+    path::{self, PathBuf},
+};

 use chrono::{Duration, Utc};
 use fetch::{FetchError, HttpClientConfig, IntoUrl, get_random_ua};
+use lazy_static::lazy_static;
+use percent_encoding::{AsciiSet, CONTROLS, percent_decode, utf8_percent_encode};
+use serde::{Deserialize, Serialize};
 use url::Url;

 use crate::{
+    crypto::UserPassCredential,
     errors::RecorderResult,
     extract::mikan::{
-        MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_LOGIN_PAGE_PATH, MikanClient, MikanConfig,
-        MikanCredentialForm,
+        MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH,
+        MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_POSTER_PATH, MIKAN_BANGUMI_RSS_PATH,
+        MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_EPISODE_TORRENT_PATH, MIKAN_LOGIN_PAGE_PATH,
+        MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MikanClient,
+        MikanConfig, MikanCredentialForm,
     },
 };
@ -17,13 +28,25 @@ const TESTING_MIKAN_PASSWORD: &str = "test_password";
 const TESTING_MIKAN_ANTIFORGERY: &str = "test_antiforgery";
 const TESTING_MIKAN_IDENTITY: &str = "test_identity";

+const FILE_UNSAFE: &AsciiSet = &CONTROLS
+    .add(b'<')
+    .add(b'>')
+    .add(b':')
+    .add(b'"')
+    .add(b'|')
+    .add(b'?')
+    .add(b'*')
+    .add(b'\\')
+    .add(b'/')
+    .add(b'&')
+    .add(b'=')
+    .add(b'#');
+
 pub async fn build_testing_mikan_client(
     base_mikan_url: impl IntoUrl,
 ) -> RecorderResult<MikanClient> {
     let mikan_client = MikanClient::from_config(MikanConfig {
-        http_client: HttpClientConfig {
-            ..Default::default()
-        },
+        http_client: HttpClientConfig::default(),
         base_url: base_mikan_url.into_url().map_err(FetchError::from)?,
     })
     .await?;
@ -38,6 +61,126 @@ pub fn build_testing_mikan_credential_form() -> MikanCredentialForm {
     }
 }

+pub fn build_testing_mikan_credential() -> UserPassCredential {
+    UserPassCredential {
+        username: String::from(TESTING_MIKAN_USERNAME),
+        password: String::from(TESTING_MIKAN_PASSWORD),
+        user_agent: None,
+        cookies: None,
+    }
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct MikanDoppelMeta {
+    pub status: u16,
+}
+
+pub struct MikanDoppelPath {
+    path: path::PathBuf,
+}
+
+impl MikanDoppelPath {
+    pub fn new(source: impl Into<Self>) -> Self {
+        source.into()
+    }
+
+    pub fn exists_any(&self) -> bool {
+        self.exists() || self.exists_meta()
+    }
+
+    pub fn exists(&self) -> bool {
+        self.path().exists()
+    }
+
+    pub fn exists_meta(&self) -> bool {
+        self.meta_path().exists()
+    }
+
+    pub fn write(&self, content: impl AsRef<[u8]>) -> std::io::Result<()> {
+        if let Some(parent) = self.as_ref().parent() {
+            std::fs::create_dir_all(parent)?;
+        }
+        std::fs::write(self.as_ref(), content)?;
+        Ok(())
+    }
+
+    pub fn write_meta(&self, meta: MikanDoppelMeta) -> std::io::Result<()> {
+        self.write(serde_json::to_string(&meta)?)
+    }
+
+    pub fn read(&self) -> std::io::Result<Vec<u8>> {
+        let content = std::fs::read(self.as_ref())?;
+        Ok(content)
+    }
+
+    pub fn read_meta(&self) -> std::io::Result<MikanDoppelMeta> {
+        let content = std::fs::read(self.meta_path())?;
+        Ok(serde_json::from_slice(&content)?)
+    }
+
+    pub fn encode_path_component(component: &str) -> String {
+        utf8_percent_encode(component, FILE_UNSAFE).to_string()
+    }
+
+    pub fn decode_path_component(component: &str) -> Result<String, std::str::Utf8Error> {
+        Ok(percent_decode(component.as_bytes())
+            .decode_utf8()?
+            .to_string())
+    }
+
+    pub fn meta_path(&self) -> path::PathBuf {
+        let extension = if let Some(ext) = self.path().extension() {
+            format!("{}.meta.json", ext.to_string_lossy())
+        } else {
+            String::from("meta.json")
+        };
+        self.path.to_path_buf().with_extension(extension)
+    }
+
+    pub fn path(&self) -> &path::Path {
+        &self.path
+    }
+}
+
+impl AsRef<path::Path> for MikanDoppelPath {
+    fn as_ref(&self) -> &path::Path {
+        self.path()
+    }
+}
+
+lazy_static! {
+    static ref TEST_RESOURCES_DIR: String =
+        if cfg!(any(test, debug_assertions, feature = "playground")) {
+            format!("{}/tests/resources", env!("CARGO_MANIFEST_DIR"))
+        } else {
+            "tests/resources".to_string()
+        };
+}
+
+impl From<Url> for MikanDoppelPath {
+    fn from(value: Url) -> Self {
+        let doppel_path = PathBuf::from(format!("{}/mikan/doppel", TEST_RESOURCES_DIR.as_str()));
+        let base_path = doppel_path.join(value.path().trim_matches('/'));
+        let dirname = base_path.parent();
+        let stem = base_path.file_stem();
+        debug_assert!(dirname.is_some() && stem.is_some());
+        let extension = if let Some(ext) = base_path.extension() {
+            ext.to_string_lossy().to_string()
+        } else {
+            String::from("html")
+        };
+        let mut filename = stem.unwrap().to_string_lossy().to_string();
+        if let Some(query) = value.query() {
+            filename.push_str(&format!("-{}", Self::encode_path_component(query)));
+        }
+        filename.push_str(&format!(".{extension}"));
+
+        Self {
+            path: dirname.unwrap().join(filename),
+        }
+    }
+}
+
 pub struct MikanMockServerLoginMock {
     pub login_get_mock: mockito::Mock,
     pub login_post_success_mock: mockito::Mock,
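`From<Url>` maps a request URL onto a fixture file under `tests/resources/mikan/doppel`: path segments become directories, the query (if any) is percent-encoded with `FILE_UNSAFE` and appended to the stem, and a missing extension defaults to `.html`. A sketch of the resulting mapping (the Mikan URLs are illustrative):

use url::Url;

let url = Url::parse("https://mikanani.me/Home/Episode/some-episode-id").unwrap();
let doppel = MikanDoppelPath::from(url);
// doppel.path()      -> .../tests/resources/mikan/doppel/Home/Episode/some-episode-id.html
// doppel.meta_path() -> .../Home/Episode/some-episode-id.html.meta.json

let url = Url::parse("https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370").unwrap();
let doppel = MikanDoppelPath::from(url);
// Query folded into the filename, '=' and '&' percent-encoded:
// .../RSS/Bangumi-bangumiId%3D3141%26subgroupid%3D370.html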
@ -46,17 +189,68 @@ pub struct MikanMockServerLoginMock {
     pub account_get_failed_mock: mockito::Mock,
 }

+pub struct MikanMockServerResourcesMock {
+    pub shared_resource_mock: mockito::Mock,
+    pub shared_resource_not_found_mock: mockito::Mock,
+    pub user_resource_mock: mockito::Mock,
+    pub expand_bangumi_noauth_mock: mockito::Mock,
+    pub season_flow_noauth_mock: mockito::Mock,
+}
+
+pub enum MikanMockServerInner {
+    Server(mockito::Server),
+    ServerGuard(mockito::ServerGuard),
+}
+
+impl Deref for MikanMockServerInner {
+    type Target = mockito::Server;
+
+    fn deref(&self) -> &Self::Target {
+        match self {
+            MikanMockServerInner::Server(server) => server,
+            MikanMockServerInner::ServerGuard(server) => server,
+        }
+    }
+}
+
+impl DerefMut for MikanMockServerInner {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        match self {
+            MikanMockServerInner::Server(server) => server,
+            MikanMockServerInner::ServerGuard(server) => server,
+        }
+    }
+}
+
 pub struct MikanMockServer {
-    pub server: mockito::ServerGuard,
+    pub server: MikanMockServerInner,
     base_url: Url,
 }

 impl MikanMockServer {
+    pub async fn new_with_port(port: u16) -> RecorderResult<Self> {
+        let server = mockito::Server::new_with_opts_async(mockito::ServerOpts {
+            host: "0.0.0.0",
+            port,
+            ..Default::default()
+        })
+        .await;
+        let base_url = Url::parse(&server.url())?;
+
+        Ok(Self {
+            server: MikanMockServerInner::Server(server),
+            base_url,
+        })
+    }
+
     pub async fn new() -> RecorderResult<Self> {
         let server = mockito::Server::new_async().await;
         let base_url = Url::parse(&server.url())?;

-        Ok(Self { server, base_url })
+        Ok(Self {
+            server: MikanMockServerInner::ServerGuard(server),
+            base_url,
+        })
     }

     pub fn base_url(&self) -> &Url {
@ -80,7 +274,7 @@ impl MikanMockServer {
             .server
             .mock("GET", MIKAN_LOGIN_PAGE_PATH)
             .match_query(mockito::Matcher::Any)
-            .with_status(201)
+            .with_status(200)
             .with_header("Content-Type", "text/html; charset=utf-8")
             .with_header(
                 "Set-Cookie",
@ -89,6 +283,10 @@ impl MikanMockServer {
                     SameSite=Strict; Path=/"
                 ),
             )
+            .with_body_from_file(format!(
+                "{}/mikan/LoginPage.html",
+                TEST_RESOURCES_DIR.as_str()
+            ))
             .create();

         let test_identity_expires = (Utc::now() + Duration::days(30)).to_rfc2822();
@ -142,7 +340,10 @@ impl MikanMockServer {
             .match_query(mockito::Matcher::Any)
             .match_request(move |req| !match_post_login_body(req))
             .with_status(200)
-            .with_body_from_file("tests/resources/mikan/LoginError.html")
+            .with_body_from_file(format!(
+                "{}/mikan/LoginError.html",
+                TEST_RESOURCES_DIR.as_str()
+            ))
             .create();

         let account_get_success_mock = self
@ -170,4 +371,150 @@ impl MikanMockServer {
             account_get_failed_mock,
         }
     }
+
+    pub fn mock_resources_with_doppel(&mut self) -> MikanMockServerResourcesMock {
+        let shared_resource_mock = self
+            .server
+            .mock("GET", mockito::Matcher::Any)
+            .match_request({
+                let mikan_base_url = self.base_url().clone();
+                move |request| {
+                    let path = request.path();
+                    if !path.starts_with(MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH)
+                        && !path.starts_with(MIKAN_SEASON_FLOW_PAGE_PATH)
+                        && (path.starts_with(MIKAN_BANGUMI_RSS_PATH)
+                            || path.starts_with(MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH)
+                            || path.starts_with(MIKAN_BANGUMI_HOMEPAGE_PATH)
+                            || path.starts_with(MIKAN_EPISODE_HOMEPAGE_PATH)
+                            || path.starts_with(MIKAN_BANGUMI_POSTER_PATH)
+                            || path.starts_with(MIKAN_EPISODE_TORRENT_PATH))
+                    {
+                        if let Ok(url) = mikan_base_url.join(request.path_and_query()) {
+                            let doppel_path = MikanDoppelPath::from(url);
+                            doppel_path.exists()
+                        } else {
+                            false
+                        }
+                    } else {
+                        false
+                    }
+                }
+            })
+            .with_status(200)
+            .with_body_from_request({
+                let mikan_base_url = self.base_url().clone();
+                move |req| {
+                    let path_and_query = req.path_and_query();
+                    let url = mikan_base_url.join(path_and_query).unwrap();
+                    let doppel_path = MikanDoppelPath::from(url);
+                    doppel_path.read().unwrap()
+                }
+            })
+            .create();
+
+        let shared_resource_not_found_mock = self
+            .server
+            .mock("GET", mockito::Matcher::Any)
+            .match_request({
+                let mikan_base_url = self.base_url().clone();
+                move |request| {
+                    let path = request.path();
+                    if !path.starts_with(MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH)
+                        && !path.starts_with(MIKAN_SEASON_FLOW_PAGE_PATH)
+                        && (path.starts_with(MIKAN_BANGUMI_RSS_PATH)
+                            || path.starts_with(MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH)
+                            || path.starts_with(MIKAN_BANGUMI_HOMEPAGE_PATH)
+                            || path.starts_with(MIKAN_EPISODE_HOMEPAGE_PATH)
+                            || path.starts_with(MIKAN_BANGUMI_POSTER_PATH)
+                            || path.starts_with(MIKAN_EPISODE_TORRENT_PATH))
+                    {
+                        if let Ok(url) = mikan_base_url.join(request.path_and_query()) {
+                            let doppel_path = MikanDoppelPath::from(url);
+                            doppel_path.exists_meta()
+                                && doppel_path.read_meta().unwrap().status == 404
+                        } else {
+                            false
+                        }
+                    } else {
+                        false
+                    }
+                }
+            })
+            .with_status(404)
+            .create();
+
+        let user_resource_mock = self
+            .server
+            .mock("GET", mockito::Matcher::Any)
+            .match_request({
+                let mikan_base_url = self.base_url().clone();
+                move |req| {
+                    if !Self::get_has_auth_matcher()(req) {
+                        return false;
+                    }
+                    let path = req.path();
+                    if path.starts_with(MIKAN_SEASON_FLOW_PAGE_PATH)
+                        || path.starts_with(MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH)
+                    {
+                        if let Ok(url) = mikan_base_url.join(req.path_and_query()) {
+                            let doppel_path = MikanDoppelPath::from(url);
+                            doppel_path.exists()
+                        } else {
+                            false
+                        }
+                    } else {
+                        false
+                    }
+                }
+            })
+            .with_status(200)
+            .with_body_from_request({
+                let mikan_base_url = self.base_url().clone();
+                move |req| {
+                    let path_and_query = req.path_and_query();
+                    let url = mikan_base_url.join(path_and_query).unwrap();
+                    let doppel_path = MikanDoppelPath::from(url);
+                    doppel_path.read().unwrap()
+                }
+            })
+            .create();
+
+        let expand_bangumi_noauth_mock = self
+            .server
+            .mock("GET", mockito::Matcher::Any)
+            .match_request(move |req| {
+                !Self::get_has_auth_matcher()(req)
+                    && req
+                        .path()
+                        .starts_with(MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH)
+            })
+            .with_status(200)
+            .with_body_from_file(format!(
+                "{}/mikan/ExpandBangumi-noauth.html",
+                TEST_RESOURCES_DIR.as_str()
+            ))
+            .create();
+
+        let season_flow_noauth_mock = self
+            .server
+            .mock("GET", mockito::Matcher::Any)
+            .match_request(move |req| {
+                !Self::get_has_auth_matcher()(req)
+                    && req.path().starts_with(MIKAN_SEASON_FLOW_PAGE_PATH)
+            })
+            .with_status(200)
+            .with_body_from_file(format!(
+                "{}/mikan/BangumiCoverFlow-noauth.html",
+                TEST_RESOURCES_DIR.as_str()
+            ))
+            .create();
+
+        MikanMockServerResourcesMock {
+            shared_resource_mock,
+            shared_resource_not_found_mock,
+            user_resource_mock,
+            expand_bangumi_noauth_mock,
+            season_flow_noauth_mock,
+        }
+    }
 }
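A hedged sketch of a test using these mocks together (helper names are the ones defined in this file; the async test scaffolding and error plumbing are assumed):

let mut mikan_server = MikanMockServer::new().await?;
let _resources_mock = mikan_server.mock_resources_with_doppel();
let mikan_client = build_testing_mikan_client(mikan_server.base_url().clone()).await?;
// GETs against the whitelisted Mikan paths now answer from the doppel fixture
// tree; URLs whose .meta.json sidecar records status 404 return 404 instead.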
@ -3,4 +3,5 @@ pub mod crypto;
 pub mod database;
 pub mod mikan;
 pub mod storage;
+pub mod task;
 pub mod tracing;
15 apps/recorder/src/test_utils/task.rs Normal file
@ -0,0 +1,15 @@
use std::sync::Arc;

use crate::{
    app::AppContextTrait,
    errors::RecorderResult,
    task::{TaskConfig, TaskService},
};

pub async fn build_testing_task_service(
    ctx: Arc<dyn AppContextTrait>,
) -> RecorderResult<TaskService> {
    let config = TaskConfig {};
    let task_service = TaskService::from_config_and_ctx(config, ctx).await?;
    Ok(task_service)
}
@ -1,11 +1,18 @@
 use tracing::Level;
 use tracing_subscriber::EnvFilter;

+use crate::logger::MODULE_WHITELIST;
+
 pub fn try_init_testing_tracing(level: Level) {
     let crate_name = env!("CARGO_PKG_NAME");
     let level = level.as_str().to_lowercase();
-    let filter = EnvFilter::new(format!("{crate_name}[]={level}"))
-        .add_directive(format!("mockito[]={level}").parse().unwrap())
-        .add_directive(format!("sqlx[]={level}").parse().unwrap());
+    let mut filter = EnvFilter::new(format!("{crate_name}[]={level}"));
+
+    let mut modules = vec!["mockito"];
+    modules.extend(MODULE_WHITELIST.iter());
+    for module in modules {
+        filter = filter.add_directive(format!("{module}[]={level}").parse().unwrap());
+    }
+
     let _ = tracing_subscriber::fmt().with_env_filter(filter).try_init();
 }
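The filter now derives its directives from the logger's `MODULE_WHITELIST` instead of a hard-coded pair, so test logging follows whatever modules the main logger whitelists. Typical call at the top of a test; `try_init()` swallows the error on repeat initialization, so every test can call it unconditionally:

#[tokio::test]
async fn some_scraper_test() {
    try_init_testing_tracing(tracing::Level::DEBUG);
    // ... test body
}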