Compare commits

...

27 Commits

SHA1 Message Date
9fd3ae6563 feat: basic support rss 2025-06-24 06:37:19 +08:00
cde3361458 feat: add new test resource mikan classic episodes tiny.parquet 2025-06-23 03:07:58 +08:00
f055011b86 feat: add rss feeds and episode enclosure 2025-06-22 01:04:23 +08:00
16429a44b4 fix: fix missing 2025-06-21 03:25:22 +08:00
fe0b7e88e6 feat: classic episodes scraper 2025-06-21 03:21:58 +08:00
28dd9da6ac fix: fix typo 2025-06-20 02:05:23 +08:00
02c16a2972 feat: support optimize images 2025-06-20 01:56:34 +08:00
324427513c refactor: rewrite origin name extractor from regex to nom combinators 2025-06-19 02:37:56 +08:00
c12b9b360a feat: static server support etag 2025-06-18 04:42:33 +08:00
cc06142050 fix: fix middlewares config 2025-06-18 03:09:10 +08:00
6726cafff4 feat: support static server 2025-06-18 02:19:42 +08:00
35312ea1ff fix: fix issues 2025-06-17 02:23:02 +08:00
721eee9c88 fix: fix issues 2025-06-16 08:01:02 +08:00
421f9d0293 feat: task ui & custom filter mutation 2025-06-16 07:56:52 +08:00
7eb4e41708 feat: try views and seaography 2025-06-15 05:02:23 +08:00
a2254bbe80 fix: fix auto accessToken renew 2025-06-15 02:48:48 +08:00
1b5bdadf10 fix: fix tasks 2025-06-14 22:30:58 +08:00
882b29d7a1 feat: task ui basic done 2025-06-13 04:02:01 +08:00
c60f6f511e feat: remove turbo 2025-06-13 00:09:18 +08:00
07955286f1 feat: add tasks manage view 2025-06-12 03:32:18 +08:00
258eeddc74 refactor: refactor graphql 2025-06-12 00:15:26 +08:00
b09e9e6aaa fix: update webui graphql schema 2025-06-11 04:01:00 +08:00
0df371adb7 fix: fix subscription and mikan doppel 2025-05-11 03:41:02 +08:00
8144986a48 fix: fix subscriptions api 2025-05-10 02:31:58 +08:00
d2aab7369d fix: add sync subscription webui and check credential web ui 2025-06-08 00:36:59 +08:00
946d4e8c2c feat: add subscription detail & edit page 2025-06-07 02:50:14 +08:00
0b5f25a263 fix: fix credential 3rd error 2025-06-06 01:58:19 +08:00
254 changed files with 25492 additions and 7800 deletions

.vscode/settings.json

@@ -39,7 +39,14 @@
            "username": "konobangu"
        }
    ],
-    "rust-analyzer.cargo.extraArgs": [
-        "--all-features"
-    ]
+    "rust-analyzer.cargo.features": "all",
+    "rust-analyzer.testExplorer": true
+    // https://github.com/rust-lang/rust/issues/141540
+    // "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
+    // "rust-analyzer.check.extraEnv": {
+    //     "CARGO_TARGET_DIR": "target/rust-analyzer"
+    // },
+    // "rust-analyzer.cargo.extraEnv": {
+    //     "CARGO_TARGET_DIR": "target/analyzer"
+    // }
}

.vscode/tasks.json

@@ -0,0 +1,112 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "dev-all",
"dependsOn": [
"dev-webui",
"dev-recorder",
"dev-proxy",
"dev-codegen-wait",
"dev-deps",
],
"dependsOrder": "parallel",
"group": {
"kind": "build",
"isDefault": false,
},
"presentation": {
"group": "new-group",
"echo": true,
"reveal": "always",
"panel": "shared",
"clear": false
}
},
{
"label": "dev-webui",
"type": "shell",
"command": "just",
"args": [
"dev-webui"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "always",
"focus": true,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-deps",
"type": "shell",
"command": "just",
"args": [
"dev-deps"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-codegen-wait",
"type": "shell",
"command": "just",
"args": [
"dev-codegen-wait"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-recorder",
"type": "shell",
"command": "just",
"args": [
"dev-recorder"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-proxy",
"type": "shell",
"command": "just",
"args": [
"dev-proxy",
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
}
]
}

Cargo.lock (generated): file diff suppressed because it is too large


@@ -8,11 +8,14 @@ members = [
  "packages/fetch",
  "packages/downloader",
  "apps/recorder",
+  "apps/proxy",
]
resolver = "2"

[profile.dev]
debug = 0
+# https://github.com/rust-lang/rust/issues/141540
+incremental = false
# [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
# codegen-backend = "cranelift"

@@ -22,8 +25,9 @@ util = { path = "./packages/util" }
util-derive = { path = "./packages/util-derive" }
fetch = { path = "./packages/fetch" }
downloader = { path = "./packages/downloader" }
+recorder = { path = "./apps/recorder" }
-reqwest = { version = "0.12", features = [
+reqwest = { version = "0.12.20", features = [
  "charset",
  "http2",
  "json",

@@ -57,11 +61,30 @@ regex = "1.11"
lazy_static = "1.5"
axum = { version = "0.8.3", features = ["macros"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
-axum-extra = "0.10"
+axum-extra = { version = "0.10", features = ["typed-header"] }
mockito = { version = "1.6.1" }
convert_case = "0.8"
color-eyre = "0.6.5"
inquire = "0.7.5"
+image = "0.25.6"
+uuid = { version = "1.6.0", features = ["v4"] }
+maplit = "1.0.2"
+once_cell = "1.20.2"
+rand = "0.9.1"
+rust_decimal = "1.37.2"
+base64 = "0.22.1"
+nom = "8.0.0"
+percent-encoding = "2.3.1"
+num-traits = "0.2.19"
+http = "1.2.0"
+async-stream = "0.3.6"
+serde_variant = "0.1.3"
+tracing-appender = "0.2.3"
+clap = "4.5.40"
+ipnetwork = "0.21.1"
+typed-builder = "0.21.0"
+nanoid = "0.4.0"
+webp = "0.3.0"

[patch.crates-io]
-seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "10ba248" }
+seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }


@@ -6,13 +6,14 @@
    "build": "email build",
    "dev": "email dev --port 5003",
    "export": "email export",
-    "clean": "git clean -xdf .cache .turbo dist node_modules",
+    "clean": "git clean -xdf .cache dist node_modules",
    "typecheck": "tsc --noEmit --emitDeclarationOnly false"
  },
  "dependencies": {
-    "@react-email/components": "0.0.31",
+    "@react-email/components": "^0.0.42",
    "react": "^19.0.0",
-    "react-email": "3.0.4"
+    "react-email": "^4.0.16",
+    "@konobangu/email": "workspace:*"
  },
  "devDependencies": {
    "@types/react": "19.0.1"


@@ -2,8 +2,12 @@
  "extends": "../../tsconfig.base.json",
  "compilerOptions": {
    "composite": true,
-    "jsx": "react-jsx"
+    "jsx": "react-jsx",
+    "jsxImportSource": "react",
+    "module": "ESNext",
+    "moduleResolution": "bundler"
  },
+  "references": [{ "path": "../../packages/email" }],
  "include": ["**/*.ts", "**/*.tsx"],
  "exclude": ["node_modules"]
}


@@ -0,0 +1 @@
^https://mikanani.me/*** http://127.0.0.1:5005/$1 excludeFilter://^**/***.svg excludeFilter://^**/***.css excludeFilter://^**/***.js


@@ -1 +1 @@
{"filesOrder":["konobangu"],"selectedList":["konobangu"],"disabledDefalutRules":true,"defalutRules":""} {"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""}

apps/proxy/Cargo.toml

@@ -0,0 +1,19 @@
[package]
name = "proxy"
version = "0.1.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "proxy"
path = "src/lib.rs"
[[bin]]
name = "mikan_doppel"
path = "src/bin/mikan_doppel.rs"
[dependencies]
recorder = { workspace = true }
tokio = { workspace = true }
tracing-subscriber = { workspace = true }
tracing = { workspace = true }


@@ -3,13 +3,13 @@
  "version": "0.1.0",
  "private": true,
  "scripts": {
-    "start": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
-    "dev": "pnpm run start"
+    "whistle": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
+    "mikan_doppel": "cargo run -p proxy --bin mikan_doppel",
+    "dev": "npm-run-all -p mikan_doppel whistle"
  },
  "keywords": [],
  "license": "MIT",
  "devDependencies": {
-    "cross-env": "^7.0.3",
-    "whistle": "^2.9.93"
+    "whistle": "^2.9.99"
  }
}


@@ -0,0 +1,22 @@
use std::time::Duration;
use recorder::{errors::RecorderResult, test_utils::mikan::MikanMockServer};
use tracing::Level;
#[allow(unused_variables)]
#[tokio::main]
async fn main() -> RecorderResult<()> {
tracing_subscriber::fmt()
.with_max_level(Level::DEBUG)
.init();
let mut mikan_server = MikanMockServer::new_with_port(5005).await.unwrap();
let resources_mock = mikan_server.mock_resources_with_doppel();
let login_mock = mikan_server.mock_get_login_page();
loop {
tokio::time::sleep(Duration::from_secs(1)).await;
}
}
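A quick way to run this doppel server on its own, assuming the workspace wiring shown in apps/proxy/package.json above, is the same command that package script uses:

cargo run -p proxy --bin mikan_doppel

With whistle running on port 8899, the rewrite rule added earlier then forwards https://mikanani.me/* requests to this mock server on 127.0.0.1:5005.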


@@ -11,3 +11,7 @@ BASIC_PASSWORD = "konobangu"
# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# OIDC_EXTRA_CLAIM_KEY = ""
# OIDC_EXTRA_CLAIM_VALUE = ""
+# MIKAN_PROXY = ""
+# MIKAN_PROXY_AUTH_HEADER = ""
+# MIKAN_NO_PROXY = ""
+# MIKAN_PROXY_ACCEPT_INVALID_CERTS = "true"

apps/recorder/.env.dev

@@ -0,0 +1,17 @@
HOST="konobangu.com"
DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
STORAGE_DATA_DIR = "./data"
AUTH_TYPE = "basic" # or oidc
BASIC_USER = "konobangu"
BASIC_PASSWORD = "konobangu"
# OIDC_ISSUER="https://auth.logto.io/oidc"
# OIDC_AUDIENCE = "https://konobangu.com/api"
# OIDC_CLIENT_ID = "client_id"
# OIDC_CLIENT_SECRET = "client_secret" # optional
# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# OIDC_EXTRA_CLAIM_KEY = ""
# OIDC_EXTRA_CLAIM_VALUE = ""
MIKAN_PROXY = "http://127.0.0.1:8899"
# MIKAN_PROXY_AUTH_HEADER = ""
# MIKAN_NO_PROXY = ""
MIKAN_PROXY_ACCEPT_INVALID_CERTS = true


@@ -27,3 +27,5 @@ node_modules
dist/
temp/*
!temp/.gitkeep
+tests/resources/mikan/classic_episodes/*/*
+!tests/resources/mikan/classic_episodes/parquet/tiny.parquet


@@ -2,8 +2,20 @@
name = "recorder"
version = "0.1.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

+[features]
+default = ["jxl"]
+playground = ["dep:inquire", "dep:color-eyre", "dep:polars"]
+testcontainers = [
+  "dep:testcontainers",
+  "dep:testcontainers-modules",
+  "dep:testcontainers-ext",
+  "downloader/testcontainers",
+  "testcontainers-modules/postgres",
+]
+jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]

[lib]
name = "recorder"
path = "src/lib.rs"

@@ -13,16 +25,25 @@ name = "recorder_cli"
path = "src/bin/main.rs"
required-features = []

-[features]
-default = []
-playground = ["dep:mockito", "dep:inquire", "dep:color-eyre"]
-testcontainers = [
-  "dep:testcontainers",
-  "dep:testcontainers-modules",
-  "dep:testcontainers-ext",
-  "downloader/testcontainers",
-  "testcontainers-modules/postgres",
-]
+[[example]]
+name = "mikan_collect_classic_eps"
+path = "examples/mikan_collect_classic_eps.rs"
+required-features = ["playground"]

+[[example]]
+name = "mikan_doppel_season_subscription"
+path = "examples/mikan_doppel_season_subscription.rs"
+required-features = ["playground"]

+[[example]]
+name = "mikan_doppel_subscriber_subscription"
+path = "examples/mikan_doppel_subscriber_subscription.rs"
+required-features = ["playground"]

+[[example]]
+name = "playground"
+path = "examples/playground.rs"
+required-features = ["playground"]

[dependencies]
downloader = { workspace = true }

@@ -54,7 +75,29 @@ serde_with = { workspace = true }
moka = { workspace = true }
chrono = { workspace = true }
tracing-subscriber = { workspace = true }
-mockito = { workspace = true, optional = true }
+mockito = { workspace = true }
+color-eyre = { workspace = true, optional = true }
+inquire = { workspace = true, optional = true }
+convert_case = { workspace = true }
+image = { workspace = true }
+uuid = { workspace = true }
+maplit = { workspace = true }
+once_cell = { workspace = true }
+rand = { workspace = true }
+rust_decimal = { workspace = true }
+base64 = { workspace = true }
+nom = { workspace = true }
+percent-encoding = { workspace = true }
+num-traits = { workspace = true }
+http = { workspace = true }
+async-stream = { workspace = true }
+serde_variant = { workspace = true }
+tracing-appender = { workspace = true }
+clap = { workspace = true }
+ipnetwork = { workspace = true }
+typed-builder = { workspace = true }
+nanoid = { workspace = true }
+webp = { workspace = true }

sea-orm = { version = "1.1", features = [
  "sqlx-sqlite",

@@ -64,19 +107,13 @@ sea-orm = { version = "1.1", features = [
  "debug-print",
] }
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
-uuid = { version = "1.6.0", features = ["v4"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
rss = "2"
fancy-regex = "0.14"
-maplit = "1.0.2"
lightningcss = "1.0.0-alpha.66"
html-escape = "0.2.13"
opendal = { version = "0.53", features = ["default", "services-fs"] }
-zune-image = "0.4.15"
-once_cell = "1.20.2"
-scraper = "0.23"
-log = "0.4"
+scraper = "0.23.1"
async-graphql = { version = "7", features = ["dynamic-schema"] }
async-graphql-axum = "7"
seaography = { version = "1.1", features = [

@@ -87,8 +124,8 @@ seaography = { version = "1.1", features = [
  "with-decimal",
  "with-bigdecimal",
  "with-postgres-array",
+  "with-json-as-scalar",
] }
-base64 = "0.22.1"
tower = "0.5.2"
tower-http = { version = "0.6", features = [
  "trace",

@@ -103,30 +140,30 @@ tower-http = { version = "0.6", features = [
tera = "1.20.0"
openidconnect = { version = "4" }
dotenvy = "0.15.7"
-http = "1.2.0"
-async-stream = "0.3.6"
-serde_variant = "0.1.3"
-tracing-appender = "0.2.3"
-clap = "4.5.31"
-ipnetwork = "0.21.1"
-typed-builder = "0.21.0"
+jpegxl-rs = { version = "0.11.2", optional = true }
+jpegxl-sys = { version = "0.11.2", optional = true }
apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
apalis-sql = { version = "0.7", features = ["postgres"] }
cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
-rand = "0.9.1"
-rust_decimal = "1.37.1"
reqwest_cookie_store = "0.8.0"
-nanoid = "0.4.0"
jwtk = "0.4.0"
-color-eyre = { workspace = true, optional = true }
-inquire = { workspace = true, optional = true }
-percent-encoding = "2.3.1"
+mime_guess = "2.0.5"
+icu_properties = "2.0.1"
+icu = "2.0.0"
+tracing-tree = "0.4.0"
+num_cpus = "1.17.0"
+headers-accept = "0.1.4"
+polars = { version = "0.49.1", features = [
+  "parquet",
+  "lazy",
+  "diagonal_concat",
+], optional = true }
[dev-dependencies]
+inquire = { workspace = true }
+color-eyre = { workspace = true }
serial_test = "3"
insta = { version = "1", features = ["redactions", "toml", "filters"] }
rstest = "0.25"
ctor = "0.4.0"
-mockito = { workspace = true }
-inquire = { workspace = true }
-color-eyre = { workspace = true }


@@ -0,0 +1,584 @@
use std::collections::HashSet;
use chrono::{DateTime, Duration, FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
use fetch::{HttpClientConfig, fetch_html};
use itertools::Itertools;
use lazy_static::lazy_static;
use nom::{
IResult, Parser,
branch::alt,
bytes::complete::{tag, take, take_till1},
character::complete::space1,
combinator::map,
};
use recorder::{
errors::{RecorderError, RecorderResult},
extract::{
html::extract_inner_text_from_element_ref,
mikan::{MikanClient, MikanConfig, MikanEpisodeHash, MikanFansubHash},
},
};
use regex::Regex;
use scraper::{ElementRef, Html, Selector};
use snafu::FromString;
use url::Url;
lazy_static! {
static ref TEST_FOLDER: std::path::PathBuf =
if cfg!(any(test, debug_assertions, feature = "playground")) {
std::path::PathBuf::from(format!(
"{}/tests/resources/mikan/classic_episodes",
env!("CARGO_MANIFEST_DIR")
))
} else {
std::path::PathBuf::from("tests/resources/mikan/classic_episodes")
};
}
lazy_static! {
static ref TOTAL_PAGE_REGEX: Regex =
Regex::new(r#"\$\(\'\.classic-view-pagination2\'\)\.bootpag\(\{\s*total:\s*(\d+)"#)
.unwrap();
}
pub struct MikanClassicEpisodeTableRow {
pub id: i32,
pub publish_at: DateTime<Utc>,
pub mikan_fansub_id: Option<String>,
pub fansub_name: Option<String>,
pub mikan_episode_id: String,
pub original_name: String,
pub magnet_link: Option<String>,
pub file_size: Option<String>,
pub torrent_link: Option<String>,
}
impl MikanClassicEpisodeTableRow {
fn timezone() -> FixedOffset {
FixedOffset::east_opt(8 * 3600).unwrap()
}
fn fixed_date_parser(input: &str) -> IResult<&str, NaiveDate> {
alt((
map(tag("今天"), move |_| {
Utc::now().with_timezone(&Self::timezone()).date_naive()
}),
map(tag("昨天"), move |_| {
Utc::now().with_timezone(&Self::timezone()).date_naive() - Duration::days(1)
}),
))
.parse(input)
}
fn formatted_date_parser(input: &str) -> IResult<&str, NaiveDate> {
let (remain, date_str) = take_till1(|c: char| c.is_whitespace()).parse(input)?;
let date = NaiveDate::parse_from_str(date_str, "%Y/%m/%d").map_err(|_| {
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
})?;
Ok((remain, date))
}
fn date_parser(input: &str) -> IResult<&str, NaiveDate> {
alt((Self::fixed_date_parser, Self::formatted_date_parser)).parse(input)
}
fn time_parser(input: &str) -> IResult<&str, NaiveTime> {
let (remain, time_str) = take(5usize).parse(input)?;
let time = NaiveTime::parse_from_str(time_str, "%H:%M").map_err(|_| {
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
})?;
Ok((remain, time))
}
fn extract_publish_at(text: &str) -> Option<DateTime<Utc>> {
let (_, (date, _, time)) = (Self::date_parser, space1, Self::time_parser)
.parse(text)
.ok()?;
let local_dt = Self::timezone()
.from_local_datetime(&date.and_time(time))
.single()?;
Some(local_dt.with_timezone(&Utc))
}
pub fn from_element_ref(
row: ElementRef<'_>,
rev_id: i32,
idx: i32,
mikan_base_url: &Url,
) -> RecorderResult<Self> {
let publish_at_selector = &Selector::parse("td:nth-of-type(1)").unwrap();
let fansub_selector = &Selector::parse("td:nth-of-type(2) > a").unwrap();
let original_name_selector =
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(1)").unwrap();
let magnet_link_selector =
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(2)").unwrap();
let file_size_selector = &Selector::parse("td:nth-of-type(4)").unwrap();
let torrent_link_selector = &Selector::parse("td:nth-of-type(5) > a").unwrap();
let publish_at = row
.select(publish_at_selector)
.next()
.map(extract_inner_text_from_element_ref)
.and_then(|e| Self::extract_publish_at(&e));
let (mikan_fansub_hash, fansub_name) = row
.select(fansub_selector)
.next()
.and_then(|e| {
e.attr("href")
.and_then(|s| mikan_base_url.join(s).ok())
.and_then(|u| MikanFansubHash::from_homepage_url(&u))
.map(|h| (h, extract_inner_text_from_element_ref(e)))
})
.unzip();
let (mikan_episode_hash, original_name) = row
.select(original_name_selector)
.next()
.and_then(|el| {
el.attr("href")
.and_then(|s| mikan_base_url.join(s).ok())
.and_then(|u| MikanEpisodeHash::from_homepage_url(&u))
.map(|h| (h, extract_inner_text_from_element_ref(el)))
})
.unzip();
let magnet_link = row
.select(magnet_link_selector)
.next()
.and_then(|el| el.attr("data-clipboard-text"));
let file_size = row
.select(file_size_selector)
.next()
.map(extract_inner_text_from_element_ref);
let torrent_link = row
.select(torrent_link_selector)
.next()
.and_then(|el| el.attr("href"));
if let (Some(mikan_episode_hash), Some(original_name), Some(publish_at)) = (
mikan_episode_hash.as_ref(),
original_name.as_ref(),
publish_at.as_ref(),
) {
Ok(Self {
id: rev_id * 1000 + idx,
publish_at: *publish_at,
mikan_fansub_id: mikan_fansub_hash.map(|h| h.mikan_fansub_id.clone()),
fansub_name,
mikan_episode_id: mikan_episode_hash.mikan_episode_id.clone(),
original_name: original_name.clone(),
magnet_link: magnet_link.map(|s| s.to_string()),
file_size: file_size.map(|s| s.to_string()),
torrent_link: torrent_link.map(|s| s.to_string()),
})
} else {
let mut missing_fields = vec![];
if mikan_episode_hash.is_none() {
missing_fields.push("mikan_episode_id");
}
if original_name.is_none() {
missing_fields.push("original_name");
}
if publish_at.is_none() {
missing_fields.push("publish_at");
}
Err(RecorderError::without_source(format!(
"Failed to parse episode table row, missing fields: {missing_fields:?}, row \
index: {idx}"
)))
}
}
}
pub struct MikanClassicEpisodeTablePage {
pub page: i32,
pub total: i32,
pub html: String,
pub rows: Vec<MikanClassicEpisodeTableRow>,
}
impl MikanClassicEpisodeTablePage {
pub fn from_html(
html: String,
mikan_base_url: &Url,
page: i32,
updated_info: Option<(i32, i32)>,
) -> RecorderResult<Self> {
let tr_selector = &Selector::parse("tbody tr").unwrap();
let doc = Html::parse_document(&html);
if let Some(mut total) = TOTAL_PAGE_REGEX
.captures(&html)
.and_then(|c| c.get(1))
.and_then(|s| s.as_str().parse::<i32>().ok())
{
if let Some((_, update_total)) = updated_info {
total = update_total;
}
let rev_id = total - page;
let rows = doc
.select(tr_selector)
.rev()
.enumerate()
.map(|(idx, tr)| {
MikanClassicEpisodeTableRow::from_element_ref(
tr,
rev_id,
idx as i32,
mikan_base_url,
)
})
.collect::<RecorderResult<Vec<_>>>()?;
Ok(Self {
page,
total,
html,
rows,
})
} else {
Err(RecorderError::without_source(
"Failed to parse pagination meta and rows".into(),
))
}
}
pub fn save_to_files(&self) -> RecorderResult<()> {
use polars::prelude::*;
let rev_id = self.total - self.page;
let parquet_path = TEST_FOLDER.join(format!("parquet/rev_{rev_id}.parquet"));
let csv_path = TEST_FOLDER.join(format!("csv/rev_{rev_id}.csv"));
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
std::fs::write(html_path, self.html.clone())?;
let mut id_vec = Vec::new();
let mut publish_at_vec = Vec::new();
let mut mikan_fansub_id_vec = Vec::new();
let mut fansub_name_vec = Vec::new();
let mut mikan_episode_id_vec = Vec::new();
let mut original_name_vec = Vec::new();
let mut magnet_link_vec = Vec::new();
let mut file_size_vec = Vec::new();
let mut torrent_link_vec = Vec::new();
for row in &self.rows {
id_vec.push(row.id);
publish_at_vec.push(row.publish_at.to_rfc3339());
mikan_fansub_id_vec.push(row.mikan_fansub_id.clone());
fansub_name_vec.push(row.fansub_name.clone());
mikan_episode_id_vec.push(row.mikan_episode_id.clone());
original_name_vec.push(row.original_name.clone());
magnet_link_vec.push(row.magnet_link.clone());
file_size_vec.push(row.file_size.clone());
torrent_link_vec.push(row.torrent_link.clone());
}
let df = df! [
"id" => id_vec,
"publish_at_timestamp" => publish_at_vec,
"mikan_fansub_id" => mikan_fansub_id_vec,
"fansub_name" => fansub_name_vec,
"mikan_episode_id" => mikan_episode_id_vec,
"original_name" => original_name_vec,
"magnet_link" => magnet_link_vec,
"file_size" => file_size_vec,
"torrent_link" => torrent_link_vec,
]
.map_err(|e| {
let message = format!("Failed to create DataFrame: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
let mut parquet_file = std::fs::File::create(&parquet_path)?;
ParquetWriter::new(&mut parquet_file)
.finish(&mut df.clone())
.map_err(|e| {
let message = format!("Failed to write parquet file: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
let mut csv_file = std::fs::File::create(&csv_path)?;
CsvWriter::new(&mut csv_file)
.include_header(true)
.with_quote_style(QuoteStyle::Always)
.finish(&mut df.clone())
.map_err(|e| {
let message = format!("Failed to write csv file: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
println!(
"[{}/{}] Saved {} rows to rev_{}.{{parquet,html,csv}}",
self.page,
self.total,
self.rows.len(),
rev_id
);
Ok(())
}
pub fn waiting_rev_ids(total: i32) -> RecorderResult<Vec<i32>> {
let dir = TEST_FOLDER.join("csv");
let files = std::fs::read_dir(dir)?;
let rev_ids = files
.filter_map(|f| f.ok())
.filter_map(|f| {
f.path().file_stem().and_then(|s| {
s.to_str().and_then(|s| {
if s.starts_with("rev_") {
s.replace("rev_", "").parse::<i32>().ok()
} else {
None
}
})
})
})
.collect::<HashSet<_>>();
Ok((0..total)
.filter(|rev_id| !rev_ids.contains(rev_id))
.collect::<Vec<_>>())
}
}
async fn scrape_mikan_classic_episode_table_page(
mikan_client: &MikanClient,
page: i32,
updated_info: Option<(i32, i32)>,
) -> RecorderResult<MikanClassicEpisodeTablePage> {
let mikan_base_url = mikan_client.base_url();
let url = mikan_base_url.join(&format!("/Home/Classic/{page}"))?;
if let Some((rev_id, update_total)) = updated_info.as_ref() {
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
if html_path.exists() {
let html = std::fs::read_to_string(&html_path)?;
println!("[{page}/{update_total}] html exists, skipping fetch");
return MikanClassicEpisodeTablePage::from_html(
html,
mikan_base_url,
page,
updated_info,
);
}
}
let total = if let Some((_, update_total)) = updated_info.as_ref() {
update_total.to_string()
} else {
"Unknown".to_string()
};
println!("[{page}/{total}] fetching html...");
let html = fetch_html(mikan_client, url).await?;
println!("[{page}/{total}] fetched html done");
std::fs::write(TEST_FOLDER.join("html/temp.html"), html.clone())?;
MikanClassicEpisodeTablePage::from_html(html, mikan_base_url, page, updated_info)
}
async fn scrape_mikan_classic_episode_table_page_from_rev_id(
mikan_client: &MikanClient,
total: i32,
rev_idx: i32,
) -> RecorderResult<MikanClassicEpisodeTablePage> {
let page = total - rev_idx;
scrape_mikan_classic_episode_table_page(mikan_client, page, Some((rev_idx, total))).await
}
async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()> {
use polars::prelude::*;
let dir = TEST_FOLDER.join("parquet");
let files = std::fs::read_dir(dir)?;
let parquet_paths = files
.filter_map(|f| f.ok())
.filter_map(|f| {
let path = f.path();
if let Some(ext) = path.extension()
&& ext == "parquet"
&& path
.file_stem()
.is_some_and(|f| f.to_string_lossy().starts_with("rev_"))
{
Some(path)
} else {
None
}
})
.collect::<Vec<_>>();
if parquet_paths.is_empty() {
return Err(RecorderError::without_source(
"No parquet files found to merge".into(),
));
}
println!("Found {} parquet files to merge", parquet_paths.len());
// Read and merge all parquet files
let mut all_dfs = Vec::new();
for path in &parquet_paths {
println!("Reading {path:?}");
let file = std::fs::File::open(path)?;
let df = ParquetReader::new(file).finish().map_err(|e| {
let message = format!("Failed to read parquet file {path:?}: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
all_dfs.push(df);
}
let lazy_frames: Vec<LazyFrame> = all_dfs.into_iter().map(|df| df.lazy()).collect();
let merged_df = concat_lf_diagonal(&lazy_frames, UnionArgs::default())
.map_err(|e| {
let message = format!("Failed to concat DataFrames: {e}");
RecorderError::with_source(Box::new(e), message)
})?
.sort(
["publish_at_timestamp"],
SortMultipleOptions::default().with_order_descending(true),
)
.unique(
Some(vec![
"mikan_fansub_id".to_string(),
"mikan_episode_id".to_string(),
]),
UniqueKeepStrategy::First,
)
.collect()
.map_err(|e| {
let message = format!("Failed to collect lazy DataFrame: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
fn select_columns_and_write(
merged_df: DataFrame,
name: &str,
columns: &[&str],
) -> RecorderResult<()> {
let result_df = merged_df
.lazy()
.sort(["publish_at_timestamp"], SortMultipleOptions::default())
.select(columns.iter().map(|c| col(*c)).collect_vec())
.collect()
.map_err(|e| {
let message = format!("Failed to sort and select columns: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
let output_path = TEST_FOLDER.join(format!("parquet/{name}.parquet"));
let mut output_file = std::fs::File::create(&output_path)?;
ParquetWriter::new(&mut output_file)
.set_parallel(true)
.with_compression(ParquetCompression::Zstd(Some(
ZstdLevel::try_new(22).unwrap(),
)))
.finish(&mut result_df.clone())
.map_err(|e| {
let message = format!("Failed to write merged parquet file: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
println!("Merged {} rows into {output_path:?}", result_df.height());
Ok(())
}
select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
// select_columns_and_write(
// merged_df.clone(),
// "lite",
// &[
// "mikan_fansub_id",
// "fansub_name",
// "mikan_episode_id",
// "original_name",
// ],
// )?;
// select_columns_and_write(
// merged_df,
// "full",
// &[
// "id",
// "publish_at_timestamp",
// "mikan_fansub_id",
// "fansub_name",
// "mikan_episode_id",
// "original_name",
// "magnet_link",
// "file_size",
// "torrent_link",
// ],
// )?;
Ok(())
}
#[tokio::main]
async fn main() -> RecorderResult<()> {
std::fs::create_dir_all(TEST_FOLDER.join("html"))?;
std::fs::create_dir_all(TEST_FOLDER.join("parquet"))?;
std::fs::create_dir_all(TEST_FOLDER.join("csv"))?;
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
http_client: HttpClientConfig {
exponential_backoff_max_retries: Some(3),
leaky_bucket_max_tokens: Some(2),
leaky_bucket_initial_tokens: Some(1),
leaky_bucket_refill_tokens: Some(1),
leaky_bucket_refill_interval: Some(std::time::Duration::from_millis(1000)),
user_agent: Some(
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
.to_string(),
),
..Default::default()
},
base_url: Url::parse("https://mikanani.me")?,
})
.await?;
let first_page_and_pagination_info =
scrape_mikan_classic_episode_table_page(&mikan_scrape_client, 1, None).await?;
let total_page = first_page_and_pagination_info.total;
first_page_and_pagination_info.save_to_files()?;
let next_rev_ids = MikanClassicEpisodeTablePage::waiting_rev_ids(total_page)?;
for todo_rev_id in next_rev_ids {
let page = scrape_mikan_classic_episode_table_page_from_rev_id(
&mikan_scrape_client,
total_page,
todo_rev_id,
)
.await?;
page.save_to_files()?;
}
// Merge all parquet files
println!("\nMerging all parquet files...");
merge_mikan_classic_episodes_and_strip_columns().await?;
println!("Merge completed!");
Ok(())
}
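The example above is registered in apps/recorder/Cargo.toml with required-features = ["playground"], so a plausible invocation, assuming a checkout of this workspace, is:

cargo run -p recorder --example mikan_collect_classic_eps --features playground

It writes one rev_N.{html,csv,parquet} set per scraped page under tests/resources/mikan/classic_episodes/ and finally merges them into parquet/tiny.parquet, the test resource added in commit cde3361458.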


@@ -6,7 +6,7 @@ use inquire::{Password, Text, validator::Validation};
use recorder::{
    crypto::UserPassCredential,
    extract::mikan::{
-        MikanClient, MikanConfig, MikanRssItem, build_mikan_bangumi_expand_subscribed_url,
+        MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
        extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
        extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
    },

@@ -65,7 +65,7 @@
        .prompt()?;
    let mikan_scrape_client = mikan_scrape_client
-        .fork_with_credential(UserPassCredential {
+        .fork_with_userpass_credential(UserPassCredential {
            username,
            password,
            user_agent: None,

@@ -193,12 +193,12 @@
        let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
        rss_items
            .into_iter()
-            .map(MikanRssItem::try_from)
+            .map(MikanRssEpisodeItem::try_from)
            .collect::<Result<Vec<_>, _>>()
    }?;
    for rss_item in rss_items {
        {
-            let episode_homepage_url = rss_item.homepage;
+            let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
            let episode_homepage_doppel_path =
                MikanDoppelPath::new(episode_homepage_url.clone());
            tracing::info!(title = rss_item.title, "Scraping episode...");

@@ -212,7 +212,7 @@
            };
        }
        {
-            let episode_torrent_url = rss_item.url;
+            let episode_torrent_url = rss_item.torrent_link;
            let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
            tracing::info!(title = rss_item.title, "Scraping episode torrent...");
            if !episode_torrent_doppel_path.exists_any() {


@@ -4,7 +4,7 @@ use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image,
use recorder::{
    errors::RecorderResult,
    extract::mikan::{
-        MikanClient, MikanConfig, MikanRssItem,
+        MikanClient, MikanConfig, MikanRssEpisodeItem,
        extract_mikan_episode_meta_from_episode_homepage_html,
    },
    test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},

@@ -41,17 +41,17 @@ async fn main() -> RecorderResult<()> {
    let mikan_base_url = mikan_scrape_client.base_url().clone();
    tracing::info!("Scraping subscriber subscription...");
    let subscriber_subscription =
-        fs::read("tests/resources/mikan/MyBangumi-2025-spring.rss").await?;
+        fs::read("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
    let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
-    let rss_items: Vec<MikanRssItem> = channel
+    let rss_items: Vec<MikanRssEpisodeItem> = channel
        .items
        .into_iter()
-        .map(MikanRssItem::try_from)
+        .map(MikanRssEpisodeItem::try_from)
        .collect::<Result<Vec<_>, _>>()?;
    for rss_item in rss_items {
        let episode_homepage_meta = {
            tracing::info!(title = rss_item.title, "Scraping episode homepage...");
-            let episode_homepage_url = rss_item.homepage;
+            let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
            let episode_homepage_doppel_path = MikanDoppelPath::new(episode_homepage_url.clone());
            let episode_homepage_data = if !episode_homepage_doppel_path.exists_any() {
                let episode_homepage_data =

@@ -72,7 +72,7 @@ async fn main() -> RecorderResult<()> {
        }?;
        {
-            let episode_torrent_url = rss_item.url;
+            let episode_torrent_url = rss_item.torrent_link;
            let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
            tracing::info!(title = rss_item.title, "Scraping episode torrent...");
            if !episode_torrent_doppel_path.exists_any() {

@@ -134,6 +134,81 @@ async fn main() -> RecorderResult<()> {
                tracing::info!(title = rss_item.title, "Bangumi homepage already exists");
            };
        }
{
let bangumi_rss_url = episode_homepage_meta
.bangumi_hash()
.build_rss_url(mikan_base_url.clone());
let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
tracing::info!(title = rss_item.title, "Scraping bangumi rss...");
let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
tracing::info!(title = rss_item.title, "Bangumi rss saved");
bangumi_rss_data
} else {
tracing::info!(title = rss_item.title, "Bangumi rss already exists");
String::from_utf8(bangumi_rss_doppel_path.read()?)?
};
let channel = rss::Channel::read_from(bangumi_rss_data.as_bytes())?;
let rss_items: Vec<MikanRssEpisodeItem> = channel
.items
.into_iter()
.map(MikanRssEpisodeItem::try_from)
.collect::<Result<Vec<_>, _>>()?;
for rss_item in rss_items {
{
tracing::info!(title = rss_item.title, "Scraping episode homepage...");
let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
let episode_homepage_doppel_path =
MikanDoppelPath::new(episode_homepage_url.clone());
if !episode_homepage_doppel_path.exists_any() {
let episode_homepage_data =
fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
episode_homepage_doppel_path.write(&episode_homepage_data)?;
tracing::info!(title = rss_item.title, "Episode homepage saved");
} else {
tracing::info!(title = rss_item.title, "Episode homepage already exists");
};
};
{
let episode_torrent_url = rss_item.torrent_link;
let episode_torrent_doppel_path =
MikanDoppelPath::new(episode_torrent_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
if !episode_torrent_doppel_path.exists_any() {
match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
Ok(episode_torrent_data) => {
episode_torrent_doppel_path.write(&episode_torrent_data)?;
tracing::info!(title = rss_item.title, "Episode torrent saved");
}
Err(e) => {
if let FetchError::ReqwestError { source } = &e
&& source.status().is_some_and(|status| {
status == reqwest::StatusCode::NOT_FOUND
})
{
tracing::warn!(
title = rss_item.title,
"Episode torrent not found, maybe deleted since new \
version"
);
episode_torrent_doppel_path
.write_meta(MikanDoppelMeta { status: 404 })?;
} else {
Err(e)?;
}
}
}
tracing::info!(title = rss_item.title, "Episode torrent saved");
} else {
tracing::info!(title = rss_item.title, "Episode torrent already exists");
}
}
}
}
    }
    tracing::info!("Scraping subscriber subscription done");
    Ok(())


@@ -26,25 +26,25 @@ host = '{{ get_env(name="HOST", default="localhost") }}'
enable = true

# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
-[server.middleware.request_id]
+[server.middlewares.request_id]
enable = true

-[server.middleware.logger]
+[server.middlewares.logger]
enable = true

# when your code is panicked, the request still returns 500 status code.
-[server.middleware.catch_panic]
+[server.middlewares.catch_panic]
enable = true

# Timeout for incoming requests middleware. requests that take more time from the configuration will cute and 408 status code will returned.
-[server.middleware.timeout_request]
+[server.middlewares.timeout_request]
enable = false
# Duration time in milliseconds.
timeout = 5000

# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
-# - https://loco.rs
+# - https://konobangu.com
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type

@@ -53,7 +53,10 @@ timeout = 5000
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
-[server.middleware.cors]
+[server.middlewares.cors]
+enable = true
+
+[server.middlewares.compression]
enable = true

# Database Configuration

@@ -86,6 +89,14 @@ leaky_bucket_initial_tokens = 1
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500

+[mikan.http_client.proxy]
+server = '{{ get_env(name="MIKAN_PROXY", default = "") }}'
+auth_header = '{{ get_env(name="MIKAN_PROXY_AUTH_HEADER", default = "") }}'
+no_proxy = '{{ get_env(name="MIKAN_NO_PROXY", default = "") }}'
+accept_invalid_certs = '{{ get_env(name="MIKAN_PROXY_ACCEPT_INVALID_CERTS", default = "false") }}'

[auth]
auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'


@@ -21,6 +21,9 @@ pub struct MainCliArgs {
    /// Explicit environment
    #[arg(short, long)]
    environment: Option<Environment>,
+
+    #[arg(long)]
+    graceful_shutdown: Option<bool>,
}

pub struct AppBuilder {

@@ -28,6 +31,7 @@ pub struct AppBuilder {
    config_file: Option<String>,
    working_dir: String,
    environment: Environment,
+    pub graceful_shutdown: bool,
}

impl AppBuilder {

@@ -61,7 +65,8 @@
        builder = builder
            .config_file(args.config_file)
            .dotenv_file(args.dotenv_file)
-            .environment(environment);
+            .environment(environment)
+            .graceful_shutdown(args.graceful_shutdown.unwrap_or(true));

        Ok(builder)
    }

@@ -118,6 +123,12 @@
        ret
    }

+    pub fn graceful_shutdown(self, graceful_shutdown: bool) -> Self {
+        let mut ret = self;
+        ret.graceful_shutdown = graceful_shutdown;
+        ret
+    }
+
    pub fn dotenv_file(self, dotenv_file: Option<String>) -> Self {
        let mut ret = self;
        ret.dotenv_file = dotenv_file;

@@ -141,6 +152,7 @@ impl Default for AppBuilder {
            dotenv_file: None,
            config_file: None,
            working_dir: String::from("."),
+            graceful_shutdown: true,
        }
    }
}


@@ -11,6 +11,11 @@ leaky_bucket_initial_tokens = 0
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500

+[mikan.http_client.proxy]
+
+[mikan.http_client.proxy.headers]

[graphql]
depth_limit = inf
complexity_limit = inf

@@ -22,3 +27,5 @@ complexity_limit = inf
[task]

[message]

+[media]


@@ -11,8 +11,8 @@ use super::env::Environment;
use crate::{
    auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
    errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
-    logger::LoggerConfig, message::MessageConfig, storage::StorageConfig, task::TaskConfig,
-    web::WebServerConfig,
+    logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
+    task::TaskConfig, web::WebServerConfig,
};

const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");

@@ -27,6 +27,7 @@ pub struct AppConfig {
    pub mikan: MikanConfig,
    pub crypto: CryptoConfig,
    pub graphql: GraphQLConfig,
+    pub media: MediaConfig,
    pub logger: LoggerConfig,
    pub database: DatabaseConfig,
    pub task: TaskConfig,


@@ -4,16 +4,9 @@ use tokio::sync::OnceCell;
use super::{Environment, config::AppConfig};
use crate::{
-    auth::AuthService,
-    cache::CacheService,
-    crypto::CryptoService,
-    database::DatabaseService,
-    errors::RecorderResult,
-    extract::mikan::MikanClient,
-    graphql::GraphQLService,
-    logger::LoggerService,
-    message::MessageService,
-    storage::{StorageService, StorageServiceTrait},
+    auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
+    errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
+    logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
    task::TaskService,
};

@@ -25,12 +18,13 @@ pub trait AppContextTrait: Send + Sync + Debug {
    fn mikan(&self) -> &MikanClient;
    fn auth(&self) -> &AuthService;
    fn graphql(&self) -> &GraphQLService;
-    fn storage(&self) -> &dyn StorageServiceTrait;
+    fn storage(&self) -> &StorageService;
    fn working_dir(&self) -> &String;
    fn environment(&self) -> &Environment;
    fn crypto(&self) -> &CryptoService;
    fn task(&self) -> &TaskService;
    fn message(&self) -> &MessageService;
+    fn media(&self) -> &MediaService;
}

pub struct AppContext {

@@ -45,6 +39,7 @@ pub struct AppContext {
    working_dir: String,
    environment: Environment,
    message: MessageService,
+    media: MediaService,
    task: OnceCell<TaskService>,
    graphql: OnceCell<GraphQLService>,
}

@@ -65,6 +60,7 @@ impl AppContext {
        let auth = AuthService::from_conf(config.auth).await?;
        let mikan = MikanClient::from_config(config.mikan).await?;
        let crypto = CryptoService::from_config(config.crypto).await?;
+        let media = MediaService::from_config(config.media).await?;

        let ctx = Arc::new(AppContext {
            config: config_cloned,

@@ -78,6 +74,7 @@
            working_dir: working_dir.to_string(),
            crypto,
            message,
+            media,
            task: OnceCell::new(),
            graphql: OnceCell::new(),
        });

@@ -126,7 +123,7 @@ impl AppContextTrait for AppContext {
    fn graphql(&self) -> &GraphQLService {
        self.graphql.get().expect("graphql should be set")
    }
-    fn storage(&self) -> &dyn StorageServiceTrait {
+    fn storage(&self) -> &StorageService {
        &self.storage
    }
    fn working_dir(&self) -> &String {

@@ -144,4 +141,7 @@
    fn message(&self) -> &MessageService {
        &self.message
    }
+    fn media(&self) -> &MediaService {
+        &self.media
+    }
}


@@ -6,13 +6,15 @@ use tracing::instrument;
use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{
-    errors::RecorderResult,
+    errors::{RecorderError, RecorderResult},
    web::{
        controller::{self, core::ControllerTrait},
        middleware::default_middleware_stack,
    },
};

+pub const PROJECT_NAME: &str = "konobangu";
+
pub struct App {
    pub context: Arc<dyn AppContextTrait>,
    pub builder: AppBuilder,

@@ -51,32 +53,76 @@ impl App {
        let mut router = Router::<Arc<dyn AppContextTrait>>::new();

-        let (graphql_c, oidc_c, metadata_c) = futures::try_join!(
+        let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
            controller::graphql::create(context.clone()),
            controller::oidc::create(context.clone()),
-            controller::metadata::create(context.clone())
+            controller::metadata::create(context.clone()),
+            controller::r#static::create(context.clone()),
+            controller::feeds::create(context.clone()),
        )?;

-        for c in [graphql_c, oidc_c, metadata_c] {
+        for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
            router = c.apply_to(router);
        }

        let middlewares = default_middleware_stack(context.clone());
        for mid in middlewares {
+            if mid.is_enabled() {
                router = mid.apply(router)?;
                tracing::info!(name = mid.name(), "+middleware");
+            }
        }

        let router = router
            .with_state(context.clone())
            .into_make_service_with_connect_info::<SocketAddr>();

-        axum::serve(listener, router)
-            .with_graceful_shutdown(async move {
-                Self::shutdown_signal().await;
-                tracing::info!("shutting down...");
-            })
-            .await?;
+        let task = context.task();
+        let graceful_shutdown = self.builder.graceful_shutdown;
+        tokio::try_join!(
+            async {
+                let axum_serve = axum::serve(listener, router);
+                if graceful_shutdown {
+                    axum_serve
+                        .with_graceful_shutdown(async move {
+                            Self::shutdown_signal().await;
+                            tracing::info!("axum shutting down...");
+                        })
+                        .await?;
+                } else {
+                    axum_serve.await?;
+                }
+                Ok::<(), RecorderError>(())
+            },
+            async {
+                {
+                    let monitor = task.setup_monitor().await?;
+                    if graceful_shutdown {
+                        monitor
+                            .run_with_signal(async move {
+                                Self::shutdown_signal().await;
+                                tracing::info!("apalis shutting down...");
+                                Ok(())
+                            })
+                            .await?;
+                    } else {
+                        monitor.run().await?;
+                    }
+                }
+                Ok::<(), RecorderError>(())
+            },
+            async {
+                let listener = task.setup_listener().await?;
+                listener.listen().await?;
+                Ok::<(), RecorderError>(())
+            }
+        )?;

        Ok(())
    }

@@ -108,7 +154,7 @@
        #[cfg(not(unix))]
        let terminate = std::future::pending::<()>();

-        #[cfg(all(not(unix), debug_assertions))]
+        #[cfg(not(all(unix, debug_assertions)))]
        let quit = std::future::pending::<()>();

        tokio::select! {

@@ -4,7 +4,7 @@ pub mod context;
pub mod core;
pub mod env;

-pub use core::App;
+pub use core::{App, PROJECT_NAME};
pub use builder::AppBuilder;
pub use config::AppConfig;


@@ -9,7 +9,7 @@ use super::{
    service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{
-    app::AppContextTrait,
+    app::{AppContextTrait, PROJECT_NAME},
    models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
};

@@ -86,7 +86,7 @@ impl AuthServiceTrait for BasicAuthService {
    }

    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
-        Some(HeaderValue::from_static(r#"Basic realm="konobangu""#))
+        Some(HeaderValue::from_str(format!("Basic realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
    }

    fn auth_type(&self) -> AuthType {


@@ -11,13 +11,14 @@ use openidconnect::{
};
use serde::{Deserialize, Serialize};
use snafu::prelude::*;
+use util::OptDynErr;

use crate::models::auth::AuthType;

#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum AuthError {
+    #[snafu(display("Permission denied"))]
+    PermissionError,
    #[snafu(display("Not support auth method"))]
    NotSupportAuthMethod {
        supported: Vec<AuthType>,

@@ -93,12 +94,6 @@
        column: String,
        context_path: String,
    },
-    #[snafu(display("GraphQL permission denied since {field}"))]
-    GraphqlStaticPermissionError {
-        #[snafu(source)]
-        source: OptDynErr,
-        field: String,
-    },
}

impl AuthError {


@@ -32,7 +32,11 @@ use super::{
    errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
    service::{AuthServiceTrait, AuthUserInfo},
};
-use crate::{app::AppContextTrait, errors::RecorderError, models::auth::AuthType};
+use crate::{
+    app::{AppContextTrait, PROJECT_NAME},
+    errors::RecorderError,
+    models::auth::AuthType,
+};

pub struct OidcHttpClient(pub Arc<HttpClient>);

@@ -351,7 +355,7 @@ impl AuthServiceTrait for OidcAuthService {
    }

    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
-        Some(HeaderValue::from_static(r#"Bearer realm="konobangu""#))
+        Some(HeaderValue::from_str(format!("Bearer realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
    }

    fn auth_type(&self) -> AuthType {


@@ -11,14 +11,16 @@ use super::DatabaseConfig;
use crate::{errors::RecorderResult, migrations::Migrator}; use crate::{errors::RecorderResult, migrations::Migrator};
pub struct DatabaseService { pub struct DatabaseService {
pub config: DatabaseConfig,
connection: DatabaseConnection, connection: DatabaseConnection,
#[cfg(all(any(test, feature = "playground"), feature = "testcontainers"))] #[cfg(feature = "testcontainers")]
pub container: pub container:
Option<testcontainers::ContainerAsync<testcontainers_modules::postgres::Postgres>>, Option<testcontainers::ContainerAsync<testcontainers_modules::postgres::Postgres>>,
} }
impl DatabaseService { impl DatabaseService {
pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> { pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> {
let db_config = config.clone();
let mut opt = ConnectOptions::new(&config.uri); let mut opt = ConnectOptions::new(&config.uri);
opt.max_connections(config.max_connections) opt.max_connections(config.max_connections)
.min_connections(config.min_connections) .min_connections(config.min_connections)
@@ -50,8 +52,9 @@ impl DatabaseService {
let me = Self { let me = Self {
connection: db, connection: db,
#[cfg(all(any(test, feature = "playground"), feature = "testcontainers"))] #[cfg(feature = "testcontainers")]
container: None, container: None,
config: db_config,
}; };
if config.auto_migrate { if config.auto_migrate {
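A small call-site sketch (not from the diff) showing that the service now retains its DatabaseConfig after construction; the helper name and logging field are illustrative:

// Hedged sketch: construct the service and read the retained config back.
async fn connect(config: DatabaseConfig) -> RecorderResult<DatabaseService> {
    let db = DatabaseService::from_config(config).await?;
    tracing::debug!(max_connections = db.config.max_connections, "database service ready");
    Ok(db)
}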

View File

@@ -5,8 +5,7 @@ use axum::{
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use fetch::{FetchError, HttpClientError, reqwest, reqwest_middleware}; use fetch::{FetchError, HttpClientError, reqwest, reqwest_middleware};
use http::StatusCode; use http::{HeaderMap, StatusCode};
use serde::{Deserialize, Deserializer, Serialize};
use snafu::Snafu; use snafu::Snafu;
use crate::{ use crate::{
@@ -19,6 +18,24 @@ use crate::{
#[derive(Snafu, Debug)] #[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))] #[snafu(visibility(pub(crate)))]
pub enum RecorderError { pub enum RecorderError {
#[snafu(display(
"HTTP {status} {reason}, source = {source:?}",
status = status,
reason = status.canonical_reason().unwrap_or("Unknown")
))]
HttpResponseError {
status: StatusCode,
headers: Option<HeaderMap>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(transparent)]
ImageError { source: image::ImageError },
#[cfg(feature = "jxl")]
#[snafu(transparent)]
JxlEncodeError { source: jpegxl_rs::EncodeError },
#[snafu(transparent, context(false))]
HttpError { source: http::Error },
#[snafu(transparent, context(false))] #[snafu(transparent, context(false))]
FancyRegexError { FancyRegexError {
#[snafu(source(from(fancy_regex::Error, Box::new)))] #[snafu(source(from(fancy_regex::Error, Box::new)))]
@@ -28,12 +45,14 @@ pub enum RecorderError {
NetAddrParseError { source: std::net::AddrParseError }, NetAddrParseError { source: std::net::AddrParseError },
#[snafu(transparent)] #[snafu(transparent)]
RegexError { source: regex::Error }, RegexError { source: regex::Error },
#[snafu(transparent)] #[snafu(display("Invalid method"))]
InvalidMethodError { source: http::method::InvalidMethod }, InvalidMethodError,
#[snafu(transparent)] #[snafu(display("Invalid header value"))]
InvalidHeaderNameError { InvalidHeaderValueError,
source: http::header::InvalidHeaderName, #[snafu(display("Invalid header name"))]
}, InvalidHeaderNameError,
#[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
MissingOriginError,
#[snafu(transparent)] #[snafu(transparent)]
TracingAppenderInitError { TracingAppenderInitError {
source: tracing_appender::rolling::InitError, source: tracing_appender::rolling::InitError,
@@ -73,12 +92,8 @@ pub enum RecorderError {
source: Box<opendal::Error>, source: Box<opendal::Error>,
}, },
#[snafu(transparent)] #[snafu(transparent)]
InvalidHeaderValueError {
source: http::header::InvalidHeaderValue,
},
#[snafu(transparent)]
HttpClientError { source: HttpClientError }, HttpClientError { source: HttpClientError },
#[cfg(all(any(test, feature = "playground"), feature = "testcontainers"))] #[cfg(feature = "testcontainers")]
#[snafu(transparent)] #[snafu(transparent)]
TestcontainersError { TestcontainersError {
source: testcontainers::TestcontainersError, source: testcontainers::TestcontainersError,
@@ -103,7 +118,7 @@ pub enum RecorderError {
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))] #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr, source: OptDynErr,
}, },
#[snafu(display("Model Entity {entity} not found"))] #[snafu(display("Model Entity {entity} not found or not belong to subscriber"))]
ModelEntityNotFound { entity: Cow<'static, str> }, ModelEntityNotFound { entity: Cow<'static, str> },
#[snafu(transparent)] #[snafu(transparent)]
FetchError { source: FetchError }, FetchError { source: FetchError },
@@ -123,9 +138,27 @@ pub enum RecorderError {
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))] #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr, source: OptDynErr,
}, },
#[snafu(display("Invalid task id: {message}"))]
InvalidTaskId { message: String },
} }
impl RecorderError { impl RecorderError {
pub fn from_status(status: StatusCode) -> Self {
Self::HttpResponseError {
status,
headers: None,
source: None.into(),
}
}
pub fn from_status_and_headers(status: StatusCode, headers: HeaderMap) -> Self {
Self::HttpResponseError {
status,
headers: Some(headers),
source: None.into(),
}
}
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self { pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError { Self::MikanMetaMissingFieldError {
field, field,
@@ -175,10 +208,53 @@ impl snafu::FromString for RecorderError {
} }
} }
impl From<StatusCode> for RecorderError {
fn from(status: StatusCode) -> Self {
Self::HttpResponseError {
status,
headers: None,
source: None.into(),
}
}
}
impl From<(StatusCode, HeaderMap)> for RecorderError {
fn from((status, headers): (StatusCode, HeaderMap)) -> Self {
Self::HttpResponseError {
status,
headers: Some(headers),
source: None.into(),
}
}
}
impl IntoResponse for RecorderError { impl IntoResponse for RecorderError {
fn into_response(self) -> Response { fn into_response(self) -> Response {
match self { match self {
Self::AuthError { source: auth_error } => auth_error.into_response(), Self::AuthError { source: auth_error } => auth_error.into_response(),
Self::HttpResponseError {
status,
headers,
source,
} => {
let message = source
.into_inner()
.map(|s| s.to_string())
.unwrap_or_else(|| {
String::from(status.canonical_reason().unwrap_or("Unknown"))
});
(
status,
headers,
Json::<StandardErrorResponse>(StandardErrorResponse::from(message)),
)
.into_response()
}
Self::ModelEntityNotFound { entity } => (
StatusCode::NOT_FOUND,
Json::<StandardErrorResponse>(StandardErrorResponse::from(entity.to_string())),
)
.into_response(),
err => ( err => (
StatusCode::INTERNAL_SERVER_ERROR, StatusCode::INTERNAL_SERVER_ERROR,
Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())), Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
@@ -188,28 +264,6 @@ impl IntoResponse for RecorderError {
} }
} }
impl Serialize for RecorderError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
impl<'de> Deserialize<'de> for RecorderError {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(Self::Whatever {
message: s,
source: None.into(),
})
}
}
impl From<reqwest::Error> for RecorderError { impl From<reqwest::Error> for RecorderError {
fn from(error: reqwest::Error) -> Self { fn from(error: reqwest::Error) -> Self {
FetchError::from(error).into() FetchError::from(error).into()
@@ -222,4 +276,22 @@ impl From<reqwest_middleware::Error> for RecorderError {
} }
} }
impl From<http::header::InvalidHeaderValue> for RecorderError {
fn from(_error: http::header::InvalidHeaderValue) -> Self {
Self::InvalidHeaderValueError
}
}
impl From<http::header::InvalidHeaderName> for RecorderError {
fn from(_error: http::header::InvalidHeaderName) -> Self {
Self::InvalidHeaderNameError
}
}
impl From<http::method::InvalidMethod> for RecorderError {
fn from(_error: http::method::InvalidMethod) -> Self {
Self::InvalidMethodError
}
}
pub type RecorderResult<T> = Result<T, RecorderError>; pub type RecorderResult<T> = Result<T, RecorderError>;
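A hedged sketch (not part of the diff) of how the new status-based constructors and From impls can be used at a call site; the function itself is hypothetical:

use http::{HeaderMap, StatusCode};

// Turn a non-success upstream response into the new HttpResponseError variant,
// here via the From<(StatusCode, HeaderMap)> impl added above.
fn ensure_success(status: StatusCode, headers: HeaderMap) -> RecorderResult<()> {
    if status.is_success() {
        Ok(())
    } else {
        Err(RecorderError::from((status, headers)))
    }
}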

View File

@@ -1,3 +1,4 @@
use chrono::{DateTime, Utc};
use fancy_regex::Regex as FancyRegex; use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use quirks_path::Path; use quirks_path::Path;
@@ -33,6 +34,14 @@ lazy_static! {
Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap(); Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
} }
#[derive(Clone, Debug)]
pub struct EpisodeEnclosureMeta {
pub magnet_link: Option<String>,
pub torrent_link: Option<String>,
pub pub_date: Option<DateTime<Utc>>,
pub content_length: Option<i64>,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeMediaMeta { pub struct TorrentEpisodeMediaMeta {
pub fansub: Option<String>, pub fansub: Option<String>,
@@ -268,8 +277,8 @@ mod tests {
) )
} }
pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) { pub fn test_torrent_ep_parser(origin_name: &str, expected: &str) {
let extname = Path::new(raw_name) let extname = Path::new(origin_name)
.extension() .extension()
.map(|e| format!(".{e}")) .map(|e| format!(".{e}"))
.unwrap_or_default() .unwrap_or_default()
@@ -278,7 +287,7 @@ mod tests {
if extname == ".srt" || extname == ".ass" { if extname == ".srt" || extname == ".ass" {
let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok(); let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
let found_raw = let found_raw =
parse_episode_subtitle_meta_from_torrent(Path::new(raw_name), None, None); parse_episode_subtitle_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned(); let found = found_raw.as_ref().ok().cloned();
if expected != found { if expected != found {
@@ -299,7 +308,8 @@ mod tests {
assert_eq!(expected, found); assert_eq!(expected, found);
} else { } else {
let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok(); let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
let found_raw = parse_episode_media_meta_from_torrent(Path::new(raw_name), None, None); let found_raw =
parse_episode_media_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned(); let found = found_raw.as_ref().ok().cloned();
if expected != found { if expected != found {

View File

@@ -1,8 +1,5 @@
use std::collections::HashMap;
use fancy_regex::Regex as FancyRegex; use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use maplit::hashmap;
use regex::Regex; use regex::Regex;
const LANG_ZH_TW: &str = "zh-tw"; const LANG_ZH_TW: &str = "zh-tw";
@@ -34,40 +31,4 @@ lazy_static! {
(LANG_JP, vec!["jp", "jpn", ""]), (LANG_JP, vec!["jp", "jpn", ""]),
] ]
}; };
pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】（）]").unwrap();
pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
hashmap! {
"" => 0,
"" => 1,
"" => 2,
"" => 3,
"" => 4,
"" => 5,
"" => 6,
"" => 7,
"" => 8,
"" => 9,
"" => 10,
"廿" => 20,
"" => 100,
"" => 1000,
"" => 0,
"" => 1,
"" => 2,
"" => 3,
"" => 4,
"" => 5,
"" => 6,
"" => 7,
"" => 8,
"" => 9,
"" => 10,
"" => 20,
"" => 100,
"" => 1000,
}
};
pub static ref ZH_NUM_RE: Regex =
Regex::new(r"[〇一二三四五六七八九十廿百千零壹贰叁肆伍陆柒捌玖拾念佰仟]").unwrap();
} }

View File

@@ -1,7 +1,12 @@
use axum::http::{HeaderName, HeaderValue, Uri, header, request::Parts}; use axum::{
extract::FromRequestParts,
http::{HeaderName, HeaderValue, Uri, header, request::Parts},
};
use itertools::Itertools; use itertools::Itertools;
use url::Url; use url::Url;
use crate::errors::RecorderError;
/// Fields from a "Forwarded" header per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4) /// Fields from a "Forwarded" header per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4)
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ForwardedHeader { pub struct ForwardedHeader {
@@ -101,9 +106,13 @@ pub struct ForwardedRelatedInfo {
pub origin: Option<String>, pub origin: Option<String>,
} }
impl ForwardedRelatedInfo { impl<T> FromRequestParts<T> for ForwardedRelatedInfo {
pub fn from_request_parts(request_parts: &Parts) -> ForwardedRelatedInfo { type Rejection = RecorderError;
let headers = &request_parts.headers; fn from_request_parts(
parts: &mut Parts,
_state: &T,
) -> impl Future<Output = Result<Self, Self::Rejection>> + Send {
let headers = &parts.headers;
let forwarded = headers let forwarded = headers
.get(header::FORWARDED) .get(header::FORWARDED)
.and_then(|s| ForwardedHeader::try_from(s.clone()).ok()); .and_then(|s| ForwardedHeader::try_from(s.clone()).ok());
@@ -132,17 +141,19 @@ impl ForwardedRelatedInfo {
.get(header::ORIGIN) .get(header::ORIGIN)
.and_then(|s| s.to_str().map(String::from).ok()); .and_then(|s| s.to_str().map(String::from).ok());
ForwardedRelatedInfo { futures::future::ready(Ok(ForwardedRelatedInfo {
host, host,
x_forwarded_for, x_forwarded_for,
x_forwarded_host, x_forwarded_host,
x_forwarded_proto, x_forwarded_proto,
forwarded, forwarded,
uri: request_parts.uri.clone(), uri: parts.uri.clone(),
origin, origin,
}))
} }
} }
impl ForwardedRelatedInfo {
pub fn resolved_protocol(&self) -> Option<&str> { pub fn resolved_protocol(&self) -> Option<&str> {
self.forwarded self.forwarded
.as_ref() .as_ref()
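With the new FromRequestParts impl, the struct can be used directly as an axum extractor; a hypothetical handler, not taken from the diff:

use axum::response::IntoResponse;

// Hedged sketch: axum builds ForwardedRelatedInfo from the request headers,
// then the handler reads the resolved protocol (falling back to "http").
async fn show_protocol(info: ForwardedRelatedInfo) -> impl IntoResponse {
    info.resolved_protocol().unwrap_or("http").to_owned()
}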

View File

@@ -2,10 +2,6 @@ use url::Url;
pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> { pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
let mut image_url = base_url.join(image_src).ok()?; let mut image_url = base_url.join(image_src).ok()?;
if let Some((_, value)) = image_url.query_pairs().find(|(key, _)| key == "webp") {
image_url.set_query(Some(&format!("webp={value}")));
} else {
image_url.set_query(None); image_url.set_query(None);
}
Some(image_url) Some(image_url)
} }
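With the webp special case dropped, any query string on the image URL is now removed; a small illustrative call with assumed path and query values:

use url::Url;

// Hedged sketch: the joined URL keeps its path but loses "?width=400&webp=1".
fn demo() -> Option<Url> {
    let base = Url::parse("https://mikanani.me/").ok()?;
    extract_image_src_from_str("/images/Bangumi/202504/example.jpg?width=400&webp=1", &base)
    // => Some(https://mikanani.me/images/Bangumi/202504/example.jpg)
}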

View File

@@ -1,4 +1,4 @@
use std::{fmt::Debug, ops::Deref, sync::Arc}; use std::{fmt::Debug, ops::Deref};
use fetch::{HttpClient, HttpClientTrait}; use fetch::{HttpClient, HttpClientTrait};
use maplit::hashmap; use maplit::hashmap;
@@ -136,7 +136,7 @@ impl MikanClient {
pub async fn submit_credential_form( pub async fn submit_credential_form(
&self, &self,
ctx: Arc<dyn AppContextTrait>, ctx: &dyn AppContextTrait,
subscriber_id: i32, subscriber_id: i32,
credential_form: MikanCredentialForm, credential_form: MikanCredentialForm,
) -> RecorderResult<credential_3rd::Model> { ) -> RecorderResult<credential_3rd::Model> {
@@ -149,7 +149,7 @@ impl MikanClient {
subscriber_id: Set(subscriber_id), subscriber_id: Set(subscriber_id),
..Default::default() ..Default::default()
} }
.try_encrypt(ctx.clone()) .try_encrypt(ctx)
.await?; .await?;
let credential: credential_3rd::Model = am.save(db).await?.try_into_model()?; let credential: credential_3rd::Model = am.save(db).await?.try_into_model()?;
@@ -158,8 +158,9 @@ impl MikanClient {
pub async fn sync_credential_cookies( pub async fn sync_credential_cookies(
&self, &self,
ctx: Arc<dyn AppContextTrait>, ctx: &dyn AppContextTrait,
credential_id: i32, credential_id: i32,
subscriber_id: i32,
) -> RecorderResult<()> { ) -> RecorderResult<()> {
let cookies = self.http_client.save_cookie_store_to_json()?; let cookies = self.http_client.save_cookie_store_to_json()?;
if let Some(cookies) = cookies { if let Some(cookies) = cookies {
@@ -167,19 +168,20 @@ impl MikanClient {
cookies: Set(Some(cookies)), cookies: Set(Some(cookies)),
..Default::default() ..Default::default()
} }
.try_encrypt(ctx.clone()) .try_encrypt(ctx)
.await?; .await?;
credential_3rd::Entity::update_many() credential_3rd::Entity::update_many()
.set(am) .set(am)
.filter(credential_3rd::Column::Id.eq(credential_id)) .filter(credential_3rd::Column::Id.eq(credential_id))
.filter(credential_3rd::Column::SubscriberId.eq(subscriber_id))
.exec(ctx.db()) .exec(ctx.db())
.await?; .await?;
} }
Ok(()) Ok(())
} }
pub async fn fork_with_credential( pub async fn fork_with_userpass_credential(
&self, &self,
userpass_credential: UserPassCredential, userpass_credential: UserPassCredential,
) -> RecorderResult<Self> { ) -> RecorderResult<Self> {
@@ -204,10 +206,13 @@ impl MikanClient {
pub async fn fork_with_credential_id( pub async fn fork_with_credential_id(
&self, &self,
ctx: Arc<dyn AppContextTrait>, ctx: &dyn AppContextTrait,
credential_id: i32, credential_id: i32,
subscriber_id: i32,
) -> RecorderResult<Self> { ) -> RecorderResult<Self> {
let credential = credential_3rd::Model::find_by_id(ctx.clone(), credential_id).await?; let credential =
credential_3rd::Model::find_by_id_and_subscriber_id(ctx, credential_id, subscriber_id)
.await?;
if let Some(credential) = credential { if let Some(credential) = credential {
if credential.credential_type != Credential3rdType::Mikan { if credential.credential_type != Credential3rdType::Mikan {
return Err(RecorderError::Credential3rdError { return Err(RecorderError::Credential3rdError {
@@ -219,7 +224,8 @@ impl MikanClient {
let userpass_credential: UserPassCredential = let userpass_credential: UserPassCredential =
credential.try_into_userpass_credential(ctx)?; credential.try_into_userpass_credential(ctx)?;
self.fork_with_credential(userpass_credential).await self.fork_with_userpass_credential(userpass_credential)
.await
} else { } else {
Err(RecorderError::from_db_record_not_found( Err(RecorderError::from_db_record_not_found(
DbErr::RecordNotFound(format!("credential={credential_id} not found")), DbErr::RecordNotFound(format!("credential={credential_id} not found")),
@@ -249,7 +255,7 @@ impl HttpClientTrait for MikanClient {}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
#![allow(unused_variables)] #![allow(unused_variables)]
use std::assert_matches::assert_matches; use std::{assert_matches::assert_matches, sync::Arc};
use rstest::{fixture, rstest}; use rstest::{fixture, rstest};
use tracing::Level; use tracing::Level;
@@ -297,8 +303,10 @@ mod tests {
let credential_form = build_testing_mikan_credential_form(); let credential_form = build_testing_mikan_credential_form();
let subscriber_id = 1;
let credential_model = mikan_client let credential_model = mikan_client
.submit_credential_form(app_ctx.clone(), 1, credential_form.clone()) .submit_credential_form(app_ctx.as_ref(), subscriber_id, credential_form.clone())
.await?; .await?;
let expected_username = &credential_form.username; let expected_username = &credential_form.username;
@@ -322,7 +330,7 @@ mod tests {
); );
let mikan_client = mikan_client let mikan_client = mikan_client
.fork_with_credential_id(app_ctx.clone(), credential_model.id) .fork_with_credential_id(app_ctx.as_ref(), credential_model.id, subscriber_id)
.await?; .await?;
mikan_client.login().await?; mikan_client.login().await?;
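A hypothetical call site (not from the diff) tying the reworked credential APIs together; note that every call is now scoped by subscriber_id:

// Hedged sketch: fork a client for one subscriber's Mikan credential, log in,
// then persist any refreshed cookies back for that same subscriber.
async fn login_for_subscriber(
    ctx: &dyn AppContextTrait,
    credential_id: i32,
    subscriber_id: i32,
) -> RecorderResult<()> {
    let client = ctx
        .mikan()
        .fork_with_credential_id(ctx, credential_id, subscriber_id)
        .await?;
    client.login().await?;
    client
        .sync_credential_cookies(ctx, credential_id, subscriber_id)
        .await?;
    Ok(())
}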

View File

@@ -2,7 +2,7 @@ use fetch::HttpClientConfig;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use url::Url; use url::Url;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanConfig { pub struct MikanConfig {
pub http_client: HttpClientConfig, pub http_client: HttpClientConfig,
pub base_url: Url, pub base_url: Url,

View File

@@ -12,6 +12,7 @@ pub const MIKAN_BANGUMI_POSTER_PATH: &str = "/images/Bangumi";
pub const MIKAN_EPISODE_TORRENT_PATH: &str = "/Download"; pub const MIKAN_EPISODE_TORRENT_PATH: &str = "/Download";
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH: &str = "/RSS/MyBangumi"; pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH: &str = "/RSS/MyBangumi";
pub const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi"; pub const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi";
pub const MIKAN_FANSUB_HOMEPAGE_PATH: &str = "/Home/PublishGroup";
pub const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId"; pub const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId";
pub const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid"; pub const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid";
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY: &str = "token"; pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY: &str = "token";

View File

@@ -11,10 +11,11 @@ pub use constants::{
MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH, MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH,
MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH, MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH,
MIKAN_BANGUMI_RSS_PATH, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_EPISODE_TORRENT_PATH, MIKAN_BANGUMI_RSS_PATH, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_EPISODE_TORRENT_PATH,
MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH,
MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY, MIKAN_LOGIN_PAGE_SEARCH, MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_SEASON_STR_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH,
MIKAN_UNKNOWN_FANSUB_ID, MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_UNKNOWN_FANSUB_ID,
MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
}; };
pub use credential::MikanCredentialForm; pub use credential::MikanCredentialForm;
pub use subscription::{ pub use subscription::{
@@ -22,11 +23,12 @@ pub use subscription::{
}; };
pub use web::{ pub use web::{
MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta, MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssItem, MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanFansubHash,
MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionRssUrlMeta, MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url, MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_expand_subscribed_url,
build_mikan_bangumi_subscription_rss_url, build_mikan_episode_homepage_url, build_mikan_bangumi_homepage_url, build_mikan_bangumi_subscription_rss_url,
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url, build_mikan_episode_homepage_url, build_mikan_season_flow_url,
build_mikan_subscriber_subscription_rss_url,
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment, extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
extract_mikan_bangumi_meta_from_expand_subscribed_fragment, extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
extract_mikan_episode_meta_from_episode_homepage_html, extract_mikan_episode_meta_from_episode_homepage_html,

View File

@@ -5,6 +5,7 @@ use std::{
}; };
use async_graphql::{InputObject, SimpleObject}; use async_graphql::{InputObject, SimpleObject};
use async_stream::try_stream;
use fetch::fetch_bytes; use fetch::fetch_bytes;
use futures::{Stream, TryStreamExt, pin_mut, try_join}; use futures::{Stream, TryStreamExt, pin_mut, try_join};
use maplit::hashmap; use maplit::hashmap;
@@ -19,13 +20,16 @@ use super::scrape_mikan_bangumi_meta_stream_from_season_flow_url;
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::{RecorderError, RecorderResult}, errors::{RecorderError, RecorderResult},
extract::mikan::{ extract::{
MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssItem, bittorrent::EpisodeEnclosureMeta,
MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionRssUrlMeta, mikan::{
build_mikan_bangumi_subscription_rss_url, build_mikan_season_flow_url, MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
build_mikan_subscriber_subscription_rss_url, MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
scrape_mikan_episode_meta_from_episode_homepage_url, scrape_mikan_episode_meta_from_episode_homepage_url,
}, },
},
models::{ models::{
bangumi, episodes, subscription_bangumi, subscription_episode, bangumi, episodes, subscription_bangumi, subscription_episode,
subscriptions::{self, SubscriptionTrait}, subscriptions::{self, SubscriptionTrait},
@@ -35,10 +39,11 @@ use crate::{
#[tracing::instrument(err, skip(ctx, rss_item_list))] #[tracing::instrument(err, skip(ctx, rss_item_list))]
async fn sync_mikan_feeds_from_rss_item_list( async fn sync_mikan_feeds_from_rss_item_list(
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
rss_item_list: Vec<MikanRssItem>, rss_item_list: Vec<MikanRssEpisodeItem>,
subscriber_id: i32, subscriber_id: i32,
subscription_id: i32, subscription_id: i32,
) -> RecorderResult<()> { ) -> RecorderResult<()> {
let mikan_base_url = ctx.mikan().base_url().clone();
let (new_episode_meta_list, existed_episode_hash2id_map) = { let (new_episode_meta_list, existed_episode_hash2id_map) = {
let existed_episode_hash2id_map = episodes::Model::get_existed_mikan_episode_list( let existed_episode_hash2id_map = episodes::Model::get_existed_mikan_episode_list(
ctx, ctx,
@@ -52,7 +57,7 @@ async fn sync_mikan_feeds_from_rss_item_list(
.map(|(episode_id, hash, bangumi_id)| (hash.mikan_episode_id, (episode_id, bangumi_id))) .map(|(episode_id, hash, bangumi_id)| (hash.mikan_episode_id, (episode_id, bangumi_id)))
.collect::<HashMap<_, _>>(); .collect::<HashMap<_, _>>();
let mut new_episode_meta_list: Vec<MikanEpisodeMeta> = vec![]; let mut new_episode_meta_list: Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)> = vec![];
let mikan_client = ctx.mikan(); let mikan_client = ctx.mikan();
for to_insert_rss_item in rss_item_list.into_iter().filter(|rss_item| { for to_insert_rss_item in rss_item_list.into_iter().filter(|rss_item| {
@@ -60,10 +65,11 @@ async fn sync_mikan_feeds_from_rss_item_list(
}) { }) {
let episode_meta = scrape_mikan_episode_meta_from_episode_homepage_url( let episode_meta = scrape_mikan_episode_meta_from_episode_homepage_url(
mikan_client, mikan_client,
to_insert_rss_item.homepage, to_insert_rss_item.build_homepage_url(mikan_base_url.clone()),
) )
.await?; .await?;
new_episode_meta_list.push(episode_meta); let episode_enclosure_meta = EpisodeEnclosureMeta::from(to_insert_rss_item);
new_episode_meta_list.push((episode_meta, episode_enclosure_meta));
} }
(new_episode_meta_list, existed_episode_hash2id_map) (new_episode_meta_list, existed_episode_hash2id_map)
@@ -90,22 +96,22 @@ async fn sync_mikan_feeds_from_rss_item_list(
let new_episode_meta_list_group_by_bangumi_hash: HashMap< let new_episode_meta_list_group_by_bangumi_hash: HashMap<
MikanBangumiHash, MikanBangumiHash,
Vec<MikanEpisodeMeta>, Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)>,
> = { > = {
let mut m = hashmap! {}; let mut m = hashmap! {};
for episode_meta in new_episode_meta_list { for (episode_meta, episode_enclosure_meta) in new_episode_meta_list {
let bangumi_hash = episode_meta.bangumi_hash(); let bangumi_hash = episode_meta.bangumi_hash();
m.entry(bangumi_hash) m.entry(bangumi_hash)
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.push(episode_meta); .push((episode_meta, episode_enclosure_meta));
} }
m m
}; };
for (group_bangumi_hash, group_episode_meta_list) in new_episode_meta_list_group_by_bangumi_hash for (group_bangumi_hash, group_episode_meta_list) in new_episode_meta_list_group_by_bangumi_hash
{ {
let first_episode_meta = group_episode_meta_list.first().unwrap(); let (first_episode_meta, _) = group_episode_meta_list.first().unwrap();
let group_bangumi_model = bangumi::Model::get_or_insert_from_mikan( let group_bangumi_model = bangumi::Model::get_or_insert_from_mikan(
ctx, ctx,
group_bangumi_hash, group_bangumi_hash,
@@ -124,9 +130,12 @@ async fn sync_mikan_feeds_from_rss_item_list(
}, },
) )
.await?; .await?;
let group_episode_creation_list = group_episode_meta_list let group_episode_creation_list =
group_episode_meta_list
.into_iter() .into_iter()
.map(|episode_meta| (&group_bangumi_model, episode_meta)); .map(|(episode_meta, episode_enclosure_meta)| {
(&group_bangumi_model, episode_meta, episode_enclosure_meta)
});
episodes::Model::add_mikan_episodes_for_subscription( episodes::Model::add_mikan_episodes_for_subscription(
ctx, ctx,
@@ -141,7 +150,7 @@ async fn sync_mikan_feeds_from_rss_item_list(
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriberSubscription { pub struct MikanSubscriberSubscription {
pub id: i32, pub subscription_id: i32,
pub mikan_subscription_token: String, pub mikan_subscription_token: String,
pub subscriber_id: i32, pub subscriber_id: i32,
} }
@@ -153,7 +162,7 @@ impl SubscriptionTrait for MikanSubscriberSubscription {
} }
fn get_subscription_id(&self) -> i32 { fn get_subscription_id(&self) -> i32 {
self.id self.subscription_id
} }
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> { async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
@@ -203,7 +212,7 @@ impl SubscriptionTrait for MikanSubscriberSubscription {
})?; })?;
Ok(Self { Ok(Self {
id: model.id, subscription_id: model.id,
mikan_subscription_token: meta.mikan_subscription_token, mikan_subscription_token: meta.mikan_subscription_token,
subscriber_id: model.subscriber_id, subscriber_id: model.subscriber_id,
}) })
@@ -215,7 +224,7 @@ impl MikanSubscriberSubscription {
async fn get_rss_item_list_from_source_url( async fn get_rss_item_list_from_source_url(
&self, &self,
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssItem>> { ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
let mikan_base_url = ctx.mikan().base_url().clone(); let mikan_base_url = ctx.mikan().base_url().clone();
let rss_url = build_mikan_subscriber_subscription_rss_url( let rss_url = build_mikan_subscriber_subscription_rss_url(
mikan_base_url.clone(), mikan_base_url.clone(),
@@ -227,7 +236,7 @@ impl MikanSubscriberSubscription {
let mut result = vec![]; let mut result = vec![];
for (idx, item) in channel.items.into_iter().enumerate() { for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssItem::try_from(item) let item = MikanRssEpisodeItem::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| { .with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}") format!("failed to extract rss item at idx {idx}")
})?; })?;
@@ -240,9 +249,10 @@ impl MikanSubscriberSubscription {
async fn get_rss_item_list_from_subsribed_url_rss_link( async fn get_rss_item_list_from_subsribed_url_rss_link(
&self, &self,
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssItem>> { ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
let subscribed_bangumi_list = let subscribed_bangumi_list =
bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.id).await?; bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.subscription_id)
.await?;
let mut rss_item_list = vec![]; let mut rss_item_list = vec![];
for subscribed_bangumi in subscribed_bangumi_list { for subscribed_bangumi in subscribed_bangumi_list {
@@ -251,7 +261,7 @@ impl MikanSubscriberSubscription {
.with_whatever_context::<_, String, RecorderError>(|| { .with_whatever_context::<_, String, RecorderError>(|| {
format!( format!(
"rss link is required, subscription_id = {:?}, bangumi_name = {}", "rss link is required, subscription_id = {:?}, bangumi_name = {}",
self.id, subscribed_bangumi.display_name self.subscription_id, subscribed_bangumi.display_name
) )
})?; })?;
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?; let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
@@ -259,7 +269,7 @@ impl MikanSubscriberSubscription {
let channel = rss::Channel::read_from(&bytes[..])?; let channel = rss::Channel::read_from(&bytes[..])?;
for (idx, item) in channel.items.into_iter().enumerate() { for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssItem::try_from(item) let item = MikanRssEpisodeItem::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| { .with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}") format!("failed to extract rss item at idx {idx}")
})?; })?;
@@ -270,9 +280,9 @@ impl MikanSubscriberSubscription {
} }
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSeasonSubscription { pub struct MikanSeasonSubscription {
pub id: i32, pub subscription_id: i32,
pub year: i32, pub year: i32,
pub season_str: MikanSeasonStr, pub season_str: MikanSeasonStr,
pub credential_id: i32, pub credential_id: i32,
@@ -286,21 +296,23 @@ impl SubscriptionTrait for MikanSeasonSubscription {
} }
fn get_subscription_id(&self) -> i32 { fn get_subscription_id(&self) -> i32 {
self.id self.subscription_id
} }
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> { async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
let rss_item_list = self let rss_item_stream = self.get_rss_item_stream_from_subsribed_url_rss_link(ctx.as_ref());
.get_rss_item_list_from_subsribed_url_rss_link(ctx.as_ref())
.await?;
pin_mut!(rss_item_stream);
while let Some(rss_item_chunk_list) = rss_item_stream.try_next().await? {
sync_mikan_feeds_from_rss_item_list( sync_mikan_feeds_from_rss_item_list(
ctx.as_ref(), ctx.as_ref(),
rss_item_list, rss_item_chunk_list,
self.get_subscriber_id(), self.get_subscriber_id(),
self.get_subscription_id(), self.get_subscription_id(),
) )
.await?; .await?;
}
Ok(()) Ok(())
} }
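The season subscription now consumes its RSS items as a chunked stream instead of collecting everything first; a minimal consumption sketch under that shape, with an assumed helper name:

use futures::{Stream, TryStreamExt, pin_mut};

// Hedged sketch: drain a stream of per-bangumi chunks, counting items,
// mirroring the pin_mut!/try_next loop used in sync_feeds_incremental.
async fn count_items(
    stream: impl Stream<Item = RecorderResult<Vec<MikanRssEpisodeItem>>>,
) -> RecorderResult<usize> {
    pin_mut!(stream);
    let mut total = 0;
    while let Some(chunk) = stream.try_next().await? {
        total += chunk.len();
    }
    Ok(total)
}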
@@ -362,7 +374,7 @@ impl SubscriptionTrait for MikanSeasonSubscription {
})?; })?;
Ok(Self { Ok(Self {
id: model.id, subscription_id: model.id,
year: source_url_meta.year, year: source_url_meta.year,
season_str: source_url_meta.season_str, season_str: source_url_meta.season_str,
credential_id, credential_id,
@@ -387,18 +399,23 @@ impl MikanSeasonSubscription {
ctx, ctx,
mikan_season_flow_url, mikan_season_flow_url,
credential_id, credential_id,
self.get_subscriber_id(),
) )
} }
#[tracing::instrument(err, skip(ctx))] fn get_rss_item_stream_from_subsribed_url_rss_link(
async fn get_rss_item_list_from_subsribed_url_rss_link(
&self, &self,
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssItem>> { ) -> impl Stream<Item = RecorderResult<Vec<MikanRssEpisodeItem>>> {
try_stream! {
let db = ctx.db(); let db = ctx.db();
let subscribed_bangumi_list = bangumi::Entity::find() let subscribed_bangumi_list = bangumi::Entity::find()
.filter(Condition::all().add(subscription_bangumi::Column::SubscriptionId.eq(self.id))) .filter(
Condition::all()
.add(subscription_bangumi::Column::SubscriptionId.eq(self.subscription_id)),
)
.join_rev( .join_rev(
JoinType::InnerJoin, JoinType::InnerJoin,
subscription_bangumi::Relation::Bangumi.def(), subscription_bangumi::Relation::Bangumi.def(),
@@ -406,35 +423,39 @@ impl MikanSeasonSubscription {
.all(db) .all(db)
.await?; .await?;
let mut rss_item_list = vec![];
for subscribed_bangumi in subscribed_bangumi_list { for subscribed_bangumi in subscribed_bangumi_list {
let rss_url = subscribed_bangumi let rss_url = subscribed_bangumi
.rss_link .rss_link
.with_whatever_context::<_, String, RecorderError>(|| { .with_whatever_context::<_, String, RecorderError>(|| {
format!( format!(
"rss_link is required, subscription_id = {}, bangumi_name = {}", "rss_link is required, subscription_id = {}, bangumi_name = {}",
self.id, subscribed_bangumi.display_name self.subscription_id, subscribed_bangumi.display_name
) )
})?; })?;
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?; let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
let channel = rss::Channel::read_from(&bytes[..])?; let channel = rss::Channel::read_from(&bytes[..])?;
let mut rss_item_list = vec![];
for (idx, item) in channel.items.into_iter().enumerate() { for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssItem::try_from(item) let item = MikanRssEpisodeItem::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| { .with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}") format!("failed to extract rss item at idx {idx}")
})?; })?;
rss_item_list.push(item); rss_item_list.push(item);
} }
yield rss_item_list;
}
} }
Ok(rss_item_list)
} }
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)]
pub struct MikanBangumiSubscription { pub struct MikanBangumiSubscription {
pub id: i32, pub subscription_id: i32,
pub mikan_bangumi_id: String, pub mikan_bangumi_id: String,
pub mikan_fansub_id: String, pub mikan_fansub_id: String,
pub subscriber_id: i32, pub subscriber_id: i32,
@@ -447,7 +468,7 @@ impl SubscriptionTrait for MikanBangumiSubscription {
} }
fn get_subscription_id(&self) -> i32 { fn get_subscription_id(&self) -> i32 {
self.id self.subscription_id
} }
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> { async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
@@ -485,7 +506,7 @@ impl SubscriptionTrait for MikanBangumiSubscription {
})?; })?;
Ok(Self { Ok(Self {
id: model.id, subscription_id: model.id,
mikan_bangumi_id: meta.mikan_bangumi_id, mikan_bangumi_id: meta.mikan_bangumi_id,
mikan_fansub_id: meta.mikan_fansub_id, mikan_fansub_id: meta.mikan_fansub_id,
subscriber_id: model.subscriber_id, subscriber_id: model.subscriber_id,
@@ -498,7 +519,7 @@ impl MikanBangumiSubscription {
async fn get_rss_item_list_from_source_url( async fn get_rss_item_list_from_source_url(
&self, &self,
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssItem>> { ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
let mikan_base_url = ctx.mikan().base_url().clone(); let mikan_base_url = ctx.mikan().base_url().clone();
let rss_url = build_mikan_bangumi_subscription_rss_url( let rss_url = build_mikan_bangumi_subscription_rss_url(
mikan_base_url.clone(), mikan_base_url.clone(),
@@ -511,7 +532,7 @@ impl MikanBangumiSubscription {
let mut result = vec![]; let mut result = vec![];
for (idx, item) in channel.items.into_iter().enumerate() { for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssItem::try_from(item) let item = MikanRssEpisodeItem::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| { .with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}") format!("failed to extract rss item at idx {idx}")
})?; })?;
@@ -521,106 +542,237 @@ impl MikanBangumiSubscription {
} }
} }
// #[cfg(test)]
// mod tests {
// use std::assert_matches::assert_matches;
// use downloader::bittorrent::BITTORRENT_MIME_TYPE;
// use rstest::rstest;
// use url::Url;
// use crate::{
// errors::RecorderResult,
// extract::mikan::{
// MikanBangumiIndexRssChannel, MikanBangumiRssChannel,
// MikanRssChannel, build_mikan_bangumi_subscription_rss_url,
// extract_mikan_rss_channel_from_rss_link, },
// test_utils::mikan::build_testing_mikan_client,
// };
// #[rstest]
// #[tokio::test]
// async fn test_parse_mikan_rss_channel_from_rss_link() ->
// RecorderResult<()> { let mut mikan_server =
// mockito::Server::new_async().await;
// let mikan_base_url = Url::parse(&mikan_server.url())?;
// let mikan_client =
// build_testing_mikan_client(mikan_base_url.clone()).await?;
// {
// let bangumi_rss_url = build_mikan_bangumi_subscription_rss_url(
// mikan_base_url.clone(),
// "3141",
// Some("370"),
// );
// let bangumi_rss_mock = mikan_server
// .mock("GET", bangumi_rss_url.path())
//
// .with_body_from_file("tests/resources/mikan/Bangumi-3141-370.rss")
// .match_query(mockito::Matcher::Any)
// .create_async()
// .await;
// let channel =
// scrape_mikan_rss_channel_from_rss_link(&mikan_client, bangumi_rss_url)
// .await
// .expect("should get mikan channel from rss url");
// assert_matches!(
// &channel,
// MikanRssChannel::Bangumi(MikanBangumiRssChannel { .. })
// );
// assert_matches!(&channel.name(), Some("葬送的芙莉莲"));
// let items = channel.items();
// let first_sub_item = items
// .first()
// .expect("mikan subscriptions should have at least one subs");
// assert_eq!(first_sub_item.mime, BITTORRENT_MIME_TYPE);
// assert!(
// &first_sub_item
// .homepage
// .as_str()
// .starts_with("https://mikanani.me/Home/Episode")
// );
// let name = first_sub_item.title.as_str();
// assert!(name.contains("葬送的芙莉莲"));
// bangumi_rss_mock.expect(1);
// }
// {
// let bangumi_rss_url =
// mikan_base_url.join("/RSS/Bangumi?bangumiId=3416")?;
// let bangumi_rss_mock = mikan_server
// .mock("GET", bangumi_rss_url.path())
// .match_query(mockito::Matcher::Any)
//
// .with_body_from_file("tests/resources/mikan/Bangumi-3416.rss")
// .create_async()
// .await;
// let channel =
// scrape_mikan_rss_channel_from_rss_link(&mikan_client, bangumi_rss_url)
// .await
// .expect("should get mikan channel from rss url");
// assert_matches!(
// &channel,
// MikanRssChannel::BangumiIndex(MikanBangumiIndexRssChannel {
// .. }) );
// assert_matches!(&channel.name(), Some("叹气的亡灵想隐退"));
// bangumi_rss_mock.expect(1);
// }
// Ok(())
// }
// }
#[cfg(test)]
#[allow(unused_variables)]
mod tests {
use std::sync::Arc;
use rstest::{fixture, rstest};
use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait};
use tracing::Level;
use crate::{
app::AppContextTrait,
errors::RecorderResult,
extract::mikan::{
MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionRssUrlMeta,
},
models::{
bangumi, episodes,
subscriptions::{self, SubscriptionTrait},
},
test_utils::{
app::{TestingAppContext, TestingAppContextPreset},
mikan::{MikanMockServer, build_testing_mikan_credential_form},
tracing::try_init_testing_tracing,
},
};
struct TestingResources {
pub app_ctx: Arc<dyn AppContextTrait>,
pub mikan_server: MikanMockServer,
}
async fn build_testing_app_context() -> RecorderResult<TestingResources> {
let mikan_server = MikanMockServer::new().await?;
let mikan_base_url = mikan_server.base_url().clone();
let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
mikan_base_url: mikan_base_url.to_string(),
database_config: None,
})
.await?;
Ok(TestingResources {
app_ctx,
mikan_server,
})
}
#[fixture]
fn before_each() {
try_init_testing_tracing(Level::DEBUG);
}
#[rstest]
#[tokio::test]
async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources {
app_ctx,
mut mikan_server,
} = build_testing_app_context().await?;
let _resources_mock = mikan_server.mock_resources_with_doppel();
let _login_mock = mikan_server.mock_get_login_page();
let mikan_client = app_ctx.mikan();
let subscriber_id = 1;
let credential = mikan_client
.submit_credential_form(
app_ctx.as_ref(),
subscriber_id,
build_testing_mikan_credential_form(),
)
.await?;
let subscription_am = subscriptions::ActiveModel {
display_name: ActiveValue::Set("test subscription".to_string()),
subscriber_id: ActiveValue::Set(subscriber_id),
category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSeason),
source_url: ActiveValue::Set(
MikanSeasonFlowUrlMeta {
year: 2025,
season_str: MikanSeasonStr::Spring,
}
.build_season_flow_url(mikan_server.base_url().clone())
.to_string(),
),
enabled: ActiveValue::Set(true),
credential_id: ActiveValue::Set(Some(credential.id)),
..Default::default()
};
let subscription_model = subscription_am.insert(app_ctx.db()).await?;
let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;
{
subscription.sync_feeds_incremental(app_ctx.clone()).await?;
let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
assert!(bangumi_list.is_empty());
}
{
subscription.sync_feeds_full(app_ctx.clone()).await?;
let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
assert!(!bangumi_list.is_empty());
}
Ok(())
}
#[rstest]
#[tokio::test]
async fn test_mikan_subscriber_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources {
app_ctx,
mut mikan_server,
} = build_testing_app_context().await?;
let _resources_mock = mikan_server.mock_resources_with_doppel();
let _login_mock = mikan_server.mock_get_login_page();
let subscriber_id = 1;
let subscription_am = subscriptions::ActiveModel {
display_name: ActiveValue::Set("test subscription".to_string()),
subscriber_id: ActiveValue::Set(subscriber_id),
category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSubscriber),
source_url: ActiveValue::Set(
MikanSubscriberSubscriptionRssUrlMeta {
mikan_subscription_token: "test".into(),
}
.build_rss_url(mikan_server.base_url().clone())
.to_string(),
),
enabled: ActiveValue::Set(true),
..Default::default()
};
let subscription_model = subscription_am.insert(app_ctx.db()).await?;
let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;
let (incremental_bangumi_list, incremental_episode_list) = {
subscription.sync_feeds_incremental(app_ctx.clone()).await?;
let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
assert!(!bangumi_list.is_empty());
let episode_list = episodes::Entity::find().all(app_ctx.db()).await?;
assert!(!episode_list.is_empty());
(bangumi_list, episode_list)
};
let (full_bangumi_list, full_episode_list) = {
subscription.sync_feeds_full(app_ctx.clone()).await?;
let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
assert!(!bangumi_list.is_empty());
let episode_list = episodes::Entity::find().all(app_ctx.db()).await?;
assert!(!episode_list.is_empty());
(bangumi_list, episode_list)
};
assert_eq!(incremental_bangumi_list.len(), full_bangumi_list.len());
assert!(incremental_episode_list.len() < full_episode_list.len());
Ok(())
}
#[rstest]
#[tokio::test]
async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources {
app_ctx,
mut mikan_server,
} = build_testing_app_context().await?;
let _resources_mock = mikan_server.mock_resources_with_doppel();
let _login_mock = mikan_server.mock_get_login_page();
let subscriber_id = 1;
let subscription_am = subscriptions::ActiveModel {
display_name: ActiveValue::Set("test subscription".to_string()),
subscriber_id: ActiveValue::Set(subscriber_id),
category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanBangumi),
source_url: ActiveValue::Set(
MikanBangumiHash {
mikan_bangumi_id: "3600".into(),
mikan_fansub_id: "370".into(),
}
.build_rss_url(mikan_server.base_url().clone())
.to_string(),
),
enabled: ActiveValue::Set(true),
..Default::default()
};
let subscription_model = subscription_am.insert(app_ctx.db()).await?;
let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;
{
subscription.sync_feeds_incremental(app_ctx.clone()).await?;
let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
assert!(!bangumi_list.is_empty());
};
{
subscription.sync_feeds_full(app_ctx.clone()).await?;
let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
assert!(!bangumi_list.is_empty());
}
Ok(())
}
}

View File

@@ -2,7 +2,7 @@ use std::{borrow::Cow, fmt, str::FromStr, sync::Arc};
use async_stream::try_stream; use async_stream::try_stream;
use bytes::Bytes; use bytes::Bytes;
use chrono::DateTime; use chrono::{DateTime, Utc};
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE; use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
use fetch::{html::fetch_html, image::fetch_image}; use fetch::{html::fetch_html, image::fetch_image};
use futures::{Stream, TryStreamExt, pin_mut}; use futures::{Stream, TryStreamExt, pin_mut};
@@ -17,32 +17,44 @@ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::app_error::{RecorderError, RecorderResult}, errors::app_error::{RecorderError, RecorderResult},
extract::{ extract::{
bittorrent::EpisodeEnclosureMeta,
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref}, html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str, media::extract_image_src_from_str,
mikan::{ mikan::{
MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH, MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH, MIKAN_BANGUMI_HOMEPAGE_PATH,
MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH, MIKAN_BANGUMI_RSS_PATH, MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH, MIKAN_BANGUMI_RSS_PATH,
MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_POSTER_BUCKET_KEY, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY, MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
MIKAN_YEAR_QUERY_KEY, MikanClient, MIKAN_YEAR_QUERY_KEY, MikanClient,
}, },
}, },
storage::{StorageContentCategory, StorageServiceTrait}, media::{
AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
EncodeWebpOptions,
},
storage::StorageContentCategory,
task::{OptimizeImageTask, SystemTask},
}; };
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanRssItem { pub struct MikanRssEpisodeItem {
pub title: String, pub title: String,
pub homepage: Url, pub torrent_link: Url,
pub url: Url, pub content_length: Option<i64>,
pub content_length: Option<u64>,
pub mime: String, pub mime: String,
pub pub_date: Option<i64>, pub pub_date: Option<DateTime<Utc>>,
pub mikan_episode_id: String, pub mikan_episode_id: String,
pub magnet_link: Option<String>,
} }
impl TryFrom<rss::Item> for MikanRssItem { impl MikanRssEpisodeItem {
pub fn build_homepage_url(&self, mikan_base_url: Url) -> Url {
build_mikan_episode_homepage_url(mikan_base_url, &self.mikan_episode_id)
}
}
impl TryFrom<rss::Item> for MikanRssEpisodeItem {
type Error = RecorderError; type Error = RecorderError;
fn try_from(item: rss::Item) -> Result<Self, Self::Error> { fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
@@ -83,21 +95,49 @@ impl TryFrom<rss::Item> for MikanRssItem {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id")) RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
})?; })?;
Ok(MikanRssItem { let pub_date = item
.extensions
.get("torrent")
.and_then(|t| t.get("pubDate"))
.and_then(|e| e.first())
.and_then(|e| e.value.as_deref());
Ok(MikanRssEpisodeItem {
title, title,
homepage, torrent_link: enclosure_url,
url: enclosure_url,
content_length: enclosure.length.parse().ok(), content_length: enclosure.length.parse().ok(),
mime: mime_type, mime: mime_type,
pub_date: item pub_date: pub_date.and_then(|s| {
.pub_date DateTime::parse_from_rfc2822(s)
.and_then(|s| DateTime::parse_from_rfc2822(&s).ok()) .ok()
.map(|s| s.timestamp_millis()), .map(|s| s.with_timezone(&Utc))
.or_else(|| {
DateTime::parse_from_rfc3339(s)
.ok()
.map(|s| s.with_timezone(&Utc))
})
.or_else(|| {
DateTime::parse_from_rfc3339(&format!("{s}+08:00"))
.ok()
.map(|s| s.with_timezone(&Utc))
})
}),
mikan_episode_id, mikan_episode_id,
magnet_link: None,
}) })
} }
} }
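The pub_date now comes from the torrent RSS extension and is parsed with a fallback chain; the same logic extracted into a standalone helper for clarity (helper name assumed):

use chrono::{DateTime, Utc};

// Hedged sketch: try RFC 2822, then RFC 3339, then treat the value as a naive
// timestamp in UTC+8 (presumably Mikan's local timezone) by appending an offset.
fn parse_mikan_pub_date(s: &str) -> Option<DateTime<Utc>> {
    DateTime::parse_from_rfc2822(s)
        .ok()
        .or_else(|| DateTime::parse_from_rfc3339(s).ok())
        .or_else(|| DateTime::parse_from_rfc3339(&format!("{s}+08:00")).ok())
        .map(|d| d.with_timezone(&Utc))
}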
impl From<MikanRssEpisodeItem> for EpisodeEnclosureMeta {
fn from(item: MikanRssEpisodeItem) -> Self {
Self {
magnet_link: item.magnet_link,
torrent_link: Some(item.torrent_link.to_string()),
pub_date: item.pub_date,
content_length: item.content_length,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriberSubscriptionRssUrlMeta { pub struct MikanSubscriberSubscriptionRssUrlMeta {
pub mikan_subscription_token: String, pub mikan_subscription_token: String,
@@ -196,6 +236,32 @@ impl MikanBangumiMeta {
} }
} }
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct MikanFansubHash {
pub mikan_fansub_id: String,
}
impl MikanFansubHash {
pub fn from_homepage_url(url: &Url) -> Option<Self> {
let path = url.path();
if path.starts_with(MIKAN_FANSUB_HOMEPAGE_PATH) {
let mikan_fansub_id = path.replace(&format!("{MIKAN_FANSUB_HOMEPAGE_PATH}/"), "");
Some(Self { mikan_fansub_id })
} else {
None
}
}
pub fn build_homepage_url(self, mikan_base_url: Url) -> Url {
let mut url = mikan_base_url;
url.set_path(&format!(
"{MIKAN_FANSUB_HOMEPAGE_PATH}/{}",
self.mikan_fansub_id
));
url
}
}
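A small round-trip sketch (not from the diff) for the new MikanFansubHash; the fansub id and host reuse examples appearing elsewhere in this changeset:

use url::Url;

// Hedged sketch: parse a fansub id out of a PublishGroup homepage URL and
// rebuild the same URL from the hash.
fn demo() -> Option<()> {
    let base = Url::parse("https://mikanani.me/").ok()?;
    let homepage = Url::parse("https://mikanani.me/Home/PublishGroup/370").ok()?;
    let hash = MikanFansubHash::from_homepage_url(&homepage)?;
    assert_eq!(hash.mikan_fansub_id, "370");
    assert_eq!(hash.build_homepage_url(base).path(), "/Home/PublishGroup/370");
    Some(())
}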
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeMeta { pub struct MikanEpisodeMeta {
pub homepage: Url, pub homepage: Url,
@@ -436,6 +502,10 @@ impl MikanSeasonFlowUrlMeta {
None None
} }
} }
pub fn build_season_flow_url(self, mikan_base_url: Url) -> Url {
build_mikan_season_flow_url(mikan_base_url, self.year, self.season_str)
}
} }
pub fn build_mikan_bangumi_homepage_url( pub fn build_mikan_bangumi_homepage_url(
mikan_base_url: Url, mikan_base_url: Url,
@@ -511,6 +581,7 @@ pub fn extract_mikan_episode_meta_from_episode_homepage_html(
.select(&Selector::parse("title").unwrap()) .select(&Selector::parse("title").unwrap())
.next() .next()
.map(extract_inner_text_from_element_ref) .map(extract_inner_text_from_element_ref)
.map(|s| s.replace(" - Mikan Project", ""))
.ok_or_else(|| { .ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")) RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title"))
})?; })?;
@@ -543,7 +614,7 @@ pub fn extract_mikan_episode_meta_from_episode_homepage_html(
}) })
}); });
tracing::trace!( tracing::debug!(
bangumi_title, bangumi_title,
mikan_bangumi_id, mikan_bangumi_id,
episode_title, episode_title,
@@ -566,7 +637,7 @@ pub fn extract_mikan_episode_meta_from_episode_homepage_html(
}) })
} }
#[instrument(skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))] #[instrument(err, skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))]
pub async fn scrape_mikan_episode_meta_from_episode_homepage_url( pub async fn scrape_mikan_episode_meta_from_episode_homepage_url(
http_client: &MikanClient, http_client: &MikanClient,
mikan_episode_homepage_url: Url, mikan_episode_homepage_url: Url,
@@ -701,6 +772,7 @@ pub async fn scrape_mikan_bangumi_meta_from_bangumi_homepage_url(
) )
} }
#[allow(dead_code)]
#[instrument(err, skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))] #[instrument(err, skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))]
pub async fn scrape_mikan_bangumi_index_meta_from_bangumi_homepage_url( pub async fn scrape_mikan_bangumi_index_meta_from_bangumi_homepage_url(
mikan_client: &MikanClient, mikan_client: &MikanClient,
@@ -728,48 +800,92 @@ pub async fn scrape_mikan_poster_data_from_image_url(
#[instrument(skip_all, fields(origin_poster_src_url = origin_poster_src_url.as_str()))]
pub async fn scrape_mikan_poster_meta_from_image_url(
- mikan_client: &MikanClient,
- storage_service: &dyn StorageServiceTrait,
+ ctx: &dyn AppContextTrait,
origin_poster_src_url: Url,
- subscriber_id: i32,
) -> RecorderResult<MikanBangumiPosterMeta> {
- if let Some(poster_src) = storage_service
- .exists_object(
+ let storage_service = ctx.storage();
+ let media_service = ctx.media();
+ let mikan_client = ctx.mikan();
+ let task_service = ctx.task();
+ let storage_path = storage_service.build_public_object_path(
StorageContentCategory::Image,
- subscriber_id,
- Some(MIKAN_POSTER_BUCKET_KEY),
+ MIKAN_POSTER_BUCKET_KEY,
&origin_poster_src_url
.path()
.replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
- )
- .await?
- {
- return Ok(MikanBangumiPosterMeta {
+ );
+ let meta = if let Some(poster_src) = storage_service.exists(&storage_path).await? {
+ MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
poster_src: Some(poster_src.to_string()),
- });
}
+ } else {
let poster_data =
scrape_mikan_poster_data_from_image_url(mikan_client, origin_poster_src_url.clone())
.await?;
let poster_str = storage_service
- .store_object(
- StorageContentCategory::Image,
- subscriber_id,
- Some(MIKAN_POSTER_BUCKET_KEY),
- &origin_poster_src_url
- .path()
- .replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
- poster_data,
- )
+ .write(storage_path.clone(), poster_data)
.await?;
- Ok(MikanBangumiPosterMeta {
+ tracing::warn!(
+ poster_str = poster_str.to_string(),
+ "mikan poster meta extracted"
+ );
+ MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
poster_src: Some(poster_str.to_string()),
- })
+ }
+ };
+ if meta.poster_src.is_some()
+ && storage_path
+ .extension()
+ .is_some_and(|ext| media_service.is_legacy_image_format(ext))
+ {
+ let auto_optimize_formats = &media_service.config.auto_optimize_formats;
+ if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Webp) {
+ let webp_storage_path = storage_path.with_extension("webp");
+ if storage_service.exists(&webp_storage_path).await?.is_none() {
+ task_service
+ .add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
+ source_path: storage_path.clone().to_string(),
+ target_path: webp_storage_path.to_string(),
+ format_options: EncodeImageOptions::Webp(EncodeWebpOptions::default()),
+ }))
+ .await?;
+ }
+ }
+ if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Avif) {
+ let avif_storage_path = storage_path.with_extension("avif");
+ if storage_service.exists(&avif_storage_path).await?.is_none() {
+ task_service
+ .add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
+ source_path: storage_path.clone().to_string(),
+ target_path: avif_storage_path.to_string(),
+ format_options: EncodeImageOptions::Avif(EncodeAvifOptions::default()),
+ }))
+ .await?;
+ }
+ }
+ if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Jxl) {
+ let jxl_storage_path = storage_path.with_extension("jxl");
+ if storage_service.exists(&jxl_storage_path).await?.is_none() {
+ task_service
+ .add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
+ source_path: storage_path.clone().to_string(),
+ target_path: jxl_storage_path.to_string(),
+ format_options: EncodeImageOptions::Jxl(EncodeJxlOptions::default()),
+ }))
+ .await?;
+ }
+ }
+ }
+ Ok(meta)
}
pub fn extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(
@@ -917,10 +1033,11 @@ pub fn scrape_mikan_bangumi_meta_stream_from_season_flow_url(
ctx: Arc<dyn AppContextTrait>,
mikan_season_flow_url: Url,
credential_id: i32,
+ subscriber_id: i32,
) -> impl Stream<Item = RecorderResult<MikanBangumiMeta>> {
try_stream! {
let mikan_base_url = ctx.mikan().base_url().clone();
- let mikan_client = ctx.mikan().fork_with_credential_id(ctx.clone(), credential_id).await?;
+ let mikan_client = ctx.mikan().fork_with_credential_id(ctx.as_ref(), credential_id, subscriber_id).await?;
let content = fetch_html(&mikan_client, mikan_season_flow_url.clone()).await?;
@@ -940,7 +1057,7 @@ pub fn scrape_mikan_bangumi_meta_stream_from_season_flow_url(
mikan_client
- .sync_credential_cookies(ctx.clone(), credential_id)
+ .sync_credential_cookies(ctx.as_ref(), credential_id, subscriber_id)
.await?;
for bangumi_index in bangumi_indices_meta {
@@ -969,7 +1086,7 @@ pub fn scrape_mikan_bangumi_meta_stream_from_season_flow_url(
}
mikan_client
- .sync_credential_cookies(ctx, credential_id)
+ .sync_credential_cookies(ctx.as_ref(), credential_id, subscriber_id)
.await?;
}
}
@@ -978,11 +1095,13 @@ pub async fn scrape_mikan_bangumi_meta_list_from_season_flow_url(
ctx: Arc<dyn AppContextTrait>,
mikan_season_flow_url: Url,
credential_id: i32,
+ subscriber_id: i32,
) -> RecorderResult<Vec<MikanBangumiMeta>> {
let stream = scrape_mikan_bangumi_meta_stream_from_season_flow_url(
ctx,
mikan_season_flow_url,
credential_id,
+ subscriber_id,
);
pin_mut!(stream);
@@ -993,24 +1112,23 @@ pub async fn scrape_mikan_bangumi_meta_list_from_season_flow_url(
#[cfg(test)]
mod test {
#![allow(unused_variables)]
- use std::{fs, sync::Arc};
+ use std::{fs, io::Cursor, sync::Arc};
use futures::StreamExt;
+ use image::{ImageFormat, ImageReader};
use rstest::{fixture, rstest};
use tracing::Level;
use url::Url;
- use zune_image::{codecs::ImageFormat, image::Image};
use super::*;
use crate::test_utils::{
- app::TestingAppContext,
+ app::{TestingAppContext, TestingAppContextPreset},
crypto::build_testing_crypto_service,
database::build_testing_database_service,
mikan::{
MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential,
build_testing_mikan_credential_form,
},
- storage::build_testing_storage_service,
tracing::try_init_testing_tracing,
};
@@ -1035,12 +1153,14 @@ mod test {
scrape_mikan_poster_data_from_image_url(&mikan_client, bangumi_poster_url).await?;
resources_mock.shared_resource_mock.expect(1);
- let image = Image::read(bgm_poster_data.to_vec(), Default::default());
+ let image = {
+ let c = Cursor::new(bgm_poster_data);
+ ImageReader::new(c)
+ };
+ let image_format = image.with_guessed_format().ok().and_then(|i| i.format());
assert!(
- image.is_ok_and(|img| img
- .metadata()
- .get_image_format()
- .is_some_and(|fmt| matches!(fmt, ImageFormat::JPEG))),
+ image_format.is_some_and(|fmt| matches!(fmt, ImageFormat::Jpeg)),
"should start with valid jpeg data magic number"
);
@@ -1054,39 +1174,47 @@ mod test {
let mikan_base_url = mikan_server.base_url().clone();
+ let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
+ mikan_base_url: mikan_base_url.to_string(),
+ database_config: None,
+ })
+ .await?;
let resources_mock = mikan_server.mock_resources_with_doppel();
- let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
- let storage_service = build_testing_storage_service().await?;
- let storage_operator = storage_service.get_operator()?;
let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;
- let bgm_poster = scrape_mikan_poster_meta_from_image_url(
- &mikan_client,
- &storage_service,
- bangumi_poster_url,
- 1,
- )
- .await?;
+ let bgm_poster =
+ scrape_mikan_poster_meta_from_image_url(app_ctx.as_ref(), bangumi_poster_url).await?;
resources_mock.shared_resource_mock.expect(1);
- let storage_fullname = storage_service.get_fullname(
+ let storage_service = app_ctx.storage();
+ let storage_fullname = storage_service.build_public_object_path(
StorageContentCategory::Image,
- 1,
- Some(MIKAN_POSTER_BUCKET_KEY),
+ MIKAN_POSTER_BUCKET_KEY,
"202309/5ce9fed1.jpg",
);
- let storage_fullename_str = storage_fullname.as_str();
- assert!(storage_operator.exists(storage_fullename_str).await?);
+ assert!(
+ storage_service.exists(&storage_fullname).await?.is_some(),
+ "storage_fullename_str = {}, list public = {:?}",
+ &storage_fullname,
+ storage_service.list_public().await?
+ );
- let expected_data =
- fs::read("tests/resources/mikan/doppel/images/Bangumi/202309/5ce9fed1.jpg")?;
- let found_data = storage_operator.read(storage_fullename_str).await?.to_vec();
- assert_eq!(expected_data, found_data);
+ let bgm_poster_data = storage_service.read(&storage_fullname).await?;
+ let image = {
+ let c = Cursor::new(bgm_poster_data.to_vec());
+ ImageReader::new(c)
+ };
+ let image_format = image.with_guessed_format().ok().and_then(|i| i.format());
+ assert!(
+ image_format.is_some_and(|fmt| matches!(fmt, ImageFormat::Jpeg)),
+ "should start with valid jpeg data magic number"
+ );
Ok(())
}
@@ -1160,7 +1288,7 @@ mod test {
let mikan_client = build_testing_mikan_client(mikan_base_url.clone())
.await?
- .fork_with_credential(build_testing_mikan_credential())
+ .fork_with_userpass_credential(build_testing_mikan_credential())
.await?;
mikan_client.login().await?;
@@ -1268,8 +1396,14 @@ mod test {
let mikan_client = app_ctx.mikan();
+ let subscriber_id = 1;
let credential = mikan_client
- .submit_credential_form(app_ctx.clone(), 1, build_testing_mikan_credential_form())
+ .submit_credential_form(
+ app_ctx.as_ref(),
+ subscriber_id,
+ build_testing_mikan_credential_form(),
+ )
.await?;
let mikan_season_flow_url =
@@ -1279,6 +1413,7 @@ mod test {
app_ctx.clone(),
mikan_season_flow_url,
credential.id,
+ subscriber_id,
);
pin_mut!(bangumi_meta_stream);
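
In short, the poster pipeline in this file now pulls every collaborator from the app context and schedules optimized variants as background system tasks. A minimal usage sketch under that assumption (the helper below is illustrative, not part of the crate):

// Hypothetical helper for illustration only; signatures follow the new code above.
async fn cache_poster(ctx: &dyn AppContextTrait, poster_url: Url) -> RecorderResult<()> {
    let meta = scrape_mikan_poster_meta_from_image_url(ctx, poster_url).await?;
    if let Some(poster_src) = meta.poster_src {
        // webp/avif/jxl variants, if enabled, are produced asynchronously by OptimizeImage tasks.
        tracing::debug!(poster_src = %poster_src, "poster cached");
    }
    Ok(())
}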


@@ -1,7 +1,7 @@
+ pub mod bittorrent;
pub mod defs;
pub mod html;
pub mod http;
pub mod media;
pub mod mikan;
- pub mod rawname;
+ pub mod origin;
- pub mod bittorrent;

File diff suppressed because it is too large


@@ -1,5 +0,0 @@
pub mod parser;
pub use parser::{
extract_season_from_title_body, parse_episode_meta_from_raw_name, RawEpisodeMeta,
};


@@ -1,845 +0,0 @@
/**
* @TODO: rewrite with nom
*/
use std::borrow::Cow;
use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::whatever;
use crate::{
errors::RecorderResult,
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
};
const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";
lazy_static! {
static ref TITLE_RE: Regex = Regex::new(
r#"(.*|\[.*])( -? \d+|\[\d+]|\[\d+.?[vV]\d]|第\d+[话話集]|\[第?\d+[话話集]]|\[\d+.?END]|[Ee][Pp]?\d+|\[\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*[话話集]\s*])(.*)"#
).unwrap();
static ref EP_COLLECTION_RE:Regex = Regex::new(r#"\[?\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*合?[话話集]\s*]?"#).unwrap();
static ref MOVIE_TITLE_RE:Regex = Regex::new(r#"(.*|\[.*])(剧场版|[Mm]ovie|电影)(.*?)$"#).unwrap();
static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap();
static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap();
static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
static ref PREFIX_RE: Regex =
Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
static ref MOVIE_SEASON_EXTRACT_RE: Regex = Regex::new(r"剧场版|Movie|电影").unwrap();
static ref MAIN_TITLE_PREFIX_PROCESS_RE1: Regex = Regex::new(r"新番|月?番").unwrap();
static ref MAIN_TITLE_PREFIX_PROCESS_RE2: Regex = Regex::new(r"[港澳台]{1,3}地区").unwrap();
static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE: Regex = Regex::new(r"\[.+\]").unwrap();
static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1: Regex = Regex::new(r"^.*?\[").unwrap();
static ref SEASON_EXTRACT_SEASON_ALL_RE: Regex = Regex::new(r"S\d{1,2}|Season \d{1,2}|[第].[季期]|1st|2nd|3rd|\d{1,2}th").unwrap();
static ref SEASON_EXTRACT_SEASON_EN_PREFIX_RE: Regex = Regex::new(r"Season|S").unwrap();
static ref SEASON_EXTRACT_SEASON_EN_NTH_RE: Regex = Regex::new(r"1st|2nd|3rd|\d{1,2}th").unwrap();
static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_RE: Regex = Regex::new(r"[第 ].*[季期(部分)]|部分").unwrap();
static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE: Regex = Regex::new(r"[第季期 ]").unwrap();
static ref NAME_EXTRACT_REMOVE_RE: Regex = Regex::new(r"[(]仅限[港澳台]{1,3}地区[)]").unwrap();
static ref NAME_EXTRACT_SPLIT_RE: Regex = Regex::new(r"/|\s{2}|-\s{2}|\]\[").unwrap();
static ref NAME_EXTRACT_REPLACE_ADHOC1_RE: Regex = Regex::new(r"([\p{scx=Han}\s\(\)]{5,})_([a-zA-Z]{2,})").unwrap();
static ref NAME_JP_TEST: Regex = Regex::new(r"[\p{scx=Hira}\p{scx=Kana}]{2,}").unwrap();
static ref NAME_ZH_TEST: Regex = Regex::new(r"[\p{scx=Han}]{2,}").unwrap();
static ref NAME_EN_TEST: Regex = Regex::new(r"[a-zA-Z]{3,}").unwrap();
static ref TAGS_EXTRACT_SPLIT_RE: Regex = Regex::new(r"[\[\]()_]").unwrap();
static ref CLEAR_SUB_RE: Regex = Regex::new(r"_MP4|_MKV").unwrap();
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
pub struct RawEpisodeMeta {
pub name_en: Option<String>,
pub name_en_no_season: Option<String>,
pub name_jp: Option<String>,
pub name_jp_no_season: Option<String>,
pub name_zh: Option<String>,
pub name_zh_no_season: Option<String>,
pub season: i32,
pub season_raw: Option<String>,
pub episode_index: i32,
pub subtitle: Option<String>,
pub source: Option<String>,
pub fansub: Option<String>,
pub resolution: Option<String>,
}
fn extract_fansub(raw_name: &str) -> Option<&str> {
let mut groups = EN_BRACKET_SPLIT_RE.splitn(raw_name, 3);
groups.nth(1)
}
fn replace_ch_bracket_to_en(raw_name: &str) -> String {
raw_name.replace('【', "[").replace('】', "]")
}
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RecorderResult<String> {
let raw_without_fansub = if let Some(fansub) = fansub {
let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
fan_sub_re.replace_all(title_body, "")
} else {
Cow::Borrowed(title_body)
};
let raw_with_prefix_replaced = PREFIX_RE.replace_all(&raw_without_fansub, "/");
let mut arg_group = raw_with_prefix_replaced
.split('/')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>();
if arg_group.len() == 1 {
arg_group = arg_group.first_mut().unwrap().split(' ').collect();
}
let mut raw = raw_without_fansub.to_string();
for arg in arg_group.iter() {
if (arg_group.len() <= 5 && MAIN_TITLE_PREFIX_PROCESS_RE1.is_match(arg))
|| (MAIN_TITLE_PREFIX_PROCESS_RE2.is_match(arg))
{
let sub = Regex::new(&format!(".{arg}."))?;
raw = sub.replace_all(&raw, "").to_string();
}
}
if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw)
&& m.len() as f32 > (raw.len() as f32) * 0.5
{
let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
.replace(&raw, "")
.chars()
.collect_vec();
while let Some(ch) = raw1.pop() {
if ch == ']' {
break;
}
}
raw = raw1.into_iter().collect();
}
Ok(raw.to_string())
}
pub fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
let seasons = SEASON_EXTRACT_SEASON_ALL_RE
.find(&name_and_season)
.into_iter()
.map(|s| s.as_str())
.collect_vec();
if seasons.is_empty() {
return (title_body.to_string(), None, 1);
}
let mut season = 1;
let mut season_raw = None;
let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");
for s in seasons {
season_raw = Some(s);
if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s)
&& let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
.replace_all(m.as_str(), "")
.parse::<i32>()
{
season = s;
break;
}
if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s)
&& let Some(s) = DIGIT_1PLUS_REG
.find(m.as_str())
.and_then(|s| s.as_str().parse::<i32>().ok())
{
season = s;
break;
}
if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
.replace(m.as_str(), "")
.parse::<i32>()
{
season = s;
break;
}
if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
season = ZH_NUM_MAP[m.as_str()];
break;
}
}
}
(name.to_string(), season_raw.map(|s| s.to_string()), season)
}
fn extract_name_from_title_body_name_section(
title_body_name_section: &str,
) -> (Option<String>, Option<String>, Option<String>) {
let mut name_en = None;
let mut name_zh = None;
let mut name_jp = None;
let replaced1 = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE
.replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
let trimmed = replaced2.trim();
let mut split = NAME_EXTRACT_SPLIT_RE
.split(trimmed)
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.map(|s| s.to_string())
.collect_vec();
if split.len() == 1 {
let mut split_space = split[0].split(' ').collect_vec();
let mut search_indices = vec![0];
if split_space.len() > 1 {
search_indices.push(split_space.len() - 1);
}
for i in search_indices {
if NAME_ZH_TEST.is_match(split_space[i]) {
let chs = split_space[i];
split_space.remove(i);
split = vec![chs.to_string(), split_space.join(" ")];
break;
}
}
}
for item in split {
if NAME_JP_TEST.is_match(&item) && name_jp.is_none() {
name_jp = Some(item);
} else if NAME_ZH_TEST.is_match(&item) && name_zh.is_none() {
name_zh = Some(item);
} else if NAME_EN_TEST.is_match(&item) && name_en.is_none() {
name_en = Some(item);
}
}
(name_en, name_zh, name_jp)
}
fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
DIGIT_1PLUS_REG
.find(title_episode)?
.as_str()
.parse::<i32>()
.ok()
}
fn clear_sub(sub: Option<String>) -> Option<String> {
sub.map(|s| CLEAR_SUB_RE.replace_all(&s, "").to_string())
}
fn extract_tags_from_title_extra(
title_extra: &str,
) -> (Option<String>, Option<String>, Option<String>) {
let replaced = TAGS_EXTRACT_SPLIT_RE.replace_all(title_extra, " ");
let elements = replaced
.split(' ')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect_vec();
let mut sub = None;
let mut resolution = None;
let mut source = None;
for element in elements.iter() {
if SUB_RE.is_match(element) {
sub = Some(element.to_string())
} else if RESOLUTION_RE.is_match(element) {
resolution = Some(element.to_string())
} else if SOURCE_L1_RE.is_match(element) {
source = Some(element.to_string())
}
}
if source.is_none() {
for element in elements {
if SOURCE_L2_RE.is_match(element) {
source = Some(element.to_string())
}
}
}
(clear_sub(sub), resolution, source)
}
pub fn check_is_movie(title: &str) -> bool {
MOVIE_TITLE_RE.is_match(title)
}
pub fn parse_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
let raw_title = s.trim();
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
let fansub = extract_fansub(&raw_title_without_ch_brackets);
let movie_capture = check_is_movie(&raw_title_without_ch_brackets);
if let Some(title_re_match_obj) = MOVIE_TITLE_RE
.captures(&raw_title_without_ch_brackets)
.or(TITLE_RE.captures(&raw_title_without_ch_brackets))
{
let mut title_body = title_re_match_obj
.get(1)
.map(|s| s.as_str().trim())
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"))
.to_string();
let mut title_episode = title_re_match_obj
.get(2)
.map(|s| s.as_str().trim())
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
let title_extra = title_re_match_obj
.get(3)
.map(|s| s.as_str().trim())
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
if movie_capture {
title_body += title_episode;
title_episode = "";
} else if EP_COLLECTION_RE.is_match(title_episode) {
title_episode = "";
}
let title_body = title_body_pre_process(&title_body, fansub)?;
let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
extract_name_from_title_body_name_section(&name_without_season);
let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1);
let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
Ok(RawEpisodeMeta {
name_en,
name_en_no_season,
name_jp,
name_jp_no_season,
name_zh,
name_zh_no_season,
season,
season_raw,
episode_index,
subtitle: sub,
source,
fansub: fansub.map(|s| s.to_string()),
resolution,
})
} else {
whatever!("Can not parse episode meta from raw filename {}", raw_title)
}
}
#[cfg(test)]
mod tests {
use super::{RawEpisodeMeta, parse_episode_meta_from_raw_name};
fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
let expected: Option<RawEpisodeMeta> = serde_json::from_str(expected).unwrap_or_default();
let found = parse_episode_meta_from_raw_name(raw_name).ok();
if expected != found {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
}
assert_eq!(expected, found);
}
#[test]
fn test_parse_ep_with_all_parts_wrapped() {
test_raw_ep_parser_case(
r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
r#"{
"name_zh": "我心里危险的东西",
"name_zh_no_season": "我心里危险的东西",
"season": 2,
"season_raw": "第二季",
"episode_index": 5,
"subtitle": "简日双语",
"source": null,
"fansub": "新Sub",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_title_wrapped_by_one_square_bracket_and_season_prefix() {
test_raw_ep_parser_case(
r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
r#"{
"name_en": "Boku no Kokoro no Yabai Yatsu",
"name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
"name_zh": "我内心的糟糕念头",
"name_zh_no_season": "我内心的糟糕念头",
"season": 1,
"season_raw": null,
"episode_index": 18,
"subtitle": "简日双语",
"source": null,
"fansub": "喵萌奶茶屋",
"resolution": "1080p"
}"#,
);
}
#[test]
fn test_parse_ep_with_ep_and_version() {
test_raw_ep_parser_case(
r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{
"name_en": "Shin no Nakama 2nd",
"name_en_no_season": "Shin no Nakama",
"name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
"name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
"season": 2,
"season_raw": "2nd",
"episode_index": 8,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_en_title_only() {
test_raw_ep_parser_case(
r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
r#"{
"name_en": "THE MARGINAL SERVICE",
"name_en_no_season": "THE MARGINAL SERVICE",
"season": 1,
"episode_index": 8,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "动漫国字幕组&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_two_zh_title() {
test_raw_ep_parser_case(
r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{
"name_en": "Nozomanu Fushi no Boukensha",
"name_en_no_season": "Nozomanu Fushi no Boukensha",
"name_zh": "事与愿违的不死冒险者",
"name_zh_no_season": "事与愿违的不死冒险者",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_en_zh_jp_titles() {
test_raw_ep_parser_case(
r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
r#"{
"name_en": "Pon no Michi",
"name_jp": "ぽんのみち",
"name_zh": "碰之道",
"name_en_no_season": "Pon no Michi",
"name_jp_no_season": "ぽんのみち",
"name_zh_no_season": "碰之道",
"season": 1,
"season_raw": null,
"episode_index": 7,
"subtitle": "简繁日内封字幕",
"source": "WebRip",
"fansub": "喵萌奶茶屋&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_nth_season() {
test_raw_ep_parser_case(
r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_en": "Yowai Character Tomozakikun",
"name_en_no_season": "Yowai Character Tomozakikun",
"name_zh": "弱角友崎同学 2nd STAGE",
"name_zh_no_season": "弱角友崎同学",
"season": 2,
"season_raw": "2nd",
"episode_index": 9,
"subtitle": "CHT",
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_season_en_and_season_zh() {
test_raw_ep_parser_case(
r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
r#"{
"name_en": "Kingdom S5",
"name_en_no_season": "Kingdom",
"name_zh": "王者天下 第五季",
"name_zh_no_season": "王者天下",
"season": 5,
"season_raw": "第五季",
"episode_index": 7,
"subtitle": "简繁外挂字幕",
"source": "WebRip",
"fansub": "豌豆字幕组&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_airota_fansub_style_case1() {
test_raw_ep_parser_case(
r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
r#"{
"name_en": "Alice to Therese no Maboroshi Koujou",
"name_en_no_season": "Alice to Therese no Maboroshi Koujou",
"name_zh": "爱丽丝与特蕾丝的虚幻工厂",
"name_zh_no_season": "爱丽丝与特蕾丝的虚幻工厂",
"season": 1,
"episode_index": 1,
"subtitle": "简繁内封",
"source": "WebRip",
"fansub": "千夏字幕组",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_airota_fansub_style_case2() {
test_raw_ep_parser_case(
r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
r#"{
"name_en": "Yuru Camp Movie",
"name_en_no_season": "Yuru Camp Movie",
"name_zh": "电影 轻旅轻营 (摇曳露营)",
"name_zh_no_season": "电影 轻旅轻营 (摇曳露营)",
"season": 1,
"episode_index": 1,
"subtitle": "繁体",
"source": "UHDRip",
"fansub": "千夏字幕组&喵萌奶茶屋",
"resolution": "2160p"
}"#,
)
}
#[test]
fn test_parse_ep_with_large_episode_style() {
test_raw_ep_parser_case(
r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#,
r#"{
"name_en": "New Doraemon",
"name_en_no_season": "New Doraemon",
"name_zh": "哆啦A梦新番",
"name_zh_no_season": "哆啦A梦新番",
"season": 1,
"episode_index": 747,
"subtitle": "GB",
"fansub": "梦蓝字幕组",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_many_square_brackets_split_title() {
test_raw_ep_parser_case(
r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
r#"{
"name_en": "Yuru Camp",
"name_en_no_season": "Yuru Camp",
"name_zh": "剧场版-摇曳露营",
"name_zh_no_season": "剧场版-摇曳露营",
"season": 1,
"episode_index": 1,
"subtitle": "简日双语",
"fansub": "MCE汉化组",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_implicit_lang_title_sep() {
test_raw_ep_parser_case(
r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#,
r#"{
"name_en": "NieR Automata Ver1.1a",
"name_en_no_season": "NieR Automata Ver1.1a",
"name_zh": "尼尔:机械纪元",
"name_zh_no_season": "尼尔:机械纪元",
"season": 1,
"episode_index": 2,
"subtitle": "简日双语",
"fansub": "织梦字幕组",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_square_brackets_wrapped_and_space_split() {
test_raw_ep_parser_case(
r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
r#"
{
"name_en": "Delicious in Dungeon",
"name_en_no_season": "Delicious in Dungeon",
"name_zh": "迷宫饭",
"name_zh_no_season": "迷宫饭",
"season": 1,
"episode_index": 3,
"subtitle": "日语中字",
"source": "NETFLIX",
"fansub": "天月搬运组",
"resolution": "1080P"
}
"#,
)
}
#[test]
fn test_parse_ep_with_start_with_brackets_wrapped_season_info_prefix() {
test_raw_ep_parser_case(
r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
r#"{
"name_en": "Dungeon Meshi",
"name_en_no_season": "Dungeon Meshi",
"name_zh": "迷宫饭",
"name_zh_no_season": "迷宫饭",
"season": 1,
"episode_index": 1,
"subtitle": "简日双语",
"fansub": "爱恋字幕社",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_small_no_title_extra_brackets_case() {
test_raw_ep_parser_case(
r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_en": "Mahou Shoujo ni Akogarete",
"name_en_no_season": "Mahou Shoujo ni Akogarete",
"name_zh": "梦想成为魔法少女 [年龄限制版]",
"name_zh_no_season": "梦想成为魔法少女 [年龄限制版]",
"season": 1,
"episode_index": 9,
"subtitle": "CHT",
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_title_leading_space_style() {
test_raw_ep_parser_case(
r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_zh": "16bit 的感动 ANOTHER LAYER",
"name_zh_no_season": "16bit 的感动 ANOTHER LAYER",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "CHT",
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_title_leading_month_and_wrapped_brackets_style() {
test_raw_ep_parser_case(
r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#,
r#"{
"name_en": "~ Sugar Apple Fairy Tale ~",
"name_en_no_season": "~ Sugar Apple Fairy Tale ~",
"name_zh": "银砂糖师与黑妖精",
"name_zh_no_season": "银砂糖师与黑妖精",
"season": 1,
"episode_index": 13,
"subtitle": "简日双语",
"fansub": "喵萌奶茶屋",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_title_leading_month_style() {
test_raw_ep_parser_case(
r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4字幕社招人内详"#,
r#"{
"name_en": "Tengoku Daimakyou",
"name_en_no_season": "Tengoku Daimakyou",
"name_zh": "天国大魔境",
"name_zh_no_season": "天国大魔境",
"season": 1,
"episode_index": 5,
"subtitle": "字幕社招人内详",
"source": null,
"fansub": "极影字幕社",
"resolution": "720P"
}"#,
)
}
#[test]
fn test_parse_ep_tokusatsu_style() {
test_raw_ep_parser_case(
r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#,
r#"{
"name_jp": "仮面ライダーギーツ",
"name_jp_no_season": "仮面ライダーギーツ",
"name_zh": "假面骑士Geats",
"name_zh_no_season": "假面骑士Geats",
"season": 1,
"episode_index": 33,
"source": "WEBDL",
"fansub": "MagicStar",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_multi_lang_zh_title() {
test_raw_ep_parser_case(
r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对☆PICO FEVER / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#,
r#"{
"name_en": "Garupa Pico: Fever!",
"name_en_no_season": "Garupa Pico: Fever!",
"name_zh": "BanG Dream! 少女乐团派对☆PICO FEVER",
"name_zh_no_season": "BanG Dream! 少女乐团派对☆PICO FEVER",
"season": 1,
"episode_index": 26,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "百冬练习组&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_ep_collections() {
test_raw_ep_parser_case(
r#"[奶²&LoliHouse] 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简日内封字幕]"#,
r#"{
"name_en": "Kinokoinu: Mushroom Pup",
"name_en_no_season": "Kinokoinu: Mushroom Pup",
"name_zh": "蘑菇狗",
"name_zh_no_season": "蘑菇狗",
"season": 1,
"episode_index": 1,
"subtitle": "简日内封字幕",
"source": "WebRip",
"fansub": "奶²&LoliHouse",
"resolution": "1080p",
"name": " 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集]"
}"#,
);
test_raw_ep_parser_case(
r#"[LoliHouse] 叹气的亡灵想隐退 / Nageki no Bourei wa Intai shitai [01-13 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
r#"{
"name_en": "Nageki no Bourei wa Intai shitai",
"name_en_no_season": "Nageki no Bourei wa Intai shitai",
"name_jp": null,
"name_jp_no_season": null,
"name_zh": "叹气的亡灵想隐退",
"name_zh_no_season": "叹气的亡灵想隐退",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
);
test_raw_ep_parser_case(
r#"[LoliHouse] 精灵幻想记 第二季 / Seirei Gensouki S2 [01-12 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
r#"{
"name_en": "Seirei Gensouki S2",
"name_en_no_season": "Seirei Gensouki",
"name_zh": "精灵幻想记 第二季",
"name_zh_no_season": "精灵幻想记",
"season": 2,
"season_raw": "第二季",
"episode_index": 1,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
);
test_raw_ep_parser_case(
r#"[喵萌奶茶屋&LoliHouse] 超自然武装当哒当 / 胆大党 / Dandadan [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简繁日内封字幕][Fin]"#,
r#" {
"name_en": "Dandadan",
"name_en_no_season": "Dandadan",
"name_zh": "超自然武装当哒当",
"name_zh_no_season": "超自然武装当哒当",
"season": 1,
"episode_index": 1,
"subtitle": "简繁日内封字幕",
"source": "WebRip",
"fansub": "喵萌奶茶屋&LoliHouse",
"resolution": "1080p"
}"#,
);
}
// TODO: FIXME
#[test]
fn test_bad_cases() {
test_raw_ep_parser_case(
r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
r#"{
"name_zh": "摇曳露营△剧场版",
"name_zh_no_season": "摇曳露营△剧场版",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "简繁字幕",
"source": "BDrip",
"fansub": "7³ACG x 桜都字幕组",
"resolution": "1080p"
}"#,
);
test_raw_ep_parser_case(
r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#,
r#"{
"name_en": "第二季 Komi-san wa, Komyushou Desu. S02",
"name_en_no_season": "Komi-san wa, Komyushou Desu.",
"name_zh": "古见同学有交流障碍症",
"name_zh_no_season": "古见同学有交流障碍症",
"season": 2,
"season_raw": "第二季",
"episode_index": 22,
"subtitle": "GB",
"fansub": "幻樱字幕组",
"resolution": "1920X1080"
}"#,
);
}
}
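
For orientation, a minimal sketch of how this removed regex-based API was driven, reusing the `test_parse_ep_with_two_zh_title` fixture above; the nom-based `origin` extractor introduced in this change set replaces it:

// Sketch only: mirrors an existing test fixture, not new behaviour.
fn demo_removed_parser() -> RecorderResult<()> {
    let meta = parse_episode_meta_from_raw_name(
        "[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
    )?;
    assert_eq!(meta.episode_index, 1);
    assert_eq!(meta.name_en.as_deref(), Some("Nozomanu Fushi no Boukensha"));
    Ok(())
}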


@@ -0,0 +1,14 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::bangumi};
pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId);
}
pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<bangumi::BangumiType>();
seaography::register_entity!(builder, bangumi);
builder
}


@@ -0,0 +1,168 @@
use std::sync::Arc;
use async_graphql::dynamic::{
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
};
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql;
use crate::{
app::AppContextTrait,
auth::AuthUserInfo,
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::crypto::{
register_crypto_column_input_conversion_to_schema_context,
register_crypto_column_output_conversion_to_schema_context,
},
},
models::credential_3rd,
};
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
struct Credential3rdCheckAvailableInput {
pub id: i32,
}
impl Credential3rdCheckAvailableInput {
fn input_type_name() -> &'static str {
"Credential3rdCheckAvailableInput"
}
fn arg_name() -> &'static str {
"filter"
}
fn generate_input_object() -> InputObject {
InputObject::new(Self::input_type_name())
.description("The input of the credential3rdCheckAvailable query")
.field(InputValue::new(
Credential3rdCheckAvailableInputFieldEnum::Id.as_str(),
TypeRef::named_nn(TypeRef::INT),
))
}
}
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct Credential3rdCheckAvailableInfo {
pub available: bool,
}
impl Credential3rdCheckAvailableInfo {
fn object_type_name() -> &'static str {
"Credential3rdCheckAvailableInfo"
}
fn generate_output_object() -> Object {
Object::new(Self::object_type_name())
.description("The output of the credential3rdCheckAvailable query")
.field(Field::new(
Credential3rdCheckAvailableInfoFieldEnum::Available,
TypeRef::named_nn(TypeRef::BOOLEAN),
move |ctx| {
FieldFuture::new(async move {
let subscription_info = ctx.parent_value.try_downcast_ref::<Self>()?;
Ok(Some(async_graphql::Value::from(
subscription_info.available,
)))
})
},
))
}
}
pub fn register_credential3rd_to_schema_context(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
) {
restrict_subscriber_for_entity::<credential_3rd::Entity>(
context,
&credential_3rd::Column::SubscriberId,
);
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Cookies,
);
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Username,
);
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Password,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Cookies,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Username,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx,
&credential_3rd::Column::Password,
);
}
pub fn register_credential3rd_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.register_enumeration::<credential_3rd::Credential3rdType>();
seaography::register_entity!(builder, credential_3rd);
builder.schema = builder
.schema
.register(Credential3rdCheckAvailableInput::generate_input_object());
builder.schema = builder
.schema
.register(Credential3rdCheckAvailableInfo::generate_output_object());
builder.queries.push(
Field::new(
"credential3rdCheckAvailable",
TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
let input: Credential3rdCheckAvailableInput = ctx
.args
.get(Credential3rdCheckAvailableInput::arg_name())
.unwrap()
.deserialize()?;
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let credential_model = credential_3rd::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
input.id,
auth_user_info.subscriber_auth.subscriber_id,
)
.await?
.ok_or_else(|| RecorderError::Credential3rdError {
message: format!("credential = {} not found", input.id),
source: None.into(),
})?;
let available = credential_model.check_available(app_ctx.as_ref()).await?;
Ok(Some(FieldValue::owned_any(
Credential3rdCheckAvailableInfo { available },
)))
})
},
)
.argument(InputValue::new(
Credential3rdCheckAvailableInput::arg_name(),
TypeRef::named_nn(Credential3rdCheckAvailableInput::input_type_name()),
)),
);
builder
}
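
For reference, a hedged sketch of the query shape this registration exposes; the field, argument, and type names follow the constants in this file, and the id value is a placeholder:

// Illustrative query document for the custom field registered above.
const CHECK_AVAILABLE_QUERY: &str = r#"
query {
  credential3rdCheckAvailable(filter: { id: 1 }) {
    available
  }
}
"#;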


@@ -0,0 +1,17 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloaders};
pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<downloaders::Entity>(
context,
&downloaders::Column::SubscriberId,
);
}
pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<downloaders::DownloaderCategory>();
seaography::register_entity!(builder, downloaders);
builder
}


@@ -0,0 +1,15 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloads};
pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId);
}
pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<downloads::DownloadStatus>();
builder.register_enumeration::<downloads::DownloadMime>();
seaography::register_entity!(builder, downloads);
builder
}


@@ -0,0 +1,14 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::episodes};
pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId);
}
pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<episodes::EpisodeType>();
seaography::register_entity!(builder, episodes);
builder
}


@@ -0,0 +1,56 @@
use std::sync::Arc;
use async_graphql::dynamic::ResolverContext;
use sea_orm::Value as SeaValue;
use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::util::{get_entity_column_key, get_entity_key},
},
models::feeds,
};
pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<feeds::Entity>(context, &feeds::Column::SubscriberId);
{
let entity_column_key =
get_entity_column_key::<feeds::Entity>(context, &feeds::Column::Token);
let entity_key = get_entity_key::<feeds::Entity>(context);
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
let entity_create_one_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name, context.entity_create_one_mutation.mutation_suffix
));
let entity_create_batch_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name,
context.entity_create_batch_mutation.mutation_suffix.clone()
));
context.types.input_none_conversions.insert(
entity_column_key,
Box::new(
move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
let field_name = context.field().name();
if field_name == entity_create_one_mutation_field_name.as_str()
|| field_name == entity_create_batch_mutation_field_name.as_str()
{
Ok(Some(SeaValue::String(Some(Box::new(nanoid::nanoid!())))))
} else {
Ok(None)
}
},
),
);
}
}
pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<feeds::FeedType>();
builder.register_enumeration::<feeds::FeedSource>();
seaography::register_entity!(builder, feeds);
builder
}
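
The none-conversion registered above only fires for the create-one and create-batch mutation fields, so a feed created without a token still receives one; in isolation the fallback amounts to this sketch:

// Minimal illustration of the fallback applied above: a missing token on a
// create mutation is replaced with a freshly generated nanoid.
fn default_feed_token(incoming: Option<String>) -> String {
    incoming.unwrap_or_else(|| nanoid::nanoid!())
}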


@@ -0,0 +1,12 @@
pub mod credential_3rd;
pub mod bangumi;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
pub mod feeds;
pub mod subscriber_tasks;
pub mod subscribers;
pub mod subscription_bangumi;
pub mod subscription_episode;
pub mod subscriptions;


@@ -0,0 +1,151 @@
use std::{ops::Deref, sync::Arc};
use async_graphql::dynamic::{FieldValue, TypeRef};
use sea_orm::{
ColumnTrait, ConnectionTrait, EntityTrait, QueryFilter, QuerySelect, QueryTrait, prelude::Expr,
sea_query::Query,
};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, EntityDeleteMutationBuilder, EntityObjectBuilder,
EntityQueryFieldBuilder, get_filter_conditions,
};
use crate::{
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::{
custom::generate_entity_filter_mutation_field,
json::{convert_jsonb_output_case_for_entity, restrict_jsonb_filter_input_for_entity},
},
},
models::subscriber_tasks,
task::{ApalisJobs, ApalisSchema},
};
pub fn register_subscriber_tasks_entity_mutations(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
let context = builder.context;
{
let entitity_delete_mutation_builder = EntityDeleteMutationBuilder { context };
let delete_mutation = generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
context,
entitity_delete_mutation_builder.type_name::<subscriber_tasks::Entity>(),
TypeRef::named_nn(TypeRef::INT),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
resolver_ctx,
context,
filters,
);
Box::pin(async move {
let db = app_ctx.db();
let select_subquery = subscriber_tasks::Entity::find()
.select_only()
.column(subscriber_tasks::Column::Id)
.filter(filters_condition);
let delete_query = Query::delete()
.from_table((ApalisSchema::Schema, ApalisJobs::Table))
.and_where(
Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()),
)
.to_owned();
let db_backend = db.deref().get_database_backend();
let delete_statement = db_backend.build(&delete_query);
let result = db.execute(delete_statement).await?;
Ok::<_, RecorderError>(Some(FieldValue::value(result.rows_affected() as i32)))
})
}),
);
builder.mutations.push(delete_mutation);
}
{
let entity_object_builder = EntityObjectBuilder { context };
let entity_query_field = EntityQueryFieldBuilder { context };
let entity_retry_one_mutation_name = format!(
"{}RetryOne",
entity_query_field.type_name::<subscriber_tasks::Entity>()
);
let retry_one_mutation =
generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
context,
entity_retry_one_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
resolver_ctx,
context,
filters,
);
Box::pin(async move {
let db = app_ctx.db();
let job_id = subscriber_tasks::Entity::find()
.filter(filters_condition)
.select_only()
.column(subscriber_tasks::Column::Id)
.into_tuple::<String>()
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
let task = app_ctx.task();
task.retry_subscriber_task(job_id.clone()).await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(&job_id))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(retry_one_mutation);
}
builder
}
pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::SubscriberId,
);
restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
);
convert_jsonb_output_case_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
);
}
pub fn register_subscriber_tasks_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.register_entity::<subscriber_tasks::Entity>(
<subscriber_tasks::RelatedEntity as sea_orm::Iterable>::iter()
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(subscriber_tasks::Entity, tokio::spawn);
builder =
builder.register_entity_dataloader_one_to_many(subscriber_tasks::Entity, tokio::spawn);
builder = register_subscriber_tasks_entity_mutations(builder);
builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
builder
}
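
The delete mutation above bypasses per-row entity deletes and issues a single statement against the Apalis jobs table, keyed by a subquery over the filtered task ids. Roughly, the generated SQL has this shape; the schema and table names are assumed from the ApalisSchema/ApalisJobs identifiers and are shown for illustration only:

// Illustrative only; the actual statement is assembled with sea_query above.
const DELETE_SHAPE_SQL: &str = r#"
DELETE FROM "apalis"."jobs"
WHERE "id" IN (SELECT "id" FROM "subscriber_tasks" WHERE /* guarded filter conditions */)
"#;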


@@ -1,14 +1,29 @@
use std::sync::Arc;
- use async_graphql::dynamic::{ResolverContext, ValueAccessor};
- use sea_orm::EntityTrait;
- use seaography::{BuilderContext, FnGuard, GuardAction};
+ use async_graphql::dynamic::{ObjectAccessor, ResolverContext, TypeRef, ValueAccessor};
+ use lazy_static::lazy_static;
+ use maplit::btreeset;
+ use sea_orm::{ColumnTrait, Condition, EntityTrait, Iterable, Value as SeaValue};
+ use seaography::{
+ Builder as SeaographyBuilder, BuilderContext, FilterInfo,
+ FilterOperation as SeaographqlFilterOperation, FilterType, FilterTypesMapHelper,
+ FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult, SeaographyError,
+ };
use crate::{
auth::{AuthError, AuthUserInfo},
- graphql::infra::util::{get_column_key, get_entity_key},
+ graphql::infra::util::{get_column_key, get_entity_column_key, get_entity_key},
+ models::subscribers,
};
+ lazy_static! {
+ pub static ref SUBSCRIBER_ID_FILTER_INFO: FilterInfo = FilterInfo {
+ type_name: String::from("SubscriberIdFilterInput"),
+ base_type: TypeRef::INT.into(),
+ supported_operations: btreeset! { SeaographqlFilterOperation::Equals },
+ };
+ }
fn guard_data_object_accessor_with_subscriber_id(
value: ValueAccessor<'_>,
column_name: &str,
@@ -181,3 +196,158 @@ where
}
})
}
pub fn generate_subscriber_id_filter_condition<T>(
_context: &BuilderContext,
column: &T::Column,
) -> FnFilterCondition
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let column = *column;
Box::new(
move |context: &ResolverContext,
mut condition: Condition,
filter: Option<&ObjectAccessor<'_>>|
-> SeaResult<Condition> {
match context.ctx.data::<AuthUserInfo>() {
Ok(user_info) => {
let subscriber_id = user_info.subscriber_auth.subscriber_id;
if let Some(filter) = filter {
for operation in &SUBSCRIBER_ID_FILTER_INFO.supported_operations {
match operation {
SeaographqlFilterOperation::Equals => {
if let Some(value) = filter.get("eq") {
let value: i32 = value.i64()?.try_into()?;
if value != subscriber_id {
return Err(SeaographyError::AsyncGraphQLError(
async_graphql::Error::new(
"subscriber_id and auth_info does not match",
),
));
}
}
}
_ => unreachable!("unreachable filter operation for subscriber_id"),
}
}
} else {
condition = condition.add(column.eq(subscriber_id));
}
Ok(condition)
}
Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
}
},
)
}
pub fn generate_default_subscriber_id_input_conversion<T>(
context: &BuilderContext,
_column: &T::Column,
) -> FnInputTypeNoneConversion
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
let entity_create_one_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name, context.entity_create_one_mutation.mutation_suffix
));
let entity_create_batch_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name,
context.entity_create_batch_mutation.mutation_suffix.clone()
));
Box::new(
move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
let field_name = context.field().name();
if field_name == entity_create_one_mutation_field_name.as_str()
|| field_name == entity_create_batch_mutation_field_name.as_str()
{
match context.ctx.data::<AuthUserInfo>() {
Ok(user_info) => {
let subscriber_id = user_info.subscriber_auth.subscriber_id;
Ok(Some(SeaValue::Int(Some(subscriber_id))))
}
Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
}
} else {
Ok(None)
}
},
)
}
pub fn restrict_subscriber_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let entity_column_key = get_entity_column_key::<T>(context, column);
context.guards.entity_guards.insert(
entity_key.clone(),
guard_entity_with_subscriber_id::<T>(context, column),
);
context.guards.field_guards.insert(
entity_column_key.clone(),
guard_field_with_subscriber_id::<T>(context, column),
);
context.filter_types.overwrites.insert(
entity_column_key.clone(),
Some(FilterType::Custom(
SUBSCRIBER_ID_FILTER_INFO.type_name.clone(),
)),
);
context.filter_types.condition_functions.insert(
entity_column_key.clone(),
generate_subscriber_id_filter_condition::<T>(context, column),
);
context.types.input_none_conversions.insert(
entity_column_key.clone(),
generate_default_subscriber_id_input_conversion::<T>(context, column),
);
context.entity_input.update_skips.push(entity_column_key);
}
pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscribers::Entity>(context, &subscribers::Column::Id);
for column in subscribers::Column::iter() {
if !matches!(column, subscribers::Column::Id) {
let key = get_entity_column_key::<subscribers::Entity>(context, &column);
context.filter_types.overwrites.insert(key, None);
}
}
}
pub fn register_subscribers_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
{
let filter_types_map_helper = FilterTypesMapHelper {
context: builder.context,
};
builder.schema = builder
.schema
.register(filter_types_map_helper.generate_filter_input(&SUBSCRIBER_ID_FILTER_INFO));
}
{
builder.register_entity::<subscribers::Entity>(
<subscribers::RelatedEntity as sea_orm::Iterable>::iter()
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
}
builder
}
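
With SubscriberIdFilterInput registered as the only filter type for subscriber-id columns, a query can at most assert equality with the authenticated subscriber, and omitting the filter scopes the condition automatically. An illustrative query follows; the entity and connection field names assume seaography's default naming and are not taken from this change set:

// Illustrative query; the eq value must match the authenticated subscriber or the
// custom filter condition above rejects the request.
const SCOPED_QUERY: &str = r#"
query {
  episodes(filters: { subscriberId: { eq: 1 } }) {
    nodes { id }
  }
}
"#;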


@@ -0,0 +1,20 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_bangumi,
};
pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
context,
&subscription_bangumi::Column::SubscriberId,
);
}
pub fn register_subscription_bangumi_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
seaography::register_entity!(builder, subscription_bangumi);
builder
}


@@ -0,0 +1,20 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_episode,
};
pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscription_episode::Entity>(
context,
&subscription_episode::Column::SubscriberId,
);
}
pub fn register_subscription_episode_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
seaography::register_entity!(builder, subscription_episode);
builder
}


@@ -0,0 +1,214 @@
use std::sync::Arc;
use async_graphql::dynamic::{FieldValue, TypeRef};
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, EntityObjectBuilder, EntityQueryFieldBuilder,
get_filter_conditions,
};
use crate::{
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::generate_entity_filter_mutation_field,
},
models::{
subscriber_tasks,
subscriptions::{self, SubscriptionTrait},
},
task::SubscriberTask,
};
pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscriptions::Entity>(
context,
&subscriptions::Column::SubscriberId,
);
}
pub fn register_subscriptions_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.register_enumeration::<subscriptions::SubscriptionCategory>();
seaography::register_entity!(builder, subscriptions);
let context = builder.context;
let entity_object_builder = EntityObjectBuilder { context };
let entity_query_field = EntityQueryFieldBuilder { context };
{
let sync_one_feeds_incremental_mutation_name = format!(
"{}SyncOneFeedsIncremental",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_feeds_incremental_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_incremental_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsIncremental(
subscription.into(),
),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_feeds_incremental_mutation);
}
{
let sync_one_feeds_full_mutation_name = format!(
"{}SyncOneFeedsFull",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_feeds_full_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_full_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsFull(subscription.into()),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_feeds_full_mutation);
}
{
let sync_one_sources_mutation_name = format!(
"{}SyncOneSources",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_sources_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_sources_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionSources(subscription.into()),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_sources_mutation);
}
builder
}

View File

@@ -0,0 +1,69 @@
use std::sync::Arc;
use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::{EntityTrait, Value as SeaValue};
use seaography::{BuilderContext, SeaResult};
use crate::{
app::AppContextTrait,
graphql::infra::util::{get_column_key, get_entity_key},
};
pub fn register_crypto_column_input_conversion_to_schema_context<T>(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
column: &T::Column,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
context.types.input_conversions.insert(
format!("{entity_name}.{column_name}"),
Box::new(
move |_resolve_context: &ResolverContext<'_>,
value: &ValueAccessor|
-> SeaResult<sea_orm::Value> {
let source = value.string()?;
let encrypted = ctx.crypto().encrypt_string(source.into())?;
Ok(encrypted.into())
},
),
);
}
pub fn register_crypto_column_output_conversion_to_schema_context<T>(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
column: &T::Column,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
context.types.output_conversions.insert(
format!("{entity_name}.{column_name}"),
Box::new(
move |value: &sea_orm::Value| -> SeaResult<async_graphql::Value> {
if let SeaValue::String(s) = value {
if let Some(s) = s {
let decrypted = ctx.crypto().decrypt_string(s)?;
Ok(async_graphql::Value::String(decrypted))
} else {
Ok(async_graphql::Value::Null)
}
} else {
Err(async_graphql::Error::new("crypto column must be string column").into())
}
},
),
);
}
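
A minimal sketch of the assumed call site for the two helpers above, borrowing the credential_3rd entity and column used elsewhere in this changeset (illustrative wiring, not necessarily the exact registration code):

register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
    &mut context,                      // BuilderContext being assembled
    app_ctx.clone(),                   // Arc<dyn AppContextTrait> exposing the crypto service
    &credential_3rd::Column::Cookies,  // stored encrypted at rest
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
    &mut context,
    app_ctx.clone(),
    &credential_3rd::Column::Cookies,  // decrypted when read back through GraphQL
);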

View File

@@ -0,0 +1,76 @@
use std::{pin::Pin, sync::Arc};
use async_graphql::dynamic::{
Field, FieldFuture, FieldValue, InputValue, ResolverContext, TypeRef, ValueAccessor,
};
use sea_orm::EntityTrait;
use seaography::{BuilderContext, EntityObjectBuilder, FilterInputBuilder, GuardAction};
use crate::{app::AppContextTrait, errors::RecorderResult};
pub type FilterMutationFn = Arc<
dyn for<'a> Fn(
&ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Option<ValueAccessor<'_>>,
) -> Pin<
Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>,
> + Send
+ Sync,
>;
pub fn generate_entity_filter_mutation_field<T, N, R>(
builder_context: &'static BuilderContext,
field_name: N,
type_ref: R,
mutation_fn: FilterMutationFn,
) -> Field
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
N: Into<String>,
R: Into<TypeRef>,
{
let entity_filter_input_builder = FilterInputBuilder {
context: builder_context,
};
let entity_object_builder = EntityObjectBuilder {
context: builder_context,
};
let object_name: String = entity_object_builder.type_name::<T>();
let context = builder_context;
let guard = builder_context.guards.entity_guards.get(&object_name);
Field::new(field_name, type_ref, move |ctx| {
let mutation_fn = mutation_fn.clone();
FieldFuture::new(async move {
let guard_flag = if let Some(guard) = guard {
(*guard)(&ctx)
} else {
GuardAction::Allow
};
if let GuardAction::Block(reason) = guard_flag {
return Err::<Option<_>, async_graphql::Error>(async_graphql::Error::new(
reason.unwrap_or("Entity guard triggered.".into()),
));
}
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let filters = ctx.args.get(&context.entity_delete_mutation.filter_field);
let result = mutation_fn(&ctx, app_ctx.clone(), filters)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(result)
})
})
.argument(InputValue::new(
&context.entity_delete_mutation.filter_field,
TypeRef::named(entity_filter_input_builder.type_name(&object_name)),
))
}

View File

@@ -1,19 +0,0 @@
mod json;
mod subscriber;
use async_graphql::dynamic::TypeRef;
pub use json::{
JSONB_FILTER_NAME, jsonb_filter_condition_function,
register_jsonb_input_filter_to_dynamic_schema,
};
use maplit::btreeset;
use seaography::{FilterInfo, FilterOperation as SeaographqlFilterOperation};
pub use subscriber::{SUBSCRIBER_ID_FILTER_INFO, subscriber_id_condition_function};
pub fn init_custom_filter_info() {
SUBSCRIBER_ID_FILTER_INFO.get_or_init(|| FilterInfo {
type_name: String::from("SubscriberIdFilterInput"),
base_type: TypeRef::INT.into(),
supported_operations: btreeset! { SeaographqlFilterOperation::Equals },
});
}

View File

@@ -1,39 +0,0 @@
use async_graphql::dynamic::ObjectAccessor;
use once_cell::sync::OnceCell;
use sea_orm::{ColumnTrait, Condition, EntityTrait};
use seaography::{
BuilderContext, FilterInfo, FilterOperation as SeaographqlFilterOperation, SeaResult,
};
pub static SUBSCRIBER_ID_FILTER_INFO: OnceCell<FilterInfo> = OnceCell::new();
pub type FnFilterCondition =
Box<dyn Fn(Condition, &ObjectAccessor) -> SeaResult<Condition> + Send + Sync>;
pub fn subscriber_id_condition_function<T>(
_context: &BuilderContext,
column: &T::Column,
) -> FnFilterCondition
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let column = *column;
Box::new(move |mut condition, filter| {
let subscriber_id_filter_info = SUBSCRIBER_ID_FILTER_INFO.get().unwrap();
let operations = &subscriber_id_filter_info.supported_operations;
for operation in operations {
match operation {
SeaographqlFilterOperation::Equals => {
if let Some(value) = filter.get("eq") {
let value: i32 = value.i64()?.try_into()?;
let value = sea_orm::Value::Int(Some(value));
condition = condition.add(column.eq(value));
}
}
_ => unreachable!("unreachable filter operation for subscriber_id"),
}
}
Ok(condition)
})
}

View File

@@ -1,18 +1,25 @@
use async_graphql::{ use async_graphql::{
Error as GraphqlError, Error as GraphqlError,
dynamic::{Scalar, SchemaBuilder, SchemaError}, dynamic::{ResolverContext, Scalar, SchemaError},
to_value, to_value,
}; };
use convert_case::Case;
use itertools::Itertools; use itertools::Itertools;
use rust_decimal::{Decimal, prelude::FromPrimitive}; use rust_decimal::{Decimal, prelude::FromPrimitive};
use sea_orm::{ use sea_orm::{
Condition, EntityTrait, Condition, EntityTrait,
sea_query::{ArrayType, Expr, ExprTrait, IntoLikeExpr, SimpleExpr, Value as DbValue}, sea_query::{ArrayType, Expr, ExprTrait, IntoLikeExpr, SimpleExpr, Value as DbValue},
}; };
use seaography::{BuilderContext, SeaographyError}; use seaography::{
Builder as SeaographyBuilder, BuilderContext, FilterType, FnFilterCondition, SeaographyError,
};
use serde::{Serialize, de::DeserializeOwned};
use serde_json::Value as JsonValue; use serde_json::Value as JsonValue;
use crate::{errors::RecorderResult, graphql::infra::filter::subscriber::FnFilterCondition}; use crate::{
errors::RecorderResult, graphql::infra::util::get_entity_column_key,
utils::json::convert_json_keys,
};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Copy)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Copy)]
pub enum JsonbFilterOperation { pub enum JsonbFilterOperation {
@@ -892,7 +899,7 @@ where
pub const JSONB_FILTER_NAME: &str = "JsonbFilterInput"; pub const JSONB_FILTER_NAME: &str = "JsonbFilterInput";
pub fn jsonb_filter_condition_function<T>( pub fn generate_jsonb_filter_condition_function<T>(
_context: &BuilderContext, _context: &BuilderContext,
column: &T::Column, column: &T::Column,
) -> FnFilterCondition ) -> FnFilterCondition
@@ -901,27 +908,107 @@ where
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
{ {
let column = *column; let column = *column;
Box::new(move |mut condition, filter| { Box::new(
let filter_value = to_value(filter.as_index_map()) move |_resolve_context: &ResolverContext<'_>, condition, filter| {
.map_err(|e| SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e)))?; if let Some(filter) = filter {
let filter_value = to_value(filter.as_index_map()).map_err(|e| {
SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e))
})?;
let filter_json: JsonValue = filter_value let filter_json: JsonValue = filter_value.into_json().map_err(|e| {
.into_json() SeaographyError::AsyncGraphQLError(GraphqlError::new(format!("{e:?}")))
.map_err(|e| SeaographyError::AsyncGraphQLError(GraphqlError::new(format!("{e:?}"))))?; })?;
let cond_where = prepare_jsonb_filter_input(&Expr::col(column), filter_json) let cond_where = prepare_jsonb_filter_input(&Expr::col(column), filter_json)
.map_err(|e| SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e)))?; .map_err(|e| {
SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e))
})?;
condition = condition.add(cond_where); let condition = condition.add(cond_where);
Ok(condition) Ok(condition)
}) } else {
Ok(condition)
}
},
)
} }
pub fn register_jsonb_input_filter_to_dynamic_schema( pub fn register_jsonb_input_filter_to_schema_builder(
schema_builder: SchemaBuilder, mut builder: SeaographyBuilder,
) -> SchemaBuilder { ) -> SeaographyBuilder {
let json_filter_input_type = Scalar::new(JSONB_FILTER_NAME); let json_filter_input_type = Scalar::new(JSONB_FILTER_NAME);
schema_builder.register(json_filter_input_type) builder.schema = builder.schema.register(json_filter_input_type);
builder
}
pub fn restrict_jsonb_filter_input_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_column_key = get_entity_column_key::<T>(context, column);
context.filter_types.overwrites.insert(
entity_column_key.clone(),
Some(FilterType::Custom(JSONB_FILTER_NAME.to_string())),
);
}
pub fn validate_jsonb_input_for_entity<T, S>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
S: DeserializeOwned + Serialize,
{
let entity_column_key = get_entity_column_key::<T>(context, column);
context.types.input_conversions.insert(
entity_column_key.clone(),
Box::new(move |_resolve_context, accessor| {
let deserialized = accessor.deserialize::<S>().map_err(|err| {
SeaographyError::TypeConversionError(
err.message,
format!("Json - {entity_column_key}"),
)
})?;
let json_value = serde_json::to_value(deserialized).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_key}"),
)
})?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}),
);
}
pub fn convert_jsonb_output_case_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_column_key = get_entity_column_key::<T>(context, column);
context.types.output_conversions.insert(
entity_column_key.clone(),
Box::new(move |value| {
if let sea_orm::Value::Json(Some(json)) = value {
let result = async_graphql::Value::from_json(convert_json_keys(
json.as_ref().clone(),
Case::Camel,
))
.map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_key}"),
)
})?;
Ok(result)
} else {
Err(SeaographyError::TypeConversionError(
"value should be json".to_string(),
format!("Json - {entity_column_key}"),
))
}
}),
);
} }
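
A minimal sketch of how these jsonb helpers are presumably combined for the subscriber task `job` column; SubscriberTask (from crate::task) is assumed to implement Serialize and DeserializeOwned:

restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
    &mut context,
    &subscriber_tasks::Column::Job,    // filter with the custom JsonbFilterInput scalar
);
validate_jsonb_input_for_entity::<subscriber_tasks::Entity, SubscriberTask>(
    &mut context,
    &subscriber_tasks::Column::Job,    // reject writes that do not deserialize into SubscriberTask
);
convert_jsonb_output_case_for_entity::<subscriber_tasks::Entity>(
    &mut context,
    &subscriber_tasks::Column::Job,    // camel-case json keys on the way out
);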
#[cfg(test)] #[cfg(test)]

View File

@@ -1,6 +1,4 @@
pub mod filter; pub mod crypto;
pub mod guard; pub mod custom;
pub mod order; pub mod json;
pub mod pagination;
pub mod transformer;
pub mod util; pub mod util;

View File

@@ -1,36 +0,0 @@
use async_graphql::{InputObject, SimpleObject};
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, InputObject)]
pub struct CursorInput {
pub cursor: Option<String>,
pub limit: u64,
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, InputObject)]
pub struct PageInput {
pub page: u64,
pub limit: u64,
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, InputObject)]
pub struct OffsetInput {
pub offset: u64,
pub limit: u64,
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, InputObject)]
pub struct PaginationInput {
pub cursor: Option<CursorInput>,
pub page: Option<PageInput>,
pub offset: Option<OffsetInput>,
}
pub type PageInfo = async_graphql::connection::PageInfo;
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, SimpleObject)]
pub struct PaginationInfo {
pub pages: u64,
pub current: u64,
pub offset: u64,
pub total: u64,
}

View File

@@ -1,173 +0,0 @@
use std::{collections::BTreeMap, sync::Arc};
use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::{ColumnTrait, Condition, EntityTrait, Value as SeaValue};
use seaography::{
BuilderContext, FnFilterConditionsTransformer, FnMutationInputObjectTransformer, SeaResult,
};
use super::util::{get_column_key, get_entity_key};
use crate::{app::AppContextTrait, auth::AuthUserInfo, models::credential_3rd};
pub fn build_filter_condition_transformer<T>(
_context: &BuilderContext,
column: &T::Column,
) -> FnFilterConditionsTransformer
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let column = *column;
Box::new(
move |context: &ResolverContext, condition: Condition| -> Condition {
match context.ctx.data::<AuthUserInfo>() {
Ok(user_info) => {
let subscriber_id = user_info.subscriber_auth.subscriber_id;
condition.add(column.eq(subscriber_id))
}
Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
}
},
)
}
pub fn build_mutation_input_object_transformer<T>(
context: &BuilderContext,
column: &T::Column,
) -> FnMutationInputObjectTransformer
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
let column_key = get_column_key::<T>(context, column);
let column_name = Arc::new(context.entity_object.column_name.as_ref()(
&entity_key,
&column_key,
));
let entity_create_one_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name, context.entity_create_one_mutation.mutation_suffix
));
let entity_create_batch_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name,
context.entity_create_batch_mutation.mutation_suffix.clone()
));
Box::new(
move |context: &ResolverContext,
mut input: BTreeMap<String, SeaValue>|
-> BTreeMap<String, SeaValue> {
let field_name = context.field().name();
if field_name == entity_create_one_mutation_field_name.as_str()
|| field_name == entity_create_batch_mutation_field_name.as_str()
{
match context.ctx.data::<AuthUserInfo>() {
Ok(user_info) => {
let subscriber_id = user_info.subscriber_auth.subscriber_id;
let value = input.get_mut(column_name.as_str());
if value.is_none() {
input.insert(
column_name.as_str().to_string(),
SeaValue::Int(Some(subscriber_id)),
);
}
input
}
Err(err) => unreachable!("auth user info must be guarded: {:?}", err),
}
} else {
input
}
},
)
}
fn add_crypto_column_input_conversion<T>(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
column: &T::Column,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
context.types.input_conversions.insert(
format!("{entity_name}.{column_name}"),
Box::new(move |value: &ValueAccessor| -> SeaResult<sea_orm::Value> {
let source = value.string()?;
let encrypted = ctx.crypto().encrypt_string(source.into())?;
Ok(encrypted.into())
}),
);
}
fn add_crypto_column_output_conversion<T>(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
column: &T::Column,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
context.types.output_conversions.insert(
format!("{entity_name}.{column_name}"),
Box::new(
move |value: &sea_orm::Value| -> SeaResult<async_graphql::Value> {
if let SeaValue::String(s) = value {
if let Some(s) = s {
let decrypted = ctx.crypto().decrypt_string(s)?;
Ok(async_graphql::Value::String(decrypted))
} else {
Ok(async_graphql::Value::Null)
}
} else {
Err(async_graphql::Error::new("crypto column must be string column").into())
}
},
),
);
}
pub fn add_crypto_transformers(context: &mut BuilderContext, ctx: Arc<dyn AppContextTrait>) {
add_crypto_column_input_conversion::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Cookies,
);
add_crypto_column_input_conversion::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Username,
);
add_crypto_column_input_conversion::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Password,
);
add_crypto_column_output_conversion::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Cookies,
);
add_crypto_column_output_conversion::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Username,
);
add_crypto_column_output_conversion::<credential_3rd::Entity>(
context,
ctx,
&credential_3rd::Column::Password,
);
}

View File

@@ -1,8 +1,8 @@
pub mod config; pub mod config;
pub mod domains;
pub mod infra; pub mod infra;
mod schema; mod schema;
pub mod service; pub mod service;
pub mod views;
pub use config::GraphQLConfig; pub use config::GraphQLConfig;
pub use schema::build_schema; pub use schema::build_schema;

View File

@@ -2,169 +2,72 @@ use std::sync::Arc;
use async_graphql::dynamic::*; use async_graphql::dynamic::*;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use sea_orm::{EntityTrait, Iterable}; use seaography::{Builder, BuilderContext};
use seaography::{Builder, BuilderContext, FilterType, FilterTypesMapHelper};
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
graphql::{ graphql::{
infra::{ domains::{
filter::{ bangumi::{register_bangumi_to_schema_builder, register_bangumi_to_schema_context},
JSONB_FILTER_NAME, SUBSCRIBER_ID_FILTER_INFO, init_custom_filter_info, credential_3rd::{
register_jsonb_input_filter_to_dynamic_schema, subscriber_id_condition_function, register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context,
}, },
guard::{guard_entity_with_subscriber_id, guard_field_with_subscriber_id}, downloaders::{
transformer::{ register_downloaders_to_schema_builder, register_downloaders_to_schema_context,
add_crypto_transformers, build_filter_condition_transformer,
build_mutation_input_object_transformer,
}, },
util::{get_entity_column_key, get_entity_key}, downloads::{
register_downloads_to_schema_builder, register_downloads_to_schema_context,
}, },
views::register_subscriptions_to_schema, episodes::{register_episodes_to_schema_builder, register_episodes_to_schema_context},
feeds::{register_feeds_to_schema_builder, register_feeds_to_schema_context},
subscriber_tasks::{
register_subscriber_tasks_to_schema_builder,
register_subscriber_tasks_to_schema_context,
},
subscribers::{
register_subscribers_to_schema_builder, register_subscribers_to_schema_context,
},
subscription_bangumi::{
register_subscription_bangumi_to_schema_builder,
register_subscription_bangumi_to_schema_context,
},
subscription_episode::{
register_subscription_episode_to_schema_builder,
register_subscription_episode_to_schema_context,
},
subscriptions::{
register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context,
},
},
infra::json::register_jsonb_input_filter_to_schema_builder,
}, },
}; };
pub static CONTEXT: OnceCell<BuilderContext> = OnceCell::new(); pub static CONTEXT: OnceCell<BuilderContext> = OnceCell::new();
fn restrict_filter_input_for_entity<T>(
context: &mut BuilderContext,
column: &T::Column,
filter_type: Option<FilterType>,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let key = get_entity_column_key::<T>(context, column);
context.filter_types.overwrites.insert(key, filter_type);
}
fn restrict_jsonb_filter_input_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_column_key = get_entity_column_key::<T>(context, column);
context.filter_types.overwrites.insert(
entity_column_key.clone(),
Some(FilterType::Custom(JSONB_FILTER_NAME.to_string())),
);
}
fn restrict_subscriber_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let entity_column_key = get_entity_column_key::<T>(context, column);
context.guards.entity_guards.insert(
entity_key.clone(),
guard_entity_with_subscriber_id::<T>(context, column),
);
context.guards.field_guards.insert(
entity_column_key.clone(),
guard_field_with_subscriber_id::<T>(context, column),
);
context.filter_types.overwrites.insert(
entity_column_key.clone(),
Some(FilterType::Custom(
SUBSCRIBER_ID_FILTER_INFO.get().unwrap().type_name.clone(),
)),
);
context.filter_types.condition_functions.insert(
entity_column_key.clone(),
subscriber_id_condition_function::<T>(context, column),
);
context.transformers.filter_conditions_transformers.insert(
entity_key.clone(),
build_filter_condition_transformer::<T>(context, column),
);
context
.transformers
.mutation_input_object_transformers
.insert(
entity_key,
build_mutation_input_object_transformer::<T>(context, column),
);
context
.entity_input
.insert_skips
.push(entity_column_key.clone());
context.entity_input.update_skips.push(entity_column_key);
}
pub fn build_schema( pub fn build_schema(
app_ctx: Arc<dyn AppContextTrait>, app_ctx: Arc<dyn AppContextTrait>,
depth: Option<usize>, depth: Option<usize>,
complexity: Option<usize>, complexity: Option<usize>,
) -> Result<Schema, SchemaError> { ) -> Result<Schema, SchemaError> {
use crate::models::*;
let database = app_ctx.db().as_ref().clone(); let database = app_ctx.db().as_ref().clone();
init_custom_filter_info();
let context = CONTEXT.get_or_init(|| { let context = CONTEXT.get_or_init(|| {
let mut context = BuilderContext::default(); let mut context = BuilderContext::default();
context.pagination_input.type_name = "PaginationInput".to_string(); {
context.pagination_info_object.type_name = "PaginationInfo".to_string(); // domains
context.cursor_input.type_name = "CursorInput".to_string(); register_feeds_to_schema_context(&mut context);
context.offset_input.type_name = "OffsetInput".to_string(); register_subscribers_to_schema_context(&mut context);
context.page_input.type_name = "PageInput".to_string(); register_subscriptions_to_schema_context(&mut context);
context.page_info_object.type_name = "PageInfo".to_string(); register_subscriber_tasks_to_schema_context(&mut context);
register_credential3rd_to_schema_context(&mut context, app_ctx.clone());
restrict_subscriber_for_entity::<bangumi::Entity>( register_downloaders_to_schema_context(&mut context);
&mut context, register_downloads_to_schema_context(&mut context);
&bangumi::Column::SubscriberId, register_episodes_to_schema_context(&mut context);
); register_subscription_bangumi_to_schema_context(&mut context);
restrict_subscriber_for_entity::<downloaders::Entity>( register_subscription_episode_to_schema_context(&mut context);
&mut context, register_bangumi_to_schema_context(&mut context);
&downloaders::Column::SubscriberId,
);
restrict_subscriber_for_entity::<downloads::Entity>(
&mut context,
&downloads::Column::SubscriberId,
);
restrict_subscriber_for_entity::<episodes::Entity>(
&mut context,
&episodes::Column::SubscriberId,
);
restrict_subscriber_for_entity::<subscriptions::Entity>(
&mut context,
&subscriptions::Column::SubscriberId,
);
restrict_subscriber_for_entity::<subscribers::Entity>(
&mut context,
&subscribers::Column::Id,
);
restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
&mut context,
&subscription_bangumi::Column::SubscriberId,
);
restrict_subscriber_for_entity::<subscription_episode::Entity>(
&mut context,
&subscription_episode::Column::SubscriberId,
);
restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
&mut context,
&subscriber_tasks::Column::SubscriberId,
);
restrict_subscriber_for_entity::<credential_3rd::Entity>(
&mut context,
&credential_3rd::Column::SubscriberId,
);
restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
&mut context,
&subscriber_tasks::Column::Job,
);
add_crypto_transformers(&mut context, app_ctx);
for column in subscribers::Column::iter() {
if !matches!(column, subscribers::Column::Id) {
restrict_filter_input_for_entity::<subscribers::Entity>(
&mut context,
&column,
None,
);
}
} }
context context
}); });
@@ -172,49 +75,22 @@ pub fn build_schema(
let mut builder = Builder::new(context, database.clone()); let mut builder = Builder::new(context, database.clone());
{ {
let filter_types_map_helper = FilterTypesMapHelper { context }; // infra
builder = register_jsonb_input_filter_to_schema_builder(builder);
builder.schema = builder.schema.register(
filter_types_map_helper.generate_filter_input(SUBSCRIBER_ID_FILTER_INFO.get().unwrap()),
);
builder.schema = register_jsonb_input_filter_to_dynamic_schema(builder.schema);
} }
{ {
builder.register_entity::<subscribers::Entity>( // domains
<subscribers::RelatedEntity as sea_orm::Iterable>::iter() builder = register_subscribers_to_schema_builder(builder);
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context)) builder = register_feeds_to_schema_builder(builder);
.collect(), builder = register_episodes_to_schema_builder(builder);
); builder = register_subscription_bangumi_to_schema_builder(builder);
builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn); builder = register_subscription_episode_to_schema_builder(builder);
builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn); builder = register_downloaders_to_schema_builder(builder);
} builder = register_downloads_to_schema_builder(builder);
builder = register_subscriptions_to_schema_builder(builder);
seaography::register_entities!( builder = register_credential3rd_to_schema_builder(builder);
builder, builder = register_subscriber_tasks_to_schema_builder(builder);
[ builder = register_bangumi_to_schema_builder(builder);
bangumi,
downloaders,
downloads,
episodes,
subscription_bangumi,
subscription_episode,
subscriptions,
subscriber_tasks,
credential_3rd
]
);
{
builder.register_enumeration::<downloads::DownloadStatus>();
builder.register_enumeration::<subscriptions::SubscriptionCategory>();
builder.register_enumeration::<downloaders::DownloaderCategory>();
builder.register_enumeration::<downloads::DownloadMime>();
builder.register_enumeration::<credential_3rd::Credential3rdType>();
}
{
builder = register_subscriptions_to_schema(builder);
} }
let schema = builder.schema_builder(); let schema = builder.schema_builder();
@@ -231,6 +107,7 @@ pub fn build_schema(
}; };
schema schema
.data(database) .data(database)
.data(app_ctx)
.finish() .finish()
.inspect_err(|e| tracing::error!(e = ?e)) .inspect_err(|e| tracing::error!(e = ?e))
} }
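
A minimal sketch of an assumed call site; the depth and complexity limits are illustrative only:

let schema = build_schema(app_ctx.clone(), Some(32), Some(256))?;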

View File

@@ -1,3 +0,0 @@
mod subscription;
pub use subscription::register_subscriptions_to_schema;

View File

@@ -1,226 +0,0 @@
use std::sync::Arc;
use async_graphql::dynamic::{
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
};
use seaography::Builder as SeaographyBuilder;
use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql;
use crate::{
app::AppContextTrait,
auth::AuthUserInfo,
models::subscriptions::{self, SubscriptionTrait},
task::SubscriberTaskPayload,
};
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
struct SyncOneSubscriptionFilterInput {
pub subscription_id: i32,
}
impl SyncOneSubscriptionFilterInput {
fn input_type_name() -> &'static str {
"SyncOneSubscriptionFilterInput"
}
fn arg_name() -> &'static str {
"filter"
}
fn generate_input_object() -> InputObject {
InputObject::new(Self::input_type_name())
.description("The input of the subscriptionSyncOne series of mutations")
.field(InputValue::new(
SyncOneSubscriptionFilterInputFieldEnum::SubscriptionId.as_str(),
TypeRef::named_nn(TypeRef::INT),
))
}
}
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct SyncOneSubscriptionInfo {
pub task_id: String,
}
impl SyncOneSubscriptionInfo {
fn object_type_name() -> &'static str {
"SyncOneSubscriptionInfo"
}
fn generate_output_object() -> Object {
Object::new(Self::object_type_name())
.description("The output of the subscriptionSyncOne series of mutations")
.field(Field::new(
SyncOneSubscriptionInfoFieldEnum::TaskId,
TypeRef::named_nn(TypeRef::STRING),
move |ctx| {
FieldFuture::new(async move {
let subscription_info = ctx.parent_value.try_downcast_ref::<Self>()?;
Ok(Some(async_graphql::Value::from(
subscription_info.task_id.as_str(),
)))
})
},
))
}
}
pub fn register_subscriptions_to_schema(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.schema = builder
.schema
.register(SyncOneSubscriptionFilterInput::generate_input_object());
builder.schema = builder
.schema
.register(SyncOneSubscriptionInfo::generate_output_object());
builder.queries.push(
Field::new(
"subscriptionSyncOneFeedsIncremental",
TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;
let filter_input: SyncOneSubscriptionFilterInput = ctx
.args
.get(SyncOneSubscriptionFilterInput::arg_name())
.unwrap()
.deserialize()?;
let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
filter_input.subscription_id,
subscriber_id,
)
.await?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
auth_user_info.subscriber_auth.subscriber_id,
SubscriberTaskPayload::SyncOneSubscriptionFeedsIncremental(
subscription.into(),
),
)
.await?;
Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
task_id: task_id.to_string(),
})))
})
},
)
.argument(InputValue::new(
SyncOneSubscriptionFilterInput::arg_name(),
TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
)),
);
builder.queries.push(
Field::new(
"subscriptionSyncOneFeedsFull",
TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;
let filter_input: SyncOneSubscriptionFilterInput = ctx
.args
.get(SyncOneSubscriptionFilterInput::arg_name())
.unwrap()
.deserialize()?;
let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
filter_input.subscription_id,
subscriber_id,
)
.await?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
auth_user_info.subscriber_auth.subscriber_id,
SubscriberTaskPayload::SyncOneSubscriptionFeedsFull(
subscription.into(),
),
)
.await?;
Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
task_id: task_id.to_string(),
})))
})
},
)
.argument(InputValue::new(
SyncOneSubscriptionFilterInput::arg_name(),
TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
)),
);
builder.mutations.push(
Field::new(
"subscriptionSyncOneSources",
TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;
let filter_input: SyncOneSubscriptionFilterInput = ctx
.args
.get(SyncOneSubscriptionFilterInput::arg_name())
.unwrap()
.deserialize()?;
let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
filter_input.subscription_id,
subscriber_id,
)
.await?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
auth_user_info.subscriber_auth.subscriber_id,
SubscriberTaskPayload::SyncOneSubscriptionSources(subscription.into()),
)
.await?;
Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
task_id: task_id.to_string(),
})))
})
},
)
.argument(InputValue::new(
SyncOneSubscriptionFilterInput::arg_name(),
TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
)),
);
builder
}

View File

@@ -21,11 +21,12 @@ pub mod errors;
pub mod extract; pub mod extract;
pub mod graphql; pub mod graphql;
pub mod logger; pub mod logger;
pub mod media;
pub mod message; pub mod message;
pub mod migrations; pub mod migrations;
pub mod models; pub mod models;
pub mod storage; pub mod storage;
pub mod task; pub mod task;
#[cfg(any(test, feature = "playground"))]
pub mod test_utils; pub mod test_utils;
pub mod utils;
pub mod web; pub mod web;

View File

@@ -5,4 +5,4 @@ pub mod service;
pub use core::{LogFormat, LogLevel, LogRotation}; pub use core::{LogFormat, LogLevel, LogRotation};
pub use config::{LoggerConfig, LoggerFileAppender}; pub use config::{LoggerConfig, LoggerFileAppender};
pub use service::LoggerService; pub use service::{LoggerService, MODULE_WHITELIST};

View File

@@ -13,7 +13,7 @@ use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
use crate::errors::RecorderResult; use crate::errors::RecorderResult;
// Function to initialize the logger based on the provided configuration // Function to initialize the logger based on the provided configuration
const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"]; pub const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sea_orm", "sea_query"];
// Keep nonblocking file appender work guard // Keep nonblocking file appender work guard
static NONBLOCKING_WORK_GUARD_KEEP: OnceLock<WorkerGuard> = OnceLock::new(); static NONBLOCKING_WORK_GUARD_KEEP: OnceLock<WorkerGuard> = OnceLock::new();

View File

@@ -0,0 +1,105 @@
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum AutoOptimizeImageFormat {
#[serde(rename = "image/webp")]
Webp,
#[serde(rename = "image/avif")]
Avif,
#[serde(rename = "image/jxl")]
Jxl,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeWebpOptions {
pub quality: Option<f32>,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeAvifOptions {
pub quality: Option<u8>,
pub speed: Option<u8>,
pub threads: Option<u8>,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeJxlOptions {
pub quality: Option<f32>,
pub speed: Option<u8>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(tag = "mime_type")]
pub enum EncodeImageOptions {
#[serde(rename = "image/webp")]
Webp(EncodeWebpOptions),
#[serde(rename = "image/avif")]
Avif(EncodeAvifOptions),
#[serde(rename = "image/jxl")]
Jxl(EncodeJxlOptions),
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MediaConfig {
#[serde(default = "default_webp_quality")]
pub webp_quality: f32,
#[serde(default = "default_avif_quality")]
pub avif_quality: u8,
#[serde(default = "default_avif_speed")]
pub avif_speed: u8,
#[serde(default = "default_avif_threads")]
pub avif_threads: u8,
#[serde(default = "default_jxl_quality")]
pub jxl_quality: f32,
#[serde(default = "default_jxl_speed")]
pub jxl_speed: u8,
#[serde(default = "default_auto_optimize_formats")]
pub auto_optimize_formats: Vec<AutoOptimizeImageFormat>,
}
impl Default for MediaConfig {
fn default() -> Self {
Self {
webp_quality: default_webp_quality(),
avif_quality: default_avif_quality(),
avif_speed: default_avif_speed(),
avif_threads: default_avif_threads(),
jxl_quality: default_jxl_quality(),
jxl_speed: default_jxl_speed(),
auto_optimize_formats: default_auto_optimize_formats(),
}
}
}
fn default_webp_quality() -> f32 {
80.0
}
fn default_avif_quality() -> u8 {
80
}
fn default_avif_speed() -> u8 {
6
}
fn default_avif_threads() -> u8 {
1
}
fn default_jxl_quality() -> f32 {
80.0
}
fn default_jxl_speed() -> u8 {
7
}
fn default_auto_optimize_formats() -> Vec<AutoOptimizeImageFormat> {
vec![
AutoOptimizeImageFormat::Webp,
        // AutoOptimizeImageFormat::Avif, // too slow for now
#[cfg(feature = "jxl")]
AutoOptimizeImageFormat::Jxl,
]
}
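
A minimal sketch of overriding a couple of the defaults above while keeping the rest via the Default impl:

let media_config = MediaConfig {
    webp_quality: 75.0,       // slightly smaller posters than the 80.0 default
    avif_speed: 8,            // trade quality for encode speed
    ..MediaConfig::default()
};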

View File

@@ -0,0 +1,8 @@
mod config;
mod service;
pub use config::{
AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
EncodeWebpOptions, MediaConfig,
};
pub use service::MediaService;

View File

@@ -0,0 +1,199 @@
use std::io::Cursor;
use bytes::Bytes;
use image::{GenericImageView, ImageEncoder, ImageReader, codecs::avif::AvifEncoder};
use quirks_path::Path;
use snafu::ResultExt;
use crate::{
errors::{RecorderError, RecorderResult},
media::{EncodeAvifOptions, EncodeJxlOptions, EncodeWebpOptions, MediaConfig},
};
#[derive(Debug)]
pub struct MediaService {
pub config: MediaConfig,
}
impl MediaService {
pub async fn from_config(config: MediaConfig) -> RecorderResult<Self> {
Ok(Self { config })
}
pub fn is_legacy_image_format(&self, ext: &str) -> bool {
matches!(ext, "jpeg" | "jpg" | "png")
}
pub async fn optimize_image_to_webp(
&self,
path: impl AsRef<Path>,
data: impl Into<Bytes>,
options: Option<EncodeWebpOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.and_then(|o| o.quality)
.unwrap_or(self.config.webp_quality);
let data = data.into();
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let img = image_reader.decode()?;
let (width, height) = (img.width(), img.height());
let color = img.color();
let webp_data = if color.has_alpha() {
let rgba_image = img.into_rgba8();
let encoder = webp::Encoder::from_rgba(&rgba_image, width, height);
encoder.encode(quality)
} else {
let rgba_image = img.into_rgb8();
let encoder = webp::Encoder::from_rgb(&rgba_image, width, height);
encoder.encode(quality)
};
Ok(Bytes::from(webp_data.to_vec()))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to webp: {}",
path.as_ref().display()
)
})?
}
pub async fn optimize_image_to_avif(
&self,
path: impl AsRef<Path>,
data: Bytes,
options: Option<EncodeAvifOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.as_ref()
.and_then(|o| o.quality)
.unwrap_or(self.config.avif_quality);
let speed = options
.as_ref()
.and_then(|o| o.speed)
.unwrap_or(self.config.avif_speed);
let threads = options
.as_ref()
.and_then(|o| o.threads)
.unwrap_or(self.config.avif_threads);
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
let mut buf = vec![];
{
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let img = image_reader.decode()?;
let (width, height) = img.dimensions();
let color_type = img.color();
let encoder = AvifEncoder::new_with_speed_quality(&mut buf, speed, quality)
.with_num_threads(Some(threads as usize));
encoder.write_image(img.as_bytes(), width, height, color_type.into())?;
}
Ok(Bytes::from(buf))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to avif: {}",
path.as_ref().display()
)
})?
}
#[cfg(feature = "jxl")]
pub async fn optimize_image_to_jxl(
&self,
path: impl AsRef<Path>,
data: Bytes,
options: Option<EncodeJxlOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.as_ref()
.and_then(|o| o.quality)
.unwrap_or(self.config.jxl_quality);
let speed = options
.as_ref()
.and_then(|o| o.speed)
.unwrap_or(self.config.jxl_speed);
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
use jpegxl_rs::encode::{ColorEncoding, EncoderResult, EncoderSpeed};
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let image = image_reader.decode()?;
let (width, height) = image.dimensions();
let color = image.color();
let has_alpha = color.has_alpha();
let libjxl_speed = {
match speed {
0 | 1 => EncoderSpeed::Lightning,
2 => EncoderSpeed::Thunder,
3 => EncoderSpeed::Falcon,
4 => EncoderSpeed::Cheetah,
5 => EncoderSpeed::Hare,
6 => EncoderSpeed::Wombat,
7 => EncoderSpeed::Squirrel,
8 => EncoderSpeed::Kitten,
_ => EncoderSpeed::Tortoise,
}
};
let mut encoder_builder = jpegxl_rs::encoder_builder()
.lossless(false)
.has_alpha(has_alpha)
.color_encoding(ColorEncoding::Srgb)
.speed(libjxl_speed)
.jpeg_quality(quality)
.build()?;
let buffer: EncoderResult<u8> = if color.has_alpha() {
let sample = image.into_rgba8();
encoder_builder.encode(&sample, width, height)?
} else {
let sample = image.into_rgb8();
encoder_builder.encode(&sample, width, height)?
};
Ok(Bytes::from(buffer.data))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to avif: {}",
path.as_ref().display()
)
})?
}
#[cfg(not(feature = "jxl"))]
pub async fn optimize_image_to_jxl(
&self,
_path: impl AsRef<Path>,
_data: Bytes,
_options: Option<EncodeJxlOptions>,
) -> RecorderResult<Bytes> {
Err(RecorderError::Whatever {
message: "jxl feature is not enabled".to_string(),
source: None.into(),
})
}
}
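
A minimal usage sketch; the file name and input bytes are illustrative, and the path is only used for error reporting:

use quirks_path::Path;

let media = MediaService::from_config(MediaConfig::default()).await?;
let poster_bytes: bytes::Bytes = std::fs::read("poster.jpg")?.into();
let webp = media
    .optimize_image_to_webp(Path::new("poster.jpg"), poster_bytes, None)
    .await?;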

View File

@@ -43,7 +43,7 @@ pub enum Bangumi {
MikanBangumiId, MikanBangumiId,
DisplayName, DisplayName,
SubscriberId, SubscriberId,
RawName, OriginName,
Season, Season,
SeasonRaw, SeasonRaw,
Fansub, Fansub,
@@ -51,9 +51,13 @@ pub enum Bangumi {
Filter, Filter,
RssLink, RssLink,
PosterLink, PosterLink,
OriginPosterLink,
/**
* @deprecated
*/
SavePath, SavePath,
Homepage, Homepage,
Extra, BangumiType,
} }
#[derive(DeriveIden)] #[derive(DeriveIden)]
@@ -70,22 +74,30 @@ pub enum Episodes {
Table, Table,
Id, Id,
MikanEpisodeId, MikanEpisodeId,
RawName, OriginName,
DisplayName, DisplayName,
BangumiId, BangumiId,
SubscriberId, SubscriberId,
DownloadId, DownloadId,
/**
* @deprecated
*/
SavePath, SavePath,
Resolution, Resolution,
Season, Season,
SeasonRaw, SeasonRaw,
Fansub, Fansub,
PosterLink, PosterLink,
OriginPosterLink,
EpisodeIndex, EpisodeIndex,
Homepage, Homepage,
Subtitle, Subtitle,
Source, Source,
Extra, EpisodeType,
EnclosureTorrentLink,
EnclosureMagnetLink,
EnclosurePubDate,
EnclosureContentLength,
} }
#[derive(DeriveIden)] #[derive(DeriveIden)]
@@ -101,7 +113,7 @@ pub enum SubscriptionEpisode {
pub enum Downloads { pub enum Downloads {
Table, Table,
Id, Id,
RawName, OriginName,
DisplayName, DisplayName,
SubscriberId, SubscriberId,
DownloaderId, DownloaderId,
@@ -148,6 +160,17 @@ pub enum Credential3rd {
UserAgent, UserAgent,
} }
#[derive(DeriveIden)]
pub enum Feeds {
Table,
Id,
Token,
FeedType,
FeedSource,
SubscriberId,
SubscriptionId,
}
macro_rules! create_postgres_enum_for_active_enum { macro_rules! create_postgres_enum_for_active_enum {
($manager: expr, $active_enum: expr, $($enum_value:expr),+) => { ($manager: expr, $active_enum: expr, $($enum_value:expr),+) => {
{ {

View File

@@ -96,7 +96,7 @@ impl MigrationTrait for Migration {
.col(text_null(Bangumi::MikanBangumiId)) .col(text_null(Bangumi::MikanBangumiId))
.col(integer(Bangumi::SubscriberId)) .col(integer(Bangumi::SubscriberId))
.col(text(Bangumi::DisplayName)) .col(text(Bangumi::DisplayName))
.col(text(Bangumi::RawName)) .col(text(Bangumi::OriginName))
.col(integer(Bangumi::Season)) .col(integer(Bangumi::Season))
.col(text_null(Bangumi::SeasonRaw)) .col(text_null(Bangumi::SeasonRaw))
.col(text_null(Bangumi::Fansub)) .col(text_null(Bangumi::Fansub))
@@ -104,9 +104,9 @@ impl MigrationTrait for Migration {
.col(json_binary_null(Bangumi::Filter)) .col(json_binary_null(Bangumi::Filter))
.col(text_null(Bangumi::RssLink)) .col(text_null(Bangumi::RssLink))
.col(text_null(Bangumi::PosterLink)) .col(text_null(Bangumi::PosterLink))
.col(text_null(Bangumi::OriginPosterLink))
.col(text_null(Bangumi::SavePath)) .col(text_null(Bangumi::SavePath))
.col(text_null(Bangumi::Homepage)) .col(text_null(Bangumi::Homepage))
.col(json_binary_null(Bangumi::Extra))
.foreign_key( .foreign_key(
ForeignKey::create() ForeignKey::create()
.name("fk_bangumi_subscriber_id") .name("fk_bangumi_subscriber_id")
@@ -209,7 +209,7 @@ impl MigrationTrait for Migration {
.create_index( .create_index(
Index::create() Index::create()
.if_not_exists() .if_not_exists()
.name("index_subscription_bangumi_subscriber_id") .name("idx_subscription_bangumi_subscriber_id")
.table(SubscriptionBangumi::Table) .table(SubscriptionBangumi::Table)
.col(SubscriptionBangumi::SubscriberId) .col(SubscriptionBangumi::SubscriberId)
.to_owned(), .to_owned(),
@@ -221,7 +221,7 @@ impl MigrationTrait for Migration {
table_auto_z(Episodes::Table) table_auto_z(Episodes::Table)
.col(pk_auto(Episodes::Id)) .col(pk_auto(Episodes::Id))
.col(text_null(Episodes::MikanEpisodeId)) .col(text_null(Episodes::MikanEpisodeId))
.col(text(Episodes::RawName)) .col(text(Episodes::OriginName))
.col(text(Episodes::DisplayName)) .col(text(Episodes::DisplayName))
.col(integer(Episodes::BangumiId)) .col(integer(Episodes::BangumiId))
.col(integer(Episodes::SubscriberId)) .col(integer(Episodes::SubscriberId))
@@ -231,11 +231,11 @@ impl MigrationTrait for Migration {
.col(text_null(Episodes::SeasonRaw)) .col(text_null(Episodes::SeasonRaw))
.col(text_null(Episodes::Fansub)) .col(text_null(Episodes::Fansub))
.col(text_null(Episodes::PosterLink)) .col(text_null(Episodes::PosterLink))
.col(text_null(Episodes::OriginPosterLink))
.col(integer(Episodes::EpisodeIndex)) .col(integer(Episodes::EpisodeIndex))
.col(text_null(Episodes::Homepage)) .col(text_null(Episodes::Homepage))
.col(text_null(Episodes::Subtitle)) .col(text_null(Episodes::Subtitle))
.col(text_null(Episodes::Source)) .col(text_null(Episodes::Source))
.col(json_binary_null(Episodes::Extra))
.foreign_key( .foreign_key(
ForeignKey::create() ForeignKey::create()
.name("fk_episodes_bangumi_id") .name("fk_episodes_bangumi_id")
@@ -252,6 +252,15 @@ impl MigrationTrait for Migration {
.on_update(ForeignKeyAction::Cascade) .on_update(ForeignKeyAction::Cascade)
.on_delete(ForeignKeyAction::Cascade), .on_delete(ForeignKeyAction::Cascade),
) )
.index(
Index::create()
.if_not_exists()
.name("idx_episodes_mikan_episode_id_subscriber_id")
.table(Episodes::Table)
.col(Episodes::MikanEpisodeId)
.col(Episodes::SubscriberId)
.unique(),
)
.to_owned(), .to_owned(),
) )
.await?; .await?;
@@ -267,19 +276,6 @@ impl MigrationTrait for Migration {
) )
.await?; .await?;
manager
.create_index(
Index::create()
.if_not_exists()
.name("idx_episodes_bangumi_id_mikan_episode_id")
.table(Episodes::Table)
.col(Episodes::BangumiId)
.col(Episodes::MikanEpisodeId)
.unique()
.to_owned(),
)
.await?;
manager manager
.create_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt) .create_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt)
.await?; .await?;
@@ -338,7 +334,7 @@ impl MigrationTrait for Migration {
.create_index( .create_index(
Index::create() Index::create()
.if_not_exists() .if_not_exists()
.name("index_subscription_episode_subscriber_id") .name("idx_subscription_episode_subscriber_id")
.table(SubscriptionEpisode::Table) .table(SubscriptionEpisode::Table)
.col(SubscriptionEpisode::SubscriberId) .col(SubscriptionEpisode::SubscriberId)
.to_owned(), .to_owned(),
@@ -353,7 +349,7 @@ impl MigrationTrait for Migration {
.drop_index( .drop_index(
Index::drop() Index::drop()
.if_exists() .if_exists()
.name("index_subscription_episode_subscriber_id") .name("idx_subscription_episode_subscriber_id")
.table(SubscriptionBangumi::Table) .table(SubscriptionBangumi::Table)
.to_owned(), .to_owned(),
) )
@@ -380,7 +376,7 @@ impl MigrationTrait for Migration {
.drop_index( .drop_index(
Index::drop() Index::drop()
.if_exists() .if_exists()
.name("index_subscription_bangumi_subscriber_id") .name("idx_subscription_bangumi_subscriber_id")
.table(SubscriptionBangumi::Table) .table(SubscriptionBangumi::Table)
.to_owned(), .to_owned(),
) )

View File

@@ -80,7 +80,7 @@ impl MigrationTrait for Migration {
.create_table( .create_table(
table_auto_z(Downloads::Table) table_auto_z(Downloads::Table)
.col(pk_auto(Downloads::Id)) .col(pk_auto(Downloads::Id))
.col(string(Downloads::RawName)) .col(string(Downloads::OriginName))
.col(string(Downloads::DisplayName)) .col(string(Downloads::DisplayName))
.col(integer(Downloads::SubscriberId)) .col(integer(Downloads::SubscriberId))
.col(integer(Downloads::DownloaderId)) .col(integer(Downloads::DownloaderId))
@@ -95,8 +95,8 @@ impl MigrationTrait for Migration {
DownloadMimeEnum, DownloadMimeEnum,
DownloadMime::iden_values(), DownloadMime::iden_values(),
)) ))
.col(big_unsigned(Downloads::AllSize)) .col(big_integer(Downloads::AllSize))
.col(big_unsigned(Downloads::CurrSize)) .col(big_integer(Downloads::CurrSize))
.col(text(Downloads::Url)) .col(text(Downloads::Url))
.col(text_null(Downloads::Homepage)) .col(text_null(Downloads::Homepage))
.col(text_null(Downloads::SavePath)) .col(text_null(Downloads::SavePath))

View File

@@ -28,7 +28,11 @@ impl MigrationTrait for Migration {
table_auto_z(Credential3rd::Table) table_auto_z(Credential3rd::Table)
.col(pk_auto(Credential3rd::Id)) .col(pk_auto(Credential3rd::Id))
.col(integer(Credential3rd::SubscriberId)) .col(integer(Credential3rd::SubscriberId))
.col(string(Credential3rd::CredentialType)) .col(enumeration(
Credential3rd::CredentialType,
Credential3rdTypeEnum,
Credential3rdType::iden_values(),
))
.col(string_null(Credential3rd::Cookies)) .col(string_null(Credential3rd::Cookies))
.col(string_null(Credential3rd::Username)) .col(string_null(Credential3rd::Username))
.col(string_null(Credential3rd::Password)) .col(string_null(Credential3rd::Password))

View File

@@ -12,13 +12,13 @@ impl MigrationTrait for Migration {
let db = manager.get_connection(); let db = manager.get_connection();
db.execute_unprepared(&format!( db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_task AS r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT SELECT
job, job,
job_type, job_type,
status, status,
(job->'subscriber_id')::integer AS subscriber_id, (job ->> 'subscriber_id'::text)::integer AS subscriber_id,
(job->'task_type')::text AS task_type, job ->> 'task_type'::text AS task_type,
id, id,
attempts, attempts,
max_attempts, max_attempts,
@@ -37,10 +37,10 @@ AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
db.execute_unprepared(&format!( db.execute_unprepared(&format!(
r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscriber_id r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscriber_id
ON apalis.jobs ((job -> 'subscriber_id')) ON apalis.jobs (((job -> 'subscriber_id')::integer))
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}' WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")') AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"# AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
)) ))
.await?; .await?;
@@ -56,7 +56,7 @@ AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
) )
.await?; .await?;
db.execute_unprepared("DROP VIEW IF EXISTS subscriber_task") db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
.await?; .await?;
Ok(()) Ok(())

View File

@@ -0,0 +1,95 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{
CustomSchemaManagerExt, Feeds, GeneralIds, Subscribers, Subscriptions, table_auto_z,
},
models::feeds::{FeedSource, FeedSourceEnum, FeedType, FeedTypeEnum},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
create_postgres_enum_for_active_enum!(manager, FeedTypeEnum, FeedType::Rss).await?;
create_postgres_enum_for_active_enum!(
manager,
FeedSourceEnum,
FeedSource::SubscriptionEpisode
)
.await?;
manager
.create_table(
table_auto_z(Feeds::Table)
.col(pk_auto(Feeds::Id))
.col(text(Feeds::Token))
.col(enumeration(
Feeds::FeedType,
FeedTypeEnum,
FeedType::iden_values(),
))
.col(
enumeration(Feeds::FeedSource, FeedSourceEnum, FeedSource::iden_values())
.not_null(),
)
.col(integer_null(Feeds::SubscriberId))
.col(integer_null(Feeds::SubscriptionId))
.index(
Index::create()
.if_not_exists()
.name("idx_feeds_token")
.table(Feeds::Table)
.col(Feeds::Token)
.unique(),
)
.foreign_key(
ForeignKey::create()
.name("fk_feeds_subscriber_id")
.from(Feeds::Table, Feeds::SubscriberId)
.to(Subscribers::Table, Subscribers::Id)
.on_update(ForeignKeyAction::Cascade)
.on_delete(ForeignKeyAction::Cascade),
)
.foreign_key(
ForeignKey::create()
.name("fk_feeds_subscription_id")
.from(Feeds::Table, Feeds::SubscriptionId)
.to(Subscriptions::Table, Subscriptions::Id)
.on_update(ForeignKeyAction::Cascade)
.on_delete(ForeignKeyAction::Cascade),
)
.to_owned(),
)
.await?;
manager
.create_postgres_auto_update_ts_trigger_for_col(Feeds::Table, GeneralIds::UpdatedAt)
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_postgres_auto_update_ts_trigger_for_col(Feeds::Table, GeneralIds::UpdatedAt)
.await?;
manager
.drop_table(Table::drop().if_exists().table(Feeds::Table).to_owned())
.await?;
manager
.drop_postgres_enum_for_active_enum(FeedTypeEnum)
.await?;
manager
.drop_postgres_enum_for_active_enum(FeedSourceEnum)
.await?;
Ok(())
}
}

View File

@@ -0,0 +1,133 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{Bangumi, CustomSchemaManagerExt, Episodes},
models::{
bangumi::{BangumiType, BangumiTypeEnum},
episodes::{EpisodeType, EpisodeTypeEnum},
},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan).await?;
{
create_postgres_enum_for_active_enum!(manager, BangumiTypeEnum, BangumiType::Mikan)
.await?;
manager
.alter_table(
Table::alter()
.table(Bangumi::Table)
.add_column_if_not_exists(enumeration_null(
Bangumi::BangumiType,
BangumiTypeEnum,
BangumiType::iden_values(),
))
.drop_column(Bangumi::SavePath)
.to_owned(),
)
.await?;
manager
.exec_stmt(
UpdateStatement::new()
.table(Bangumi::Table)
.value(
Bangumi::BangumiType,
BangumiType::Mikan.as_enum(BangumiTypeEnum),
)
.and_where(Expr::col(Bangumi::BangumiType).is_null())
.and_where(Expr::col(Bangumi::MikanBangumiId).is_not_null())
.to_owned(),
)
.await?;
manager
.alter_table(
Table::alter()
.table(Bangumi::Table)
.modify_column(enumeration(
Bangumi::BangumiType,
BangumiTypeEnum,
BangumiType::iden_values(),
))
.to_owned(),
)
.await?;
}
{
create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan)
.await?;
manager
.alter_table(
Table::alter()
.table(Episodes::Table)
.add_column_if_not_exists(enumeration_null(
Episodes::EpisodeType,
EpisodeTypeEnum,
EpisodeType::enum_type_name(),
))
.add_column_if_not_exists(text_null(Episodes::EnclosureMagnetLink))
.add_column_if_not_exists(text_null(Episodes::EnclosureTorrentLink))
.add_column_if_not_exists(timestamp_with_time_zone_null(
Episodes::EnclosurePubDate,
))
.add_column_if_not_exists(big_integer_null(
Episodes::EnclosureContentLength,
))
.drop_column(Episodes::SavePath)
.to_owned(),
)
.await?;
manager
.exec_stmt(
UpdateStatement::new()
.table(Episodes::Table)
.value(
Episodes::EpisodeType,
EpisodeType::Mikan.as_enum(EpisodeTypeEnum),
)
.and_where(Expr::col(Episodes::EpisodeType).is_null())
.and_where(Expr::col(Episodes::MikanEpisodeId).is_not_null())
.to_owned(),
)
.await?;
manager
.alter_table(
Table::alter()
.table(Episodes::Table)
.modify_column(enumeration(
Episodes::EpisodeType,
EpisodeTypeEnum,
EpisodeType::enum_type_name(),
))
.to_owned(),
)
.await?;
}
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_postgres_enum_for_active_enum(BangumiTypeEnum)
.await?;
manager
.drop_postgres_enum_for_active_enum(EpisodeTypeEnum)
.await?;
Ok(())
}
}

View File

@@ -8,6 +8,8 @@ pub mod m20240224_082543_add_downloads;
pub mod m20241231_000001_auth;
pub mod m20250501_021523_credential_3rd;
pub mod m20250520_021135_subscriber_tasks;
+pub mod m20250622_015618_feeds;
+pub mod m20250622_020819_bangumi_and_episode_type;
pub struct Migrator;
@@ -20,6 +22,8 @@ impl MigratorTrait for Migrator {
Box::new(m20241231_000001_auth::Migration),
Box::new(m20250501_021523_credential_3rd::Migration),
Box::new(m20250520_021135_subscriber_tasks::Migration),
+Box::new(m20250622_015618_feeds::Migration),
+Box::new(m20250622_020819_bangumi_and_episode_type::Migration),
]
}
}

View File

@@ -99,7 +99,9 @@ impl Model {
..Default::default()
};
-let new_item: Model = new_item.save(&txn).await?.try_into()?;
+let new_item: Model = new_item.insert(&txn).await?;
+
+txn.commit().await?;
Ok(new_item)
}

View File

@@ -17,7 +17,7 @@ use crate::{
MikanBangumiHash, MikanBangumiMeta, build_mikan_bangumi_subscription_rss_url,
scrape_mikan_poster_meta_from_image_url,
},
-rawname::parse_episode_meta_from_raw_name,
+origin::{BangumiComps, OriginCompTrait},
},
};
@@ -29,19 +29,14 @@ pub struct BangumiFilter {
pub group: Option<Vec<String>>,
}
-#[derive(
-Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
-)]
-pub struct BangumiExtra {
-pub name_zh: Option<String>,
-pub s_name_zh: Option<String>,
-pub name_en: Option<String>,
-pub s_name_en: Option<String>,
-pub name_jp: Option<String>,
-pub s_name_jp: Option<String>,
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
+#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "bangumi_type")]
+pub enum BangumiType {
+#[sea_orm(string_value = "mikan")]
+Mikan,
}
-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "bangumi")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
@@ -51,9 +46,10 @@ pub struct Model {
#[sea_orm(primary_key)]
pub id: i32,
pub mikan_bangumi_id: Option<String>,
+pub bangumi_type: BangumiType,
pub subscriber_id: i32,
pub display_name: String,
-pub raw_name: String,
+pub origin_name: String,
pub season: i32,
pub season_raw: Option<String>,
pub fansub: Option<String>,
@@ -61,9 +57,8 @@ pub struct Model {
pub filter: Option<BangumiFilter>,
pub rss_link: Option<String>,
pub poster_link: Option<String>,
-pub save_path: Option<String>,
+pub origin_poster_link: Option<String>,
pub homepage: Option<String>,
-pub extra: Option<BangumiExtra>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -133,10 +128,13 @@ impl ActiveModel {
_subscription_id: i32,
) -> RecorderResult<Self> {
let mikan_client = ctx.mikan();
-let storage_service = ctx.storage();
let mikan_base_url = mikan_client.base_url();
-let rawname_meta = parse_episode_meta_from_raw_name(&meta.bangumi_title)?;
+let season_comp = BangumiComps::parse_comp(&meta.bangumi_title)
+.ok()
+.map(|(_, s)| s)
+.and_then(|s| s.season);
+let season_index = season_comp.as_ref().map(|s| s.num).unwrap_or(1);
+let season_raw = season_comp.map(|s| s.source.to_string());
let rss_url = build_mikan_bangumi_subscription_rss_url(
@@ -144,14 +142,9 @@ impl ActiveModel {
Some(&meta.mikan_fansub_id),
);
-let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src {
-let poster_meta = scrape_mikan_poster_meta_from_image_url(
-mikan_client,
-storage_service,
-origin_poster_src,
-subscriber_id,
-)
-.await?;
+let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src.clone() {
+let poster_meta =
+scrape_mikan_poster_meta_from_image_url(ctx, origin_poster_src).await?;
poster_meta.poster_src
} else {
None
@@ -162,21 +155,15 @@ impl ActiveModel {
mikan_fansub_id: ActiveValue::Set(Some(meta.mikan_fansub_id)),
subscriber_id: ActiveValue::Set(subscriber_id),
display_name: ActiveValue::Set(meta.bangumi_title.clone()),
-raw_name: ActiveValue::Set(meta.bangumi_title),
-season: ActiveValue::Set(rawname_meta.season),
-season_raw: ActiveValue::Set(rawname_meta.season_raw),
+origin_name: ActiveValue::Set(meta.bangumi_title),
+season: ActiveValue::Set(season_index),
+season_raw: ActiveValue::Set(season_raw),
fansub: ActiveValue::Set(Some(meta.fansub)),
poster_link: ActiveValue::Set(poster_link),
+origin_poster_link: ActiveValue::Set(meta.origin_poster_src.map(|src| src.to_string())),
homepage: ActiveValue::Set(Some(meta.homepage.to_string())),
rss_link: ActiveValue::Set(Some(rss_url.to_string())),
-extra: ActiveValue::Set(Some(BangumiExtra {
-name_zh: rawname_meta.name_zh,
-name_en: rawname_meta.name_en,
-name_jp: rawname_meta.name_jp,
-s_name_en: rawname_meta.name_en_no_season,
-s_name_jp: rawname_meta.name_jp_no_season,
-s_name_zh: rawname_meta.name_zh_no_season,
-})),
+bangumi_type: ActiveValue::Set(BangumiType::Mikan),
..Default::default()
})
}
@@ -218,15 +205,16 @@ impl Model {
Expr::col((
subscription_bangumi_alias.clone(),
subscription_bangumi::Column::SubscriptionId,
-)),
+))
+.is_not_null(),
"is_subscribed",
)
.join_as_rev(
JoinType::LeftJoin,
subscription_bangumi::Relation::Bangumi
.def()
-.on_condition(move |_left, right| {
-Expr::col((right, subscription_bangumi::Column::SubscriptionId))
+.on_condition(move |left, _right| {
+Expr::col((left, subscription_bangumi::Column::SubscriptionId))
.eq(subscription_id)
.into_condition()
}),
@@ -249,9 +237,10 @@ impl Model {
Column::SubscriberId,
])
.update_columns([
-Column::RawName,
+Column::OriginName,
Column::Fansub,
Column::PosterLink,
+Column::OriginPosterLink,
Column::Season,
Column::SeasonRaw,
Column::RssLink,
@@ -269,8 +258,15 @@ impl Model {
subscriber_id: ActiveValue::Set(subscriber_id),
..Default::default()
})
-.on_conflict_do_nothing()
-.exec(db)
+.on_conflict(
+OnConflict::columns([
+subscription_bangumi::Column::SubscriptionId,
+subscription_bangumi::Column::BangumiId,
+])
+.do_nothing()
+.to_owned(),
+)
+.exec_without_returning(db)
.await?;
}
Ok(new_bangumi_model)

View File

@@ -1,5 +1,3 @@
-use std::sync::Arc;
use async_trait::async_trait;
use sea_orm::{ActiveValue, prelude::*};
use serde::{Deserialize, Serialize};
@@ -18,6 +16,7 @@ use crate::{
db_type = "Enum",
enum_name = "credential_3rd_type"
)]
+#[serde(rename_all = "snake_case")]
pub enum Credential3rdType {
#[sea_orm(string_value = "mikan")]
Mikan,
@@ -78,7 +77,7 @@ pub enum RelatedEntity {
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {
-pub async fn try_encrypt(mut self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Self> {
+pub async fn try_encrypt(mut self, ctx: &dyn AppContextTrait) -> RecorderResult<Self> {
let crypto = ctx.crypto();
if let ActiveValue::Set(Some(username)) = self.username {
@@ -101,19 +100,24 @@ impl ActiveModel {
}
impl Model {
-pub async fn find_by_id(
-ctx: Arc<dyn AppContextTrait>,
+pub async fn find_by_id_and_subscriber_id(
+ctx: &dyn AppContextTrait,
id: i32,
+subscriber_id: i32,
) -> RecorderResult<Option<Self>> {
let db = ctx.db();
-let credential = Entity::find_by_id(id).one(db).await?;
+let credential = Entity::find()
+.filter(Column::Id.eq(id))
+.filter(Column::SubscriberId.eq(subscriber_id))
+.one(db)
+.await?;
Ok(credential)
}
pub fn try_into_userpass_credential(
self,
-ctx: Arc<dyn AppContextTrait>,
+ctx: &dyn AppContextTrait,
) -> RecorderResult<UserPassCredential> {
let crypto = ctx.crypto();
let username_enc = self
@@ -148,4 +152,31 @@ impl Model {
user_agent: self.user_agent,
})
}
+pub async fn check_available(self, ctx: &dyn AppContextTrait) -> RecorderResult<bool> {
+let credential_id = self.id;
+let subscriber_id = self.subscriber_id;
+match self.credential_type {
+Credential3rdType::Mikan => {
+let mikan_client = {
+let userpass_credential: UserPassCredential =
+self.try_into_userpass_credential(ctx)?;
+ctx.mikan()
+.fork_with_userpass_credential(userpass_credential)
+.await?
+};
+let mut has_login = mikan_client.has_login().await?;
+if !has_login {
+mikan_client.login().await?;
+has_login = true;
+}
+if has_login {
+mikan_client
+.sync_credential_cookies(ctx, credential_id, subscriber_id)
+.await?;
+}
+Ok(has_login)
+}
+}
+}
}
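A short usage sketch of the new scoped lookup plus check_available, assuming a ctx: &dyn AppContextTrait is already in scope; the helper name verify_credential is illustrative and not part of the codebase.
use crate::{
    app::AppContextTrait,
    errors::{RecorderError, RecorderResult},
    models::credential_3rd,
};

async fn verify_credential(
    ctx: &dyn AppContextTrait,
    credential_id: i32,
    subscriber_id: i32,
) -> RecorderResult<bool> {
    let credential =
        credential_3rd::Model::find_by_id_and_subscriber_id(ctx, credential_id, subscriber_id)
            .await?
            .ok_or_else(|| RecorderError::ModelEntityNotFound {
                entity: "Credential3rd".into(),
            })?;
    // Logs in against Mikan when needed and syncs cookies back onto the stored credential.
    credential.check_available(ctx).await
}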

View File

@@ -44,7 +44,7 @@ pub struct Model {
pub updated_at: DateTimeUtc,
#[sea_orm(primary_key)]
pub id: i32,
-pub raw_name: String,
+pub origin_name: String,
pub display_name: String,
pub downloader_id: i32,
pub episode_id: i32,
@@ -52,8 +52,8 @@ pub struct Model {
pub status: DownloadStatus,
pub mime: DownloadMime,
pub url: String,
-pub all_size: Option<u64>,
-pub curr_size: Option<u64>,
+pub all_size: Option<i64>,
+pub curr_size: Option<i64>,
pub homepage: Option<String>,
pub save_path: Option<String>,
}

View File

@@ -1,7 +1,6 @@
use async_trait::async_trait;
use sea_orm::{
-ActiveValue, FromJsonQueryResult, IntoSimpleExpr, QuerySelect, entity::prelude::*,
-sea_query::OnConflict,
+ActiveValue, IntoSimpleExpr, QuerySelect, entity::prelude::*, sea_query::OnConflict,
};
use serde::{Deserialize, Serialize};
@@ -10,19 +9,17 @@ use crate::{
app::AppContextTrait,
errors::RecorderResult,
extract::{
+bittorrent::EpisodeEnclosureMeta,
mikan::{MikanEpisodeHash, MikanEpisodeMeta, build_mikan_episode_homepage_url},
-rawname::parse_episode_meta_from_raw_name,
+origin::{OriginCompTrait, OriginNameRoot},
},
};
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, Default)]
-pub struct EpisodeExtra {
-pub name_zh: Option<String>,
-pub s_name_zh: Option<String>,
-pub name_en: Option<String>,
-pub s_name_en: Option<String>,
-pub name_jp: Option<String>,
-pub s_name_jp: Option<String>,
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
+#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "episode_type")]
+pub enum EpisodeType {
+#[sea_orm(string_value = "mikan")]
+Mikan,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
@@ -36,21 +33,25 @@ pub struct Model {
pub id: i32,
#[sea_orm(indexed)]
pub mikan_episode_id: Option<String>,
-pub raw_name: String,
+pub enclosure_torrent_link: Option<String>,
+pub enclosure_magnet_link: Option<String>,
+pub enclosure_pub_date: Option<DateTimeUtc>,
+pub enclosure_content_length: Option<i64>,
+pub episode_type: EpisodeType,
+pub origin_name: String,
pub display_name: String,
pub bangumi_id: i32,
pub subscriber_id: i32,
-pub save_path: Option<String>,
pub resolution: Option<String>,
pub season: i32,
pub season_raw: Option<String>,
pub fansub: Option<String>,
pub poster_link: Option<String>,
+pub origin_poster_link: Option<String>,
pub episode_index: i32,
pub homepage: Option<String>,
pub subtitle: Option<String>,
pub source: Option<String>,
-pub extra: EpisodeExtra,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -133,44 +134,61 @@ impl ActiveModel {
ctx: &dyn AppContextTrait,
bangumi: &bangumi::Model,
episode: MikanEpisodeMeta,
+enclosure_meta: EpisodeEnclosureMeta,
) -> RecorderResult<Self> {
let mikan_base_url = ctx.mikan().base_url().clone();
-let rawname_meta = parse_episode_meta_from_raw_name(&episode.episode_title)?;
+let episode_extention_meta = OriginNameRoot::parse_comp(&episode.episode_title)
+.inspect_err(|err| {
+tracing::error!(
+err = ?err,
+episode_title = ?episode.episode_title,
+"Failed to parse episode extension meta from episode title, skip"
+);
+})
+.map(|(_, e)| e.into_meta())
+.ok();
let homepage = build_mikan_episode_homepage_url(mikan_base_url, &episode.mikan_episode_id);
-Ok(Self {
+let mut episode_active_model = Self {
mikan_episode_id: ActiveValue::Set(Some(episode.mikan_episode_id)),
-raw_name: ActiveValue::Set(episode.episode_title.clone()),
+origin_name: ActiveValue::Set(episode.episode_title.clone()),
display_name: ActiveValue::Set(episode.episode_title.clone()),
bangumi_id: ActiveValue::Set(bangumi.id),
subscriber_id: ActiveValue::Set(bangumi.subscriber_id),
-resolution: ActiveValue::Set(rawname_meta.resolution),
-season: ActiveValue::Set(if rawname_meta.season > 0 {
-rawname_meta.season
-} else {
-bangumi.season
-}),
-season_raw: ActiveValue::Set(
-rawname_meta
-.season_raw
-.or_else(|| bangumi.season_raw.clone()),
-),
-fansub: ActiveValue::Set(rawname_meta.fansub.or_else(|| bangumi.fansub.clone())),
-poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
-episode_index: ActiveValue::Set(rawname_meta.episode_index),
homepage: ActiveValue::Set(Some(homepage.to_string())),
-subtitle: ActiveValue::Set(rawname_meta.subtitle),
-source: ActiveValue::Set(rawname_meta.source),
-extra: ActiveValue::Set(EpisodeExtra {
-name_zh: rawname_meta.name_zh,
-name_en: rawname_meta.name_en,
-name_jp: rawname_meta.name_jp,
-s_name_en: rawname_meta.name_en_no_season,
-s_name_jp: rawname_meta.name_jp_no_season,
-s_name_zh: rawname_meta.name_zh_no_season,
-}),
+season_raw: ActiveValue::Set(bangumi.season_raw.clone()),
+season: ActiveValue::Set(bangumi.season),
+fansub: ActiveValue::Set(bangumi.fansub.clone()),
+poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
+origin_poster_link: ActiveValue::Set(bangumi.origin_poster_link.clone()),
+episode_index: ActiveValue::Set(0),
+enclosure_torrent_link: ActiveValue::Set(enclosure_meta.torrent_link),
+enclosure_magnet_link: ActiveValue::Set(enclosure_meta.magnet_link),
+enclosure_pub_date: ActiveValue::Set(enclosure_meta.pub_date),
+enclosure_content_length: ActiveValue::Set(enclosure_meta.content_length),
+episode_type: ActiveValue::Set(EpisodeType::Mikan),
..Default::default()
-})
+};
+if let Some(episode_extention_meta) = episode_extention_meta {
+episode_active_model.episode_index =
+ActiveValue::Set(episode_extention_meta.episode_index);
+episode_active_model.subtitle = ActiveValue::Set(episode_extention_meta.subtitle);
+episode_active_model.source = ActiveValue::Set(episode_extention_meta.source);
+episode_active_model.resolution = ActiveValue::Set(episode_extention_meta.resolution);
+if episode_extention_meta.season > 0 {
+episode_active_model.season = ActiveValue::Set(episode_extention_meta.season);
+}
+if episode_extention_meta.season_raw.is_some() {
+episode_active_model.season_raw =
+ActiveValue::Set(episode_extention_meta.season_raw);
+}
+if episode_extention_meta.fansub.is_some() {
+episode_active_model.fansub = ActiveValue::Set(episode_extention_meta.fansub);
+}
+}
+Ok(episode_active_model)
}
}
@@ -216,21 +234,46 @@ impl Model {
pub async fn add_mikan_episodes_for_subscription(
ctx: &dyn AppContextTrait,
-creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta)>,
+creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta, EpisodeEnclosureMeta)>,
subscriber_id: i32,
subscription_id: i32,
) -> RecorderResult<()> {
let db = ctx.db();
let new_episode_active_modes: Vec<ActiveModel> = creations
-.map(|(bangumi, episode_meta)| {
-ActiveModel::from_mikan_bangumi_and_episode_meta(ctx, bangumi, episode_meta)
+.map(|(bangumi, episode_meta, enclosure_meta)| {
+ActiveModel::from_mikan_bangumi_and_episode_meta(
+ctx,
+bangumi,
+episode_meta,
+enclosure_meta,
+)
})
.collect::<Result<_, _>>()?;
+if new_episode_active_modes.is_empty() {
+return Ok(());
+}
let new_episode_ids = Entity::insert_many(new_episode_active_modes)
.on_conflict(
OnConflict::columns([Column::MikanEpisodeId, Column::SubscriberId])
-.update_columns([Column::RawName, Column::PosterLink, Column::Homepage])
+.update_columns([
+Column::OriginName,
+Column::PosterLink,
+Column::OriginPosterLink,
+Column::Homepage,
+Column::EnclosureContentLength,
+Column::EnclosurePubDate,
+Column::EnclosureTorrentLink,
+Column::EnclosureMagnetLink,
+Column::EpisodeIndex,
+Column::Subtitle,
+Column::Source,
+Column::Resolution,
+Column::Season,
+Column::SeasonRaw,
+Column::Fansub,
+])
.to_owned(),
)
.exec_with_returning_columns(db, [Column::Id])
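For context, a sketch of the new call shape of add_mikan_episodes_for_subscription; only the tuple layout comes from the signature above, while the helper name and the scraped vector are illustrative assumptions about the caller.
use crate::{
    app::AppContextTrait,
    errors::RecorderResult,
    extract::{bittorrent::EpisodeEnclosureMeta, mikan::MikanEpisodeMeta},
    models::{bangumi, episodes},
};

async fn store_scraped_episodes(
    ctx: &dyn AppContextTrait,
    bangumi_model: &bangumi::Model,
    scraped: Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)>,
    subscriber_id: i32,
    subscription_id: i32,
) -> RecorderResult<()> {
    // Each item pairs the parsed episode metadata with its enclosure info;
    // conflicts on (mikan_episode_id, subscriber_id) are upserted.
    episodes::Model::add_mikan_episodes_for_subscription(
        ctx,
        scraped
            .into_iter()
            .map(|(meta, enclosure)| (bangumi_model, meta, enclosure)),
        subscriber_id,
        subscription_id,
    )
    .await
}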

View File

@@ -0,0 +1,133 @@
mod registry;
mod rss;
mod subscription_episodes_feed;
use ::rss::Channel;
use async_trait::async_trait;
pub use registry::Feed;
pub use rss::{RssFeedItemTrait, RssFeedTrait};
use sea_orm::{ActiveValue, DeriveEntityModel, entity::prelude::*};
use serde::{Deserialize, Serialize};
pub use subscription_episodes_feed::SubscriptionEpisodesFeed;
use url::Url;
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
};
#[derive(
Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "feed_type")]
#[serde(rename_all = "snake_case")]
pub enum FeedType {
#[sea_orm(string_value = "rss")]
Rss,
}
#[derive(
Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "feed_source")]
#[serde(rename_all = "snake_case")]
pub enum FeedSource {
#[sea_orm(string_value = "subscription_episode")]
SubscriptionEpisode,
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "feeds")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTimeUtc,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTimeUtc,
#[sea_orm(primary_key)]
pub id: i32,
#[sea_orm(indexed)]
pub token: String,
#[sea_orm(indexed)]
pub feed_type: FeedType,
#[sea_orm(indexed)]
pub feed_source: FeedSource,
pub subscriber_id: Option<i32>,
pub subscription_id: Option<i32>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
)]
Subscription,
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
)]
Subscriber,
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::subscriptions::Entity")]
Subscription,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
if insert && let ActiveValue::NotSet = self.token {
let token = nanoid::nanoid!(10);
self.token = ActiveValue::Set(token);
}
Ok(self)
}
}
impl Model {
pub async fn find_rss_feed_by_token(
ctx: &dyn AppContextTrait,
token: &str,
api_base: &Url,
) -> RecorderResult<Channel> {
let db = ctx.db();
let feed_model = Entity::find()
.filter(Column::Token.eq(token))
.filter(Column::FeedType.eq(FeedType::Rss))
.one(db)
.await?
.ok_or(RecorderError::ModelEntityNotFound {
entity: "Feed".into(),
})?;
let feed = Feed::from_model(ctx, feed_model).await?;
feed.into_rss_channel(ctx, api_base)
}
}
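A minimal sketch of serving the feed looked up by token as RSS XML; it assumes the caller already holds a ctx and the public api_base URL from configuration, with the actual HTTP handler wiring living elsewhere.
use url::Url;

use crate::{app::AppContextTrait, errors::RecorderResult, models::feeds};

async fn render_rss(
    ctx: &dyn AppContextTrait,
    token: &str,
    api_base: &Url,
) -> RecorderResult<String> {
    let channel = feeds::Model::find_rss_feed_by_token(ctx, token, api_base).await?;
    // rss::Channel serializes itself to XML via ToString.
    Ok(channel.to_string())
}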

View File

@@ -0,0 +1,65 @@
use rss::Channel;
use sea_orm::{ColumnTrait, EntityTrait, JoinType, QueryFilter, QuerySelect, RelationTrait};
use url::Url;
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
models::{
episodes,
feeds::{self, FeedSource, RssFeedTrait, SubscriptionEpisodesFeed},
subscription_episode, subscriptions,
},
};
pub enum Feed {
SubscritpionEpisodes(SubscriptionEpisodesFeed),
}
impl Feed {
pub async fn from_model(ctx: &dyn AppContextTrait, m: feeds::Model) -> RecorderResult<Self> {
match m.feed_source {
FeedSource::SubscriptionEpisode => {
let db = ctx.db();
let (subscription, episodes) = if let Some(subscription_id) = m.subscription_id
&& let Some(subscription) = subscriptions::Entity::find()
.filter(subscriptions::Column::Id.eq(subscription_id))
.one(db)
.await?
{
let episodes = episodes::Entity::find()
.join(
JoinType::InnerJoin,
episodes::Relation::SubscriptionEpisode.def(),
)
.join(
JoinType::InnerJoin,
subscription_episode::Relation::Subscription.def(),
)
.filter(subscriptions::Column::Id.eq(subscription_id))
.all(db)
.await?;
(subscription, episodes)
} else {
return Err(RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
});
};
Ok(Feed::SubscritpionEpisodes(
SubscriptionEpisodesFeed::from_model(m, subscription, episodes),
))
}
}
}
pub fn into_rss_channel(
self,
ctx: &dyn AppContextTrait,
api_base: &Url,
) -> RecorderResult<Channel> {
match self {
Self::SubscritpionEpisodes(feed) => feed.into_channel(ctx, api_base),
}
}
}

View File

@@ -0,0 +1,142 @@
use std::borrow::Cow;
use chrono::{DateTime, Utc};
use downloader::bittorrent::BITTORRENT_MIME_TYPE;
use maplit::btreemap;
use rss::{
Channel, ChannelBuilder, EnclosureBuilder, GuidBuilder, Item, ItemBuilder,
extension::{ExtensionBuilder, ExtensionMap},
};
use url::Url;
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
};
pub trait RssFeedItemTrait: Sized {
fn get_guid_value(&self) -> Cow<'_, str>;
fn get_title(&self) -> Cow<'_, str>;
fn get_description(&self) -> Cow<'_, str>;
fn get_link(&self, ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>>;
fn get_enclosure_mime(&self) -> Option<Cow<'_, str>>;
fn get_enclosure_link(&self, ctx: &dyn AppContextTrait, api_base: &Url)
-> Option<Cow<'_, str>>;
fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>>;
fn get_enclosure_content_length(&self) -> Option<i64>;
fn into_item(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Item> {
let enclosure_mime_type =
self.get_enclosure_mime()
.ok_or_else(|| RecorderError::MikanRssInvalidFieldError {
field: "enclosure_mime_type".into(),
source: None.into(),
})?;
let enclosure_link = self.get_enclosure_link(ctx, api_base).ok_or_else(|| {
RecorderError::MikanRssInvalidFieldError {
field: "enclosure_link".into(),
source: None.into(),
}
})?;
let enclosure_content_length = self.get_enclosure_content_length().ok_or_else(|| {
RecorderError::MikanRssInvalidFieldError {
field: "enclosure_content_length".into(),
source: None.into(),
}
})?;
let enclosure_pub_date = self.get_enclosure_pub_date();
let link = self.get_link(ctx, api_base).ok_or_else(|| {
RecorderError::MikanRssInvalidFieldError {
field: "link".into(),
source: None.into(),
}
})?;
let mut extensions = ExtensionMap::default();
if enclosure_mime_type == BITTORRENT_MIME_TYPE {
extensions.insert("torrent".to_string(), {
let mut map = btreemap! {
"link".to_string() => vec![
ExtensionBuilder::default().name(
"link"
).value(enclosure_link.to_string()).build()
],
"contentLength".to_string() => vec![
ExtensionBuilder::default().name(
"contentLength"
).value(enclosure_content_length.to_string()).build()
],
};
if let Some(pub_date) = enclosure_pub_date {
map.insert(
"pubDate".to_string(),
vec![
ExtensionBuilder::default()
.name("pubDate")
.value(pub_date.to_rfc3339())
.build(),
],
);
}
map
});
};
let enclosure = EnclosureBuilder::default()
.mime_type(enclosure_mime_type)
.url(enclosure_link.to_string())
.length(enclosure_content_length.to_string())
.build();
let guid = GuidBuilder::default()
.value(self.get_guid_value())
.permalink(false)
.build();
let item = ItemBuilder::default()
.guid(guid)
.title(self.get_title().to_string())
.description(self.get_description().to_string())
.link(link.to_string())
.enclosure(enclosure)
.extensions(extensions)
.build();
Ok(item)
}
}
pub trait RssFeedTrait: Sized {
type Item: RssFeedItemTrait;
fn get_description(&self) -> Cow<'_, str>;
fn get_title(&self) -> Cow<'_, str>;
fn get_link(&self, ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>>;
fn items(&self) -> impl Iterator<Item = &Self::Item>;
fn into_items(self) -> impl Iterator<Item = Self::Item>;
fn into_channel(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Channel> {
let link = self.get_link(ctx, api_base).ok_or_else(|| {
RecorderError::MikanRssInvalidFieldError {
field: "link".into(),
source: None.into(),
}
})?;
let channel = ChannelBuilder::default()
.title(self.get_title())
.link(link.to_string())
.description(self.get_description())
.items({
self.into_items()
.map(|item| item.into_item(ctx, api_base))
.collect::<RecorderResult<Vec<_>>>()?
})
.build();
Ok(channel)
}
}

View File

@@ -0,0 +1,114 @@
use std::borrow::Cow;
use chrono::{DateTime, Utc};
use downloader::bittorrent::BITTORRENT_MIME_TYPE;
use url::Url;
use crate::{
app::{AppContextTrait, PROJECT_NAME},
models::{
episodes,
feeds::{
self,
rss::{RssFeedItemTrait, RssFeedTrait},
},
subscriptions,
},
web::controller,
};
pub struct SubscriptionEpisodesFeed {
pub feed: feeds::Model,
pub subscription: subscriptions::Model,
pub episodes: Vec<episodes::Model>,
}
impl SubscriptionEpisodesFeed {
pub fn from_model(
feed: feeds::Model,
subscription: subscriptions::Model,
episodes: Vec<episodes::Model>,
) -> Self {
Self {
feed,
subscription,
episodes,
}
}
}
impl RssFeedItemTrait for episodes::Model {
fn get_guid_value(&self) -> Cow<'_, str> {
Cow::Owned(format!("{PROJECT_NAME}:episode:{}", self.id))
}
fn get_title(&self) -> Cow<'_, str> {
Cow::Borrowed(&self.display_name)
}
fn get_description(&self) -> Cow<'_, str> {
Cow::Borrowed(&self.display_name)
}
fn get_link(&self, _ctx: &dyn AppContextTrait, _api_base: &Url) -> Option<Cow<'_, str>> {
self.homepage.as_deref().map(Cow::Borrowed)
}
fn get_enclosure_mime(&self) -> Option<Cow<'_, str>> {
if self.enclosure_torrent_link.is_some() {
Some(Cow::Borrowed(BITTORRENT_MIME_TYPE))
} else {
None
}
}
fn get_enclosure_link(
&self,
_ctx: &dyn AppContextTrait,
_api_base: &Url,
) -> Option<Cow<'_, str>> {
self.enclosure_torrent_link.as_deref().map(Cow::Borrowed)
}
fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>> {
self.enclosure_pub_date
}
fn get_enclosure_content_length(&self) -> Option<i64> {
self.enclosure_content_length
}
}
impl RssFeedTrait for SubscriptionEpisodesFeed {
type Item = episodes::Model;
fn get_description(&self) -> Cow<'_, str> {
Cow::Owned(format!(
"{PROJECT_NAME} - episodes of subscription {}",
self.subscription.id
))
}
fn get_title(&self) -> Cow<'_, str> {
Cow::Owned(format!("{PROJECT_NAME} - subscription episodes"))
}
fn get_link(&self, _ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>> {
let api_base = api_base
.join(&format!(
"{}/{}",
controller::feeds::CONTROLLER_PREFIX,
self.feed.token
))
.ok()?;
Some(Cow::Owned(api_base.to_string()))
}
fn items(&self) -> impl Iterator<Item = &Self::Item> {
self.episodes.iter()
}
fn into_items(self) -> impl Iterator<Item = Self::Item> {
self.episodes.into_iter()
}
}

View File

@@ -4,6 +4,7 @@ pub mod credential_3rd;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
+pub mod feeds;
pub mod query;
pub mod subscriber_tasks;
pub mod subscribers;

View File

@@ -1,6 +1,27 @@
+use async_trait::async_trait;
use sea_orm::entity::prelude::*;
-use crate::task::SubscriberTask;
+pub use crate::task::{
+SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant,
+SubscriberTaskTypeVariantIter,
+};
+#[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)]
+#[sea_orm(rs_type = "String", db_type = "Text")]
+pub enum SubscriberTaskStatus {
+#[sea_orm(string_value = "Pending")]
+Pending,
+#[sea_orm(string_value = "Scheduled")]
+Scheduled,
+#[sea_orm(string_value = "Running")]
+Running,
+#[sea_orm(string_value = "Done")]
+Done,
+#[sea_orm(string_value = "Failed")]
+Failed,
+#[sea_orm(string_value = "Killed")]
+Killed,
+}
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "subscriber_tasks")]
@@ -9,7 +30,8 @@ pub struct Model {
pub id: String,
pub subscriber_id: i32,
pub job: SubscriberTask,
-pub status: String,
+pub task_type: SubscriberTaskType,
+pub status: SubscriberTaskStatus,
pub attempts: i32,
pub max_attempts: i32,
pub run_at: DateTimeUtc,
@@ -44,4 +66,5 @@ pub enum RelatedEntity {
Subscriber,
}
+#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
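With status now a typed enum instead of a raw string, filtering becomes type-checked; a small sketch, assuming the usual SeaORM query style used elsewhere in the codebase.
use sea_orm::{ColumnTrait, DatabaseConnection, DbErr, EntityTrait, QueryFilter};

use crate::models::subscriber_tasks::{Column, Entity, Model, SubscriberTaskStatus};

async fn running_tasks(db: &DatabaseConnection, subscriber_id: i32) -> Result<Vec<Model>, DbErr> {
    Entity::find()
        .filter(Column::SubscriberId.eq(subscriber_id))
        .filter(Column::Status.eq(SubscriberTaskStatus::Running))
        .all(db)
        .await
}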

View File

@@ -3,11 +3,11 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, TransactionTrait, entity::prelud
use serde::{Deserialize, Serialize};
use crate::{
-app::AppContextTrait,
+app::{AppContextTrait, PROJECT_NAME},
errors::app_error::{RecorderError, RecorderResult},
};
-pub const SEED_SUBSCRIBER: &str = "konobangu";
+pub const SEED_SUBSCRIBER: &str = PROJECT_NAME;
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscriberBangumiConfig {
@@ -41,6 +41,10 @@ pub enum Relation {
Auth,
#[sea_orm(has_many = "super::credential_3rd::Entity")]
Credential3rd,
+#[sea_orm(has_many = "super::feeds::Entity")]
+Feed,
+#[sea_orm(has_many = "super::subscriber_tasks::Entity")]
+SubscriberTask,
}
impl Related<super::subscriptions::Entity> for Entity {
@@ -79,6 +83,18 @@ impl Related<super::credential_3rd::Entity> for Entity {
}
}
+impl Related<super::feeds::Entity> for Entity {
+fn to() -> RelationDef {
+Relation::Feed.def()
+}
+}
+impl Related<super::subscriber_tasks::Entity> for Entity {
+fn to() -> RelationDef {
+Relation::SubscriberTask.def()
+}
+}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscriptions::Entity")]
@@ -91,6 +107,10 @@ pub enum RelatedEntity {
Episode,
#[sea_orm(entity = "super::credential_3rd::Entity")]
Credential3rd,
+#[sea_orm(entity = "super::feeds::Entity")]
+Feed,
+#[sea_orm(entity = "super::subscriber_tasks::Entity")]
+SubscriberTask,
}
#[derive(Debug, Deserialize, Serialize)]

View File

@@ -1,5 +1,5 @@
use async_trait::async_trait;
-use sea_orm::{ActiveValue, entity::prelude::*};
+use sea_orm::{ActiveValue, entity::prelude::*, sea_query::OnConflict};
use serde::{Deserialize, Serialize};
use crate::{app::AppContextTrait, errors::RecorderResult};
@@ -96,14 +96,28 @@ impl Model {
subscription_id: i32,
) -> RecorderResult<()> {
let db = ctx.db();
-Entity::insert_many(bangumi_ids.map(|bangumi_id| ActiveModel {
-bangumi_id: ActiveValue::Set(bangumi_id),
-subscriber_id: ActiveValue::Set(subscriber_id),
-subscription_id: ActiveValue::Set(subscription_id),
-..Default::default()
-}))
-.on_conflict_do_nothing()
-.exec(db)
+let active_models = bangumi_ids
+.map(|bangumi_id| {
+ActiveModel::from_subscription_and_bangumi(
+subscriber_id,
+subscription_id,
+bangumi_id,
+)
+})
+.collect::<Vec<_>>();
+if active_models.is_empty() {
+return Ok(());
+}
+Entity::insert_many(active_models)
+.on_conflict(
+OnConflict::columns([Column::SubscriptionId, Column::BangumiId])
+.do_nothing()
+.to_owned(),
+)
+.exec_without_returning(db)
.await?;
Ok(())

View File

@@ -1,5 +1,5 @@
use async_trait::async_trait;
-use sea_orm::{ActiveValue, entity::prelude::*};
+use sea_orm::{ActiveValue, entity::prelude::*, sea_query::OnConflict};
use serde::{Deserialize, Serialize};
use crate::{app::AppContextTrait, errors::RecorderResult};
@@ -81,14 +81,27 @@ impl Model {
subscription_id: i32,
) -> RecorderResult<()> {
let db = ctx.db();
-Entity::insert_many(episode_ids.map(|episode_id| ActiveModel {
-episode_id: ActiveValue::Set(episode_id),
-subscription_id: ActiveValue::Set(subscription_id),
-subscriber_id: ActiveValue::Set(subscriber_id),
-..Default::default()
-}))
-.on_conflict_do_nothing()
-.exec(db)
+let active_models = episode_ids
+.map(|episode_id| ActiveModel {
+episode_id: ActiveValue::Set(episode_id),
+subscription_id: ActiveValue::Set(subscription_id),
+subscriber_id: ActiveValue::Set(subscriber_id),
+..Default::default()
+})
+.collect::<Vec<_>>();
+if active_models.is_empty() {
+return Ok(());
+}
+Entity::insert_many(active_models)
+.on_conflict(
+OnConflict::columns([Column::SubscriptionId, Column::EpisodeId])
+.do_nothing()
+.to_owned(),
+)
+.exec_without_returning(db)
.await?;
Ok(())

View File

@@ -1,317 +0,0 @@
use std::{fmt::Debug, sync::Arc};
use async_trait::async_trait;
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
extract::mikan::{
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
},
};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
#[sea_orm(string_value = "mikan_subscriber")]
MikanSubscriber,
#[sea_orm(string_value = "mikan_season")]
MikanSeason,
#[sea_orm(string_value = "mikan_bangumi")]
MikanBangumi,
#[sea_orm(string_value = "manual")]
Manual,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTimeUtc,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTimeUtc,
#[sea_orm(primary_key)]
pub id: i32,
pub display_name: String,
pub subscriber_id: i32,
pub category: SubscriptionCategory,
pub source_url: String,
pub enabled: bool,
pub credential_id: Option<i32>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
)]
Subscriber,
#[sea_orm(has_many = "super::bangumi::Entity")]
Bangumi,
#[sea_orm(has_many = "super::episodes::Entity")]
Episodes,
#[sea_orm(has_many = "super::subscription_episode::Entity")]
SubscriptionEpisode,
#[sea_orm(has_many = "super::subscription_bangumi::Entity")]
SubscriptionBangumi,
#[sea_orm(
belongs_to = "super::credential_3rd::Entity",
from = "Column::CredentialId",
to = "super::credential_3rd::Column::Id",
on_update = "Cascade",
on_delete = "SetNull"
)]
Credential3rd,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::subscription_bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriptionBangumi.def()
}
}
impl Related<super::subscription_episode::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriptionEpisode.def()
}
}
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
super::subscription_bangumi::Relation::Bangumi.def()
}
fn via() -> Option<RelationDef> {
Some(
super::subscription_bangumi::Relation::Subscription
.def()
.rev(),
)
}
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
super::subscription_episode::Relation::Episode.def()
}
fn via() -> Option<RelationDef> {
Some(
super::subscription_episode::Relation::Subscription
.def()
.rev(),
)
}
}
impl Related<super::credential_3rd::Entity> for Entity {
fn to() -> RelationDef {
Relation::Credential3rd.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::bangumi::Entity")]
Bangumi,
#[sea_orm(entity = "super::episodes::Entity")]
Episode,
#[sea_orm(entity = "super::subscription_episode::Entity")]
SubscriptionEpisode,
#[sea_orm(entity = "super::subscription_bangumi::Entity")]
SubscriptionBangumi,
#[sea_orm(entity = "super::credential_3rd::Entity")]
Credential3rd,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {}
impl Model {
pub async fn toggle_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
enabled: bool,
) -> RecorderResult<()> {
let db = ctx.db();
Entity::update_many()
.col_expr(Column::Enabled, Expr::value(enabled))
.filter(Column::Id.is_in(ids))
.exec(db)
.await?;
Ok(())
}
pub async fn delete_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
) -> RecorderResult<()> {
let db = ctx.db();
Entity::delete_many()
.filter(Column::Id.is_in(ids))
.exec(db)
.await?;
Ok(())
}
pub async fn find_by_id_and_subscriber_id(
ctx: &dyn AppContextTrait,
subscriber_id: i32,
subscription_id: i32,
) -> RecorderResult<Self> {
let db = ctx.db();
let subscription_model = Entity::find_by_id(subscription_id)
.one(db)
.await?
.ok_or_else(|| RecorderError::DbError {
source: DbErr::RecordNotFound(format!(
"Subscription id {subscription_id} not found or not belong to subscriber \
{subscriber_id}",
)),
})?;
if subscription_model.subscriber_id != subscriber_id {
Err(RecorderError::DbError {
source: DbErr::RecordNotFound(format!(
"Subscription id {subscription_id} not found or not belong to subscriber \
{subscriber_id}",
)),
})?;
}
Ok(subscription_model)
}
}
#[async_trait]
pub trait SubscriptionTrait: Sized + Debug {
fn get_subscriber_id(&self) -> i32;
fn get_subscription_id(&self) -> i32;
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
fn try_from_model(model: &Model) -> RecorderResult<Self>;
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "category")]
pub enum Subscription {
#[serde(rename = "mikan_subscriber")]
MikanSubscriber(MikanSubscriberSubscription),
#[serde(rename = "mikan_season")]
MikanSeason(MikanSeasonSubscription),
#[serde(rename = "mikan_bangumi")]
MikanBangumi(MikanBangumiSubscription),
#[serde(rename = "manual")]
Manual,
}
impl Subscription {
pub fn category(&self) -> SubscriptionCategory {
match self {
Self::MikanSubscriber(_) => SubscriptionCategory::MikanSubscriber,
Self::MikanSeason(_) => SubscriptionCategory::MikanSeason,
Self::MikanBangumi(_) => SubscriptionCategory::MikanBangumi,
Self::Manual => SubscriptionCategory::Manual,
}
}
}
#[async_trait]
impl SubscriptionTrait for Subscription {
fn get_subscriber_id(&self) -> i32 {
match self {
Self::MikanSubscriber(subscription) => subscription.get_subscriber_id(),
Self::MikanSeason(subscription) => subscription.get_subscriber_id(),
Self::MikanBangumi(subscription) => subscription.get_subscriber_id(),
Self::Manual => unreachable!(),
}
}
fn get_subscription_id(&self) -> i32 {
match self {
Self::MikanSubscriber(subscription) => subscription.get_subscription_id(),
Self::MikanSeason(subscription) => subscription.get_subscription_id(),
Self::MikanBangumi(subscription) => subscription.get_subscription_id(),
Self::Manual => unreachable!(),
}
}
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::Manual => Ok(()),
}
}
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_feeds_full(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_feeds_full(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_feeds_full(ctx).await,
Self::Manual => Ok(()),
}
}
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_sources(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_sources(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_sources(ctx).await,
Self::Manual => Ok(()),
}
}
fn try_from_model(model: &Model) -> RecorderResult<Self> {
match model.category {
SubscriptionCategory::MikanSubscriber => {
MikanSubscriberSubscription::try_from_model(model).map(Self::MikanSubscriber)
}
SubscriptionCategory::MikanSeason => {
MikanSeasonSubscription::try_from_model(model).map(Self::MikanSeason)
}
SubscriptionCategory::MikanBangumi => {
MikanBangumiSubscription::try_from_model(model).map(Self::MikanBangumi)
}
SubscriptionCategory::Manual => Ok(Self::Manual),
}
}
}
impl TryFrom<&Model> for Subscription {
type Error = RecorderError;
fn try_from(model: &Model) -> Result<Self, Self::Error> {
Self::try_from_model(model)
}
}

View File

@@ -0,0 +1,20 @@
use std::{fmt::Debug, sync::Arc};
use async_trait::async_trait;
use crate::{app::AppContextTrait, errors::RecorderResult, models::subscriptions};
#[async_trait]
pub trait SubscriptionTrait: Sized + Debug {
fn get_subscriber_id(&self) -> i32;
fn get_subscription_id(&self) -> i32;
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self>;
}

View File

@@ -0,0 +1,195 @@
mod core;
mod registry;
pub use core::SubscriptionTrait;
use std::fmt::Debug;
use async_trait::async_trait;
pub use registry::{
Subscription, SubscriptionCategory, SubscriptionCategoryEnum, SubscriptionCategoryIter,
SubscriptionCategoryVariant, SubscriptionCategoryVariantIter,
};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTimeUtc,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTimeUtc,
#[sea_orm(primary_key)]
pub id: i32,
pub display_name: String,
pub subscriber_id: i32,
pub category: SubscriptionCategory,
pub source_url: String,
pub enabled: bool,
pub credential_id: Option<i32>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
)]
Subscriber,
#[sea_orm(has_many = "super::bangumi::Entity")]
Bangumi,
#[sea_orm(has_many = "super::episodes::Entity")]
Episodes,
#[sea_orm(has_many = "super::subscription_episode::Entity")]
SubscriptionEpisode,
#[sea_orm(has_many = "super::subscription_bangumi::Entity")]
SubscriptionBangumi,
#[sea_orm(
belongs_to = "super::credential_3rd::Entity",
from = "Column::CredentialId",
to = "super::credential_3rd::Column::Id",
on_update = "Cascade",
on_delete = "SetNull"
)]
Credential3rd,
#[sea_orm(has_many = "super::feeds::Entity")]
Feed,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::subscription_bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriptionBangumi.def()
}
}
impl Related<super::subscription_episode::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriptionEpisode.def()
}
}
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
super::subscription_bangumi::Relation::Bangumi.def()
}
fn via() -> Option<RelationDef> {
Some(
super::subscription_bangumi::Relation::Subscription
.def()
.rev(),
)
}
}
impl Related<super::feeds::Entity> for Entity {
fn to() -> RelationDef {
Relation::Feed.def()
}
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
super::subscription_episode::Relation::Episode.def()
}
fn via() -> Option<RelationDef> {
Some(
super::subscription_episode::Relation::Subscription
.def()
.rev(),
)
}
}
impl Related<super::credential_3rd::Entity> for Entity {
fn to() -> RelationDef {
Relation::Credential3rd.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::bangumi::Entity")]
Bangumi,
#[sea_orm(entity = "super::episodes::Entity")]
Episode,
#[sea_orm(entity = "super::subscription_episode::Entity")]
SubscriptionEpisode,
#[sea_orm(entity = "super::subscription_bangumi::Entity")]
SubscriptionBangumi,
#[sea_orm(entity = "super::credential_3rd::Entity")]
Credential3rd,
#[sea_orm(entity = "super::feeds::Entity")]
Feed,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {}
impl Model {
pub async fn toggle_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
enabled: bool,
) -> RecorderResult<()> {
let db = ctx.db();
Entity::update_many()
.col_expr(Column::Enabled, Expr::value(enabled))
.filter(Column::Id.is_in(ids))
.exec(db)
.await?;
Ok(())
}
pub async fn delete_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
) -> RecorderResult<()> {
let db = ctx.db();
Entity::delete_many()
.filter(Column::Id.is_in(ids))
.exec(db)
.await?;
Ok(())
}
pub async fn find_by_id_and_subscriber_id(
ctx: &dyn AppContextTrait,
subscriber_id: i32,
subscription_id: i32,
) -> RecorderResult<Self> {
let db = ctx.db();
let subscription_model = Entity::find_by_id(subscription_id)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
if subscription_model.subscriber_id != subscriber_id {
Err(RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
}
Ok(subscription_model)
}
}

Some files were not shown because too many files have changed in this diff.