Compare commits


44 Commits

SHA1 Message Date
94919878ea fix: fix issues 2025-07-02 01:33:32 +08:00
81bf27ed28 fix: fix 2025-07-08 00:54:34 +08:00
5be5b9f634 fix: fix cron builder 2025-07-07 01:34:56 +08:00
6cdd8c27ce fix: fix typos 2025-07-06 05:05:07 +08:00
4174cea728 fix: fix cron webui 2025-07-06 02:35:55 +08:00
3aad31a36b feat: more cron webui 2025-07-05 04:08:56 +08:00
004fed9b2e feat: init cron webui 2025-07-05 02:08:55 +08:00
a1c2eeded1 temp save 2025-07-04 05:59:56 +08:00
147df00155 build: add prod build 2025-07-04 05:06:45 +08:00
5155c59293 fix: fix migrations 2025-07-04 01:25:07 +08:00
b5b3c77ba3 fix: fix migrations 2025-07-03 04:25:50 +08:00
1d0aa8d7f1 feat: support system tasks 2025-07-03 03:48:23 +08:00
5b001f9584 refactor: refactor graphql 2025-07-02 01:25:44 +08:00
d06acde882 fix: temp save 2025-07-01 03:45:56 +08:00
bacfe99ef2 fix: fix issues 2025-06-30 02:05:23 +08:00
b4090e74c0 fix: fix webui compability 2025-06-29 02:05:44 +08:00
c3e546e256 refactor: refactor graphql more 2025-06-27 05:54:25 +08:00
f83371bbf9 fix: fix task lifetime 2025-06-28 04:10:18 +08:00
c858cc7d44 fix: fix cron timeout clean 2025-06-28 03:38:53 +08:00
65505f91b2 refactor: refactor graphql 2025-06-27 04:06:58 +08:00
c8501b1768 fix: remove inconsistent cleanup function 2025-06-27 02:18:23 +08:00
3a8eb88e1a feat: add cron 2025-06-26 02:56:55 +08:00
003d8840fd fix: fix dotenv loader inconsistent and many ui issues 2025-06-25 06:36:15 +08:00
41ff5c2a11 fix: fix production issues 2025-06-25 05:21:08 +08:00
571caf50ff fix: fix feed rss 2025-06-25 01:26:06 +08:00
9fd3ae6563 feat: basic support rss 2025-06-24 06:37:19 +08:00
cde3361458 feat: add new test resource mikan classic episodes tiny.parquet 2025-06-23 03:07:58 +08:00
f055011b86 feat: add rss feeds and episode enclosure 2025-06-22 01:04:23 +08:00
16429a44b4 fix: fix missing 2025-06-21 03:25:22 +08:00
fe0b7e88e6 feat: classic episodes scraper 2025-06-21 03:21:58 +08:00
28dd9da6ac fix: fix typo 2025-06-20 02:05:23 +08:00
02c16a2972 feat: support optimize images 2025-06-20 01:56:34 +08:00
324427513c refactor: rewrite origin name extractor from regex to nom combinators 2025-06-19 02:37:56 +08:00
c12b9b360a feat: static server support etag 2025-06-18 04:42:33 +08:00
cc06142050 fix: fix middlewares config 2025-06-18 03:09:10 +08:00
6726cafff4 feat: support static server 2025-06-18 02:19:42 +08:00
35312ea1ff fix: fix issues 2025-06-17 02:23:02 +08:00
721eee9c88 fix: fix issues 2025-06-16 08:01:02 +08:00
421f9d0293 feat: task ui & custom filter mutation 2025-06-16 07:56:52 +08:00
7eb4e41708 feat: try views and seaography 2025-06-15 05:02:23 +08:00
a2254bbe80 fix: fix auto accessToken renew 2025-06-15 02:48:48 +08:00
1b5bdadf10 fix: fix tasks 2025-06-14 22:30:58 +08:00
882b29d7a1 feat: task ui basic done 2025-06-13 04:02:01 +08:00
c60f6f511e feat: remove turbo 2025-06-13 00:09:18 +08:00
258 changed files with 22421 additions and 9477 deletions

.vscode/settings.json (vendored)

@@ -40,12 +40,5 @@
     }
   ],
   "rust-analyzer.cargo.features": "all",
-  // https://github.com/rust-lang/rust/issues/141540
-  "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
-  "rust-analyzer.check.extraEnv": {
-    "CARGO_TARGET_DIR": "target/rust-analyzer"
-  },
-  "rust-analyzer.cargo.extraEnv": {
-    "CARGO_TARGET_DIR": "target/analyzer"
-  }
+  "rust-analyzer.testExplorer": true
 }

.vscode/tasks.json (vendored, new file)

@@ -0,0 +1,112 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "dev-all",
"dependsOn": [
"dev-webui",
"dev-recorder",
"dev-proxy",
"dev-codegen-wait",
"dev-deps",
],
"dependsOrder": "parallel",
"group": {
"kind": "build",
"isDefault": false,
},
"presentation": {
"group": "new-group",
"echo": true,
"reveal": "always",
"panel": "shared",
"clear": false
}
},
{
"label": "dev-webui",
"type": "shell",
"command": "just",
"args": [
"dev-webui"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "always",
"focus": true,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-deps",
"type": "shell",
"command": "just",
"args": [
"dev-deps"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-codegen-wait",
"type": "shell",
"command": "just",
"args": [
"dev-codegen-wait"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-recorder",
"type": "shell",
"command": "just",
"args": [
"dev-recorder"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-proxy",
"type": "shell",
"command": "just",
"args": [
"dev-proxy",
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
}
]
}

Cargo.lock (generated)

File diff suppressed because it is too large

Cargo.toml

@@ -13,7 +13,6 @@ members = [
 resolver = "2"

 [profile.dev]
-debug = 0
 # [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
 # codegen-backend = "cranelift"
@@ -24,31 +23,30 @@ util-derive = { path = "./packages/util-derive" }
 fetch = { path = "./packages/fetch" }
 downloader = { path = "./packages/downloader" }
 recorder = { path = "./apps/recorder" }
-proxy = { path = "./apps/proxy" }
-reqwest = { version = "0.12", features = [
+reqwest = { version = "0.12.20", features = [
   "charset",
   "http2",
   "json",
   "macos-system-configuration",
   "cookies",
 ] }
-moka = "0.12"
+moka = "0.12.10"
-futures = "0.3"
+futures = "0.3.31"
-quirks_path = "0.1"
+quirks_path = "0.1.1"
-snafu = { version = "0.8", features = ["futures"] }
+snafu = { version = "0.8.0", features = ["futures"] }
-testcontainers = { version = "0.24" }
+testcontainers = { version = "0.24.0" }
 testcontainers-modules = { version = "0.12.1" }
 testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
-serde = { version = "1", features = ["derive"] }
+serde = { version = "1.0.219", features = ["derive"] }
-tokio = { version = "1.45.1", features = [
+tokio = { version = "1.46", features = [
   "macros",
   "fs",
   "rt-multi-thread",
   "signal",
 ] }
-serde_json = "1"
+serde_json = "1.0.140"
-async-trait = "0.1"
+async-trait = "0.1.88"
 tracing = "0.1"
 url = "2.5.2"
 anyhow = "1"
@@ -60,11 +58,31 @@ regex = "1.11"
 lazy_static = "1.5"
 axum = { version = "0.8.3", features = ["macros"] }
 tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
-axum-extra = "0.10"
+axum-extra = { version = "0.10", features = ["typed-header"] }
 mockito = { version = "1.6.1" }
 convert_case = "0.8"
 color-eyre = "0.6.5"
 inquire = "0.7.5"
+image = "0.25.6"
+uuid = { version = "1.6.0", features = ["v7"] }
+maplit = "1.0.2"
+once_cell = "1.20.2"
+rand = "0.9.1"
+rust_decimal = "1.37.2"
+base64 = "0.22.1"
+nom = "8.0.0"
+percent-encoding = "2.3.1"
+num-traits = "0.2.19"
+http = "1.2.0"
+async-stream = "0.3.6"
+serde_variant = "0.1.3"
+tracing-appender = "0.2.3"
+clap = "4.5.41"
+ipnetwork = "0.21.1"
+typed-builder = "0.21.0"
+nanoid = "0.4.0"
+webp = "0.3.0"

 [patch.crates-io]
-seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "01d3f99" }
+seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "292cdd2" }

package.json (react-email app)

@@ -6,13 +6,14 @@
   "build": "email build",
   "dev": "email dev --port 5003",
   "export": "email export",
-  "clean": "git clean -xdf .cache .turbo dist node_modules",
+  "clean": "git clean -xdf .cache dist node_modules",
   "typecheck": "tsc --noEmit --emitDeclarationOnly false"
 },
 "dependencies": {
-  "@react-email/components": "0.0.31",
+  "@react-email/components": "^0.0.42",
   "react": "^19.0.0",
-  "react-email": "3.0.4"
+  "react-email": "^4.0.16",
+  "@konobangu/email": "workspace:*"
 },
 "devDependencies": {
   "@types/react": "19.0.1"

tsconfig.json (react-email app)

@@ -2,8 +2,12 @@
 "extends": "../../tsconfig.base.json",
 "compilerOptions": {
   "composite": true,
-  "jsx": "react-jsx"
+  "jsx": "react-jsx",
+  "jsxImportSource": "react",
+  "module": "ESNext",
+  "moduleResolution": "bundler"
 },
+"references": [{ "path": "../../packages/email" }],
 "include": ["**/*.ts", "**/*.tsx"],
 "exclude": ["node_modules"]
 }

whistle rule file (new, mikanani.me doppel)

@@ -0,0 +1 @@
^https://mikanani.me/*** http://127.0.0.1:5005/$1 excludeFilter://^**/***.svg excludeFilter://^**/***.css excludeFilter://^**/***.js

whistle rule file (deleted)

@@ -1 +0,0 @@
^https://mikanani.me/*** http://127.0.0.1:5010/$1

whistle rule file (new, konobangu.com forwarding)

@@ -0,0 +1,8 @@
```x-forwarded.json
{
"X-Forwarded-Host": "konobangu.com",
"X-Forwarded-Proto": "https"
}
```
^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/$1

whistle rules state (JSON)

@@ -1 +1 @@
-{"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""}
+{"filesOrder":["konobangu","konobangu-prod","mikan-doppel"],"selectedList":["mikan-doppel","konobangu"],"disabledDefalutRules":true,"defalutRules":""}

Cargo.toml (mikan_doppel crate)

@@ -13,7 +13,7 @@ name = "mikan_doppel"
 path = "src/bin/mikan_doppel.rs"

 [dependencies]
-recorder = { workspace = true }
+recorder = { workspace = true, features = ["playground"] }
 tokio = { workspace = true }
 tracing-subscriber = { workspace = true }
 tracing = { workspace = true }

package.json (whistle proxy app)

@@ -10,6 +10,6 @@
   "keywords": [],
   "license": "MIT",
   "devDependencies": {
-    "whistle": "^2.9.93"
+    "whistle": "^2.9.99"
   }
 }

src/bin/mikan_doppel.rs

@@ -10,7 +10,7 @@ async fn main() -> RecorderResult<()> {
         .with_max_level(Level::DEBUG)
         .init();

-    let mut mikan_server = MikanMockServer::new_with_port(5010).await.unwrap();
+    let mut mikan_server = MikanMockServer::new_with_port(5005).await.unwrap();

     let resources_mock = mikan_server.mock_resources_with_doppel();

dotenv file (deleted, old flat keys)

@@ -1,16 +0,0 @@
HOST="konobangu.com"
DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
STORAGE_DATA_DIR = "./data"
AUTH_TYPE = "basic" # or oidc
BASIC_USER = "konobangu"
BASIC_PASSWORD = "konobangu"
# OIDC_ISSUER="https://auth.logto.io/oidc"
# OIDC_AUDIENCE = "https://konobangu.com/api"
# OIDC_CLIENT_ID = "client_id"
# OIDC_CLIENT_SECRET = "client_secret" # optional
# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# OIDC_EXTRA_CLAIM_KEY = ""
# OIDC_EXTRA_CLAIM_VALUE = ""
# MIKAN_PROXY = ""
# MIKAN_PROXY_AUTH_HEADER = ""
# MIKAN_NO_PROXY = ""

dotenv file (new, dev settings)

@@ -0,0 +1,18 @@
LOGGER__LEVEL = "debug"
DATABASE__URI = "postgres://konobangu:konobangu@localhost:5432/konobangu"
AUTH__AUTH_TYPE = "basic"
AUTH__BASIC_USER = "konobangu"
AUTH__BASIC_PASSWORD = "konobangu"
# AUTH__OIDC_ISSUER = "https://auth.logto.io/oidc"
# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
# AUTH__OIDC_CLIENT_ID = "client_id"
# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# AUTH__OIDC_EXTRA_CLAIM_KEY = ""
# AUTH__OIDC_EXTRA_CLAIM_VALUE = ""
MIKAN__HTTP_CLIENT__PROXY__ACCEPT_INVALID_CERTS = true
MIKAN__HTTP_CLIENT__PROXY__SERVER = "http://127.0.0.1:8899"

dotenv example file (new)

@@ -0,0 +1,15 @@
HOST="konobangu.com"
DATABASE__URI = "postgres://konobangu:konobangu@localhost:5432/konobangu"
AUTH__AUTH_TYPE = "basic" # or oidc
AUTH__BASIC_USER = "konobangu"
AUTH__BASIC_PASSWORD = "konobangu"
# AUTH__OIDC_ISSUER="https://auth.logto.io/oidc"
# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
# AUTH__OIDC_CLIENT_ID = "client_id"
# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# AUTH__OIDC_EXTRA_CLAIM_KEY = ""
# AUTH__OIDC_EXTRA_CLAIM_VALUE = ""
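
These dotenv keys use double underscores to encode nesting: judging from the `Env::prefixed("").split("__").lowercase(true)` provider added in the AppConfig diff further down, `AUTH__BASIC_USER` should resolve to the nested `auth.basic_user` config key. A minimal sketch of that mapping with figment (the struct names here are illustrative, not the recorder's real config types):

```rust
// Sketch only: shows how figment's Env provider maps AUTH__BASIC_USER onto
// `auth.basic_user`; the real AppConfig/AuthConfig types live in the recorder crate.
use figment::{Figment, providers::Env};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct AuthSection {
    auth_type: String,  // <- AUTH__AUTH_TYPE
    basic_user: String, // <- AUTH__BASIC_USER
}

#[derive(Debug, Deserialize)]
struct Config {
    auth: AuthSection,
}

fn main() -> Result<(), figment::Error> {
    // `split("__")` turns AUTH__BASIC_USER into the nested key `auth.basic_user`,
    // and `lowercase(true)` folds the env-style casing into TOML-style keys.
    let config: Config = Figment::new()
        .merge(Env::prefixed("").split("__").lowercase(true))
        .extract()?;
    println!("{config:?}");
    Ok(())
}
```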

.gitignore (recorder)

@@ -27,3 +27,7 @@ node_modules
 dist/
 temp/*
 !temp/.gitkeep
+tests/resources/mikan/classic_episodes/*/*
+!tests/resources/mikan/classic_episodes/parquet/tiny.parquet
+webui/
+data/

Cargo.toml (recorder crate)

@@ -2,8 +2,21 @@
 name = "recorder"
 version = "0.1.0"
 edition = "2024"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

+[features]
+default = ["jxl"]
+playground = ["dep:inquire", "dep:color-eyre", "dep:polars", "test-utils"]
+testcontainers = [
+  "dep:testcontainers",
+  "dep:testcontainers-modules",
+  "dep:testcontainers-ext",
+  "downloader/testcontainers",
+  "testcontainers-modules/postgres",
+]
+jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
+test-utils = []
+
 [lib]
 name = "recorder"
 path = "src/lib.rs"
@@ -13,16 +26,25 @@ name = "recorder_cli"
 path = "src/bin/main.rs"
 required-features = []

-[features]
-default = []
-playground = ["dep:inquire", "dep:color-eyre"]
-testcontainers = [
-  "dep:testcontainers",
-  "dep:testcontainers-modules",
-  "dep:testcontainers-ext",
-  "downloader/testcontainers",
-  "testcontainers-modules/postgres",
-]
+[[example]]
+name = "mikan_collect_classic_eps"
+path = "examples/mikan_collect_classic_eps.rs"
+required-features = ["playground"]
+
+[[example]]
+name = "mikan_doppel_season_subscription"
+path = "examples/mikan_doppel_season_subscription.rs"
+required-features = ["playground"]
+
+[[example]]
+name = "mikan_doppel_subscriber_subscription"
+path = "examples/mikan_doppel_subscriber_subscription.rs"
+required-features = ["playground"]
+
+[[example]]
+name = "playground"
+path = "examples/playground.rs"
+required-features = ["playground"]

 [dependencies]
 downloader = { workspace = true }
@@ -55,6 +77,27 @@ moka = { workspace = true }
 chrono = { workspace = true }
 tracing-subscriber = { workspace = true }
 mockito = { workspace = true }
+color-eyre = { workspace = true, optional = true }
+inquire = { workspace = true, optional = true }
+convert_case = { workspace = true }
+image = { workspace = true }
+uuid = { workspace = true }
+maplit = { workspace = true }
+once_cell = { workspace = true }
+rand = { workspace = true }
+rust_decimal = { workspace = true }
+base64 = { workspace = true }
+nom = { workspace = true }
+percent-encoding = { workspace = true }
+num-traits = { workspace = true }
+http = { workspace = true }
+async-stream = { workspace = true }
+serde_variant = { workspace = true }
+tracing-appender = { workspace = true }
+clap = { workspace = true }
+ipnetwork = { workspace = true }
+typed-builder = { workspace = true }
+webp = { workspace = true }

 sea-orm = { version = "1.1", features = [
   "sqlx-sqlite",
@@ -64,19 +107,13 @@ sea-orm = { version = "1.1", features = [
   "debug-print",
 ] }
 figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
-uuid = { version = "1.6.0", features = ["v4"] }
 sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
-rss = "2"
+rss = { version = "2", features = ["builders", "with-serde"] }
-fancy-regex = "0.14"
+fancy-regex = "0.15"
-maplit = "1.0.2"
 lightningcss = "1.0.0-alpha.66"
 html-escape = "0.2.13"
 opendal = { version = "0.53", features = ["default", "services-fs"] }
-zune-image = "0.4.15"
-once_cell = "1.20.2"
-scraper = "0.23"
-log = "0.4"
+scraper = "0.23.1"
 async-graphql = { version = "7", features = ["dynamic-schema"] }
 async-graphql-axum = "7"
 seaography = { version = "1.1", features = [
@@ -88,9 +125,9 @@ seaography = { version = "1.1", features = [
   "with-bigdecimal",
   "with-postgres-array",
   "with-json-as-scalar",
+  "with-custom-as-json",
 ] }
-base64 = "0.22.1"
-tower = "0.5.2"
+tower = { version = "0.5.2", features = ["util"] }
 tower-http = { version = "0.6", features = [
   "trace",
   "catch-panic",
@@ -104,30 +141,41 @@ tower-http = { version = "0.6", features = [
 tera = "1.20.0"
 openidconnect = { version = "4" }
 dotenvy = "0.15.7"
-http = "1.2.0"
-async-stream = "0.3.6"
-serde_variant = "0.1.3"
-tracing-appender = "0.2.3"
-clap = "4.5.31"
-ipnetwork = "0.21.1"
-typed-builder = "0.21.0"
+jpegxl-rs = { version = "0.11.2", optional = true }
+jpegxl-sys = { version = "0.11.2", optional = true }
 apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
 apalis-sql = { version = "0.7", features = ["postgres"] }
 cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
-rand = "0.9.1"
-rust_decimal = "1.37.1"
 reqwest_cookie_store = "0.8.0"
-nanoid = "0.4.0"
 jwtk = "0.4.0"
-color-eyre = { workspace = true, optional = true }
-inquire = { workspace = true, optional = true }
-percent-encoding = "2.3.1"
+mime_guess = "2.0.5"
+icu_properties = "2.0.1"
+icu = "2.0.0"
+tracing-tree = "0.4.0"
+num_cpus = "1.17.0"
+headers-accept = "0.1.4"
+polars = { version = "0.49.1", features = [
+  "parquet",
+  "lazy",
+  "diagonal_concat",
+], optional = true }
+quick-xml = { version = "0.38", features = [
+  "serialize",
+  "serde-types",
+  "serde",
+] }
+croner = "2.2.0"
+ts-rs = "11.0.1"
+secrecy = { version = "0.10.3", features = ["serde"] }
+paste = "1.0.15"
+chrono-tz = "0.10.3"

 [dev-dependencies]
+serial_test = "3"
+insta = { version = "1", features = ["redactions", "toml", "filters"] }
+rstest = "0.25"
+ctor = "0.4.0"
 inquire = { workspace = true }
 color-eyre = { workspace = true }
-serial_test = "3"
-insta = { version = "1", features = ["redactions", "toml", "filters"] }
-ctor = "0.4.0"
-tracing-test = "0.2.5"
-rstest = "0.25"

SubscriberTaskInput.ts (generated, new file)

@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncOneSubscriptionFeedsFullTaskInput } from "./SyncOneSubscriptionFeedsFullTaskInput";
import type { SyncOneSubscriptionFeedsIncrementalTaskInput } from "./SyncOneSubscriptionFeedsIncrementalTaskInput";
import type { SyncOneSubscriptionSourcesTaskInput } from "./SyncOneSubscriptionSourcesTaskInput";
export type SubscriberTaskInput = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTaskInput | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTaskInput | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTaskInput;

SubscriberTaskType.ts (generated, new file)

@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncOneSubscriptionFeedsFullTask } from "./SyncOneSubscriptionFeedsFullTask";
import type { SyncOneSubscriptionFeedsIncrementalTask } from "./SyncOneSubscriptionFeedsIncrementalTask";
import type { SyncOneSubscriptionSourcesTask } from "./SyncOneSubscriptionSourcesTask";
export type SubscriberTaskType = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTask | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTask | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTask;

SyncOneSubscriptionFeedsFullTask.ts (generated, new file)

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsFullTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

SyncOneSubscriptionFeedsFullTaskInput.ts (generated, new file)

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsFullTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };

SyncOneSubscriptionFeedsIncrementalTask.ts (generated, new file)

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsIncrementalTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

SyncOneSubscriptionFeedsIncrementalTaskInput.ts (generated, new file)

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsIncrementalTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };

SyncOneSubscriptionSourcesTask.ts (generated, new file)

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionSourcesTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

SyncOneSubscriptionSourcesTaskInput.ts (generated, new file)

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionSourcesTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };
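
These bindings are emitted by ts-rs, per their headers; the `taskType` key on every variant suggests the Rust side is an internally tagged serde enum, which ts-rs renders as the `{ "taskType": ... } & ...` intersections above. A hedged sketch of the kind of declaration that would produce them, reduced to a single variant (the real task types live in the recorder crate and will differ in detail):

```rust
// Sketch, not the recorder's actual code: one task struct plus an internally
// tagged enum; the remaining variants follow the same pattern.
use serde::{Deserialize, Serialize};
use ts_rs::TS;

#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")] // yields subscriptionId, subscriberId, cronId
#[ts(export)]
pub struct SyncOneSubscriptionSourcesTask {
    pub subscription_id: i32,
    pub subscriber_id: i32,
    pub cron_id: Option<i32>,
}

// `tag = "taskType"` yields the `{ "taskType": "..." } & Task` unions seen in
// the generated .ts files; `rename_all = "snake_case"` yields the tag strings.
#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(tag = "taskType", rename_all = "snake_case")]
#[ts(export)]
pub enum SubscriberTaskType {
    SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTask),
}
```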

examples/mikan_collect_classic_eps.rs (new file)

@@ -0,0 +1,584 @@
use std::collections::HashSet;
use chrono::{DateTime, Duration, FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
use fetch::{HttpClientConfig, fetch_html};
use itertools::Itertools;
use lazy_static::lazy_static;
use nom::{
IResult, Parser,
branch::alt,
bytes::complete::{tag, take, take_till1},
character::complete::space1,
combinator::map,
};
use recorder::{
errors::{RecorderError, RecorderResult},
extract::{
html::extract_inner_text_from_element_ref,
mikan::{MikanClient, MikanConfig, MikanEpisodeHash, MikanFansubHash},
},
};
use regex::Regex;
use scraper::{ElementRef, Html, Selector};
use snafu::FromString;
use url::Url;
lazy_static! {
static ref TEST_FOLDER: std::path::PathBuf =
if cfg!(any(test, debug_assertions, feature = "playground")) {
std::path::PathBuf::from(format!(
"{}/tests/resources/mikan/classic_episodes",
env!("CARGO_MANIFEST_DIR")
))
} else {
std::path::PathBuf::from("tests/resources/mikan/classic_episodes")
};
}
lazy_static! {
static ref TOTAL_PAGE_REGEX: Regex =
Regex::new(r#"\$\(\'\.classic-view-pagination2\'\)\.bootpag\(\{\s*total:\s*(\d+)"#)
.unwrap();
}
pub struct MikanClassicEpisodeTableRow {
pub id: i32,
pub publish_at: DateTime<Utc>,
pub mikan_fansub_id: Option<String>,
pub fansub_name: Option<String>,
pub mikan_episode_id: String,
pub original_name: String,
pub magnet_link: Option<String>,
pub file_size: Option<String>,
pub torrent_link: Option<String>,
}
impl MikanClassicEpisodeTableRow {
fn timezone() -> FixedOffset {
FixedOffset::east_opt(8 * 3600).unwrap()
}
fn fixed_date_parser(input: &str) -> IResult<&str, NaiveDate> {
alt((
map(tag("今天"), move |_| {
Utc::now().with_timezone(&Self::timezone()).date_naive()
}),
map(tag("昨天"), move |_| {
Utc::now().with_timezone(&Self::timezone()).date_naive() - Duration::days(1)
}),
))
.parse(input)
}
fn formatted_date_parser(input: &str) -> IResult<&str, NaiveDate> {
let (remain, date_str) = take_till1(|c: char| c.is_whitespace()).parse(input)?;
let date = NaiveDate::parse_from_str(date_str, "%Y/%m/%d").map_err(|_| {
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
})?;
Ok((remain, date))
}
fn date_parser(input: &str) -> IResult<&str, NaiveDate> {
alt((Self::fixed_date_parser, Self::formatted_date_parser)).parse(input)
}
fn time_parser(input: &str) -> IResult<&str, NaiveTime> {
let (remain, time_str) = take(5usize).parse(input)?;
let time = NaiveTime::parse_from_str(time_str, "%H:%M").map_err(|_| {
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
})?;
Ok((remain, time))
}
fn extract_publish_at(text: &str) -> Option<DateTime<Utc>> {
let (_, (date, _, time)) = (Self::date_parser, space1, Self::time_parser)
.parse(text)
.ok()?;
let local_dt = Self::timezone()
.from_local_datetime(&date.and_time(time))
.single()?;
Some(local_dt.with_timezone(&Utc))
}
pub fn from_element_ref(
row: ElementRef<'_>,
rev_id: i32,
idx: i32,
mikan_base_url: &Url,
) -> RecorderResult<Self> {
let publish_at_selector = &Selector::parse("td:nth-of-type(1)").unwrap();
let fansub_selector = &Selector::parse("td:nth-of-type(2) > a").unwrap();
let original_name_selector =
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(1)").unwrap();
let magnet_link_selector =
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(2)").unwrap();
let file_size_selector = &Selector::parse("td:nth-of-type(4)").unwrap();
let torrent_link_selector = &Selector::parse("td:nth-of-type(5) > a").unwrap();
let publish_at = row
.select(publish_at_selector)
.next()
.map(extract_inner_text_from_element_ref)
.and_then(|e| Self::extract_publish_at(&e));
let (mikan_fansub_hash, fansub_name) = row
.select(fansub_selector)
.next()
.and_then(|e| {
e.attr("href")
.and_then(|s| mikan_base_url.join(s).ok())
.and_then(|u| MikanFansubHash::from_homepage_url(&u))
.map(|h| (h, extract_inner_text_from_element_ref(e)))
})
.unzip();
let (mikan_episode_hash, original_name) = row
.select(original_name_selector)
.next()
.and_then(|el| {
el.attr("href")
.and_then(|s| mikan_base_url.join(s).ok())
.and_then(|u| MikanEpisodeHash::from_homepage_url(&u))
.map(|h| (h, extract_inner_text_from_element_ref(el)))
})
.unzip();
let magnet_link = row
.select(magnet_link_selector)
.next()
.and_then(|el| el.attr("data-clipboard-text"));
let file_size = row
.select(file_size_selector)
.next()
.map(extract_inner_text_from_element_ref);
let torrent_link = row
.select(torrent_link_selector)
.next()
.and_then(|el| el.attr("href"));
if let (Some(mikan_episode_hash), Some(original_name), Some(publish_at)) = (
mikan_episode_hash.as_ref(),
original_name.as_ref(),
publish_at.as_ref(),
) {
Ok(Self {
id: rev_id * 1000 + idx,
publish_at: *publish_at,
mikan_fansub_id: mikan_fansub_hash.map(|h| h.mikan_fansub_id.clone()),
fansub_name,
mikan_episode_id: mikan_episode_hash.mikan_episode_id.clone(),
original_name: original_name.clone(),
magnet_link: magnet_link.map(|s| s.to_string()),
file_size: file_size.map(|s| s.to_string()),
torrent_link: torrent_link.map(|s| s.to_string()),
})
} else {
let mut missing_fields = vec![];
if mikan_episode_hash.is_none() {
missing_fields.push("mikan_episode_id");
}
if original_name.is_none() {
missing_fields.push("original_name");
}
if publish_at.is_none() {
missing_fields.push("publish_at");
}
Err(RecorderError::without_source(format!(
"Failed to parse episode table row, missing fields: {missing_fields:?}, row \
index: {idx}"
)))
}
}
}
pub struct MikanClassicEpisodeTablePage {
pub page: i32,
pub total: i32,
pub html: String,
pub rows: Vec<MikanClassicEpisodeTableRow>,
}
impl MikanClassicEpisodeTablePage {
pub fn from_html(
html: String,
mikan_base_url: &Url,
page: i32,
updated_info: Option<(i32, i32)>,
) -> RecorderResult<Self> {
let tr_selector = &Selector::parse("tbody tr").unwrap();
let doc = Html::parse_document(&html);
if let Some(mut total) = TOTAL_PAGE_REGEX
.captures(&html)
.and_then(|c| c.get(1))
.and_then(|s| s.as_str().parse::<i32>().ok())
{
if let Some((_, update_total)) = updated_info {
total = update_total;
}
let rev_id = total - page;
let rows = doc
.select(tr_selector)
.rev()
.enumerate()
.map(|(idx, tr)| {
MikanClassicEpisodeTableRow::from_element_ref(
tr,
rev_id,
idx as i32,
mikan_base_url,
)
})
.collect::<RecorderResult<Vec<_>>>()?;
Ok(Self {
page,
total,
html,
rows,
})
} else {
Err(RecorderError::without_source(
"Failed to parse pagination meta and rows".into(),
))
}
}
pub fn save_to_files(&self) -> RecorderResult<()> {
use polars::prelude::*;
let rev_id = self.total - self.page;
let parquet_path = TEST_FOLDER.join(format!("parquet/rev_{rev_id}.parquet"));
let csv_path = TEST_FOLDER.join(format!("csv/rev_{rev_id}.csv"));
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
std::fs::write(html_path, self.html.clone())?;
let mut id_vec = Vec::new();
let mut publish_at_vec = Vec::new();
let mut mikan_fansub_id_vec = Vec::new();
let mut fansub_name_vec = Vec::new();
let mut mikan_episode_id_vec = Vec::new();
let mut original_name_vec = Vec::new();
let mut magnet_link_vec = Vec::new();
let mut file_size_vec = Vec::new();
let mut torrent_link_vec = Vec::new();
for row in &self.rows {
id_vec.push(row.id);
publish_at_vec.push(row.publish_at.to_rfc3339());
mikan_fansub_id_vec.push(row.mikan_fansub_id.clone());
fansub_name_vec.push(row.fansub_name.clone());
mikan_episode_id_vec.push(row.mikan_episode_id.clone());
original_name_vec.push(row.original_name.clone());
magnet_link_vec.push(row.magnet_link.clone());
file_size_vec.push(row.file_size.clone());
torrent_link_vec.push(row.torrent_link.clone());
}
let df = df! [
"id" => id_vec,
"publish_at_timestamp" => publish_at_vec,
"mikan_fansub_id" => mikan_fansub_id_vec,
"fansub_name" => fansub_name_vec,
"mikan_episode_id" => mikan_episode_id_vec,
"original_name" => original_name_vec,
"magnet_link" => magnet_link_vec,
"file_size" => file_size_vec,
"torrent_link" => torrent_link_vec,
]
.map_err(|e| {
let message = format!("Failed to create DataFrame: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
let mut parquet_file = std::fs::File::create(&parquet_path)?;
ParquetWriter::new(&mut parquet_file)
.finish(&mut df.clone())
.map_err(|e| {
let message = format!("Failed to write parquet file: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
let mut csv_file = std::fs::File::create(&csv_path)?;
CsvWriter::new(&mut csv_file)
.include_header(true)
.with_quote_style(QuoteStyle::Always)
.finish(&mut df.clone())
.map_err(|e| {
let message = format!("Failed to write csv file: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
println!(
"[{}/{}] Saved {} rows to rev_{}.{{parquet,html,csv}}",
self.page,
self.total,
self.rows.len(),
rev_id
);
Ok(())
}
pub fn waiting_rev_ids(total: i32) -> RecorderResult<Vec<i32>> {
let dir = TEST_FOLDER.join("csv");
let files = std::fs::read_dir(dir)?;
let rev_ids = files
.filter_map(|f| f.ok())
.filter_map(|f| {
f.path().file_stem().and_then(|s| {
s.to_str().and_then(|s| {
if s.starts_with("rev_") {
s.replace("rev_", "").parse::<i32>().ok()
} else {
None
}
})
})
})
.collect::<HashSet<_>>();
Ok((0..total)
.filter(|rev_id| !rev_ids.contains(rev_id))
.collect::<Vec<_>>())
}
}
async fn scrape_mikan_classic_episode_table_page(
mikan_client: &MikanClient,
page: i32,
updated_info: Option<(i32, i32)>,
) -> RecorderResult<MikanClassicEpisodeTablePage> {
let mikan_base_url = mikan_client.base_url();
let url = mikan_base_url.join(&format!("/Home/Classic/{page}"))?;
if let Some((rev_id, update_total)) = updated_info.as_ref() {
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
if html_path.exists() {
let html = std::fs::read_to_string(&html_path)?;
println!("[{page}/{update_total}] html exists, skipping fetch");
return MikanClassicEpisodeTablePage::from_html(
html,
mikan_base_url,
page,
updated_info,
);
}
}
let total = if let Some((_, update_total)) = updated_info.as_ref() {
update_total.to_string()
} else {
"Unknown".to_string()
};
println!("[{page}/{total}] fetching html...");
let html = fetch_html(mikan_client, url).await?;
println!("[{page}/{total}] fetched html done");
std::fs::write(TEST_FOLDER.join("html/temp.html"), html.clone())?;
MikanClassicEpisodeTablePage::from_html(html, mikan_base_url, page, updated_info)
}
async fn scrape_mikan_classic_episode_table_page_from_rev_id(
mikan_client: &MikanClient,
total: i32,
rev_idx: i32,
) -> RecorderResult<MikanClassicEpisodeTablePage> {
let page = total - rev_idx;
scrape_mikan_classic_episode_table_page(mikan_client, page, Some((rev_idx, total))).await
}
async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()> {
use polars::prelude::*;
let dir = TEST_FOLDER.join("parquet");
let files = std::fs::read_dir(dir)?;
let parquet_paths = files
.filter_map(|f| f.ok())
.filter_map(|f| {
let path = f.path();
if let Some(ext) = path.extension()
&& ext == "parquet"
&& path
.file_stem()
.is_some_and(|f| f.to_string_lossy().starts_with("rev_"))
{
Some(path)
} else {
None
}
})
.collect::<Vec<_>>();
if parquet_paths.is_empty() {
return Err(RecorderError::without_source(
"No parquet files found to merge".into(),
));
}
println!("Found {} parquet files to merge", parquet_paths.len());
// Read and merge all parquet files
let mut all_dfs = Vec::new();
for path in &parquet_paths {
println!("Reading {path:?}");
let file = std::fs::File::open(path)?;
let df = ParquetReader::new(file).finish().map_err(|e| {
let message = format!("Failed to read parquet file {path:?}: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
all_dfs.push(df);
}
let lazy_frames: Vec<LazyFrame> = all_dfs.into_iter().map(|df| df.lazy()).collect();
let merged_df = concat_lf_diagonal(&lazy_frames, UnionArgs::default())
.map_err(|e| {
let message = format!("Failed to concat DataFrames: {e}");
RecorderError::with_source(Box::new(e), message)
})?
.sort(
["publish_at_timestamp"],
SortMultipleOptions::default().with_order_descending(true),
)
.unique(
Some(vec![
"mikan_fansub_id".to_string(),
"mikan_episode_id".to_string(),
]),
UniqueKeepStrategy::First,
)
.collect()
.map_err(|e| {
let message = format!("Failed to collect lazy DataFrame: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
fn select_columns_and_write(
merged_df: DataFrame,
name: &str,
columns: &[&str],
) -> RecorderResult<()> {
let result_df = merged_df
.lazy()
.sort(["publish_at_timestamp"], SortMultipleOptions::default())
.select(columns.iter().map(|c| col(*c)).collect_vec())
.collect()
.map_err(|e| {
let message = format!("Failed to sort and select columns: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
let output_path = TEST_FOLDER.join(format!("parquet/{name}.parquet"));
let mut output_file = std::fs::File::create(&output_path)?;
ParquetWriter::new(&mut output_file)
.set_parallel(true)
.with_compression(ParquetCompression::Zstd(Some(
ZstdLevel::try_new(22).unwrap(),
)))
.finish(&mut result_df.clone())
.map_err(|e| {
let message = format!("Failed to write merged parquet file: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
println!("Merged {} rows into {output_path:?}", result_df.height());
Ok(())
}
select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
// select_columns_and_write(
// merged_df.clone(),
// "lite",
// &[
// "mikan_fansub_id",
// "fansub_name",
// "mikan_episode_id",
// "original_name",
// ],
// )?;
// select_columns_and_write(
// merged_df,
// "full",
// &[
// "id",
// "publish_at_timestamp",
// "mikan_fansub_id",
// "fansub_name",
// "mikan_episode_id",
// "original_name",
// "magnet_link",
// "file_size",
// "torrent_link",
// ],
// )?;
Ok(())
}
#[tokio::main]
async fn main() -> RecorderResult<()> {
std::fs::create_dir_all(TEST_FOLDER.join("html"))?;
std::fs::create_dir_all(TEST_FOLDER.join("parquet"))?;
std::fs::create_dir_all(TEST_FOLDER.join("csv"))?;
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
http_client: HttpClientConfig {
exponential_backoff_max_retries: Some(3),
leaky_bucket_max_tokens: Some(2),
leaky_bucket_initial_tokens: Some(1),
leaky_bucket_refill_tokens: Some(1),
leaky_bucket_refill_interval: Some(std::time::Duration::from_millis(1000)),
user_agent: Some(
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
.to_string(),
),
..Default::default()
},
base_url: Url::parse("https://mikanani.me")?,
})
.await?;
let first_page_and_pagination_info =
scrape_mikan_classic_episode_table_page(&mikan_scrape_client, 1, None).await?;
let total_page = first_page_and_pagination_info.total;
first_page_and_pagination_info.save_to_files()?;
let next_rev_ids = MikanClassicEpisodeTablePage::waiting_rev_ids(total_page)?;
for todo_rev_id in next_rev_ids {
let page = scrape_mikan_classic_episode_table_page_from_rev_id(
&mikan_scrape_client,
total_page,
todo_rev_id,
)
.await?;
page.save_to_files()?;
}
// Merge all parquet files
println!("\nMerging all parquet files...");
merge_mikan_classic_episodes_and_strip_columns().await?;
println!("Merge completed!");
Ok(())
}
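
Given the `[[example]]` sections added to the recorder Cargo.toml above, this collector is presumably invoked as `cargo run -p recorder --example mikan_collect_classic_eps --features playground`; the `playground` feature is what pulls in the optional `polars` dependency used here for the parquet/csv output. Note the scrape is resumable: `waiting_rev_ids` skips any page whose `rev_*.csv` already exists, so re-running the example only fetches the missing pages before the final merge.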

examples/mikan_doppel_season_subscription.rs

@@ -1,4 +1,4 @@
-use std::time::Duration;
+use std::{str::FromStr, time::Duration};

 use color_eyre::{Result, eyre::OptionExt};
 use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
@@ -6,7 +6,8 @@ use inquire::{Password, Text, validator::Validation};
 use recorder::{
     crypto::UserPassCredential,
     extract::mikan::{
-        MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
+        MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
+        build_mikan_bangumi_expand_subscribed_url,
         extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
         extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
     },
@@ -190,10 +191,10 @@
         );
         String::from_utf8(bangumi_rss_doppel_path.read()?)?
     };
-    let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
+    let rss_items = MikanRssRoot::from_str(&bangumi_rss_data)?.channel.items;
     rss_items
         .into_iter()
-        .map(MikanRssEpisodeItem::try_from)
+        .map(MikanRssItemMeta::try_from)
         .collect::<Result<Vec<_>, _>>()
 }?;
 for rss_item in rss_items {
@@ -212,7 +213,7 @@
     };
 }
 {
-    let episode_torrent_url = rss_item.url;
+    let episode_torrent_url = rss_item.torrent_link;
     let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
     tracing::info!(title = rss_item.title, "Scraping episode torrent...");
     if !episode_torrent_doppel_path.exists_any() {

examples/mikan_doppel_subscriber_subscription.rs

@@ -1,10 +1,10 @@
-use std::time::Duration;
+use std::{str::FromStr, time::Duration};

 use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
 use recorder::{
     errors::RecorderResult,
     extract::mikan::{
-        MikanClient, MikanConfig, MikanRssEpisodeItem,
+        MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
         extract_mikan_episode_meta_from_episode_homepage_html,
     },
     test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
@@ -41,12 +41,12 @@
 let mikan_base_url = mikan_scrape_client.base_url().clone();
 tracing::info!("Scraping subscriber subscription...");
 let subscriber_subscription =
-    fs::read("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
+    fs::read_to_string("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
-let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
+let channel = MikanRssRoot::from_str(&subscriber_subscription)?.channel;
-let rss_items: Vec<MikanRssEpisodeItem> = channel
+let rss_items: Vec<MikanRssItemMeta> = channel
     .items
     .into_iter()
-    .map(MikanRssEpisodeItem::try_from)
+    .map(MikanRssItemMeta::try_from)
     .collect::<Result<Vec<_>, _>>()?;
 for rss_item in rss_items {
     let episode_homepage_meta = {
@@ -72,7 +72,7 @@
 }?;
 {
-    let episode_torrent_url = rss_item.url;
+    let episode_torrent_url = rss_item.torrent_link;
     let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
     tracing::info!(title = rss_item.title, "Scraping episode torrent...");
     if !episode_torrent_doppel_path.exists_any() {
@@ -150,11 +150,11 @@
     String::from_utf8(bangumi_rss_doppel_path.read()?)?
 };
-let channel = rss::Channel::read_from(bangumi_rss_data.as_bytes())?;
-let rss_items: Vec<MikanRssEpisodeItem> = channel
+let rss_items: Vec<MikanRssItemMeta> = MikanRssRoot::from_str(&bangumi_rss_data)?
+    .channel
     .items
     .into_iter()
-    .map(MikanRssEpisodeItem::try_from)
+    .map(MikanRssItemMeta::try_from)
     .collect::<Result<Vec<_>, _>>()?;
 for rss_item in rss_items {
     {
@@ -173,7 +173,7 @@
     };
     {
-        let episode_torrent_url = rss_item.url;
+        let episode_torrent_url = rss_item.torrent_link;
         let episode_torrent_doppel_path =
             MikanDoppelPath::new(episode_torrent_url.clone());
         tracing::info!(title = rss_item.title, "Scraping episode torrent...");

package.json (recorder app, new file)

@@ -0,0 +1,6 @@
{
"name": "recorder",
"version": "0.0.1",
"private": true,
"type": "module"
}

recorder config template (TOML)

@@ -4,8 +4,8 @@
 enable = true
 # Enable pretty backtrace (sets RUST_BACKTRACE=1)
 pretty_backtrace = true
-level = "info"
 # Log level, options: trace, debug, info, warn or error.
+level = "debug"
 # Define the logging format. options: compact, pretty or Json
 format = "compact"
 # By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries
@@ -26,25 +26,25 @@ host = '{{ get_env(name="HOST", default="localhost") }}'
 enable = true
 # Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
-[server.middleware.request_id]
+[server.middlewares.request_id]
 enable = true

-[server.middleware.logger]
+[server.middlewares.logger]
 enable = true

 # when your code is panicked, the request still returns 500 status code.
-[server.middleware.catch_panic]
+[server.middlewares.catch_panic]
 enable = true

 # Timeout for incoming requests middleware. requests that take more time from the configuration will cute and 408 status code will returned.
-[server.middleware.timeout_request]
+[server.middlewares.timeout_request]
 enable = false
 # Duration time in milliseconds.
 timeout = 5000

 # Set the value of the [`Access-Control-Allow-Origin`][mdn] header
 # allow_origins:
-# - https://loco.rs
+# - https://konobangu.com
 # Set the value of the [`Access-Control-Allow-Headers`][mdn] header
 # allow_headers:
 # - Content-Type
@@ -53,7 +53,10 @@ timeout = 5000
 # - POST
 # Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
 # max_age: 3600
-[server.middleware.cors]
+[server.middlewares.cors]
+enable = true
+
+[server.middlewares.compression]
 enable = true

 # Database Configuration
@@ -74,7 +77,7 @@ max_connections = 10
 auto_migrate = true

 [storage]
-data_dir = '{{ get_env(name="STORAGE_DATA_DIR", default="./data") }}'
+data_dir = './data'

 [mikan]
 base_url = "https://mikanani.me/"
@@ -86,26 +89,6 @@ leaky_bucket_initial_tokens = 1
 leaky_bucket_refill_tokens = 1
 leaky_bucket_refill_interval = 500

-[mikan.http_client.proxy]
-server = '{{ get_env(name="MIKAN_PROXY", default = "") }}'
-auth_header = '{{ get_env(name="MIKAN_PROXY_AUTH_HEADER", default = "") }}'
-no_proxy = '{{ get_env(name="MIKAN_NO_PROXY", default = "") }}'
-accept_invalid_certs = '{{ get_env(name="MIKAN_PROXY_ACCEPT_INVALID_CERTS", default = "false") }}'
-
-[auth]
-auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
-basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
-basic_password = '{{ get_env(name="BASIC_PASSWORD", default = "konobangu") }}'
-oidc_issuer = '{{ get_env(name="OIDC_ISSUER", default = "") }}'
-oidc_audience = '{{ get_env(name="OIDC_AUDIENCE", default = "") }}'
-oidc_client_id = '{{ get_env(name="OIDC_CLIENT_ID", default = "") }}'
-oidc_client_secret = '{{ get_env(name="OIDC_CLIENT_SECRET", default = "") }}'
-oidc_extra_scopes = '{{ get_env(name="OIDC_EXTRA_SCOPES", default = "") }}'
-oidc_extra_claim_key = '{{ get_env(name="OIDC_EXTRA_CLAIM_KEY", default = "") }}'
-oidc_extra_claim_value = '{{ get_env(name="OIDC_EXTRA_CLAIM_VALUE", default = "") }}'
-
 [graphql]
 # depth_limit = inf
 # complexity_limit = inf

recorder AppBuilder source

@@ -21,6 +21,9 @@ pub struct MainCliArgs {
     /// Explicit environment
     #[arg(short, long)]
     environment: Option<Environment>,
+
+    #[arg(long)]
+    graceful_shutdown: Option<bool>,
 }

 pub struct AppBuilder {
@@ -28,6 +31,7 @@
     config_file: Option<String>,
     working_dir: String,
     environment: Environment,
+    pub graceful_shutdown: bool,
 }

 impl AppBuilder {
@@ -61,12 +65,18 @@
         builder = builder
             .config_file(args.config_file)
             .dotenv_file(args.dotenv_file)
-            .environment(environment);
+            .environment(environment)
+            .graceful_shutdown(args.graceful_shutdown.unwrap_or(true));

         Ok(builder)
     }

     pub async fn build(self) -> RecorderResult<App> {
+        if self.working_dir != "." {
+            std::env::set_current_dir(&self.working_dir)?;
+            println!("set current dir to working dir: {}", self.working_dir);
+        }
+
         self.load_env().await?;

         let config = self.load_config().await?;
@@ -81,22 +91,12 @@
     }

     pub async fn load_env(&self) -> RecorderResult<()> {
-        AppConfig::load_dotenv(
-            &self.environment,
-            &self.working_dir,
-            self.dotenv_file.as_deref(),
-        )
-        .await?;
+        AppConfig::load_dotenv(&self.environment, self.dotenv_file.as_deref()).await?;
         Ok(())
     }

     pub async fn load_config(&self) -> RecorderResult<AppConfig> {
-        let config = AppConfig::load_config(
-            &self.environment,
-            &self.working_dir,
-            self.config_file.as_deref(),
-        )
-        .await?;
+        let config = AppConfig::load_config(&self.environment, self.config_file.as_deref()).await?;
         Ok(config)
     }
@@ -118,6 +118,12 @@
         ret
     }

+    pub fn graceful_shutdown(self, graceful_shutdown: bool) -> Self {
+        let mut ret = self;
+        ret.graceful_shutdown = graceful_shutdown;
+        ret
+    }
+
     pub fn dotenv_file(self, dotenv_file: Option<String>) -> Self {
         let mut ret = self;
         ret.dotenv_file = dotenv_file;
@@ -125,11 +131,12 @@
     }

     pub fn working_dir_from_manifest_dir(self) -> Self {
-        let manifest_dir = if cfg!(debug_assertions) || cfg!(test) {
-            env!("CARGO_MANIFEST_DIR")
-        } else {
-            "./apps/recorder"
-        };
+        #[cfg(any(test, debug_assertions, feature = "test-utils"))]
+        let manifest_dir = env!("CARGO_MANIFEST_DIR");
+
+        #[cfg(not(any(test, debug_assertions, feature = "test-utils")))]
+        let manifest_dir = "./apps/recorder";
+
         self.working_dir(manifest_dir.to_string())
     }
 }
@@ -141,6 +148,7 @@ impl Default for AppBuilder {
             dotenv_file: None,
             config_file: None,
             working_dir: String::from("."),
+            graceful_shutdown: true,
         }
     }
 }

default_mixin.toml (recorder config mixin)

@@ -11,6 +11,7 @@
 leaky_bucket_initial_tokens = 0
 leaky_bucket_refill_tokens = 1
 leaky_bucket_refill_interval = 500
+
 [mikan.http_client.proxy]

 [mikan.http_client.proxy.headers]
@@ -26,3 +27,5 @@ complexity_limit = inf
 [task]

 [message]
+
+[media]

recorder AppConfig source

@@ -1,8 +1,13 @@
-use std::{fs, path::Path, str};
+use std::{
+    collections::HashMap,
+    fs,
+    path::Path,
+    str::{self, FromStr},
+};

 use figment::{
     Figment, Provider,
-    providers::{Format, Json, Toml, Yaml},
+    providers::{Env, Format, Json, Toml, Yaml},
 };
 use itertools::Itertools;
 use serde::{Deserialize, Serialize};
@@ -11,8 +16,8 @@ use super::env::Environment;
 use crate::{
     auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
     errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
-    logger::LoggerConfig, message::MessageConfig, storage::StorageConfig, task::TaskConfig,
-    web::WebServerConfig,
+    logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
+    task::TaskConfig, web::WebServerConfig,
 };

 const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
@@ -27,6 +32,7 @@ pub struct AppConfig {
     pub mikan: MikanConfig,
     pub crypto: CryptoConfig,
     pub graphql: GraphQLConfig,
+    pub media: MediaConfig,
     pub logger: LoggerConfig,
     pub database: DatabaseConfig,
     pub task: TaskConfig,
@@ -54,8 +60,8 @@ impl AppConfig {
         format!(".{}.local", environment.full_name()),
         format!(".{}.local", environment.short_name()),
         String::from(".local"),
-        environment.full_name().to_string(),
-        environment.short_name().to_string(),
+        format!(".{}", environment.full_name()),
+        format!(".{}", environment.short_name()),
         String::from(""),
     ]
 }
@@ -64,6 +70,102 @@
     Toml::string(DEFAULT_CONFIG_MIXIN)
 }

+fn build_enhanced_tera_engine() -> tera::Tera {
+    let mut tera = tera::Tera::default();
+    tera.register_filter(
+        "cast_to",
+        |value: &tera::Value,
+         args: &HashMap<String, tera::Value>|
+         -> tera::Result<tera::Value> {
+            let target_type = args
+                .get("type")
+                .and_then(|v| v.as_str())
+                .ok_or_else(|| tera::Error::msg("invalid target type: should be string"))?;
+            let target_type = TeraCastToFilterType::from_str(target_type)
+                .map_err(|e| tera::Error::msg(format!("invalid target type: {e}")))?;
+            let input_str = value.as_str().unwrap_or("");
+            match target_type {
+                TeraCastToFilterType::Boolean => {
+                    let is_true = matches!(input_str.to_lowercase().as_str(), "true" | "1");
+                    let is_false = matches!(input_str.to_lowercase().as_str(), "false" | "0");
+                    if is_true {
+                        Ok(tera::Value::Bool(true))
+                    } else if is_false {
+                        Ok(tera::Value::Bool(false))
+                    } else {
+                        Err(tera::Error::msg(
+                            "target type is bool but value is not a boolean like true, false, \
+                             1, 0",
+                        ))
+                    }
+                }
+                TeraCastToFilterType::Integer => {
+                    let parsed = input_str.parse::<i64>().map_err(|e| {
+                        tera::Error::call_filter("invalid integer".to_string(), e)
+                    })?;
+                    Ok(tera::Value::Number(serde_json::Number::from(parsed)))
+                }
+                TeraCastToFilterType::Unsigned => {
+                    let parsed = input_str.parse::<u64>().map_err(|e| {
+                        tera::Error::call_filter("invalid unsigned integer".to_string(), e)
+                    })?;
+                    Ok(tera::Value::Number(serde_json::Number::from(parsed)))
+                }
+                TeraCastToFilterType::Float => {
+                    let parsed = input_str.parse::<f64>().map_err(|e| {
+                        tera::Error::call_filter("invalid float".to_string(), e)
+                    })?;
+                    Ok(tera::Value::Number(
+                        serde_json::Number::from_f64(parsed).ok_or_else(|| {
+                            tera::Error::msg("failed to convert f64 to serde_json::Number")
+                        })?,
+                    ))
+                }
+                TeraCastToFilterType::String => Ok(tera::Value::String(input_str.to_string())),
+                TeraCastToFilterType::Null => Ok(tera::Value::Null),
+            }
+        },
+    );
+    tera.register_filter(
+        "try_auto_cast",
+        |value: &tera::Value,
+         _args: &HashMap<String, tera::Value>|
+         -> tera::Result<tera::Value> {
+            let input_str = value.as_str().unwrap_or("");
+            if input_str == "null" {
+                return Ok(tera::Value::Null);
+            }
+            if matches!(input_str, "true" | "false") {
+                return Ok(tera::Value::Bool(input_str == "true"));
+            }
+            if let Ok(parsed) = input_str.parse::<i64>() {
+                return Ok(tera::Value::Number(serde_json::Number::from(parsed)));
+            }
+            if let Ok(parsed) = input_str.parse::<u64>() {
+                return Ok(tera::Value::Number(serde_json::Number::from(parsed)));
+            }
+            if let Ok(parsed) = input_str.parse::<f64>() {
+                return Ok(tera::Value::Number(
+                    serde_json::Number::from_f64(parsed).ok_or_else(|| {
+                        tera::Error::msg("failed to convert f64 to serde_json::Number")
+                    })?,
+                ));
+            }
+            Ok(tera::Value::String(input_str.to_string()))
+        },
+    );
+    tera
+}
+
 pub fn merge_provider_from_file(
     fig: Figment,
     filepath: impl AsRef<Path>,
@@ -71,11 +173,9 @@
 ) -> RecorderResult<Figment> {
     let content = fs::read_to_string(filepath)?;

-    let rendered = tera::Tera::one_off(
-        &content,
-        &tera::Context::from_value(serde_json::json!({}))?,
-        false,
-    )?;
+    let mut tera_engine = AppConfig::build_enhanced_tera_engine();
+    let rendered =
+        tera_engine.render_str(&content, &tera::Context::from_value(serde_json::json!({}))?)?;

     Ok(match ext {
         ".toml" => fig.merge(Toml::string(&rendered)),
@@ -87,13 +187,12 @@
 pub async fn load_dotenv(
     environment: &Environment,
-    working_dir: &str,
     dotenv_file: Option<&str>,
 ) -> RecorderResult<()> {
     let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
         vec![dotenv_file]
     } else {
-        vec![Some(working_dir)]
+        vec![Some(".")]
     };

     let priority_suffix = &AppConfig::priority_suffix(environment);
@@ -110,11 +209,16 @@
         for f in try_filenames.iter() {
             let p = try_dotenv_file_or_dir_path.join(f);
             if p.exists() && p.is_file() {
+                println!("Loading dotenv file: {}", p.display());
                 dotenvy::from_path(p)?;
                 break;
             }
         }
     } else if try_dotenv_file_or_dir_path.is_file() {
+        println!(
+            "Loading dotenv file: {}",
+            try_dotenv_file_or_dir_path.display()
+        );
         dotenvy::from_path(try_dotenv_file_or_dir_path)?;
         break;
     }
@@ -126,13 +230,12 @@
 pub async fn load_config(
     environment: &Environment,
-    working_dir: &str,
     config_file: Option<&str>,
 ) -> RecorderResult<AppConfig> {
     let try_config_file_or_dirs = if config_file.is_some() {
         vec![config_file]
     } else {
-        vec![Some(working_dir)]
+        vec![Some(".")]
     };

     let allowed_extensions = &AppConfig::allowed_extension();
@@ -158,6 +261,7 @@
             let p = try_config_file_or_dir_path.join(f);
             if p.exists() && p.is_file() {
                 fig = AppConfig::merge_provider_from_file(fig, &p, ext)?;
+                println!("Loaded config file: {}", p.display());
                 break;
             }
         }
@@ -168,13 +272,52 @@
     {
         fig =
             AppConfig::merge_provider_from_file(fig, try_config_file_or_dir_path, ext)?;
+        println!(
+            "Loaded config file: {}",
+            try_config_file_or_dir_path.display()
+        );
         break;
     }
     }
     }

+    fig = fig.merge(Env::prefixed("").split("__").lowercase(true));
+
     let app_config: AppConfig = fig.extract()?;
     Ok(app_config)
 }
 }
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+enum TeraCastToFilterType {
+    #[serde(alias = "str")]
+    String,
+    #[serde(alias = "bool")]
+    Boolean,
+    #[serde(alias = "int")]
+    Integer,
+    #[serde(alias = "uint")]
+    Unsigned,
+    #[serde(alias = "float")]
+    Float,
+    #[serde(alias = "null")]
Null,
}
impl FromStr for TeraCastToFilterType {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"string" | "str" => Ok(TeraCastToFilterType::String),
"boolean" | "bool" => Ok(TeraCastToFilterType::Boolean),
"integer" | "int" => Ok(TeraCastToFilterType::Integer),
"unsigned" | "uint" => Ok(TeraCastToFilterType::Unsigned),
"float" => Ok(TeraCastToFilterType::Float),
"null" => Ok(TeraCastToFilterType::Null),
_ => Err(format!("invalid target type: {s}")),
}
}
}
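
Taken together, the two filters let environment-style string values land in the rendered config as properly typed TOML. Below is a minimal standalone sketch of the try_auto_cast behavior; the filter body mirrors the diff, while the surrounding main and the port_str variable are illustrative only.

```rust
use std::collections::HashMap;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut tera = tera::Tera::default();
    tera.register_filter(
        "try_auto_cast",
        |value: &tera::Value,
         _args: &HashMap<String, tera::Value>|
         -> tera::Result<tera::Value> {
            let s = value.as_str().unwrap_or("");
            if s == "null" {
                return Ok(tera::Value::Null);
            }
            if matches!(s, "true" | "false") {
                return Ok(tera::Value::Bool(s == "true"));
            }
            if let Ok(i) = s.parse::<i64>() {
                return Ok(tera::Value::Number(i.into()));
            }
            Ok(tera::Value::String(s.to_string()))
        },
    );

    // "5001" (a string, as env values always are) becomes the TOML integer 5001.
    let rendered = tera.render_str(
        r#"port = {{ port_str | try_auto_cast }}"#,
        &tera::Context::from_value(serde_json::json!({ "port_str": "5001" }))?,
    )?;
    assert_eq!(rendered.trim(), "port = 5001");
    Ok(())
}
```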


@@ -4,16 +4,9 @@ use tokio::sync::OnceCell;
use super::{Environment, config::AppConfig}; use super::{Environment, config::AppConfig};
use crate::{ use crate::{
auth::AuthService, auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
cache::CacheService, errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
crypto::CryptoService, logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
database::DatabaseService,
errors::RecorderResult,
extract::mikan::MikanClient,
graphql::GraphQLService,
logger::LoggerService,
message::MessageService,
storage::{StorageService, StorageServiceTrait},
task::TaskService, task::TaskService,
}; };
@@ -25,12 +18,13 @@ pub trait AppContextTrait: Send + Sync + Debug {
fn mikan(&self) -> &MikanClient; fn mikan(&self) -> &MikanClient;
fn auth(&self) -> &AuthService; fn auth(&self) -> &AuthService;
fn graphql(&self) -> &GraphQLService; fn graphql(&self) -> &GraphQLService;
fn storage(&self) -> &dyn StorageServiceTrait; fn storage(&self) -> &StorageService;
fn working_dir(&self) -> &String; fn working_dir(&self) -> &String;
fn environment(&self) -> &Environment; fn environment(&self) -> &Environment;
fn crypto(&self) -> &CryptoService; fn crypto(&self) -> &CryptoService;
fn task(&self) -> &TaskService; fn task(&self) -> &TaskService;
fn message(&self) -> &MessageService; fn message(&self) -> &MessageService;
fn media(&self) -> &MediaService;
} }
pub struct AppContext { pub struct AppContext {
@@ -45,6 +39,7 @@ pub struct AppContext {
working_dir: String, working_dir: String,
environment: Environment, environment: Environment,
message: MessageService, message: MessageService,
media: MediaService,
task: OnceCell<TaskService>, task: OnceCell<TaskService>,
graphql: OnceCell<GraphQLService>, graphql: OnceCell<GraphQLService>,
} }
@@ -65,6 +60,7 @@ impl AppContext {
let auth = AuthService::from_conf(config.auth).await?; let auth = AuthService::from_conf(config.auth).await?;
let mikan = MikanClient::from_config(config.mikan).await?; let mikan = MikanClient::from_config(config.mikan).await?;
let crypto = CryptoService::from_config(config.crypto).await?; let crypto = CryptoService::from_config(config.crypto).await?;
let media = MediaService::from_config(config.media).await?;
let ctx = Arc::new(AppContext { let ctx = Arc::new(AppContext {
config: config_cloned, config: config_cloned,
@@ -78,6 +74,7 @@ impl AppContext {
working_dir: working_dir.to_string(), working_dir: working_dir.to_string(),
crypto, crypto,
message, message,
media,
task: OnceCell::new(), task: OnceCell::new(),
graphql: OnceCell::new(), graphql: OnceCell::new(),
}); });
@@ -126,7 +123,7 @@ impl AppContextTrait for AppContext {
fn graphql(&self) -> &GraphQLService { fn graphql(&self) -> &GraphQLService {
self.graphql.get().expect("graphql should be set") self.graphql.get().expect("graphql should be set")
} }
fn storage(&self) -> &dyn StorageServiceTrait { fn storage(&self) -> &StorageService {
&self.storage &self.storage
} }
fn working_dir(&self) -> &String { fn working_dir(&self) -> &String {
@@ -144,4 +141,7 @@ impl AppContextTrait for AppContext {
fn message(&self) -> &MessageService { fn message(&self) -> &MessageService {
&self.message &self.message
} }
fn media(&self) -> &MediaService {
&self.media
}
} }
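
A hedged sketch of how downstream code consumes the context after this change; the describe function is hypothetical, and only accessors shown in the trait above are used.

```rust
// Components receive the context as a trait object and pull services off it;
// media() is the accessor added in this change, and storage() now returns
// the concrete &StorageService instead of a trait object.
fn describe(ctx: &dyn AppContextTrait) {
    let _media = ctx.media();
    let _storage = ctx.storage();
    tracing::debug!(working_dir = %ctx.working_dir(), "app context ready");
}
```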


@@ -1,11 +1,13 @@
use std::{net::SocketAddr, sync::Arc}; use std::{net::SocketAddr, sync::Arc};
use axum::Router; use axum::{Router, middleware::from_fn_with_state};
use tokio::{net::TcpSocket, signal}; use tokio::{net::TcpSocket, signal};
use tower_http::services::{ServeDir, ServeFile};
use tracing::instrument; use tracing::instrument;
use super::{builder::AppBuilder, context::AppContextTrait}; use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{ use crate::{
auth::webui_auth_middleware,
errors::{RecorderError, RecorderResult}, errors::{RecorderError, RecorderResult},
web::{ web::{
controller::{self, core::ControllerTrait}, controller::{self, core::ControllerTrait},
@@ -13,6 +15,8 @@ use crate::{
}, },
}; };
pub const PROJECT_NAME: &str = "konobangu";
pub struct App { pub struct App {
pub context: Arc<dyn AppContextTrait>, pub context: Arc<dyn AppContextTrait>,
pub builder: AppBuilder, pub builder: AppBuilder,
@@ -51,20 +55,30 @@ impl App {
let mut router = Router::<Arc<dyn AppContextTrait>>::new(); let mut router = Router::<Arc<dyn AppContextTrait>>::new();
let (graphql_c, oidc_c, metadata_c) = futures::try_join!( let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
controller::graphql::create(context.clone()), controller::graphql::create(context.clone()),
controller::oidc::create(context.clone()), controller::oidc::create(context.clone()),
controller::metadata::create(context.clone()) controller::metadata::create(context.clone()),
controller::r#static::create(context.clone()),
controller::feeds::create(context.clone())
)?; )?;
for c in [graphql_c, oidc_c, metadata_c] { for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
router = c.apply_to(router); router = c.apply_to(router);
} }
router = router
.fallback_service(
ServeDir::new("webui").not_found_service(ServeFile::new("webui/index.html")),
)
.layer(from_fn_with_state(context.clone(), webui_auth_middleware));
let middlewares = default_middleware_stack(context.clone()); let middlewares = default_middleware_stack(context.clone());
for mid in middlewares { for mid in middlewares {
router = mid.apply(router)?; if mid.is_enabled() {
tracing::info!(name = mid.name(), "+middleware"); router = mid.apply(router)?;
tracing::info!(name = mid.name(), "+middleware");
}
} }
let router = router let router = router
@@ -73,32 +87,32 @@ impl App {
let task = context.task(); let task = context.task();
let graceful_shutdown = self.builder.graceful_shutdown;
tokio::try_join!( tokio::try_join!(
async { async {
axum::serve(listener, router) let axum_serve = axum::serve(listener, router);
.with_graceful_shutdown(async move {
Self::shutdown_signal().await;
tracing::info!("axum shutting down...");
})
.await?;
Ok::<(), RecorderError>(())
},
async {
let monitor = task.setup_monitor().await?;
monitor if graceful_shutdown {
.run_with_signal(async move { axum_serve
Self::shutdown_signal().await; .with_graceful_shutdown(async move {
tracing::info!("apalis shutting down..."); Self::shutdown_signal().await;
Ok(()) tracing::info!("axum shutting down...");
}) })
.await?; .await?;
} else {
axum_serve.await?;
}
Ok::<(), RecorderError>(()) Ok::<(), RecorderError>(())
}, },
async { async {
let listener = task.setup_listener().await?; task.run_with_signal(if graceful_shutdown {
listener.listen().await?; Some(Self::shutdown_signal)
} else {
None
})
.await?;
Ok::<(), RecorderError>(()) Ok::<(), RecorderError>(())
} }
@@ -134,7 +148,7 @@ impl App {
#[cfg(not(unix))] #[cfg(not(unix))]
let terminate = std::future::pending::<()>(); let terminate = std::future::pending::<()>();
#[cfg(all(not(unix), debug_assertions))] #[cfg(not(all(unix, debug_assertions)))]
let quit = std::future::pending::<()>(); let quit = std::future::pending::<()>();
tokio::select! { tokio::select! {
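
The hunk cuts off inside shutdown_signal, so for reference here is a standalone sketch of the Ctrl+C/SIGTERM select pattern it implements (assuming tokio with the signal feature enabled; the debug-only quit branch from the diff is omitted):

```rust
// Resolves when either Ctrl+C or, on Unix, SIGTERM arrives.
async fn shutdown_signal() {
    let ctrl_c = async {
        tokio::signal::ctrl_c()
            .await
            .expect("failed to install Ctrl+C handler");
    };

    #[cfg(unix)]
    let terminate = async {
        tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
            .expect("failed to install SIGTERM handler")
            .recv()
            .await;
    };

    #[cfg(not(unix))]
    let terminate = std::future::pending::<()>();

    tokio::select! {
        _ = ctrl_c => {},
        _ = terminate => {},
    }
}
```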


@@ -4,7 +4,7 @@ pub mod context;
pub mod core; pub mod core;
pub mod env; pub mod env;
pub use core::App; pub use core::{App, PROJECT_NAME};
pub use builder::AppBuilder; pub use builder::AppBuilder;
pub use config::AppConfig; pub use config::AppConfig;


@@ -9,7 +9,7 @@ use super::{
service::{AuthServiceTrait, AuthUserInfo}, service::{AuthServiceTrait, AuthUserInfo},
}; };
use crate::{ use crate::{
app::AppContextTrait, app::{AppContextTrait, PROJECT_NAME},
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER}, models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
}; };
@@ -86,7 +86,7 @@ impl AuthServiceTrait for BasicAuthService {
} }
fn www_authenticate_header_value(&self) -> Option<HeaderValue> { fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
Some(HeaderValue::from_static(r#"Basic realm="konobangu""#)) Some(HeaderValue::from_str(format!("Basic realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
} }
fn auth_type(&self) -> AuthType { fn auth_type(&self) -> AuthType {


@@ -11,13 +11,14 @@ use openidconnect::{
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use snafu::prelude::*; use snafu::prelude::*;
use util::OptDynErr;
use crate::models::auth::AuthType; use crate::models::auth::AuthType;
#[derive(Debug, Snafu)] #[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))] #[snafu(visibility(pub(crate)))]
pub enum AuthError { pub enum AuthError {
#[snafu(display("Permission denied"))]
PermissionError,
#[snafu(display("Not support auth method"))] #[snafu(display("Not support auth method"))]
NotSupportAuthMethod { NotSupportAuthMethod {
supported: Vec<AuthType>, supported: Vec<AuthType>,
@@ -93,12 +94,6 @@ pub enum AuthError {
column: String, column: String,
context_path: String, context_path: String,
}, },
#[snafu(display("GraphQL permission denied since {field}"))]
GraphqlStaticPermissionError {
#[snafu(source)]
source: OptDynErr,
field: String,
},
} }
impl AuthError { impl AuthError {


@@ -7,7 +7,10 @@ use axum::{
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use crate::{app::AppContextTrait, auth::AuthServiceTrait}; use crate::{
app::AppContextTrait,
auth::{AuthService, AuthServiceTrait},
};
pub async fn auth_middleware( pub async fn auth_middleware(
State(ctx): State<Arc<dyn AppContextTrait>>, State(ctx): State<Arc<dyn AppContextTrait>>,
@@ -38,3 +41,37 @@ pub async fn auth_middleware(
response response
} }
pub async fn webui_auth_middleware(
State(ctx): State<Arc<dyn AppContextTrait>>,
request: Request,
next: Next,
) -> Response {
if (!request.uri().path().starts_with("/api"))
&& let AuthService::Basic(auth_service) = ctx.auth()
{
let (mut parts, body) = request.into_parts();
let mut response = match auth_service
.extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
.await
{
Ok(auth_user_info) => {
let mut request = Request::from_parts(parts, body);
request.extensions_mut().insert(auth_user_info);
next.run(request).await
}
Err(auth_error) => auth_error.into_response(),
};
if let Some(header_value) = auth_service.www_authenticate_header_value() {
response
.headers_mut()
.insert(header::WWW_AUTHENTICATE, header_value);
};
response
} else {
next.run(request).await
}
}


@@ -7,5 +7,5 @@ pub mod service;
pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig}; pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig};
pub use errors::AuthError; pub use errors::AuthError;
pub use middleware::auth_middleware; pub use middleware::{auth_middleware, webui_auth_middleware};
pub use service::{AuthService, AuthServiceTrait, AuthUserInfo}; pub use service::{AuthService, AuthServiceTrait, AuthUserInfo};


@@ -21,7 +21,6 @@ use openidconnect::{
OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse, OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse,
core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata}, core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
}; };
use sea_orm::DbErr;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use snafu::ResultExt; use snafu::ResultExt;
@@ -32,7 +31,11 @@ use super::{
errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu}, errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
service::{AuthServiceTrait, AuthUserInfo}, service::{AuthServiceTrait, AuthUserInfo},
}; };
use crate::{app::AppContextTrait, errors::RecorderError, models::auth::AuthType}; use crate::{
app::{AppContextTrait, PROJECT_NAME},
errors::RecorderError,
models::auth::AuthType,
};
pub struct OidcHttpClient(pub Arc<HttpClient>); pub struct OidcHttpClient(pub Arc<HttpClient>);
@@ -334,9 +337,9 @@ impl AuthServiceTrait for OidcAuthService {
} }
} }
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await { let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(RecorderError::DbError { Err(RecorderError::ModelEntityNotFound { .. }) => {
source: DbErr::RecordNotFound(..), crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
}) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await, }
r => r, r => r,
} }
.map_err(|e| { .map_err(|e| {
@@ -351,7 +354,7 @@ impl AuthServiceTrait for OidcAuthService {
} }
fn www_authenticate_header_value(&self) -> Option<HeaderValue> { fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
Some(HeaderValue::from_static(r#"Bearer realm="konobangu""#)) Some(HeaderValue::from_str(format!("Bearer realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
} }
fn auth_type(&self) -> AuthType { fn auth_type(&self) -> AuthType {


@@ -5,8 +5,7 @@ use axum::{
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use fetch::{FetchError, HttpClientError, reqwest, reqwest_middleware}; use fetch::{FetchError, HttpClientError, reqwest, reqwest_middleware};
use http::StatusCode; use http::{HeaderMap, StatusCode};
use serde::{Deserialize, Deserializer, Serialize};
use snafu::Snafu; use snafu::Snafu;
use crate::{ use crate::{
@@ -19,6 +18,30 @@ use crate::{
#[derive(Snafu, Debug)] #[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))] #[snafu(visibility(pub(crate)))]
pub enum RecorderError { pub enum RecorderError {
#[snafu(transparent)]
ChronoTzParseError { source: chrono_tz::ParseError },
#[snafu(transparent)]
SeaographyError { source: seaography::SeaographyError },
#[snafu(transparent)]
CronError { source: croner::errors::CronError },
#[snafu(display(
"HTTP {status} {reason}, source = {source:?}",
status = status,
reason = status.canonical_reason().unwrap_or("Unknown")
))]
HttpResponseError {
status: StatusCode,
headers: Option<HeaderMap>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(transparent)]
ImageError { source: image::ImageError },
#[cfg(feature = "jxl")]
#[snafu(transparent)]
JxlEncodeError { source: jpegxl_rs::EncodeError },
#[snafu(transparent, context(false))]
HttpError { source: http::Error },
#[snafu(transparent, context(false))] #[snafu(transparent, context(false))]
FancyRegexError { FancyRegexError {
#[snafu(source(from(fancy_regex::Error, Box::new)))] #[snafu(source(from(fancy_regex::Error, Box::new)))]
@@ -28,12 +51,16 @@ pub enum RecorderError {
NetAddrParseError { source: std::net::AddrParseError }, NetAddrParseError { source: std::net::AddrParseError },
#[snafu(transparent)] #[snafu(transparent)]
RegexError { source: regex::Error }, RegexError { source: regex::Error },
#[snafu(display("Invalid method"))]
InvalidMethodError,
#[snafu(display("Invalid header value"))]
InvalidHeaderValueError,
#[snafu(transparent)] #[snafu(transparent)]
InvalidMethodError { source: http::method::InvalidMethod }, QuickXmlDeserializeError { source: quick_xml::DeError },
#[snafu(transparent)] #[snafu(display("Invalid header name"))]
InvalidHeaderNameError { InvalidHeaderNameError,
source: http::header::InvalidHeaderName, #[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
}, MissingOriginError,
#[snafu(transparent)] #[snafu(transparent)]
TracingAppenderInitError { TracingAppenderInitError {
source: tracing_appender::rolling::InitError, source: tracing_appender::rolling::InitError,
@@ -73,10 +100,6 @@ pub enum RecorderError {
source: Box<opendal::Error>, source: Box<opendal::Error>,
}, },
#[snafu(transparent)] #[snafu(transparent)]
InvalidHeaderValueError {
source: http::header::InvalidHeaderValue,
},
#[snafu(transparent)]
HttpClientError { source: HttpClientError }, HttpClientError { source: HttpClientError },
#[cfg(feature = "testcontainers")] #[cfg(feature = "testcontainers")]
#[snafu(transparent)] #[snafu(transparent)]
@@ -103,8 +126,13 @@ pub enum RecorderError {
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))] #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr, source: OptDynErr,
}, },
#[snafu(display("Model Entity {entity} not found"))] #[snafu(display("Model Entity {entity} not found or not belong to subscriber{}", (
ModelEntityNotFound { entity: Cow<'static, str> }, detail.as_ref().map(|detail| format!(" : {detail}"))).unwrap_or_default()
))]
ModelEntityNotFound {
entity: Cow<'static, str>,
detail: Option<String>,
},
#[snafu(transparent)] #[snafu(transparent)]
FetchError { source: FetchError }, FetchError { source: FetchError },
#[snafu(display("Credential3rdError: {message}, source = {source}"))] #[snafu(display("Credential3rdError: {message}, source = {source}"))]
@@ -123,9 +151,27 @@ pub enum RecorderError {
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))] #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr, source: OptDynErr,
}, },
#[snafu(display("Invalid task id: {message}"))]
InvalidTaskId { message: String },
} }
impl RecorderError { impl RecorderError {
pub fn from_status(status: StatusCode) -> Self {
Self::HttpResponseError {
status,
headers: None,
source: None.into(),
}
}
pub fn from_status_and_headers(status: StatusCode, headers: HeaderMap) -> Self {
Self::HttpResponseError {
status,
headers: Some(headers),
source: None.into(),
}
}
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self { pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError { Self::MikanMetaMissingFieldError {
field, field,
@@ -150,9 +196,17 @@ impl RecorderError {
} }
} }
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self { pub fn from_entity_not_found<E: sea_orm::EntityTrait>() -> Self {
Self::DbError { Self::ModelEntityNotFound {
source: sea_orm::DbErr::RecordNotFound(detail.to_string()), entity: std::any::type_name::<E::Model>().into(),
detail: None,
}
}
pub fn from_entity_not_found_detail<E: sea_orm::EntityTrait, T: ToString>(detail: T) -> Self {
Self::ModelEntityNotFound {
entity: std::any::type_name::<E::Model>().into(),
detail: Some(detail.to_string()),
} }
} }
} }
@@ -175,10 +229,53 @@ impl snafu::FromString for RecorderError {
} }
} }
impl From<StatusCode> for RecorderError {
fn from(status: StatusCode) -> Self {
Self::HttpResponseError {
status,
headers: None,
source: None.into(),
}
}
}
impl From<(StatusCode, HeaderMap)> for RecorderError {
fn from((status, headers): (StatusCode, HeaderMap)) -> Self {
Self::HttpResponseError {
status,
headers: Some(headers),
source: None.into(),
}
}
}
impl IntoResponse for RecorderError { impl IntoResponse for RecorderError {
fn into_response(self) -> Response { fn into_response(self) -> Response {
match self { match self {
Self::AuthError { source: auth_error } => auth_error.into_response(), Self::AuthError { source: auth_error } => auth_error.into_response(),
Self::HttpResponseError {
status,
headers,
source,
} => {
let message = source
.into_inner()
.map(|s| s.to_string())
.unwrap_or_else(|| {
String::from(status.canonical_reason().unwrap_or("Unknown"))
});
(
status,
headers,
Json::<StandardErrorResponse>(StandardErrorResponse::from(message)),
)
.into_response()
}
merr @ Self::ModelEntityNotFound { .. } => (
StatusCode::NOT_FOUND,
Json::<StandardErrorResponse>(StandardErrorResponse::from(merr.to_string())),
)
.into_response(),
err => ( err => (
StatusCode::INTERNAL_SERVER_ERROR, StatusCode::INTERNAL_SERVER_ERROR,
Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())), Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
@@ -188,28 +285,6 @@ impl IntoResponse for RecorderError {
} }
} }
impl Serialize for RecorderError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
impl<'de> Deserialize<'de> for RecorderError {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(Self::Whatever {
message: s,
source: None.into(),
})
}
}
impl From<reqwest::Error> for RecorderError { impl From<reqwest::Error> for RecorderError {
fn from(error: reqwest::Error) -> Self { fn from(error: reqwest::Error) -> Self {
FetchError::from(error).into() FetchError::from(error).into()
@@ -222,4 +297,28 @@ impl From<reqwest_middleware::Error> for RecorderError {
} }
} }
impl From<http::header::InvalidHeaderValue> for RecorderError {
fn from(_error: http::header::InvalidHeaderValue) -> Self {
Self::InvalidHeaderValueError
}
}
impl From<http::header::InvalidHeaderName> for RecorderError {
fn from(_error: http::header::InvalidHeaderName) -> Self {
Self::InvalidHeaderNameError
}
}
impl From<http::method::InvalidMethod> for RecorderError {
fn from(_error: http::method::InvalidMethod) -> Self {
Self::InvalidMethodError
}
}
impl From<async_graphql::Error> for RecorderError {
fn from(error: async_graphql::Error) -> Self {
seaography::SeaographyError::AsyncGraphQLError(error).into()
}
}
pub type RecorderResult<T> = Result<T, RecorderError>; pub type RecorderResult<T> = Result<T, RecorderError>;
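
A hedged usage sketch for the new constructors; the function and its condition are made up, but from_status and RecorderResult come from the code above.

```rust
// Failing with a bare status now produces a structured JSON error response
// through the IntoResponse arm for HttpResponseError.
fn ensure_visible(is_public: bool) -> RecorderResult<()> {
    if !is_public {
        // Equivalent: RecorderError::from(http::StatusCode::FORBIDDEN)
        return Err(RecorderError::from_status(http::StatusCode::FORBIDDEN));
    }
    Ok(())
}
```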


@@ -1,323 +1,9 @@
use fancy_regex::Regex as FancyRegex; use chrono::{DateTime, Utc};
use lazy_static::lazy_static;
use quirks_path::Path;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};
use crate::{ #[derive(Clone, Debug)]
errors::app_error::{RecorderError, RecorderResult}, pub struct EpisodeEnclosureMeta {
extract::defs::SUBTITLE_LANG, pub magnet_link: Option<String>,
}; pub torrent_link: Option<String>,
pub pub_date: Option<DateTime<Utc>>,
lazy_static! { pub content_length: Option<i64>,
static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
vec![
FancyRegex::new(
r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)",
)
.unwrap(),
FancyRegex::new(
r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)",
)
.unwrap(),
FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
]
};
static ref GET_FANSUB_SPLIT_RE: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
static ref GET_FANSUB_FULL_MATCH_RE: Regex = Regex::new(r"^\d+$").unwrap();
static ref GET_SEASON_AND_TITLE_SUB_RE: Regex = Regex::new(r"([Ss]|Season )\d{1,3}").unwrap();
static ref GET_SEASON_AND_TITLE_FIND_RE: Regex =
Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeMediaMeta {
pub fansub: Option<String>,
pub title: String,
pub season: i32,
pub episode_index: i32,
pub extname: String,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeSubtitleMeta {
pub media: TorrentEpisodeMediaMeta,
pub lang: Option<String>,
}
fn get_fansub(group_and_title: &str) -> (Option<&str>, &str) {
let n = GET_FANSUB_SPLIT_RE
.split(group_and_title)
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>();
match (n.first(), n.get(1)) {
(None, None) => (None, ""),
(Some(n0), None) => (None, *n0),
(Some(n0), Some(n1)) => {
if GET_FANSUB_FULL_MATCH_RE.is_match(n1) {
(None, group_and_title)
} else {
(Some(*n0), *n1)
}
}
_ => unreachable!("vec contains n1 must contains n0"),
}
}
fn get_season_and_title(season_and_title: &str) -> (String, i32) {
let replaced_title = GET_SEASON_AND_TITLE_SUB_RE.replace_all(season_and_title, "");
let title = replaced_title.trim().to_string();
let season = GET_SEASON_AND_TITLE_FIND_RE
.captures(season_and_title)
.map(|m| {
m.get(2)
.unwrap_or_else(|| unreachable!("season regex should have 2 groups"))
.as_str()
.parse::<i32>()
.unwrap_or_else(|_| unreachable!("season should be a number"))
})
.unwrap_or(1);
(title, season)
}
fn get_subtitle_lang(media_name: &str) -> Option<&str> {
let media_name_lower = media_name.to_lowercase();
for (lang, lang_aliases) in SUBTITLE_LANG.iter() {
if lang_aliases
.iter()
.any(|alias| media_name_lower.contains(alias))
{
return Some(lang);
}
}
None
}
pub fn parse_episode_media_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> RecorderResult<TorrentEpisodeMediaMeta> {
let media_name = torrent_path
.file_name()
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {torrent_path}")
})?;
let mut match_obj = None;
for rule in TORRENT_EP_PARSE_RULES.iter() {
match_obj = if let Some(torrent_name) = torrent_name.as_ref() {
rule.captures(torrent_name)?
} else {
rule.captures(media_name)?
};
if match_obj.is_some() {
break;
}
}
if let Some(match_obj) = match_obj {
let group_season_and_title = match_obj
.get(1)
.whatever_context::<_, RecorderError>("should have 1 group")?
.as_str();
let (fansub, season_and_title) = get_fansub(group_season_and_title);
let (title, season) = if let Some(season) = season {
let (title, _) = get_season_and_title(season_and_title);
(title, season)
} else {
get_season_and_title(season_and_title)
};
let episode_index = match_obj
.get(2)
.whatever_context::<_, RecorderError>("should have 2 group")?
.as_str()
.parse::<i32>()
.unwrap_or(1);
let extname = torrent_path
.extension()
.map(|e| format!(".{e}"))
.unwrap_or_default();
Ok(TorrentEpisodeMediaMeta {
fansub: fansub.map(|s| s.to_string()),
title,
season,
episode_index,
extname,
})
} else {
whatever!(
"failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'",
torrent_path,
torrent_name
)
}
}
pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> RecorderResult<TorrentEpisodeSubtitleMeta> {
let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
let media_name = torrent_path
.file_name()
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {torrent_path}")
})?;
let lang = get_subtitle_lang(media_name);
Ok(TorrentEpisodeSubtitleMeta {
media: media_meta,
lang: lang.map(|s| s.to_string()),
})
}
#[cfg(test)]
mod tests {
use quirks_path::Path;
use super::{
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta, parse_episode_media_meta_from_torrent,
parse_episode_subtitle_meta_from_torrent,
};
#[test]
fn test_lilith_raws_media() {
test_torrent_ep_parser(
r#"[Lilith-Raws] Boku no Kokoro no Yabai Yatsu - 01 [Baha][WEB-DL][1080p][AVC AAC][CHT][MP4].mp4"#,
r#"{"fansub": "Lilith-Raws", "title": "Boku no Kokoro no Yabai Yatsu", "season": 1, "episode_index": 1, "extname": ".mp4"}"#,
);
}
#[test]
fn test_sakurato_media() {
test_torrent_ep_parser(
r#"[Sakurato] Tonikaku Kawaii S2 [03][AVC-8bit 1080p AAC][CHS].mp4"#,
r#"{"fansub": "Sakurato", "title": "Tonikaku Kawaii", "season": 2, "episode_index": 3, "extname": ".mp4"}"#,
)
}
#[test]
fn test_lolihouse_media() {
test_torrent_ep_parser(
r#"[SweetSub&LoliHouse] Heavenly Delusion - 08 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#,
r#"{"fansub": "SweetSub&LoliHouse", "title": "Heavenly Delusion", "season": 1, "episode_index": 8, "extname": ".mkv"}"#,
)
}
#[test]
fn test_sbsub_media() {
test_torrent_ep_parser(
r#"[SBSUB][CONAN][1082][V2][1080P][AVC_AAC][CHS_JP](C1E4E331).mp4"#,
r#"{"fansub": "SBSUB", "title": "CONAN", "season": 1, "episode_index": 1082, "extname": ".mp4"}"#,
)
}
#[test]
fn test_non_fansub_media() {
test_torrent_ep_parser(
r#"海盗战记 (2019) S04E11.mp4"#,
r#"{"title": "海盗战记 (2019)", "season": 4, "episode_index": 11, "extname": ".mp4"}"#,
)
}
#[test]
fn test_non_fansub_media_with_dirname() {
test_torrent_ep_parser(
r#"海盗战记/海盗战记 S01E01.mp4"#,
r#"{"title": "海盗战记", "season": 1, "episode_index": 1, "extname": ".mp4"}"#,
);
}
#[test]
fn test_non_fansub_tc_subtitle() {
test_torrent_ep_parser(
r#"海盗战记 S01E08.zh-tw.ass"#,
r#"{"media": { "title": "海盗战记", "season": 1, "episode_index": 8, "extname": ".ass" }, "lang": "zh-tw"}"#,
);
}
#[test]
fn test_non_fansub_sc_subtitle() {
test_torrent_ep_parser(
r#"海盗战记 S01E01.SC.srt"#,
r#"{ "media": { "title": "海盗战记", "season": 1, "episode_index": 1, "extname": ".srt" }, "lang": "zh" }"#,
)
}
#[test]
fn test_non_fansub_media_with_season_zero() {
test_torrent_ep_parser(
r#"水星的魔女(2022) S00E19.mp4"#,
r#"{"fansub": null,"title": "水星的魔女(2022)","season": 0,"episode_index": 19,"extname": ".mp4"}"#,
)
}
#[test]
fn test_shimian_fansub_media() {
test_torrent_ep_parser(
r#"【失眠搬运组】放学后失眠的你-Kimi wa Houkago Insomnia - 06 [bilibili - 1080p AVC1 CHS-JP].mp4"#,
r#"{"fansub": "失眠搬运组","title": "放学后失眠的你-Kimi wa Houkago Insomnia","season": 1,"episode_index": 6,"extname": ".mp4"}"#,
)
}
pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) {
let extname = Path::new(raw_name)
.extension()
.map(|e| format!(".{e}"))
.unwrap_or_default()
.to_lowercase();
if extname == ".srt" || extname == ".ass" {
let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
let found_raw =
parse_episode_subtitle_meta_from_torrent(Path::new(raw_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {
if found_raw.is_ok() {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
} else {
println!(
"expected {} and found {:#?} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
found_raw
)
}
}
assert_eq!(expected, found);
} else {
let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
let found_raw = parse_episode_media_meta_from_torrent(Path::new(raw_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {
if found_raw.is_ok() {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
} else {
println!(
"expected {} and found {:#?} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
found_raw
)
}
}
assert_eq!(expected, found);
}
}
} }


@@ -1,73 +0,0 @@
use std::collections::HashMap;
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use maplit::hashmap;
use regex::Regex;
const LANG_ZH_TW: &str = "zh-tw";
const LANG_ZH: &str = "zh";
const LANG_EN: &str = "en";
const LANG_JP: &str = "jp";
lazy_static! {
pub static ref SEASON_REGEX: Regex =
Regex::new(r"(S\|[Ss]eason\s+)(\d+)").expect("Invalid regex");
pub static ref TORRENT_PRASE_RULE_REGS: Vec<FancyRegex> = vec![
FancyRegex::new(
r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)"
)
.unwrap(),
FancyRegex::new(
r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)"
)
.unwrap(),
FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
];
pub static ref SUBTITLE_LANG: Vec<(&'static str, Vec<&'static str>)> = {
vec![
(LANG_ZH_TW, vec!["tc", "cht", "繁", "zh-tw"]),
(LANG_ZH, vec!["sc", "chs", "简", "zh", "zh-cn"]),
(LANG_EN, vec!["en", "eng", "英"]),
(LANG_JP, vec!["jp", "jpn", "日"]),
]
};
pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
hashmap! {
"" => 0,
"" => 1,
"" => 2,
"" => 3,
"" => 4,
"" => 5,
"" => 6,
"" => 7,
"" => 8,
"" => 9,
"" => 10,
"廿" => 20,
"" => 100,
"" => 1000,
"" => 0,
"" => 1,
"" => 2,
"" => 3,
"" => 4,
"" => 5,
"" => 6,
"" => 7,
"" => 8,
"" => 9,
"" => 10,
"" => 20,
"" => 100,
"" => 1000,
}
};
pub static ref ZH_NUM_RE: Regex =
Regex::new(r"[〇一二三四五六七八九十廿百千零壹贰叁肆伍陆柒捌玖拾念佰仟]").unwrap();
}


@@ -1,7 +1,12 @@
use axum::http::{HeaderName, HeaderValue, Uri, header, request::Parts}; use axum::{
extract::FromRequestParts,
http::{HeaderName, HeaderValue, Uri, header, request::Parts},
};
use itertools::Itertools; use itertools::Itertools;
use url::Url; use url::Url;
use crate::errors::RecorderError;
/// Fields from a "Forwarded" header per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4) /// Fields from a "Forwarded" header per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4)
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ForwardedHeader { pub struct ForwardedHeader {
@@ -101,9 +106,13 @@ pub struct ForwardedRelatedInfo {
pub origin: Option<String>, pub origin: Option<String>,
} }
impl ForwardedRelatedInfo { impl<T> FromRequestParts<T> for ForwardedRelatedInfo {
pub fn from_request_parts(request_parts: &Parts) -> ForwardedRelatedInfo { type Rejection = RecorderError;
let headers = &request_parts.headers; fn from_request_parts(
parts: &mut Parts,
_state: &T,
) -> impl Future<Output = Result<Self, Self::Rejection>> + Send {
let headers = &parts.headers;
let forwarded = headers let forwarded = headers
.get(header::FORWARDED) .get(header::FORWARDED)
.and_then(|s| ForwardedHeader::try_from(s.clone()).ok()); .and_then(|s| ForwardedHeader::try_from(s.clone()).ok());
@@ -132,17 +141,19 @@ impl ForwardedRelatedInfo {
.get(header::ORIGIN) .get(header::ORIGIN)
.and_then(|s| s.to_str().map(String::from).ok()); .and_then(|s| s.to_str().map(String::from).ok());
ForwardedRelatedInfo { futures::future::ready(Ok(ForwardedRelatedInfo {
host, host,
x_forwarded_for, x_forwarded_for,
x_forwarded_host, x_forwarded_host,
x_forwarded_proto, x_forwarded_proto,
forwarded, forwarded,
uri: request_parts.uri.clone(), uri: parts.uri.clone(),
origin, origin,
} }))
} }
}
impl ForwardedRelatedInfo {
pub fn resolved_protocol(&self) -> Option<&str> { pub fn resolved_protocol(&self) -> Option<&str> {
self.forwarded self.forwarded
.as_ref() .as_ref()
@@ -156,6 +167,7 @@ impl ForwardedRelatedInfo {
.as_ref() .as_ref()
.and_then(|s| s.host.as_deref()) .and_then(|s| s.host.as_deref())
.or(self.x_forwarded_host.as_deref()) .or(self.x_forwarded_host.as_deref())
.or(self.host.as_deref())
.or(self.uri.host()) .or(self.uri.host())
} }
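
With FromRequestParts implemented, an axum handler can now take ForwardedRelatedInfo directly as an extractor argument; a hedged sketch (the handler name is hypothetical):

```rust
// The extractor never rejects in practice: it only reads headers and the URI.
async fn origin_probe(info: ForwardedRelatedInfo) -> String {
    info.resolved_protocol().unwrap_or("http").to_string()
}
```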


@@ -2,10 +2,6 @@ use url::Url;
pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> { pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
let mut image_url = base_url.join(image_src).ok()?; let mut image_url = base_url.join(image_src).ok()?;
if let Some((_, value)) = image_url.query_pairs().find(|(key, _)| key == "webp") { image_url.set_query(None);
image_url.set_query(Some(&format!("webp={value}")));
} else {
image_url.set_query(None);
}
Some(image_url) Some(image_url)
} }
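
Previously a webp query pair was carried over; after this change the query string is always dropped. A standalone check of the new behavior (the poster path is made up):

```rust
use url::Url;

fn main() {
    let base = Url::parse("https://mikanani.me/").unwrap();
    let mut image_url = base
        .join("/images/Bangumi/poster.jpg?width=400&webp=0")
        .unwrap();
    // Mirrors the new body of extract_image_src_from_str.
    image_url.set_query(None);
    assert_eq!(
        image_url.as_str(),
        "https://mikanani.me/images/Bangumi/poster.jpg"
    );
}
```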


@@ -4,7 +4,7 @@ use fetch::{HttpClient, HttpClientTrait};
use maplit::hashmap; use maplit::hashmap;
use scraper::{Html, Selector}; use scraper::{Html, Selector};
use sea_orm::{ use sea_orm::{
ActiveModelTrait, ActiveValue::Set, ColumnTrait, DbErr, EntityTrait, QueryFilter, TryIntoModel, ActiveModelTrait, ActiveValue::Set, ColumnTrait, EntityTrait, QueryFilter, TryIntoModel,
}; };
use url::Url; use url::Url;
use util::OptDynErr; use util::OptDynErr;
@@ -227,9 +227,12 @@ impl MikanClient {
self.fork_with_userpass_credential(userpass_credential) self.fork_with_userpass_credential(userpass_credential)
.await .await
} else { } else {
Err(RecorderError::from_db_record_not_found( Err(RecorderError::from_entity_not_found_detail::<
DbErr::RecordNotFound(format!("credential={credential_id} not found")), credential_3rd::Entity,
)) _,
>(format!(
"credential id {credential_id} not found"
)))
} }
} }


@@ -12,6 +12,7 @@ pub const MIKAN_BANGUMI_POSTER_PATH: &str = "/images/Bangumi";
pub const MIKAN_EPISODE_TORRENT_PATH: &str = "/Download"; pub const MIKAN_EPISODE_TORRENT_PATH: &str = "/Download";
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH: &str = "/RSS/MyBangumi"; pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH: &str = "/RSS/MyBangumi";
pub const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi"; pub const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi";
pub const MIKAN_FANSUB_HOMEPAGE_PATH: &str = "/Home/PublishGroup";
pub const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId"; pub const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId";
pub const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid"; pub const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid";
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY: &str = "token"; pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY: &str = "token";


@@ -2,6 +2,7 @@ mod client;
mod config; mod config;
mod constants; mod constants;
mod credential; mod credential;
mod rss;
mod subscription; mod subscription;
mod web; mod web;
@@ -11,22 +12,26 @@ pub use constants::{
MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH, MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH,
MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH, MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH,
MIKAN_BANGUMI_RSS_PATH, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_EPISODE_TORRENT_PATH, MIKAN_BANGUMI_RSS_PATH, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_EPISODE_TORRENT_PATH,
MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH,
MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY, MIKAN_LOGIN_PAGE_SEARCH, MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_SEASON_STR_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH,
MIKAN_UNKNOWN_FANSUB_ID, MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_UNKNOWN_FANSUB_ID,
MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
}; };
pub use credential::MikanCredentialForm; pub use credential::MikanCredentialForm;
pub use rss::{
MikanRssChannel, MikanRssItem, MikanRssItemMeta, MikanRssItemTorrentExtension, MikanRssRoot,
build_mikan_bangumi_subscription_rss_url, build_mikan_subscriber_subscription_rss_url,
};
pub use subscription::{ pub use subscription::{
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription, MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
}; };
pub use web::{ pub use web::{
MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta, MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssEpisodeItem, MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanFansubHash,
MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionRssUrlMeta, MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionUrlMeta,
build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url, build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url,
build_mikan_bangumi_subscription_rss_url, build_mikan_episode_homepage_url, build_mikan_episode_homepage_url, build_mikan_season_flow_url,
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment, extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
extract_mikan_bangumi_meta_from_expand_subscribed_fragment, extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
extract_mikan_episode_meta_from_episode_homepage_html, extract_mikan_episode_meta_from_episode_homepage_html,


@@ -0,0 +1,215 @@
use std::{borrow::Cow, str::FromStr};
use chrono::{DateTime, Utc};
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
errors::{RecorderResult, app_error::RecorderError},
extract::{
bittorrent::EpisodeEnclosureMeta,
mikan::{
MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_RSS_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
MikanEpisodeHash, build_mikan_episode_homepage_url,
},
},
};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssItemEnclosure {
#[serde(rename = "@type")]
pub r#type: String,
#[serde(rename = "@length")]
pub length: i64,
#[serde(rename = "@url")]
pub url: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct MikanRssItemTorrentExtension {
pub pub_date: String,
pub content_length: i64,
pub link: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssItem {
pub torrent: MikanRssItemTorrentExtension,
pub link: String,
pub title: String,
pub enclosure: MikanRssItemEnclosure,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssChannel {
#[serde(rename = "item", default)]
pub items: Vec<MikanRssItem>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssRoot {
pub channel: MikanRssChannel,
}
impl FromStr for MikanRssRoot {
type Err = RecorderError;
fn from_str(source: &str) -> RecorderResult<Self> {
let me = quick_xml::de::from_str(source)?;
Ok(me)
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanRssItemMeta {
pub title: String,
pub torrent_link: Url,
pub content_length: i64,
pub mime: String,
pub pub_date: Option<DateTime<Utc>>,
pub mikan_episode_id: String,
pub magnet_link: Option<String>,
}
impl MikanRssItemMeta {
pub fn build_homepage_url(&self, mikan_base_url: Url) -> Url {
build_mikan_episode_homepage_url(mikan_base_url, &self.mikan_episode_id)
}
pub fn parse_pub_date(pub_date: &str) -> chrono::ParseResult<DateTime<Utc>> {
DateTime::parse_from_rfc2822(pub_date)
.or_else(|_| DateTime::parse_from_rfc3339(pub_date))
.or_else(|_| DateTime::parse_from_rfc3339(&format!("{pub_date}+08:00")))
.map(|s| s.with_timezone(&Utc))
}
}
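
Mikan feeds have emitted timestamps in several shapes, which is why parse_pub_date chains three formats. A quick sketch of each path (the dates are illustrative):

```rust
// RFC 2822, RFC 3339, and a zoneless local time that falls through to the
// final branch, where +08:00 (Mikan's offset) is assumed.
let rfc2822 = MikanRssItemMeta::parse_pub_date("Tue, 01 Jul 2025 01:33:32 +0800");
let rfc3339 = MikanRssItemMeta::parse_pub_date("2025-07-01T01:33:32+08:00");
let naive = MikanRssItemMeta::parse_pub_date("2025-07-01T01:33:32");
assert!(rfc2822.is_ok() && rfc3339.is_ok() && naive.is_ok());
```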
impl TryFrom<MikanRssItem> for MikanRssItemMeta {
type Error = RecorderError;
fn try_from(item: MikanRssItem) -> Result<Self, Self::Error> {
let torrent = item.torrent;
let enclosure = item.enclosure;
let mime_type = enclosure.r#type;
if mime_type != BITTORRENT_MIME_TYPE {
return Err(RecorderError::MimeError {
expected: String::from(BITTORRENT_MIME_TYPE),
found: mime_type.to_string(),
desc: String::from("MikanRssItem"),
});
}
let title = item.title;
let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
RecorderError::from_mikan_rss_invalid_field_and_source(
"enclosure_url:enclosure.link".into(),
err,
)
})?;
let homepage = Url::parse(&item.link).map_err(|err| {
RecorderError::from_mikan_rss_invalid_field_and_source(
"enclosure_url:enclosure.link".into(),
err,
)
})?;
let MikanEpisodeHash {
mikan_episode_id, ..
} = MikanEpisodeHash::from_homepage_url(&homepage).ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
})?;
Ok(MikanRssItemMeta {
title,
torrent_link: enclosure_url,
content_length: enclosure.length,
mime: mime_type,
pub_date: Self::parse_pub_date(&torrent.pub_date).ok(),
mikan_episode_id,
magnet_link: None,
})
}
}
impl From<MikanRssItemMeta> for EpisodeEnclosureMeta {
fn from(item: MikanRssItemMeta) -> Self {
Self {
magnet_link: item.magnet_link,
torrent_link: Some(item.torrent_link.to_string()),
pub_date: item.pub_date,
content_length: Some(item.content_length),
}
}
}
pub fn build_mikan_subscriber_subscription_rss_url(
mikan_base_url: Url,
mikan_subscription_token: &str,
) -> Url {
let mut url = mikan_base_url;
url.set_path(MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH);
url.query_pairs_mut().append_pair(
MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
mikan_subscription_token,
);
url
}
pub fn build_mikan_bangumi_subscription_rss_url(
mikan_base_url: Url,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> Url {
let mut url = mikan_base_url;
url.set_path(MIKAN_BANGUMI_RSS_PATH);
url.query_pairs_mut()
.append_pair(MIKAN_BANGUMI_ID_QUERY_KEY, mikan_bangumi_id);
if let Some(mikan_fansub_id) = mikan_fansub_id {
url.query_pairs_mut()
.append_pair(MIKAN_FANSUB_ID_QUERY_KEY, mikan_fansub_id);
};
url
}
#[cfg(test)]
mod test {
#![allow(unused_variables)]
use std::fs;
use rstest::{fixture, rstest};
use tracing::Level;
use super::*;
use crate::{errors::RecorderResult, test_utils::tracing::try_init_testing_tracing};
#[fixture]
fn before_each() {
try_init_testing_tracing(Level::DEBUG);
}
#[rstest]
#[test]
fn test_mikan_rss_episode_item_try_from_rss_item(before_each: ()) -> RecorderResult<()> {
let rss_str = fs::read_to_string(
"tests/resources/mikan/doppel/RSS/Bangumi-bangumiId%3D3288%26subgroupid%3D370.html",
)?;
let mut channel = MikanRssRoot::from_str(&rss_str)?.channel;
assert!(!channel.items.is_empty());
let item = channel.items.pop().unwrap();
let episode_item = MikanRssItemMeta::try_from(item.clone())?;
assert!(episode_item.pub_date.is_some());
Ok(())
}
}
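
A usage sketch for the two URL builders defined above, written as a test in the style of the module it follows; the token and ids are made up, and the expected strings follow from the constants in this diff (/RSS/MyBangumi, /RSS/Bangumi, token, bangumiId, subgroupid).

```rust
#[test]
fn rss_url_builders_sketch() {
    let base = Url::parse("https://mikanani.me/").unwrap();

    let sub = build_mikan_subscriber_subscription_rss_url(base.clone(), "some-token");
    assert_eq!(sub.as_str(), "https://mikanani.me/RSS/MyBangumi?token=some-token");

    let bangumi = build_mikan_bangumi_subscription_rss_url(base, "3288", Some("370"));
    assert_eq!(
        bangumi.as_str(),
        "https://mikanani.me/RSS/Bangumi?bangumiId=3288&subgroupid=370"
    );
}
```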


@@ -1,11 +1,13 @@
use std::{ use std::{
collections::{HashMap, HashSet}, collections::{HashMap, HashSet},
fmt::Debug, fmt::Debug,
str::FromStr,
sync::Arc, sync::Arc,
}; };
use async_graphql::{InputObject, SimpleObject}; use async_graphql::{InputObject, SimpleObject};
use fetch::fetch_bytes; use async_stream::try_stream;
use fetch::fetch_html;
use futures::{Stream, TryStreamExt, pin_mut, try_join}; use futures::{Stream, TryStreamExt, pin_mut, try_join};
use maplit::hashmap; use maplit::hashmap;
use sea_orm::{ use sea_orm::{
@@ -19,12 +21,15 @@ use super::scrape_mikan_bangumi_meta_stream_from_season_flow_url;
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::{RecorderError, RecorderResult}, errors::{RecorderError, RecorderResult},
extract::mikan::{ extract::{
MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta, bittorrent::EpisodeEnclosureMeta,
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr, mikan::{
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url, MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url, MikanRssItemMeta, MikanRssRoot, MikanSeasonFlowUrlMeta, MikanSeasonStr,
scrape_mikan_episode_meta_from_episode_homepage_url, MikanSubscriberSubscriptionUrlMeta, build_mikan_bangumi_subscription_rss_url,
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
scrape_mikan_episode_meta_from_episode_homepage_url,
},
}, },
models::{ models::{
bangumi, episodes, subscription_bangumi, subscription_episode, bangumi, episodes, subscription_bangumi, subscription_episode,
@@ -35,7 +40,7 @@ use crate::{
#[tracing::instrument(err, skip(ctx, rss_item_list))] #[tracing::instrument(err, skip(ctx, rss_item_list))]
async fn sync_mikan_feeds_from_rss_item_list( async fn sync_mikan_feeds_from_rss_item_list(
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
rss_item_list: Vec<MikanRssEpisodeItem>, rss_item_list: Vec<MikanRssItemMeta>,
subscriber_id: i32, subscriber_id: i32,
subscription_id: i32, subscription_id: i32,
) -> RecorderResult<()> { ) -> RecorderResult<()> {
@@ -53,7 +58,7 @@ async fn sync_mikan_feeds_from_rss_item_list(
.map(|(episode_id, hash, bangumi_id)| (hash.mikan_episode_id, (episode_id, bangumi_id))) .map(|(episode_id, hash, bangumi_id)| (hash.mikan_episode_id, (episode_id, bangumi_id)))
.collect::<HashMap<_, _>>(); .collect::<HashMap<_, _>>();
let mut new_episode_meta_list: Vec<MikanEpisodeMeta> = vec![]; let mut new_episode_meta_list: Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)> = vec![];
let mikan_client = ctx.mikan(); let mikan_client = ctx.mikan();
for to_insert_rss_item in rss_item_list.into_iter().filter(|rss_item| { for to_insert_rss_item in rss_item_list.into_iter().filter(|rss_item| {
@@ -64,7 +69,8 @@ async fn sync_mikan_feeds_from_rss_item_list(
to_insert_rss_item.build_homepage_url(mikan_base_url.clone()), to_insert_rss_item.build_homepage_url(mikan_base_url.clone()),
) )
.await?; .await?;
new_episode_meta_list.push(episode_meta); let episode_enclosure_meta = EpisodeEnclosureMeta::from(to_insert_rss_item);
new_episode_meta_list.push((episode_meta, episode_enclosure_meta));
} }
(new_episode_meta_list, existed_episode_hash2id_map) (new_episode_meta_list, existed_episode_hash2id_map)
@@ -91,22 +97,22 @@ async fn sync_mikan_feeds_from_rss_item_list(
let new_episode_meta_list_group_by_bangumi_hash: HashMap< let new_episode_meta_list_group_by_bangumi_hash: HashMap<
MikanBangumiHash, MikanBangumiHash,
Vec<MikanEpisodeMeta>, Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)>,
> = { > = {
let mut m = hashmap! {}; let mut m = hashmap! {};
for episode_meta in new_episode_meta_list { for (episode_meta, episode_enclosure_meta) in new_episode_meta_list {
let bangumi_hash = episode_meta.bangumi_hash(); let bangumi_hash = episode_meta.bangumi_hash();
m.entry(bangumi_hash) m.entry(bangumi_hash)
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.push(episode_meta); .push((episode_meta, episode_enclosure_meta));
} }
m m
}; };
for (group_bangumi_hash, group_episode_meta_list) in new_episode_meta_list_group_by_bangumi_hash for (group_bangumi_hash, group_episode_meta_list) in new_episode_meta_list_group_by_bangumi_hash
{ {
let first_episode_meta = group_episode_meta_list.first().unwrap(); let (first_episode_meta, _) = group_episode_meta_list.first().unwrap();
    let group_bangumi_model = bangumi::Model::get_or_insert_from_mikan(
        ctx,
        group_bangumi_hash,
@@ -125,9 +131,12 @@ async fn sync_mikan_feeds_from_rss_item_list(
        },
    )
    .await?;

-    let group_episode_creation_list = group_episode_meta_list
-        .into_iter()
-        .map(|episode_meta| (&group_bangumi_model, episode_meta));
+    let group_episode_creation_list =
+        group_episode_meta_list
+            .into_iter()
+            .map(|(episode_meta, episode_enclosure_meta)| {
+                (&group_bangumi_model, episode_meta, episode_enclosure_meta)
+            });

    episodes::Model::add_mikan_episodes_for_subscription(
        ctx,
@@ -194,7 +203,7 @@ impl SubscriptionTrait for MikanSubscriberSubscription {
    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
        let source_url = Url::parse(&model.source_url)?;

-        let meta = MikanSubscriberSubscriptionRssUrlMeta::from_rss_url(&source_url)
+        let meta = MikanSubscriberSubscriptionUrlMeta::from_rss_url(&source_url)
            .with_whatever_context::<_, String, RecorderError>(|| {
                format!(
                    "MikanSubscriberSubscription should extract mikan_subscription_token from \
@@ -216,19 +225,19 @@ impl MikanSubscriberSubscription {
    async fn get_rss_item_list_from_source_url(
        &self,
        ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
+    ) -> RecorderResult<Vec<MikanRssItemMeta>> {
        let mikan_base_url = ctx.mikan().base_url().clone();
        let rss_url = build_mikan_subscriber_subscription_rss_url(
            mikan_base_url.clone(),
            &self.mikan_subscription_token,
        );
-        let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
-        let channel = rss::Channel::read_from(&bytes[..])?;
+        let html = fetch_html(ctx.mikan(), rss_url).await?;
+        let channel = MikanRssRoot::from_str(&html)?.channel;
        let mut result = vec![];
        for (idx, item) in channel.items.into_iter().enumerate() {
-            let item = MikanRssEpisodeItem::try_from(item)
+            let item = MikanRssItemMeta::try_from(item)
                .with_whatever_context::<_, String, RecorderError>(|_| {
                    format!("failed to extract rss item at idx {idx}")
                })?;
@@ -241,7 +250,7 @@ impl MikanSubscriberSubscription {
    async fn get_rss_item_list_from_subsribed_url_rss_link(
        &self,
        ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
+    ) -> RecorderResult<Vec<MikanRssItemMeta>> {
        let subscribed_bangumi_list =
            bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.subscription_id)
                .await?;
@@ -256,12 +265,12 @@ impl MikanSubscriberSubscription {
                        self.subscription_id, subscribed_bangumi.display_name
                    )
                })?;
-            let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
-            let channel = rss::Channel::read_from(&bytes[..])?;
+            let html = fetch_html(ctx.mikan(), rss_url).await?;
+            let channel = MikanRssRoot::from_str(&html)?.channel;
            for (idx, item) in channel.items.into_iter().enumerate() {
-                let item = MikanRssEpisodeItem::try_from(item)
+                let item = MikanRssItemMeta::try_from(item)
                    .with_whatever_context::<_, String, RecorderError>(|_| {
                        format!("failed to extract rss item at idx {idx}")
                    })?;
@@ -272,7 +281,7 @@ impl MikanSubscriberSubscription {
    }
}

-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)]
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSeasonSubscription {
    pub subscription_id: i32,
    pub year: i32,
@@ -292,17 +301,19 @@ impl SubscriptionTrait for MikanSeasonSubscription {
    }

    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        let rss_item_list = self
-            .get_rss_item_list_from_subsribed_url_rss_link(ctx.as_ref())
-            .await?;
-        sync_mikan_feeds_from_rss_item_list(
-            ctx.as_ref(),
-            rss_item_list,
-            self.get_subscriber_id(),
-            self.get_subscription_id(),
-        )
-        .await?;
+        let rss_item_stream = self.get_rss_item_stream_from_subsribed_url_rss_link(ctx.as_ref());
+
+        pin_mut!(rss_item_stream);
+
+        while let Some(rss_item_chunk_list) = rss_item_stream.try_next().await? {
+            sync_mikan_feeds_from_rss_item_list(
+                ctx.as_ref(),
+                rss_item_chunk_list,
+                self.get_subscriber_id(),
+                self.get_subscription_id(),
+            )
+            .await?;
+        }
        Ok(())
    }
@@ -393,48 +404,53 @@ impl MikanSeasonSubscription {
        )
    }

-    #[tracing::instrument(err, skip(ctx))]
-    async fn get_rss_item_list_from_subsribed_url_rss_link(
-        &self,
-        ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
-        let db = ctx.db();
-        let subscribed_bangumi_list = bangumi::Entity::find()
-            .filter(
-                Condition::all()
-                    .add(subscription_bangumi::Column::SubscriptionId.eq(self.subscription_id)),
-            )
-            .join_rev(
-                JoinType::InnerJoin,
-                subscription_bangumi::Relation::Bangumi.def(),
-            )
-            .all(db)
-            .await?;
-        let mut rss_item_list = vec![];
-        for subscribed_bangumi in subscribed_bangumi_list {
-            let rss_url = subscribed_bangumi
-                .rss_link
-                .with_whatever_context::<_, String, RecorderError>(|| {
-                    format!(
-                        "rss_link is required, subscription_id = {}, bangumi_name = {}",
-                        self.subscription_id, subscribed_bangumi.display_name
-                    )
-                })?;
-            let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
-            let channel = rss::Channel::read_from(&bytes[..])?;
-            for (idx, item) in channel.items.into_iter().enumerate() {
-                let item = MikanRssEpisodeItem::try_from(item)
-                    .with_whatever_context::<_, String, RecorderError>(|_| {
-                        format!("failed to extract rss item at idx {idx}")
-                    })?;
-                rss_item_list.push(item);
-            }
-        }
-        Ok(rss_item_list)
-    }
+    fn get_rss_item_stream_from_subsribed_url_rss_link(
+        &self,
+        ctx: &dyn AppContextTrait,
+    ) -> impl Stream<Item = RecorderResult<Vec<MikanRssItemMeta>>> {
+        try_stream! {
+            let db = ctx.db();
+
+            let subscribed_bangumi_list = bangumi::Entity::find()
+                .filter(
+                    Condition::all()
+                        .add(subscription_bangumi::Column::SubscriptionId.eq(self.subscription_id)),
+                )
+                .join_rev(
+                    JoinType::InnerJoin,
+                    subscription_bangumi::Relation::Bangumi.def(),
+                )
+                .all(db)
+                .await?;
+
+            for subscribed_bangumi in subscribed_bangumi_list {
+                let rss_url = subscribed_bangumi
+                    .rss_link
+                    .with_whatever_context::<_, String, RecorderError>(|| {
+                        format!(
+                            "rss_link is required, subscription_id = {}, bangumi_name = {}",
+                            self.subscription_id, subscribed_bangumi.display_name
+                        )
+                    })?;
+                let html = fetch_html(ctx.mikan(), rss_url).await?;
+                let channel = MikanRssRoot::from_str(&html)?.channel;
+                let mut rss_item_list = vec![];
+                for (idx, item) in channel.items.into_iter().enumerate() {
+                    let item = MikanRssItemMeta::try_from(item)
+                        .with_whatever_context::<_, String, RecorderError>(|_| {
+                            format!("failed to extract rss item at idx {idx}")
+                        })?;
+                    rss_item_list.push(item);
+                }
+                yield rss_item_list;
+            }
+        }
+    }
}
@@ -504,20 +520,20 @@ impl MikanBangumiSubscription {
    async fn get_rss_item_list_from_source_url(
        &self,
        ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
+    ) -> RecorderResult<Vec<MikanRssItemMeta>> {
        let mikan_base_url = ctx.mikan().base_url().clone();
        let rss_url = build_mikan_bangumi_subscription_rss_url(
            mikan_base_url.clone(),
            &self.mikan_bangumi_id,
            Some(&self.mikan_fansub_id),
        );
-        let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
-        let channel = rss::Channel::read_from(&bytes[..])?;
+        let html = fetch_html(ctx.mikan(), rss_url).await?;
+        let channel = MikanRssRoot::from_str(&html)?.channel;
        let mut result = vec![];
        for (idx, item) in channel.items.into_iter().enumerate() {
-            let item = MikanRssEpisodeItem::try_from(item)
+            let item = MikanRssItemMeta::try_from(item)
                .with_whatever_context::<_, String, RecorderError>(|_| {
                    format!("failed to extract rss item at idx {idx}")
                })?;
@@ -530,66 +546,27 @@ impl MikanBangumiSubscription {
#[cfg(test)]
#[allow(unused_variables)]
mod tests {
-    use std::sync::Arc;
-
    use rstest::{fixture, rstest};
    use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait};
    use tracing::Level;

    use crate::{
-        app::AppContextTrait,
        errors::RecorderResult,
        extract::mikan::{
            MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
-            MikanSubscriberSubscriptionRssUrlMeta,
+            MikanSubscriberSubscriptionUrlMeta,
        },
        models::{
            bangumi, episodes,
            subscriptions::{self, SubscriptionTrait},
        },
        test_utils::{
-            app::TestingAppContext,
-            crypto::build_testing_crypto_service,
-            database::build_testing_database_service,
-            mikan::{
-                MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential_form,
-            },
-            storage::build_testing_storage_service,
+            app::TestingPreset, mikan::build_testing_mikan_credential_form,
            tracing::try_init_testing_tracing,
        },
    };

-    struct TestingResources {
-        pub app_ctx: Arc<dyn AppContextTrait>,
-        pub mikan_server: MikanMockServer,
-    }
-
-    async fn build_testing_app_context() -> RecorderResult<TestingResources> {
-        let mikan_server = MikanMockServer::new().await?;
-        let mikan_base_url = mikan_server.base_url().clone();
-        let app_ctx = {
-            let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
-            let db_service = build_testing_database_service(Default::default()).await?;
-            let crypto_service = build_testing_crypto_service().await?;
-            let storage_service = build_testing_storage_service().await?;
-            let app_ctx = TestingAppContext::builder()
-                .mikan(mikan_client)
-                .db(db_service)
-                .crypto(crypto_service)
-                .storage(storage_service)
-                .build();
-            Arc::new(app_ctx)
-        };
-        Ok(TestingResources {
-            app_ctx,
-            mikan_server,
-        })
-    }
-
    #[fixture]
    fn before_each() {
        try_init_testing_tracing(Level::DEBUG);
@@ -598,10 +575,10 @@ mod tests {
    #[rstest]
    #[tokio::test]
    async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
-        let TestingResources {
-            app_ctx,
-            mut mikan_server,
-        } = build_testing_app_context().await?;
+        let mut preset = TestingPreset::default().await?;
+        let app_ctx = preset.app_ctx.clone();
+
+        let mikan_server = &mut preset.mikan_server;

        let _resources_mock = mikan_server.mock_resources_with_doppel();
@@ -660,10 +637,11 @@ mod tests {
    #[rstest]
    #[tokio::test]
    async fn test_mikan_subscriber_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
-        let TestingResources {
-            app_ctx,
-            mut mikan_server,
-        } = build_testing_app_context().await?;
+        let mut preset = TestingPreset::default().await?;
+
+        let app_ctx = preset.app_ctx.clone();
+
+        let mikan_server = &mut preset.mikan_server;

        let _resources_mock = mikan_server.mock_resources_with_doppel();
@@ -676,7 +654,7 @@ mod tests {
            subscriber_id: ActiveValue::Set(subscriber_id),
            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSubscriber),
            source_url: ActiveValue::Set(
-                MikanSubscriberSubscriptionRssUrlMeta {
+                MikanSubscriberSubscriptionUrlMeta {
                    mikan_subscription_token: "test".into(),
                }
                .build_rss_url(mikan_server.base_url().clone())
@@ -727,10 +705,11 @@ mod tests {
    #[rstest]
    #[tokio::test]
    async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
-        let TestingResources {
-            app_ctx,
-            mut mikan_server,
-        } = build_testing_app_context().await?;
+        let mut preset = TestingPreset::default().await?;
+
+        let app_ctx = preset.app_ctx.clone();
+
+        let mikan_server = &mut preset.mikan_server;

        let _resources_mock = mikan_server.mock_resources_with_doppel();


@@ -2,7 +2,7 @@ use std::{borrow::Cow, fmt, str::FromStr, sync::Arc};
use async_stream::try_stream;
use bytes::Bytes;
-use chrono::DateTime;
+use chrono::{DateTime, Utc};
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
use fetch::{html::fetch_html, image::fetch_image};
use futures::{Stream, TryStreamExt, pin_mut};
@@ -17,28 +17,36 @@ use crate::{
    app::AppContextTrait,
    errors::app_error::{RecorderError, RecorderResult},
    extract::{
+        bittorrent::EpisodeEnclosureMeta,
        html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
        media::extract_image_src_from_str,
        mikan::{
            MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH, MIKAN_BANGUMI_HOMEPAGE_PATH,
            MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH, MIKAN_BANGUMI_RSS_PATH,
-            MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_POSTER_BUCKET_KEY,
-            MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
+            MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
+            MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
            MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
-            MIKAN_YEAR_QUERY_KEY, MikanClient,
-            build_mikan_bangumi_subscription_rss_url, build_mikan_subscriber_subscription_rss_url,
+            MIKAN_UNKNOWN_FANSUB_ID, MIKAN_YEAR_QUERY_KEY, MikanClient,
        },
    },
-    storage::{StorageContentCategory, StorageServiceTrait},
+    media::{
+        AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
+        EncodeWebpOptions,
+    },
+    storage::StorageContentCategory,
+    task::OptimizeImageTask,
};

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanRssEpisodeItem {
    pub title: String,
-    pub url: Url,
-    pub content_length: Option<u64>,
+    pub torrent_link: Url,
+    pub content_length: Option<i64>,
    pub mime: String,
-    pub pub_date: Option<i64>,
+    pub pub_date: Option<DateTime<Utc>>,
    pub mikan_episode_id: String,
+    pub magnet_link: Option<String>,
}

impl MikanRssEpisodeItem {
@@ -88,31 +96,60 @@ impl TryFrom<rss::Item> for MikanRssEpisodeItem {
            RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
        })?;

+        let pub_date = item
+            .extensions
+            .get("torrent")
+            .and_then(|t| t.get("pubDate"))
+            .and_then(|e| e.first())
+            .and_then(|e| e.value.as_deref());
+
        Ok(MikanRssEpisodeItem {
            title,
-            url: enclosure_url,
+            torrent_link: enclosure_url,
            content_length: enclosure.length.parse().ok(),
            mime: mime_type,
-            pub_date: item
-                .pub_date
-                .and_then(|s| DateTime::parse_from_rfc2822(&s).ok())
-                .map(|s| s.timestamp_millis()),
+            pub_date: pub_date.and_then(|s| {
+                DateTime::parse_from_rfc2822(s)
+                    .ok()
+                    .map(|s| s.with_timezone(&Utc))
+                    .or_else(|| {
+                        DateTime::parse_from_rfc3339(s)
+                            .ok()
+                            .map(|s| s.with_timezone(&Utc))
+                    })
+                    .or_else(|| {
+                        DateTime::parse_from_rfc3339(&format!("{s}+08:00"))
+                            .ok()
+                            .map(|s| s.with_timezone(&Utc))
+                    })
+            }),
            mikan_episode_id,
+            magnet_link: None,
        })
    }
}

+impl From<MikanRssEpisodeItem> for EpisodeEnclosureMeta {
+    fn from(item: MikanRssEpisodeItem) -> Self {
+        Self {
+            magnet_link: item.magnet_link,
+            torrent_link: Some(item.torrent_link.to_string()),
+            pub_date: item.pub_date,
+            content_length: item.content_length,
+        }
+    }
+}
+
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
-pub struct MikanSubscriberSubscriptionRssUrlMeta {
+pub struct MikanSubscriberSubscriptionUrlMeta {
    pub mikan_subscription_token: String,
}

-impl MikanSubscriberSubscriptionRssUrlMeta {
+impl MikanSubscriberSubscriptionUrlMeta {
    pub fn from_rss_url(url: &Url) -> Option<Self> {
        if url.path() == MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH {
            url.query_pairs()
                .find(|(k, _)| k == MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY)
-                .map(|(_, v)| MikanSubscriberSubscriptionRssUrlMeta {
+                .map(|(_, v)| MikanSubscriberSubscriptionUrlMeta {
                    mikan_subscription_token: v.to_string(),
                })
        } else {
@@ -125,19 +162,6 @@ impl MikanSubscriberSubscriptionRssUrlMeta {
    }
}

-pub fn build_mikan_subscriber_subscription_rss_url(
-    mikan_base_url: Url,
-    mikan_subscription_token: &str,
-) -> Url {
-    let mut url = mikan_base_url;
-    url.set_path(MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH);
-    url.query_pairs_mut().append_pair(
-        MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
-        mikan_subscription_token,
-    );
-    url
-}
-
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Eq)]
pub struct MikanBangumiIndexMeta {
    pub homepage: Url,
@@ -200,6 +224,32 @@ impl MikanBangumiMeta {
    }
}

+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct MikanFansubHash {
+    pub mikan_fansub_id: String,
+}
+
+impl MikanFansubHash {
+    pub fn from_homepage_url(url: &Url) -> Option<Self> {
+        let path = url.path();
+        if path.starts_with(MIKAN_FANSUB_HOMEPAGE_PATH) {
+            let mikan_fansub_id = path.replace(&format!("{MIKAN_FANSUB_HOMEPAGE_PATH}/"), "");
+            Some(Self { mikan_fansub_id })
+        } else {
+            None
+        }
+    }
+
+    pub fn build_homepage_url(self, mikan_base_url: Url) -> Url {
+        let mut url = mikan_base_url;
+        url.set_path(&format!(
+            "{MIKAN_FANSUB_HOMEPAGE_PATH}/{}",
+            self.mikan_fansub_id
+        ));
+        url
+    }
+}
+
#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeMeta {
    pub homepage: Url,
@@ -227,22 +277,6 @@ pub struct MikanBangumiPosterMeta {
    pub poster_src: Option<String>,
}

-pub fn build_mikan_bangumi_subscription_rss_url(
-    mikan_base_url: Url,
-    mikan_bangumi_id: &str,
-    mikan_fansub_id: Option<&str>,
-) -> Url {
-    let mut url = mikan_base_url;
-    url.set_path(MIKAN_BANGUMI_RSS_PATH);
-    url.query_pairs_mut()
-        .append_pair(MIKAN_BANGUMI_ID_QUERY_KEY, mikan_bangumi_id);
-    if let Some(mikan_fansub_id) = mikan_fansub_id {
-        url.query_pairs_mut()
-            .append_pair(MIKAN_FANSUB_ID_QUERY_KEY, mikan_fansub_id);
-    };
-    url
-}
-
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiIndexHash {
    pub mikan_bangumi_id: String,
@@ -530,16 +564,17 @@ pub fn extract_mikan_episode_meta_from_episode_homepage_html(
        RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
    })?;

-    let fansub_name = html
-        .select(
-            &Selector::parse(".bangumi-info a.magnet-link-wrap[href^='/Home/PublishGroup/']")
-                .unwrap(),
-        )
-        .next()
-        .map(extract_inner_text_from_element_ref)
-        .ok_or_else(|| {
-            RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name"))
-        })?;
+    let fansub_name = if mikan_fansub_id == MIKAN_UNKNOWN_FANSUB_ID {
+        MIKAN_UNKNOWN_FANSUB_ID.to_string()
+    } else {
+        html.select(
+            &Selector::parse(".bangumi-info a.magnet-link-wrap[href^='/Home/PublishGroup/']")
+                .unwrap(),
+        )
+        .next()
+        .map(extract_inner_text_from_element_ref)
+        .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))?
+    };

    let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
        el.value()
@@ -651,6 +686,13 @@ pub fn extract_mikan_fansub_meta_from_bangumi_homepage_html(
    html: &Html,
    mikan_fansub_id: String,
) -> Option<MikanFansubMeta> {
+    if mikan_fansub_id == MIKAN_UNKNOWN_FANSUB_ID {
+        return Some(MikanFansubMeta {
+            mikan_fansub_id,
+            fansub: MIKAN_UNKNOWN_FANSUB_ID.to_string(),
+        });
+    }
    html.select(
        &Selector::parse(&format!(
            "a.subgroup-name[data-anchor='#{mikan_fansub_id}']"
@@ -710,6 +752,7 @@ pub async fn scrape_mikan_bangumi_meta_from_bangumi_homepage_url(
    )
}

+#[allow(dead_code)]
#[instrument(err, skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))]
pub async fn scrape_mikan_bangumi_index_meta_from_bangumi_homepage_url(
    mikan_client: &MikanClient,
@@ -737,48 +780,96 @@ pub async fn scrape_mikan_poster_data_from_image_url(
#[instrument(skip_all, fields(origin_poster_src_url = origin_poster_src_url.as_str()))]
pub async fn scrape_mikan_poster_meta_from_image_url(
-    mikan_client: &MikanClient,
-    storage_service: &dyn StorageServiceTrait,
+    ctx: &dyn AppContextTrait,
    origin_poster_src_url: Url,
-    subscriber_id: i32,
) -> RecorderResult<MikanBangumiPosterMeta> {
-    if let Some(poster_src) = storage_service
-        .exists_object(
-            StorageContentCategory::Image,
-            subscriber_id,
-            Some(MIKAN_POSTER_BUCKET_KEY),
-            &origin_poster_src_url
-                .path()
-                .replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
-        )
-        .await?
-    {
-        return Ok(MikanBangumiPosterMeta {
-            origin_poster_src: origin_poster_src_url,
-            poster_src: Some(poster_src.to_string()),
-        });
-    }
-
-    let poster_data =
-        scrape_mikan_poster_data_from_image_url(mikan_client, origin_poster_src_url.clone())
-            .await?;
-    let poster_str = storage_service
-        .store_object(
-            StorageContentCategory::Image,
-            subscriber_id,
-            Some(MIKAN_POSTER_BUCKET_KEY),
-            &origin_poster_src_url
-                .path()
-                .replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
-            poster_data,
-        )
-        .await?;
-
-    Ok(MikanBangumiPosterMeta {
-        origin_poster_src: origin_poster_src_url,
-        poster_src: Some(poster_str.to_string()),
-    })
+    let storage_service = ctx.storage();
+    let media_service = ctx.media();
+    let mikan_client = ctx.mikan();
+    let task_service = ctx.task();
+
+    let storage_path = storage_service.build_public_object_path(
+        StorageContentCategory::Image,
+        MIKAN_POSTER_BUCKET_KEY,
+        &origin_poster_src_url
+            .path()
+            .replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
+    );
+    let meta = if let Some(poster_src) = storage_service.exists(&storage_path).await? {
+        MikanBangumiPosterMeta {
+            origin_poster_src: origin_poster_src_url,
+            poster_src: Some(poster_src.to_string()),
+        }
+    } else {
+        let poster_data =
+            scrape_mikan_poster_data_from_image_url(mikan_client, origin_poster_src_url.clone())
+                .await?;
+        let poster_str = storage_service
+            .write(storage_path.clone(), poster_data)
+            .await?;
+        MikanBangumiPosterMeta {
+            origin_poster_src: origin_poster_src_url,
+            poster_src: Some(poster_str.to_string()),
+        }
+    };
+
+    if meta.poster_src.is_some()
+        && storage_path
+            .extension()
+            .is_some_and(|ext| media_service.is_legacy_image_format(ext))
+    {
+        let auto_optimize_formats = &media_service.config.auto_optimize_formats;
+
+        if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Webp) {
+            let webp_storage_path = storage_path.with_extension("webp");
+            if storage_service.exists(&webp_storage_path).await?.is_none() {
+                task_service
+                    .add_system_task(
+                        OptimizeImageTask::builder()
+                            .source_path(storage_path.clone().to_string())
+                            .target_path(webp_storage_path.to_string())
+                            .format_options(EncodeImageOptions::Webp(EncodeWebpOptions::default()))
+                            .build()
+                            .into(),
+                    )
+                    .await?;
+            }
+        }
+        if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Avif) {
+            let avif_storage_path = storage_path.with_extension("avif");
+            if storage_service.exists(&avif_storage_path).await?.is_none() {
+                task_service
+                    .add_system_task(
+                        OptimizeImageTask::builder()
+                            .source_path(storage_path.clone().to_string())
+                            .target_path(avif_storage_path.to_string())
+                            .format_options(EncodeImageOptions::Avif(EncodeAvifOptions::default()))
+                            .build()
+                            .into(),
+                    )
+                    .await?;
+            }
+        }
+        if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Jxl) {
+            let jxl_storage_path = storage_path.with_extension("jxl");
+            if storage_service.exists(&jxl_storage_path).await?.is_none() {
+                task_service
+                    .add_system_task(
+                        OptimizeImageTask::builder()
+                            .source_path(storage_path.clone().to_string())
+                            .target_path(jxl_storage_path.to_string())
+                            .format_options(EncodeImageOptions::Jxl(EncodeJxlOptions::default()))
+                            .build()
+                            .into(),
+                    )
+                    .await?;
+            }
+        }
+    }
+
+    Ok(meta)
}

pub fn extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(
@@ -1005,24 +1096,23 @@ pub async fn scrape_mikan_bangumi_meta_list_from_season_flow_url(
#[cfg(test)]
mod test {
    #![allow(unused_variables)]
-    use std::{fs, sync::Arc};
+    use std::{fs, io::Cursor, sync::Arc};

    use futures::StreamExt;
+    use image::{ImageFormat, ImageReader};
    use rstest::{fixture, rstest};
    use tracing::Level;
    use url::Url;
-    use zune_image::{codecs::ImageFormat, image::Image};

    use super::*;
    use crate::test_utils::{
-        app::TestingAppContext,
+        app::{TestingAppContext, TestingPreset},
        crypto::build_testing_crypto_service,
        database::build_testing_database_service,
        mikan::{
            MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential,
            build_testing_mikan_credential_form,
        },
-        storage::build_testing_storage_service,
        tracing::try_init_testing_tracing,
    };
@@ -1047,12 +1137,14 @@ mod test {
            scrape_mikan_poster_data_from_image_url(&mikan_client, bangumi_poster_url).await?;
        resources_mock.shared_resource_mock.expect(1);

-        let image = Image::read(bgm_poster_data.to_vec(), Default::default());
+        let image = {
+            let c = Cursor::new(bgm_poster_data);
+            ImageReader::new(c)
+        };
+        let image_format = image.with_guessed_format().ok().and_then(|i| i.format());
        assert!(
-            image.is_ok_and(|img| img
-                .metadata()
-                .get_image_format()
-                .is_some_and(|fmt| matches!(fmt, ImageFormat::JPEG))),
+            image_format.is_some_and(|fmt| matches!(fmt, ImageFormat::Jpeg)),
            "should start with valid jpeg data magic number"
        );
@@ -1062,43 +1154,47 @@ mod test {
    #[rstest]
    #[tokio::test]
    async fn test_scrape_mikan_poster_meta_from_image_url(before_each: ()) -> RecorderResult<()> {
-        let mut mikan_server = MikanMockServer::new().await?;
-        let mikan_base_url = mikan_server.base_url().clone();
-        let resources_mock = mikan_server.mock_resources_with_doppel();
-        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
-        let storage_service = build_testing_storage_service().await?;
-        let storage_operator = storage_service.get_operator()?;
+        let mut preset = TestingPreset::default().await?;
+        let app_ctx = preset.app_ctx.clone();
+        let mikan_base_url = preset.mikan_server.base_url().clone();
+        let resources_mock = preset.mikan_server.mock_resources_with_doppel();

        let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;
-        let bgm_poster = scrape_mikan_poster_meta_from_image_url(
-            &mikan_client,
-            &storage_service,
-            bangumi_poster_url,
-            1,
-        )
-        .await?;
+        let bgm_poster =
+            scrape_mikan_poster_meta_from_image_url(app_ctx.as_ref(), bangumi_poster_url).await?;
        resources_mock.shared_resource_mock.expect(1);

-        let storage_fullname = storage_service.get_fullname(
+        let storage_service = app_ctx.storage();
+        let storage_fullname = storage_service.build_public_object_path(
            StorageContentCategory::Image,
-            1,
-            Some(MIKAN_POSTER_BUCKET_KEY),
+            MIKAN_POSTER_BUCKET_KEY,
            "202309/5ce9fed1.jpg",
        );
-        let storage_fullename_str = storage_fullname.as_str();
-        assert!(storage_operator.exists(storage_fullename_str).await?);
+        assert!(
+            storage_service.exists(&storage_fullname).await?.is_some(),
+            "storage_fullename_str = {}, list public = {:?}",
+            &storage_fullname,
+            storage_service.list_public().await?
+        );

-        let expected_data =
-            fs::read("tests/resources/mikan/doppel/images/Bangumi/202309/5ce9fed1.jpg")?;
-        let found_data = storage_operator.read(storage_fullename_str).await?.to_vec();
-        assert_eq!(expected_data, found_data);
+        let bgm_poster_data = storage_service.read(&storage_fullname).await?;
+        let image = {
+            let c = Cursor::new(bgm_poster_data.to_vec());
+            ImageReader::new(c)
+        };
+        let image_format = image.with_guessed_format().ok().and_then(|i| i.format());
+        assert!(
+            image_format.is_some_and(|fmt| matches!(fmt, ImageFormat::Jpeg)),
+            "should start with valid jpeg data magic number"
+        );

        Ok(())
    }


@@ -1,7 +1,6 @@
-pub mod defs;
+pub mod bittorrent;
pub mod html;
pub mod http;
pub mod media;
pub mod mikan;
-pub mod rawname;
-pub mod bittorrent;
+pub mod origin;

File diff suppressed because it is too large


@@ -1,5 +0,0 @@
pub mod parser;
pub use parser::{
    RawEpisodeMeta, extract_episode_meta_from_raw_name, extract_season_from_title_body,
};


@@ -1,845 +0,0 @@
/**
* @TODO: rewrite with nom
*/
use std::borrow::Cow;
use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::whatever;
use crate::{
errors::RecorderResult,
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
};
const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";
lazy_static! {
static ref TITLE_RE: Regex = Regex::new(
r#"(.*|\[.*])( -? \d+|\[\d+]|\[\d+.?[vV]\d]|第\d+[话話集]|\[第?\d+[话話集]]|\[\d+.?END]|[Ee][Pp]?\d+|\[\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*[话話集]\s*])(.*)"#
).unwrap();
static ref EP_COLLECTION_RE:Regex = Regex::new(r#"\[?\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*合?[话話集]\s*]?"#).unwrap();
static ref MOVIE_TITLE_RE:Regex = Regex::new(r#"(.*|\[.*])(剧场版|[Mm]ovie|电影)(.*?)$"#).unwrap();
static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap();
static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap();
static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
static ref PREFIX_RE: Regex =
Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
static ref MOVIE_SEASON_EXTRACT_RE: Regex = Regex::new(r"剧场版|Movie|电影").unwrap();
static ref MAIN_TITLE_PREFIX_PROCESS_RE1: Regex = Regex::new(r"新番|月?番").unwrap();
static ref MAIN_TITLE_PREFIX_PROCESS_RE2: Regex = Regex::new(r"[港澳台]{1,3}地区").unwrap();
static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE: Regex = Regex::new(r"\[.+\]").unwrap();
static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1: Regex = Regex::new(r"^.*?\[").unwrap();
static ref SEASON_EXTRACT_SEASON_ALL_RE: Regex = Regex::new(r"S\d{1,2}|Season \d{1,2}|[第].[季期]|1st|2nd|3rd|\d{1,2}th").unwrap();
static ref SEASON_EXTRACT_SEASON_EN_PREFIX_RE: Regex = Regex::new(r"Season|S").unwrap();
static ref SEASON_EXTRACT_SEASON_EN_NTH_RE: Regex = Regex::new(r"1st|2nd|3rd|\d{1,2}th").unwrap();
static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_RE: Regex = Regex::new(r"[第 ].*[季期(部分)]|部分").unwrap();
static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE: Regex = Regex::new(r"[第季期 ]").unwrap();
static ref NAME_EXTRACT_REMOVE_RE: Regex = Regex::new(r"[(]仅限[港澳台]{1,3}地区[)]").unwrap();
static ref NAME_EXTRACT_SPLIT_RE: Regex = Regex::new(r"/|\s{2}|-\s{2}|\]\[").unwrap();
static ref NAME_EXTRACT_REPLACE_ADHOC1_RE: Regex = Regex::new(r"([\p{scx=Han}\s\(\)]{5,})_([a-zA-Z]{2,})").unwrap();
static ref NAME_JP_TEST: Regex = Regex::new(r"[\p{scx=Hira}\p{scx=Kana}]{2,}").unwrap();
static ref NAME_ZH_TEST: Regex = Regex::new(r"[\p{scx=Han}]{2,}").unwrap();
static ref NAME_EN_TEST: Regex = Regex::new(r"[a-zA-Z]{3,}").unwrap();
static ref TAGS_EXTRACT_SPLIT_RE: Regex = Regex::new(r"[\[\]()_]").unwrap();
static ref CLEAR_SUB_RE: Regex = Regex::new(r"_MP4|_MKV").unwrap();
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
pub struct RawEpisodeMeta {
pub name_en: Option<String>,
pub name_en_no_season: Option<String>,
pub name_jp: Option<String>,
pub name_jp_no_season: Option<String>,
pub name_zh: Option<String>,
pub name_zh_no_season: Option<String>,
pub season: i32,
pub season_raw: Option<String>,
pub episode_index: i32,
pub subtitle: Option<String>,
pub source: Option<String>,
pub fansub: Option<String>,
pub resolution: Option<String>,
}
fn extract_fansub(raw_name: &str) -> Option<&str> {
let mut groups = EN_BRACKET_SPLIT_RE.splitn(raw_name, 3);
groups.nth(1)
}
fn replace_ch_bracket_to_en(raw_name: &str) -> String {
raw_name.replace('【', "[").replace('】', "]")
}
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RecorderResult<String> {
let raw_without_fansub = if let Some(fansub) = fansub {
let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
fan_sub_re.replace_all(title_body, "")
} else {
Cow::Borrowed(title_body)
};
let raw_with_prefix_replaced = PREFIX_RE.replace_all(&raw_without_fansub, "/");
let mut arg_group = raw_with_prefix_replaced
.split('/')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>();
if arg_group.len() == 1 {
arg_group = arg_group.first_mut().unwrap().split(' ').collect();
}
let mut raw = raw_without_fansub.to_string();
for arg in arg_group.iter() {
if (arg_group.len() <= 5 && MAIN_TITLE_PREFIX_PROCESS_RE1.is_match(arg))
|| (MAIN_TITLE_PREFIX_PROCESS_RE2.is_match(arg))
{
let sub = Regex::new(&format!(".{arg}."))?;
raw = sub.replace_all(&raw, "").to_string();
}
}
if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw)
&& m.len() as f32 > (raw.len() as f32) * 0.5
{
let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
.replace(&raw, "")
.chars()
.collect_vec();
while let Some(ch) = raw1.pop() {
if ch == ']' {
break;
}
}
raw = raw1.into_iter().collect();
}
Ok(raw.to_string())
}
pub fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
let seasons = SEASON_EXTRACT_SEASON_ALL_RE
.find(&name_and_season)
.into_iter()
.map(|s| s.as_str())
.collect_vec();
if seasons.is_empty() {
return (title_body.to_string(), None, 1);
}
let mut season = 1;
let mut season_raw = None;
let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");
for s in seasons {
season_raw = Some(s);
if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s)
&& let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
.replace_all(m.as_str(), "")
.parse::<i32>()
{
season = s;
break;
}
if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s)
&& let Some(s) = DIGIT_1PLUS_REG
.find(m.as_str())
.and_then(|s| s.as_str().parse::<i32>().ok())
{
season = s;
break;
}
if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
.replace(m.as_str(), "")
.parse::<i32>()
{
season = s;
break;
}
if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
season = ZH_NUM_MAP[m.as_str()];
break;
}
}
}
(name.to_string(), season_raw.map(|s| s.to_string()), season)
}
fn extract_name_from_title_body_name_section(
title_body_name_section: &str,
) -> (Option<String>, Option<String>, Option<String>) {
let mut name_en = None;
let mut name_zh = None;
let mut name_jp = None;
let replaced1 = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE
.replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
let trimmed = replaced2.trim();
let mut split = NAME_EXTRACT_SPLIT_RE
.split(trimmed)
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.map(|s| s.to_string())
.collect_vec();
if split.len() == 1 {
let mut split_space = split[0].split(' ').collect_vec();
let mut search_indices = vec![0];
if split_space.len() > 1 {
search_indices.push(split_space.len() - 1);
}
for i in search_indices {
if NAME_ZH_TEST.is_match(split_space[i]) {
let chs = split_space[i];
split_space.remove(i);
split = vec![chs.to_string(), split_space.join(" ")];
break;
}
}
}
for item in split {
if NAME_JP_TEST.is_match(&item) && name_jp.is_none() {
name_jp = Some(item);
} else if NAME_ZH_TEST.is_match(&item) && name_zh.is_none() {
name_zh = Some(item);
} else if NAME_EN_TEST.is_match(&item) && name_en.is_none() {
name_en = Some(item);
}
}
(name_en, name_zh, name_jp)
}
fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
DIGIT_1PLUS_REG
.find(title_episode)?
.as_str()
.parse::<i32>()
.ok()
}
fn clear_sub(sub: Option<String>) -> Option<String> {
sub.map(|s| CLEAR_SUB_RE.replace_all(&s, "").to_string())
}
fn extract_tags_from_title_extra(
title_extra: &str,
) -> (Option<String>, Option<String>, Option<String>) {
let replaced = TAGS_EXTRACT_SPLIT_RE.replace_all(title_extra, " ");
let elements = replaced
.split(' ')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect_vec();
let mut sub = None;
let mut resolution = None;
let mut source = None;
for element in elements.iter() {
if SUB_RE.is_match(element) {
sub = Some(element.to_string())
} else if RESOLUTION_RE.is_match(element) {
resolution = Some(element.to_string())
} else if SOURCE_L1_RE.is_match(element) {
source = Some(element.to_string())
}
}
if source.is_none() {
for element in elements {
if SOURCE_L2_RE.is_match(element) {
source = Some(element.to_string())
}
}
}
(clear_sub(sub), resolution, source)
}
pub fn check_is_movie(title: &str) -> bool {
MOVIE_TITLE_RE.is_match(title)
}
pub fn extract_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
let raw_title = s.trim();
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
let fansub = extract_fansub(&raw_title_without_ch_brackets);
let movie_capture = check_is_movie(&raw_title_without_ch_brackets);
if let Some(title_re_match_obj) = MOVIE_TITLE_RE
.captures(&raw_title_without_ch_brackets)
.or(TITLE_RE.captures(&raw_title_without_ch_brackets))
{
let mut title_body = title_re_match_obj
.get(1)
.map(|s| s.as_str().trim())
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"))
.to_string();
let mut title_episode = title_re_match_obj
.get(2)
.map(|s| s.as_str().trim())
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
let title_extra = title_re_match_obj
.get(3)
.map(|s| s.as_str().trim())
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
if movie_capture {
title_body += title_episode;
title_episode = "";
} else if EP_COLLECTION_RE.is_match(title_episode) {
title_episode = "";
}
let title_body = title_body_pre_process(&title_body, fansub)?;
let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
extract_name_from_title_body_name_section(&name_without_season);
let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1);
let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
Ok(RawEpisodeMeta {
name_en,
name_en_no_season,
name_jp,
name_jp_no_season,
name_zh,
name_zh_no_season,
season,
season_raw,
episode_index,
subtitle: sub,
source,
fansub: fansub.map(|s| s.to_string()),
resolution,
})
} else {
whatever!("Can not parse episode meta from raw filename {}", raw_title)
}
}
#[cfg(test)]
mod tests {
use super::{RawEpisodeMeta, extract_episode_meta_from_raw_name};
fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
let expected: Option<RawEpisodeMeta> = serde_json::from_str(expected).unwrap_or_default();
let found = extract_episode_meta_from_raw_name(raw_name).ok();
if expected != found {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
}
assert_eq!(expected, found);
}
#[test]
fn test_parse_ep_with_all_parts_wrapped() {
test_raw_ep_parser_case(
r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
r#"{
"name_zh": "我心里危险的东西",
"name_zh_no_season": "我心里危险的东西",
"season": 2,
"season_raw": "第二季",
"episode_index": 5,
"subtitle": "简日双语",
"source": null,
"fansub": "新Sub",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_title_wrapped_by_one_square_bracket_and_season_prefix() {
test_raw_ep_parser_case(
r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
r#"{
"name_en": "Boku no Kokoro no Yabai Yatsu",
"name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
"name_zh": "我内心的糟糕念头",
"name_zh_no_season": "我内心的糟糕念头",
"season": 1,
"season_raw": null,
"episode_index": 18,
"subtitle": "简日双语",
"source": null,
"fansub": "喵萌奶茶屋",
"resolution": "1080p"
}"#,
);
}
#[test]
fn test_parse_ep_with_ep_and_version() {
test_raw_ep_parser_case(
r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{
"name_en": "Shin no Nakama 2nd",
"name_en_no_season": "Shin no Nakama",
"name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
"name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
"season": 2,
"season_raw": "2nd",
"episode_index": 8,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_en_title_only() {
test_raw_ep_parser_case(
r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
r#"{
"name_en": "THE MARGINAL SERVICE",
"name_en_no_season": "THE MARGINAL SERVICE",
"season": 1,
"episode_index": 8,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "动漫国字幕组&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_two_zh_title() {
test_raw_ep_parser_case(
r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{
"name_en": "Nozomanu Fushi no Boukensha",
"name_en_no_season": "Nozomanu Fushi no Boukensha",
"name_zh": "事与愿违的不死冒险者",
"name_zh_no_season": "事与愿违的不死冒险者",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_en_zh_jp_titles() {
test_raw_ep_parser_case(
r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
r#"{
"name_en": "Pon no Michi",
"name_jp": "ぽんのみち",
"name_zh": "碰之道",
"name_en_no_season": "Pon no Michi",
"name_jp_no_season": "ぽんのみち",
"name_zh_no_season": "碰之道",
"season": 1,
"season_raw": null,
"episode_index": 7,
"subtitle": "简繁日内封字幕",
"source": "WebRip",
"fansub": "喵萌奶茶屋&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_nth_season() {
test_raw_ep_parser_case(
r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_en": "Yowai Character Tomozakikun",
"name_en_no_season": "Yowai Character Tomozakikun",
"name_zh": "弱角友崎同学 2nd STAGE",
"name_zh_no_season": "弱角友崎同学",
"season": 2,
"season_raw": "2nd",
"episode_index": 9,
"subtitle": "CHT",
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_season_en_and_season_zh() {
test_raw_ep_parser_case(
r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
r#"{
"name_en": "Kingdom S5",
"name_en_no_season": "Kingdom",
"name_zh": "王者天下 第五季",
"name_zh_no_season": "王者天下",
"season": 5,
"season_raw": "第五季",
"episode_index": 7,
"subtitle": "简繁外挂字幕",
"source": "WebRip",
"fansub": "豌豆字幕组&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_airota_fansub_style_case1() {
test_raw_ep_parser_case(
r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
r#"{
"name_en": "Alice to Therese no Maboroshi Koujou",
"name_en_no_season": "Alice to Therese no Maboroshi Koujou",
"name_zh": "爱丽丝与特蕾丝的虚幻工厂",
"name_zh_no_season": "爱丽丝与特蕾丝的虚幻工厂",
"season": 1,
"episode_index": 1,
"subtitle": "简繁内封",
"source": "WebRip",
"fansub": "千夏字幕组",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_airota_fansub_style_case2() {
test_raw_ep_parser_case(
r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
r#"{
"name_en": "Yuru Camp Movie",
"name_en_no_season": "Yuru Camp Movie",
"name_zh": "电影 轻旅轻营 (摇曳露营)",
"name_zh_no_season": "电影 轻旅轻营 (摇曳露营)",
"season": 1,
"episode_index": 1,
"subtitle": "繁体",
"source": "UHDRip",
"fansub": "千夏字幕组&喵萌奶茶屋",
"resolution": "2160p"
}"#,
)
}
#[test]
fn test_parse_ep_with_large_episode_style() {
test_raw_ep_parser_case(
r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#,
r#"{
"name_en": "New Doraemon",
"name_en_no_season": "New Doraemon",
"name_zh": "哆啦A梦新番",
"name_zh_no_season": "哆啦A梦新番",
"season": 1,
"episode_index": 747,
"subtitle": "GB",
"fansub": "梦蓝字幕组",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_many_square_brackets_split_title() {
test_raw_ep_parser_case(
r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
r#"{
"name_en": "Yuru Camp",
"name_en_no_season": "Yuru Camp",
"name_zh": "剧场版-摇曳露营",
"name_zh_no_season": "剧场版-摇曳露营",
"season": 1,
"episode_index": 1,
"subtitle": "简日双语",
"fansub": "MCE汉化组",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_implicit_lang_title_sep() {
test_raw_ep_parser_case(
r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#,
r#"{
"name_en": "NieR Automata Ver1.1a",
"name_en_no_season": "NieR Automata Ver1.1a",
"name_zh": "尼尔:机械纪元",
"name_zh_no_season": "尼尔:机械纪元",
"season": 1,
"episode_index": 2,
"subtitle": "简日双语",
"fansub": "织梦字幕组",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_square_brackets_wrapped_and_space_split() {
test_raw_ep_parser_case(
r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
r#"
{
"name_en": "Delicious in Dungeon",
"name_en_no_season": "Delicious in Dungeon",
"name_zh": "迷宫饭",
"name_zh_no_season": "迷宫饭",
"season": 1,
"episode_index": 3,
"subtitle": "日语中字",
"source": "NETFLIX",
"fansub": "天月搬运组",
"resolution": "1080P"
}
"#,
)
}
#[test]
fn test_parse_ep_with_start_with_brackets_wrapped_season_info_prefix() {
test_raw_ep_parser_case(
r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
r#"{
"name_en": "Dungeon Meshi",
"name_en_no_season": "Dungeon Meshi",
"name_zh": "迷宫饭",
"name_zh_no_season": "迷宫饭",
"season": 1,
"episode_index": 1,
"subtitle": "简日双语",
"fansub": "爱恋字幕社",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_small_no_title_extra_brackets_case() {
test_raw_ep_parser_case(
r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_en": "Mahou Shoujo ni Akogarete",
"name_en_no_season": "Mahou Shoujo ni Akogarete",
"name_zh": "梦想成为魔法少女 [年龄限制版]",
"name_zh_no_season": "梦想成为魔法少女 [年龄限制版]",
"season": 1,
"episode_index": 9,
"subtitle": "CHT",
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_title_leading_space_style() {
test_raw_ep_parser_case(
r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_zh": "16bit 的感动 ANOTHER LAYER",
"name_zh_no_season": "16bit 的感动 ANOTHER LAYER",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "CHT",
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_title_leading_month_and_wrapped_brackets_style() {
test_raw_ep_parser_case(
r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#,
r#"{
"name_en": "~ Sugar Apple Fairy Tale ~",
"name_en_no_season": "~ Sugar Apple Fairy Tale ~",
"name_zh": "银砂糖师与黑妖精",
"name_zh_no_season": "银砂糖师与黑妖精",
"season": 1,
"episode_index": 13,
"subtitle": "简日双语",
"fansub": "喵萌奶茶屋",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_title_leading_month_style() {
test_raw_ep_parser_case(
r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4字幕社招人内详"#,
r#"{
"name_en": "Tengoku Daimakyou",
"name_en_no_season": "Tengoku Daimakyou",
"name_zh": "天国大魔境",
"name_zh_no_season": "天国大魔境",
"season": 1,
"episode_index": 5,
"subtitle": "字幕社招人内详",
"source": null,
"fansub": "极影字幕社",
"resolution": "720P"
}"#,
)
}
#[test]
fn test_parse_ep_tokusatsu_style() {
test_raw_ep_parser_case(
r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#,
r#"{
"name_jp": "仮面ライダーギーツ",
"name_jp_no_season": "仮面ライダーギーツ",
"name_zh": "假面骑士Geats",
"name_zh_no_season": "假面骑士Geats",
"season": 1,
"episode_index": 33,
"source": "WEBDL",
"fansub": "MagicStar",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_multi_lang_zh_title() {
test_raw_ep_parser_case(
r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对☆PICO FEVER / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#,
r#"{
"name_en": "Garupa Pico: Fever!",
"name_en_no_season": "Garupa Pico: Fever!",
"name_zh": "BanG Dream! 少女乐团派对☆PICO FEVER",
"name_zh_no_season": "BanG Dream! 少女乐团派对☆PICO FEVER",
"season": 1,
"episode_index": 26,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "百冬练习组&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_ep_collections() {
test_raw_ep_parser_case(
r#"[奶²&LoliHouse] 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简日内封字幕]"#,
r#"{
"name_en": "Kinokoinu: Mushroom Pup",
"name_en_no_season": "Kinokoinu: Mushroom Pup",
"name_zh": "蘑菇狗",
"name_zh_no_season": "蘑菇狗",
"season": 1,
"episode_index": 1,
"subtitle": "简日内封字幕",
"source": "WebRip",
"fansub": "奶²&LoliHouse",
"resolution": "1080p",
"name": " 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集]"
}"#,
);
test_raw_ep_parser_case(
r#"[LoliHouse] 叹气的亡灵想隐退 / Nageki no Bourei wa Intai shitai [01-13 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
r#"{
"name_en": "Nageki no Bourei wa Intai shitai",
"name_en_no_season": "Nageki no Bourei wa Intai shitai",
"name_jp": null,
"name_jp_no_season": null,
"name_zh": "叹气的亡灵想隐退",
"name_zh_no_season": "叹气的亡灵想隐退",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
);
test_raw_ep_parser_case(
r#"[LoliHouse] 精灵幻想记 第二季 / Seirei Gensouki S2 [01-12 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
r#"{
"name_en": "Seirei Gensouki S2",
"name_en_no_season": "Seirei Gensouki",
"name_zh": "精灵幻想记 第二季",
"name_zh_no_season": "精灵幻想记",
"season": 2,
"season_raw": "第二季",
"episode_index": 1,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
);
test_raw_ep_parser_case(
r#"[喵萌奶茶屋&LoliHouse] 超自然武装当哒当 / 胆大党 / Dandadan [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简繁日内封字幕][Fin]"#,
r#" {
"name_en": "Dandadan",
"name_en_no_season": "Dandadan",
"name_zh": "超自然武装当哒当",
"name_zh_no_season": "超自然武装当哒当",
"season": 1,
"episode_index": 1,
"subtitle": "简繁日内封字幕",
"source": "WebRip",
"fansub": "喵萌奶茶屋&LoliHouse",
"resolution": "1080p"
}"#,
);
}
// TODO: FIXME
#[test]
fn test_bad_cases() {
test_raw_ep_parser_case(
r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
r#"{
"name_zh": "摇曳露营△剧场版",
"name_zh_no_season": "摇曳露营△剧场版",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "简繁字幕",
"source": "BDrip",
"fansub": "7³ACG x 桜都字幕组",
"resolution": "1080p"
}"#,
);
test_raw_ep_parser_case(
r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#,
r#"{
"name_en": "第二季 Komi-san wa, Komyushou Desu. S02",
"name_en_no_season": "Komi-san wa, Komyushou Desu.",
"name_zh": "古见同学有交流障碍症",
"name_zh_no_season": "古见同学有交流障碍症",
"season": 2,
"season_raw": "第二季",
"episode_index": 22,
"subtitle": "GB",
"fansub": "幻樱字幕组",
"resolution": "1920X1080"
}"#,
);
}
}


@@ -0,0 +1,19 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
        infra::custom::register_entity_default_writable,
    },
    models::bangumi,
};

pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId);
}

pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<bangumi::BangumiType>();
    register_entity_default_writable!(builder, bangumi, false)
}


@@ -1,40 +1,28 @@
use std::sync::Arc; use std::sync::Arc;
use async_graphql::dynamic::{ use async_graphql::dynamic::{Field, FieldFuture, FieldValue, Object, TypeRef};
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef, use sea_orm::{EntityTrait, QueryFilter};
}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use seaography::Builder as SeaographyBuilder;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql; use util_derive::DynamicGraphql;
use crate::{ use crate::{
app::AppContextTrait, auth::AuthUserInfo, errors::RecorderError, models::credential_3rd, app::AppContextTrait,
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::{
crypto::{
register_crypto_column_input_conversion_to_schema_context,
register_crypto_column_output_conversion_to_schema_context,
},
custom::{generate_entity_filtered_mutation_field, register_entity_default_writable},
name::get_entity_custom_mutation_field_name,
},
},
models::credential_3rd,
}; };
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
struct Credential3rdCheckAvailableInput {
pub id: i32,
}
impl Credential3rdCheckAvailableInput {
fn input_type_name() -> &'static str {
"Credential3rdCheckAvailableInput"
}
fn arg_name() -> &'static str {
"filter"
}
fn generate_input_object() -> InputObject {
InputObject::new(Self::input_type_name())
.description("The input of the credential3rdCheckAvailable query")
.field(InputValue::new(
Credential3rdCheckAvailableInputFieldEnum::Id.as_str(),
TypeRef::named_nn(TypeRef::INT),
))
}
}
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)] #[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct Credential3rdCheckAvailableInfo { pub struct Credential3rdCheckAvailableInfo {
pub available: bool, pub available: bool,
@@ -63,53 +51,87 @@ impl Credential3rdCheckAvailableInfo {
} }
} }
pub fn register_credential3rd_to_schema_context(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
) {
restrict_subscriber_for_entity::<credential_3rd::Entity>(
context,
&credential_3rd::Column::SubscriberId,
);
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Cookies,
);
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Username,
);
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Password,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Cookies,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Username,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx,
&credential_3rd::Column::Password,
);
}
pub fn register_credential3rd_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
-    builder.schema = builder
-        .schema
-        .register(Credential3rdCheckAvailableInput::generate_input_object());
+    builder.register_enumeration::<credential_3rd::Credential3rdType>();
+    builder = register_entity_default_writable!(builder, credential_3rd, false);
    builder.schema = builder
        .schema
        .register(Credential3rdCheckAvailableInfo::generate_output_object());
-    builder.queries.push(
-        Field::new(
-            "credential3rdCheckAvailable",
-            TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
-            move |ctx| {
-                FieldFuture::new(async move {
-                    let auth_user_info = ctx.data::<AuthUserInfo>()?;
-                    let input: Credential3rdCheckAvailableInput = ctx
-                        .args
-                        .get(Credential3rdCheckAvailableInput::arg_name())
-                        .unwrap()
-                        .deserialize()?;
-                    let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
-                    let credential_model = credential_3rd::Model::find_by_id_and_subscriber_id(
-                        app_ctx.as_ref(),
-                        input.id,
-                        auth_user_info.subscriber_auth.subscriber_id,
-                    )
-                    .await?
-                    .ok_or_else(|| RecorderError::Credential3rdError {
-                        message: format!("credential = {} not found", input.id),
-                        source: None.into(),
-                    })?;
+    let builder_context = &builder.context;
+    {
+        let check_available_mutation_name = get_entity_custom_mutation_field_name::<
+            credential_3rd::Entity,
+        >(builder_context, "CheckAvailable");
+        let check_available_mutation =
+            generate_entity_filtered_mutation_field::<credential_3rd::Entity, _, _>(
+                builder_context,
+                check_available_mutation_name,
+                TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
+                Arc::new(|_resolver_ctx, app_ctx, filters| {
+                    Box::pin(async move {
+                        let db = app_ctx.db();
+                        let credential_model = credential_3rd::Entity::find()
+                            .filter(filters)
+                            .one(db)
+                            .await?
+                            .ok_or_else(|| {
+                                RecorderError::from_entity_not_found::<credential_3rd::Entity>()
+                            })?;
                    let available = credential_model.check_available(app_ctx.as_ref()).await?;
                    Ok(Some(FieldValue::owned_any(
                        Credential3rdCheckAvailableInfo { available },
                    )))
                })
-            },
-        )
-        .argument(InputValue::new(
-            Credential3rdCheckAvailableInput::arg_name(),
-            TypeRef::named_nn(Credential3rdCheckAvailableInput::input_type_name()),
-        )),
-    );
+                }),
+            );
+        builder.mutations.push(check_available_mutation);
+    }
    builder
}
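// A minimal sketch of calling the mutation this registers, assuming the field
// renders as `credential3rdCheckAvailable` (via
// `get_entity_custom_mutation_field_name`) and that seaography's default
// filter input shape applies; both depend on the BuilderContext configuration.
async fn check_credential_sketch(schema: &async_graphql::dynamic::Schema) {
    let mutation = r#"
        mutation {
            credential3rdCheckAvailable(filter: { id: { eq: 1 } }) {
                available
            }
        }
    "#;
    let response = schema.execute(mutation).await;
    assert!(response.errors.is_empty(), "{:?}", response.errors);
}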

View File

@@ -0,0 +1,64 @@
use sea_orm::Iterable;
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::{
domains::{
subscriber_tasks::restrict_subscriber_tasks_for_entity,
subscribers::restrict_subscriber_for_entity,
system_tasks::restrict_system_tasks_for_entity,
},
infra::{custom::register_entity_default_writable, name::get_entity_and_column_name},
},
models::cron,
};
fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in cron::Column::iter() {
if matches!(
column,
cron::Column::SubscriberTaskCron
| cron::Column::SystemTaskCron
| cron::Column::CronExpr
| cron::Column::CronTimezone
| cron::Column::Enabled
| cron::Column::TimeoutMs
| cron::Column::MaxAttempts
) {
continue;
}
let entity_column_key = get_entity_and_column_name::<cron::Entity>(context, &column);
context.entity_input.insert_skips.push(entity_column_key);
}
for column in cron::Column::iter() {
    if matches!(
        column,
        cron::Column::CronExpr
            | cron::Column::CronTimezone
            | cron::Column::Enabled
            | cron::Column::TimeoutMs
            | cron::Column::Priority
            | cron::Column::MaxAttempts
    ) {
        continue;
    }
let entity_column_key = get_entity_and_column_name::<cron::Entity>(context, &column);
context.entity_input.update_skips.push(entity_column_key);
}
}
pub fn register_cron_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<cron::Entity>(context, &cron::Column::SubscriberId);
restrict_subscriber_tasks_for_entity::<cron::Entity>(
context,
&cron::Column::SubscriberTaskCron,
);
restrict_system_tasks_for_entity::<cron::Entity>(context, &cron::Column::SystemTaskCron);
skip_columns_for_entity_input(context);
}
pub fn register_cron_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<cron::CronStatus>();
builder = register_entity_default_writable!(builder, cron, true);
builder
}
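// A hedged illustration, not generated code: after the skips above, the
// insert input is reduced to roughly the fields below (one of the two
// task-cron payloads plus the scheduling knobs), while the update input
// additionally allows `priority` but drops the task-cron payloads. Field
// types here are guesses at the underlying columns.
#[allow(dead_code)]
struct CronInsertInputSketch {
    subscriber_task_cron: Option<serde_json::Value>,
    system_task_cron: Option<serde_json::Value>,
    cron_expr: String,
    cron_timezone: String,
    enabled: bool,
    timeout_ms: i64,
    max_attempts: i32,
}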

View File

@@ -1,106 +0,0 @@
use std::sync::Arc;
use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::{EntityTrait, Value as SeaValue};
use seaography::{BuilderContext, SeaResult};
use crate::{
app::AppContextTrait,
graphql::infra::util::{get_column_key, get_entity_key},
models::credential_3rd,
};
fn register_crypto_column_input_conversion_to_schema_context<T>(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
column: &T::Column,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
context.types.input_conversions.insert(
format!("{entity_name}.{column_name}"),
Box::new(
move |_resolve_context: &ResolverContext<'_>,
value: &ValueAccessor|
-> SeaResult<sea_orm::Value> {
let source = value.string()?;
let encrypted = ctx.crypto().encrypt_string(source.into())?;
Ok(encrypted.into())
},
),
);
}
fn register_crypto_column_output_conversion_to_schema_context<T>(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
column: &T::Column,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
context.types.output_conversions.insert(
format!("{entity_name}.{column_name}"),
Box::new(
move |value: &sea_orm::Value| -> SeaResult<async_graphql::Value> {
if let SeaValue::String(s) = value {
if let Some(s) = s {
let decrypted = ctx.crypto().decrypt_string(s)?;
Ok(async_graphql::Value::String(decrypted))
} else {
Ok(async_graphql::Value::Null)
}
} else {
Err(async_graphql::Error::new("crypto column must be string column").into())
}
},
),
);
}
pub fn register_crypto_to_schema_context(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
) {
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Cookies,
);
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Username,
);
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Password,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Cookies,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Username,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx,
&credential_3rd::Column::Password,
);
}

View File

@@ -0,0 +1,23 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::downloaders,
};
pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<downloaders::Entity>(
context,
&downloaders::Column::SubscriberId,
);
}
pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<downloaders::DownloaderCategory>();
builder = register_entity_default_writable!(builder, downloaders, false);
builder
}

View File

@@ -0,0 +1,21 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::downloads,
};
pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId);
}
pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<downloads::DownloadStatus>();
builder.register_enumeration::<downloads::DownloadMime>();
builder = register_entity_default_writable!(builder, downloads, false);
builder
}

View File

@@ -0,0 +1,20 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::episodes,
};
pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId);
}
pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<episodes::EpisodeType>();
builder = register_entity_default_writable!(builder, episodes, false);
builder
}

View File

@@ -0,0 +1,58 @@
use std::sync::Arc;
use async_graphql::dynamic::ResolverContext;
use sea_orm::Value as SeaValue;
use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};
use uuid::Uuid;
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::{
custom::register_entity_default_writable,
name::{
get_entity_and_column_name, get_entity_create_batch_mutation_field_name,
get_entity_create_one_mutation_field_name,
},
},
},
models::feeds,
};
pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<feeds::Entity>(context, &feeds::Column::SubscriberId);
{
let entity_create_one_mutation_field_name = Arc::new(
get_entity_create_one_mutation_field_name::<feeds::Entity>(context),
);
let entity_create_batch_mutation_field_name =
Arc::new(get_entity_create_batch_mutation_field_name::<feeds::Entity>(context));
context.types.input_none_conversions.insert(
get_entity_and_column_name::<feeds::Entity>(context, &feeds::Column::Token),
Box::new(
move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
let field_name = context.field().name();
if field_name == entity_create_one_mutation_field_name.as_str()
|| field_name == entity_create_batch_mutation_field_name.as_str()
{
Ok(Some(SeaValue::String(Some(Box::new(
Uuid::now_v7().to_string(),
)))))
} else {
Ok(None)
}
},
),
);
}
}
pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<feeds::FeedType>();
builder.register_enumeration::<feeds::FeedSource>();
builder = register_entity_default_writable!(builder, feeds, false);
builder
}
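// A minimal standalone sketch of the default-token conversion above, assuming
// the `uuid` crate with its `v7` feature: when a create mutation omits
// `token`, a time-ordered UUID string is filled in; other mutations leave the
// field alone.
use uuid::Uuid;

fn default_feed_token(provided: Option<String>) -> String {
    // `now_v7` ids sort by creation time, keeping generated tokens roughly ordered.
    provided.unwrap_or_else(|| Uuid::now_v7().to_string())
}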

View File

@@ -1,5 +1,14 @@
pub mod credential_3rd;
-pub mod crypto;
+pub mod bangumi;
+pub mod cron;
+pub mod downloaders;
+pub mod downloads;
+pub mod episodes;
+pub mod feeds;
pub mod subscriber_tasks;
pub mod subscribers;
+pub mod subscription_bangumi;
+pub mod subscription_episode;
pub mod subscriptions;
+pub mod system_tasks;

View File

@@ -1,19 +1,253 @@
-use seaography::{Builder as SeaographyBuilder, BuilderContext};
+use std::{ops::Deref, sync::Arc};
+
+use async_graphql::dynamic::{FieldValue, Scalar, TypeRef};
+use convert_case::Case;
+use sea_orm::{
+    ActiveModelBehavior, ColumnTrait, ConnectionTrait, EntityTrait, Iterable, QueryFilter,
+    QuerySelect, QueryTrait, prelude::Expr, sea_query::Query,
+};
+use seaography::{
+    Builder as SeaographyBuilder, BuilderContext, SeaographyError, prepare_active_model,
+};
+use ts_rs::TS;

use crate::{
-    graphql::infra::json::restrict_jsonb_filter_input_for_entity, models::subscriber_tasks,
+    auth::AuthUserInfo,
+    errors::RecorderError,
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::{
+            custom::{
+                generate_entity_create_one_mutation_field,
+                generate_entity_default_basic_entity_object,
+                generate_entity_default_insert_input_object, generate_entity_delete_mutation_field,
+                generate_entity_filtered_mutation_field, register_entity_default_readonly,
+            },
+            json::{convert_jsonb_output_for_entity, restrict_jsonb_filter_input_for_entity},
+            name::{
+                get_entity_and_column_name, get_entity_basic_type_name,
+                get_entity_custom_mutation_field_name,
+            },
+        },
+    },
+    migrations::defs::{ApalisJobs, ApalisSchema},
+    models::subscriber_tasks,
+    task::SubscriberTaskTrait,
+};
fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in subscriber_tasks::Column::iter() {
if matches!(
column,
subscriber_tasks::Column::Job | subscriber_tasks::Column::SubscriberId
) {
continue;
}
let entity_column_key =
get_entity_and_column_name::<subscriber_tasks::Entity>(context, &column);
context.entity_input.insert_skips.push(entity_column_key);
}
}
pub fn restrict_subscriber_tasks_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_and_column = get_entity_and_column_name::<T>(context, column);
restrict_jsonb_filter_input_for_entity::<T>(context, column);
convert_jsonb_output_for_entity::<T>(context, column, Some(Case::Camel));
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.types.input_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(subscriber_tasks::SubscriberTask::ident().into()),
);
context.types.output_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(subscriber_tasks::SubscriberTask::ident().into()),
);
context.types.input_conversions.insert(
entity_column_name.clone(),
Box::new(move |resolve_context, value_accessor| {
let task: subscriber_tasks::SubscriberTaskInput = value_accessor.deserialize()?;
let subscriber_id = resolve_context
.data::<AuthUserInfo>()?
.subscriber_auth
.subscriber_id;
let task = subscriber_tasks::SubscriberTask::from_input(task, subscriber_id);
let json_value = serde_json::to_value(task).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_name}"),
)
})?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}),
);
context.entity_input.update_skips.push(entity_and_column);
}
pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
-    restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
+    restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
+        context,
+        &subscriber_tasks::Column::SubscriberId,
+    );
+    restrict_subscriber_tasks_for_entity::<subscriber_tasks::Entity>(
        context,
        &subscriber_tasks::Column::Job,
    );
+    skip_columns_for_entity_input(context);
}

pub fn register_subscriber_tasks_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.schema = builder.schema.register(
Scalar::new(subscriber_tasks::SubscriberTask::ident())
.description(subscriber_tasks::SubscriberTask::decl()),
);
builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
builder = register_entity_default_readonly!(builder, subscriber_tasks);
let builder_context = builder.context;
{
builder
.outputs
.push(generate_entity_default_basic_entity_object::<
subscriber_tasks::Entity,
>(builder_context));
}
{
let delete_mutation = generate_entity_delete_mutation_field::<subscriber_tasks::Entity>(
builder_context,
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let select_subquery = subscriber_tasks::Entity::find()
.select_only()
.column(subscriber_tasks::Column::Id)
.filter(filters);
let delete_query = Query::delete()
.from_table((ApalisSchema::Schema, ApalisJobs::Table))
.and_where(
Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()),
)
.to_owned();
let db_backend = db.deref().get_database_backend();
let delete_statement = db_backend.build(&delete_query);
let result = db.execute(delete_statement).await?;
Ok::<_, RecorderError>(result.rows_affected())
})
}),
);
builder.mutations.push(delete_mutation);
}
{
let entity_retry_one_mutation_name = get_entity_custom_mutation_field_name::<
subscriber_tasks::Entity,
>(builder_context, "RetryOne");
let retry_one_mutation =
generate_entity_filtered_mutation_field::<subscriber_tasks::Entity, _, _>(
builder_context,
entity_retry_one_mutation_name,
TypeRef::named_nn(get_entity_basic_type_name::<subscriber_tasks::Entity>(
builder_context,
)),
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let job_id = subscriber_tasks::Entity::find()
.filter(filters)
.select_only()
.column(subscriber_tasks::Column::Id)
.into_tuple::<String>()
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
})?;
let task = app_ctx.task();
task.retry_subscriber_task(job_id.clone()).await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(&job_id))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
})?;
Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(retry_one_mutation);
}
{
builder
.inputs
.push(generate_entity_default_insert_input_object::<
subscriber_tasks::Entity,
>(builder_context));
let create_one_mutation =
generate_entity_create_one_mutation_field::<subscriber_tasks::Entity>(
builder_context,
Arc::new(move |resolver_ctx, app_ctx, input_object| {
Box::pin(async move {
let active_model: Result<subscriber_tasks::ActiveModel, _> =
prepare_active_model(builder_context, &input_object, resolver_ctx);
let task_service = app_ctx.task();
let active_model = active_model?;
let db = app_ctx.db();
let active_model = active_model.before_save(db, true).await?;
let task = active_model.job.unwrap();
let subscriber_id = active_model.subscriber_id.unwrap();
if task.get_subscriber_id() != subscriber_id {
Err(async_graphql::Error::new(
"subscriber_id does not match with job.subscriber_id",
))?;
}
let task_id = task_service.add_subscriber_task(task).await?.to_string();
let db = app_ctx.db();
let task = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(&task_id))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
})?;
Ok::<_, RecorderError>(task)
})
}),
);
builder.mutations.push(create_one_mutation);
}
builder
}
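// A standalone sketch of the delete-by-subquery pattern used in the delete
// mutation above, written against plain sea-query; the `apalis.jobs` schema,
// table, and column identifiers here are stand-ins for the real migration
// definitions.
use sea_query::{Alias, Expr, PostgresQueryBuilder, Query};

fn delete_jobs_sql_sketch() -> String {
    // Select the ids of the rows matching the GraphQL filter condition.
    let select_ids = Query::select()
        .column(Alias::new("id"))
        .from(Alias::new("subscriber_tasks"))
        .and_where(Expr::col(Alias::new("subscriber_id")).eq(1))
        .to_owned();

    // Delete from the underlying apalis jobs table by subquery membership.
    Query::delete()
        .from_table((Alias::new("apalis"), Alias::new("jobs")))
        .and_where(Expr::col(Alias::new("id")).in_subquery(select_ids))
        .to_string(PostgresQueryBuilder)
}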

View File

@@ -7,12 +7,22 @@ use sea_orm::{ColumnTrait, Condition, EntityTrait, Iterable, Value as SeaValue};
use seaography::{
    Builder as SeaographyBuilder, BuilderContext, FilterInfo,
    FilterOperation as SeaographqlFilterOperation, FilterType, FilterTypesMapHelper,
-    FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult, SeaographyError,
+    FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult,
};

use crate::{
    auth::{AuthError, AuthUserInfo},
-    graphql::infra::util::{get_column_key, get_entity_column_key, get_entity_key},
+    graphql::infra::{
+        custom::register_entity_default_readonly,
+        name::{
+            get_column_name, get_entity_and_column_name,
+            get_entity_create_batch_mutation_data_field_name,
+            get_entity_create_batch_mutation_field_name,
+            get_entity_create_one_mutation_data_field_name,
+            get_entity_create_one_mutation_field_name, get_entity_name,
+            get_entity_update_mutation_data_field_name, get_entity_update_mutation_field_name,
+        },
+    },
    models::subscribers,
};
@@ -82,32 +92,19 @@ where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-    let entity_key = get_entity_key::<T>(context);
-    let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
-    let column_key = get_column_key::<T>(context, column);
-    let column_name = Arc::new(context.entity_object.column_name.as_ref()(
-        &entity_key,
-        &column_key,
-    ));
-    let entity_create_one_mutation_field_name = Arc::new(format!(
-        "{}{}",
-        entity_name, context.entity_create_one_mutation.mutation_suffix
-    ));
+    let column_name = Arc::new(get_column_name::<T>(context, column));
+    let entity_create_one_mutation_field_name =
+        Arc::new(get_entity_create_one_mutation_field_name::<T>(context));
    let entity_create_one_mutation_data_field_name =
-        Arc::new(context.entity_create_one_mutation.data_field.clone());
-    let entity_create_batch_mutation_field_name = Arc::new(format!(
-        "{}{}",
-        entity_name,
-        context.entity_create_batch_mutation.mutation_suffix.clone()
-    ));
+        Arc::new(get_entity_create_one_mutation_data_field_name(context).to_string());
+    let entity_create_batch_mutation_field_name =
+        Arc::new(get_entity_create_batch_mutation_field_name::<T>(context));
    let entity_create_batch_mutation_data_field_name =
-        Arc::new(context.entity_create_batch_mutation.data_field.clone());
-    let entity_update_mutation_field_name = Arc::new(format!(
-        "{}{}",
-        entity_name, context.entity_update_mutation.mutation_suffix
-    ));
+        Arc::new(get_entity_create_batch_mutation_data_field_name(context).to_string());
+    let entity_update_mutation_field_name =
+        Arc::new(get_entity_update_mutation_field_name::<T>(context));
    let entity_update_mutation_data_field_name =
-        Arc::new(context.entity_update_mutation.data_field.clone());
+        Arc::new(get_entity_update_mutation_data_field_name(context).to_string());

    Box::new(move |context: &ResolverContext| -> GuardAction {
        match context.ctx.data::<AuthUserInfo>() {
@@ -222,11 +219,10 @@ where
            if let Some(value) = filter.get("eq") {
                let value: i32 = value.i64()?.try_into()?;
                if value != subscriber_id {
-                    return Err(SeaographyError::AsyncGraphQLError(
-                        async_graphql::Error::new(
-                            "subscriber_id and auth_info does not match",
-                        ),
-                    ));
+                    return Err(async_graphql::Error::new(
+                        "subscriber_id and auth_info does not match",
+                    )
+                    .into());
                }
            }
        }
@@ -253,17 +249,10 @@ where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-    let entity_key = get_entity_key::<T>(context);
-    let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
-    let entity_create_one_mutation_field_name = Arc::new(format!(
-        "{}{}",
-        entity_name, context.entity_create_one_mutation.mutation_suffix
-    ));
-    let entity_create_batch_mutation_field_name = Arc::new(format!(
-        "{}{}",
-        entity_name,
-        context.entity_create_batch_mutation.mutation_suffix.clone()
-    ));
+    let entity_create_one_mutation_field_name =
+        Arc::new(get_entity_create_one_mutation_field_name::<T>(context));
+    let entity_create_batch_mutation_field_name =
+        Arc::new(get_entity_create_batch_mutation_field_name::<T>(context));
    Box::new(
        move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
            let field_name = context.field().name();
@@ -289,43 +278,39 @@ where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-    let entity_key = get_entity_key::<T>(context);
-    let entity_column_key = get_entity_column_key::<T>(context, column);
-    let column_name = context.entity_object.column_name.as_ref()(&entity_key, &entity_column_key);
+    let entity_and_column = get_entity_and_column_name::<T>(context, column);
    context.guards.entity_guards.insert(
-        entity_key.clone(),
+        get_entity_name::<T>(context),
        guard_entity_with_subscriber_id::<T>(context, column),
    );
    context.guards.field_guards.insert(
-        entity_column_key.clone(),
+        get_entity_and_column_name::<T>(context, column),
        guard_field_with_subscriber_id::<T>(context, column),
    );
    context.filter_types.overwrites.insert(
-        entity_column_key.clone(),
+        get_entity_and_column_name::<T>(context, column),
        Some(FilterType::Custom(
            SUBSCRIBER_ID_FILTER_INFO.type_name.clone(),
        )),
    );
    context.filter_types.condition_functions.insert(
-        entity_column_key.clone(),
+        entity_and_column.clone(),
        generate_subscriber_id_filter_condition::<T>(context, column),
    );
    context.types.input_none_conversions.insert(
-        column_name.clone(),
+        entity_and_column.clone(),
        generate_default_subscriber_id_input_conversion::<T>(context, column),
    );
-    context
-        .entity_input
-        .insert_skips
-        .push(entity_column_key.clone());
-    context.entity_input.update_skips.push(entity_column_key);
+    context.entity_input.update_skips.push(entity_and_column);
}

pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
+    restrict_subscriber_for_entity::<subscribers::Entity>(context, &subscribers::Column::Id);
    for column in subscribers::Column::iter() {
        if !matches!(column, subscribers::Column::Id) {
-            let key = get_entity_column_key::<subscribers::Entity>(context, &column);
+            let key = get_entity_and_column_name::<subscribers::Entity>(context, &column);
            context.filter_types.overwrites.insert(key, None);
        }
    }
@@ -333,24 +318,14 @@ pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
pub fn register_subscribers_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    {
-        let filter_types_map_helper = FilterTypesMapHelper {
-            context: builder.context,
-        };
        builder.schema = builder
            .schema
-            .register(filter_types_map_helper.generate_filter_input(&SUBSCRIBER_ID_FILTER_INFO));
+            .register(FilterTypesMapHelper::generate_filter_input(
+                &SUBSCRIBER_ID_FILTER_INFO,
+            ));
    }
-    {
-        builder.register_entity::<subscribers::Entity>(
-            <subscribers::RelatedEntity as sea_orm::Iterable>::iter()
-                .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
-                .collect(),
-        );
-        builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
-        builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
-    }
+    builder = register_entity_default_readonly!(builder, subscribers);
    builder
}

View File

@@ -0,0 +1,24 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::subscription_bangumi,
};
pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
context,
&subscription_bangumi::Column::SubscriberId,
);
}
pub fn register_subscription_bangumi_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder = register_entity_default_writable!(builder, subscription_bangumi, false);
builder
}

View File

@@ -0,0 +1,24 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::subscription_episode,
};
pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscription_episode::Entity>(
context,
&subscription_episode::Column::SubscriberId,
);
}
pub fn register_subscription_episode_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder = register_entity_default_writable!(builder, subscription_episode, false);
builder
}

View File

@@ -1,226 +1,24 @@
-use std::sync::Arc;
-
-use async_graphql::dynamic::{
-    Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
-};
-use seaography::Builder as SeaographyBuilder;
-use serde::{Deserialize, Serialize};
-use util_derive::DynamicGraphql;
+use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
-    app::AppContextTrait,
-    auth::AuthUserInfo,
-    models::subscriptions::{self, SubscriptionTrait},
-    task::SubscriberTask,
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::custom::register_entity_default_writable,
+    },
+    models::subscriptions,
};

-#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
-struct SyncOneSubscriptionFilterInput {
-    pub id: i32,
-}
-
-impl SyncOneSubscriptionFilterInput {
-    fn input_type_name() -> &'static str {
-        "SyncOneSubscriptionFilterInput"
-    }
-
-    fn arg_name() -> &'static str {
-        "filter"
-    }
-
-    fn generate_input_object() -> InputObject {
-        InputObject::new(Self::input_type_name())
-            .description("The input of the subscriptionSyncOne series of mutations")
-            .field(InputValue::new(
-                SyncOneSubscriptionFilterInputFieldEnum::Id.as_str(),
-                TypeRef::named_nn(TypeRef::INT),
-            ))
-    }
-}
-
-#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
-pub struct SyncOneSubscriptionInfo {
-    pub task_id: String,
-}
-
-impl SyncOneSubscriptionInfo {
-    fn object_type_name() -> &'static str {
-        "SyncOneSubscriptionInfo"
-    }
-
-    fn generate_output_object() -> Object {
-        Object::new(Self::object_type_name())
-            .description("The output of the subscriptionSyncOne series of mutations")
-            .field(Field::new(
-                SyncOneSubscriptionInfoFieldEnum::TaskId,
-                TypeRef::named_nn(TypeRef::STRING),
-                move |ctx| {
-                    FieldFuture::new(async move {
-                        let subscription_info = ctx.parent_value.try_downcast_ref::<Self>()?;
-                        Ok(Some(async_graphql::Value::from(
-                            subscription_info.task_id.as_str(),
-                        )))
-                    })
-                },
-            ))
-    }
-}
+pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) {
+    restrict_subscriber_for_entity::<subscriptions::Entity>(
+        context,
+        &subscriptions::Column::SubscriberId,
+    );
+}

pub fn register_subscriptions_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
-    builder.schema = builder
-        .schema
-        .register(SyncOneSubscriptionFilterInput::generate_input_object());
+    builder.register_enumeration::<subscriptions::SubscriptionCategory>();
+    builder = register_entity_default_writable!(builder, subscriptions, false);
builder.schema = builder
.schema
.register(SyncOneSubscriptionInfo::generate_output_object());
builder.mutations.push(
Field::new(
"subscriptionSyncOneFeedsIncremental",
TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;
let filter_input: SyncOneSubscriptionFilterInput = ctx
.args
.get(SyncOneSubscriptionFilterInput::arg_name())
.unwrap()
.deserialize()?;
let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
filter_input.id,
subscriber_id,
)
.await?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
auth_user_info.subscriber_auth.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsIncremental(
subscription.into(),
),
)
.await?;
Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
task_id: task_id.to_string(),
})))
})
},
)
.argument(InputValue::new(
SyncOneSubscriptionFilterInput::arg_name(),
TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
)),
);
builder.mutations.push(
Field::new(
"subscriptionSyncOneFeedsFull",
TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;
let filter_input: SyncOneSubscriptionFilterInput = ctx
.args
.get(SyncOneSubscriptionFilterInput::arg_name())
.unwrap()
.deserialize()?;
let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
filter_input.id,
subscriber_id,
)
.await?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
auth_user_info.subscriber_auth.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsFull(subscription.into()),
)
.await?;
Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
task_id: task_id.to_string(),
})))
})
},
)
.argument(InputValue::new(
SyncOneSubscriptionFilterInput::arg_name(),
TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
)),
);
builder.mutations.push(
Field::new(
"subscriptionSyncOneSources",
TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;
let filter_input: SyncOneSubscriptionFilterInput = ctx
.args
.get(SyncOneSubscriptionFilterInput::arg_name())
.unwrap()
.deserialize()?;
let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
filter_input.id,
subscriber_id,
)
.await?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
auth_user_info.subscriber_auth.subscriber_id,
SubscriberTask::SyncOneSubscriptionSources(subscription.into()),
)
.await?;
Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
task_id: task_id.to_string(),
})))
})
},
)
.argument(InputValue::new(
SyncOneSubscriptionFilterInput::arg_name(),
TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
)),
);
builder
}

View File

@@ -0,0 +1,258 @@
use std::{ops::Deref, sync::Arc};
use async_graphql::dynamic::{FieldValue, Scalar, TypeRef};
use convert_case::Case;
use sea_orm::{
ActiveModelBehavior, ColumnTrait, ConnectionTrait, EntityTrait, Iterable, QueryFilter,
QuerySelect, QueryTrait, prelude::Expr, sea_query::Query,
};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, GuardAction, SeaographyError,
prepare_active_model,
};
use ts_rs::TS;
use crate::{
auth::AuthUserInfo,
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::{
custom::{
generate_entity_create_one_mutation_field,
generate_entity_default_basic_entity_object,
generate_entity_default_insert_input_object, generate_entity_delete_mutation_field,
generate_entity_filtered_mutation_field, register_entity_default_readonly,
},
json::{convert_jsonb_output_for_entity, restrict_jsonb_filter_input_for_entity},
name::{
get_entity_and_column_name, get_entity_basic_type_name,
get_entity_custom_mutation_field_name,
},
},
},
migrations::defs::{ApalisJobs, ApalisSchema},
models::system_tasks,
task::SystemTaskTrait,
};
fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in system_tasks::Column::iter() {
if matches!(
column,
system_tasks::Column::Job | system_tasks::Column::SubscriberId
) {
continue;
}
let entity_column_key =
get_entity_and_column_name::<system_tasks::Entity>(context, &column);
context.entity_input.insert_skips.push(entity_column_key);
}
}
pub fn restrict_system_tasks_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_and_column = get_entity_and_column_name::<T>(context, column);
restrict_jsonb_filter_input_for_entity::<T>(context, column);
convert_jsonb_output_for_entity::<T>(context, column, Some(Case::Camel));
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.guards.field_guards.insert(
entity_column_name.clone(),
Box::new(|_resolver_ctx| {
GuardAction::Block(Some(
"SystemTask can not be created by subscribers now".to_string(),
))
}),
);
context.types.input_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(system_tasks::SystemTask::ident().into()),
);
context.types.output_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(system_tasks::SystemTask::ident().into()),
);
context.types.input_conversions.insert(
entity_column_name.clone(),
Box::new(move |resolve_context, value_accessor| {
let task: system_tasks::SystemTaskInput = value_accessor.deserialize()?;
let subscriber_id = resolve_context
.data::<AuthUserInfo>()?
.subscriber_auth
.subscriber_id;
let task = system_tasks::SystemTask::from_input(task, Some(subscriber_id));
let json_value = serde_json::to_value(task).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_name}"),
)
})?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}),
);
context.entity_input.update_skips.push(entity_and_column);
}
pub fn register_system_tasks_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<system_tasks::Entity>(
context,
&system_tasks::Column::SubscriberId,
);
restrict_system_tasks_for_entity::<system_tasks::Entity>(context, &system_tasks::Column::Job);
skip_columns_for_entity_input(context);
}
pub fn register_system_tasks_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.schema = builder.schema.register(
Scalar::new(system_tasks::SystemTask::ident())
.description(system_tasks::SystemTask::decl()),
);
builder.register_enumeration::<system_tasks::SystemTaskType>();
builder.register_enumeration::<system_tasks::SystemTaskStatus>();
builder = register_entity_default_readonly!(builder, system_tasks);
let builder_context = builder.context;
{
builder
.outputs
.push(generate_entity_default_basic_entity_object::<
system_tasks::Entity,
>(builder_context));
}
{
let delete_mutation = generate_entity_delete_mutation_field::<system_tasks::Entity>(
builder_context,
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let select_subquery = system_tasks::Entity::find()
.select_only()
.column(system_tasks::Column::Id)
.filter(filters);
let delete_query = Query::delete()
.from_table((ApalisSchema::Schema, ApalisJobs::Table))
.and_where(
Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()),
)
.to_owned();
let db_backend = db.deref().get_database_backend();
let delete_statement = db_backend.build(&delete_query);
let result = db.execute(delete_statement).await?;
Ok::<_, RecorderError>(result.rows_affected())
})
}),
);
builder.mutations.push(delete_mutation);
}
{
let entity_retry_one_mutation_name = get_entity_custom_mutation_field_name::<
system_tasks::Entity,
>(builder_context, "RetryOne");
let retry_one_mutation =
generate_entity_filtered_mutation_field::<system_tasks::Entity, _, _>(
builder_context,
entity_retry_one_mutation_name,
TypeRef::named_nn(get_entity_basic_type_name::<system_tasks::Entity>(
builder_context,
)),
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let job_id = system_tasks::Entity::find()
.filter(filters)
.select_only()
.column(system_tasks::Column::Id)
.into_tuple::<String>()
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<system_tasks::Entity>()
})?;
let task = app_ctx.task();
task.retry_subscriber_task(job_id.clone()).await?;
let task_model = system_tasks::Entity::find()
.filter(system_tasks::Column::Id.eq(&job_id))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<system_tasks::Entity>()
})?;
Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(retry_one_mutation);
}
{
builder
.inputs
.push(generate_entity_default_insert_input_object::<
system_tasks::Entity,
>(builder_context));
let create_one_mutation = generate_entity_create_one_mutation_field::<system_tasks::Entity>(
builder_context,
Arc::new(move |resolver_ctx, app_ctx, input_object| {
Box::pin(async move {
let active_model: Result<system_tasks::ActiveModel, _> =
prepare_active_model(builder_context, &input_object, resolver_ctx);
let task_service = app_ctx.task();
let active_model = active_model?;
let db = app_ctx.db();
let active_model = active_model.before_save(db, true).await?;
let task = active_model.job.unwrap();
let subscriber_id = active_model.subscriber_id.unwrap();
if task.get_subscriber_id() != subscriber_id {
Err(async_graphql::Error::new(
"subscriber_id does not match with job.subscriber_id",
))?;
}
let task_id = task_service.add_system_task(task).await?.to_string();
let db = app_ctx.db();
let task = system_tasks::Entity::find()
.filter(system_tasks::Column::Id.eq(&task_id))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<system_tasks::Entity>()
})?;
Ok::<_, RecorderError>(task)
})
}),
);
builder.mutations.push(create_one_mutation);
}
builder
}

View File

@@ -0,0 +1,56 @@
use std::sync::Arc;
use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::{EntityTrait, Value as SeaValue};
use seaography::{BuilderContext, SeaResult};
use crate::{app::AppContextTrait, graphql::infra::name::get_entity_and_column_name};
pub fn register_crypto_column_input_conversion_to_schema_context<T>(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
column: &T::Column,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
context.types.input_conversions.insert(
get_entity_and_column_name::<T>(context, column),
Box::new(
move |_resolve_context: &ResolverContext<'_>,
value: &ValueAccessor|
-> SeaResult<sea_orm::Value> {
let source = value.string()?;
let encrypted = ctx.crypto().encrypt_string(source.into())?;
Ok(encrypted.into())
},
),
);
}
pub fn register_crypto_column_output_conversion_to_schema_context<T>(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
column: &T::Column,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
context.types.output_conversions.insert(
get_entity_and_column_name::<T>(context, column),
Box::new(
move |value: &sea_orm::Value| -> SeaResult<async_graphql::Value> {
if let SeaValue::String(s) = value {
if let Some(s) = s {
let decrypted = ctx.crypto().decrypt_string(s)?;
Ok(async_graphql::Value::String(decrypted))
} else {
Ok(async_graphql::Value::Null)
}
} else {
Err(async_graphql::Error::new("crypto column must be string column").into())
}
},
),
);
}
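// A toy round-trip, not the crate's real cipher: it only illustrates the
// contract of the two conversions above (encrypt on input, store ciphertext,
// decrypt on output), using XOR as a stand-in for `ctx.crypto()`.
fn xor_cipher(bytes: &[u8]) -> Vec<u8> {
    bytes.iter().map(|b| b ^ 0x5a).collect()
}

fn crypto_round_trip_sketch() {
    let input = "session-cookie=abc123";
    let stored = xor_cipher(input.as_bytes()); // what the column would hold
    let output = String::from_utf8(xor_cipher(&stored)).unwrap(); // output conversion
    assert_eq!(input, output);
    assert_ne!(stored, input.as_bytes());
}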

View File

@@ -0,0 +1,441 @@
use std::{iter::FusedIterator, pin::Pin, sync::Arc};
use async_graphql::dynamic::{
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, ObjectAccessor,
ResolverContext, TypeRef,
};
use sea_orm::{ActiveModelTrait, Condition, EntityTrait, IntoActiveModel};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, EntityCreateBatchMutationBuilder,
EntityCreateOneMutationBuilder, EntityDeleteMutationBuilder, EntityInputBuilder,
EntityObjectBuilder, EntityUpdateMutationBuilder, GuardAction, RelationBuilder,
get_filter_conditions,
};
use crate::{
app::AppContextTrait,
errors::RecorderResult,
graphql::infra::name::{
get_entity_filter_input_type_name, get_entity_name,
get_entity_renormalized_filter_field_name,
},
};
pub type FilterMutationFn = Arc<
dyn for<'a> Fn(
&ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Condition,
) -> Pin<
Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>,
> + Send
+ Sync,
>;
pub type CreateOneMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
ObjectAccessor<'a>,
) -> Pin<Box<dyn Future<Output = RecorderResult<M>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type CreateBatchMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Vec<ObjectAccessor<'a>>,
) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type UpdateMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Condition,
ObjectAccessor<'a>,
) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type DeleteMutationFn = Arc<
dyn for<'a> Fn(
&ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Condition,
) -> Pin<Box<dyn Future<Output = RecorderResult<u64>> + Send + 'a>>
+ Send
+ Sync,
>;
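// A stripped-down sketch of the closure shape these aliases describe: an
// `Arc`'d async function that can be cheaply cloned into each resolver call.
// Compared to the real `FilterMutationFn`, the borrowed `ResolverContext`,
// the app context, and the sea-orm `Condition` are collapsed into one `String`.
use std::{future::Future, pin::Pin};

type BoxFutureSketch<T> = Pin<Box<dyn Future<Output = T> + Send>>;
type MutationFnSketch = Arc<dyn Fn(String) -> BoxFutureSketch<Result<u64, String>> + Send + Sync>;

fn make_delete_fn_sketch() -> MutationFnSketch {
    Arc::new(|filter| {
        Box::pin(async move {
            // A real implementation would build and execute a DELETE here.
            println!("deleting rows matching: {filter}");
            Ok(1)
        })
    })
}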
pub fn generate_entity_default_insert_input_object<T>(context: &BuilderContext) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
EntityInputBuilder::insert_input_object::<T>(context)
}
pub fn generate_entity_default_update_input_object<T>(context: &BuilderContext) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
EntityInputBuilder::update_input_object::<T>(context)
}
pub fn generate_entity_default_basic_entity_object<T>(context: &'static BuilderContext) -> Object
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_object_builder = EntityObjectBuilder { context };
entity_object_builder.basic_to_object::<T>()
}
pub fn generate_entity_input_object<T>(
context: &'static BuilderContext,
is_insert: bool,
) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
if is_insert {
EntityInputBuilder::insert_input_object::<T>(context)
} else {
EntityInputBuilder::update_input_object::<T>(context)
}
}
pub fn generate_entity_filtered_mutation_field<E, N, R>(
builder_context: &'static BuilderContext,
field_name: N,
type_ref: R,
mutation_fn: FilterMutationFn,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
N: Into<String>,
R: Into<TypeRef>,
{
let object_name: String = get_entity_name::<E>(builder_context);
let guard = builder_context.guards.entity_guards.get(&object_name);
Field::new(field_name, type_ref, move |resolve_context| {
let mutation_fn = mutation_fn.clone();
FieldFuture::new(async move {
let guard_flag = if let Some(guard) = guard {
(*guard)(&resolve_context)
} else {
GuardAction::Allow
};
if let GuardAction::Block(reason) = guard_flag {
return Err::<Option<_>, async_graphql::Error>(async_graphql::Error::new(
reason.unwrap_or("Entity guard triggered.".into()),
));
}
let filters = resolve_context
.args
.get(get_entity_renormalized_filter_field_name());
let filters = get_filter_conditions::<E>(&resolve_context, builder_context, filters);
let app_ctx = resolve_context.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(&resolve_context, app_ctx.clone(), filters).await?;
Ok(result)
})
})
.argument(InputValue::new(
get_entity_renormalized_filter_field_name(),
TypeRef::named(get_entity_filter_input_type_name::<E>(builder_context)),
))
}
pub fn generate_entity_create_one_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: CreateOneMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_create_one_mutation_builder = EntityCreateOneMutationBuilder {
context: builder_context,
};
entity_create_one_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, input_object| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(resolver_ctx, app_ctx.clone(), input_object).await?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_create_one_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_create_one_mutation_builder = EntityCreateOneMutationBuilder {
context: builder_context,
};
entity_create_one_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_create_batch_mutation_field<E, ID>(
builder_context: &'static BuilderContext,
mutation_fn: CreateBatchMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_create_batch_mutation_builder = EntityCreateBatchMutationBuilder {
context: builder_context,
};
entity_create_batch_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, input_objects| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(resolver_ctx, app_ctx.clone(), input_objects).await?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_create_batch_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_create_batch_mutation_builder = EntityCreateBatchMutationBuilder {
context: builder_context,
};
entity_create_batch_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_update_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: UpdateMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_update_mutation_builder = EntityUpdateMutationBuilder {
context: builder_context,
};
entity_update_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, filters, input_object| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(
resolver_ctx,
app_ctx.clone(),
get_filter_conditions::<E>(resolver_ctx, builder_context, filters),
input_object,
)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_update_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_update_mutation_builder = EntityUpdateMutationBuilder {
context: builder_context,
};
entity_update_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_delete_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: DeleteMutationFn,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_delete_mutation_builder = EntityDeleteMutationBuilder {
context: builder_context,
};
entity_delete_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, filters| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(
resolver_ctx,
app_ctx.clone(),
get_filter_conditions::<E>(resolver_ctx, builder_context, filters),
)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_delete_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_delete_mutation_builder = EntityDeleteMutationBuilder {
context: builder_context,
};
entity_delete_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn register_entity_default_mutations<E, A>(
mut builder: SeaographyBuilder,
active_model_hooks: bool,
) -> SeaographyBuilder
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let builder_context = builder.context;
builder
.outputs
.push(generate_entity_default_basic_entity_object::<E>(
builder_context,
));
builder.inputs.extend([
generate_entity_default_insert_input_object::<E>(builder_context),
generate_entity_default_update_input_object::<E>(builder_context),
]);
builder.mutations.extend([
generate_entity_default_create_one_mutation_field::<E, A>(
builder_context,
active_model_hooks,
),
generate_entity_default_create_batch_mutation_field::<E, A>(
builder_context,
active_model_hooks,
),
generate_entity_default_update_mutation_field::<E, A>(builder_context, active_model_hooks),
generate_entity_default_delete_mutation_field::<E, A>(builder_context, active_model_hooks),
]);
builder
}
pub(crate) fn register_entity_default_readonly_impl<T, RE, I>(
mut builder: SeaographyBuilder,
entity: T,
) -> SeaographyBuilder
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
RE: sea_orm::Iterable<Iterator = I> + RelationBuilder,
I: Iterator<Item = RE> + Clone + DoubleEndedIterator + ExactSizeIterator + FusedIterator,
{
builder.register_entity::<T>(
<RE as sea_orm::Iterable>::iter()
.map(|rel| RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(entity, tokio::spawn);
builder = builder.register_entity_dataloader_one_to_many(entity, tokio::spawn);
builder
}
pub(crate) fn register_entity_default_writable_impl<T, RE, A, I>(
mut builder: SeaographyBuilder,
entity: T,
active_model_hooks: bool,
) -> SeaographyBuilder
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = T> + sea_orm::ActiveModelBehavior + std::marker::Send,
RE: sea_orm::Iterable<Iterator = I> + RelationBuilder,
I: Iterator<Item = RE> + Clone + DoubleEndedIterator + ExactSizeIterator + FusedIterator,
{
builder = register_entity_default_readonly_impl::<T, RE, I>(builder, entity);
builder = register_entity_default_mutations::<T, A>(builder, active_model_hooks);
builder
}
macro_rules! register_entity_default_readonly {
($builder:expr, $module_path:ident) => {
$crate::graphql::infra::custom::register_entity_default_readonly_impl::<
$module_path::Entity,
$module_path::RelatedEntity,
_,
>($builder, $module_path::Entity)
};
}
macro_rules! register_entity_default_writable {
($builder:expr, $module_path:ident, $active_model_hooks:expr) => {
$crate::graphql::infra::custom::register_entity_default_writable_impl::<
$module_path::Entity,
$module_path::RelatedEntity,
$module_path::ActiveModel,
_,
>($builder, $module_path::Entity, $active_model_hooks)
};
}
pub(crate) use register_entity_default_readonly;
pub(crate) use register_entity_default_writable;

View File

@@ -3,6 +3,7 @@ use async_graphql::{
    dynamic::{ResolverContext, Scalar, SchemaError},
    to_value,
};
+use convert_case::Case;
use itertools::Itertools;
use rust_decimal::{Decimal, prelude::FromPrimitive};
use sea_orm::{
@@ -12,9 +13,13 @@ use sea_orm::{
use seaography::{
    Builder as SeaographyBuilder, BuilderContext, FilterType, FnFilterCondition, SeaographyError,
};
+use serde::{Serialize, de::DeserializeOwned};
use serde_json::Value as JsonValue;

-use crate::{errors::RecorderResult, graphql::infra::util::get_entity_column_key};
+use crate::{
+    errors::RecorderResult, graphql::infra::name::get_entity_and_column_name,
+    utils::json::convert_json_keys,
+};

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Copy)]
pub enum JsonbFilterOperation {
@@ -906,18 +911,15 @@ where
    Box::new(
        move |_resolve_context: &ResolverContext<'_>, condition, filter| {
            if let Some(filter) = filter {
-                let filter_value = to_value(filter.as_index_map()).map_err(|e| {
-                    SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e))
-                })?;
+                let filter_value =
+                    to_value(filter.as_index_map()).map_err(GraphqlError::new_with_source)?;

-                let filter_json: JsonValue = filter_value.into_json().map_err(|e| {
-                    SeaographyError::AsyncGraphQLError(GraphqlError::new(format!("{e:?}")))
-                })?;
+                let filter_json: JsonValue = filter_value
+                    .into_json()
+                    .map_err(GraphqlError::new_with_source)?;

                let cond_where = prepare_jsonb_filter_input(&Expr::col(column), filter_json)
-                    .map_err(|e| {
-                        SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e))
-                    })?;
+                    .map_err(GraphqlError::new_with_source)?;

                let condition = condition.add(cond_where);
                Ok(condition)
@@ -941,11 +943,80 @@ where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-    let entity_column_key = get_entity_column_key::<T>(context, column);
+    let entity_column_name = get_entity_and_column_name::<T>(context, column);
    context.filter_types.overwrites.insert(
-        entity_column_key.clone(),
+        get_entity_and_column_name::<T>(context, column),
        Some(FilterType::Custom(JSONB_FILTER_NAME.to_string())),
    );
context.filter_types.condition_functions.insert(
entity_column_name.clone(),
generate_jsonb_filter_condition_function::<T>(context, column),
);
}
pub fn try_convert_jsonb_input_for_entity<T, S>(
context: &mut BuilderContext,
column: &T::Column,
case: Option<Case<'static>>,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
S: DeserializeOwned + Serialize,
{
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.types.input_conversions.insert(
entity_column_name.clone(),
Box::new(move |_resolve_context, accessor| {
let mut json_value: serde_json::Value = accessor.deserialize()?;
if let Some(case) = case {
json_value = convert_json_keys(json_value, case);
}
serde_json::from_value::<S>(json_value.clone()).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_name}"),
)
})?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}),
);
}
pub fn convert_jsonb_output_for_entity<T>(
context: &mut BuilderContext,
column: &T::Column,
case: Option<Case<'static>>,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.types.output_conversions.insert(
entity_column_name.clone(),
Box::new(move |value| {
if let sea_orm::Value::Json(Some(json)) = value {
let mut json_value = json.as_ref().clone();
if let Some(case) = case {
json_value = convert_json_keys(json_value, case);
}
let result = async_graphql::Value::from_json(json_value).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_name}"),
)
})?;
Ok(result)
} else {
Err(SeaographyError::TypeConversionError(
"value should be json".to_string(),
format!("Json - {entity_column_name}"),
))
}
}),
);
}

#[cfg(test)]
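
Taken together, the new hooks give a JSONB column a typed GraphQL surface: input validation with key-case renaming on the way in, and key-case renaming on the way out. A minimal sketch of wiring them up; the entity, column, and payload type (`cron`, `SubscriberTaskCron`) are chosen purely for illustration:

use convert_case::Case;

fn register_cron_jsonb(context: &mut BuilderContext) {
    // Validate camelCase GraphQL input against the typed payload after
    // renaming its keys to snake_case for storage.
    try_convert_jsonb_input_for_entity::<cron::Entity, SubscriberTaskCron>(
        context,
        &cron::Column::SubscriberTaskCron,
        Some(Case::Snake),
    );
    // Rename stored snake_case keys back to camelCase on the way out.
    convert_jsonb_output_for_entity::<cron::Entity>(
        context,
        &cron::Column::SubscriberTaskCron,
        Some(Case::Camel),
    );
}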

View File

@@ -1,2 +1,4 @@
pub mod crypto;
pub mod custom;
 pub mod json;
-pub mod util;
+pub mod name;

View File

@@ -0,0 +1,203 @@
use std::fmt::Display;
use sea_orm::{EntityName, EntityTrait, IdenStatic};
use seaography::BuilderContext;
pub fn get_entity_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let t = T::default();
let name = <T as EntityName>::table_name(&t);
context.entity_object.type_name.as_ref()(name)
}
pub fn get_column_name<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.entity_object.column_name.as_ref()(&entity_name, column.as_str())
}
pub fn get_entity_and_column_name<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
let column_name = get_column_name::<T>(context, column);
format!("{entity_name}.{column_name}")
}
pub fn get_entity_and_column_name_from_column_str<T>(
context: &BuilderContext,
column_str: &str,
) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}.{column_str}")
}
pub fn get_entity_basic_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let t = T::default();
let name = <T as EntityName>::table_name(&t);
format!(
"{}{}",
context.entity_object.type_name.as_ref()(name),
context.entity_object.basic_type_suffix
)
}
pub fn get_entity_query_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.entity_query_field.type_name.as_ref()(&entity_name)
}
pub fn get_entity_filter_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.filter_input.type_name.as_ref()(&entity_name)
}
pub fn get_entity_insert_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}{}", context.entity_input.insert_suffix)
}
pub fn get_entity_update_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}{}", context.entity_input.update_suffix)
}
pub fn get_entity_create_one_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_create_one_mutation.mutation_suffix
)
}
pub fn get_entity_create_batch_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_create_batch_mutation.mutation_suffix
)
}
pub fn get_entity_delete_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_delete_mutation.mutation_suffix
)
}
pub fn get_entity_update_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_update_mutation.mutation_suffix
)
}
pub fn get_entity_custom_mutation_field_name<T>(
context: &BuilderContext,
mutation_suffix: impl Display,
) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!("{query_field_name}{mutation_suffix}")
}
pub fn get_entity_renormalized_filter_field_name() -> &'static str {
"filter"
}
pub fn get_entity_query_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_query_field.filters
}
pub fn get_entity_update_mutation_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_update_mutation.filter_field
}
pub fn get_entity_delete_mutation_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_delete_mutation.filter_field
}
pub fn renormalize_filter_field_names_to_schema_context(context: &mut BuilderContext) {
let renormalized_filter_field_name = get_entity_renormalized_filter_field_name();
context.entity_query_field.filters = renormalized_filter_field_name.to_string();
context.entity_update_mutation.filter_field = renormalized_filter_field_name.to_string();
context.entity_delete_mutation.filter_field = renormalized_filter_field_name.to_string();
}
pub fn get_entity_renormalized_data_field_name() -> &'static str {
"data"
}
pub fn get_entity_create_one_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_create_one_mutation.data_field
}
pub fn get_entity_create_batch_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_create_batch_mutation.data_field
}
pub fn get_entity_update_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_update_mutation.data_field
}
pub fn renormalize_data_field_names_to_schema_context(context: &mut BuilderContext) {
let renormalized_data_field_name = get_entity_renormalized_data_field_name();
context.entity_create_one_mutation.data_field = renormalized_data_field_name.to_string();
context.entity_create_batch_mutation.data_field = renormalized_data_field_name.to_string();
context.entity_update_mutation.data_field = renormalized_data_field_name.to_string();
}
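
These helpers only compose the naming hooks already stored on the `BuilderContext`, so the entity/column key is always "{entity}.{column}" whatever casing those hooks apply. A small illustrative check (the exact casing of the parts depends on seaography's defaults):

#[test]
fn entity_and_column_name_is_composed_from_its_parts() {
    let context = BuilderContext::default();
    let composed = get_entity_and_column_name::<subscriptions::Entity>(
        &context,
        &subscriptions::Column::SubscriberId,
    );
    let entity = get_entity_name::<subscriptions::Entity>(&context);
    let column =
        get_column_name::<subscriptions::Entity>(&context, &subscriptions::Column::SubscriberId);
    assert_eq!(composed, format!("{entity}.{column}"));
}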

View File

@@ -1,30 +0,0 @@
use sea_orm::{EntityName, EntityTrait, IdenStatic};
use seaography::BuilderContext;
pub fn get_entity_key<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
context.entity_object.type_name.as_ref()(<T as EntityName>::table_name(&T::default()))
}
pub fn get_column_key<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_key::<T>(context);
context.entity_object.column_name.as_ref()(&entity_name, column.as_str())
}
pub fn get_entity_column_key<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
format!("{}.{}", &entity_name, &column_name)
}

View File

@@ -8,19 +8,48 @@ use crate::{
     app::AppContextTrait,
     graphql::{
         domains::{
-            credential_3rd::register_credential3rd_to_schema_builder,
-            crypto::register_crypto_to_schema_context,
+            bangumi::{register_bangumi_to_schema_builder, register_bangumi_to_schema_context},
+            credential_3rd::{
+                register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context,
+            },
+            cron::{register_cron_to_schema_builder, register_cron_to_schema_context},
+            downloaders::{
+                register_downloaders_to_schema_builder, register_downloaders_to_schema_context,
+            },
+            downloads::{
+                register_downloads_to_schema_builder, register_downloads_to_schema_context,
+            },
+            episodes::{register_episodes_to_schema_builder, register_episodes_to_schema_context},
+            feeds::{register_feeds_to_schema_builder, register_feeds_to_schema_context},
             subscriber_tasks::{
                 register_subscriber_tasks_to_schema_builder,
                 register_subscriber_tasks_to_schema_context,
             },
             subscribers::{
                 register_subscribers_to_schema_builder, register_subscribers_to_schema_context,
-                restrict_subscriber_for_entity,
             },
-            subscriptions::register_subscriptions_to_schema_builder,
+            subscription_bangumi::{
+                register_subscription_bangumi_to_schema_builder,
+                register_subscription_bangumi_to_schema_context,
+            },
+            subscription_episode::{
+                register_subscription_episode_to_schema_builder,
+                register_subscription_episode_to_schema_context,
+            },
+            subscriptions::{
+                register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context,
+            },
+            system_tasks::{
+                register_system_tasks_to_schema_builder, register_system_tasks_to_schema_context,
+            },
         },
-        infra::json::register_jsonb_input_filter_to_schema_builder,
+        infra::{
+            json::register_jsonb_input_filter_to_schema_builder,
+            name::{
+                renormalize_data_field_names_to_schema_context,
+                renormalize_filter_field_names_to_schema_context,
+            },
+        },
     },
 };
@@ -31,61 +60,29 @@ pub fn build_schema(
     depth: Option<usize>,
     complexity: Option<usize>,
 ) -> Result<Schema, SchemaError> {
-    use crate::models::*;
     let database = app_ctx.db().as_ref().clone();

     let context = CONTEXT.get_or_init(|| {
         let mut context = BuilderContext::default();
+        renormalize_filter_field_names_to_schema_context(&mut context);
+        renormalize_data_field_names_to_schema_context(&mut context);
         {
             // domains
+            register_feeds_to_schema_context(&mut context);
             register_subscribers_to_schema_context(&mut context);
+            register_subscriptions_to_schema_context(&mut context);
-            {
-                restrict_subscriber_for_entity::<bangumi::Entity>(
-                    &mut context,
-                    &bangumi::Column::SubscriberId,
-                );
-                restrict_subscriber_for_entity::<downloaders::Entity>(
-                    &mut context,
-                    &downloaders::Column::SubscriberId,
-                );
-                restrict_subscriber_for_entity::<downloads::Entity>(
-                    &mut context,
-                    &downloads::Column::SubscriberId,
-                );
-                restrict_subscriber_for_entity::<episodes::Entity>(
-                    &mut context,
-                    &episodes::Column::SubscriberId,
-                );
-                restrict_subscriber_for_entity::<subscriptions::Entity>(
-                    &mut context,
-                    &subscriptions::Column::SubscriberId,
-                );
-                restrict_subscriber_for_entity::<subscribers::Entity>(
-                    &mut context,
-                    &subscribers::Column::Id,
-                );
-                restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
-                    &mut context,
-                    &subscription_bangumi::Column::SubscriberId,
-                );
-                restrict_subscriber_for_entity::<subscription_episode::Entity>(
-                    &mut context,
-                    &subscription_episode::Column::SubscriberId,
-                );
-                restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
-                    &mut context,
-                    &subscriber_tasks::Column::SubscriberId,
-                );
-                restrict_subscriber_for_entity::<credential_3rd::Entity>(
-                    &mut context,
-                    &credential_3rd::Column::SubscriberId,
-                );
-            }
-            register_crypto_to_schema_context(&mut context, app_ctx.clone());
             register_subscriber_tasks_to_schema_context(&mut context);
+            register_credential3rd_to_schema_context(&mut context, app_ctx.clone());
+            register_downloaders_to_schema_context(&mut context);
+            register_downloads_to_schema_context(&mut context);
+            register_episodes_to_schema_context(&mut context);
+            register_subscription_bangumi_to_schema_context(&mut context);
+            register_subscription_episode_to_schema_context(&mut context);
+            register_bangumi_to_schema_context(&mut context);
+            register_cron_to_schema_context(&mut context);
+            register_system_tasks_to_schema_context(&mut context);
         }
         context
     });
@@ -99,33 +96,18 @@ pub fn build_schema(
     {
         // domains
         builder = register_subscribers_to_schema_builder(builder);
+        builder = register_feeds_to_schema_builder(builder);
+        builder = register_episodes_to_schema_builder(builder);
+        builder = register_subscription_bangumi_to_schema_builder(builder);
+        builder = register_subscription_episode_to_schema_builder(builder);
+        builder = register_downloaders_to_schema_builder(builder);
+        builder = register_downloads_to_schema_builder(builder);
-        seaography::register_entities!(
-            builder,
-            [
-                bangumi,
-                downloaders,
-                downloads,
-                episodes,
-                subscription_bangumi,
-                subscription_episode,
-                subscriptions,
-                subscriber_tasks,
-                credential_3rd
-            ]
-        );
-        {
-            builder.register_enumeration::<downloads::DownloadStatus>();
-            builder.register_enumeration::<subscriptions::SubscriptionCategory>();
-            builder.register_enumeration::<downloaders::DownloaderCategory>();
-            builder.register_enumeration::<downloads::DownloadMime>();
-            builder.register_enumeration::<credential_3rd::Credential3rdType>();
-        }
         builder = register_subscriptions_to_schema_builder(builder);
         builder = register_credential3rd_to_schema_builder(builder);
         builder = register_subscriber_tasks_to_schema_builder(builder);
+        builder = register_bangumi_to_schema_builder(builder);
+        builder = register_cron_to_schema_builder(builder);
+        builder = register_system_tasks_to_schema_builder(builder);
     }

     let schema = builder.schema_builder();
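
For reference, a hedged sketch of how `build_schema` is typically invoked; the depth and complexity limits shown are arbitrary placeholders:

// `app_ctx` is assumed to be an already-constructed AppContextTrait handle.
let schema = build_schema(app_ctx.clone(), Some(64), Some(1024))?;
// The result is an async_graphql dynamic schema, so it can be executed directly:
let response = schema.execute("{ __typename }").await;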

View File

@@ -7,11 +7,11 @@
     async_fn_traits,
     error_generic_member_access,
     associated_type_defaults,
-    let_chains
+    let_chains,
+    impl_trait_in_fn_trait_return
 )]
 #![allow(clippy::enum_variant_names)]

 pub use downloader;

 pub mod app;
 pub mod auth;
 pub mod cache;
@@ -21,10 +21,14 @@ pub mod errors;
 pub mod extract;
 pub mod graphql;
 pub mod logger;
+pub mod media;
 pub mod message;
 pub mod migrations;
 pub mod models;
 pub mod storage;
 pub mod task;
-pub mod test_utils;
+pub mod utils;
 pub mod web;
+
+#[cfg(any(test, feature = "test-utils"))]
+pub mod test_utils;

View File

@@ -0,0 +1,111 @@
use serde::{Deserialize, Serialize};
use ts_rs::TS;
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, TS)]
#[ts(rename_all = "camelCase")]
pub enum AutoOptimizeImageFormat {
#[serde(rename = "image/webp")]
Webp,
#[serde(rename = "image/avif")]
Avif,
#[serde(rename = "image/jxl")]
Jxl,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
#[ts(rename_all = "camelCase")]
pub struct EncodeWebpOptions {
pub quality: Option<f32>,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
#[ts(rename_all = "camelCase")]
pub struct EncodeAvifOptions {
pub quality: Option<u8>,
pub speed: Option<u8>,
pub threads: Option<u8>,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
#[ts(rename_all = "camelCase")]
pub struct EncodeJxlOptions {
pub quality: Option<f32>,
pub speed: Option<u8>,
}
#[derive(Clone, Debug, Serialize, Deserialize, TS, PartialEq)]
#[ts(tag = "mimeType")]
#[serde(tag = "mime_type")]
pub enum EncodeImageOptions {
#[serde(rename = "image/webp")]
Webp(EncodeWebpOptions),
#[serde(rename = "image/avif")]
Avif(EncodeAvifOptions),
#[serde(rename = "image/jxl")]
Jxl(EncodeJxlOptions),
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MediaConfig {
#[serde(default = "default_webp_quality")]
pub webp_quality: f32,
#[serde(default = "default_avif_quality")]
pub avif_quality: u8,
#[serde(default = "default_avif_speed")]
pub avif_speed: u8,
#[serde(default = "default_avif_threads")]
pub avif_threads: u8,
#[serde(default = "default_jxl_quality")]
pub jxl_quality: f32,
#[serde(default = "default_jxl_speed")]
pub jxl_speed: u8,
#[serde(default = "default_auto_optimize_formats")]
pub auto_optimize_formats: Vec<AutoOptimizeImageFormat>,
}
impl Default for MediaConfig {
fn default() -> Self {
Self {
webp_quality: default_webp_quality(),
avif_quality: default_avif_quality(),
avif_speed: default_avif_speed(),
avif_threads: default_avif_threads(),
jxl_quality: default_jxl_quality(),
jxl_speed: default_jxl_speed(),
auto_optimize_formats: default_auto_optimize_formats(),
}
}
}
fn default_webp_quality() -> f32 {
80.0
}
fn default_avif_quality() -> u8 {
80
}
fn default_avif_speed() -> u8 {
6
}
fn default_avif_threads() -> u8 {
1
}
fn default_jxl_quality() -> f32 {
80.0
}
fn default_jxl_speed() -> u8 {
7
}
fn default_auto_optimize_formats() -> Vec<AutoOptimizeImageFormat> {
vec![
AutoOptimizeImageFormat::Webp,
        // AutoOptimizeImageFormat::Avif, // TOO SLOW
#[cfg(feature = "jxl")]
AutoOptimizeImageFormat::Jxl,
]
}
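
Because every field carries a `#[serde(default = "...")]`, a partial config document is enough; unspecified fields fall back to the defaults above. A quick sketch using serde_json:

// Only webp_quality is overridden; the rest come from the default fns.
let config: MediaConfig = serde_json::from_str(r#"{ "webp_quality": 70.0 }"#)?;
assert_eq!(config.webp_quality, 70.0);
assert_eq!(config.avif_speed, 6); // from default_avif_speed()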

View File

@@ -0,0 +1,8 @@
mod config;
mod service;
pub use config::{
AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
EncodeWebpOptions, MediaConfig,
};
pub use service::MediaService;

View File

@@ -0,0 +1,199 @@
use std::io::Cursor;
use bytes::Bytes;
use image::{GenericImageView, ImageEncoder, ImageReader, codecs::avif::AvifEncoder};
use quirks_path::Path;
use snafu::ResultExt;
use crate::{
errors::{RecorderError, RecorderResult},
media::{EncodeAvifOptions, EncodeJxlOptions, EncodeWebpOptions, MediaConfig},
};
#[derive(Debug)]
pub struct MediaService {
pub config: MediaConfig,
}
impl MediaService {
pub async fn from_config(config: MediaConfig) -> RecorderResult<Self> {
Ok(Self { config })
}
pub fn is_legacy_image_format(&self, ext: &str) -> bool {
matches!(ext, "jpeg" | "jpg" | "png")
}
pub async fn optimize_image_to_webp(
&self,
path: impl AsRef<Path>,
data: impl Into<Bytes>,
options: Option<EncodeWebpOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.and_then(|o| o.quality)
.unwrap_or(self.config.webp_quality);
let data = data.into();
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let img = image_reader.decode()?;
let (width, height) = (img.width(), img.height());
let color = img.color();
let webp_data = if color.has_alpha() {
let rgba_image = img.into_rgba8();
let encoder = webp::Encoder::from_rgba(&rgba_image, width, height);
encoder.encode(quality)
} else {
let rgba_image = img.into_rgb8();
let encoder = webp::Encoder::from_rgb(&rgba_image, width, height);
encoder.encode(quality)
};
Ok(Bytes::from(webp_data.to_vec()))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to webp: {}",
path.as_ref().display()
)
})?
}
pub async fn optimize_image_to_avif(
&self,
path: impl AsRef<Path>,
data: Bytes,
options: Option<EncodeAvifOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.as_ref()
.and_then(|o| o.quality)
.unwrap_or(self.config.avif_quality);
let speed = options
.as_ref()
.and_then(|o| o.speed)
.unwrap_or(self.config.avif_speed);
let threads = options
.as_ref()
.and_then(|o| o.threads)
.unwrap_or(self.config.avif_threads);
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
let mut buf = vec![];
{
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let img = image_reader.decode()?;
let (width, height) = img.dimensions();
let color_type = img.color();
let encoder = AvifEncoder::new_with_speed_quality(&mut buf, speed, quality)
.with_num_threads(Some(threads as usize));
encoder.write_image(img.as_bytes(), width, height, color_type.into())?;
}
Ok(Bytes::from(buf))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to avif: {}",
path.as_ref().display()
)
})?
}
#[cfg(feature = "jxl")]
pub async fn optimize_image_to_jxl(
&self,
path: impl AsRef<Path>,
data: Bytes,
options: Option<EncodeJxlOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.as_ref()
.and_then(|o| o.quality)
.unwrap_or(self.config.jxl_quality);
let speed = options
.as_ref()
.and_then(|o| o.speed)
.unwrap_or(self.config.jxl_speed);
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
use jpegxl_rs::encode::{ColorEncoding, EncoderResult, EncoderSpeed};
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let image = image_reader.decode()?;
let (width, height) = image.dimensions();
let color = image.color();
let has_alpha = color.has_alpha();
let libjxl_speed = {
match speed {
0 | 1 => EncoderSpeed::Lightning,
2 => EncoderSpeed::Thunder,
3 => EncoderSpeed::Falcon,
4 => EncoderSpeed::Cheetah,
5 => EncoderSpeed::Hare,
6 => EncoderSpeed::Wombat,
7 => EncoderSpeed::Squirrel,
8 => EncoderSpeed::Kitten,
_ => EncoderSpeed::Tortoise,
}
};
let mut encoder_builder = jpegxl_rs::encoder_builder()
.lossless(false)
.has_alpha(has_alpha)
.color_encoding(ColorEncoding::Srgb)
.speed(libjxl_speed)
.jpeg_quality(quality)
.build()?;
let buffer: EncoderResult<u8> = if color.has_alpha() {
let sample = image.into_rgba8();
encoder_builder.encode(&sample, width, height)?
} else {
let sample = image.into_rgb8();
encoder_builder.encode(&sample, width, height)?
};
Ok(Bytes::from(buffer.data))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
            format!(
                "failed to spawn blocking task to optimize legacy image to jxl: {}",
path.as_ref().display()
)
})?
}
#[cfg(not(feature = "jxl"))]
pub async fn optimize_image_to_jxl(
&self,
_path: impl AsRef<Path>,
_data: Bytes,
_options: Option<EncodeJxlOptions>,
) -> RecorderResult<Bytes> {
Err(RecorderError::Whatever {
message: "jxl feature is not enabled".to_string(),
source: None.into(),
})
}
}
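
A usage sketch, not part of the diff; the file name and the `jpg_bytes` buffer are stand-ins for data already in memory:

let media = MediaService::from_config(MediaConfig::default()).await?;
if media.is_legacy_image_format("jpg") {
    // Re-encodes on a blocking thread and returns the encoded bytes.
    let webp: Bytes = media
        .optimize_image_to_webp("poster.jpg", jpg_bytes, None)
        .await?;
}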

View File

@@ -43,7 +43,7 @@ pub enum Bangumi {
     MikanBangumiId,
     DisplayName,
     SubscriberId,
-    RawName,
+    OriginName,
     Season,
     SeasonRaw,
     Fansub,
@@ -51,8 +51,13 @@ pub enum Bangumi {
     Filter,
     RssLink,
     PosterLink,
+    OriginPosterLink,
+    /**
+     * @deprecated
+     */
     SavePath,
     Homepage,
+    BangumiType,
 }

 #[derive(DeriveIden)]
@@ -69,22 +74,30 @@ pub enum Episodes {
     Table,
     Id,
     MikanEpisodeId,
-    RawName,
+    OriginName,
     DisplayName,
     BangumiId,
     SubscriberId,
     DownloadId,
+    /**
+     * @deprecated
+     */
     SavePath,
     Resolution,
     Season,
     SeasonRaw,
     Fansub,
     PosterLink,
+    OriginPosterLink,
     EpisodeIndex,
     Homepage,
     Subtitle,
     Source,
-    Extra,
+    EpisodeType,
+    EnclosureTorrentLink,
+    EnclosureMagnetLink,
+    EnclosurePubDate,
+    EnclosureContentLength,
 }

 #[derive(DeriveIden)]
@@ -100,7 +113,7 @@ pub enum SubscriptionEpisode {
 pub enum Downloads {
     Table,
     Id,
-    RawName,
+    OriginName,
     DisplayName,
     SubscriberId,
     DownloaderId,
@@ -147,6 +160,70 @@ pub enum Credential3rd {
    UserAgent,
}
#[derive(DeriveIden)]
pub enum Feeds {
Table,
Id,
Token,
FeedType,
FeedSource,
SubscriberId,
SubscriptionId,
}
#[derive(DeriveIden)]
pub enum Cron {
Table,
Id,
SubscriberId,
SubscriptionId,
CronExpr,
CronTimezone,
NextRun,
LastRun,
LastError,
Enabled,
LockedBy,
LockedAt,
TimeoutMs,
Attempts,
MaxAttempts,
Priority,
Status,
SubscriberTaskCron,
SystemTaskCron,
}
#[derive(sea_query::Iden)]
pub enum ApalisSchema {
#[iden = "apalis"]
Schema,
}
#[derive(DeriveIden)]
pub enum ApalisJobs {
#[sea_orm(iden = "jobs")]
Table,
SubscriberId,
SubscriptionId,
Job,
JobType,
Status,
TaskType,
Id,
Attempts,
MaxAttempts,
RunAt,
LastError,
LockAt,
LockBy,
DoneAt,
Priority,
CronId,
}
macro_rules! create_postgres_enum_for_active_enum {
    ($manager: expr, $active_enum: expr, $($enum_value:expr),+) => {
        {
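
The raw-SQL migrations below lean on `DeriveIden`'s naming: variant names become snake_case identifiers and `Table` maps to the container's own name unless overridden, as with `#[sea_orm(iden = "jobs")]`. Illustrative checks of that mapping:

assert_eq!(Cron::Table.to_string(), "cron");
assert_eq!(Cron::NextRun.to_string(), "next_run");
assert_eq!(ApalisJobs::Table.to_string(), "jobs"); // overridden iden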

View File

@@ -52,8 +52,7 @@ impl MigrationTrait for Migration {
                 subscriptions::SubscriptionCategoryEnum,
                 subscriptions::SubscriptionCategory::MikanSubscriber,
                 subscriptions::SubscriptionCategory::MikanBangumi,
-                subscriptions::SubscriptionCategory::MikanSeason,
-                subscriptions::SubscriptionCategory::Manual
+                subscriptions::SubscriptionCategory::MikanSeason
             )
             .await?;
@@ -96,7 +95,7 @@ impl MigrationTrait for Migration {
                     .col(text_null(Bangumi::MikanBangumiId))
                     .col(integer(Bangumi::SubscriberId))
                     .col(text(Bangumi::DisplayName))
-                    .col(text(Bangumi::RawName))
+                    .col(text(Bangumi::OriginName))
                     .col(integer(Bangumi::Season))
                     .col(text_null(Bangumi::SeasonRaw))
                     .col(text_null(Bangumi::Fansub))
@@ -104,6 +103,7 @@ impl MigrationTrait for Migration {
                     .col(json_binary_null(Bangumi::Filter))
                     .col(text_null(Bangumi::RssLink))
                     .col(text_null(Bangumi::PosterLink))
+                    .col(text_null(Bangumi::OriginPosterLink))
                     .col(text_null(Bangumi::SavePath))
                     .col(text_null(Bangumi::Homepage))
                     .foreign_key(
@@ -220,7 +220,7 @@ impl MigrationTrait for Migration {
                 table_auto_z(Episodes::Table)
                     .col(pk_auto(Episodes::Id))
                     .col(text_null(Episodes::MikanEpisodeId))
-                    .col(text(Episodes::RawName))
+                    .col(text(Episodes::OriginName))
                     .col(text(Episodes::DisplayName))
                     .col(integer(Episodes::BangumiId))
                     .col(integer(Episodes::SubscriberId))
@@ -230,6 +230,7 @@ impl MigrationTrait for Migration {
                     .col(text_null(Episodes::SeasonRaw))
                     .col(text_null(Episodes::Fansub))
                     .col(text_null(Episodes::PosterLink))
+                    .col(text_null(Episodes::OriginPosterLink))
                     .col(integer(Episodes::EpisodeIndex))
                     .col(text_null(Episodes::Homepage))
                     .col(text_null(Episodes::Subtitle))

View File

@@ -80,7 +80,7 @@ impl MigrationTrait for Migration {
             .create_table(
                 table_auto_z(Downloads::Table)
                     .col(pk_auto(Downloads::Id))
-                    .col(string(Downloads::RawName))
+                    .col(string(Downloads::OriginName))
                     .col(string(Downloads::DisplayName))
                     .col(integer(Downloads::SubscriberId))
                     .col(integer(Downloads::DownloaderId))
@@ -95,8 +95,8 @@ impl MigrationTrait for Migration {
                         DownloadMimeEnum,
                         DownloadMime::iden_values(),
                     ))
-                    .col(big_unsigned(Downloads::AllSize))
-                    .col(big_unsigned(Downloads::CurrSize))
+                    .col(big_integer(Downloads::AllSize))
+                    .col(big_integer(Downloads::CurrSize))
                     .col(text(Downloads::Url))
                     .col(text_null(Downloads::Homepage))
                     .col(text_null(Downloads::SavePath))

View File

@@ -90,6 +90,11 @@ impl MigrationTrait for Migration {
                     SimpleExpr::from(AuthType::Basic).as_enum(AuthTypeEnum),
                     seed_subscriber_id.into(),
                 ])
+                .on_conflict(
+                    OnConflict::columns([Auth::Pid, Auth::AuthType])
+                        .do_nothing()
+                        .to_owned(),
+                )
                 .to_owned(),
         )
         .await?;

View File

@@ -95,6 +95,7 @@ impl MigrationTrait for Migration {
                 Table::alter()
                     .table(Subscriptions::Table)
                     .drop_column(Subscriptions::CredentialId)
+                    .drop_foreign_key("fk_subscriptions_credential_id")
                     .to_owned(),
             )
             .await?;

View File

@@ -0,0 +1,221 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};
use super::defs::{ApalisJobs, ApalisSchema};
use crate::{
migrations::defs::{Subscribers, Subscriptions},
task::{
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME, SUBSCRIBER_TASK_APALIS_NAME,
SYSTEM_TASK_APALIS_NAME,
},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.add_column_if_not_exists(integer_null(ApalisJobs::SubscriberId))
.add_column_if_not_exists(integer_null(ApalisJobs::SubscriptionId))
.add_column_if_not_exists(text_null(ApalisJobs::TaskType))
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_subscriber_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::SubscriberId)
.to_tbl(Subscribers::Table)
.to_col(Subscribers::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_subscription_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::SubscriptionId)
.to_tbl(Subscriptions::Table)
.to_col(Subscriptions::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"UPDATE {apalis_schema}.{apalis_table} SET {subscriber_id} = ({job} ->> '{subscriber_id}')::integer, {task_type} = ({job} ->> '{task_type}')::text, {subscription_id} = ({job} ->> '{subscription_id}')::integer"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
job = ApalisJobs::Job.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME}
BEFORE INSERT OR UPDATE ON {apalis_schema}.{apalis_table}
FOR EACH ROW
EXECUTE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}();"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string()
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
.await?;
db.execute_unprepared("DROP VIEW IF EXISTS system_tasks")
.await?;
db.execute_unprepared(&format!(
r#"DROP TRIGGER IF EXISTS {SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME} ON {apalis_schema}.{apalis_table}"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string()
)).await?;
db.execute_unprepared(&format!(
r#"DROP FUNCTION IF EXISTS {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}()"#,
apalis_schema = ApalisSchema::Schema.to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.drop_foreign_key("fk_apalis_jobs_subscriber_id")
.drop_foreign_key("fk_apalis_jobs_subscription_id")
.drop_column(ApalisJobs::SubscriberId)
.drop_column(ApalisJobs::SubscriptionId)
.to_owned(),
)
.await?;
Ok(())
}
}
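
Net effect of this migration: `subscriber_id`, `subscription_id`, and `task_type` become real columns kept in sync with the `job` JSONB payload by the trigger, so the views (and any entity mapped over them) can filter without JSONB extraction. A hedged sketch, assuming a `subscriber_tasks` model mapped onto the view:

use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};

// `db` is assumed to be a &DatabaseConnection.
let tasks = subscriber_tasks::Entity::find()
    .filter(subscriber_tasks::Column::SubscriberId.eq(1))
    .all(db)
    .await?;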

View File

@@ -1,64 +0,0 @@
use async_trait::async_trait;
use sea_orm_migration::prelude::*;
use crate::task::SUBSCRIBER_TASK_APALIS_NAME;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
job,
job_type,
status,
(job ->> 'subscriber_id'::text)::integer AS subscriber_id,
job ->> 'task_type'::text AS task_type,
id,
attempts,
max_attempts,
run_at,
last_error,
lock_at,
lock_by,
done_at,
priority
FROM apalis.jobs
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscriber_id
ON apalis.jobs (((job -> 'subscriber_id')::integer))
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(
r#"DROP INDEX IF EXISTS idx_apalis_jobs_subscriber_id
ON apalis.jobs"#,
)
.await?;
db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
.await?;
Ok(())
}
}

View File

@@ -0,0 +1,95 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{
CustomSchemaManagerExt, Feeds, GeneralIds, Subscribers, Subscriptions, table_auto_z,
},
models::feeds::{FeedSource, FeedSourceEnum, FeedType, FeedTypeEnum},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
create_postgres_enum_for_active_enum!(manager, FeedTypeEnum, FeedType::Rss).await?;
create_postgres_enum_for_active_enum!(
manager,
FeedSourceEnum,
FeedSource::SubscriptionEpisode
)
.await?;
manager
.create_table(
table_auto_z(Feeds::Table)
.col(pk_auto(Feeds::Id))
.col(text(Feeds::Token))
.col(enumeration(
Feeds::FeedType,
FeedTypeEnum,
FeedType::iden_values(),
))
.col(
enumeration(Feeds::FeedSource, FeedSourceEnum, FeedSource::iden_values())
.not_null(),
)
.col(integer_null(Feeds::SubscriberId))
.col(integer_null(Feeds::SubscriptionId))
.index(
Index::create()
.if_not_exists()
.name("idx_feeds_token")
.table(Feeds::Table)
.col(Feeds::Token)
.unique(),
)
.foreign_key(
ForeignKey::create()
.name("fk_feeds_subscriber_id")
.from(Feeds::Table, Feeds::SubscriberId)
.to(Subscribers::Table, Subscribers::Id)
.on_update(ForeignKeyAction::Cascade)
.on_delete(ForeignKeyAction::Cascade),
)
.foreign_key(
ForeignKey::create()
.name("fk_feeds_subscription_id")
.from(Feeds::Table, Feeds::SubscriptionId)
.to(Subscriptions::Table, Subscriptions::Id)
.on_update(ForeignKeyAction::Cascade)
.on_delete(ForeignKeyAction::Cascade),
)
.to_owned(),
)
.await?;
manager
.create_postgres_auto_update_ts_trigger_for_col(Feeds::Table, GeneralIds::UpdatedAt)
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_postgres_auto_update_ts_trigger_for_col(Feeds::Table, GeneralIds::UpdatedAt)
.await?;
manager
.drop_table(Table::drop().if_exists().table(Feeds::Table).to_owned())
.await?;
manager
.drop_postgres_enum_for_active_enum(FeedTypeEnum)
.await?;
manager
.drop_postgres_enum_for_active_enum(FeedSourceEnum)
.await?;
Ok(())
}
}

View File

@@ -0,0 +1,171 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{Bangumi, CustomSchemaManagerExt, Episodes},
models::{
bangumi::{BangumiType, BangumiTypeEnum},
episodes::{EpisodeType, EpisodeTypeEnum},
},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan).await?;
{
create_postgres_enum_for_active_enum!(manager, BangumiTypeEnum, BangumiType::Mikan)
.await?;
manager
.alter_table(
Table::alter()
.table(Bangumi::Table)
.add_column_if_not_exists(enumeration_null(
Bangumi::BangumiType,
BangumiTypeEnum,
BangumiType::iden_values(),
))
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"ALTER TABLE {bangumi} DROP COLUMN IF EXISTS {save_path}"#,
bangumi = Bangumi::Table.to_string(),
save_path = Bangumi::SavePath.to_string(),
))
.await?;
manager
.exec_stmt(
UpdateStatement::new()
.table(Bangumi::Table)
.value(
Bangumi::BangumiType,
BangumiType::Mikan.as_enum(BangumiTypeEnum),
)
.and_where(Expr::col(Bangumi::BangumiType).is_null())
.and_where(Expr::col(Bangumi::MikanBangumiId).is_not_null())
.to_owned(),
)
.await?;
manager
.alter_table(
Table::alter()
.table(Bangumi::Table)
.modify_column(enumeration(
Bangumi::BangumiType,
BangumiTypeEnum,
BangumiType::iden_values(),
))
.to_owned(),
)
.await?;
}
{
create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan)
.await?;
manager
.alter_table(
Table::alter()
.table(Episodes::Table)
.add_column_if_not_exists(enumeration_null(
Episodes::EpisodeType,
EpisodeTypeEnum,
EpisodeType::enum_type_name(),
))
.add_column_if_not_exists(text_null(Episodes::EnclosureMagnetLink))
.add_column_if_not_exists(text_null(Episodes::EnclosureTorrentLink))
.add_column_if_not_exists(timestamp_with_time_zone_null(
Episodes::EnclosurePubDate,
))
.add_column_if_not_exists(big_integer_null(
Episodes::EnclosureContentLength,
))
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"ALTER TABLE {episodes} DROP COLUMN IF EXISTS {save_path}"#,
episodes = Episodes::Table.to_string(),
save_path = Episodes::SavePath.to_string(),
))
.await?;
manager
.exec_stmt(
UpdateStatement::new()
.table(Episodes::Table)
.value(
Episodes::EpisodeType,
EpisodeType::Mikan.as_enum(EpisodeTypeEnum),
)
.and_where(Expr::col(Episodes::EpisodeType).is_null())
.and_where(Expr::col(Episodes::MikanEpisodeId).is_not_null())
.to_owned(),
)
.await?;
manager
.alter_table(
Table::alter()
.table(Episodes::Table)
.modify_column(enumeration(
Episodes::EpisodeType,
EpisodeTypeEnum,
EpisodeType::enum_type_name(),
))
.to_owned(),
)
.await?;
}
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.alter_table(
Table::alter()
.table(Bangumi::Table)
.add_column_if_not_exists(text_null(Bangumi::SavePath))
.drop_column(Bangumi::BangumiType)
.to_owned(),
)
.await?;
manager
.drop_postgres_enum_for_active_enum(BangumiTypeEnum)
.await?;
manager
.alter_table(
Table::alter()
.table(Episodes::Table)
.add_column_if_not_exists(text_null(Episodes::SavePath))
.drop_column(Episodes::EpisodeType)
.drop_column(Episodes::EnclosureMagnetLink)
.drop_column(Episodes::EnclosureTorrentLink)
.drop_column(Episodes::EnclosurePubDate)
.drop_column(Episodes::EnclosureContentLength)
.to_owned(),
)
.await?;
manager
.drop_postgres_enum_for_active_enum(EpisodeTypeEnum)
.await?;
Ok(())
}
}

View File

@@ -0,0 +1,542 @@
use async_trait::async_trait;
use sea_orm::ActiveEnum;
use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{
ApalisJobs, ApalisSchema, Cron, CustomSchemaManagerExt, GeneralIds, Subscribers,
Subscriptions, table_auto_z,
},
models::cron::{
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_DEBUG_EVENT, CRON_DUE_EVENT,
CronStatus, CronStatusEnum, NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME,
NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME,
},
task::{
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SUBSCRIBER_TASK_APALIS_NAME,
SYSTEM_TASK_APALIS_NAME,
},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
create_postgres_enum_for_active_enum!(
manager,
CronStatusEnum,
CronStatus::Pending,
CronStatus::Running,
CronStatus::Completed,
CronStatus::Failed,
CronStatus::Disabled
)
.await?;
manager
.create_table(
table_auto_z(Cron::Table)
.col(pk_auto(Cron::Id))
.col(string(Cron::CronExpr))
.col(string(Cron::CronTimezone))
.col(integer_null(Cron::SubscriberId))
.col(integer_null(Cron::SubscriptionId))
.col(timestamp_with_time_zone_null(Cron::NextRun))
.col(timestamp_with_time_zone_null(Cron::LastRun))
.col(string_null(Cron::LastError))
.col(boolean(Cron::Enabled).default(true))
.col(string_null(Cron::LockedBy))
.col(timestamp_with_time_zone_null(Cron::LockedAt))
.col(integer_null(Cron::TimeoutMs).default(5000))
.col(integer(Cron::Attempts).default(0))
.col(integer(Cron::MaxAttempts).default(1))
.col(integer(Cron::Priority).default(0))
.col(
enumeration(Cron::Status, CronStatusEnum, CronStatus::iden_values())
.default(CronStatus::Pending),
)
.col(json_binary_null(Cron::SubscriberTaskCron))
.col(json_binary_null(Cron::SystemTaskCron))
.foreign_key(
ForeignKey::create()
.name("fk_cron_subscriber_id")
.from(Cron::Table, Cron::SubscriberId)
.to(Subscribers::Table, Subscribers::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.foreign_key(
ForeignKey::create()
.name("fk_cron_subscription_id")
.from(Cron::Table, Cron::SubscriptionId)
.to(Subscriptions::Table, Subscriptions::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
manager
.create_postgres_auto_update_ts_trigger_for_col(Cron::Table, GeneralIds::UpdatedAt)
.await?;
manager
.create_index(
IndexCreateStatement::new()
.if_not_exists()
.name("idx_cron_next_run")
.table(Cron::Table)
.col(Cron::NextRun)
.to_owned(),
)
.await?;
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_subscriber_task_subscriber_id integer;
new_subscriber_task_subscription_id integer;
new_system_task_subscriber_id integer;
BEGIN
new_subscriber_task_subscriber_id = (NEW.{subscriber_task_cron} ->> 'subscriber_id')::integer;
new_subscriber_task_subscription_id = (NEW.{subscriber_task_cron} ->> 'subscription_id')::integer;
new_system_task_subscriber_id = (NEW.{system_task_cron} ->> 'subscriber_id')::integer;
IF new_subscriber_task_subscriber_id IS DISTINCT FROM (OLD.{subscriber_task_cron} ->> 'subscriber_id')::integer AND new_subscriber_task_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_subscriber_task_subscriber_id;
END IF;
IF new_subscriber_task_subscription_id IS DISTINCT FROM (OLD.{subscriber_task_cron} ->> 'subscription_id')::integer AND new_subscriber_task_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_subscriber_task_subscription_id;
END IF;
IF new_system_task_subscriber_id IS DISTINCT FROM (OLD.{system_task_cron} ->> 'subscriber_id')::integer AND new_system_task_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_system_task_subscriber_id;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
subscriber_task_cron = &Cron::SubscriberTaskCron.to_string(),
subscriber_id = &Cron::SubscriberId.to_string(),
subscription_id = &Cron::SubscriptionId.to_string(),
system_task_cron = &Cron::SystemTaskCron.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME}
BEFORE INSERT OR UPDATE ON {table}
FOR EACH ROW
EXECUTE FUNCTION {SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}();"#,
table = &Cron::Table.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME}() RETURNS trigger AS $$
BEGIN
-- Check if the cron is due to run
IF NEW.{next_run} IS NOT NULL
AND NEW.{next_run} <= CURRENT_TIMESTAMP
AND NEW.{enabled} = true
AND NEW.{status} = '{pending}'::{status_type}
AND NEW.{attempts} < NEW.{max_attempts}
-- Check if not locked or lock timeout
AND (
NEW.{locked_at} IS NULL
OR (
NEW.{timeout_ms} IS NOT NULL
AND (NEW.{locked_at} + NEW.{timeout_ms} * INTERVAL '1 millisecond') <= CURRENT_TIMESTAMP
)
)
-- Make sure the cron is a new due event, not a repeat event
AND (
OLD.{next_run} IS NULL
OR OLD.{next_run} > CURRENT_TIMESTAMP
OR OLD.{enabled} = false
OR OLD.{status} IS DISTINCT FROM '{pending}'
OR OLD.{attempts} IS DISTINCT FROM NEW.{attempts}
)
THEN
PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(NEW)::text);
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
next_run = &Cron::NextRun.to_string(),
enabled = &Cron::Enabled.to_string(),
locked_at = &Cron::LockedAt.to_string(),
timeout_ms = &Cron::TimeoutMs.to_string(),
status = &Cron::Status.to_string(),
pending = &CronStatus::Pending.to_value(),
attempts = &Cron::Attempts.to_string(),
max_attempts = &Cron::MaxAttempts.to_string(),
status_type = &CronStatus::name().to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME}
AFTER INSERT OR UPDATE ON {table}
FOR EACH ROW
EXECUTE FUNCTION {NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME}();"#,
table = &Cron::Table.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME}() RETURNS INTEGER AS $$
DECLARE
cron_record RECORD;
notification_count INTEGER := 0;
BEGIN
FOR cron_record IN
SELECT * FROM {table}
WHERE {next_run} IS NOT NULL
AND {next_run} <= CURRENT_TIMESTAMP
AND {enabled} = true
AND {status} = '{pending}'::{status_type}
AND {attempts} < {max_attempts}
AND (
{locked_at} IS NULL
OR (
{timeout_ms} IS NOT NULL
AND {locked_at} + {timeout_ms} * INTERVAL '1 millisecond' <= CURRENT_TIMESTAMP
)
)
ORDER BY {priority} ASC, {next_run} ASC
FOR UPDATE SKIP LOCKED
LOOP
-- PERFORM pg_notify('{CRON_DUE_DEBUG_EVENT}',format('Found due cron: value=%s; Now time: %s', row_to_json(cron_record)::text, CURRENT_TIMESTAMP));
PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(cron_record)::text);
notification_count := notification_count + 1;
END LOOP;
-- PERFORM pg_notify('{CRON_DUE_DEBUG_EVENT}', format('Notification count: %I; Now time: %s', notification_count, CURRENT_TIMESTAMP));
RETURN notification_count;
END;
$$ LANGUAGE plpgsql;"#,
table = &Cron::Table.to_string(),
next_run = &Cron::NextRun.to_string(),
enabled = &Cron::Enabled.to_string(),
status = &Cron::Status.to_string(),
pending = &CronStatus::Pending.to_value(),
locked_at = &Cron::LockedAt.to_string(),
timeout_ms = &Cron::TimeoutMs.to_string(),
priority = &Cron::Priority.to_string(),
attempts = &Cron::Attempts.to_string(),
max_attempts = &Cron::MaxAttempts.to_string(),
status_type = &CronStatus::name().to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.add_column_if_not_exists(integer_null(ApalisJobs::CronId))
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_cron_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::CronId)
.to_tbl(Cron::Table)
.to_col(Cron::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id},
{cron_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{cron_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"
UPDATE {apalis_schema}.{apalis_table} SET {cron_id} = ({job} ->> '{cron_id}')::integer
"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_cron_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_cron_id = (NEW.{job} ->> '{cron_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_cron_id IS DISTINCT FROM (OLD.{job} ->> '{cron_id}')::integer AND new_job_cron_id IS DISTINCT FROM NEW.{cron_id} THEN
NEW.{cron_id} = new_job_cron_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.drop_column(ApalisJobs::CronId)
.drop_foreign_key("fk_apalis_jobs_cron_id")
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"DROP TRIGGER IF EXISTS {NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME} ON {table};"#,
table = &Cron::Table.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"DROP FUNCTION IF EXISTS {NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME}();"#,
))
.await?;
db.execute_unprepared(&format!(
r#"DROP FUNCTION IF EXISTS {CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME}();"#,
))
.await?;
manager
.drop_table(
TableDropStatement::new()
.if_exists()
.table(Cron::Table)
.to_owned(),
)
.await?;
manager
.drop_postgres_enum_for_active_enum(CronStatusEnum)
.await?;
Ok(())
}
}

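The `up` migration defines `{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}` as a trigger function, but the statement that binds it to the jobs table falls outside this hunk. A minimal sketch of how that binding would typically look in this migration's `execute_unprepared` style; the trigger name is an assumption modeled on the `*_trigger` constants shown further down, not taken from the source:

// Sketch only: binding the trigger function defined in `up` above. The
// trigger name is hypothetical, mirroring the cron `*_trigger` constants.
db.execute_unprepared(&format!(
    r#"CREATE TRIGGER setup_apalis_jobs_extra_foreign_keys_trigger
    BEFORE INSERT OR UPDATE ON {apalis_schema}.{apalis_table}
    FOR EACH ROW
    EXECUTE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}()"#,
    apalis_schema = ApalisSchema::Schema.to_string(),
    apalis_table = ApalisJobs::Table.to_string(),
))
.await?;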
View File

@@ -7,7 +7,10 @@ pub mod m20220101_000001_init;
 pub mod m20240224_082543_add_downloads;
 pub mod m20241231_000001_auth;
 pub mod m20250501_021523_credential_3rd;
-pub mod m20250520_021135_subscriber_tasks;
+pub mod m20250520_021135_add_tasks;
+pub mod m20250622_015618_feeds;
+pub mod m20250622_020819_bangumi_and_episode_type;
+pub mod m20250629_065628_add_cron;
 
 pub struct Migrator;
 
@@ -19,7 +22,10 @@ impl MigratorTrait for Migrator {
             Box::new(m20240224_082543_add_downloads::Migration),
             Box::new(m20241231_000001_auth::Migration),
             Box::new(m20250501_021523_credential_3rd::Migration),
-            Box::new(m20250520_021135_subscriber_tasks::Migration),
+            Box::new(m20250520_021135_add_tasks::Migration),
+            Box::new(m20250622_015618_feeds::Migration),
+            Box::new(m20250622_020819_bangumi_and_episode_type::Migration),
+            Box::new(m20250629_065628_add_cron::Migration),
         ]
     }
 }

View File

@@ -63,7 +63,11 @@ impl Model {
             .filter(Column::Pid.eq(pid))
             .one(db)
             .await?
-            .ok_or_else(|| RecorderError::from_db_record_not_found("auth::find_by_pid"))?;
+            .ok_or_else(|| {
+                RecorderError::from_entity_not_found_detail::<Entity, _>(format!(
+                    "pid {pid} not found"
+                ))
+            })?;
 
         Ok(subscriber_auth)
     }
@@ -99,7 +103,9 @@
             ..Default::default()
         };
 
-        let new_item: Model = new_item.save(&txn).await?.try_into()?;
+        let new_item: Model = new_item.insert(&txn).await?;
+
+        txn.commit().await?;
 
         Ok(new_item)
     }

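The `save(...).await?.try_into()?` to `insert(...)` change is more than cosmetic: in sea-orm, `insert` returns the hydrated `Model` directly, while `save` returns an `ActiveModel` that still needs a fallible conversion. A minimal sketch of the two shapes, assuming the surrounding `new_item` and `txn` bindings:

// Before: `save` yields an ActiveModel, so a conversion step was required.
// let new_item: Model = new_item.save(&txn).await?.try_into()?;

// After: `insert` yields the Model directly, and the transaction is now
// committed explicitly instead of being dropped.
let new_item: Model = new_item.insert(&txn).await?;
txn.commit().await?;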
View File

@@ -17,7 +17,7 @@ use crate::{
             MikanBangumiHash, MikanBangumiMeta, build_mikan_bangumi_subscription_rss_url,
             scrape_mikan_poster_meta_from_image_url,
         },
-        rawname::extract_season_from_title_body,
+        origin::{BangumiComps, OriginCompTrait},
     },
 };
 
@@ -29,7 +29,14 @@ pub struct BangumiFilter {
     pub group: Option<Vec<String>>,
 }
 
-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
+#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "bangumi_type")]
+pub enum BangumiType {
+    #[sea_orm(string_value = "mikan")]
+    Mikan,
+}
+
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "bangumi")]
 pub struct Model {
     #[sea_orm(default_expr = "Expr::current_timestamp()")]
@@ -39,9 +46,10 @@ pub struct Model {
     #[sea_orm(primary_key)]
     pub id: i32,
     pub mikan_bangumi_id: Option<String>,
+    pub bangumi_type: BangumiType,
     pub subscriber_id: i32,
     pub display_name: String,
-    pub raw_name: String,
+    pub origin_name: String,
     pub season: i32,
     pub season_raw: Option<String>,
     pub fansub: Option<String>,
@@ -49,7 +57,7 @@ pub struct Model {
     pub filter: Option<BangumiFilter>,
     pub rss_link: Option<String>,
     pub poster_link: Option<String>,
-    pub save_path: Option<String>,
+    pub origin_poster_link: Option<String>,
     pub homepage: Option<String>,
 }
 
@@ -120,9 +128,13 @@ impl ActiveModel {
         _subscription_id: i32,
     ) -> RecorderResult<Self> {
         let mikan_client = ctx.mikan();
-        let storage_service = ctx.storage();
         let mikan_base_url = mikan_client.base_url();
-        let (_, season_raw, season_index) = extract_season_from_title_body(&meta.bangumi_title);
+        let season_comp = BangumiComps::parse_comp(&meta.bangumi_title)
+            .ok()
+            .map(|(_, s)| s)
+            .and_then(|s| s.season);
+        let season_index = season_comp.as_ref().map(|s| s.num).unwrap_or(1);
+        let season_raw = season_comp.map(|s| s.source.to_string());
 
         let rss_url = build_mikan_bangumi_subscription_rss_url(
             mikan_base_url.clone(),
@@ -130,14 +142,9 @@
             Some(&meta.mikan_fansub_id),
         );
 
-        let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src {
-            let poster_meta = scrape_mikan_poster_meta_from_image_url(
-                mikan_client,
-                storage_service,
-                origin_poster_src,
-                subscriber_id,
-            )
-            .await?;
+        let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src.clone() {
+            let poster_meta =
+                scrape_mikan_poster_meta_from_image_url(ctx, origin_poster_src).await?;
 
             poster_meta.poster_src
         } else {
             None
@@ -148,13 +155,15 @@
             mikan_fansub_id: ActiveValue::Set(Some(meta.mikan_fansub_id)),
             subscriber_id: ActiveValue::Set(subscriber_id),
             display_name: ActiveValue::Set(meta.bangumi_title.clone()),
-            raw_name: ActiveValue::Set(meta.bangumi_title),
+            origin_name: ActiveValue::Set(meta.bangumi_title),
             season: ActiveValue::Set(season_index),
             season_raw: ActiveValue::Set(season_raw),
             fansub: ActiveValue::Set(Some(meta.fansub)),
             poster_link: ActiveValue::Set(poster_link),
+            origin_poster_link: ActiveValue::Set(meta.origin_poster_src.map(|src| src.to_string())),
             homepage: ActiveValue::Set(Some(meta.homepage.to_string())),
             rss_link: ActiveValue::Set(Some(rss_url.to_string())),
+            bangumi_type: ActiveValue::Set(BangumiType::Mikan),
             ..Default::default()
         })
     }
@@ -228,9 +237,10 @@ impl Model {
                     Column::SubscriberId,
                 ])
                 .update_columns([
-                    Column::RawName,
+                    Column::OriginName,
                     Column::Fansub,
                     Column::PosterLink,
+                    Column::OriginPosterLink,
                     Column::Season,
                     Column::SeasonRaw,
                     Column::RssLink,

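The new nom-based extractor replaces the tuple-returning `extract_season_from_title_body`. Inferred from the call site above (not from the parser's own source), `BangumiComps::parse_comp` returns a nom-style `(rest, comps)` pair whose optional season component carries a numeric index and the matched source text:

// Hedged sketch of the call-site contract; the title is an arbitrary example.
let season_comp = BangumiComps::parse_comp("葬送的芙莉莲 第2季")
    .ok()
    .map(|(_, comps)| comps)
    .and_then(|comps| comps.season);
let season_index = season_comp.as_ref().map(|s| s.num).unwrap_or(1); // default: season 1
let season_raw = season_comp.map(|s| s.source.to_string()); // e.g. Some("第2季")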
View File

@@ -0,0 +1,11 @@
pub const CRON_DUE_EVENT: &str = "cron_due";
pub const CRON_DUE_DEBUG_EVENT: &str = "cron_due_debug";
pub const CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME: &str = "check_and_trigger_due_crons";
pub const NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME: &str = "notify_due_cron_when_mutating";
pub const NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME: &str =
"notify_due_cron_when_mutating_trigger";
pub const SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME: &str = "setup_cron_extra_foreign_keys";
pub const SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME: &str =
"setup_cron_extra_foreign_keys_trigger";

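These constants name Postgres LISTEN/NOTIFY channels and the functions and triggers that fire them. A minimal sketch of a worker subscribing to the `cron_due` channel with sqlx's `PgListener` (the connection URL and surrounding wiring are placeholders; the real worker lives elsewhere in the codebase):

use sqlx::postgres::PgListener;

// Placeholder wiring; only the channel name comes from the constants above.
async fn listen_for_due_crons(database_url: &str) -> Result<(), sqlx::Error> {
    let mut listener = PgListener::connect(database_url).await?;
    listener.listen(CRON_DUE_EVENT).await?;
    loop {
        // Payload is the serialized cron row; see Model::handle_cron_notification below.
        let notification = listener.recv().await?;
        tracing::debug!(payload = notification.payload(), "cron due");
    }
}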
View File

@@ -0,0 +1,452 @@
mod core;
pub use core::{
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_DEBUG_EVENT, CRON_DUE_EVENT,
NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME, NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME,
};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use chrono_tz::Tz;
use croner::Cron;
use sea_orm::{
ActiveValue::{self, Set},
Condition, DeriveActiveEnum, DeriveDisplay, DeriveEntityModel, EnumIter, QuerySelect,
Statement, TransactionTrait,
entity::prelude::*,
sea_query::{ExprTrait, LockBehavior, LockType},
sqlx::postgres::PgNotification,
};
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::RecorderResult,
models::{subscriber_tasks, system_tasks},
task::{SubscriberTaskTrait, SystemTaskTrait},
};
#[derive(
Debug, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "cron_status")]
#[serde(rename_all = "snake_case")]
pub enum CronStatus {
#[sea_orm(string_value = "pending")]
Pending,
#[sea_orm(string_value = "running")]
Running,
#[sea_orm(string_value = "completed")]
Completed,
#[sea_orm(string_value = "failed")]
Failed,
#[sea_orm(string_value = "disabled")]
Disabled,
}
#[derive(Debug, Clone, DeriveEntityModel, PartialEq, Serialize, Deserialize)]
#[sea_orm(table_name = "cron")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTimeUtc,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTimeUtc,
#[sea_orm(primary_key)]
pub id: i32,
pub subscriber_id: Option<i32>,
pub subscription_id: Option<i32>,
pub cron_expr: String,
pub cron_timezone: String,
pub next_run: Option<DateTimeUtc>,
pub last_run: Option<DateTimeUtc>,
pub last_error: Option<String>,
pub locked_by: Option<String>,
pub locked_at: Option<DateTimeUtc>,
// default_expr = "5000"
pub timeout_ms: Option<i32>,
#[sea_orm(default_expr = "0")]
pub attempts: i32,
#[sea_orm(default_expr = "1")]
pub max_attempts: i32,
#[sea_orm(default_expr = "0")]
pub priority: i32,
pub status: CronStatus,
#[sea_orm(default_expr = "true")]
pub enabled: bool,
pub subscriber_task_cron: Option<subscriber_tasks::SubscriberTask>,
pub system_task_cron: Option<system_tasks::SystemTask>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Subscriber,
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Subscription,
#[sea_orm(has_many = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(has_many = "super::system_tasks::Entity")]
SystemTask,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::subscriber_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriberTask.def()
}
}
impl Related<super::system_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SystemTask.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(entity = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(entity = "super::system_tasks::Entity")]
SystemTask,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
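// Recompute next_run only when cron_expr and cron_timezone are both being
// set in this save and the caller did not supply next_run itself.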
match (
&self.cron_expr as &ActiveValue<String>,
&self.cron_timezone as &ActiveValue<String>,
) {
(ActiveValue::Set(cron_expr), ActiveValue::Set(timezone)) => {
if matches!(
&self.next_run,
ActiveValue::NotSet | ActiveValue::Unchanged(_)
) {
let next_run = Model::calculate_next_run(cron_expr, timezone)
.map_err(|e| DbErr::Custom(e.to_string()))?;
self.next_run = Set(Some(next_run));
}
}
(
ActiveValue::Unchanged(_) | ActiveValue::NotSet,
ActiveValue::Unchanged(_) | ActiveValue::NotSet,
) => {}
(_, _) => {
if matches!(
self.next_run,
ActiveValue::NotSet | ActiveValue::Unchanged(_)
) {
return Err(DbErr::Custom(
"Cron expr and timezone must be insert or update at same time when next \
run is not set"
.to_string(),
));
}
}
};
if let ActiveValue::Set(Some(subscriber_id)) = self.subscriber_id
&& let ActiveValue::Set(Some(ref subscriber_task)) = self.subscriber_task_cron
&& subscriber_task.get_subscriber_id() != subscriber_id
{
return Err(DbErr::Custom(
"Cron subscriber_id does not match subscriber_task_cron.subscriber_id".to_string(),
));
}
if let ActiveValue::Set(Some(subscriber_id)) = self.subscriber_id
&& let ActiveValue::Set(Some(ref system_task)) = self.system_task_cron
&& system_task.get_subscriber_id() != Some(subscriber_id)
{
return Err(DbErr::Custom(
"Cron subscriber_id does not match system_task_cron.subscriber_id".to_string(),
));
}
if let ActiveValue::Set(enabled) = self.enabled
&& !insert
{
if enabled {
self.status = Set(CronStatus::Pending)
} else {
self.status = Set(CronStatus::Disabled)
}
}
Ok(self)
}
}
impl Model {
pub async fn handle_cron_notification(
ctx: &dyn AppContextTrait,
notification: PgNotification,
worker_id: &str,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
let payload: Self = serde_json::from_str(notification.payload())?;
let cron_id = payload.id;
tracing::debug!("Cron notification received for cron {cron_id} and worker {worker_id}");
match Self::try_acquire_lock_with_cron_id(ctx, cron_id, worker_id).await? {
Some(cron) => match cron.exec_cron(ctx).await {
Ok(()) => {
tracing::debug!("Cron {cron_id} executed successfully");
cron.mark_cron_completed(ctx).await?;
}
Err(e) => {
tracing::error!("Error executing cron {cron_id}: {e}");
cron.mark_cron_failed(ctx, &e.to_string(), retry_duration)
.await?;
}
},
None => {
tracing::debug!(
"Cron lock not acquired for cron {cron_id} and worker {worker_id}, skipping..."
);
}
}
Ok(())
}
async fn try_acquire_lock_with_cron_id(
ctx: &dyn AppContextTrait,
cron_id: i32,
worker_id: &str,
) -> RecorderResult<Option<Self>> {
let db = ctx.db();
let txn = db.begin().await?;
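// FOR UPDATE SKIP LOCKED: a row already claimed by another worker is skipped
// instead of waited on, so each due cron is executed by exactly one worker.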
let cron = Entity::find_by_id(cron_id)
.lock_with_behavior(LockType::Update, LockBehavior::SkipLocked)
.one(&txn)
.await?;
if let Some(cron) = cron {
if cron.enabled
&& cron.attempts < cron.max_attempts
&& cron.status == CronStatus::Pending
&& (cron.locked_at.is_none_or(|locked_at| {
cron.timeout_ms.is_some_and(|cron_timeout_ms| {
locked_at + chrono::Duration::milliseconds(cron_timeout_ms as i64)
<= Utc::now()
})
}))
&& cron.next_run.is_some_and(|next_run| next_run <= Utc::now())
{
let cron_active_model = ActiveModel {
id: Set(cron.id),
locked_by: Set(Some(worker_id.to_string())),
locked_at: Set(Some(Utc::now())),
status: Set(CronStatus::Running),
attempts: Set(cron.attempts + 1),
..Default::default()
};
let cron_model = cron_active_model.update(&txn).await?;
txn.commit().await?;
return Ok(Some(cron_model));
}
txn.commit().await?;
return Ok(Some(cron));
}
txn.rollback().await?;
Ok(None)
}
async fn exec_cron(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
if let Some(subscriber_task) = self.subscriber_task_cron.as_ref() {
let task_service = ctx.task();
let mut new_subscriber_task = subscriber_task.clone();
new_subscriber_task.set_cron_id(Some(self.id));
task_service
.add_subscriber_task(new_subscriber_task)
.await?;
} else if let Some(system_task) = self.system_task_cron.as_ref() {
let task_service = ctx.task();
let mut new_system_task = system_task.clone();
new_system_task.set_cron_id(Some(self.id));
task_service.add_system_task(new_system_task).await?;
} else {
unimplemented!("Cron without a subscriber or system task is not supported yet");
}
Ok(())
}
async fn mark_cron_completed(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
let db = ctx.db();
let next_run = Self::calculate_next_run(&self.cron_expr, &self.cron_timezone)?;
ActiveModel {
id: Set(self.id),
next_run: Set(Some(next_run)),
last_run: Set(Some(Utc::now())),
status: Set(CronStatus::Pending),
locked_by: Set(None),
locked_at: Set(None),
attempts: Set(0),
last_error: Set(None),
..Default::default()
}
.update(db)
.await?;
Ok(())
}
async fn mark_cron_failed(
&self,
ctx: &dyn AppContextTrait,
error: &str,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
let db = ctx.db();
let should_retry = self.attempts < self.max_attempts;
let status = if should_retry {
CronStatus::Pending
} else {
CronStatus::Failed
};
let next_run = if should_retry {
Some(Utc::now() + retry_duration)
} else {
Some(Self::calculate_next_run(
&self.cron_expr,
&self.cron_timezone,
)?)
};
ActiveModel {
id: Set(self.id),
next_run: Set(next_run),
status: Set(status),
locked_by: Set(None),
locked_at: Set(None),
last_run: Set(Some(Utc::now())),
last_error: Set(Some(error.to_string())),
attempts: Set(if should_retry { self.attempts + 1 } else { 0 }),
..Default::default()
}
.update(db)
.await?;
Ok(())
}
pub async fn check_and_trigger_due_crons(ctx: &dyn AppContextTrait) -> RecorderResult<()> {
let db = ctx.db();
db.execute(Statement::from_string(
db.get_database_backend(),
format!("SELECT {CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME}()"),
))
.await?;
Ok(())
}
pub async fn check_and_cleanup_expired_cron_locks(
ctx: &dyn AppContextTrait,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
let db = ctx.db();
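// A lock counts as expired when the cron is still marked Running but
// last_run + timeout_ms already lies in the past.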
let condition = Condition::all()
.add(Column::Status.eq(CronStatus::Running))
.add(Column::LastRun.is_not_null())
.add(Column::TimeoutMs.is_not_null())
.add(
Expr::col(Column::LastRun)
.add(Expr::col(Column::TimeoutMs).mul(Expr::cust("INTERVAL '1 millisecond'")))
.lte(Expr::current_timestamp()),
);
let cron_ids = Entity::find()
.select_only()
.column(Column::Id)
.filter(condition.clone())
.lock_with_behavior(LockType::Update, LockBehavior::SkipLocked)
.into_tuple::<i32>()
.all(db)
.await?;
for cron_id in cron_ids {
let txn = db.begin().await?;
let locked_cron = Entity::find_by_id(cron_id)
.filter(condition.clone())
.lock_with_behavior(LockType::Update, LockBehavior::SkipLocked)
.one(&txn)
.await?;
if let Some(locked_cron) = locked_cron {
locked_cron
.mark_cron_failed(
ctx,
format!(
"Cron timeout of {}ms",
locked_cron
.timeout_ms
.as_ref()
.map(|s| s.to_string())
.unwrap_or_else(|| "Infinite".to_string())
)
.as_str(),
retry_duration,
)
.await?;
txn.commit().await?;
} else {
txn.rollback().await?;
}
}
Ok(())
}
pub fn calculate_next_run(cron_expr: &str, timezone: &str) -> RecorderResult<DateTime<Utc>> {
let user_tz = timezone.parse::<Tz>()?;
let user_tz_now = Utc::now().with_timezone(&user_tz);
let cron_expr = Cron::new(cron_expr).with_seconds_optional().parse()?;
let next = cron_expr.find_next_occurrence(&user_tz_now, false)?;
let next_utc = next.with_timezone(&Utc);
Ok(next_utc)
}
}

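For reference, `calculate_next_run` resolves the expression in the user's timezone and converts the result back to UTC. A minimal usage sketch; the expression and timezone are arbitrary examples:

// "0 0 * * *" = local midnight; Asia/Shanghai is UTC+8, so the returned
// instant is 16:00 UTC of the preceding day.
let next_utc = Model::calculate_next_run("0 0 * * *", "Asia/Shanghai")?;
assert!(next_utc > chrono::Utc::now());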
View File

@@ -44,7 +44,7 @@ pub struct Model {
     pub updated_at: DateTimeUtc,
     #[sea_orm(primary_key)]
     pub id: i32,
-    pub raw_name: String,
+    pub origin_name: String,
     pub display_name: String,
     pub downloader_id: i32,
     pub episode_id: i32,
@@ -52,8 +52,8 @@ pub struct Model {
     pub status: DownloadStatus,
     pub mime: DownloadMime,
     pub url: String,
-    pub all_size: Option<u64>,
-    pub curr_size: Option<u64>,
+    pub all_size: Option<i64>,
+    pub curr_size: Option<i64>,
     pub homepage: Option<String>,
     pub save_path: Option<String>,
 }

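The `Option<u64>` to `Option<i64>` switch for the size columns reflects that Postgres has no unsigned 64-bit type: BIGINT is signed, so sea-orm maps it to `i64`. Where an upstream API reports sizes as `u64`, a clamping conversion avoids an overflow panic; the helper below is hypothetical, not part of the diff:

// Hypothetical helper: clamp a u64 byte count into Postgres's signed BIGINT.
fn to_db_size(size: u64) -> i64 {
    i64::try_from(size).unwrap_or(i64::MAX)
}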
View File

@@ -9,11 +9,19 @@ use crate::{
     app::AppContextTrait,
     errors::RecorderResult,
     extract::{
+        bittorrent::EpisodeEnclosureMeta,
         mikan::{MikanEpisodeHash, MikanEpisodeMeta, build_mikan_episode_homepage_url},
-        rawname::extract_episode_meta_from_raw_name,
+        origin::{OriginCompTrait, OriginNameRoot},
     },
 };
 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
+#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "episode_type")]
+pub enum EpisodeType {
+    #[sea_orm(string_value = "mikan")]
+    Mikan,
+}
+
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "episodes")]
 pub struct Model {
@@ -25,16 +33,21 @@ pub struct Model {
     pub id: i32,
     #[sea_orm(indexed)]
     pub mikan_episode_id: Option<String>,
-    pub raw_name: String,
+    pub enclosure_torrent_link: Option<String>,
+    pub enclosure_magnet_link: Option<String>,
+    pub enclosure_pub_date: Option<DateTimeUtc>,
+    pub enclosure_content_length: Option<i64>,
+    pub episode_type: EpisodeType,
+    pub origin_name: String,
     pub display_name: String,
     pub bangumi_id: i32,
     pub subscriber_id: i32,
-    pub save_path: Option<String>,
     pub resolution: Option<String>,
     pub season: i32,
     pub season_raw: Option<String>,
     pub fansub: Option<String>,
     pub poster_link: Option<String>,
+    pub origin_poster_link: Option<String>,
     pub episode_index: i32,
     pub homepage: Option<String>,
     pub subtitle: Option<String>,
@@ -116,14 +129,15 @@
 }
 
 impl ActiveModel {
-    #[tracing::instrument(err, skip(ctx), fields(bangumi_id = ?bangumi.id, mikan_episode_id = ?episode.mikan_episode_id))]
+    #[tracing::instrument(err, skip_all, fields(bangumi_id = ?bangumi.id, mikan_episode_id = ?episode.mikan_episode_id))]
     pub fn from_mikan_bangumi_and_episode_meta(
         ctx: &dyn AppContextTrait,
         bangumi: &bangumi::Model,
         episode: MikanEpisodeMeta,
+        enclosure_meta: EpisodeEnclosureMeta,
     ) -> RecorderResult<Self> {
         let mikan_base_url = ctx.mikan().base_url().clone();
-        let episode_extention_meta = extract_episode_meta_from_raw_name(&episode.episode_title)
+        let episode_extention_meta = OriginNameRoot::parse_comp(&episode.episode_title)
             .inspect_err(|err| {
                 tracing::error!(
                     err = ?err,
@@ -131,12 +145,13 @@
                     "Failed to parse episode extension meta from episode title, skip"
                 );
             })
+            .map(|(_, e)| e.into_meta())
             .ok();
 
         let homepage = build_mikan_episode_homepage_url(mikan_base_url, &episode.mikan_episode_id);
 
         let mut episode_active_model = Self {
             mikan_episode_id: ActiveValue::Set(Some(episode.mikan_episode_id)),
-            raw_name: ActiveValue::Set(episode.episode_title.clone()),
+            origin_name: ActiveValue::Set(episode.episode_title.clone()),
             display_name: ActiveValue::Set(episode.episode_title.clone()),
             bangumi_id: ActiveValue::Set(bangumi.id),
             subscriber_id: ActiveValue::Set(bangumi.subscriber_id),
@@ -145,7 +160,13 @@
             season: ActiveValue::Set(bangumi.season),
             fansub: ActiveValue::Set(bangumi.fansub.clone()),
             poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
+            origin_poster_link: ActiveValue::Set(bangumi.origin_poster_link.clone()),
             episode_index: ActiveValue::Set(0),
+            enclosure_torrent_link: ActiveValue::Set(enclosure_meta.torrent_link),
+            enclosure_magnet_link: ActiveValue::Set(enclosure_meta.magnet_link),
+            enclosure_pub_date: ActiveValue::Set(enclosure_meta.pub_date),
+            enclosure_content_length: ActiveValue::Set(enclosure_meta.content_length),
+            episode_type: ActiveValue::Set(EpisodeType::Mikan),
             ..Default::default()
         };
 
@@ -213,14 +234,19 @@ impl Model {
     pub async fn add_mikan_episodes_for_subscription(
         ctx: &dyn AppContextTrait,
-        creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta)>,
+        creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta, EpisodeEnclosureMeta)>,
         subscriber_id: i32,
         subscription_id: i32,
     ) -> RecorderResult<()> {
         let db = ctx.db();
 
         let new_episode_active_modes: Vec<ActiveModel> = creations
-            .map(|(bangumi, episode_meta)| {
-                ActiveModel::from_mikan_bangumi_and_episode_meta(ctx, bangumi, episode_meta)
+            .map(|(bangumi, episode_meta, enclosure_meta)| {
+                ActiveModel::from_mikan_bangumi_and_episode_meta(
+                    ctx,
+                    bangumi,
+                    episode_meta,
+                    enclosure_meta,
+                )
             })
             .collect::<Result<_, _>>()?;
 
@@ -231,7 +257,23 @@
         let new_episode_ids = Entity::insert_many(new_episode_active_modes)
             .on_conflict(
                 OnConflict::columns([Column::MikanEpisodeId, Column::SubscriberId])
-                    .update_columns([Column::RawName, Column::PosterLink, Column::Homepage])
+                    .update_columns([
+                        Column::OriginName,
+                        Column::PosterLink,
+                        Column::OriginPosterLink,
+                        Column::Homepage,
+                        Column::EnclosureContentLength,
+                        Column::EnclosurePubDate,
+                        Column::EnclosureTorrentLink,
+                        Column::EnclosureMagnetLink,
+                        Column::EpisodeIndex,
+                        Column::Subtitle,
+                        Column::Source,
+                        Column::Resolution,
+                        Column::Season,
+                        Column::SeasonRaw,
+                        Column::Fansub,
+                    ])
                     .to_owned(),
             )
             .exec_with_returning_columns(db, [Column::Id])

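With the widened conflict handling, `insert_many` becomes an upsert keyed on `(mikan_episode_id, subscriber_id)` that refreshes every listed column from the incoming row. A hedged sketch of a call site under the new three-tuple signature (all bindings are placeholders; only the shapes come from the diff above):

// Placeholder bindings: `scraped` pairs each MikanEpisodeMeta with its
// EpisodeEnclosureMeta, and `bangumi` is the already-upserted bangumi row.
let creations = scraped
    .into_iter()
    .map(|(episode_meta, enclosure_meta)| (&bangumi, episode_meta, enclosure_meta));
Model::add_mikan_episodes_for_subscription(ctx, creations, subscriber_id, subscription_id)
    .await?;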
Some files were not shown because too many files have changed in this diff.