Compare commits


25 Commits
master...dev

SHA1 Message Date
94919878ea fix: fix issues 2025-07-02 01:33:32 +08:00
81bf27ed28 fix: fix 2025-07-08 00:54:34 +08:00
5be5b9f634 fix: fix cron builder 2025-07-07 01:34:56 +08:00
6cdd8c27ce fix: fix typos 2025-07-06 05:05:07 +08:00
4174cea728 fix: fix cron webui 2025-07-06 02:35:55 +08:00
3aad31a36b feat: more cron webui 2025-07-05 04:08:56 +08:00
004fed9b2e feat: init cron webui 2025-07-05 02:08:55 +08:00
a1c2eeded1 temp save 2025-07-04 05:59:56 +08:00
147df00155 build: add prod build 2025-07-04 05:06:45 +08:00
5155c59293 fix: fix migrations 2025-07-04 01:25:07 +08:00
b5b3c77ba3 fix: fix migrations 2025-07-03 04:25:50 +08:00
1d0aa8d7f1 feat: support system tasks 2025-07-03 03:48:23 +08:00
5b001f9584 refactor: refactor graphql 2025-07-02 01:25:44 +08:00
d06acde882 fix: temp save 2025-07-01 03:45:56 +08:00
bacfe99ef2 fix: fix issues 2025-06-30 02:05:23 +08:00
b4090e74c0 fix: fix webui compability 2025-06-29 02:05:44 +08:00
c3e546e256 refactor: refactor graphql more 2025-06-27 05:54:25 +08:00
f83371bbf9 fix: fix task lifetime 2025-06-28 04:10:18 +08:00
c858cc7d44 fix: fix cron timeout clean 2025-06-28 03:38:53 +08:00
65505f91b2 refactor: refactor graphql 2025-06-27 04:06:58 +08:00
c8501b1768 fix: remove inconsistent cleanup function 2025-06-27 02:18:23 +08:00
3a8eb88e1a feat: add cron 2025-06-26 02:56:55 +08:00
003d8840fd fix: fix dotenv loader inconsistent and many ui issues 2025-06-25 06:36:15 +08:00
41ff5c2a11 fix: fix production issues 2025-06-25 05:21:08 +08:00
571caf50ff fix: fix feed rss 2025-06-25 01:26:06 +08:00
197 changed files with 11524 additions and 3319 deletions

.vscode/settings.json vendored

@@ -41,12 +41,4 @@
 ],
 "rust-analyzer.cargo.features": "all",
 "rust-analyzer.testExplorer": true
-// https://github.com/rust-lang/rust/issues/141540
-// "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
-// "rust-analyzer.check.extraEnv": {
-// "CARGO_TARGET_DIR": "target/rust-analyzer"
-// },
-// "rust-analyzer.cargo.extraEnv": {
-// "CARGO_TARGET_DIR": "target/analyzer"
-// }
 }

Cargo.lock generated

@@ -356,9 +356,9 @@ dependencies = [
 [[package]]
 name = "async-channel"
-version = "2.3.1"
+version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
+checksum = "16c74e56284d2188cabb6ad99603d1ace887a5d7e7b695d01b728155ed9ed427"
 dependencies = [
 "concurrent-queue",
 "event-listener-strategy",
@@ -404,7 +404,7 @@ dependencies = [
 "futures-util",
 "handlebars",
 "http",
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "lru",
 "mime",
 "multer",
@@ -474,7 +474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "34ecdaff7c9cffa3614a9f9999bf9ee4c3078fe3ce4d6a6e161736b56febf2de"
 dependencies = [
 "bytes",
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "serde",
 "serde_json",
 ]
@@ -551,7 +551,8 @@ dependencies = [
 "derive_builder",
 "diligent-date-parser",
 "never",
-"quick-xml",
+"quick-xml 0.37.5",
+"serde",
 ]

 [[package]]
@@ -591,9 +592,9 @@ dependencies = [
 [[package]]
 name = "avif-serialize"
-version = "0.8.3"
+version = "0.8.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98922d6a4cfbcb08820c69d8eeccc05bb1f29bfa06b4f5b1dbfe9a868bd7608e"
+checksum = "19135c0c7a60bfee564dbe44ab5ce0557c6bf3884e5291a50be76a15640c4fbd"
 dependencies = [
 "arrayvec",
 ]
@@ -1008,9 +1009,9 @@ checksum = "56ed6191a7e78c36abdb16ab65341eefd73d64d303fffccdbb00d51e4205967b"
 [[package]]
 name = "bumpalo"
-version = "3.18.1"
+version = "3.19.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "793db76d6187cd04dff33004d8e6c9cc4e05cd330500379d2394209271b4aeee"
+checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"

 [[package]]
 name = "bytecheck"
@@ -1259,9 +1260,9 @@ dependencies = [
 [[package]]
 name = "clap"
-version = "4.5.40"
+version = "4.5.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f"
+checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
 dependencies = [
 "clap_builder",
 "clap_derive",
@@ -1269,9 +1270,9 @@ dependencies = [
 [[package]]
 name = "clap_builder"
-version = "4.5.40"
+version = "4.5.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e"
+checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
 dependencies = [
 "anstream",
 "anstyle",
@@ -1281,9 +1282,9 @@ dependencies = [
 [[package]]
 name = "clap_derive"
-version = "4.5.40"
+version = "4.5.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce"
+checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491"
 dependencies = [
 "heck 0.5.0",
 "proc-macro2",
@@ -1579,6 +1580,15 @@ dependencies = [
 "cfg-if",
 ]

+[[package]]
+name = "croner"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c344b0690c1ad1c7176fe18eb173e0c927008fdaaa256e40dfd43ddd149c0843"
+dependencies = [
+"chrono",
+]
+
 [[package]]
 name = "crossbeam-channel"
 version = "0.5.15"
@@ -1662,9 +1672,9 @@ dependencies = [
 [[package]]
 name = "crunchy"
-version = "0.2.3"
+version = "0.2.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929"
+checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"

 [[package]]
 name = "crypto-bigint"
@@ -1912,6 +1922,17 @@ dependencies = [
 "serde",
 ]

+[[package]]
+name = "derivative"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
+dependencies = [
+"proc-macro2",
+"quote",
+"syn 1.0.109",
+]
+
 [[package]]
 name = "derive_builder"
 version = "0.20.2"
@@ -2322,11 +2343,12 @@ checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
 [[package]]
 name = "fancy-regex"
-version = "0.14.0"
+version = "0.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
+checksum = "d6215aee357f8c7c989ebb4b8466ca4d7dc93b3957039f2fc3ea2ade8ea5f279"
 dependencies = [
 "bit-set",
+"derivative",
 "regex-automata 0.4.9",
 "regex-syntax 0.8.5",
 ]
@@ -2771,9 +2793,9 @@ dependencies = [
 [[package]]
 name = "gif"
-version = "0.13.2"
+version = "0.13.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fcc37f9a2bfe731e69f1e08d29d91d30604b9ce24bcb2880a961e82d89c6ed89"
+checksum = "4ae047235e33e2829703574b54fdec96bfbad892062d97fed2f76022287de61b"
 dependencies = [
 "color_quant",
 "weezl",
@@ -2863,9 +2885,9 @@ dependencies = [
 [[package]]
 name = "h2"
-version = "0.4.10"
+version = "0.4.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9421a676d1b147b16b82c9225157dc629087ef8ec4d5e2960f9437a90dac0a5"
+checksum = "17da50a276f1e01e0ba6c029e47b7100754904ee8a278f886546e98575380785"
 dependencies = [
 "atomic-waker",
 "bytes",
@@ -2873,7 +2895,7 @@ dependencies = [
 "futures-core",
 "futures-sink",
 "http",
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "slab",
 "tokio",
 "tokio-util",
@@ -3837,9 +3859,9 @@ dependencies = [
 [[package]]
 name = "indexmap"
-version = "2.9.0"
+version = "2.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
+checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
 dependencies = [
 "equivalent",
 "hashbrown 0.15.4",
@@ -3957,6 +3979,17 @@ dependencies = [
 "smallvec",
 ]

+[[package]]
+name = "io-uring"
+version = "0.7.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013"
+dependencies = [
+"bitflags 2.9.1",
+"cfg-if",
+"libc",
+]
+
 [[package]]
 name = "ipnet"
 version = "2.11.0"
@@ -4148,9 +4181,9 @@ checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
 [[package]]
 name = "libfuzzer-sys"
-version = "0.4.9"
+version = "0.4.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf78f52d400cf2d84a3a973a78a592b4adc535739e0a5597a0da6f0c357adc75"
+checksum = "5037190e1f70cbeef565bd267599242926f724d3b8a9f510fd7e0b540cfa4404"
 dependencies = [
 "arbitrary",
 "cc",
@@ -4164,9 +4197,9 @@ checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
 [[package]]
 name = "libredox"
-version = "0.1.3"
+version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
+checksum = "1580801010e535496706ba011c15f8532df6b42297d2e471fec38ceadd8c0638"
 dependencies = [
 "bitflags 2.9.1",
 "libc",
@@ -4298,7 +4331,7 @@ dependencies = [
 "dashmap 6.1.0",
 "futures",
 "hex 0.4.3",
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "leaky-bucket",
 "librqbit-bencode",
 "librqbit-clone-to-owned",
@@ -4373,7 +4406,7 @@ dependencies = [
 "futures",
 "httparse",
 "network-interface",
-"quick-xml",
+"quick-xml 0.37.5",
 "reqwest",
 "serde",
 "tokio",
@@ -4413,9 +4446,9 @@ dependencies = [
 [[package]]
 name = "lightningcss"
-version = "1.0.0-alpha.66"
+version = "1.0.0-alpha.67"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a73ffa17de66534e4b527232f44aa0a89fad22c4f4e0735f9be35494f058e54"
+checksum = "798fba4e1205eed356b8ed7754cc3f7f04914e27855ca641409f4a532e992149"
 dependencies = [
 "ahash 0.8.12",
 "bitflags 2.9.1",
@@ -4425,7 +4458,7 @@ dependencies = [
 "dashmap 5.5.3",
 "data-encoding",
 "getrandom 0.2.16",
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "itertools 0.10.5",
 "lazy_static",
 "lightningcss-derive",
@@ -4825,15 +4858,6 @@ dependencies = [
 "version_check",
 ]

-[[package]]
-name = "nanoid"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ffa00dec017b5b1a8b7cf5e2c008bfda1aa7e0697ac1508b491fdf2622fb4d8"
-dependencies = [
-"rand 0.8.5",
-]
-
 [[package]]
 name = "native-tls"
 version = "0.2.14"
@@ -5154,7 +5178,7 @@ dependencies = [
 "itertools 0.14.0",
 "parking_lot 0.12.4",
 "percent-encoding",
-"quick-xml",
+"quick-xml 0.37.5",
 "rand 0.9.1",
 "reqwest",
 "ring",
@@ -5207,7 +5231,7 @@ dependencies = [
 "log",
 "md-5",
 "percent-encoding",
-"quick-xml",
+"quick-xml 0.37.5",
 "reqwest",
 "serde",
 "serde_json",
@@ -5352,9 +5376,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
 [[package]]
 name = "owo-colors"
-version = "4.2.1"
+version = "4.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26995317201fa17f3656c36716aed4a7c81743a9634ac4c99c0eeda495db0cec"
+checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e"

 [[package]]
 name = "p256"
@@ -5848,7 +5872,7 @@ dependencies = [
 "either",
 "hashbrown 0.14.5",
 "hashbrown 0.15.4",
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "itoa",
 "num-traits",
 "polars-arrow",
@@ -6009,7 +6033,7 @@ dependencies = [
 "either",
 "hashbrown 0.15.4",
 "hex 0.4.3",
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "libm",
 "memchr",
 "num-traits",
@@ -6118,7 +6142,7 @@ version = "0.49.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ada7c7e2fbbeffbdd67628cd8a89f02b0a8d21c71d34e297e2463a7c17575203"
 dependencies = [
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "polars-error",
 "polars-utils",
 "serde",
@@ -6219,7 +6243,7 @@ dependencies = [
 "flate2",
 "foldhash",
 "hashbrown 0.15.4",
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "libc",
 "memmap2 0.9.5",
 "num-traits",
@@ -6493,6 +6517,16 @@ dependencies = [
 "serde",
 ]

+[[package]]
+name = "quick-xml"
+version = "0.38.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8927b0664f5c5a98265138b7e3f90aa19a6b21353182469ace36d4ac527b7b1b"
+dependencies = [
+"memchr",
+"serde",
+]
+
 [[package]]
 name = "quinn"
 version = "0.11.8"
@@ -6745,10 +6779,12 @@ dependencies = [
 "base64 0.22.1",
 "bytes",
 "chrono",
+"chrono-tz 0.10.3",
 "clap",
 "cocoon",
 "color-eyre",
 "convert_case 0.8.0",
+"croner",
 "ctor",
 "dotenvy",
 "downloader",
@@ -6775,15 +6811,16 @@ dependencies = [
 "mime_guess",
 "mockito",
 "moka",
-"nanoid",
 "nom 8.0.0",
 "num-traits",
 "num_cpus",
 "once_cell",
 "opendal",
 "openidconnect",
+"paste",
 "percent-encoding",
 "polars",
+"quick-xml 0.38.0",
 "quirks_path",
 "rand 0.9.1",
 "regex",
@@ -6795,6 +6832,7 @@ dependencies = [
 "sea-orm",
 "sea-orm-migration",
 "seaography",
+"secrecy",
 "serde",
 "serde_json",
 "serde_variant",
@@ -6811,7 +6849,9 @@ dependencies = [
 "tracing",
 "tracing-appender",
 "tracing-subscriber",
+"tracing-test",
 "tracing-tree",
+"ts-rs",
 "typed-builder 0.21.0",
 "url",
 "util",
@@ -6971,9 +7011,9 @@ dependencies = [
 [[package]]
 name = "reqwest"
-version = "0.12.20"
+version = "0.12.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eabf4c97d9130e2bf606614eb937e86edac8292eaa6f422f995d7e8de1eb1813"
+checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
 dependencies = [
 "base64 0.22.1",
 "bytes",
@@ -7215,7 +7255,8 @@ dependencies = [
 "atom_syndication",
 "derive_builder",
 "never",
-"quick-xml",
+"quick-xml 0.37.5",
+"serde",
 ]

 [[package]]
@@ -7424,6 +7465,18 @@ dependencies = [
 "serde_json",
 ]

+[[package]]
+name = "schemars"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1375ba8ef45a6f15d83fa8748f1079428295d403d6ea991d09ab100155fbc06d"
+dependencies = [
+"dyn-clone",
+"ref-cast",
+"serde",
+"serde_json",
+]
+
 [[package]]
 name = "scoped-tls"
 version = "1.0.1"
@@ -7472,9 +7525,9 @@ dependencies = [
 [[package]]
 name = "sea-orm"
-version = "1.1.12"
+version = "1.1.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18b7272b88bd608cd846de24f41b74a0315a135fe761b0aed4ec1ce6a6327a93"
+checksum = "560ea59f07472886a236e7919b9425cf16914fee1d663d3c32f1af2e922b83f0"
 dependencies = [
 "async-stream",
 "async-trait",
@@ -7501,9 +7554,9 @@ dependencies = [
 [[package]]
 name = "sea-orm-cli"
-version = "1.1.12"
+version = "1.1.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a4961b0d9098a9dc992d6e75fb761f9e5c442bb46746eeffa08e47b53759fce"
+checksum = "00dd755ba3faca11692d8aaca46b68f1b4955c5dfdd6a3f1f9fba3a679a3ec1d"
 dependencies = [
 "chrono",
 "clap",
@@ -7519,9 +7572,9 @@ dependencies = [
 [[package]]
 name = "sea-orm-macros"
-version = "1.1.12"
+version = "1.1.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c38255a6b2e6d1ae2d5df35696507a345f03c036ae32caeb0a3b922dbab610d"
+checksum = "70d0ea50bb4317c8a58ed34dc410a79d685128e7b77ddcd9e8b59ae6416a56d9"
 dependencies = [
 "heck 0.5.0",
 "proc-macro-crate",
@@ -7534,9 +7587,9 @@ dependencies = [
 [[package]]
 name = "sea-orm-migration"
-version = "1.1.12"
+version = "1.1.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "82f58c3b1dcf6c137f08394f0228f9baf1574a2a799e93dc5da3cd9228bef9c5"
+checksum = "3e06e0f3ca090091ad58da2bc02cdb63f9afbd276baf029f065f6ff09e79cbe9"
 dependencies = [
 "async-trait",
 "clap",
@@ -7629,16 +7682,16 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
 [[package]]
 name = "seaography"
 version = "1.1.4"
-source = "git+https://github.com/dumtruck/seaography.git?rev=a787c3a#a787c3ab83cf1f8275894e1bc1ca3c766b54674b"
+source = "git+https://github.com/dumtruck/seaography.git?rev=292cdd2#292cdd248217fdcf81c41aa97fe1c047c9b5f4de"
 dependencies = [
 "async-graphql",
 "fnv",
-"heck 0.4.1",
+"heck 0.5.0",
-"itertools 0.12.1",
+"itertools 0.14.0",
 "lazy_static",
 "sea-orm",
 "serde_json",
-"thiserror 1.0.69",
+"thiserror 2.0.12",
 ]

 [[package]]
@@ -7655,6 +7708,16 @@ dependencies = [
 "zeroize",
 ]

+[[package]]
+name = "secrecy"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a"
+dependencies = [
+"serde",
+"zeroize",
+]
+
 [[package]]
 name = "security-framework"
 version = "2.11.1"
@@ -7820,16 +7883,17 @@ dependencies = [
 [[package]]
 name = "serde_with"
-version = "3.13.0"
+version = "3.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf65a400f8f66fb7b0552869ad70157166676db75ed8181f8104ea91cf9d0b42"
+checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5"
 dependencies = [
 "base64 0.22.1",
 "chrono",
 "hex 0.4.3",
 "indexmap 1.9.3",
-"indexmap 2.9.0",
+"indexmap 2.10.0",
-"schemars",
+"schemars 0.9.0",
+"schemars 1.0.3",
 "serde",
 "serde_derive",
 "serde_json",
@@ -7839,9 +7903,9 @@ dependencies = [
 [[package]]
 name = "serde_with_macros"
-version = "3.13.0"
+version = "3.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81679d9ed988d5e9a5e6531dc3f2c28efbd639cbd1dfb628df08edea6004da77"
+checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f"
 dependencies = [
 "darling",
 "proc-macro2",
@@ -7855,7 +7919,7 @@ version = "0.9.34+deprecated"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
 dependencies = [
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "itoa",
 "ryu",
 "serde",
@@ -8201,7 +8265,7 @@ dependencies = [
 "futures-util",
 "hashbrown 0.15.4",
 "hashlink",
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "log",
 "memchr",
 "once_cell",
@@ -8677,6 +8741,15 @@ dependencies = [
 "unic-segment",
 ]

+[[package]]
+name = "termcolor"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
+dependencies = [
+"winapi-util",
+]
+
 [[package]]
 name = "testcontainers"
 version = "0.24.0"
@@ -8857,17 +8930,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 [[package]]
 name = "tokio"
-version = "1.45.1"
+version = "1.46.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
+checksum = "1140bb80481756a8cbe10541f37433b459c5aa1e727b4c020fbfebdc25bf3ec4"
 dependencies = [
 "backtrace",
 "bytes",
+"io-uring",
 "libc",
 "mio 1.0.4",
 "parking_lot 0.12.4",
 "pin-project-lite",
 "signal-hook-registry",
+"slab",
 "socket2",
 "tokio-macros",
 "windows-sys 0.52.0",
@@ -9005,7 +9080,7 @@ version = "0.22.27"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
 dependencies = [
-"indexmap 2.9.0",
+"indexmap 2.10.0",
 "serde",
 "serde_spanned",
 "toml_datetime",
@@ -9183,6 +9258,27 @@ dependencies = [
 "tracing-serde",
 ]

+[[package]]
+name = "tracing-test"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68"
+dependencies = [
+"tracing-core",
+"tracing-subscriber",
+"tracing-test-macro",
+]
+
+[[package]]
+name = "tracing-test-macro"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568"
+dependencies = [
+"quote",
+"syn 2.0.104",
+]
+
 [[package]]
 name = "tracing-tree"
 version = "0.4.0"
@@ -9201,6 +9297,28 @@ version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"

+[[package]]
+name = "ts-rs"
+version = "11.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ef1b7a6d914a34127ed8e1fa927eb7088903787bcded4fa3eef8f85ee1568be"
+dependencies = [
+"thiserror 2.0.12",
+"ts-rs-macros",
+]
+
+[[package]]
+name = "ts-rs-macros"
+version = "11.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e9d4ed7b4c18cc150a6a0a1e9ea1ecfa688791220781af6e119f9599a8502a0a"
+dependencies = [
+"proc-macro2",
+"quote",
+"syn 2.0.104",
+"termcolor",
+]
+
 [[package]]
 name = "tungstenite"
 version = "0.26.2"
@@ -9867,9 +9985,9 @@ dependencies = [
 [[package]]
 name = "windows-registry"
-version = "0.5.2"
+version = "0.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3bab093bdd303a1240bb99b8aba8ea8a69ee19d34c9e2ef9594e708a4878820"
+checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e"
 dependencies = [
 "windows-link",
 "windows-result",
@@ -10168,9 +10286,9 @@ dependencies = [
 [[package]]
 name = "xattr"
-version = "1.5.0"
+version = "1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e"
+checksum = "af3a19837351dc82ba89f8a125e22a3c475f05aba604acc023d62b2739ae2909"
 dependencies = [
 "libc",
 "rustix 1.0.7",
@@ -10343,9 +10461,9 @@ dependencies = [
 [[package]]
 name = "zune-jpeg"
-version = "0.4.18"
+version = "0.4.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7384255a918371b5af158218d131530f694de9ad3815ebdd0453a940485cb0fa"
+checksum = "2c9e525af0a6a658e031e95f14b7f889976b74a11ba0eca5a5fc9ac8a1c43a6a"
 dependencies = [
 "zune-core",
 ]


@@ -13,9 +13,6 @@ members = [
 resolver = "2"

 [profile.dev]
-debug = 0
-# https://github.com/rust-lang/rust/issues/141540
-incremental = false
 # [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
 # codegen-backend = "cranelift"
@@ -34,22 +31,22 @@ reqwest = { version = "0.12.20", features = [
 "macos-system-configuration",
 "cookies",
 ] }
-moka = "0.12"
+moka = "0.12.10"
-futures = "0.3"
+futures = "0.3.31"
-quirks_path = "0.1"
+quirks_path = "0.1.1"
-snafu = { version = "0.8", features = ["futures"] }
+snafu = { version = "0.8.0", features = ["futures"] }
-testcontainers = { version = "0.24" }
+testcontainers = { version = "0.24.0" }
 testcontainers-modules = { version = "0.12.1" }
 testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
-serde = { version = "1", features = ["derive"] }
+serde = { version = "1.0.219", features = ["derive"] }
-tokio = { version = "1.45.1", features = [
+tokio = { version = "1.46", features = [
 "macros",
 "fs",
 "rt-multi-thread",
 "signal",
 ] }
-serde_json = "1"
+serde_json = "1.0.140"
-async-trait = "0.1"
+async-trait = "0.1.88"
 tracing = "0.1"
 url = "2.5.2"
 anyhow = "1"
@@ -67,7 +64,7 @@ convert_case = "0.8"
 color-eyre = "0.6.5"
 inquire = "0.7.5"
 image = "0.25.6"
-uuid = { version = "1.6.0", features = ["v4"] }
+uuid = { version = "1.6.0", features = ["v7"] }
 maplit = "1.0.2"
 once_cell = "1.20.2"
 rand = "0.9.1"
@@ -80,11 +77,12 @@ http = "1.2.0"
 async-stream = "0.3.6"
 serde_variant = "0.1.3"
 tracing-appender = "0.2.3"
-clap = "4.5.40"
+clap = "4.5.41"
 ipnetwork = "0.21.1"
 typed-builder = "0.21.0"
-nanoid = "0.4.0"
 webp = "0.3.0"

 [patch.crates-io]
-seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }
+seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "292cdd2" }


@@ -0,0 +1,8 @@
+```x-forwarded.json
+{
+"X-Forwarded-Host": "konobangu.com",
+"X-Forwarded-Proto": "https"
+}
+```
+
+^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/$1
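This whistle rule proxies konobangu.com traffic to the local dev server on 127.0.0.1:5001, injecting the inline `x-forwarded.json` headers so the backend still sees the public host and scheme. A hedged sketch of the receiving side of that contract (the helper and its fallbacks are hypothetical, not part of this diff):

```rust
use axum::http::HeaderMap;

// Hypothetical helper: recover the public origin from the injected headers,
// falling back to the local listen address when they are missing.
fn public_origin(headers: &HeaderMap) -> String {
    let proto = headers
        .get("x-forwarded-proto")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("http");
    let host = headers
        .get("x-forwarded-host")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("127.0.0.1:5001");
    format!("{proto}://{host}")
}
```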


@@ -1 +1 @@
-{"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""}
+{"filesOrder":["konobangu","konobangu-prod","mikan-doppel"],"selectedList":["mikan-doppel","konobangu"],"disabledDefalutRules":true,"defalutRules":""}


@@ -13,7 +13,7 @@ name = "mikan_doppel"
 path = "src/bin/mikan_doppel.rs"

 [dependencies]
-recorder = { workspace = true }
+recorder = { workspace = true, features = ["playground"] }
 tokio = { workspace = true }
 tracing-subscriber = { workspace = true }
 tracing = { workspace = true }


@@ -1,17 +0,0 @@
-HOST="konobangu.com"
-DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
-STORAGE_DATA_DIR = "./data"
-AUTH_TYPE = "basic" # or oidc
-BASIC_USER = "konobangu"
-BASIC_PASSWORD = "konobangu"
-# OIDC_ISSUER="https://auth.logto.io/oidc"
-# OIDC_AUDIENCE = "https://konobangu.com/api"
-# OIDC_CLIENT_ID = "client_id"
-# OIDC_CLIENT_SECRET = "client_secret" # optional
-# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
-# OIDC_EXTRA_CLAIM_KEY = ""
-# OIDC_EXTRA_CLAIM_VALUE = ""
-# MIKAN_PROXY = ""
-# MIKAN_PROXY_AUTH_HEADER = ""
-# MIKAN_NO_PROXY = ""
-# MIKAN_PROXY_ACCEPT_INVALID_CERTS = "true"


@@ -1,17 +0,0 @@
-HOST="konobangu.com"
-DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
-STORAGE_DATA_DIR = "./data"
-AUTH_TYPE = "basic" # or oidc
-BASIC_USER = "konobangu"
-BASIC_PASSWORD = "konobangu"
-# OIDC_ISSUER="https://auth.logto.io/oidc"
-# OIDC_AUDIENCE = "https://konobangu.com/api"
-# OIDC_CLIENT_ID = "client_id"
-# OIDC_CLIENT_SECRET = "client_secret" # optional
-# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
-# OIDC_EXTRA_CLAIM_KEY = ""
-# OIDC_EXTRA_CLAIM_VALUE = ""
-MIKAN_PROXY = "http://127.0.0.1:8899"
-# MIKAN_PROXY_AUTH_HEADER = ""
-# MIKAN_NO_PROXY = ""
-MIKAN_PROXY_ACCEPT_INVALID_CERTS = true


@@ -0,0 +1,18 @@
+LOGGER__LEVEL = "debug"
+DATABASE__URI = "postgres://konobangu:konobangu@localhost:5432/konobangu"
+AUTH__AUTH_TYPE = "basic"
+AUTH__BASIC_USER = "konobangu"
+AUTH__BASIC_PASSWORD = "konobangu"
+# AUTH__OIDC_ISSUER = "https://auth.logto.io/oidc"
+# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
+# AUTH__OIDC_CLIENT_ID = "client_id"
+# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
+# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
+# AUTH__OIDC_EXTRA_CLAIM_KEY = ""
+# AUTH__OIDC_EXTRA_CLAIM_VALUE = ""
+MIKAN__HTTP_CLIENT__PROXY__ACCEPT_INVALID_CERTS = true
+MIKAN__HTTP_CLIENT__PROXY__SERVER = "http://127.0.0.1:8899"
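The double-underscore names in this new dotenv file line up with the figment `Env` provider that `load_config` gains later in this diff (`Env::prefixed("").split("__").lowercase(true)`): each `__` becomes one level of config nesting, so `MIKAN__HTTP_CLIENT__PROXY__SERVER` addresses `mikan.http_client.proxy.server`. A minimal sketch of that overlay, assuming it mirrors the config.rs change below:

```rust
use figment::{Figment, providers::Env};

// Sketch only: overlay process environment variables onto an existing figment,
// splitting variable names on "__" so they address nested config sections.
fn env_overlay(fig: Figment) -> Figment {
    fig.merge(Env::prefixed("").split("__").lowercase(true))
}
```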


@@ -0,0 +1,15 @@
+HOST="konobangu.com"
+DATABASE__URI = "postgres://konobangu:konobangu@localhost:5432/konobangu"
+AUTH__AUTH_TYPE = "basic" # or oidc
+AUTH__BASIC_USER = "konobangu"
+AUTH__BASIC_PASSWORD = "konobangu"
+# AUTH__OIDC_ISSUER="https://auth.logto.io/oidc"
+# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
+# AUTH__OIDC_CLIENT_ID = "client_id"
+# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
+# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
+# AUTH__OIDC_EXTRA_CLAIM_KEY = ""
+# AUTH__OIDC_EXTRA_CLAIM_VALUE = ""


@@ -28,4 +28,6 @@ dist/
 temp/*
 !temp/.gitkeep
 tests/resources/mikan/classic_episodes/*/*
 !tests/resources/mikan/classic_episodes/parquet/tiny.parquet
+webui/
+data/


@@ -6,7 +6,7 @@ edition = "2024"

 [features]
 default = ["jxl"]
-playground = ["dep:inquire", "dep:color-eyre", "dep:polars"]
+playground = ["dep:inquire", "dep:color-eyre", "dep:polars", "test-utils"]
 testcontainers = [
 "dep:testcontainers",
 "dep:testcontainers-modules",
@@ -15,6 +15,7 @@ testcontainers = [
 "testcontainers-modules/postgres",
 ]
 jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
+test-utils = []

 [lib]
 name = "recorder"
@@ -96,7 +97,6 @@ tracing-appender = { workspace = true }
 clap = { workspace = true }
 ipnetwork = { workspace = true }
 typed-builder = { workspace = true }
-nanoid = { workspace = true }
 webp = { workspace = true }

 sea-orm = { version = "1.1", features = [
@@ -108,8 +108,8 @@ sea-orm = { version = "1.1", features = [
 ] }
 figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
 sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
-rss = "2"
+rss = { version = "2", features = ["builders", "with-serde"] }
-fancy-regex = "0.14"
+fancy-regex = "0.15"
 lightningcss = "1.0.0-alpha.66"
 html-escape = "0.2.13"
 opendal = { version = "0.53", features = ["default", "services-fs"] }
@@ -125,8 +125,9 @@ seaography = { version = "1.1", features = [
 "with-bigdecimal",
 "with-postgres-array",
 "with-json-as-scalar",
+"with-custom-as-json",
 ] }
-tower = "0.5.2"
+tower = { version = "0.5.2", features = ["util"] }
 tower-http = { version = "0.6", features = [
 "trace",
 "catch-panic",
@@ -159,11 +160,22 @@ polars = { version = "0.49.1", features = [
 "lazy",
 "diagonal_concat",
 ], optional = true }
+quick-xml = { version = "0.38", features = [
+"serialize",
+"serde-types",
+"serde",
+] }
+croner = "2.2.0"
+ts-rs = "11.0.1"
+secrecy = { version = "0.10.3", features = ["serde"] }
+paste = "1.0.15"
+chrono-tz = "0.10.3"

 [dev-dependencies]
 inquire = { workspace = true }
 color-eyre = { workspace = true }
 serial_test = "3"
 insta = { version = "1", features = ["redactions", "toml", "filters"] }
+rstest = "0.25"
 ctor = "0.4.0"
+tracing-test = "0.2.5"
-rstest = "0.25"


@@ -0,0 +1,6 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { SyncOneSubscriptionFeedsFullTaskInput } from "./SyncOneSubscriptionFeedsFullTaskInput";
+import type { SyncOneSubscriptionFeedsIncrementalTaskInput } from "./SyncOneSubscriptionFeedsIncrementalTaskInput";
+import type { SyncOneSubscriptionSourcesTaskInput } from "./SyncOneSubscriptionSourcesTaskInput";
+
+export type SubscriberTaskInput = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTaskInput | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTaskInput | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTaskInput;


@@ -0,0 +1,6 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { SyncOneSubscriptionFeedsFullTask } from "./SyncOneSubscriptionFeedsFullTask";
+import type { SyncOneSubscriptionFeedsIncrementalTask } from "./SyncOneSubscriptionFeedsIncrementalTask";
+import type { SyncOneSubscriptionSourcesTask } from "./SyncOneSubscriptionSourcesTask";
+
+export type SubscriberTaskType = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTask | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTask | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTask;


@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionFeedsFullTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };


@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionFeedsFullTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };


@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionFeedsIncrementalTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };


@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionFeedsIncrementalTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };


@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionSourcesTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };


@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionSourcesTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };


@@ -1,4 +1,4 @@
-use std::time::Duration;
+use std::{str::FromStr, time::Duration};

 use color_eyre::{Result, eyre::OptionExt};
 use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
@@ -6,7 +6,8 @@ use inquire::{Password, Text, validator::Validation};
 use recorder::{
 crypto::UserPassCredential,
 extract::mikan::{
-MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
+MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
+build_mikan_bangumi_expand_subscribed_url,
 extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
 extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
 },
@@ -190,10 +191,10 @@
 );
 String::from_utf8(bangumi_rss_doppel_path.read()?)?
 };
-let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
+let rss_items = MikanRssRoot::from_str(&bangumi_rss_data)?.channel.items;
 rss_items
 .into_iter()
-.map(MikanRssEpisodeItem::try_from)
+.map(MikanRssItemMeta::try_from)
 .collect::<Result<Vec<_>, _>>()
 }?;
 for rss_item in rss_items {


@@ -1,10 +1,10 @@
-use std::time::Duration;
+use std::{str::FromStr, time::Duration};

 use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
 use recorder::{
 errors::RecorderResult,
 extract::mikan::{
-MikanClient, MikanConfig, MikanRssEpisodeItem,
+MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
 extract_mikan_episode_meta_from_episode_homepage_html,
 },
 test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
@@ -41,12 +41,12 @@ async fn main() -> RecorderResult<()> {
 let mikan_base_url = mikan_scrape_client.base_url().clone();
 tracing::info!("Scraping subscriber subscription...");
 let subscriber_subscription =
-fs::read("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
+fs::read_to_string("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
-let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
+let channel = MikanRssRoot::from_str(&subscriber_subscription)?.channel;
-let rss_items: Vec<MikanRssEpisodeItem> = channel
+let rss_items: Vec<MikanRssItemMeta> = channel
 .items
 .into_iter()
-.map(MikanRssEpisodeItem::try_from)
+.map(MikanRssItemMeta::try_from)
 .collect::<Result<Vec<_>, _>>()?;
 for rss_item in rss_items {
 let episode_homepage_meta = {
@@ -150,11 +150,11 @@
 String::from_utf8(bangumi_rss_doppel_path.read()?)?
 };
-let channel = rss::Channel::read_from(bangumi_rss_data.as_bytes())?;
-let rss_items: Vec<MikanRssEpisodeItem> = channel
+let rss_items: Vec<MikanRssItemMeta> = MikanRssRoot::from_str(&bangumi_rss_data)?
+.channel
 .items
 .into_iter()
-.map(MikanRssEpisodeItem::try_from)
+.map(MikanRssItemMeta::try_from)
 .collect::<Result<Vec<_>, _>>()?;
 for rss_item in rss_items {
 {


@@ -0,0 +1,6 @@
+{
+"name": "recorder",
+"version": "0.0.1",
+"private": true,
+"type": "module"
+}


@@ -4,8 +4,8 @@
 enable = true
 # Enable pretty backtrace (sets RUST_BACKTRACE=1)
 pretty_backtrace = true
-level = "info"
 # Log level, options: trace, debug, info, warn or error.
+level = "debug"
 # Define the logging format. options: compact, pretty or Json
 format = "compact"
 # By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries
@@ -77,7 +77,7 @@ max_connections = 10
 auto_migrate = true

 [storage]
-data_dir = '{{ get_env(name="STORAGE_DATA_DIR", default="./data") }}'
+data_dir = './data'

 [mikan]
 base_url = "https://mikanani.me/"
@@ -89,26 +89,6 @@ leaky_bucket_initial_tokens = 1
 leaky_bucket_refill_tokens = 1
 leaky_bucket_refill_interval = 500

-[mikan.http_client.proxy]
-server = '{{ get_env(name="MIKAN_PROXY", default = "") }}'
-auth_header = '{{ get_env(name="MIKAN_PROXY_AUTH_HEADER", default = "") }}'
-no_proxy = '{{ get_env(name="MIKAN_NO_PROXY", default = "") }}'
-accept_invalid_certs = '{{ get_env(name="MIKAN_PROXY_ACCEPT_INVALID_CERTS", default = "false") }}'
-
-[auth]
-auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
-basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
-basic_password = '{{ get_env(name="BASIC_PASSWORD", default = "konobangu") }}'
-oidc_issuer = '{{ get_env(name="OIDC_ISSUER", default = "") }}'
-oidc_audience = '{{ get_env(name="OIDC_AUDIENCE", default = "") }}'
-oidc_client_id = '{{ get_env(name="OIDC_CLIENT_ID", default = "") }}'
-oidc_client_secret = '{{ get_env(name="OIDC_CLIENT_SECRET", default = "") }}'
-oidc_extra_scopes = '{{ get_env(name="OIDC_EXTRA_SCOPES", default = "") }}'
-oidc_extra_claim_key = '{{ get_env(name="OIDC_EXTRA_CLAIM_KEY", default = "") }}'
-oidc_extra_claim_value = '{{ get_env(name="OIDC_EXTRA_CLAIM_VALUE", default = "") }}'

 [graphql]
 # depth_limit = inf
 # complexity_limit = inf


@@ -72,6 +72,11 @@ impl AppBuilder {
 }

 pub async fn build(self) -> RecorderResult<App> {
+if self.working_dir != "." {
+std::env::set_current_dir(&self.working_dir)?;
+println!("set current dir to working dir: {}", self.working_dir);
+}
+
 self.load_env().await?;

 let config = self.load_config().await?;
@@ -86,22 +91,12 @@
 }

 pub async fn load_env(&self) -> RecorderResult<()> {
-AppConfig::load_dotenv(
-&self.environment,
-&self.working_dir,
-self.dotenv_file.as_deref(),
-)
-.await?;
+AppConfig::load_dotenv(&self.environment, self.dotenv_file.as_deref()).await?;
 Ok(())
 }

 pub async fn load_config(&self) -> RecorderResult<AppConfig> {
-let config = AppConfig::load_config(
-&self.environment,
-&self.working_dir,
-self.config_file.as_deref(),
-)
-.await?;
+let config = AppConfig::load_config(&self.environment, self.config_file.as_deref()).await?;
 Ok(config)
 }
@@ -136,11 +131,12 @@
 }

 pub fn working_dir_from_manifest_dir(self) -> Self {
-let manifest_dir = if cfg!(debug_assertions) || cfg!(test) {
-env!("CARGO_MANIFEST_DIR")
-} else {
-"./apps/recorder"
-};
+#[cfg(any(test, debug_assertions, feature = "test-utils"))]
+let manifest_dir = env!("CARGO_MANIFEST_DIR");
+
+#[cfg(not(any(test, debug_assertions, feature = "test-utils")))]
+let manifest_dir = "./apps/recorder";
+
 self.working_dir(manifest_dir.to_string())
 }
 }


@@ -1,8 +1,13 @@
-use std::{fs, path::Path, str};
+use std::{
+collections::HashMap,
+fs,
+path::Path,
+str::{self, FromStr},
+};

 use figment::{
 Figment, Provider,
-providers::{Format, Json, Toml, Yaml},
+providers::{Env, Format, Json, Toml, Yaml},
 };
 use itertools::Itertools;
 use serde::{Deserialize, Serialize};
@@ -55,8 +60,8 @@ impl AppConfig {
 format!(".{}.local", environment.full_name()),
 format!(".{}.local", environment.short_name()),
 String::from(".local"),
-environment.full_name().to_string(),
-environment.short_name().to_string(),
+format!(".{}", environment.full_name()),
+format!(".{}", environment.short_name()),
 String::from(""),
 ]
 }
@@ -65,6 +70,102 @@
 Toml::string(DEFAULT_CONFIG_MIXIN)
 }

+fn build_enhanced_tera_engine() -> tera::Tera {
+let mut tera = tera::Tera::default();
+tera.register_filter(
+"cast_to",
+|value: &tera::Value,
+args: &HashMap<String, tera::Value>|
+-> tera::Result<tera::Value> {
+let target_type = args
+.get("type")
+.and_then(|v| v.as_str())
+.ok_or_else(|| tera::Error::msg("invalid target type: should be string"))?;
+let target_type = TeraCastToFilterType::from_str(target_type)
+.map_err(|e| tera::Error::msg(format!("invalid target type: {e}")))?;
+let input_str = value.as_str().unwrap_or("");
+match target_type {
+TeraCastToFilterType::Boolean => {
+let is_true = matches!(input_str.to_lowercase().as_str(), "true" | "1");
+let is_false = matches!(input_str.to_lowercase().as_str(), "false" | "0");
+if is_true {
+Ok(tera::Value::Bool(true))
+} else if is_false {
+Ok(tera::Value::Bool(false))
+} else {
+Err(tera::Error::msg(
+"target type is bool but value is not a boolean like true, false, \
+1, 0",
+))
+}
+}
+TeraCastToFilterType::Integer => {
+let parsed = input_str.parse::<i64>().map_err(|e| {
+tera::Error::call_filter("invalid integer".to_string(), e)
+})?;
+Ok(tera::Value::Number(serde_json::Number::from(parsed)))
+}
+TeraCastToFilterType::Unsigned => {
+let parsed = input_str.parse::<u64>().map_err(|e| {
+tera::Error::call_filter("invalid unsigned integer".to_string(), e)
+})?;
+Ok(tera::Value::Number(serde_json::Number::from(parsed)))
+}
+TeraCastToFilterType::Float => {
+let parsed = input_str.parse::<f64>().map_err(|e| {
+tera::Error::call_filter("invalid float".to_string(), e)
+})?;
+Ok(tera::Value::Number(
+serde_json::Number::from_f64(parsed).ok_or_else(|| {
+tera::Error::msg("failed to convert f64 to serde_json::Number")
+})?,
+))
+}
+TeraCastToFilterType::String => Ok(tera::Value::String(input_str.to_string())),
+TeraCastToFilterType::Null => Ok(tera::Value::Null),
+}
+},
+);
+tera.register_filter(
+"try_auto_cast",
+|value: &tera::Value,
+_args: &HashMap<String, tera::Value>|
+-> tera::Result<tera::Value> {
+let input_str = value.as_str().unwrap_or("");
+if input_str == "null" {
+return Ok(tera::Value::Null);
+}
+if matches!(input_str, "true" | "false") {
+return Ok(tera::Value::Bool(input_str == "true"));
+}
+if let Ok(parsed) = input_str.parse::<i64>() {
+return Ok(tera::Value::Number(serde_json::Number::from(parsed)));
+}
+if let Ok(parsed) = input_str.parse::<u64>() {
+return Ok(tera::Value::Number(serde_json::Number::from(parsed)));
+}
+if let Ok(parsed) = input_str.parse::<f64>() {
+return Ok(tera::Value::Number(
+serde_json::Number::from_f64(parsed).ok_or_else(|| {
+tera::Error::msg("failed to convert f64 to serde_json::Number")
+})?,
+));
+}
+Ok(tera::Value::String(input_str.to_string()))
+},
+);
+tera
+}
+
 pub fn merge_provider_from_file(
 fig: Figment,
 filepath: impl AsRef<Path>,
@@ -72,11 +173,9 @@ ) -> RecorderResult<Figment> {
 let content = fs::read_to_string(filepath)?;

-let rendered = tera::Tera::one_off(
-&content,
-&tera::Context::from_value(serde_json::json!({}))?,
-false,
-)?;
+let mut tera_engine = AppConfig::build_enhanced_tera_engine();
+let rendered =
+tera_engine.render_str(&content, &tera::Context::from_value(serde_json::json!({}))?)?;

 Ok(match ext {
 ".toml" => fig.merge(Toml::string(&rendered)),
@@ -88,13 +187,12 @@

 pub async fn load_dotenv(
 environment: &Environment,
-working_dir: &str,
 dotenv_file: Option<&str>,
 ) -> RecorderResult<()> {
 let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
 vec![dotenv_file]
 } else {
-vec![Some(working_dir)]
+vec![Some(".")]
 };

 let priority_suffix = &AppConfig::priority_suffix(environment);
@@ -111,11 +209,16 @@
 for f in try_filenames.iter() {
 let p = try_dotenv_file_or_dir_path.join(f);
 if p.exists() && p.is_file() {
+println!("Loading dotenv file: {}", p.display());
 dotenvy::from_path(p)?;
 break;
 }
 }
 } else if try_dotenv_file_or_dir_path.is_file() {
+println!(
+"Loading dotenv file: {}",
+try_dotenv_file_or_dir_path.display()
+);
 dotenvy::from_path(try_dotenv_file_or_dir_path)?;
 break;
 }
@@ -127,13 +230,12 @@

 pub async fn load_config(
 environment: &Environment,
-working_dir: &str,
 config_file: Option<&str>,
 ) -> RecorderResult<AppConfig> {
 let try_config_file_or_dirs = if config_file.is_some() {
 vec![config_file]
 } else {
-vec![Some(working_dir)]
+vec![Some(".")]
 };

 let allowed_extensions = &AppConfig::allowed_extension();
@@ -159,6 +261,7 @@
 let p = try_config_file_or_dir_path.join(f);
 if p.exists() && p.is_file() {
 fig = AppConfig::merge_provider_from_file(fig, &p, ext)?;
+println!("Loaded config file: {}", p.display());
 break;
 }
 }
@@ -169,13 +272,52 @@
 {
 fig =
 AppConfig::merge_provider_from_file(fig, try_config_file_or_dir_path, ext)?;
+println!(
+"Loaded config file: {}",
+try_config_file_or_dir_path.display()
+);
 break;
 }
 }
 }

+fig = fig.merge(Env::prefixed("").split("__").lowercase(true));
+
 let app_config: AppConfig = fig.extract()?;

 Ok(app_config)
 }
 }
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+enum TeraCastToFilterType {
+#[serde(alias = "str")]
+String,
+#[serde(alias = "bool")]
+Boolean,
+#[serde(alias = "int")]
+Integer,
+#[serde(alias = "uint")]
+Unsigned,
+#[serde(alias = "float")]
+Float,
+#[serde(alias = "null")]
+Null,
+}
+
+impl FromStr for TeraCastToFilterType {
+type Err = String;
+
+fn from_str(s: &str) -> Result<Self, Self::Err> {
+match s {
+"string" | "str" => Ok(TeraCastToFilterType::String),
+"boolean" | "bool" => Ok(TeraCastToFilterType::Boolean),
+"integer" | "int" => Ok(TeraCastToFilterType::Integer),
+"unsigned" | "uint" => Ok(TeraCastToFilterType::Unsigned),
+"float" => Ok(TeraCastToFilterType::Float),
+"null" => Ok(TeraCastToFilterType::Null),
+_ => Err(format!("invalid target type: {s}")),
+}
+}
+}
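A minimal sketch of how the `cast_to` filter registered above behaves when a template is rendered the way `merge_provider_from_file` now does. `get_env` is Tera's built-in function; the variable name and the assert are illustrative, and visibility of the private engine builder is ignored for the sake of the example:

```rust
fn cast_to_demo() -> tera::Result<()> {
    // Uses the engine built above; `UNSET_VAR` is assumed absent, so the
    // default "1" flows into `cast_to`, which accepts "1"/"true" as booleans.
    let mut engine = AppConfig::build_enhanced_tera_engine();
    let rendered = engine.render_str(
        r#"accept_invalid_certs = {{ get_env(name="UNSET_VAR", default="1") | cast_to(type="bool") }}"#,
        &tera::Context::new(),
    )?;
    assert_eq!(rendered, "accept_invalid_certs = true");
    Ok(())
}
```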


@ -1,11 +1,13 @@
use std::{net::SocketAddr, sync::Arc}; use std::{net::SocketAddr, sync::Arc};
use axum::Router; use axum::{Router, middleware::from_fn_with_state};
use tokio::{net::TcpSocket, signal}; use tokio::{net::TcpSocket, signal};
use tower_http::services::{ServeDir, ServeFile};
use tracing::instrument; use tracing::instrument;
use super::{builder::AppBuilder, context::AppContextTrait}; use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{ use crate::{
auth::webui_auth_middleware,
errors::{RecorderError, RecorderResult}, errors::{RecorderError, RecorderResult},
web::{ web::{
controller::{self, core::ControllerTrait}, controller::{self, core::ControllerTrait},
@ -58,13 +60,19 @@ impl App {
controller::oidc::create(context.clone()), controller::oidc::create(context.clone()),
controller::metadata::create(context.clone()), controller::metadata::create(context.clone()),
controller::r#static::create(context.clone()), controller::r#static::create(context.clone()),
controller::feeds::create(context.clone()), controller::feeds::create(context.clone())
)?; )?;
for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] { for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
router = c.apply_to(router); router = c.apply_to(router);
} }
router = router
.fallback_service(
ServeDir::new("webui").not_found_service(ServeFile::new("webui/index.html")),
)
.layer(from_fn_with_state(context.clone(), webui_auth_middleware));
let middlewares = default_middleware_stack(context.clone()); let middlewares = default_middleware_stack(context.clone());
for mid in middlewares { for mid in middlewares {
if mid.is_enabled() { if mid.is_enabled() {
@ -99,26 +107,12 @@ impl App {
Ok::<(), RecorderError>(()) Ok::<(), RecorderError>(())
}, },
async { async {
{ task.run_with_signal(if graceful_shutdown {
let monitor = task.setup_monitor().await?; Some(Self::shutdown_signal)
if graceful_shutdown { } else {
monitor None
.run_with_signal(async move { })
Self::shutdown_signal().await; .await?;
tracing::info!("apalis shutting down...");
Ok(())
})
.await?;
} else {
monitor.run().await?;
}
}
Ok::<(), RecorderError>(())
},
async {
let listener = task.setup_listener().await?;
listener.listen().await?;
Ok::<(), RecorderError>(()) Ok::<(), RecorderError>(())
} }
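The apalis monitor/listener plumbing now hides behind task.run_with_signal(...). A standalone mock of the shape the new call site implies (names reuse the diff's; the body is a placeholder, not the actual implementation):

use std::future::Future;

pub struct TaskService;

impl TaskService {
    pub async fn run_with_signal<F, Fut>(&self, signal: Option<F>) -> Result<(), String>
    where
        F: FnOnce() -> Fut,
        Fut: Future<Output = ()>,
    {
        if let Some(shutdown) = signal {
            // The real service would race the monitor and listener against this
            // future; the mock only awaits the shutdown signal.
            shutdown().await;
        }
        Ok(())
    }
}

Passing Some(Self::shutdown_signal) type-checks because an async fn item is an FnOnce() returning a Future.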

View File

@ -7,7 +7,10 @@ use axum::{
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use crate::{app::AppContextTrait, auth::AuthServiceTrait}; use crate::{
app::AppContextTrait,
auth::{AuthService, AuthServiceTrait},
};
pub async fn auth_middleware( pub async fn auth_middleware(
State(ctx): State<Arc<dyn AppContextTrait>>, State(ctx): State<Arc<dyn AppContextTrait>>,
@ -38,3 +41,37 @@ pub async fn auth_middleware(
response response
} }
pub async fn webui_auth_middleware(
State(ctx): State<Arc<dyn AppContextTrait>>,
request: Request,
next: Next,
) -> Response {
if (!request.uri().path().starts_with("/api"))
&& let AuthService::Basic(auth_service) = ctx.auth()
{
let (mut parts, body) = request.into_parts();
let mut response = match auth_service
.extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
.await
{
Ok(auth_user_info) => {
let mut request = Request::from_parts(parts, body);
request.extensions_mut().insert(auth_user_info);
next.run(request).await
}
Err(auth_error) => auth_error.into_response(),
};
if let Some(header_value) = auth_service.www_authenticate_header_value() {
response
.headers_mut()
.insert(header::WWW_AUTHENTICATE, header_value);
};
response
} else {
next.run(request).await
}
}
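For non-/api routes the middleware authenticates against Basic auth and re-attaches a WWW-Authenticate challenge so browsers show the native credentials dialog on 401. A hedged sketch of what www_authenticate_header_value plausibly returns (the realm string is an assumption):

use axum::http::HeaderValue;

fn www_authenticate_header_value() -> Option<HeaderValue> {
    // Attached to the response above; triggers the browser login prompt
    // when the Web UI is opened without credentials.
    Some(HeaderValue::from_static("Basic realm=\"webui\", charset=\"UTF-8\""))
}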

View File

@ -7,5 +7,5 @@ pub mod service;
pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig}; pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig};
pub use errors::AuthError; pub use errors::AuthError;
pub use middleware::auth_middleware; pub use middleware::{auth_middleware, webui_auth_middleware};
pub use service::{AuthService, AuthServiceTrait, AuthUserInfo}; pub use service::{AuthService, AuthServiceTrait, AuthUserInfo};

View File

@ -21,7 +21,6 @@ use openidconnect::{
OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse, OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse,
core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata}, core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
}; };
use sea_orm::DbErr;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use snafu::ResultExt; use snafu::ResultExt;
@ -338,9 +337,9 @@ impl AuthServiceTrait for OidcAuthService {
} }
} }
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await { let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(RecorderError::DbError { Err(RecorderError::ModelEntityNotFound { .. }) => {
source: DbErr::RecordNotFound(..), crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
}) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await, }
r => r, r => r,
} }
.map_err(|e| { .map_err(|e| {

View File

@ -18,6 +18,12 @@ use crate::{
#[derive(Snafu, Debug)] #[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))] #[snafu(visibility(pub(crate)))]
pub enum RecorderError { pub enum RecorderError {
#[snafu(transparent)]
ChronoTzParseError { source: chrono_tz::ParseError },
#[snafu(transparent)]
SeaographyError { source: seaography::SeaographyError },
#[snafu(transparent)]
CronError { source: croner::errors::CronError },
#[snafu(display( #[snafu(display(
"HTTP {status} {reason}, source = {source:?}", "HTTP {status} {reason}, source = {source:?}",
status = status, status = status,
@ -49,6 +55,8 @@ pub enum RecorderError {
InvalidMethodError, InvalidMethodError,
#[snafu(display("Invalid header value"))] #[snafu(display("Invalid header value"))]
InvalidHeaderValueError, InvalidHeaderValueError,
#[snafu(transparent)]
QuickXmlDeserializeError { source: quick_xml::DeError },
#[snafu(display("Invalid header name"))] #[snafu(display("Invalid header name"))]
InvalidHeaderNameError, InvalidHeaderNameError,
#[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))] #[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
@ -118,8 +126,13 @@ pub enum RecorderError {
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))] #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr, source: OptDynErr,
}, },
#[snafu(display("Model Entity {entity} not found or not belong to subscriber"))] #[snafu(display("Model Entity {entity} not found or not belong to subscriber{}", (
ModelEntityNotFound { entity: Cow<'static, str> }, detail.as_ref().map(|detail| format!(" : {detail}"))).unwrap_or_default()
))]
ModelEntityNotFound {
entity: Cow<'static, str>,
detail: Option<String>,
},
#[snafu(transparent)] #[snafu(transparent)]
FetchError { source: FetchError }, FetchError { source: FetchError },
#[snafu(display("Credential3rdError: {message}, source = {source}"))] #[snafu(display("Credential3rdError: {message}, source = {source}"))]
@ -183,9 +196,17 @@ impl RecorderError {
} }
} }
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self { pub fn from_entity_not_found<E: sea_orm::EntityTrait>() -> Self {
Self::DbError { Self::ModelEntityNotFound {
source: sea_orm::DbErr::RecordNotFound(detail.to_string()), entity: std::any::type_name::<E::Model>().into(),
detail: None,
}
}
pub fn from_entity_not_found_detail<E: sea_orm::EntityTrait, T: ToString>(detail: T) -> Self {
Self::ModelEntityNotFound {
entity: std::any::type_name::<E::Model>().into(),
detail: Some(detail.to_string()),
} }
} }
} }
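A usage sketch for the new helpers (hypothetical call site; it mirrors the mikan client change further below):

// Map a missing row to the typed not-found error instead of a raw DbErr:
let credential = credential_3rd::Entity::find_by_id(credential_id)
    .one(ctx.db())
    .await?
    .ok_or_else(RecorderError::from_entity_not_found::<credential_3rd::Entity>)?;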
@ -250,9 +271,9 @@ impl IntoResponse for RecorderError {
) )
.into_response() .into_response()
} }
Self::ModelEntityNotFound { entity } => ( merr @ Self::ModelEntityNotFound { .. } => (
StatusCode::NOT_FOUND, StatusCode::NOT_FOUND,
Json::<StandardErrorResponse>(StandardErrorResponse::from(entity.to_string())), Json::<StandardErrorResponse>(StandardErrorResponse::from(merr.to_string())),
) )
.into_response(), .into_response(),
err => ( err => (
@ -294,4 +315,10 @@ impl From<http::method::InvalidMethod> for RecorderError {
} }
} }
impl From<async_graphql::Error> for RecorderError {
fn from(error: async_graphql::Error) -> Self {
seaography::SeaographyError::AsyncGraphQLError(error).into()
}
}
pub type RecorderResult<T> = Result<T, RecorderError>; pub type RecorderResult<T> = Result<T, RecorderError>;

View File

@ -1,38 +1,4 @@
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use quirks_path::Path;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};
use crate::{
errors::app_error::{RecorderError, RecorderResult},
extract::defs::SUBTITLE_LANG,
};
lazy_static! {
static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
vec![
FancyRegex::new(
r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)",
)
.unwrap(),
FancyRegex::new(
r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)",
)
.unwrap(),
FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
]
};
static ref GET_FANSUB_SPLIT_RE: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
static ref GET_FANSUB_FULL_MATCH_RE: Regex = Regex::new(r"^\d+$").unwrap();
static ref GET_SEASON_AND_TITLE_SUB_RE: Regex = Regex::new(r"([Ss]|Season )\d{1,3}").unwrap();
static ref GET_SEASON_AND_TITLE_FIND_RE: Regex =
Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct EpisodeEnclosureMeta { pub struct EpisodeEnclosureMeta {
@ -41,293 +7,3 @@ pub struct EpisodeEnclosureMeta {
pub pub_date: Option<DateTime<Utc>>, pub pub_date: Option<DateTime<Utc>>,
pub content_length: Option<i64>, pub content_length: Option<i64>,
} }
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeMediaMeta {
pub fansub: Option<String>,
pub title: String,
pub season: i32,
pub episode_index: i32,
pub extname: String,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeSubtitleMeta {
pub media: TorrentEpisodeMediaMeta,
pub lang: Option<String>,
}
fn get_fansub(group_and_title: &str) -> (Option<&str>, &str) {
let n = GET_FANSUB_SPLIT_RE
.split(group_and_title)
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>();
match (n.first(), n.get(1)) {
(None, None) => (None, ""),
(Some(n0), None) => (None, *n0),
(Some(n0), Some(n1)) => {
if GET_FANSUB_FULL_MATCH_RE.is_match(n1) {
(None, group_and_title)
} else {
(Some(*n0), *n1)
}
}
_ => unreachable!("vec contains n1 must contains n0"),
}
}
fn get_season_and_title(season_and_title: &str) -> (String, i32) {
let replaced_title = GET_SEASON_AND_TITLE_SUB_RE.replace_all(season_and_title, "");
let title = replaced_title.trim().to_string();
let season = GET_SEASON_AND_TITLE_FIND_RE
.captures(season_and_title)
.map(|m| {
m.get(2)
.unwrap_or_else(|| unreachable!("season regex should have 2 groups"))
.as_str()
.parse::<i32>()
.unwrap_or_else(|_| unreachable!("season should be a number"))
})
.unwrap_or(1);
(title, season)
}
fn get_subtitle_lang(media_name: &str) -> Option<&str> {
let media_name_lower = media_name.to_lowercase();
for (lang, lang_aliases) in SUBTITLE_LANG.iter() {
if lang_aliases
.iter()
.any(|alias| media_name_lower.contains(alias))
{
return Some(lang);
}
}
None
}
pub fn parse_episode_media_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> RecorderResult<TorrentEpisodeMediaMeta> {
let media_name = torrent_path
.file_name()
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {torrent_path}")
})?;
let mut match_obj = None;
for rule in TORRENT_EP_PARSE_RULES.iter() {
match_obj = if let Some(torrent_name) = torrent_name.as_ref() {
rule.captures(torrent_name)?
} else {
rule.captures(media_name)?
};
if match_obj.is_some() {
break;
}
}
if let Some(match_obj) = match_obj {
let group_season_and_title = match_obj
.get(1)
.whatever_context::<_, RecorderError>("should have 1 group")?
.as_str();
let (fansub, season_and_title) = get_fansub(group_season_and_title);
let (title, season) = if let Some(season) = season {
let (title, _) = get_season_and_title(season_and_title);
(title, season)
} else {
get_season_and_title(season_and_title)
};
let episode_index = match_obj
.get(2)
.whatever_context::<_, RecorderError>("should have 2 group")?
.as_str()
.parse::<i32>()
.unwrap_or(1);
let extname = torrent_path
.extension()
.map(|e| format!(".{e}"))
.unwrap_or_default();
Ok(TorrentEpisodeMediaMeta {
fansub: fansub.map(|s| s.to_string()),
title,
season,
episode_index,
extname,
})
} else {
whatever!(
"failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'",
torrent_path,
torrent_name
)
}
}
pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> RecorderResult<TorrentEpisodeSubtitleMeta> {
let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
let media_name = torrent_path
.file_name()
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {torrent_path}")
})?;
let lang = get_subtitle_lang(media_name);
Ok(TorrentEpisodeSubtitleMeta {
media: media_meta,
lang: lang.map(|s| s.to_string()),
})
}
#[cfg(test)]
mod tests {
use quirks_path::Path;
use super::{
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta, parse_episode_media_meta_from_torrent,
parse_episode_subtitle_meta_from_torrent,
};
#[test]
fn test_lilith_raws_media() {
test_torrent_ep_parser(
r#"[Lilith-Raws] Boku no Kokoro no Yabai Yatsu - 01 [Baha][WEB-DL][1080p][AVC AAC][CHT][MP4].mp4"#,
r#"{"fansub": "Lilith-Raws", "title": "Boku no Kokoro no Yabai Yatsu", "season": 1, "episode_index": 1, "extname": ".mp4"}"#,
);
}
#[test]
fn test_sakurato_media() {
test_torrent_ep_parser(
r#"[Sakurato] Tonikaku Kawaii S2 [03][AVC-8bit 1080p AAC][CHS].mp4"#,
r#"{"fansub": "Sakurato", "title": "Tonikaku Kawaii", "season": 2, "episode_index": 3, "extname": ".mp4"}"#,
)
}
#[test]
fn test_lolihouse_media() {
test_torrent_ep_parser(
r#"[SweetSub&LoliHouse] Heavenly Delusion - 08 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#,
r#"{"fansub": "SweetSub&LoliHouse", "title": "Heavenly Delusion", "season": 1, "episode_index": 8, "extname": ".mkv"}"#,
)
}
#[test]
fn test_sbsub_media() {
test_torrent_ep_parser(
r#"[SBSUB][CONAN][1082][V2][1080P][AVC_AAC][CHS_JP](C1E4E331).mp4"#,
r#"{"fansub": "SBSUB", "title": "CONAN", "season": 1, "episode_index": 1082, "extname": ".mp4"}"#,
)
}
#[test]
fn test_non_fansub_media() {
test_torrent_ep_parser(
r#"海盗战记 (2019) S04E11.mp4"#,
r#"{"title": "海盗战记 (2019)", "season": 4, "episode_index": 11, "extname": ".mp4"}"#,
)
}
#[test]
fn test_non_fansub_media_with_dirname() {
test_torrent_ep_parser(
r#"海盗战记/海盗战记 S01E01.mp4"#,
r#"{"title": "海盗战记", "season": 1, "episode_index": 1, "extname": ".mp4"}"#,
);
}
#[test]
fn test_non_fansub_tc_subtitle() {
test_torrent_ep_parser(
r#"海盗战记 S01E08.zh-tw.ass"#,
r#"{"media": { "title": "海盗战记", "season": 1, "episode_index": 8, "extname": ".ass" }, "lang": "zh-tw"}"#,
);
}
#[test]
fn test_non_fansub_sc_subtitle() {
test_torrent_ep_parser(
r#"海盗战记 S01E01.SC.srt"#,
r#"{ "media": { "title": "海盗战记", "season": 1, "episode_index": 1, "extname": ".srt" }, "lang": "zh" }"#,
)
}
#[test]
fn test_non_fansub_media_with_season_zero() {
test_torrent_ep_parser(
r#"水星的魔女(2022) S00E19.mp4"#,
r#"{"fansub": null,"title": "水星的魔女(2022)","season": 0,"episode_index": 19,"extname": ".mp4"}"#,
)
}
#[test]
fn test_shimian_fansub_media() {
test_torrent_ep_parser(
r#"【失眠搬运组】放学后失眠的你-Kimi wa Houkago Insomnia - 06 [bilibili - 1080p AVC1 CHS-JP].mp4"#,
r#"{"fansub": "失眠搬运组","title": "放学后失眠的你-Kimi wa Houkago Insomnia","season": 1,"episode_index": 6,"extname": ".mp4"}"#,
)
}
pub fn test_torrent_ep_parser(origin_name: &str, expected: &str) {
let extname = Path::new(origin_name)
.extension()
.map(|e| format!(".{e}"))
.unwrap_or_default()
.to_lowercase();
if extname == ".srt" || extname == ".ass" {
let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
let found_raw =
parse_episode_subtitle_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {
if found_raw.is_ok() {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
} else {
println!(
"expected {} and found {:#?} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
found_raw
)
}
}
assert_eq!(expected, found);
} else {
let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
let found_raw =
parse_episode_media_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {
if found_raw.is_ok() {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
} else {
println!(
"expected {} and found {:#?} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
found_raw
)
}
}
assert_eq!(expected, found);
}
}
}

View File

@ -1,34 +0,0 @@
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use regex::Regex;
const LANG_ZH_TW: &str = "zh-tw";
const LANG_ZH: &str = "zh";
const LANG_EN: &str = "en";
const LANG_JP: &str = "jp";
lazy_static! {
pub static ref SEASON_REGEX: Regex =
Regex::new(r"(S\|[Ss]eason\s+)(\d+)").expect("Invalid regex");
pub static ref TORRENT_PRASE_RULE_REGS: Vec<FancyRegex> = vec![
FancyRegex::new(
r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)"
)
.unwrap(),
FancyRegex::new(
r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)"
)
.unwrap(),
FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
];
pub static ref SUBTITLE_LANG: Vec<(&'static str, Vec<&'static str>)> = {
vec![
(LANG_ZH_TW, vec!["tc", "cht", "", "zh-tw"]),
(LANG_ZH, vec!["sc", "chs", "", "zh", "zh-cn"]),
(LANG_EN, vec!["en", "eng", ""]),
(LANG_JP, vec!["jp", "jpn", ""]),
]
};
}

View File

@ -167,6 +167,7 @@ impl ForwardedRelatedInfo {
.as_ref() .as_ref()
.and_then(|s| s.host.as_deref()) .and_then(|s| s.host.as_deref())
.or(self.x_forwarded_host.as_deref()) .or(self.x_forwarded_host.as_deref())
.or(self.host.as_deref())
.or(self.uri.host()) .or(self.uri.host())
} }

View File

@ -4,7 +4,7 @@ use fetch::{HttpClient, HttpClientTrait};
use maplit::hashmap; use maplit::hashmap;
use scraper::{Html, Selector}; use scraper::{Html, Selector};
use sea_orm::{ use sea_orm::{
ActiveModelTrait, ActiveValue::Set, ColumnTrait, DbErr, EntityTrait, QueryFilter, TryIntoModel, ActiveModelTrait, ActiveValue::Set, ColumnTrait, EntityTrait, QueryFilter, TryIntoModel,
}; };
use url::Url; use url::Url;
use util::OptDynErr; use util::OptDynErr;
@ -227,9 +227,12 @@ impl MikanClient {
self.fork_with_userpass_credential(userpass_credential) self.fork_with_userpass_credential(userpass_credential)
.await .await
} else { } else {
Err(RecorderError::from_db_record_not_found( Err(RecorderError::from_entity_not_found_detail::<
DbErr::RecordNotFound(format!("credential={credential_id} not found")), credential_3rd::Entity,
)) _,
>(format!(
"credential id {credential_id} not found"
)))
} }
} }

View File

@ -2,6 +2,7 @@ mod client;
mod config; mod config;
mod constants; mod constants;
mod credential; mod credential;
mod rss;
mod subscription; mod subscription;
mod web; mod web;
@ -18,17 +19,19 @@ pub use constants::{
MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY, MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
}; };
pub use credential::MikanCredentialForm; pub use credential::MikanCredentialForm;
pub use rss::{
MikanRssChannel, MikanRssItem, MikanRssItemMeta, MikanRssItemTorrentExtension, MikanRssRoot,
build_mikan_bangumi_subscription_rss_url, build_mikan_subscriber_subscription_rss_url,
};
pub use subscription::{ pub use subscription::{
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription, MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
}; };
pub use web::{ pub use web::{
MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta, MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanFansubHash, MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanFansubHash,
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionUrlMeta,
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url,
build_mikan_bangumi_homepage_url, build_mikan_bangumi_subscription_rss_url,
build_mikan_episode_homepage_url, build_mikan_season_flow_url, build_mikan_episode_homepage_url, build_mikan_season_flow_url,
build_mikan_subscriber_subscription_rss_url,
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment, extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
extract_mikan_bangumi_meta_from_expand_subscribed_fragment, extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
extract_mikan_episode_meta_from_episode_homepage_html, extract_mikan_episode_meta_from_episode_homepage_html,

View File

@ -0,0 +1,215 @@
use std::{borrow::Cow, str::FromStr};
use chrono::{DateTime, Utc};
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
errors::{RecorderResult, app_error::RecorderError},
extract::{
bittorrent::EpisodeEnclosureMeta,
mikan::{
MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_RSS_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
MikanEpisodeHash, build_mikan_episode_homepage_url,
},
},
};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssItemEnclosure {
#[serde(rename = "@type")]
pub r#type: String,
#[serde(rename = "@length")]
pub length: i64,
#[serde(rename = "@url")]
pub url: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct MikanRssItemTorrentExtension {
pub pub_date: String,
pub content_length: i64,
pub link: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssItem {
pub torrent: MikanRssItemTorrentExtension,
pub link: String,
pub title: String,
pub enclosure: MikanRssItemEnclosure,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssChannel {
#[serde(rename = "item", default)]
pub items: Vec<MikanRssItem>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssRoot {
pub channel: MikanRssChannel,
}
impl FromStr for MikanRssRoot {
type Err = RecorderError;
fn from_str(source: &str) -> RecorderResult<Self> {
let me = quick_xml::de::from_str(source)?;
Ok(me)
}
}
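These structs lean on quick_xml's serde conventions: fields prefixed with "@" map to XML attributes, other fields to child elements, and rename = "item" with default collects repeated <item> nodes into the Vec. A minimal round-trip under an assumed XML shape:

use std::str::FromStr;

fn demo() -> RecorderResult<()> {
    let xml = r#"
    <rss version="2.0">
      <channel>
        <item>
          <title>Example Episode</title>
          <link>https://mikanani.me/Home/Episode/some-episode-id</link>
          <torrent>
            <pubDate>2025-07-02T01:33:32</pubDate>
            <contentLength>123456</contentLength>
            <link>https://mikanani.me/Home/Episode/some-episode-id</link>
          </torrent>
          <enclosure type="application/x-bittorrent" length="123456"
            url="https://mikanani.me/Download/some.torrent"/>
        </item>
      </channel>
    </rss>"#;
    let root = MikanRssRoot::from_str(xml)?;
    assert_eq!(root.channel.items.len(), 1);
    Ok(())
}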
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanRssItemMeta {
pub title: String,
pub torrent_link: Url,
pub content_length: i64,
pub mime: String,
pub pub_date: Option<DateTime<Utc>>,
pub mikan_episode_id: String,
pub magnet_link: Option<String>,
}
impl MikanRssItemMeta {
pub fn build_homepage_url(&self, mikan_base_url: Url) -> Url {
build_mikan_episode_homepage_url(mikan_base_url, &self.mikan_episode_id)
}
pub fn parse_pub_date(pub_date: &str) -> chrono::ParseResult<DateTime<Utc>> {
DateTime::parse_from_rfc2822(pub_date)
.or_else(|_| DateTime::parse_from_rfc3339(pub_date))
.or_else(|_| DateTime::parse_from_rfc3339(&format!("{pub_date}+08:00")))
.map(|s| s.with_timezone(&Utc))
}
}
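parse_pub_date exists because Mikan's <pubDate> is neither RFC2822 nor offset-qualified; the last fallback pins the site's local time by appending +08:00 before normalizing to UTC. For example:

let dt = MikanRssItemMeta::parse_pub_date("2025-07-02T01:33:32")
    .expect("offset-less timestamps are assumed to be UTC+8");
assert_eq!(dt.to_rfc3339(), "2025-07-01T17:33:32+00:00");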
impl TryFrom<MikanRssItem> for MikanRssItemMeta {
type Error = RecorderError;
fn try_from(item: MikanRssItem) -> Result<Self, Self::Error> {
let torrent = item.torrent;
let enclosure = item.enclosure;
let mime_type = enclosure.r#type;
if mime_type != BITTORRENT_MIME_TYPE {
return Err(RecorderError::MimeError {
expected: String::from(BITTORRENT_MIME_TYPE),
found: mime_type.to_string(),
desc: String::from("MikanRssItem"),
});
}
let title = item.title;
let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
RecorderError::from_mikan_rss_invalid_field_and_source(
"enclosure_url:enclosure.link".into(),
err,
)
})?;
let homepage = Url::parse(&item.link).map_err(|err| {
RecorderError::from_mikan_rss_invalid_field_and_source(
"enclosure_url:enclosure.link".into(),
err,
)
})?;
let MikanEpisodeHash {
mikan_episode_id, ..
} = MikanEpisodeHash::from_homepage_url(&homepage).ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
})?;
Ok(MikanRssItemMeta {
title,
torrent_link: enclosure_url,
content_length: enclosure.length,
mime: mime_type,
pub_date: Self::parse_pub_date(&torrent.pub_date).ok(),
mikan_episode_id,
magnet_link: None,
})
}
}
impl From<MikanRssItemMeta> for EpisodeEnclosureMeta {
fn from(item: MikanRssItemMeta) -> Self {
Self {
magnet_link: item.magnet_link,
torrent_link: Some(item.torrent_link.to_string()),
pub_date: item.pub_date,
content_length: Some(item.content_length),
}
}
}
pub fn build_mikan_subscriber_subscription_rss_url(
mikan_base_url: Url,
mikan_subscription_token: &str,
) -> Url {
let mut url = mikan_base_url;
url.set_path(MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH);
url.query_pairs_mut().append_pair(
MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
mikan_subscription_token,
);
url
}
pub fn build_mikan_bangumi_subscription_rss_url(
mikan_base_url: Url,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> Url {
let mut url = mikan_base_url;
url.set_path(MIKAN_BANGUMI_RSS_PATH);
url.query_pairs_mut()
.append_pair(MIKAN_BANGUMI_ID_QUERY_KEY, mikan_bangumi_id);
if let Some(mikan_fansub_id) = mikan_fansub_id {
url.query_pairs_mut()
.append_pair(MIKAN_FANSUB_ID_QUERY_KEY, mikan_fansub_id);
};
url
}
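Usage sketch for the relocated builders (the rendered URL assumes MIKAN_BANGUMI_RSS_PATH = "/RSS/Bangumi" and the query keys "bangumiId"/"subgroupid", matching the test fixture name below):

fn demo() -> RecorderResult<()> {
    let base = Url::parse("https://mikanani.me/")?;
    let url = build_mikan_bangumi_subscription_rss_url(base, "3288", Some("370"));
    // => https://mikanani.me/RSS/Bangumi?bangumiId=3288&subgroupid=370
    Ok(())
}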
#[cfg(test)]
mod test {
#![allow(unused_variables)]
use std::fs;
use rstest::{fixture, rstest};
use tracing::Level;
use super::*;
use crate::{errors::RecorderResult, test_utils::tracing::try_init_testing_tracing};
#[fixture]
fn before_each() {
try_init_testing_tracing(Level::DEBUG);
}
#[rstest]
#[test]
fn test_mikan_rss_episode_item_try_from_rss_item(before_each: ()) -> RecorderResult<()> {
let rss_str = fs::read_to_string(
"tests/resources/mikan/doppel/RSS/Bangumi-bangumiId%3D3288%26subgroupid%3D370.html",
)?;
let mut channel = MikanRssRoot::from_str(&rss_str)?.channel;
assert!(!channel.items.is_empty());
let item = channel.items.pop().unwrap();
let episode_item = MikanRssItemMeta::try_from(item.clone())?;
assert!(episode_item.pub_date.is_some());
Ok(())
}
}

View File

@ -1,12 +1,13 @@
use std::{ use std::{
collections::{HashMap, HashSet}, collections::{HashMap, HashSet},
fmt::Debug, fmt::Debug,
str::FromStr,
sync::Arc, sync::Arc,
}; };
use async_graphql::{InputObject, SimpleObject}; use async_graphql::{InputObject, SimpleObject};
use async_stream::try_stream; use async_stream::try_stream;
use fetch::fetch_bytes; use fetch::fetch_html;
use futures::{Stream, TryStreamExt, pin_mut, try_join}; use futures::{Stream, TryStreamExt, pin_mut, try_join};
use maplit::hashmap; use maplit::hashmap;
use sea_orm::{ use sea_orm::{
@ -24,8 +25,8 @@ use crate::{
bittorrent::EpisodeEnclosureMeta, bittorrent::EpisodeEnclosureMeta,
mikan::{ mikan::{
MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanRssItemMeta, MikanRssRoot, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url, MikanSubscriberSubscriptionUrlMeta, build_mikan_bangumi_subscription_rss_url,
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url, build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
scrape_mikan_episode_meta_from_episode_homepage_url, scrape_mikan_episode_meta_from_episode_homepage_url,
}, },
@ -39,7 +40,7 @@ use crate::{
#[tracing::instrument(err, skip(ctx, rss_item_list))] #[tracing::instrument(err, skip(ctx, rss_item_list))]
async fn sync_mikan_feeds_from_rss_item_list( async fn sync_mikan_feeds_from_rss_item_list(
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
rss_item_list: Vec<MikanRssEpisodeItem>, rss_item_list: Vec<MikanRssItemMeta>,
subscriber_id: i32, subscriber_id: i32,
subscription_id: i32, subscription_id: i32,
) -> RecorderResult<()> { ) -> RecorderResult<()> {
@ -202,7 +203,7 @@ impl SubscriptionTrait for MikanSubscriberSubscription {
fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> { fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
let source_url = Url::parse(&model.source_url)?; let source_url = Url::parse(&model.source_url)?;
let meta = MikanSubscriberSubscriptionRssUrlMeta::from_rss_url(&source_url) let meta = MikanSubscriberSubscriptionUrlMeta::from_rss_url(&source_url)
.with_whatever_context::<_, String, RecorderError>(|| { .with_whatever_context::<_, String, RecorderError>(|| {
format!( format!(
"MikanSubscriberSubscription should extract mikan_subscription_token from \ "MikanSubscriberSubscription should extract mikan_subscription_token from \
@ -224,19 +225,19 @@ impl MikanSubscriberSubscription {
async fn get_rss_item_list_from_source_url( async fn get_rss_item_list_from_source_url(
&self, &self,
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssEpisodeItem>> { ) -> RecorderResult<Vec<MikanRssItemMeta>> {
let mikan_base_url = ctx.mikan().base_url().clone(); let mikan_base_url = ctx.mikan().base_url().clone();
let rss_url = build_mikan_subscriber_subscription_rss_url( let rss_url = build_mikan_subscriber_subscription_rss_url(
mikan_base_url.clone(), mikan_base_url.clone(),
&self.mikan_subscription_token, &self.mikan_subscription_token,
); );
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?; let html = fetch_html(ctx.mikan(), rss_url).await?;
let channel = rss::Channel::read_from(&bytes[..])?; let channel = MikanRssRoot::from_str(&html)?.channel;
let mut result = vec![]; let mut result = vec![];
for (idx, item) in channel.items.into_iter().enumerate() { for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssEpisodeItem::try_from(item) let item = MikanRssItemMeta::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| { .with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}") format!("failed to extract rss item at idx {idx}")
})?; })?;
@ -249,7 +250,7 @@ impl MikanSubscriberSubscription {
async fn get_rss_item_list_from_subsribed_url_rss_link( async fn get_rss_item_list_from_subsribed_url_rss_link(
&self, &self,
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssEpisodeItem>> { ) -> RecorderResult<Vec<MikanRssItemMeta>> {
let subscribed_bangumi_list = let subscribed_bangumi_list =
bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.subscription_id) bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.subscription_id)
.await?; .await?;
@ -264,12 +265,12 @@ impl MikanSubscriberSubscription {
self.subscription_id, subscribed_bangumi.display_name self.subscription_id, subscribed_bangumi.display_name
) )
})?; })?;
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?; let html = fetch_html(ctx.mikan(), rss_url).await?;
let channel = rss::Channel::read_from(&bytes[..])?; let channel = MikanRssRoot::from_str(&html)?.channel;
for (idx, item) in channel.items.into_iter().enumerate() { for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssEpisodeItem::try_from(item) let item = MikanRssItemMeta::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| { .with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}") format!("failed to extract rss item at idx {idx}")
})?; })?;
@ -406,7 +407,7 @@ impl MikanSeasonSubscription {
fn get_rss_item_stream_from_subsribed_url_rss_link( fn get_rss_item_stream_from_subsribed_url_rss_link(
&self, &self,
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
) -> impl Stream<Item = RecorderResult<Vec<MikanRssEpisodeItem>>> { ) -> impl Stream<Item = RecorderResult<Vec<MikanRssItemMeta>>> {
try_stream! { try_stream! {
let db = ctx.db(); let db = ctx.db();
@ -433,14 +434,14 @@ impl MikanSeasonSubscription {
self.subscription_id, subscribed_bangumi.display_name self.subscription_id, subscribed_bangumi.display_name
) )
})?; })?;
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?; let html = fetch_html(ctx.mikan(), rss_url).await?;
let channel = rss::Channel::read_from(&bytes[..])?; let channel = MikanRssRoot::from_str(&html)?.channel;
let mut rss_item_list = vec![]; let mut rss_item_list = vec![];
for (idx, item) in channel.items.into_iter().enumerate() { for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssEpisodeItem::try_from(item) let item = MikanRssItemMeta::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| { .with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}") format!("failed to extract rss item at idx {idx}")
})?; })?;
@ -519,20 +520,20 @@ impl MikanBangumiSubscription {
async fn get_rss_item_list_from_source_url( async fn get_rss_item_list_from_source_url(
&self, &self,
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssEpisodeItem>> { ) -> RecorderResult<Vec<MikanRssItemMeta>> {
let mikan_base_url = ctx.mikan().base_url().clone(); let mikan_base_url = ctx.mikan().base_url().clone();
let rss_url = build_mikan_bangumi_subscription_rss_url( let rss_url = build_mikan_bangumi_subscription_rss_url(
mikan_base_url.clone(), mikan_base_url.clone(),
&self.mikan_bangumi_id, &self.mikan_bangumi_id,
Some(&self.mikan_fansub_id), Some(&self.mikan_fansub_id),
); );
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?; let html = fetch_html(ctx.mikan(), rss_url).await?;
let channel = rss::Channel::read_from(&bytes[..])?; let channel = MikanRssRoot::from_str(&html)?.channel;
let mut result = vec![]; let mut result = vec![];
for (idx, item) in channel.items.into_iter().enumerate() { for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssEpisodeItem::try_from(item) let item = MikanRssItemMeta::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| { .with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}") format!("failed to extract rss item at idx {idx}")
})?; })?;
@ -545,52 +546,27 @@ impl MikanBangumiSubscription {
#[cfg(test)] #[cfg(test)]
#[allow(unused_variables)] #[allow(unused_variables)]
mod tests { mod tests {
use std::sync::Arc;
use rstest::{fixture, rstest}; use rstest::{fixture, rstest};
use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait}; use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait};
use tracing::Level; use tracing::Level;
use crate::{ use crate::{
app::AppContextTrait,
errors::RecorderResult, errors::RecorderResult,
extract::mikan::{ extract::mikan::{
MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionRssUrlMeta, MikanSubscriberSubscriptionUrlMeta,
}, },
models::{ models::{
bangumi, episodes, bangumi, episodes,
subscriptions::{self, SubscriptionTrait}, subscriptions::{self, SubscriptionTrait},
}, },
test_utils::{ test_utils::{
app::{TestingAppContext, TestingAppContextPreset}, app::TestingPreset, mikan::build_testing_mikan_credential_form,
mikan::{MikanMockServer, build_testing_mikan_credential_form},
tracing::try_init_testing_tracing, tracing::try_init_testing_tracing,
}, },
}; };
struct TestingResources {
pub app_ctx: Arc<dyn AppContextTrait>,
pub mikan_server: MikanMockServer,
}
async fn build_testing_app_context() -> RecorderResult<TestingResources> {
let mikan_server = MikanMockServer::new().await?;
let mikan_base_url = mikan_server.base_url().clone();
let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
mikan_base_url: mikan_base_url.to_string(),
database_config: None,
})
.await?;
Ok(TestingResources {
app_ctx,
mikan_server,
})
}
#[fixture] #[fixture]
fn before_each() { fn before_each() {
try_init_testing_tracing(Level::DEBUG); try_init_testing_tracing(Level::DEBUG);
@ -599,10 +575,10 @@ mod tests {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> { async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources { let mut preset = TestingPreset::default().await?;
app_ctx, let app_ctx = preset.app_ctx.clone();
mut mikan_server,
} = build_testing_app_context().await?; let mikan_server = &mut preset.mikan_server;
let _resources_mock = mikan_server.mock_resources_with_doppel(); let _resources_mock = mikan_server.mock_resources_with_doppel();
@ -661,10 +637,11 @@ mod tests {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_mikan_subscriber_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> { async fn test_mikan_subscriber_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources { let mut preset = TestingPreset::default().await?;
app_ctx,
mut mikan_server, let app_ctx = preset.app_ctx.clone();
} = build_testing_app_context().await?;
let mikan_server = &mut preset.mikan_server;
let _resources_mock = mikan_server.mock_resources_with_doppel(); let _resources_mock = mikan_server.mock_resources_with_doppel();
@ -677,7 +654,7 @@ mod tests {
subscriber_id: ActiveValue::Set(subscriber_id), subscriber_id: ActiveValue::Set(subscriber_id),
category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSubscriber), category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSubscriber),
source_url: ActiveValue::Set( source_url: ActiveValue::Set(
MikanSubscriberSubscriptionRssUrlMeta { MikanSubscriberSubscriptionUrlMeta {
mikan_subscription_token: "test".into(), mikan_subscription_token: "test".into(),
} }
.build_rss_url(mikan_server.base_url().clone()) .build_rss_url(mikan_server.base_url().clone())
@ -728,10 +705,11 @@ mod tests {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> { async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources { let mut preset = TestingPreset::default().await?;
app_ctx,
mut mikan_server, let app_ctx = preset.app_ctx.clone();
} = build_testing_app_context().await?;
let mikan_server = &mut preset.mikan_server;
let _resources_mock = mikan_server.mock_resources_with_doppel(); let _resources_mock = mikan_server.mock_resources_with_doppel();

View File

@ -26,7 +26,8 @@ use crate::{
MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY, MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
MIKAN_YEAR_QUERY_KEY, MikanClient, MIKAN_UNKNOWN_FANSUB_ID, MIKAN_YEAR_QUERY_KEY, MikanClient,
build_mikan_bangumi_subscription_rss_url, build_mikan_subscriber_subscription_rss_url,
}, },
}, },
media::{ media::{
@ -34,7 +35,7 @@ use crate::{
EncodeWebpOptions, EncodeWebpOptions,
}, },
storage::StorageContentCategory, storage::StorageContentCategory,
task::{OptimizeImageTask, SystemTask}, task::OptimizeImageTask,
}; };
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@ -139,16 +140,16 @@ impl From<MikanRssEpisodeItem> for EpisodeEnclosureMeta {
} }
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriberSubscriptionRssUrlMeta { pub struct MikanSubscriberSubscriptionUrlMeta {
pub mikan_subscription_token: String, pub mikan_subscription_token: String,
} }
impl MikanSubscriberSubscriptionRssUrlMeta { impl MikanSubscriberSubscriptionUrlMeta {
pub fn from_rss_url(url: &Url) -> Option<Self> { pub fn from_rss_url(url: &Url) -> Option<Self> {
if url.path() == MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH { if url.path() == MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH {
url.query_pairs() url.query_pairs()
.find(|(k, _)| k == MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY) .find(|(k, _)| k == MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY)
.map(|(_, v)| MikanSubscriberSubscriptionRssUrlMeta { .map(|(_, v)| MikanSubscriberSubscriptionUrlMeta {
mikan_subscription_token: v.to_string(), mikan_subscription_token: v.to_string(),
}) })
} else { } else {
@ -161,19 +162,6 @@ impl MikanSubscriberSubscriptionRssUrlMeta {
} }
} }
pub fn build_mikan_subscriber_subscription_rss_url(
mikan_base_url: Url,
mikan_subscription_token: &str,
) -> Url {
let mut url = mikan_base_url;
url.set_path(MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH);
url.query_pairs_mut().append_pair(
MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
mikan_subscription_token,
);
url
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Eq)] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Eq)]
pub struct MikanBangumiIndexMeta { pub struct MikanBangumiIndexMeta {
pub homepage: Url, pub homepage: Url,
@ -289,22 +277,6 @@ pub struct MikanBangumiPosterMeta {
pub poster_src: Option<String>, pub poster_src: Option<String>,
} }
pub fn build_mikan_bangumi_subscription_rss_url(
mikan_base_url: Url,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> Url {
let mut url = mikan_base_url;
url.set_path(MIKAN_BANGUMI_RSS_PATH);
url.query_pairs_mut()
.append_pair(MIKAN_BANGUMI_ID_QUERY_KEY, mikan_bangumi_id);
if let Some(mikan_fansub_id) = mikan_fansub_id {
url.query_pairs_mut()
.append_pair(MIKAN_FANSUB_ID_QUERY_KEY, mikan_fansub_id);
};
url
}
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiIndexHash { pub struct MikanBangumiIndexHash {
pub mikan_bangumi_id: String, pub mikan_bangumi_id: String,
@ -592,16 +564,17 @@ pub fn extract_mikan_episode_meta_from_episode_homepage_html(
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id")) RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
})?; })?;
let fansub_name = html let fansub_name = if mikan_fansub_id == MIKAN_UNKNOWN_FANSUB_ID {
.select( MIKAN_UNKNOWN_FANSUB_ID.to_string()
} else {
html.select(
&Selector::parse(".bangumi-info a.magnet-link-wrap[href^='/Home/PublishGroup/']") &Selector::parse(".bangumi-info a.magnet-link-wrap[href^='/Home/PublishGroup/']")
.unwrap(), .unwrap(),
) )
.next() .next()
.map(extract_inner_text_from_element_ref) .map(extract_inner_text_from_element_ref)
.ok_or_else(|| { .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))?
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")) };
})?;
let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| { let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
el.value() el.value()
@ -713,6 +686,13 @@ pub fn extract_mikan_fansub_meta_from_bangumi_homepage_html(
html: &Html, html: &Html,
mikan_fansub_id: String, mikan_fansub_id: String,
) -> Option<MikanFansubMeta> { ) -> Option<MikanFansubMeta> {
if mikan_fansub_id == MIKAN_UNKNOWN_FANSUB_ID {
return Some(MikanFansubMeta {
mikan_fansub_id,
fansub: MIKAN_UNKNOWN_FANSUB_ID.to_string(),
});
}
html.select( html.select(
&Selector::parse(&format!( &Selector::parse(&format!(
"a.subgroup-name[data-anchor='#{mikan_fansub_id}']" "a.subgroup-name[data-anchor='#{mikan_fansub_id}']"
@ -829,11 +809,6 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
.write(storage_path.clone(), poster_data) .write(storage_path.clone(), poster_data)
.await?; .await?;
tracing::warn!(
poster_str = poster_str.to_string(),
"mikan poster meta extracted"
);
MikanBangumiPosterMeta { MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url, origin_poster_src: origin_poster_src_url,
poster_src: Some(poster_str.to_string()), poster_src: Some(poster_str.to_string()),
@ -851,11 +826,14 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
let webp_storage_path = storage_path.with_extension("webp"); let webp_storage_path = storage_path.with_extension("webp");
if storage_service.exists(&webp_storage_path).await?.is_none() { if storage_service.exists(&webp_storage_path).await?.is_none() {
task_service task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask { .add_system_task(
source_path: storage_path.clone().to_string(), OptimizeImageTask::builder()
target_path: webp_storage_path.to_string(), .source_path(storage_path.clone().to_string())
format_options: EncodeImageOptions::Webp(EncodeWebpOptions::default()), .target_path(webp_storage_path.to_string())
})) .format_options(EncodeImageOptions::Webp(EncodeWebpOptions::default()))
.build()
.into(),
)
.await?; .await?;
} }
} }
@ -863,11 +841,14 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
let avif_storage_path = storage_path.with_extension("avif"); let avif_storage_path = storage_path.with_extension("avif");
if storage_service.exists(&avif_storage_path).await?.is_none() { if storage_service.exists(&avif_storage_path).await?.is_none() {
task_service task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask { .add_system_task(
source_path: storage_path.clone().to_string(), OptimizeImageTask::builder()
target_path: avif_storage_path.to_string(), .source_path(storage_path.clone().to_string())
format_options: EncodeImageOptions::Avif(EncodeAvifOptions::default()), .target_path(avif_storage_path.to_string())
})) .format_options(EncodeImageOptions::Avif(EncodeAvifOptions::default()))
.build()
.into(),
)
.await?; .await?;
} }
} }
@ -875,11 +856,14 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
let jxl_storage_path = storage_path.with_extension("jxl"); let jxl_storage_path = storage_path.with_extension("jxl");
if storage_service.exists(&jxl_storage_path).await?.is_none() { if storage_service.exists(&jxl_storage_path).await?.is_none() {
task_service task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask { .add_system_task(
source_path: storage_path.clone().to_string(), OptimizeImageTask::builder()
target_path: jxl_storage_path.to_string(), .source_path(storage_path.clone().to_string())
format_options: EncodeImageOptions::Jxl(EncodeJxlOptions::default()), .target_path(jxl_storage_path.to_string())
})) .format_options(EncodeImageOptions::Jxl(EncodeJxlOptions::default()))
.build()
.into(),
)
.await?; .await?;
} }
} }
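The struct-literal construction gives way to a generated builder plus a From<OptimizeImageTask> for SystemTask conversion. One plausible shape for both (the derive crate and the impl are assumptions, not from the diff):

#[derive(bon::Builder)] // or typed_builder::TypedBuilder; the exact crate is an assumption
pub struct OptimizeImageTask {
    pub source_path: String,
    pub target_path: String,
    pub format_options: EncodeImageOptions,
}

impl From<OptimizeImageTask> for SystemTask {
    fn from(task: OptimizeImageTask) -> Self {
        SystemTask::OptimizeImage(task)
    }
}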
@ -1122,7 +1106,7 @@ mod test {
use super::*; use super::*;
use crate::test_utils::{ use crate::test_utils::{
app::{TestingAppContext, TestingAppContextPreset}, app::{TestingAppContext, TestingPreset},
crypto::build_testing_crypto_service, crypto::build_testing_crypto_service,
database::build_testing_database_service, database::build_testing_database_service,
mikan::{ mikan::{
@ -1170,17 +1154,13 @@ mod test {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_scrape_mikan_poster_meta_from_image_url(before_each: ()) -> RecorderResult<()> { async fn test_scrape_mikan_poster_meta_from_image_url(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = MikanMockServer::new().await?; let mut preset = TestingPreset::default().await?;
let mikan_base_url = mikan_server.base_url().clone(); let app_ctx = preset.app_ctx.clone();
let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset { let mikan_base_url = preset.mikan_server.base_url().clone();
mikan_base_url: mikan_base_url.to_string(),
database_config: None,
})
.await?;
let resources_mock = mikan_server.mock_resources_with_doppel(); let resources_mock = preset.mikan_server.mock_resources_with_doppel();
let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?; let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;

View File

@ -1,5 +1,4 @@
pub mod bittorrent; pub mod bittorrent;
pub mod defs;
pub mod html; pub mod html;
pub mod http; pub mod http;
pub mod media; pub mod media;

View File

@ -47,8 +47,27 @@ impl<'a> EpisodeComp<'a> {
Ok((input, f32::round(num) as i32)) Ok((input, f32::round(num) as i32))
} }
fn parse_ep_special_num(input: &'a str) -> IResult<&'a str, i32> {
terminated(
alt((
value(0, tag_no_case("ova")),
value(0, tag_no_case("oad")),
value(0, tag_no_case("sp")),
value(0, tag_no_case("ex")),
)),
(space0, opt(parse_int::<i32>)),
)
.parse(input)
}
fn parse_ep_num(input: &'a str) -> IResult<&'a str, i32> { fn parse_ep_num(input: &'a str) -> IResult<&'a str, i32> {
alt((parse_int::<i32>, Self::parse_ep_round_num, ZhNum::parse_int)).parse(input) alt((
parse_int::<i32>,
Self::parse_ep_round_num,
ZhNum::parse_int,
Self::parse_ep_special_num,
))
.parse(input)
} }
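parse_ep_special_num folds OVA/OAD/SP/EX markers into episode 0 so specials no longer fail the parser; an index trailing the marker is consumed but currently discarded. Illustrative expectations (assumed call sites):

assert_eq!(EpisodeComp::parse_ep_special_num("OVA"), Ok(("", 0)));
assert_eq!(EpisodeComp::parse_ep_special_num("SP 2"), Ok(("", 0))); // the 2 is dropped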
fn parse_ep_nums_core(input: &'a str) -> IResult<&'a str, (i32, Option<i32>)> { fn parse_ep_nums_core(input: &'a str) -> IResult<&'a str, (i32, Option<i32>)> {
@ -175,8 +194,13 @@ impl<'a> std::fmt::Debug for MoiveComp<'a> {
impl<'a> OriginCompTrait<'a> for MoiveComp<'a> { impl<'a> OriginCompTrait<'a> for MoiveComp<'a> {
#[cfg_attr(debug_assertions, instrument(level = Level::TRACE, ret, err(level=Level::TRACE), "MoiveComp::parse_comp"))] #[cfg_attr(debug_assertions, instrument(level = Level::TRACE, ret, err(level=Level::TRACE), "MoiveComp::parse_comp"))]
fn parse_comp(input: &'a str) -> IResult<&'a str, Self> { fn parse_comp(input: &'a str) -> IResult<&'a str, Self> {
let (input, source) = let (input, source) = alt((
alt((tag("剧场版"), tag("电影"), tag_no_case("movie"))).parse(input)?; tag("剧场版"),
tag("电影"),
tag_no_case("movie"),
tag_no_case("film"),
))
.parse(input)?;
Ok(( Ok((
input, input,
Self { Self {

View File

@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::bangumi}; use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::bangumi,
};
pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) { pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId); restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId);
@ -8,7 +14,6 @@ pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder { pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<bangumi::BangumiType>(); builder.register_enumeration::<bangumi::BangumiType>();
seaography::register_entity!(builder, bangumi);
builder register_entity_default_writable!(builder, bangumi, false)
} }

View File

@ -1,50 +1,28 @@
use std::sync::Arc; use std::sync::Arc;
use async_graphql::dynamic::{ use async_graphql::dynamic::{Field, FieldFuture, FieldValue, Object, TypeRef};
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef, use sea_orm::{EntityTrait, QueryFilter};
};
use seaography::{Builder as SeaographyBuilder, BuilderContext}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql; use util_derive::DynamicGraphql;
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
auth::AuthUserInfo,
errors::RecorderError, errors::RecorderError,
graphql::{ graphql::{
domains::subscribers::restrict_subscriber_for_entity, domains::subscribers::restrict_subscriber_for_entity,
infra::crypto::{ infra::{
register_crypto_column_input_conversion_to_schema_context, crypto::{
register_crypto_column_output_conversion_to_schema_context, register_crypto_column_input_conversion_to_schema_context,
register_crypto_column_output_conversion_to_schema_context,
},
custom::{generate_entity_filtered_mutation_field, register_entity_default_writable},
name::get_entity_custom_mutation_field_name,
}, },
}, },
models::credential_3rd, models::credential_3rd,
}; };
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
struct Credential3rdCheckAvailableInput {
pub id: i32,
}
impl Credential3rdCheckAvailableInput {
fn input_type_name() -> &'static str {
"Credential3rdCheckAvailableInput"
}
fn arg_name() -> &'static str {
"filter"
}
fn generate_input_object() -> InputObject {
InputObject::new(Self::input_type_name())
.description("The input of the credential3rdCheckAvailable query")
.field(InputValue::new(
Credential3rdCheckAvailableInputFieldEnum::Id.as_str(),
TypeRef::named_nn(TypeRef::INT),
))
}
}
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)] #[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct Credential3rdCheckAvailableInfo { pub struct Credential3rdCheckAvailableInfo {
pub available: bool, pub available: bool,
@ -117,52 +95,43 @@ pub fn register_credential3rd_to_schema_builder(
mut builder: SeaographyBuilder, mut builder: SeaographyBuilder,
) -> SeaographyBuilder { ) -> SeaographyBuilder {
builder.register_enumeration::<credential_3rd::Credential3rdType>(); builder.register_enumeration::<credential_3rd::Credential3rdType>();
seaography::register_entity!(builder, credential_3rd); builder = register_entity_default_writable!(builder, credential_3rd, false);
builder.schema = builder
.schema
.register(Credential3rdCheckAvailableInput::generate_input_object());
builder.schema = builder builder.schema = builder
.schema .schema
.register(Credential3rdCheckAvailableInfo::generate_output_object()); .register(Credential3rdCheckAvailableInfo::generate_output_object());
builder.queries.push( let builder_context = &builder.context;
Field::new( {
"credential3rdCheckAvailable", let check_available_mutation_name = get_entity_custom_mutation_field_name::<
TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()), credential_3rd::Entity,
move |ctx| { >(builder_context, "CheckAvailable");
FieldFuture::new(async move { let check_available_mutation =
let auth_user_info = ctx.data::<AuthUserInfo>()?; generate_entity_filtered_mutation_field::<credential_3rd::Entity, _, _>(
let input: Credential3rdCheckAvailableInput = ctx builder_context,
.args check_available_mutation_name,
.get(Credential3rdCheckAvailableInput::arg_name()) TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
.unwrap() Arc::new(|_resolver_ctx, app_ctx, filters| {
.deserialize()?; Box::pin(async move {
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?; let db = app_ctx.db();
let credential_model = credential_3rd::Model::find_by_id_and_subscriber_id( let credential_model = credential_3rd::Entity::find()
app_ctx.as_ref(), .filter(filters)
input.id, .one(db)
auth_user_info.subscriber_auth.subscriber_id, .await?
) .ok_or_else(|| {
.await? RecorderError::from_entity_not_found::<credential_3rd::Entity>()
.ok_or_else(|| RecorderError::Credential3rdError { })?;
message: format!("credential = {} not found", input.id),
source: None.into(),
})?;
let available = credential_model.check_available(app_ctx.as_ref()).await?; let available = credential_model.check_available(app_ctx.as_ref()).await?;
Ok(Some(FieldValue::owned_any( Ok(Some(FieldValue::owned_any(
Credential3rdCheckAvailableInfo { available }, Credential3rdCheckAvailableInfo { available },
))) )))
}) })
}, }),
) );
.argument(InputValue::new( builder.mutations.push(check_available_mutation);
Credential3rdCheckAvailableInput::arg_name(), }
TypeRef::named_nn(Credential3rdCheckAvailableInput::input_type_name()),
)),
);
builder builder
} }

View File

@ -0,0 +1,64 @@
use sea_orm::Iterable;
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::{
domains::{
subscriber_tasks::restrict_subscriber_tasks_for_entity,
subscribers::restrict_subscriber_for_entity,
system_tasks::restrict_system_tasks_for_entity,
},
infra::{custom::register_entity_default_writable, name::get_entity_and_column_name},
},
models::cron,
};
fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in cron::Column::iter() {
if matches!(
column,
cron::Column::SubscriberTaskCron
| cron::Column::SystemTaskCron
| cron::Column::CronExpr
| cron::Column::CronTimezone
| cron::Column::Enabled
| cron::Column::TimeoutMs
| cron::Column::MaxAttempts
) {
continue;
}
let entity_column_key = get_entity_and_column_name::<cron::Entity>(context, &column);
context.entity_input.insert_skips.push(entity_column_key);
}
for column in cron::Column::iter() {
        if matches!(
            column,
            cron::Column::CronExpr
                | cron::Column::CronTimezone
                | cron::Column::Enabled
                | cron::Column::TimeoutMs
                | cron::Column::Priority
                | cron::Column::MaxAttempts
        ) {
continue;
}
let entity_column_key = get_entity_and_column_name::<cron::Entity>(context, &column);
context.entity_input.update_skips.push(entity_column_key);
}
}
pub fn register_cron_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<cron::Entity>(context, &cron::Column::SubscriberId);
restrict_subscriber_tasks_for_entity::<cron::Entity>(
context,
&cron::Column::SubscriberTaskCron,
);
restrict_system_tasks_for_entity::<cron::Entity>(context, &cron::Column::SystemTaskCron);
skip_columns_for_entity_input(context);
}
pub fn register_cron_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<cron::CronStatus>();
builder = register_entity_default_writable!(builder, cron, true);
builder
}
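The two skip loops above are allow-lists in disguise: any column they do not match is dropped from the generated insert or update input. The same whitelist written out as a check (a sketch; the column set mirrors the first loop, and the `cronInsertInput` name is assumed from seaography's default naming):

#[test]
fn insertable_cron_columns_are_whitelisted() {
    use sea_orm::Iterable;

    // Columns that survive the insert-skip loop, collected explicitly.
    let insertable: Vec<cron::Column> = cron::Column::iter()
        .filter(|column| {
            matches!(
                column,
                cron::Column::SubscriberTaskCron
                    | cron::Column::SystemTaskCron
                    | cron::Column::CronExpr
                    | cron::Column::CronTimezone
                    | cron::Column::Enabled
                    | cron::Column::TimeoutMs
                    | cron::Column::MaxAttempts
            )
        })
        .collect();
    // Everything else is stripped from the generated `cronInsertInput`.
    assert_eq!(insertable.len(), 7);
}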
@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

-use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloaders};
+use crate::{
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::custom::register_entity_default_writable,
+    },
+    models::downloaders,
+};

pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<downloaders::Entity>(
@@ -11,7 +17,7 @@ pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<downloaders::DownloaderCategory>();
-    seaography::register_entity!(builder, downloaders);
+    builder = register_entity_default_writable!(builder, downloaders, false);
    builder
}
@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

-use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloads};
+use crate::{
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::custom::register_entity_default_writable,
+    },
+    models::downloads,
+};

pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId);
@@ -9,7 +15,7 @@ pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<downloads::DownloadStatus>();
    builder.register_enumeration::<downloads::DownloadMime>();
-    seaography::register_entity!(builder, downloads);
+    builder = register_entity_default_writable!(builder, downloads, false);
    builder
}
@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

-use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::episodes};
+use crate::{
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::custom::register_entity_default_writable,
+    },
+    models::episodes,
+};

pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId);
@@ -8,7 +14,7 @@ pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<episodes::EpisodeType>();
-    seaography::register_entity!(builder, episodes);
+    builder = register_entity_default_writable!(builder, episodes, false);
    builder
}
@@ -3,11 +3,18 @@ use std::sync::Arc;
use async_graphql::dynamic::ResolverContext;
use sea_orm::Value as SeaValue;
use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};
+use uuid::Uuid;

use crate::{
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
-        infra::util::{get_entity_column_key, get_entity_key},
+        infra::{
+            custom::register_entity_default_writable,
+            name::{
+                get_entity_and_column_name, get_entity_create_batch_mutation_field_name,
+                get_entity_create_one_mutation_field_name,
+            },
+        },
    },
    models::feeds,
};
@@ -15,29 +22,23 @@ use crate::{
pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<feeds::Entity>(context, &feeds::Column::SubscriberId);
    {
-        let entity_column_key =
-            get_entity_column_key::<feeds::Entity>(context, &feeds::Column::Token);
-        let entity_key = get_entity_key::<feeds::Entity>(context);
-        let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
-        let entity_create_one_mutation_field_name = Arc::new(format!(
-            "{}{}",
-            entity_name, context.entity_create_one_mutation.mutation_suffix
-        ));
-        let entity_create_batch_mutation_field_name = Arc::new(format!(
-            "{}{}",
-            entity_name,
-            context.entity_create_batch_mutation.mutation_suffix.clone()
-        ));
+        let entity_create_one_mutation_field_name = Arc::new(
+            get_entity_create_one_mutation_field_name::<feeds::Entity>(context),
+        );
+        let entity_create_batch_mutation_field_name =
+            Arc::new(get_entity_create_batch_mutation_field_name::<feeds::Entity>(context));
        context.types.input_none_conversions.insert(
-            entity_column_key,
+            get_entity_and_column_name::<feeds::Entity>(context, &feeds::Column::Token),
            Box::new(
                move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
                    let field_name = context.field().name();
                    if field_name == entity_create_one_mutation_field_name.as_str()
                        || field_name == entity_create_batch_mutation_field_name.as_str()
                    {
-                        Ok(Some(SeaValue::String(Some(Box::new(nanoid::nanoid!())))))
+                        Ok(Some(SeaValue::String(Some(Box::new(
+                            Uuid::now_v7().to_string(),
+                        )))))
                    } else {
                        Ok(None)
                    }
@@ -50,7 +51,8 @@ pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<feeds::FeedType>();
    builder.register_enumeration::<feeds::FeedSource>();
-    seaography::register_entity!(builder, feeds);
+    builder = register_entity_default_writable!(builder, feeds, false);
    builder
}
@@ -1,6 +1,7 @@
pub mod credential_3rd;
pub mod bangumi;
+pub mod cron;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
@@ -10,3 +11,4 @@ pub mod subscribers;
pub mod subscription_bangumi;
pub mod subscription_episode;
pub mod subscriptions;
+pub mod system_tasks;
@@ -1,51 +1,143 @@
use std::{ops::Deref, sync::Arc};

-use async_graphql::dynamic::{FieldValue, TypeRef};
+use async_graphql::dynamic::{FieldValue, Scalar, TypeRef};
+use convert_case::Case;
use sea_orm::{
-    ColumnTrait, ConnectionTrait, EntityTrait, QueryFilter, QuerySelect, QueryTrait, prelude::Expr,
-    sea_query::Query,
+    ActiveModelBehavior, ColumnTrait, ConnectionTrait, EntityTrait, Iterable, QueryFilter,
+    QuerySelect, QueryTrait, prelude::Expr, sea_query::Query,
};
use seaography::{
-    Builder as SeaographyBuilder, BuilderContext, EntityDeleteMutationBuilder, EntityObjectBuilder,
-    EntityQueryFieldBuilder, get_filter_conditions,
+    Builder as SeaographyBuilder, BuilderContext, SeaographyError, prepare_active_model,
};
+use ts_rs::TS;

use crate::{
+    auth::AuthUserInfo,
    errors::RecorderError,
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
        infra::{
-            custom::generate_entity_filter_mutation_field,
-            json::{convert_jsonb_output_case_for_entity, restrict_jsonb_filter_input_for_entity},
+            custom::{
+                generate_entity_create_one_mutation_field,
+                generate_entity_default_basic_entity_object,
+                generate_entity_default_insert_input_object, generate_entity_delete_mutation_field,
+                generate_entity_filtered_mutation_field, register_entity_default_readonly,
+            },
+            json::{convert_jsonb_output_for_entity, restrict_jsonb_filter_input_for_entity},
+            name::{
+                get_entity_and_column_name, get_entity_basic_type_name,
+                get_entity_custom_mutation_field_name,
+            },
        },
    },
+    migrations::defs::{ApalisJobs, ApalisSchema},
    models::subscriber_tasks,
-    task::{ApalisJobs, ApalisSchema},
+    task::SubscriberTaskTrait,
};

-pub fn register_subscriber_tasks_entity_mutations(
+fn skip_columns_for_entity_input(context: &mut BuilderContext) {
+    for column in subscriber_tasks::Column::iter() {
+        if matches!(
+            column,
+            subscriber_tasks::Column::Job | subscriber_tasks::Column::SubscriberId
+        ) {
+            continue;
+        }
+        let entity_column_key =
+            get_entity_and_column_name::<subscriber_tasks::Entity>(context, &column);
+        context.entity_input.insert_skips.push(entity_column_key);
+    }
+}
+
+pub fn restrict_subscriber_tasks_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
+where
+    T: EntityTrait,
+    <T as EntityTrait>::Model: Sync,
+{
+    let entity_and_column = get_entity_and_column_name::<T>(context, column);
+    restrict_jsonb_filter_input_for_entity::<T>(context, column);
+    convert_jsonb_output_for_entity::<T>(context, column, Some(Case::Camel));
+    let entity_column_name = get_entity_and_column_name::<T>(context, column);
+    context.types.input_type_overwrites.insert(
+        entity_column_name.clone(),
+        TypeRef::Named(subscriber_tasks::SubscriberTask::ident().into()),
+    );
+    context.types.output_type_overwrites.insert(
+        entity_column_name.clone(),
+        TypeRef::Named(subscriber_tasks::SubscriberTask::ident().into()),
+    );
+    context.types.input_conversions.insert(
+        entity_column_name.clone(),
+        Box::new(move |resolve_context, value_accessor| {
+            let task: subscriber_tasks::SubscriberTaskInput = value_accessor.deserialize()?;
+            let subscriber_id = resolve_context
+                .data::<AuthUserInfo>()?
+                .subscriber_auth
+                .subscriber_id;
+            let task = subscriber_tasks::SubscriberTask::from_input(task, subscriber_id);
+            let json_value = serde_json::to_value(task).map_err(|err| {
+                SeaographyError::TypeConversionError(
+                    err.to_string(),
+                    format!("Json - {entity_column_name}"),
+                )
+            })?;
+            Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
+        }),
+    );
+    context.entity_input.update_skips.push(entity_and_column);
+}
+
+pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
+    restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
+        context,
+        &subscriber_tasks::Column::SubscriberId,
+    );
+    restrict_subscriber_tasks_for_entity::<subscriber_tasks::Entity>(
+        context,
+        &subscriber_tasks::Column::Job,
+    );
+    skip_columns_for_entity_input(context);
+}
+
+pub fn register_subscriber_tasks_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
-    let context = builder.context;
+    builder.schema = builder.schema.register(
+        Scalar::new(subscriber_tasks::SubscriberTask::ident())
+            .description(subscriber_tasks::SubscriberTask::decl()),
+    );
+    builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
+    builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
+    builder = register_entity_default_readonly!(builder, subscriber_tasks);
+    let builder_context = builder.context;
+    {
+        builder
+            .outputs
+            .push(generate_entity_default_basic_entity_object::<
+                subscriber_tasks::Entity,
+            >(builder_context));
+    }
    {
-        let entitity_delete_mutation_builder = EntityDeleteMutationBuilder { context };
-        let delete_mutation = generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
-            context,
-            entitity_delete_mutation_builder.type_name::<subscriber_tasks::Entity>(),
-            TypeRef::named_nn(TypeRef::INT),
-            Arc::new(|resolver_ctx, app_ctx, filters| {
-                let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
-                    resolver_ctx,
-                    context,
-                    filters,
-                );
+        let delete_mutation = generate_entity_delete_mutation_field::<subscriber_tasks::Entity>(
+            builder_context,
+            Arc::new(|_resolver_ctx, app_ctx, filters| {
                Box::pin(async move {
                    let db = app_ctx.db();

                    let select_subquery = subscriber_tasks::Entity::find()
                        .select_only()
                        .column(subscriber_tasks::Column::Id)
-                        .filter(filters_condition);
+                        .filter(filters);

                    let delete_query = Query::delete()
                        .from_table((ApalisSchema::Schema, ApalisJobs::Table))
@@ -59,42 +151,36 @@ pub fn register_subscriber_tasks_entity_mutations(
                    let result = db.execute(delete_statement).await?;

-                    Ok::<_, RecorderError>(Some(FieldValue::value(result.rows_affected() as i32)))
+                    Ok::<_, RecorderError>(result.rows_affected())
                })
            }),
        );
        builder.mutations.push(delete_mutation);
    }
    {
-        let entity_object_builder = EntityObjectBuilder { context };
-        let entity_query_field = EntityQueryFieldBuilder { context };
-        let entity_retry_one_mutation_name = format!(
-            "{}RetryOne",
-            entity_query_field.type_name::<subscriber_tasks::Entity>()
-        );
+        let entity_retry_one_mutation_name = get_entity_custom_mutation_field_name::<
+            subscriber_tasks::Entity,
+        >(builder_context, "RetryOne");
        let retry_one_mutation =
-            generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
-                context,
+            generate_entity_filtered_mutation_field::<subscriber_tasks::Entity, _, _>(
+                builder_context,
                entity_retry_one_mutation_name,
-                TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
-                Arc::new(|resolver_ctx, app_ctx, filters| {
-                    let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
-                        resolver_ctx,
-                        context,
-                        filters,
-                    );
+                TypeRef::named_nn(get_entity_basic_type_name::<subscriber_tasks::Entity>(
+                    builder_context,
+                )),
+                Arc::new(|_resolver_ctx, app_ctx, filters| {
                    Box::pin(async move {
                        let db = app_ctx.db();

                        let job_id = subscriber_tasks::Entity::find()
-                            .filter(filters_condition)
+                            .filter(filters)
                            .select_only()
                            .column(subscriber_tasks::Column::Id)
                            .into_tuple::<String>()
                            .one(db)
                            .await?
-                            .ok_or_else(|| RecorderError::ModelEntityNotFound {
-                                entity: "SubscriberTask".into(),
+                            .ok_or_else(|| {
+                                RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
                            })?;

                        let task = app_ctx.task();
@@ -104,8 +190,8 @@ pub fn register_subscriber_tasks_entity_mutations(
                            .filter(subscriber_tasks::Column::Id.eq(&job_id))
                            .one(db)
                            .await?
-                            .ok_or_else(|| RecorderError::ModelEntityNotFound {
-                                entity: "SubscriberTask".into(),
+                            .ok_or_else(|| {
+                                RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
                            })?;

                        Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
@@ -114,38 +200,54 @@ pub fn register_subscriber_tasks_entity_mutations(
        );
        builder.mutations.push(retry_one_mutation);
    }
-
-    builder
-}
-
-pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
-    restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
-        context,
-        &subscriber_tasks::Column::SubscriberId,
-    );
-    restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
-        context,
-        &subscriber_tasks::Column::Job,
-    );
-    convert_jsonb_output_case_for_entity::<subscriber_tasks::Entity>(
-        context,
-        &subscriber_tasks::Column::Job,
-    );
-}
-
-pub fn register_subscriber_tasks_to_schema_builder(
-    mut builder: SeaographyBuilder,
-) -> SeaographyBuilder {
-    builder.register_entity::<subscriber_tasks::Entity>(
-        <subscriber_tasks::RelatedEntity as sea_orm::Iterable>::iter()
-            .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
-            .collect(),
-    );
-    builder = builder.register_entity_dataloader_one_to_one(subscriber_tasks::Entity, tokio::spawn);
-    builder =
-        builder.register_entity_dataloader_one_to_many(subscriber_tasks::Entity, tokio::spawn);
-    builder = register_subscriber_tasks_entity_mutations(builder);
-    builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
-    builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
+    {
+        builder
+            .inputs
+            .push(generate_entity_default_insert_input_object::<
+                subscriber_tasks::Entity,
+            >(builder_context));
+        let create_one_mutation =
+            generate_entity_create_one_mutation_field::<subscriber_tasks::Entity>(
+                builder_context,
+                Arc::new(move |resolver_ctx, app_ctx, input_object| {
+                    Box::pin(async move {
+                        let active_model: Result<subscriber_tasks::ActiveModel, _> =
+                            prepare_active_model(builder_context, &input_object, resolver_ctx);
+                        let task_service = app_ctx.task();
+                        let active_model = active_model?;
+                        let db = app_ctx.db();
+                        let active_model = active_model.before_save(db, true).await?;
+                        let task = active_model.job.unwrap();
+                        let subscriber_id = active_model.subscriber_id.unwrap();
+                        if task.get_subscriber_id() != subscriber_id {
+                            Err(async_graphql::Error::new(
+                                "subscriber_id does not match with job.subscriber_id",
+                            ))?;
+                        }
+                        let task_id = task_service.add_subscriber_task(task).await?.to_string();
+                        let db = app_ctx.db();
+                        let task = subscriber_tasks::Entity::find()
+                            .filter(subscriber_tasks::Column::Id.eq(&task_id))
+                            .one(db)
+                            .await?
+                            .ok_or_else(|| {
+                                RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
+                            })?;
+                        Ok::<_, RecorderError>(task)
+                    })
+                }),
+            );
+        builder.mutations.push(create_one_mutation);
+    }

    builder
}
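The new create-one resolver enforces a single invariant before anything is queued: the task payload must belong to the authenticated subscriber that owns the row. Reduced to a standalone sketch (`get_subscriber_id` comes from `SubscriberTaskTrait`):

fn check_task_ownership(
    task: &subscriber_tasks::SubscriberTask,
    subscriber_id: i32,
) -> Result<(), async_graphql::Error> {
    // Reject payloads that try to enqueue work on behalf of another subscriber.
    if task.get_subscriber_id() != subscriber_id {
        return Err(async_graphql::Error::new(
            "subscriber_id does not match with job.subscriber_id",
        ));
    }
    Ok(())
}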
@@ -7,12 +7,22 @@ use sea_orm::{ColumnTrait, Condition, EntityTrait, Iterable, Value as SeaValue};
use seaography::{
    Builder as SeaographyBuilder, BuilderContext, FilterInfo,
    FilterOperation as SeaographqlFilterOperation, FilterType, FilterTypesMapHelper,
-    FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult, SeaographyError,
+    FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult,
};

use crate::{
    auth::{AuthError, AuthUserInfo},
-    graphql::infra::util::{get_column_key, get_entity_column_key, get_entity_key},
+    graphql::infra::{
+        custom::register_entity_default_readonly,
+        name::{
+            get_column_name, get_entity_and_column_name,
+            get_entity_create_batch_mutation_data_field_name,
+            get_entity_create_batch_mutation_field_name,
+            get_entity_create_one_mutation_data_field_name,
+            get_entity_create_one_mutation_field_name, get_entity_name,
+            get_entity_update_mutation_data_field_name, get_entity_update_mutation_field_name,
+        },
+    },
    models::subscribers,
};
@@ -82,32 +92,19 @@ where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-    let entity_key = get_entity_key::<T>(context);
-    let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
-    let column_key = get_column_key::<T>(context, column);
-    let column_name = Arc::new(context.entity_object.column_name.as_ref()(
-        &entity_key,
-        &column_key,
-    ));
-    let entity_create_one_mutation_field_name = Arc::new(format!(
-        "{}{}",
-        entity_name, context.entity_create_one_mutation.mutation_suffix
-    ));
+    let column_name = Arc::new(get_column_name::<T>(context, column));
+    let entity_create_one_mutation_field_name =
+        Arc::new(get_entity_create_one_mutation_field_name::<T>(context));
    let entity_create_one_mutation_data_field_name =
-        Arc::new(context.entity_create_one_mutation.data_field.clone());
-    let entity_create_batch_mutation_field_name = Arc::new(format!(
-        "{}{}",
-        entity_name,
-        context.entity_create_batch_mutation.mutation_suffix.clone()
-    ));
+        Arc::new(get_entity_create_one_mutation_data_field_name(context).to_string());
+    let entity_create_batch_mutation_field_name =
+        Arc::new(get_entity_create_batch_mutation_field_name::<T>(context));
    let entity_create_batch_mutation_data_field_name =
-        Arc::new(context.entity_create_batch_mutation.data_field.clone());
-    let entity_update_mutation_field_name = Arc::new(format!(
-        "{}{}",
-        entity_name, context.entity_update_mutation.mutation_suffix
-    ));
+        Arc::new(get_entity_create_batch_mutation_data_field_name(context).to_string());
+    let entity_update_mutation_field_name =
+        Arc::new(get_entity_update_mutation_field_name::<T>(context));
    let entity_update_mutation_data_field_name =
-        Arc::new(context.entity_update_mutation.data_field.clone());
+        Arc::new(get_entity_update_mutation_data_field_name(context).to_string());

    Box::new(move |context: &ResolverContext| -> GuardAction {
        match context.ctx.data::<AuthUserInfo>() {
@@ -222,11 +219,10 @@ where
                if let Some(value) = filter.get("eq") {
                    let value: i32 = value.i64()?.try_into()?;
                    if value != subscriber_id {
-                        return Err(SeaographyError::AsyncGraphQLError(
-                            async_graphql::Error::new(
-                                "subscriber_id and auth_info does not match",
-                            ),
-                        ));
+                        return Err(async_graphql::Error::new(
+                            "subscriber_id and auth_info does not match",
+                        )
+                        .into());
                    }
                }
            }
@@ -253,17 +249,10 @@ where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-    let entity_key = get_entity_key::<T>(context);
-    let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
-    let entity_create_one_mutation_field_name = Arc::new(format!(
-        "{}{}",
-        entity_name, context.entity_create_one_mutation.mutation_suffix
-    ));
-    let entity_create_batch_mutation_field_name = Arc::new(format!(
-        "{}{}",
-        entity_name,
-        context.entity_create_batch_mutation.mutation_suffix.clone()
-    ));
+    let entity_create_one_mutation_field_name =
+        Arc::new(get_entity_create_one_mutation_field_name::<T>(context));
+    let entity_create_batch_mutation_field_name =
+        Arc::new(get_entity_create_batch_mutation_field_name::<T>(context));

    Box::new(
        move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
            let field_name = context.field().name();
@@ -289,40 +278,39 @@ where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-    let entity_key = get_entity_key::<T>(context);
-    let entity_column_key = get_entity_column_key::<T>(context, column);
+    let entity_and_column = get_entity_and_column_name::<T>(context, column);

    context.guards.entity_guards.insert(
-        entity_key.clone(),
+        get_entity_name::<T>(context),
        guard_entity_with_subscriber_id::<T>(context, column),
    );
    context.guards.field_guards.insert(
-        entity_column_key.clone(),
+        get_entity_and_column_name::<T>(context, column),
        guard_field_with_subscriber_id::<T>(context, column),
    );
    context.filter_types.overwrites.insert(
-        entity_column_key.clone(),
+        get_entity_and_column_name::<T>(context, column),
        Some(FilterType::Custom(
            SUBSCRIBER_ID_FILTER_INFO.type_name.clone(),
        )),
    );
    context.filter_types.condition_functions.insert(
-        entity_column_key.clone(),
+        entity_and_column.clone(),
        generate_subscriber_id_filter_condition::<T>(context, column),
    );
    context.types.input_none_conversions.insert(
-        entity_column_key.clone(),
+        entity_and_column.clone(),
        generate_default_subscriber_id_input_conversion::<T>(context, column),
    );
-    context.entity_input.update_skips.push(entity_column_key);
+    context.entity_input.update_skips.push(entity_and_column);
}

pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscribers::Entity>(context, &subscribers::Column::Id);
    for column in subscribers::Column::iter() {
        if !matches!(column, subscribers::Column::Id) {
-            let key = get_entity_column_key::<subscribers::Entity>(context, &column);
+            let key = get_entity_and_column_name::<subscribers::Entity>(context, &column);
            context.filter_types.overwrites.insert(key, None);
        }
    }
@@ -330,24 +318,14 @@ pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
pub fn register_subscribers_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    {
-        let filter_types_map_helper = FilterTypesMapHelper {
-            context: builder.context,
-        };
        builder.schema = builder
            .schema
-            .register(filter_types_map_helper.generate_filter_input(&SUBSCRIBER_ID_FILTER_INFO));
+            .register(FilterTypesMapHelper::generate_filter_input(
+                &SUBSCRIBER_ID_FILTER_INFO,
+            ));
    }
-    {
-        builder.register_entity::<subscribers::Entity>(
-            <subscribers::RelatedEntity as sea_orm::Iterable>::iter()
-                .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
-                .collect(),
-        );
-        builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
-        builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
-    }
+    builder = register_entity_default_readonly!(builder, subscribers);

    builder
}
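The custom subscriber_id filter ultimately reduces to one comparison: the only `eq` value a caller may pass is their own id. As a standalone sketch:

fn check_subscriber_eq_filter(
    value: i64,
    subscriber_id: i32,
) -> Result<(), async_graphql::Error> {
    let value: i32 = value
        .try_into()
        .map_err(|_| async_graphql::Error::new("subscriber_id out of range"))?;
    if value != subscriber_id {
        return Err(async_graphql::Error::new(
            "subscriber_id and auth_info does not match",
        ));
    }
    Ok(())
}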
@@ -1,7 +1,11 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
-    graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_bangumi,
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::custom::register_entity_default_writable,
+    },
+    models::subscription_bangumi,
};

pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
@@ -14,7 +18,7 @@ pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
pub fn register_subscription_bangumi_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
-    seaography::register_entity!(builder, subscription_bangumi);
+    builder = register_entity_default_writable!(builder, subscription_bangumi, false);
    builder
}
@@ -1,7 +1,11 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
-    graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_episode,
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::custom::register_entity_default_writable,
+    },
+    models::subscription_episode,
};

pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
@@ -14,7 +18,7 @@ pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
pub fn register_subscription_episode_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
-    seaography::register_entity!(builder, subscription_episode);
+    builder = register_entity_default_writable!(builder, subscription_episode, false);
    builder
}
@@ -1,23 +1,11 @@
-use std::sync::Arc;
-
-use async_graphql::dynamic::{FieldValue, TypeRef};
-use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
-use seaography::{
-    Builder as SeaographyBuilder, BuilderContext, EntityObjectBuilder, EntityQueryFieldBuilder,
-    get_filter_conditions,
-};
+use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
-    errors::RecorderError,
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
-        infra::custom::generate_entity_filter_mutation_field,
+        infra::custom::register_entity_default_writable,
    },
-    models::{
-        subscriber_tasks,
-        subscriptions::{self, SubscriptionTrait},
-    },
-    task::SubscriberTask,
+    models::subscriptions,
};

pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) {
@@ -31,184 +19,6 @@ pub fn register_subscriptions_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.register_enumeration::<subscriptions::SubscriptionCategory>();
-    seaography::register_entity!(builder, subscriptions);
+    builder = register_entity_default_writable!(builder, subscriptions, false);
let context = builder.context;
let entity_object_builder = EntityObjectBuilder { context };
let entity_query_field = EntityQueryFieldBuilder { context };
{
let sync_one_feeds_incremental_mutation_name = format!(
"{}SyncOneFeedsIncremental",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_feeds_incremental_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_incremental_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsIncremental(
subscription.into(),
),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_feeds_incremental_mutation);
}
{
let sync_one_feeds_full_mutation_name = format!(
"{}SyncOneFeedsFull",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_feeds_full_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_full_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsFull(subscription.into()),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_feeds_full_mutation);
}
{
let sync_one_sources_mutation_name = format!(
"{}SyncOneSources",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_sources_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_sources_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionSources(subscription.into()),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_sources_mutation);
}
    builder
}
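All three hand-rolled `SyncOne*` mutations are dropped here without a named successor in this file; presumably the same work is now requested through the generic task create-one mutation registered in the subscriber tasks domain. A hypothetical client-side equivalent of the old incremental feed sync (field and enum spellings below are guesses, not confirmed by this diff):

let _sync_one_feeds_incremental = r#"
mutation {
  subscriberTasksCreateOne(data: {
    job: { taskType: sync_one_subscription_feeds_incremental, subscriptionId: 1 }
  }) {
    id
    status
  }
}
"#;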
@@ -0,0 +1,258 @@
use std::{ops::Deref, sync::Arc};
use async_graphql::dynamic::{FieldValue, Scalar, TypeRef};
use convert_case::Case;
use sea_orm::{
ActiveModelBehavior, ColumnTrait, ConnectionTrait, EntityTrait, Iterable, QueryFilter,
QuerySelect, QueryTrait, prelude::Expr, sea_query::Query,
};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, GuardAction, SeaographyError,
prepare_active_model,
};
use ts_rs::TS;
use crate::{
auth::AuthUserInfo,
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::{
custom::{
generate_entity_create_one_mutation_field,
generate_entity_default_basic_entity_object,
generate_entity_default_insert_input_object, generate_entity_delete_mutation_field,
generate_entity_filtered_mutation_field, register_entity_default_readonly,
},
json::{convert_jsonb_output_for_entity, restrict_jsonb_filter_input_for_entity},
name::{
get_entity_and_column_name, get_entity_basic_type_name,
get_entity_custom_mutation_field_name,
},
},
},
migrations::defs::{ApalisJobs, ApalisSchema},
models::system_tasks,
task::SystemTaskTrait,
};
fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in system_tasks::Column::iter() {
if matches!(
column,
system_tasks::Column::Job | system_tasks::Column::SubscriberId
) {
continue;
}
let entity_column_key =
get_entity_and_column_name::<system_tasks::Entity>(context, &column);
context.entity_input.insert_skips.push(entity_column_key);
}
}
pub fn restrict_system_tasks_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_and_column = get_entity_and_column_name::<T>(context, column);
restrict_jsonb_filter_input_for_entity::<T>(context, column);
convert_jsonb_output_for_entity::<T>(context, column, Some(Case::Camel));
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.guards.field_guards.insert(
entity_column_name.clone(),
Box::new(|_resolver_ctx| {
GuardAction::Block(Some(
"SystemTask can not be created by subscribers now".to_string(),
))
}),
);
context.types.input_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(system_tasks::SystemTask::ident().into()),
);
context.types.output_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(system_tasks::SystemTask::ident().into()),
);
context.types.input_conversions.insert(
entity_column_name.clone(),
Box::new(move |resolve_context, value_accessor| {
let task: system_tasks::SystemTaskInput = value_accessor.deserialize()?;
let subscriber_id = resolve_context
.data::<AuthUserInfo>()?
.subscriber_auth
.subscriber_id;
let task = system_tasks::SystemTask::from_input(task, Some(subscriber_id));
let json_value = serde_json::to_value(task).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_name}"),
)
})?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}),
);
context.entity_input.update_skips.push(entity_and_column);
}
pub fn register_system_tasks_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<system_tasks::Entity>(
context,
&system_tasks::Column::SubscriberId,
);
restrict_system_tasks_for_entity::<system_tasks::Entity>(context, &system_tasks::Column::Job);
skip_columns_for_entity_input(context);
}
pub fn register_system_tasks_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.schema = builder.schema.register(
Scalar::new(system_tasks::SystemTask::ident())
.description(system_tasks::SystemTask::decl()),
);
builder.register_enumeration::<system_tasks::SystemTaskType>();
builder.register_enumeration::<system_tasks::SystemTaskStatus>();
builder = register_entity_default_readonly!(builder, system_tasks);
let builder_context = builder.context;
{
builder
.outputs
.push(generate_entity_default_basic_entity_object::<
system_tasks::Entity,
>(builder_context));
}
{
let delete_mutation = generate_entity_delete_mutation_field::<system_tasks::Entity>(
builder_context,
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let select_subquery = system_tasks::Entity::find()
.select_only()
.column(system_tasks::Column::Id)
.filter(filters);
let delete_query = Query::delete()
.from_table((ApalisSchema::Schema, ApalisJobs::Table))
.and_where(
Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()),
)
.to_owned();
let db_backend = db.deref().get_database_backend();
let delete_statement = db_backend.build(&delete_query);
let result = db.execute(delete_statement).await?;
Ok::<_, RecorderError>(result.rows_affected())
})
}),
);
builder.mutations.push(delete_mutation);
}
{
let entity_retry_one_mutation_name = get_entity_custom_mutation_field_name::<
system_tasks::Entity,
>(builder_context, "RetryOne");
let retry_one_mutation =
generate_entity_filtered_mutation_field::<system_tasks::Entity, _, _>(
builder_context,
entity_retry_one_mutation_name,
TypeRef::named_nn(get_entity_basic_type_name::<system_tasks::Entity>(
builder_context,
)),
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let job_id = system_tasks::Entity::find()
.filter(filters)
.select_only()
.column(system_tasks::Column::Id)
.into_tuple::<String>()
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<system_tasks::Entity>()
})?;
let task = app_ctx.task();
task.retry_subscriber_task(job_id.clone()).await?;
let task_model = system_tasks::Entity::find()
.filter(system_tasks::Column::Id.eq(&job_id))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<system_tasks::Entity>()
})?;
Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(retry_one_mutation);
}
{
builder
.inputs
.push(generate_entity_default_insert_input_object::<
system_tasks::Entity,
>(builder_context));
let create_one_mutation = generate_entity_create_one_mutation_field::<system_tasks::Entity>(
builder_context,
Arc::new(move |resolver_ctx, app_ctx, input_object| {
Box::pin(async move {
let active_model: Result<system_tasks::ActiveModel, _> =
prepare_active_model(builder_context, &input_object, resolver_ctx);
let task_service = app_ctx.task();
let active_model = active_model?;
let db = app_ctx.db();
let active_model = active_model.before_save(db, true).await?;
let task = active_model.job.unwrap();
let subscriber_id = active_model.subscriber_id.unwrap();
if task.get_subscriber_id() != subscriber_id {
Err(async_graphql::Error::new(
"subscriber_id does not match with job.subscriber_id",
))?;
}
let task_id = task_service.add_system_task(task).await?.to_string();
let db = app_ctx.db();
let task = system_tasks::Entity::find()
.filter(system_tasks::Column::Id.eq(&task_id))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<system_tasks::Entity>()
})?;
Ok::<_, RecorderError>(task)
})
}),
);
builder.mutations.push(create_one_mutation);
}
builder
}
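The `job` input conversion above is the trust boundary for system tasks: the GraphQL value is deserialized into a typed task, stamped with the authenticated subscriber id, and only then serialized back into the JSON column. The same steps as an isolated fragment (a sketch; error handling elided):

// `task_input` is the value deserialized from the GraphQL argument;
// `subscriber_id` comes from `AuthUserInfo`, never from the client payload.
let task = system_tasks::SystemTask::from_input(task_input, Some(subscriber_id));
let json_value = serde_json::to_value(&task)?;
let column_value = sea_orm::Value::Json(Some(Box::new(json_value)));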
@@ -4,10 +4,7 @@ use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::{EntityTrait, Value as SeaValue};
use seaography::{BuilderContext, SeaResult};

-use crate::{
-    app::AppContextTrait,
-    graphql::infra::util::{get_column_key, get_entity_key},
-};
+use crate::{app::AppContextTrait, graphql::infra::name::get_entity_and_column_name};

pub fn register_crypto_column_input_conversion_to_schema_context<T>(
    context: &mut BuilderContext,
@@ -17,13 +14,8 @@ pub fn register_crypto_column_input_conversion_to_schema_context<T>(
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-    let entity_key = get_entity_key::<T>(context);
-    let column_name = get_column_key::<T>(context, column);
-    let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
-    let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
    context.types.input_conversions.insert(
-        format!("{entity_name}.{column_name}"),
+        get_entity_and_column_name::<T>(context, column),
        Box::new(
            move |_resolve_context: &ResolverContext<'_>,
                  value: &ValueAccessor|
@@ -44,13 +36,8 @@ pub fn register_crypto_column_output_conversion_to_schema_context<T>(
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-    let entity_key = get_entity_key::<T>(context);
-    let column_name = get_column_key::<T>(context, column);
-    let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
-    let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
    context.types.output_conversions.insert(
-        format!("{entity_name}.{column_name}"),
+        get_entity_and_column_name::<T>(context, column),
        Box::new(
            move |value: &sea_orm::Value| -> SeaResult<async_graphql::Value> {
                if let SeaValue::String(s) = value {
@@ -1,53 +1,140 @@
-use std::{pin::Pin, sync::Arc};
+use std::{iter::FusedIterator, pin::Pin, sync::Arc};

use async_graphql::dynamic::{
-    Field, FieldFuture, FieldValue, InputValue, ResolverContext, TypeRef, ValueAccessor,
+    Field, FieldFuture, FieldValue, InputObject, InputValue, Object, ObjectAccessor,
+    ResolverContext, TypeRef,
};
-use sea_orm::EntityTrait;
-use seaography::{BuilderContext, EntityObjectBuilder, FilterInputBuilder, GuardAction};
+use sea_orm::{ActiveModelTrait, Condition, EntityTrait, IntoActiveModel};
+use seaography::{
+    Builder as SeaographyBuilder, BuilderContext, EntityCreateBatchMutationBuilder,
+    EntityCreateOneMutationBuilder, EntityDeleteMutationBuilder, EntityInputBuilder,
+    EntityObjectBuilder, EntityUpdateMutationBuilder, GuardAction, RelationBuilder,
+    get_filter_conditions,
+};

-use crate::{app::AppContextTrait, errors::RecorderResult};
+use crate::{
+    app::AppContextTrait,
+    errors::RecorderResult,
+    graphql::infra::name::{
+        get_entity_filter_input_type_name, get_entity_name,
+        get_entity_renormalized_filter_field_name,
+    },
+};

pub type FilterMutationFn = Arc<
    dyn for<'a> Fn(
            &ResolverContext<'a>,
            Arc<dyn AppContextTrait>,
-            Option<ValueAccessor<'_>>,
+            Condition,
        ) -> Pin<
            Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>,
        > + Send
        + Sync,
>;

-pub fn generate_entity_filter_mutation_field<T, N, R>(
+pub type CreateOneMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
ObjectAccessor<'a>,
) -> Pin<Box<dyn Future<Output = RecorderResult<M>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type CreateBatchMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Vec<ObjectAccessor<'a>>,
) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type UpdateMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Condition,
ObjectAccessor<'a>,
) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type DeleteMutationFn = Arc<
dyn for<'a> Fn(
&ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Condition,
) -> Pin<Box<dyn Future<Output = RecorderResult<u64>> + Send + 'a>>
+ Send
+ Sync,
>;
pub fn generate_entity_default_insert_input_object<T>(context: &BuilderContext) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
EntityInputBuilder::insert_input_object::<T>(context)
}
pub fn generate_entity_default_update_input_object<T>(context: &BuilderContext) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
EntityInputBuilder::update_input_object::<T>(context)
}
pub fn generate_entity_default_basic_entity_object<T>(context: &'static BuilderContext) -> Object
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_object_builder = EntityObjectBuilder { context };
entity_object_builder.basic_to_object::<T>()
}
pub fn generate_entity_input_object<T>(
context: &'static BuilderContext,
is_insert: bool,
) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
if is_insert {
EntityInputBuilder::insert_input_object::<T>(context)
} else {
EntityInputBuilder::update_input_object::<T>(context)
}
}
pub fn generate_entity_filtered_mutation_field<E, N, R>(
    builder_context: &'static BuilderContext,
    field_name: N,
    type_ref: R,
    mutation_fn: FilterMutationFn,
) -> Field
where
-    T: EntityTrait,
-    <T as EntityTrait>::Model: Sync,
+    E: EntityTrait,
+    <E as EntityTrait>::Model: Sync,
    N: Into<String>,
    R: Into<TypeRef>,
{
-    let entity_filter_input_builder = FilterInputBuilder {
-        context: builder_context,
-    };
-    let entity_object_builder = EntityObjectBuilder {
-        context: builder_context,
-    };
-    let object_name: String = entity_object_builder.type_name::<T>();
-    let context = builder_context;
+    let object_name: String = get_entity_name::<E>(builder_context);
    let guard = builder_context.guards.entity_guards.get(&object_name);

-    Field::new(field_name, type_ref, move |ctx| {
+    Field::new(field_name, type_ref, move |resolve_context| {
        let mutation_fn = mutation_fn.clone();
        FieldFuture::new(async move {
            let guard_flag = if let Some(guard) = guard {
-                (*guard)(&ctx)
+                (*guard)(&resolve_context)
            } else {
                GuardAction::Allow
            };
@@ -58,19 +145,297 @@ where
                ));
            }

-            let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
-            let filters = ctx.args.get(&context.entity_delete_mutation.filter_field);
-            let result = mutation_fn(&ctx, app_ctx.clone(), filters)
-                .await
-                .map_err(async_graphql::Error::new_with_source)?;
+            let filters = resolve_context
+                .args
+                .get(get_entity_renormalized_filter_field_name());
+            let filters = get_filter_conditions::<E>(&resolve_context, builder_context, filters);
+            let app_ctx = resolve_context.data::<Arc<dyn AppContextTrait>>()?;
+
+            let result = mutation_fn(&resolve_context, app_ctx.clone(), filters).await?;
            Ok(result)
        })
    })
    .argument(InputValue::new(
-        &context.entity_delete_mutation.filter_field,
-        TypeRef::named(entity_filter_input_builder.type_name(&object_name)),
+        get_entity_renormalized_filter_field_name(),
+        TypeRef::named(get_entity_filter_input_type_name::<E>(builder_context)),
    ))
}
pub fn generate_entity_create_one_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: CreateOneMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_create_one_mutation_builder = EntityCreateOneMutationBuilder {
context: builder_context,
};
entity_create_one_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, input_object| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(resolver_ctx, app_ctx.clone(), input_object).await?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_create_one_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_create_one_mutation_builder = EntityCreateOneMutationBuilder {
context: builder_context,
};
entity_create_one_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_create_batch_mutation_field<E, ID>(
builder_context: &'static BuilderContext,
mutation_fn: CreateBatchMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_create_batch_mutation_builder = EntityCreateBatchMutationBuilder {
context: builder_context,
};
entity_create_batch_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, input_objects| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(resolver_ctx, app_ctx.clone(), input_objects).await?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_create_batch_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_create_batch_mutation_builder = EntityCreateBatchMutationBuilder {
context: builder_context,
};
entity_create_batch_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_update_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: UpdateMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_update_mutation_builder = EntityUpdateMutationBuilder {
context: builder_context,
};
entity_update_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, filters, input_object| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(
resolver_ctx,
app_ctx.clone(),
get_filter_conditions::<E>(resolver_ctx, builder_context, filters),
input_object,
)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_update_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_update_mutation_builder = EntityUpdateMutationBuilder {
context: builder_context,
};
entity_update_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_delete_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: DeleteMutationFn,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_delete_mutation_builder = EntityDeleteMutationBuilder {
context: builder_context,
};
entity_delete_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, filters| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(
resolver_ctx,
app_ctx.clone(),
get_filter_conditions::<E>(resolver_ctx, builder_context, filters),
)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_delete_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_delete_mutation_builder = EntityDeleteMutationBuilder {
context: builder_context,
};
entity_delete_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn register_entity_default_mutations<E, A>(
mut builder: SeaographyBuilder,
active_model_hooks: bool,
) -> SeaographyBuilder
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let builder_context = builder.context;
builder
.outputs
.push(generate_entity_default_basic_entity_object::<E>(
builder_context,
));
builder.inputs.extend([
generate_entity_default_insert_input_object::<E>(builder_context),
generate_entity_default_update_input_object::<E>(builder_context),
]);
builder.mutations.extend([
generate_entity_default_create_one_mutation_field::<E, A>(
builder_context,
active_model_hooks,
),
generate_entity_default_create_batch_mutation_field::<E, A>(
builder_context,
active_model_hooks,
),
generate_entity_default_update_mutation_field::<E, A>(builder_context, active_model_hooks),
generate_entity_default_delete_mutation_field::<E, A>(builder_context, active_model_hooks),
]);
builder
}
pub(crate) fn register_entity_default_readonly_impl<T, RE, I>(
mut builder: SeaographyBuilder,
entity: T,
) -> SeaographyBuilder
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
RE: sea_orm::Iterable<Iterator = I> + RelationBuilder,
I: Iterator<Item = RE> + Clone + DoubleEndedIterator + ExactSizeIterator + FusedIterator,
{
builder.register_entity::<T>(
<RE as sea_orm::Iterable>::iter()
.map(|rel| RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(entity, tokio::spawn);
builder = builder.register_entity_dataloader_one_to_many(entity, tokio::spawn);
builder
}
pub(crate) fn register_entity_default_writable_impl<T, RE, A, I>(
mut builder: SeaographyBuilder,
entity: T,
active_model_hooks: bool,
) -> SeaographyBuilder
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = T> + sea_orm::ActiveModelBehavior + std::marker::Send,
RE: sea_orm::Iterable<Iterator = I> + RelationBuilder,
I: Iterator<Item = RE> + Clone + DoubleEndedIterator + ExactSizeIterator + FusedIterator,
{
builder = register_entity_default_readonly_impl::<T, RE, I>(builder, entity);
builder = register_entity_default_mutations::<T, A>(builder, active_model_hooks);
builder
}
macro_rules! register_entity_default_readonly {
($builder:expr, $module_path:ident) => {
$crate::graphql::infra::custom::register_entity_default_readonly_impl::<
$module_path::Entity,
$module_path::RelatedEntity,
_,
>($builder, $module_path::Entity)
};
}
macro_rules! register_entity_default_writable {
($builder:expr, $module_path:ident, $active_model_hooks:expr) => {
$crate::graphql::infra::custom::register_entity_default_writable_impl::<
$module_path::Entity,
$module_path::RelatedEntity,
$module_path::ActiveModel,
_,
>($builder, $module_path::Entity, $active_model_hooks)
};
}
pub(crate) use register_entity_default_readonly;
pub(crate) use register_entity_default_writable;
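
Note: for reference, a call site for these macros in a domain register function might look like the following sketch (the `subscribers` module name and the surrounding function are illustrative assumptions, not part of this diff):

pub fn register_subscribers_to_schema_builder(builder: SeaographyBuilder) -> SeaographyBuilder {
    // Expands to register_entity_default_writable_impl::<subscribers::Entity,
    // subscribers::RelatedEntity, subscribers::ActiveModel, _>(builder, subscribers::Entity, true).
    // Pass `false` to skip ActiveModelBehavior hooks, or use
    // register_entity_default_readonly! to omit the generated mutations entirely.
    register_entity_default_writable!(builder, subscribers, true)
}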

View File

@ -17,7 +17,7 @@ use serde::{Serialize, de::DeserializeOwned};
 use serde_json::Value as JsonValue;

 use crate::{
-    errors::RecorderResult, graphql::infra::util::get_entity_column_key,
+    errors::RecorderResult, graphql::infra::name::get_entity_and_column_name,
     utils::json::convert_json_keys,
 };
@ -911,18 +911,15 @@
     Box::new(
         move |_resolve_context: &ResolverContext<'_>, condition, filter| {
             if let Some(filter) = filter {
-                let filter_value = to_value(filter.as_index_map()).map_err(|e| {
-                    SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e))
-                })?;
+                let filter_value =
+                    to_value(filter.as_index_map()).map_err(GraphqlError::new_with_source)?;

-                let filter_json: JsonValue = filter_value.into_json().map_err(|e| {
-                    SeaographyError::AsyncGraphQLError(GraphqlError::new(format!("{e:?}")))
-                })?;
+                let filter_json: JsonValue = filter_value
+                    .into_json()
+                    .map_err(GraphqlError::new_with_source)?;

                 let cond_where = prepare_jsonb_filter_input(&Expr::col(column), filter_json)
-                    .map_err(|e| {
-                        SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e))
-                    })?;
+                    .map_err(GraphqlError::new_with_source)?;

                 let condition = condition.add(cond_where);
                 Ok(condition)
@ -946,65 +943,76 @@
     T: EntityTrait,
     <T as EntityTrait>::Model: Sync,
 {
-    let entity_column_key = get_entity_column_key::<T>(context, column);
+    let entity_column_name = get_entity_and_column_name::<T>(context, column);
     context.filter_types.overwrites.insert(
-        entity_column_key.clone(),
+        get_entity_and_column_name::<T>(context, column),
         Some(FilterType::Custom(JSONB_FILTER_NAME.to_string())),
     );
+    context.filter_types.condition_functions.insert(
+        entity_column_name.clone(),
+        generate_jsonb_filter_condition_function::<T>(context, column),
+    );
 }

-pub fn validate_jsonb_input_for_entity<T, S>(context: &mut BuilderContext, column: &T::Column)
-where
+pub fn try_convert_jsonb_input_for_entity<T, S>(
+    context: &mut BuilderContext,
+    column: &T::Column,
+    case: Option<Case<'static>>,
+) where
     T: EntityTrait,
     <T as EntityTrait>::Model: Sync,
     S: DeserializeOwned + Serialize,
 {
-    let entity_column_key = get_entity_column_key::<T>(context, column);
+    let entity_column_name = get_entity_and_column_name::<T>(context, column);
     context.types.input_conversions.insert(
-        entity_column_key.clone(),
+        entity_column_name.clone(),
         Box::new(move |_resolve_context, accessor| {
-            let deserialized = accessor.deserialize::<S>().map_err(|err| {
-                SeaographyError::TypeConversionError(
-                    err.message,
-                    format!("Json - {entity_column_key}"),
-                )
-            })?;
+            let mut json_value: serde_json::Value = accessor.deserialize()?;
+
+            if let Some(case) = case {
+                json_value = convert_json_keys(json_value, case);
+            }

-            let json_value = serde_json::to_value(deserialized).map_err(|err| {
+            serde_json::from_value::<S>(json_value.clone()).map_err(|err| {
                 SeaographyError::TypeConversionError(
                     err.to_string(),
-                    format!("Json - {entity_column_key}"),
+                    format!("Json - {entity_column_name}"),
                 )
             })?;

             Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
         }),
     );
 }

-pub fn convert_jsonb_output_case_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
-where
+pub fn convert_jsonb_output_for_entity<T>(
+    context: &mut BuilderContext,
+    column: &T::Column,
+    case: Option<Case<'static>>,
+) where
     T: EntityTrait,
     <T as EntityTrait>::Model: Sync,
 {
-    let entity_column_key = get_entity_column_key::<T>(context, column);
+    let entity_column_name = get_entity_and_column_name::<T>(context, column);
     context.types.output_conversions.insert(
-        entity_column_key.clone(),
+        entity_column_name.clone(),
         Box::new(move |value| {
             if let sea_orm::Value::Json(Some(json)) = value {
-                let result = async_graphql::Value::from_json(convert_json_keys(
-                    json.as_ref().clone(),
-                    Case::Camel,
-                ))
-                .map_err(|err| {
+                let mut json_value = json.as_ref().clone();
+                if let Some(case) = case {
+                    json_value = convert_json_keys(json_value, case);
+                }
+                let result = async_graphql::Value::from_json(json_value).map_err(|err| {
                     SeaographyError::TypeConversionError(
                         err.to_string(),
-                        format!("Json - {entity_column_key}"),
+                        format!("Json - {entity_column_name}"),
                     )
                 })?;
                 Ok(result)
             } else {
                 Err(SeaographyError::TypeConversionError(
                     "value should be json".to_string(),
-                    format!("Json - {entity_column_key}"),
+                    format!("Json - {entity_column_name}"),
                 ))
             }
         }),

View File

@ -1,4 +1,4 @@
 pub mod crypto;
 pub mod custom;
 pub mod json;
-pub mod util;
+pub mod name;

View File

@ -0,0 +1,203 @@
use std::fmt::Display;
use sea_orm::{EntityName, EntityTrait, IdenStatic};
use seaography::BuilderContext;
pub fn get_entity_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let t = T::default();
let name = <T as EntityName>::table_name(&t);
context.entity_object.type_name.as_ref()(name)
}
pub fn get_column_name<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.entity_object.column_name.as_ref()(&entity_name, column.as_str())
}
pub fn get_entity_and_column_name<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
let column_name = get_column_name::<T>(context, column);
format!("{entity_name}.{column_name}")
}
pub fn get_entity_and_column_name_from_column_str<T>(
context: &BuilderContext,
column_str: &str,
) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}.{column_str}")
}
pub fn get_entity_basic_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let t = T::default();
let name = <T as EntityName>::table_name(&t);
format!(
"{}{}",
context.entity_object.type_name.as_ref()(name),
context.entity_object.basic_type_suffix
)
}
pub fn get_entity_query_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.entity_query_field.type_name.as_ref()(&entity_name)
}
pub fn get_entity_filter_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.filter_input.type_name.as_ref()(&entity_name)
}
pub fn get_entity_insert_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}{}", context.entity_input.insert_suffix)
}
pub fn get_entity_update_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}{}", context.entity_input.update_suffix)
}
pub fn get_entity_create_one_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_create_one_mutation.mutation_suffix
)
}
pub fn get_entity_create_batch_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_create_batch_mutation.mutation_suffix
)
}
pub fn get_entity_delete_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_delete_mutation.mutation_suffix
)
}
pub fn get_entity_update_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_update_mutation.mutation_suffix
)
}
pub fn get_entity_custom_mutation_field_name<T>(
context: &BuilderContext,
mutation_suffix: impl Display,
) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!("{query_field_name}{mutation_suffix}")
}
pub fn get_entity_renormalized_filter_field_name() -> &'static str {
"filter"
}
pub fn get_entity_query_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_query_field.filters
}
pub fn get_entity_update_mutation_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_update_mutation.filter_field
}
pub fn get_entity_delete_mutation_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_delete_mutation.filter_field
}
pub fn renormalize_filter_field_names_to_schema_context(context: &mut BuilderContext) {
let renormalized_filter_field_name = get_entity_renormalized_filter_field_name();
context.entity_query_field.filters = renormalized_filter_field_name.to_string();
context.entity_update_mutation.filter_field = renormalized_filter_field_name.to_string();
context.entity_delete_mutation.filter_field = renormalized_filter_field_name.to_string();
}
pub fn get_entity_renormalized_data_field_name() -> &'static str {
"data"
}
pub fn get_entity_create_one_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_create_one_mutation.data_field
}
pub fn get_entity_create_batch_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_create_batch_mutation.data_field
}
pub fn get_entity_update_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_update_mutation.data_field
}
pub fn renormalize_data_field_names_to_schema_context(context: &mut BuilderContext) {
let renormalized_data_field_name = get_entity_renormalized_data_field_name();
context.entity_create_one_mutation.data_field = renormalized_data_field_name.to_string();
context.entity_create_batch_mutation.data_field = renormalized_data_field_name.to_string();
context.entity_update_mutation.data_field = renormalized_data_field_name.to_string();
}
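
Note: as a small usage sketch (placement assumed; the build_schema diff below does exactly this), the renormalize helpers point every generated operation at the same argument names instead of seaography's per-operation defaults:

let mut context = BuilderContext::default();
// After these calls, query/update/delete all take `filter` and
// create/update all take `data`, so generated operations share
// uniform argument names across the schema.
renormalize_filter_field_names_to_schema_context(&mut context);
renormalize_data_field_names_to_schema_context(&mut context);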

View File

@ -1,30 +0,0 @@
use sea_orm::{EntityName, EntityTrait, IdenStatic};
use seaography::BuilderContext;
pub fn get_entity_key<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
context.entity_object.type_name.as_ref()(<T as EntityName>::table_name(&T::default()))
}
pub fn get_column_key<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_key::<T>(context);
context.entity_object.column_name.as_ref()(&entity_name, column.as_str())
}
pub fn get_entity_column_key<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
format!("{}.{}", &entity_name, &column_name)
}

View File

@ -12,6 +12,7 @@ use crate::{
         credential_3rd::{
             register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context,
         },
+        cron::{register_cron_to_schema_builder, register_cron_to_schema_context},
         downloaders::{
             register_downloaders_to_schema_builder, register_downloaders_to_schema_context,
         },
@ -38,8 +39,17 @@ use crate::{
         subscriptions::{
             register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context,
         },
+        system_tasks::{
+            register_system_tasks_to_schema_builder, register_system_tasks_to_schema_context,
+        },
+    },
+    infra::{
+        json::register_jsonb_input_filter_to_schema_builder,
+        name::{
+            renormalize_data_field_names_to_schema_context,
+            renormalize_filter_field_names_to_schema_context,
+        },
     },
-    infra::json::register_jsonb_input_filter_to_schema_builder,
 },
};
@ -55,6 +65,9 @@ pub fn build_schema(
     let context = CONTEXT.get_or_init(|| {
         let mut context = BuilderContext::default();
+        renormalize_filter_field_names_to_schema_context(&mut context);
+        renormalize_data_field_names_to_schema_context(&mut context);
         {
             // domains
             register_feeds_to_schema_context(&mut context);
@ -68,6 +81,8 @@ pub fn build_schema(
             register_subscription_bangumi_to_schema_context(&mut context);
             register_subscription_episode_to_schema_context(&mut context);
             register_bangumi_to_schema_context(&mut context);
+            register_cron_to_schema_context(&mut context);
+            register_system_tasks_to_schema_context(&mut context);
         }
         context
     });
@ -91,6 +106,8 @@ pub fn build_schema(
         builder = register_credential3rd_to_schema_builder(builder);
         builder = register_subscriber_tasks_to_schema_builder(builder);
         builder = register_bangumi_to_schema_builder(builder);
+        builder = register_cron_to_schema_builder(builder);
+        builder = register_system_tasks_to_schema_builder(builder);
     }

     let schema = builder.schema_builder();

View File

@ -7,11 +7,11 @@
     async_fn_traits,
     error_generic_member_access,
     associated_type_defaults,
-    let_chains
+    let_chains,
+    impl_trait_in_fn_trait_return
 )]
 #![allow(clippy::enum_variant_names)]

 pub use downloader;

 pub mod app;
 pub mod auth;
 pub mod cache;
@ -27,6 +27,8 @@ pub mod migrations;
 pub mod models;
 pub mod storage;
 pub mod task;
-pub mod test_utils;
 pub mod utils;
 pub mod web;
+
+#[cfg(any(test, feature = "test-utils"))]
+pub mod test_utils;

View File

@ -1,6 +1,8 @@
 use serde::{Deserialize, Serialize};
+use ts_rs::TS;

-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, TS)]
+#[ts(rename_all = "camelCase")]
 pub enum AutoOptimizeImageFormat {
     #[serde(rename = "image/webp")]
     Webp,
@ -10,25 +12,29 @@ pub enum AutoOptimizeImageFormat {
     Jxl,
 }

-#[derive(Clone, Debug, Serialize, Deserialize, Default)]
+#[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
+#[ts(rename_all = "camelCase")]
 pub struct EncodeWebpOptions {
     pub quality: Option<f32>,
 }

-#[derive(Clone, Debug, Serialize, Deserialize, Default)]
+#[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
+#[ts(rename_all = "camelCase")]
 pub struct EncodeAvifOptions {
     pub quality: Option<u8>,
     pub speed: Option<u8>,
     pub threads: Option<u8>,
 }

-#[derive(Clone, Debug, Serialize, Deserialize, Default)]
+#[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
+#[ts(rename_all = "camelCase")]
 pub struct EncodeJxlOptions {
     pub quality: Option<f32>,
     pub speed: Option<u8>,
 }

-#[derive(Clone, Debug, Serialize, Deserialize)]
+#[derive(Clone, Debug, Serialize, Deserialize, TS, PartialEq)]
+#[ts(tag = "mimeType")]
 #[serde(tag = "mime_type")]
 pub enum EncodeImageOptions {
     #[serde(rename = "image/webp")]

View File

@ -171,6 +171,59 @@ pub enum Feeds {
    SubscriptionId,
}
#[derive(DeriveIden)]
pub enum Cron {
Table,
Id,
SubscriberId,
SubscriptionId,
CronExpr,
CronTimezone,
NextRun,
LastRun,
LastError,
Enabled,
LockedBy,
LockedAt,
TimeoutMs,
Attempts,
MaxAttempts,
Priority,
Status,
SubscriberTaskCron,
SystemTaskCron,
}
#[derive(sea_query::Iden)]
pub enum ApalisSchema {
#[iden = "apalis"]
Schema,
}
#[derive(DeriveIden)]
pub enum ApalisJobs {
#[sea_orm(iden = "jobs")]
Table,
SubscriberId,
SubscriptionId,
Job,
JobType,
Status,
TaskType,
Id,
Attempts,
MaxAttempts,
RunAt,
LastError,
LockAt,
LockBy,
DoneAt,
Priority,
CronId,
}
macro_rules! create_postgres_enum_for_active_enum { macro_rules! create_postgres_enum_for_active_enum {
($manager: expr, $active_enum: expr, $($enum_value:expr),+) => { ($manager: expr, $active_enum: expr, $($enum_value:expr),+) => {
{ {

View File

@ -52,8 +52,7 @@ impl MigrationTrait for Migration {
             subscriptions::SubscriptionCategoryEnum,
             subscriptions::SubscriptionCategory::MikanSubscriber,
             subscriptions::SubscriptionCategory::MikanBangumi,
-            subscriptions::SubscriptionCategory::MikanSeason,
-            subscriptions::SubscriptionCategory::Manual
+            subscriptions::SubscriptionCategory::MikanSeason
         )
         .await?;

View File

@ -90,6 +90,11 @@ impl MigrationTrait for Migration {
                     SimpleExpr::from(AuthType::Basic).as_enum(AuthTypeEnum),
                     seed_subscriber_id.into(),
                 ])
+                .on_conflict(
+                    OnConflict::columns([Auth::Pid, Auth::AuthType])
+                        .do_nothing()
+                        .to_owned(),
+                )
                 .to_owned(),
         )
         .await?;

View File

@ -95,6 +95,7 @@ impl MigrationTrait for Migration {
                 Table::alter()
                     .table(Subscriptions::Table)
                     .drop_column(Subscriptions::CredentialId)
+                    .drop_foreign_key("fk_subscriptions_credential_id")
                     .to_owned(),
             )
             .await?;

View File

@ -0,0 +1,221 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};
use super::defs::{ApalisJobs, ApalisSchema};
use crate::{
migrations::defs::{Subscribers, Subscriptions},
task::{
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME, SUBSCRIBER_TASK_APALIS_NAME,
SYSTEM_TASK_APALIS_NAME,
},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.add_column_if_not_exists(integer_null(ApalisJobs::SubscriberId))
.add_column_if_not_exists(integer_null(ApalisJobs::SubscriptionId))
.add_column_if_not_exists(text_null(ApalisJobs::TaskType))
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_subscriber_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::SubscriberId)
.to_tbl(Subscribers::Table)
.to_col(Subscribers::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_subscription_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::SubscriptionId)
.to_tbl(Subscriptions::Table)
.to_col(Subscriptions::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"UPDATE {apalis_schema}.{apalis_table} SET {subscriber_id} = ({job} ->> '{subscriber_id}')::integer, {task_type} = ({job} ->> '{task_type}')::text, {subscription_id} = ({job} ->> '{subscription_id}')::integer"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
job = ApalisJobs::Job.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME}
BEFORE INSERT OR UPDATE ON {apalis_schema}.{apalis_table}
FOR EACH ROW
EXECUTE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}();"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string()
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
.await?;
db.execute_unprepared("DROP VIEW IF EXISTS system_tasks")
.await?;
db.execute_unprepared(&format!(
r#"DROP TRIGGER IF EXISTS {SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME} ON {apalis_schema}.{apalis_table}"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string()
)).await?;
db.execute_unprepared(&format!(
r#"DROP FUNCTION IF EXISTS {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}()"#,
apalis_schema = ApalisSchema::Schema.to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.drop_foreign_key("fk_apalis_jobs_subscriber_id")
.drop_foreign_key("fk_apalis_jobs_subscription_id")
.drop_column(ApalisJobs::SubscriberId)
.drop_column(ApalisJobs::SubscriptionId)
.to_owned(),
)
.await?;
Ok(())
}
}
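
Note: an illustrative check of the sync trigger's behavior (an assumption sketch, not part of the migration): because it runs BEFORE INSERT OR UPDATE, rewriting the JSONB payload alone is enough to refresh the mirrored columns.

// Hypothetical example: bump subscriber_id inside the JSONB payload...
db.execute_unprepared(
    r#"UPDATE apalis.jobs SET job = jsonb_set(job, '{subscriber_id}', '42')"#,
)
.await?;
// ...and the trigger copies 42 into the plain subscriber_id column, so the
// subscriber_tasks/system_tasks views and the new foreign keys observe it.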

View File

@ -1,64 +0,0 @@
use async_trait::async_trait;
use sea_orm_migration::prelude::*;
use crate::task::SUBSCRIBER_TASK_APALIS_NAME;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
job,
job_type,
status,
(job ->> 'subscriber_id'::text)::integer AS subscriber_id,
job ->> 'task_type'::text AS task_type,
id,
attempts,
max_attempts,
run_at,
last_error,
lock_at,
lock_by,
done_at,
priority
FROM apalis.jobs
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscriber_id
ON apalis.jobs (((job -> 'subscriber_id')::integer))
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(
r#"DROP INDEX IF EXISTS idx_apalis_jobs_subscriber_id
ON apalis.jobs"#,
)
.await?;
db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
.await?;
Ok(())
}
}

View File

@ -15,6 +15,8 @@ pub struct Migration;
 #[async_trait]
 impl MigrationTrait for Migration {
     async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
+        let db = manager.get_connection();
+
         create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan).await?;

         {
@ -29,11 +31,17 @@ impl MigrationTrait for Migration {
                         BangumiTypeEnum,
                         BangumiType::iden_values(),
                     ))
-                    .drop_column(Bangumi::SavePath)
                     .to_owned(),
             )
             .await?;

+        db.execute_unprepared(&format!(
+            r#"ALTER TABLE {bangumi} DROP COLUMN IF EXISTS {save_path}"#,
+            bangumi = Bangumi::Table.to_string(),
+            save_path = Bangumi::SavePath.to_string(),
+        ))
+        .await?;
+
         manager
             .exec_stmt(
                 UpdateStatement::new()
@ -83,11 +91,17 @@ impl MigrationTrait for Migration {
                     .add_column_if_not_exists(big_integer_null(
                         Episodes::EnclosureContentLength,
                     ))
-                    .drop_column(Episodes::SavePath)
                     .to_owned(),
             )
             .await?;

+        db.execute_unprepared(&format!(
+            r#"ALTER TABLE {episodes} DROP COLUMN IF EXISTS {save_path}"#,
+            episodes = Episodes::Table.to_string(),
+            save_path = Episodes::SavePath.to_string(),
+        ))
+        .await?;
+
         manager
             .exec_stmt(
                 UpdateStatement::new()
@ -120,10 +134,34 @@ impl MigrationTrait for Migration {
     }

     async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
+        manager
+            .alter_table(
+                Table::alter()
+                    .table(Bangumi::Table)
+                    .add_column_if_not_exists(text_null(Bangumi::SavePath))
+                    .drop_column(Bangumi::BangumiType)
+                    .to_owned(),
+            )
+            .await?;
+
         manager
             .drop_postgres_enum_for_active_enum(BangumiTypeEnum)
             .await?;

+        manager
+            .alter_table(
+                Table::alter()
+                    .table(Episodes::Table)
+                    .add_column_if_not_exists(text_null(Episodes::SavePath))
+                    .drop_column(Episodes::EpisodeType)
+                    .drop_column(Episodes::EnclosureMagnetLink)
+                    .drop_column(Episodes::EnclosureTorrentLink)
+                    .drop_column(Episodes::EnclosurePubDate)
+                    .drop_column(Episodes::EnclosureContentLength)
+                    .to_owned(),
+            )
+            .await?;
+
         manager
             .drop_postgres_enum_for_active_enum(EpisodeTypeEnum)
             .await?;

View File

@ -0,0 +1,542 @@
use async_trait::async_trait;
use sea_orm::ActiveEnum;
use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{
ApalisJobs, ApalisSchema, Cron, CustomSchemaManagerExt, GeneralIds, Subscribers,
Subscriptions, table_auto_z,
},
models::cron::{
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_DEBUG_EVENT, CRON_DUE_EVENT,
CronStatus, CronStatusEnum, NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME,
NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME,
},
task::{
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SUBSCRIBER_TASK_APALIS_NAME,
SYSTEM_TASK_APALIS_NAME,
},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
create_postgres_enum_for_active_enum!(
manager,
CronStatusEnum,
CronStatus::Pending,
CronStatus::Running,
CronStatus::Completed,
CronStatus::Failed,
CronStatus::Disabled
)
.await?;
manager
.create_table(
table_auto_z(Cron::Table)
.col(pk_auto(Cron::Id))
.col(string(Cron::CronExpr))
.col(string(Cron::CronTimezone))
.col(integer_null(Cron::SubscriberId))
.col(integer_null(Cron::SubscriptionId))
.col(timestamp_with_time_zone_null(Cron::NextRun))
.col(timestamp_with_time_zone_null(Cron::LastRun))
.col(string_null(Cron::LastError))
.col(boolean(Cron::Enabled).default(true))
.col(string_null(Cron::LockedBy))
.col(timestamp_with_time_zone_null(Cron::LockedAt))
.col(integer_null(Cron::TimeoutMs).default(5000))
.col(integer(Cron::Attempts).default(0))
.col(integer(Cron::MaxAttempts).default(1))
.col(integer(Cron::Priority).default(0))
.col(
enumeration(Cron::Status, CronStatusEnum, CronStatus::iden_values())
.default(CronStatus::Pending),
)
.col(json_binary_null(Cron::SubscriberTaskCron))
.col(json_binary_null(Cron::SystemTaskCron))
.foreign_key(
ForeignKey::create()
.name("fk_cron_subscriber_id")
.from(Cron::Table, Cron::SubscriberId)
.to(Subscribers::Table, Subscribers::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.foreign_key(
ForeignKey::create()
.name("fk_cron_subscription_id")
.from(Cron::Table, Cron::SubscriptionId)
.to(Subscriptions::Table, Subscriptions::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
manager
.create_postgres_auto_update_ts_trigger_for_col(Cron::Table, GeneralIds::UpdatedAt)
.await?;
manager
.create_index(
IndexCreateStatement::new()
.if_not_exists()
.name("idx_cron_next_run")
.table(Cron::Table)
.col(Cron::NextRun)
.to_owned(),
)
.await?;
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_subscriber_task_subscriber_id integer;
new_subscriber_task_subscription_id integer;
new_system_task_subscriber_id integer;
BEGIN
new_subscriber_task_subscriber_id = (NEW.{subscriber_task_cron} ->> 'subscriber_id')::integer;
new_subscriber_task_subscription_id = (NEW.{subscriber_task_cron} ->> 'subscription_id')::integer;
new_system_task_subscriber_id = (NEW.{system_task_cron} ->> 'subscriber_id')::integer;
IF new_subscriber_task_subscriber_id IS DISTINCT FROM (OLD.{subscriber_task_cron} ->> 'subscriber_id')::integer AND new_subscriber_task_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_subscriber_task_subscriber_id;
END IF;
IF new_subscriber_task_subscription_id IS DISTINCT FROM (OLD.{subscriber_task_cron} ->> 'subscription_id')::integer AND new_subscriber_task_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_subscriber_task_subscription_id;
END IF;
IF new_system_task_subscriber_id IS DISTINCT FROM (OLD.{system_task_cron} ->> 'subscriber_id')::integer AND new_system_task_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_system_task_subscriber_id;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
subscriber_task_cron = &Cron::SubscriberTaskCron.to_string(),
subscriber_id = &Cron::SubscriberId.to_string(),
subscription_id = &Cron::SubscriptionId.to_string(),
system_task_cron = &Cron::SystemTaskCron.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME}
BEFORE INSERT OR UPDATE ON {table}
FOR EACH ROW
EXECUTE FUNCTION {SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}();"#,
table = &Cron::Table.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME}() RETURNS trigger AS $$
BEGIN
-- Check if the cron is due to run
IF NEW.{next_run} IS NOT NULL
AND NEW.{next_run} <= CURRENT_TIMESTAMP
AND NEW.{enabled} = true
AND NEW.{status} = '{pending}'::{status_type}
AND NEW.{attempts} < NEW.{max_attempts}
-- Check if not locked or lock timeout
AND (
NEW.{locked_at} IS NULL
OR (
NEW.{timeout_ms} IS NOT NULL
AND (NEW.{locked_at} + NEW.{timeout_ms} * INTERVAL '1 millisecond') <= CURRENT_TIMESTAMP
)
)
-- Make sure the cron is a new due event, not a repeat event
AND (
OLD.{next_run} IS NULL
OR OLD.{next_run} > CURRENT_TIMESTAMP
OR OLD.{enabled} = false
OR OLD.{status} IS DISTINCT FROM '{pending}'
OR OLD.{attempts} IS DISTINCT FROM NEW.{attempts}
)
THEN
PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(NEW)::text);
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
next_run = &Cron::NextRun.to_string(),
enabled = &Cron::Enabled.to_string(),
locked_at = &Cron::LockedAt.to_string(),
timeout_ms = &Cron::TimeoutMs.to_string(),
status = &Cron::Status.to_string(),
pending = &CronStatus::Pending.to_value(),
attempts = &Cron::Attempts.to_string(),
max_attempts = &Cron::MaxAttempts.to_string(),
status_type = &CronStatus::name().to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME}
AFTER INSERT OR UPDATE ON {table}
FOR EACH ROW
EXECUTE FUNCTION {NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME}();"#,
table = &Cron::Table.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME}() RETURNS INTEGER AS $$
DECLARE
cron_record RECORD;
notification_count INTEGER := 0;
BEGIN
FOR cron_record IN
SELECT * FROM {table}
WHERE {next_run} IS NOT NULL
AND {next_run} <= CURRENT_TIMESTAMP
AND {enabled} = true
AND {status} = '{pending}'::{status_type}
AND {attempts} < {max_attempts}
AND (
{locked_at} IS NULL
OR (
{timeout_ms} IS NOT NULL
AND {locked_at} + {timeout_ms} * INTERVAL '1 millisecond' <= CURRENT_TIMESTAMP
)
)
ORDER BY {priority} ASC, {next_run} ASC
FOR UPDATE SKIP LOCKED
LOOP
-- PERFORM pg_notify('{CRON_DUE_DEBUG_EVENT}',format('Found due cron: value=%s; Now time: %s', row_to_json(cron_record)::text, CURRENT_TIMESTAMP));
PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(cron_record)::text);
notification_count := notification_count + 1;
END LOOP;
-- PERFORM pg_notify('{CRON_DUE_DEBUG_EVENT}', format('Notification count: %I; Now time: %s', notification_count, CURRENT_TIMESTAMP));
RETURN notification_count;
END;
$$ LANGUAGE plpgsql;"#,
table = &Cron::Table.to_string(),
next_run = &Cron::NextRun.to_string(),
enabled = &Cron::Enabled.to_string(),
status = &Cron::Status.to_string(),
pending = &CronStatus::Pending.to_value(),
locked_at = &Cron::LockedAt.to_string(),
timeout_ms = &Cron::TimeoutMs.to_string(),
priority = &Cron::Priority.to_string(),
attempts = &Cron::Attempts.to_string(),
max_attempts = &Cron::MaxAttempts.to_string(),
status_type = &CronStatus::name().to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.add_column_if_not_exists(integer_null(ApalisJobs::CronId))
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_cron_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::CronId)
.to_tbl(Cron::Table)
.to_col(Cron::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id},
{cron_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{cron_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"
UPDATE {apalis_schema}.{apalis_table} SET {cron_id} = ({job} ->> '{cron_id}')::integer
"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_cron_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_cron_id = (NEW.{job} ->> '{cron_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_cron_id IS DISTINCT FROM (OLD.{job} ->> '{cron_id}')::integer AND new_job_cron_id IS DISTINCT FROM NEW.{cron_id} THEN
NEW.{cron_id} = new_job_cron_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.drop_column(ApalisJobs::CronId)
.drop_foreign_key("fk_apalis_jobs_cron_id")
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"DROP TRIGGER IF EXISTS {NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME} ON {table};"#,
table = &Cron::Table.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"DROP FUNCTION IF EXISTS {NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME}();"#,
))
.await?;
db.execute_unprepared(&format!(
r#"DROP FUNCTION IF EXISTS {CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME}();"#,
))
.await?;
manager
.drop_table(
TableDropStatement::new()
.if_exists()
.table(Cron::Table)
.to_owned(),
)
.await?;
manager
.drop_postgres_enum_for_active_enum(CronStatusEnum)
.await?;
Ok(())
}
}
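
Note: the check_and_trigger_due_crons function is also callable from application code as a periodic sweep, complementing the mutation-time trigger, which only fires when a row is written. A hedged sketch of such a sweep (the wiring is assumed, not shown in this diff):

// Run on an interval so that crons which became due without any
// INSERT/UPDATE touching their row still get a cron_due notification.
db.execute_unprepared("SELECT check_and_trigger_due_crons()")
    .await?;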

View File

@ -7,9 +7,10 @@ pub mod m20220101_000001_init;
 pub mod m20240224_082543_add_downloads;
 pub mod m20241231_000001_auth;
 pub mod m20250501_021523_credential_3rd;
-pub mod m20250520_021135_subscriber_tasks;
+pub mod m20250520_021135_add_tasks;
 pub mod m20250622_015618_feeds;
 pub mod m20250622_020819_bangumi_and_episode_type;
+pub mod m20250629_065628_add_cron;

 pub struct Migrator;

@ -21,9 +22,10 @@ impl MigratorTrait for Migrator {
             Box::new(m20240224_082543_add_downloads::Migration),
             Box::new(m20241231_000001_auth::Migration),
             Box::new(m20250501_021523_credential_3rd::Migration),
-            Box::new(m20250520_021135_subscriber_tasks::Migration),
+            Box::new(m20250520_021135_add_tasks::Migration),
             Box::new(m20250622_015618_feeds::Migration),
             Box::new(m20250622_020819_bangumi_and_episode_type::Migration),
+            Box::new(m20250629_065628_add_cron::Migration),
         ]
     }
 }

View File

@ -63,7 +63,11 @@ impl Model {
             .filter(Column::Pid.eq(pid))
             .one(db)
             .await?
-            .ok_or_else(|| RecorderError::from_db_record_not_found("auth::find_by_pid"))?;
+            .ok_or_else(|| {
+                RecorderError::from_entity_not_found_detail::<Entity, _>(format!(
+                    "pid {pid} not found"
+                ))
+            })?;

         Ok(subscriber_auth)
     }

View File

@ -0,0 +1,11 @@
pub const CRON_DUE_EVENT: &str = "cron_due";
pub const CRON_DUE_DEBUG_EVENT: &str = "cron_due_debug";
pub const CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME: &str = "check_and_trigger_due_crons";
pub const NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME: &str = "notify_due_cron_when_mutating";
pub const NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME: &str =
"notify_due_cron_when_mutating_trigger";
pub const SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME: &str = "setup_cron_extra_foreign_keys";
pub const SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME: &str =
"setup_cron_extra_foreign_keys_trigger";

View File

@ -0,0 +1,452 @@
mod core;
pub use core::{
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_DEBUG_EVENT, CRON_DUE_EVENT,
NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME, NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME,
};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use chrono_tz::Tz;
use croner::Cron;
use sea_orm::{
ActiveValue::{self, Set},
Condition, DeriveActiveEnum, DeriveDisplay, DeriveEntityModel, EnumIter, QuerySelect,
Statement, TransactionTrait,
entity::prelude::*,
sea_query::{ExprTrait, LockBehavior, LockType},
sqlx::postgres::PgNotification,
};
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::RecorderResult,
models::{subscriber_tasks, system_tasks},
task::{SubscriberTaskTrait, SystemTaskTrait},
};
#[derive(
Debug, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "cron_status")]
#[serde(rename_all = "snake_case")]
pub enum CronStatus {
#[sea_orm(string_value = "pending")]
Pending,
#[sea_orm(string_value = "running")]
Running,
#[sea_orm(string_value = "completed")]
Completed,
#[sea_orm(string_value = "failed")]
Failed,
#[sea_orm(string_value = "disabled")]
Disabled,
}
#[derive(Debug, Clone, DeriveEntityModel, PartialEq, Serialize, Deserialize)]
#[sea_orm(table_name = "cron")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTimeUtc,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTimeUtc,
#[sea_orm(primary_key)]
pub id: i32,
pub subscriber_id: Option<i32>,
pub subscription_id: Option<i32>,
pub cron_expr: String,
pub cron_timezone: String,
pub next_run: Option<DateTimeUtc>,
pub last_run: Option<DateTimeUtc>,
pub last_error: Option<String>,
pub locked_by: Option<String>,
pub locked_at: Option<DateTimeUtc>,
    // The DB-level default of 5000 is set in the add_cron migration; the column
    // stays nullable here instead of using default_expr = "5000"
pub timeout_ms: Option<i32>,
#[sea_orm(default_expr = "0")]
pub attempts: i32,
#[sea_orm(default_expr = "1")]
pub max_attempts: i32,
#[sea_orm(default_expr = "0")]
pub priority: i32,
pub status: CronStatus,
#[sea_orm(default_expr = "true")]
pub enabled: bool,
pub subscriber_task_cron: Option<subscriber_tasks::SubscriberTask>,
pub system_task_cron: Option<system_tasks::SystemTask>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Subscriber,
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Subscription,
#[sea_orm(has_many = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(has_many = "super::system_tasks::Entity")]
SystemTask,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::subscriber_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriberTask.def()
}
}
impl Related<super::system_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SystemTask.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(entity = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(entity = "super::system_tasks::Entity")]
SystemTask,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
match (
&self.cron_expr as &ActiveValue<String>,
&self.cron_timezone as &ActiveValue<String>,
) {
(ActiveValue::Set(cron_expr), ActiveValue::Set(timezone)) => {
if matches!(
&self.next_run,
ActiveValue::NotSet | ActiveValue::Unchanged(_)
) {
let next_run = Model::calculate_next_run(cron_expr, timezone)
.map_err(|e| DbErr::Custom(e.to_string()))?;
self.next_run = Set(Some(next_run));
}
}
(
ActiveValue::Unchanged(_) | ActiveValue::NotSet,
ActiveValue::Unchanged(_) | ActiveValue::NotSet,
) => {}
(_, _) => {
if matches!(
self.next_run,
ActiveValue::NotSet | ActiveValue::Unchanged(_)
) {
return Err(DbErr::Custom(
"Cron expr and timezone must be insert or update at same time when next \
run is not set"
.to_string(),
));
}
}
};
if let ActiveValue::Set(Some(subscriber_id)) = self.subscriber_id
&& let ActiveValue::Set(Some(ref subscriber_task)) = self.subscriber_task_cron
&& subscriber_task.get_subscriber_id() != subscriber_id
{
return Err(DbErr::Custom(
"Cron subscriber_id does not match subscriber_task_cron.subscriber_id".to_string(),
));
}
if let ActiveValue::Set(Some(subscriber_id)) = self.subscriber_id
&& let ActiveValue::Set(Some(ref system_task)) = self.system_task_cron
&& system_task.get_subscriber_id() != Some(subscriber_id)
{
return Err(DbErr::Custom(
"Cron subscriber_id does not match system_task_cron.subscriber_id".to_string(),
));
}
if let ActiveValue::Set(enabled) = self.enabled
&& !insert
{
if enabled {
self.status = Set(CronStatus::Pending)
} else {
self.status = Set(CronStatus::Disabled)
}
}
Ok(self)
}
}
impl Model {
pub async fn handle_cron_notification(
ctx: &dyn AppContextTrait,
notification: PgNotification,
worker_id: &str,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
let payload: Self = serde_json::from_str(notification.payload())?;
let cron_id = payload.id;
tracing::debug!("Cron notification received for cron {cron_id} and worker {worker_id}");
match Self::try_acquire_lock_with_cron_id(ctx, cron_id, worker_id).await? {
Some(cron) => match cron.exec_cron(ctx).await {
Ok(()) => {
tracing::debug!("Cron {cron_id} executed successfully");
cron.mark_cron_completed(ctx).await?;
}
Err(e) => {
tracing::error!("Error executing cron {cron_id}: {e}");
cron.mark_cron_failed(ctx, &e.to_string(), retry_duration)
.await?;
}
},
None => {
tracing::debug!(
"Cron lock not acquired for cron {cron_id} and worker {worker_id}, skipping..."
);
}
}
Ok(())
}
async fn try_acquire_lock_with_cron_id(
ctx: &dyn AppContextTrait,
cron_id: i32,
worker_id: &str,
) -> RecorderResult<Option<Self>> {
let db = ctx.db();
let txn = db.begin().await?;
let cron = Entity::find_by_id(cron_id)
.lock_with_behavior(LockType::Update, LockBehavior::SkipLocked)
.one(&txn)
.await?;
if let Some(cron) = cron {
if cron.enabled
&& cron.attempts < cron.max_attempts
&& cron.status == CronStatus::Pending
&& (cron.locked_at.is_none_or(|locked_at| {
cron.timeout_ms.is_some_and(|cron_timeout_ms| {
locked_at + chrono::Duration::milliseconds(cron_timeout_ms as i64)
<= Utc::now()
})
}))
&& cron.next_run.is_some_and(|next_run| next_run <= Utc::now())
{
let cron_active_model = ActiveModel {
id: Set(cron.id),
locked_by: Set(Some(worker_id.to_string())),
locked_at: Set(Some(Utc::now())),
status: Set(CronStatus::Running),
attempts: Set(cron.attempts + 1),
..Default::default()
};
let cron_model = cron_active_model.update(&txn).await?;
txn.commit().await?;
return Ok(Some(cron_model));
}
txn.commit().await?;
return Ok(Some(cron));
}
txn.rollback().await?;
Ok(None)
}
async fn exec_cron(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
if let Some(subscriber_task) = self.subscriber_task_cron.as_ref() {
let task_service = ctx.task();
let mut new_subscriber_task = subscriber_task.clone();
new_subscriber_task.set_cron_id(Some(self.id));
task_service
.add_subscriber_task(new_subscriber_task)
.await?;
} else if let Some(system_task) = self.system_task_cron.as_ref() {
let task_service = ctx.task();
let mut new_system_task = system_task.clone();
new_system_task.set_cron_id(Some(self.id));
task_service.add_system_task(new_system_task).await?;
} else {
unimplemented!("Cron without an associated task is not supported yet");
}
Ok(())
}
async fn mark_cron_completed(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
let db = ctx.db();
let next_run = Self::calculate_next_run(&self.cron_expr, &self.cron_timezone)?;
ActiveModel {
id: Set(self.id),
next_run: Set(Some(next_run)),
last_run: Set(Some(Utc::now())),
status: Set(CronStatus::Pending),
locked_by: Set(None),
locked_at: Set(None),
attempts: Set(0),
last_error: Set(None),
..Default::default()
}
.update(db)
.await?;
Ok(())
}
async fn mark_cron_failed(
&self,
ctx: &dyn AppContextTrait,
error: &str,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
let db = ctx.db();
let should_retry = self.attempts < self.max_attempts;
let status = if should_retry {
CronStatus::Pending
} else {
CronStatus::Failed
};
let next_run = if should_retry {
Some(Utc::now() + retry_duration)
} else {
Some(Self::calculate_next_run(
&self.cron_expr,
&self.cron_timezone,
)?)
};
ActiveModel {
id: Set(self.id),
next_run: Set(next_run),
status: Set(status),
locked_by: Set(None),
locked_at: Set(None),
last_run: Set(Some(Utc::now())),
last_error: Set(Some(error.to_string())),
attempts: Set(if should_retry { self.attempts + 1 } else { 0 }),
..Default::default()
}
.update(db)
.await?;
Ok(())
}
pub async fn check_and_trigger_due_crons(ctx: &dyn AppContextTrait) -> RecorderResult<()> {
let db = ctx.db();
db.execute(Statement::from_string(
db.get_database_backend(),
format!("SELECT {CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME}()"),
))
.await?;
Ok(())
}
pub async fn check_and_cleanup_expired_cron_locks(
ctx: &dyn AppContextTrait,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
let db = ctx.db();
let condition = Condition::all()
.add(Column::Status.eq(CronStatus::Running))
.add(Column::LastRun.is_not_null())
.add(Column::TimeoutMs.is_not_null())
.add(
Expr::col(Column::LastRun)
.add(Expr::col(Column::TimeoutMs).mul(Expr::cust("INTERVAL '1 millisecond'")))
.lte(Expr::current_timestamp()),
);
let cron_ids = Entity::find()
.select_only()
.column(Column::Id)
.filter(condition.clone())
.lock_with_behavior(LockType::Update, LockBehavior::SkipLocked)
.into_tuple::<i32>()
.all(db)
.await?;
for cron_id in cron_ids {
let txn = db.begin().await?;
let locked_cron = Entity::find_by_id(cron_id)
.filter(condition.clone())
.lock_with_behavior(LockType::Update, LockBehavior::SkipLocked)
.one(&txn)
.await?;
if let Some(locked_cron) = locked_cron {
locked_cron
.mark_cron_failed(
ctx,
format!(
"Cron timeout of {}ms",
locked_cron
.timeout_ms
.as_ref()
.map(|s| s.to_string())
.unwrap_or_else(|| "Infinite".to_string())
)
.as_str(),
retry_duration,
)
.await?;
txn.commit().await?;
} else {
txn.rollback().await?;
}
}
Ok(())
}
pub fn calculate_next_run(cron_expr: &str, timezone: &str) -> RecorderResult<DateTime<Utc>> {
let user_tz = timezone.parse::<Tz>()?;
let user_tz_now = Utc::now().with_timezone(&user_tz);
let cron_expr = Cron::new(cron_expr).with_seconds_optional().parse()?;
let next = cron_expr.find_next_occurrence(&user_tz_now, false)?;
let next_utc = next.with_timezone(&Utc);
Ok(next_utc)
}
}
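
The flow above is notification-driven: a database trigger NOTIFYs whenever a cron row becomes due, every worker that hears it calls handle_cron_notification, and try_acquire_lock_with_cron_id settles the race with FOR UPDATE SKIP LOCKED so only one worker runs the row. A minimal sketch of the listening side, assuming sqlx's PgListener; the channel name is hypothetical (the real trigger and channel live in this changeset's migrations, not shown here), and RecorderError is assumed to convert from sqlx errors:

use std::sync::Arc;

use sqlx::postgres::PgListener;

// Hypothetical worker loop; names other than `handle_cron_notification`
// are illustrative, not taken from this diff.
async fn listen_for_due_crons(
    ctx: Arc<dyn AppContextTrait>,
    database_url: &str,
    worker_id: &str,
) -> RecorderResult<()> {
    let mut listener = PgListener::connect(database_url).await?;
    listener.listen("cron_due").await?; // channel name is an assumption

    loop {
        let notification = listener.recv().await?;
        // Every worker races for the row lock; losers see SKIP LOCKED
        // return no row and simply skip this cron id.
        Model::handle_cron_notification(
            ctx.as_ref(),
            notification,
            worker_id,
            chrono::Duration::seconds(5), // retry_duration, illustrative
        )
        .await?;
    }
}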

View File

@ -129,7 +129,7 @@ pub enum RelatedEntity {
 }
 
 impl ActiveModel {
-    #[tracing::instrument(err, skip(ctx), fields(bangumi_id = ?bangumi.id, mikan_episode_id = ?episode.mikan_episode_id))]
+    #[tracing::instrument(err, skip_all, fields(bangumi_id = ?bangumi.id, mikan_episode_id = ?episode.mikan_episode_id))]
     pub fn from_mikan_bangumi_and_episode_meta(
         ctx: &dyn AppContextTrait,
         bangumi: &bangumi::Model,

View File

@ -102,7 +102,7 @@ impl ActiveModelBehavior for ActiveModel {
         C: ConnectionTrait,
     {
         if insert && let ActiveValue::NotSet = self.token {
-            let token = nanoid::nanoid!(10);
+            let token = Uuid::now_v7().to_string();
             self.token = ActiveValue::Set(token);
         }
         Ok(self)
@ -122,9 +122,7 @@ impl Model {
             .filter(Column::FeedType.eq(FeedType::Rss))
             .one(db)
             .await?
-            .ok_or(RecorderError::ModelEntityNotFound {
-                entity: "Feed".into(),
-            })?;
+            .ok_or(RecorderError::from_entity_not_found::<Entity>())?;
 
         let feed = Feed::from_model(ctx, feed_model).await?;

View File

@ -1,5 +1,7 @@
 use rss::Channel;
-use sea_orm::{ColumnTrait, EntityTrait, JoinType, QueryFilter, QuerySelect, RelationTrait};
+use sea_orm::{
+    ColumnTrait, EntityTrait, JoinType, Order, QueryFilter, QueryOrder, QuerySelect, RelationTrait,
+};
 use url::Url;
 
 use crate::{
@ -37,13 +39,12 @@ impl Feed {
                     subscription_episode::Relation::Subscription.def(),
                 )
                 .filter(subscriptions::Column::Id.eq(subscription_id))
+                .order_by(episodes::Column::EnclosurePubDate, Order::Desc)
                 .all(db)
                 .await?;
             (subscription, episodes)
         } else {
-            return Err(RecorderError::ModelEntityNotFound {
-                entity: "Subscription".into(),
-            });
+            return Err(RecorderError::from_entity_not_found::<subscriptions::Entity>());
         };
 
         Ok(Feed::SubscritpionEpisodes(

View File

@ -24,6 +24,7 @@ pub trait RssFeedItemTrait: Sized {
         -> Option<Cow<'_, str>>;
     fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>>;
     fn get_enclosure_content_length(&self) -> Option<i64>;
+    fn get_xmlns(&self) -> Cow<'_, str>;
     fn into_item(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Item> {
         let enclosure_mime_type =
             self.get_enclosure_mime()
@ -53,32 +54,49 @@ pub trait RssFeedItemTrait: Sized {
         let mut extensions = ExtensionMap::default();
         if enclosure_mime_type == BITTORRENT_MIME_TYPE {
-            extensions.insert("torrent".to_string(), {
-                let mut map = btreemap! {
-                    "link".to_string() => vec![
-                        ExtensionBuilder::default().name(
-                            "link"
-                        ).value(enclosure_link.to_string()).build()
-                    ],
-                    "contentLength".to_string() => vec![
-                        ExtensionBuilder::default().name(
-                            "contentLength"
-                        ).value(enclosure_content_length.to_string()).build()
-                    ],
-                };
-                if let Some(pub_date) = enclosure_pub_date {
-                    map.insert(
-                        "pubDate".to_string(),
-                        vec![
-                            ExtensionBuilder::default()
-                                .name("pubDate")
-                                .value(pub_date.to_rfc3339())
-                                .build(),
-                        ],
-                    );
-                }
-                map
-            });
+            let xmlns = self.get_xmlns();
+
+            let torrent_extension = ExtensionBuilder::default()
+                .name("torrent")
+                .attrs(btreemap! {
+                    "xmlns".to_string() => xmlns.to_string()
+                })
+                .children({
+                    let mut m = btreemap! {
+                        "link".to_string() => vec![
+                            ExtensionBuilder::default()
+                                .name("link")
+                                .value(link.to_string())
+                                .build()
+                        ],
+                        "contentLength".to_string() => vec![
+                            ExtensionBuilder::default()
+                                .name("contentLength")
+                                .value(enclosure_content_length.to_string())
+                                .build()
+                        ]
+                    };
+                    if let Some(pub_date) = enclosure_pub_date {
+                        m.insert(
+                            "pubDate".to_string(),
+                            vec![
+                                ExtensionBuilder::default()
+                                    .name("pubDate")
+                                    .value(pub_date.to_rfc3339())
+                                    .build(),
+                            ],
+                        );
+                    };
+                    m
+                })
+                .build();
+
+            extensions.insert(
+                "".to_string(),
+                btreemap! {
+                    "torrent".to_string() => vec![torrent_extension]
+                },
+            );
         };
 
         let enclosure = EnclosureBuilder::default()

View File

@ -42,6 +42,12 @@ impl RssFeedItemTrait for episodes::Model {
         Cow::Owned(format!("{PROJECT_NAME}:episode:{}", self.id))
     }
 
+    fn get_xmlns(&self) -> Cow<'_, str> {
+        match self.episode_type {
+            episodes::EpisodeType::Mikan => Cow::Borrowed("https://mikanani.me/0.1/"),
+        }
+    }
+
     fn get_title(&self) -> Cow<'_, str> {
         Cow::Borrowed(&self.display_name)
     }

View File

@ -1,6 +1,7 @@
 pub mod auth;
 pub mod bangumi;
 pub mod credential_3rd;
+pub mod cron;
 pub mod downloaders;
 pub mod downloads;
 pub mod episodes;
@ -11,3 +12,4 @@ pub mod subscribers;
 pub mod subscription_bangumi;
 pub mod subscription_episode;
 pub mod subscriptions;
+pub mod system_tasks;

View File

@ -1,7 +1,7 @@
 use async_trait::async_trait;
 use sea_orm::{
     ActiveModelTrait, ColumnTrait, ConnectionTrait, DbErr, EntityTrait, Insert, IntoActiveModel,
-    Iterable, QueryResult, QueryTrait, SelectModel, SelectorRaw, sea_query::Query,
+    QueryResult, QueryTrait, sea_query::Query,
 };
 
 #[async_trait]
@ -10,13 +10,6 @@ where
     <A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
     A: ActiveModelTrait,
 {
-    fn exec_with_returning_models<C>(
-        self,
-        db: &C,
-    ) -> SelectorRaw<SelectModel<<A::Entity as EntityTrait>::Model>>
-    where
-        C: ConnectionTrait;
-
     async fn exec_with_returning_columns<C, I>(
         self,
         db: &C,
@ -33,26 +26,6 @@ where
     <A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
     A: ActiveModelTrait + Send,
 {
-    fn exec_with_returning_models<C>(
-        self,
-        db: &C,
-    ) -> SelectorRaw<SelectModel<<A::Entity as EntityTrait>::Model>>
-    where
-        C: ConnectionTrait,
-    {
-        let mut insert_statement = self.into_query();
-        let db_backend = db.get_database_backend();
-        let returning = Query::returning().exprs(
-            <A::Entity as EntityTrait>::Column::iter()
-                .map(|c| c.select_as(c.into_returning_expr(db_backend))),
-        );
-        insert_statement.returning(returning);
-        let insert_statement = db_backend.build(&insert_statement);
-        SelectorRaw::<SelectModel<<A::Entity as EntityTrait>::Model>>::from_statement(
-            insert_statement,
-        )
-    }
-
     async fn exec_with_returning_columns<C, I>(
         self,
         db: &C,

View File

@ -0,0 +1,120 @@
use async_trait::async_trait;
use sea_orm::{ActiveValue, entity::prelude::*};
use crate::task::SubscriberTaskTrait;
pub use crate::task::{
SubscriberTask, SubscriberTaskInput, SubscriberTaskType, SubscriberTaskTypeEnum,
SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter,
};
#[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum SubscriberTaskStatus {
#[sea_orm(string_value = "Pending")]
Pending,
#[sea_orm(string_value = "Scheduled")]
Scheduled,
#[sea_orm(string_value = "Running")]
Running,
#[sea_orm(string_value = "Done")]
Done,
#[sea_orm(string_value = "Failed")]
Failed,
#[sea_orm(string_value = "Killed")]
Killed,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "subscriber_tasks")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: String,
pub subscriber_id: i32,
pub subscription_id: Option<i32>,
pub cron_id: Option<i32>,
pub job: SubscriberTask,
pub task_type: SubscriberTaskType,
pub status: SubscriberTaskStatus,
pub attempts: i32,
pub max_attempts: i32,
pub run_at: DateTimeUtc,
pub last_error: Option<String>,
pub lock_at: Option<DateTimeUtc>,
pub lock_by: Option<String>,
pub done_at: Option<DateTimeUtc>,
pub priority: i32,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Subscriber,
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Subscription,
#[sea_orm(
belongs_to = "super::cron::Entity",
from = "Column::CronId",
to = "super::cron::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Cron,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::cron::Entity> for Entity {
fn to() -> RelationDef {
Relation::Cron.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(entity = "super::cron::Entity")]
Cron,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, _insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
if let ActiveValue::Set(subscriber_id) = self.subscriber_id
&& let ActiveValue::Set(ref job) = self.job
&& job.get_subscriber_id() != subscriber_id
{
return Err(DbErr::Custom(
"SubscriberTask subscriber_id does not match job.subscriber_id".to_string(),
));
}
Ok(self)
}
}
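
The before_save hook above rejects a row whose embedded job payload belongs to a different subscriber. A hedged sketch of that failure mode; the helper name and values are illustrative, not part of this diff:

// `task_for_subscriber_2` is any SubscriberTask whose payload carries
// subscriber_id = 2; inserting it under subscriber 1 fails in Rust,
// before any SQL is issued.
async fn mismatch_is_rejected(
    db: &impl sea_orm::ConnectionTrait,
    task_for_subscriber_2: SubscriberTask,
) {
    let result = ActiveModel {
        id: sea_orm::ActiveValue::Set("task-1".to_string()),
        subscriber_id: sea_orm::ActiveValue::Set(1),
        job: sea_orm::ActiveValue::Set(task_for_subscriber_2),
        ..Default::default()
    }
    .insert(db)
    .await;

    assert!(matches!(result, Err(DbErr::Custom(_))));
}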

View File

@ -45,6 +45,8 @@ pub enum Relation {
     Feed,
     #[sea_orm(has_many = "super::subscriber_tasks::Entity")]
     SubscriberTask,
+    #[sea_orm(has_many = "super::system_tasks::Entity")]
+    SystemTask,
 }
 
 impl Related<super::subscriptions::Entity> for Entity {
@ -95,6 +97,12 @@ impl Related<super::subscriber_tasks::Entity> for Entity {
     }
 }
 
+impl Related<super::system_tasks::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::SystemTask.def()
+    }
+}
+
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
 pub enum RelatedEntity {
     #[sea_orm(entity = "super::subscriptions::Entity")]
@ -111,6 +119,8 @@ pub enum RelatedEntity {
     Feed,
     #[sea_orm(entity = "super::subscriber_tasks::Entity")]
     SubscriberTask,
+    #[sea_orm(entity = "super::system_tasks::Entity")]
+    SystemTask,
 }
 
 #[derive(Debug, Deserialize, Serialize)]
@ -130,10 +140,9 @@ impl Model {
     pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RecorderResult<Self> {
         let db = ctx.db();
 
-        let subscriber = Entity::find_by_id(id)
-            .one(db)
-            .await?
-            .ok_or_else(|| RecorderError::from_db_record_not_found("subscribers::find_by_id"))?;
+        let subscriber = Entity::find_by_id(id).one(db).await?.ok_or_else(|| {
+            RecorderError::from_entity_not_found_detail::<Entity, _>(format!("id {id} not found"))
+        })?;
 
         Ok(subscriber)
     }

View File

@ -11,10 +11,7 @@ pub use registry::{
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
 
-use crate::{
-    app::AppContextTrait,
-    errors::{RecorderError, RecorderResult},
-};
+use crate::{app::AppContextTrait, errors::RecorderResult};
 
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "subscriptions")]
@ -61,6 +58,10 @@ pub enum Relation {
     Credential3rd,
     #[sea_orm(has_many = "super::feeds::Entity")]
     Feed,
+    #[sea_orm(has_many = "super::subscriber_tasks::Entity")]
+    SubscriberTask,
+    #[sea_orm(has_many = "super::cron::Entity")]
+    Cron,
 }
 
 impl Related<super::subscribers::Entity> for Entity {
@ -121,6 +122,18 @@ impl Related<super::credential_3rd::Entity> for Entity {
     }
 }
 
+impl Related<super::subscriber_tasks::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::SubscriberTask.def()
+    }
+}
+
+impl Related<super::cron::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Cron.def()
+    }
+}
+
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
 pub enum RelatedEntity {
     #[sea_orm(entity = "super::subscribers::Entity")]
@ -137,6 +150,10 @@ pub enum RelatedEntity {
     Credential3rd,
     #[sea_orm(entity = "super::feeds::Entity")]
     Feed,
+    #[sea_orm(entity = "super::subscriber_tasks::Entity")]
+    SubscriberTask,
+    #[sea_orm(entity = "super::cron::Entity")]
+    Cron,
 }
 
 #[async_trait]
@ -145,51 +162,7 @@ impl ActiveModelBehavior for ActiveModel {}
 impl ActiveModel {}
 
 impl Model {
-    pub async fn toggle_with_ids(
-        ctx: &dyn AppContextTrait,
-        ids: impl Iterator<Item = i32>,
-        enabled: bool,
-    ) -> RecorderResult<()> {
-        let db = ctx.db();
-        Entity::update_many()
-            .col_expr(Column::Enabled, Expr::value(enabled))
-            .filter(Column::Id.is_in(ids))
-            .exec(db)
-            .await?;
-        Ok(())
-    }
-
-    pub async fn delete_with_ids(
-        ctx: &dyn AppContextTrait,
-        ids: impl Iterator<Item = i32>,
-    ) -> RecorderResult<()> {
-        let db = ctx.db();
-        Entity::delete_many()
-            .filter(Column::Id.is_in(ids))
-            .exec(db)
-            .await?;
-        Ok(())
-    }
-
-    pub async fn find_by_id_and_subscriber_id(
-        ctx: &dyn AppContextTrait,
-        subscriber_id: i32,
-        subscription_id: i32,
-    ) -> RecorderResult<Self> {
-        let db = ctx.db();
-        let subscription_model = Entity::find_by_id(subscription_id)
-            .one(db)
-            .await?
-            .ok_or_else(|| RecorderError::ModelEntityNotFound {
-                entity: "Subscription".into(),
-            })?;
-
-        if subscription_model.subscriber_id != subscriber_id {
-            Err(RecorderError::ModelEntityNotFound {
-                entity: "Subscription".into(),
-            })?;
-        }
-
-        Ok(subscription_model)
-    }
+    pub async fn exec_cron(&self, _ctx: &dyn AppContextTrait) -> RecorderResult<()> {
+        todo!()
+    }
 }

View File

@ -1,129 +1,147 @@
 use std::{fmt::Debug, sync::Arc};
 
-use async_trait::async_trait;
 use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter};
 use serde::{Deserialize, Serialize};
 
 use crate::{
-    app::AppContextTrait,
-    errors::{RecorderError, RecorderResult},
+    errors::RecorderResult,
     extract::mikan::{
         MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
     },
     models::subscriptions::{self, SubscriptionTrait},
 };
 
-#[derive(
-    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
-)]
-#[sea_orm(
-    rs_type = "String",
-    db_type = "Enum",
-    enum_name = "subscription_category"
-)]
-#[serde(rename_all = "snake_case")]
-pub enum SubscriptionCategory {
-    #[sea_orm(string_value = "mikan_subscriber")]
-    MikanSubscriber,
-    #[sea_orm(string_value = "mikan_season")]
-    MikanSeason,
-    #[sea_orm(string_value = "mikan_bangumi")]
-    MikanBangumi,
-    #[sea_orm(string_value = "manual")]
-    Manual,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(tag = "category")]
-pub enum Subscription {
-    #[serde(rename = "mikan_subscriber")]
-    MikanSubscriber(MikanSubscriberSubscription),
-    #[serde(rename = "mikan_season")]
-    MikanSeason(MikanSeasonSubscription),
-    #[serde(rename = "mikan_bangumi")]
-    MikanBangumi(MikanBangumiSubscription),
-    #[serde(rename = "manual")]
-    Manual,
-}
-
-impl Subscription {
-    pub fn category(&self) -> SubscriptionCategory {
-        match self {
-            Self::MikanSubscriber(_) => SubscriptionCategory::MikanSubscriber,
-            Self::MikanSeason(_) => SubscriptionCategory::MikanSeason,
-            Self::MikanBangumi(_) => SubscriptionCategory::MikanBangumi,
-            Self::Manual => SubscriptionCategory::Manual,
-        }
-    }
-}
-
-#[async_trait]
-impl SubscriptionTrait for Subscription {
-    fn get_subscriber_id(&self) -> i32 {
-        match self {
-            Self::MikanSubscriber(subscription) => subscription.get_subscriber_id(),
-            Self::MikanSeason(subscription) => subscription.get_subscriber_id(),
-            Self::MikanBangumi(subscription) => subscription.get_subscriber_id(),
-            Self::Manual => unreachable!(),
-        }
-    }
-
-    fn get_subscription_id(&self) -> i32 {
-        match self {
-            Self::MikanSubscriber(subscription) => subscription.get_subscription_id(),
-            Self::MikanSeason(subscription) => subscription.get_subscription_id(),
-            Self::MikanBangumi(subscription) => subscription.get_subscription_id(),
-            Self::Manual => unreachable!(),
-        }
-    }
-
-    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        match self {
-            Self::MikanSubscriber(subscription) => subscription.sync_feeds_incremental(ctx).await,
-            Self::MikanSeason(subscription) => subscription.sync_feeds_incremental(ctx).await,
-            Self::MikanBangumi(subscription) => subscription.sync_feeds_incremental(ctx).await,
-            Self::Manual => Ok(()),
-        }
-    }
-
-    async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        match self {
-            Self::MikanSubscriber(subscription) => subscription.sync_feeds_full(ctx).await,
-            Self::MikanSeason(subscription) => subscription.sync_feeds_full(ctx).await,
-            Self::MikanBangumi(subscription) => subscription.sync_feeds_full(ctx).await,
-            Self::Manual => Ok(()),
-        }
-    }
-
-    async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        match self {
-            Self::MikanSubscriber(subscription) => subscription.sync_sources(ctx).await,
-            Self::MikanSeason(subscription) => subscription.sync_sources(ctx).await,
-            Self::MikanBangumi(subscription) => subscription.sync_sources(ctx).await,
-            Self::Manual => Ok(()),
-        }
-    }
-
-    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
-        match model.category {
-            SubscriptionCategory::MikanSubscriber => {
-                MikanSubscriberSubscription::try_from_model(model).map(Self::MikanSubscriber)
-            }
-            SubscriptionCategory::MikanSeason => {
-                MikanSeasonSubscription::try_from_model(model).map(Self::MikanSeason)
-            }
-            SubscriptionCategory::MikanBangumi => {
-                MikanBangumiSubscription::try_from_model(model).map(Self::MikanBangumi)
-            }
-            SubscriptionCategory::Manual => Ok(Self::Manual),
-        }
-    }
-}
-
-impl TryFrom<&subscriptions::Model> for Subscription {
-    type Error = RecorderError;
-
-    fn try_from(model: &subscriptions::Model) -> Result<Self, Self::Error> {
-        Self::try_from_model(model)
-    }
-}
+macro_rules! register_subscription_type {
+    (
+        subscription_category_enum: {
+            $(#[$subscription_category_enum_meta:meta])*
+            pub enum $type_enum_name:ident {
+                $(
+                    $(#[$variant_meta:meta])*
+                    $variant:ident => $string_value:literal
+                ),* $(,)?
+            }
+        }$(,)?
+        subscription_enum: {
+            $(#[$subscription_enum_meta:meta])*
+            pub enum $subscription_enum_name:ident {
+                $(
+                    $subscription_variant:ident($subscription_type:ty)
+                ),* $(,)?
+            }
+        }
+    ) => {
+        $(#[$subscription_category_enum_meta])*
+        #[sea_orm(
+            rs_type = "String",
+            db_type = "Enum",
+            enum_name = "subscription_category"
+        )]
+        pub enum $type_enum_name {
+            $(
+                $(#[$variant_meta])*
+                #[serde(rename = $string_value)]
+                #[sea_orm(string_value = $string_value)]
+                $variant,
+            )*
+        }
+
+        $(#[$subscription_enum_meta])*
+        #[serde(tag = "category")]
+        pub enum $subscription_enum_name {
+            $(
+                #[serde(rename = $string_value)]
+                $subscription_variant($subscription_type),
+            )*
+        }
+
+        impl $subscription_enum_name {
+            pub fn category(&self) -> $type_enum_name {
+                match self {
+                    $(Self::$subscription_variant(_) => $type_enum_name::$variant,)*
+                }
+            }
+        }
+
+        #[async_trait::async_trait]
+        impl $crate::models::subscriptions::SubscriptionTrait for $subscription_enum_name {
+            fn get_subscriber_id(&self) -> i32 {
+                match self {
+                    $(Self::$subscription_variant(subscription) => subscription.get_subscriber_id(),)*
+                }
+            }
+
+            fn get_subscription_id(&self) -> i32 {
+                match self {
+                    $(Self::$subscription_variant(subscription) => subscription.get_subscription_id(),)*
+                }
+            }
+
+            async fn sync_feeds_incremental(&self, ctx: Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
+                match self {
+                    $(Self::$subscription_variant(subscription) => subscription.sync_feeds_incremental(ctx).await,)*
+                }
+            }
+
+            async fn sync_feeds_full(&self, ctx: Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
+                match self {
+                    $(Self::$subscription_variant(subscription) => subscription.sync_feeds_full(ctx).await,)*
+                }
+            }
+
+            async fn sync_sources(&self, ctx: Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
+                match self {
+                    $(Self::$subscription_variant(subscription) => subscription.sync_sources(ctx).await,)*
+                }
+            }
+
+            fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
+                match model.category {
+                    $($type_enum_name::$variant => {
+                        <$subscription_type as $crate::models::subscriptions::SubscriptionTrait>::try_from_model(model).map(Self::$subscription_variant)
+                    })*
+                }
+            }
+        }
+
+        impl TryFrom<&$crate::models::subscriptions::Model> for $subscription_enum_name {
+            type Error = $crate::errors::RecorderError;
+
+            fn try_from(model: &$crate::models::subscriptions::Model) -> Result<Self, Self::Error> {
+                Self::try_from_model(model)
+            }
+        }
+    };
+}
+
+register_subscription_type! {
+    subscription_category_enum: {
+        #[derive(
+            Clone,
+            Debug,
+            Serialize,
+            Deserialize,
+            PartialEq,
+            Eq,
+            Copy,
+            DeriveActiveEnum,
+            DeriveDisplay,
+            EnumIter,
+        )]
+        pub enum SubscriptionCategory {
+            MikanSubscriber => "mikan_subscriber",
+            MikanSeason => "mikan_season",
+            MikanBangumi => "mikan_bangumi",
+        }
+    }
+    subscription_enum: {
+        #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+        pub enum Subscription {
+            MikanSubscriber(MikanSubscriberSubscription),
+            MikanSeason(MikanSeasonSubscription),
+            MikanBangumi(MikanBangumiSubscription)
+        }
+    }
+}

View File

@ -1,14 +1,14 @@
 use async_trait::async_trait;
-use sea_orm::entity::prelude::*;
+use sea_orm::{ActiveValue, entity::prelude::*};
 
 pub use crate::task::{
-    SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant,
-    SubscriberTaskTypeVariantIter,
+    SystemTask, SystemTaskInput, SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant,
+    SystemTaskTypeVariantIter,
 };
 
 #[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)]
 #[sea_orm(rs_type = "String", db_type = "Text")]
-pub enum SubscriberTaskStatus {
+pub enum SystemTaskStatus {
     #[sea_orm(string_value = "Pending")]
     Pending,
     #[sea_orm(string_value = "Scheduled")]
@ -23,15 +23,16 @@ pub enum SubscriberTaskStatus {
     Killed,
 }
 
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "subscriber_tasks")]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
+#[sea_orm(table_name = "system_tasks")]
 pub struct Model {
     #[sea_orm(primary_key)]
     pub id: String,
-    pub subscriber_id: i32,
-    pub job: SubscriberTask,
-    pub task_type: SubscriberTaskType,
-    pub status: SubscriberTaskStatus,
+    pub subscriber_id: Option<i32>,
+    pub cron_id: Option<i32>,
+    pub job: SystemTask,
+    pub task_type: SystemTaskType,
+    pub status: SystemTaskStatus,
     pub attempts: i32,
     pub max_attempts: i32,
     pub run_at: DateTimeUtc,
@ -49,9 +50,17 @@ pub enum Relation {
         from = "Column::SubscriberId",
         to = "super::subscribers::Column::Id",
         on_update = "Cascade",
-        on_delete = "Cascade"
+        on_delete = "Restrict"
     )]
     Subscriber,
+    #[sea_orm(
+        belongs_to = "super::cron::Entity",
+        from = "Column::CronId",
+        to = "super::cron::Column::Id",
+        on_update = "Cascade",
+        on_delete = "Restrict"
+    )]
+    Cron,
 }
 
 impl Related<super::subscribers::Entity> for Entity {
@ -60,11 +69,31 @@ impl Related<super::subscribers::Entity> for Entity {
     }
 }
 
+impl Related<super::cron::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Cron.def()
+    }
+}
+
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
 pub enum RelatedEntity {
     #[sea_orm(entity = "super::subscribers::Entity")]
     Subscriber,
+    #[sea_orm(entity = "super::cron::Entity")]
+    Cron,
 }
 
 #[async_trait]
-impl ActiveModelBehavior for ActiveModel {}
+impl ActiveModelBehavior for ActiveModel {
+    async fn before_save<C>(mut self, _db: &C, _insert: bool) -> Result<Self, DbErr>
+    where
+        C: ConnectionTrait,
+    {
+        if let ActiveValue::Set(Some(..)) = self.subscriber_id {
+            return Err(DbErr::Custom(
+                "SystemTask can not be created by subscribers now".to_string(),
+            ));
+        }
+
+        Ok(self)
+    }
+}

View File

@ -89,6 +89,13 @@ impl StorageService {
         p
     }
 
+    #[cfg(any(test, feature = "test-utils"))]
+    pub fn build_test_path(&self, path: impl AsRef<Path>) -> PathBuf {
+        let mut p = PathBuf::from("/test");
+        p.push(path);
+        p
+    }
+
     pub fn build_public_path(&self, path: impl AsRef<Path>) -> PathBuf {
         let mut p = PathBuf::from("/public");
         p.push(path);
@ -209,7 +216,7 @@ impl StorageService {
         lister.try_collect().await
     }
 
-    #[instrument(skip_all, err, fields(storage_path = %storage_path.as_ref(), range = ?range, accept = ?accept))]
+    #[instrument(skip_all, err, fields(storage_path = %storage_path.as_ref(), range = ?range, accept = accept.to_string()))]
     pub async fn serve_optimized_image(
         &self,
         storage_path: impl AsRef<Path>,
@ -271,7 +278,7 @@ impl StorageService {
         if let Some(mut ranges) = ranges {
             if ranges.len() > 1 {
-                let boundary = Uuid::new_v4().to_string();
+                let boundary = Uuid::now_v7().to_string();
                 let reader = self.reader(storage_path.as_ref()).await?;
                 let stream: impl Stream<Item = Result<Bytes, RecorderError>> = {
                     let boundary = boundary.clone();

View File

@ -8,10 +8,14 @@ pub struct TaskConfig {
     pub subscriber_task_concurrency: u32,
     #[serde(default = "default_system_task_workers")]
     pub system_task_concurrency: u32,
-    #[serde(default = "default_subscriber_task_timeout")]
-    pub subscriber_task_timeout: Duration,
-    #[serde(default = "default_system_task_timeout")]
-    pub system_task_timeout: Duration,
+    #[serde(default = "default_subscriber_task_reenqueue_orphaned_after")]
+    pub subscriber_task_reenqueue_orphaned_after: Duration,
+    #[serde(default = "default_system_task_reenqueue_orphaned_after")]
+    pub system_task_reenqueue_orphaned_after: Duration,
+    #[serde(default = "default_cron_retry_duration")]
+    pub cron_retry_duration: Duration,
+    #[serde(default = "default_cron_interval_duration")]
+    pub cron_interval_duration: Duration,
 }
 
 impl Default for TaskConfig {
@ -19,8 +23,11 @@ impl Default for TaskConfig {
         Self {
             subscriber_task_concurrency: default_subscriber_task_workers(),
             system_task_concurrency: default_system_task_workers(),
-            subscriber_task_timeout: default_subscriber_task_timeout(),
-            system_task_timeout: default_system_task_timeout(),
+            subscriber_task_reenqueue_orphaned_after:
+                default_subscriber_task_reenqueue_orphaned_after(),
+            system_task_reenqueue_orphaned_after: default_system_task_reenqueue_orphaned_after(),
+            cron_retry_duration: default_cron_retry_duration(),
+            cron_interval_duration: default_cron_interval_duration(),
         }
     }
 }
@ -41,10 +48,18 @@ pub fn default_system_task_workers() -> u32 {
     }
 }
 
-pub fn default_subscriber_task_timeout() -> Duration {
+pub fn default_cron_interval_duration() -> Duration {
+    Duration::from_secs(30)
+}
+
+pub fn default_subscriber_task_reenqueue_orphaned_after() -> Duration {
     Duration::from_secs(3600)
 }
 
-pub fn default_system_task_timeout() -> Duration {
+pub fn default_system_task_reenqueue_orphaned_after() -> Duration {
     Duration::from_secs(3600)
 }
+
+pub fn default_cron_retry_duration() -> Duration {
+    Duration::from_secs(5)
+}
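
A small sketch of overriding the renamed and added knobs in code, assuming the `Duration` in this file is `std::time::Duration`; the field names come from the struct above, the values are illustrative:

use std::time::Duration;

// Unspecified fields keep the `default_*` values defined above.
fn custom_task_config() -> TaskConfig {
    TaskConfig {
        cron_retry_duration: Duration::from_secs(10),    // default: 5 s
        cron_interval_duration: Duration::from_secs(60), // default: 30 s
        ..TaskConfig::default()
    }
}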

View File

@ -1,34 +1,74 @@
 use std::sync::Arc;
 
-use futures::Stream;
+use async_trait::async_trait;
+use futures::{Stream, StreamExt, pin_mut};
 use serde::{Serialize, de::DeserializeOwned};
 
 use crate::{app::AppContextTrait, errors::RecorderResult};
 
 pub const SYSTEM_TASK_APALIS_NAME: &str = "system_task";
 pub const SUBSCRIBER_TASK_APALIS_NAME: &str = "subscriber_task";
+pub const SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME: &str =
+    "setup_apalis_jobs_extra_foreign_keys";
+pub const SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME: &str =
+    "setup_apalis_jobs_extra_foreign_keys_trigger";
 
-#[async_trait::async_trait]
+#[async_trait]
 pub trait AsyncTaskTrait: Serialize + DeserializeOwned + Sized {
     async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
-
-    async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        self.run_async(ctx).await?;
-
-        Ok(())
-    }
 }
 
-#[async_trait::async_trait]
-pub trait StreamTaskTrait: Serialize + DeserializeOwned + Sized {
+pub trait StreamTaskTrait {
     type Yield: Serialize + DeserializeOwned + Send;
 
     fn run_stream(
         self,
         ctx: Arc<dyn AppContextTrait>,
     ) -> impl Stream<Item = RecorderResult<Self::Yield>> + Send;
+}
 
-    async fn run(self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        unimplemented!()
+#[async_trait]
+impl<T> AsyncTaskTrait for T
+where
+    T: StreamTaskTrait + Serialize + DeserializeOwned + Sized + Send,
+{
+    async fn run_async(self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
+        let s = self.run_stream(_ctx);
+
+        pin_mut!(s);
+
+        while let Some(item) = s.next().await {
+            item?;
+        }
+
+        Ok(())
     }
 }
+
+pub trait SystemTaskTrait: AsyncTaskTrait {
+    type InputType: Serialize + DeserializeOwned + Sized + Send;
+
+    fn get_subscriber_id(&self) -> Option<i32>;
+
+    fn set_subscriber_id(&mut self, subscriber_id: Option<i32>);
+
+    fn get_cron_id(&self) -> Option<i32>;
+
+    fn set_cron_id(&mut self, cron_id: Option<i32>);
+
+    fn from_input(input: Self::InputType, subscriber_id: Option<i32>) -> Self;
+}
+
+pub trait SubscriberTaskTrait: AsyncTaskTrait {
+    type InputType: Serialize + DeserializeOwned + Sized + Send;
+
+    fn get_subscriber_id(&self) -> i32;
+
+    fn set_subscriber_id(&mut self, subscriber_id: i32);
+
+    fn get_cron_id(&self) -> Option<i32>;
+
+    fn set_cron_id(&mut self, cron_id: Option<i32>);
+
+    fn from_input(input: Self::InputType, subscriber_id: i32) -> Self;
+}
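
The blanket impl above is what lets a streaming task double as a plain async task: run_async just drains the stream and propagates the first error. A minimal sketch with a hypothetical CountTask, not part of this diff:

use std::sync::Arc;

use futures::Stream;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct CountTask {
    up_to: u32,
}

impl StreamTaskTrait for CountTask {
    type Yield = u32;

    fn run_stream(
        self,
        _ctx: Arc<dyn AppContextTrait>,
    ) -> impl Stream<Item = RecorderResult<Self::Yield>> + Send {
        // A finite, infallible stream; real tasks would yield progress items
        // and surface failures as `Err(...)`.
        futures::stream::iter((0..self.up_to).map(Ok))
    }
}

// Thanks to the blanket impl, this now also works:
// CountTask { up_to: 3 }.run_async(ctx).await?;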

View File

@ -1,16 +0,0 @@
use sea_orm::sea_query;
#[derive(sea_query::Iden)]
pub enum ApalisSchema {
#[iden = "apalis"]
Schema,
}
#[derive(sea_query::Iden)]
pub enum ApalisJobs {
#[iden = "jobs"]
Table,
Id,
}

View File

@ -1,19 +1,22 @@
 mod config;
 mod core;
-mod r#extern;
 mod registry;
 mod service;
 
 pub use core::{
-    AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, StreamTaskTrait,
+    AsyncTaskTrait, SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
+    SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME, SUBSCRIBER_TASK_APALIS_NAME,
+    SYSTEM_TASK_APALIS_NAME, StreamTaskTrait, SubscriberTaskTrait, SystemTaskTrait,
 };
 pub use config::TaskConfig;
-pub use r#extern::{ApalisJobs, ApalisSchema};
 pub use registry::{
-    OptimizeImageTask, SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum,
-    SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
-    SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask, SystemTask,
-    SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant, SystemTaskTypeVariantIter,
+    EchoTask, OptimizeImageTask, SubscriberTask, SubscriberTaskInput, SubscriberTaskType,
+    SubscriberTaskTypeEnum, SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter,
+    SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
+    SyncOneSubscriptionSourcesTask, SystemTask, SystemTaskInput, SystemTaskType,
+    SystemTaskTypeEnum, SystemTaskTypeVariant, SystemTaskTypeVariantIter,
 };
+#[allow(unused_imports)]
+pub(crate) use registry::{register_subscriber_task_type, register_system_task_type};
 pub use service::TaskService;

View File

@ -1,134 +1,14 @@
-mod media;
-mod subscription;
-
-use std::sync::Arc;
-
-pub use media::OptimizeImageTask;
-use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
-use serde::{Deserialize, Serialize};
-pub use subscription::{
-    SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
-    SyncOneSubscriptionSourcesTask,
-};
-
-use crate::{
-    app::AppContextTrait,
-    errors::{RecorderError, RecorderResult},
-    models::subscriptions::SubscriptionTrait,
-    task::AsyncTaskTrait,
-};
-
-#[derive(
-    Clone,
-    Debug,
-    Serialize,
-    Deserialize,
-    PartialEq,
-    Eq,
-    Copy,
-    DeriveActiveEnum,
-    DeriveDisplay,
-    EnumIter,
-)]
-#[sea_orm(rs_type = "String", db_type = "Text")]
-pub enum SubscriberTaskType {
-    #[serde(rename = "sync_one_subscription_feeds_incremental")]
-    #[sea_orm(string_value = "sync_one_subscription_feeds_incremental")]
-    SyncOneSubscriptionFeedsIncremental,
-    #[serde(rename = "sync_one_subscription_feeds_full")]
-    #[sea_orm(string_value = "sync_one_subscription_feeds_full")]
-    SyncOneSubscriptionFeedsFull,
-    #[serde(rename = "sync_one_subscription_sources")]
-    #[sea_orm(string_value = "sync_one_subscription_sources")]
-    SyncOneSubscriptionSources,
-}
-
-impl TryFrom<&SubscriberTask> for serde_json::Value {
-    type Error = RecorderError;
-
-    fn try_from(value: &SubscriberTask) -> Result<Self, Self::Error> {
-        let json_value = serde_json::to_value(value)?;
-        Ok(match json_value {
-            serde_json::Value::Object(mut map) => {
-                map.remove("task_type");
-                serde_json::Value::Object(map)
-            }
-            _ => {
-                unreachable!("subscriber task must be an json object");
-            }
-        })
-    }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, FromJsonQueryResult)]
-#[serde(tag = "task_type")]
-pub enum SubscriberTask {
-    #[serde(rename = "sync_one_subscription_feeds_incremental")]
-    SyncOneSubscriptionFeedsIncremental(SyncOneSubscriptionFeedsIncrementalTask),
-    #[serde(rename = "sync_one_subscription_feeds_full")]
-    SyncOneSubscriptionFeedsFull(SyncOneSubscriptionFeedsFullTask),
-    #[serde(rename = "sync_one_subscription_sources")]
-    SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTask),
-}
-
-impl SubscriberTask {
-    pub fn get_subscriber_id(&self) -> i32 {
-        match self {
-            Self::SyncOneSubscriptionFeedsIncremental(task) => task.0.get_subscriber_id(),
-            Self::SyncOneSubscriptionFeedsFull(task) => task.0.get_subscriber_id(),
-            Self::SyncOneSubscriptionSources(task) => task.0.get_subscriber_id(),
-        }
-    }
-
-    pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        match self {
-            Self::SyncOneSubscriptionFeedsIncremental(task) => task.run(ctx).await,
-            Self::SyncOneSubscriptionFeedsFull(task) => task.run(ctx).await,
-            Self::SyncOneSubscriptionSources(task) => task.run(ctx).await,
-        }
-    }
-
-    pub fn task_type(&self) -> SubscriberTaskType {
-        match self {
-            Self::SyncOneSubscriptionFeedsIncremental(_) => {
-                SubscriberTaskType::SyncOneSubscriptionFeedsIncremental
-            }
-            Self::SyncOneSubscriptionFeedsFull(_) => {
-                SubscriberTaskType::SyncOneSubscriptionFeedsFull
-            }
-            Self::SyncOneSubscriptionSources(_) => SubscriberTaskType::SyncOneSubscriptionSources,
-        }
-    }
-}
-
-#[derive(
-    Clone,
-    Debug,
-    Serialize,
-    Deserialize,
-    PartialEq,
-    Eq,
-    Copy,
-    DeriveActiveEnum,
-    DeriveDisplay,
-    EnumIter,
-)]
-#[sea_orm(rs_type = "String", db_type = "Text")]
-pub enum SystemTaskType {
-    #[serde(rename = "optimize_image")]
-    #[sea_orm(string_value = "optimize_image")]
-    OptimizeImage,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, FromJsonQueryResult)]
-pub enum SystemTask {
-    #[serde(rename = "optimize_image")]
-    OptimizeImage(OptimizeImageTask),
-}
-
-impl SystemTask {
-    pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        match self {
-            Self::OptimizeImage(task) => task.run(ctx).await,
-        }
-    }
-}
+mod subscriber;
+mod system;
+
+pub(crate) use subscriber::register_subscriber_task_type;
+pub use subscriber::{
+    SubscriberTask, SubscriberTaskInput, SubscriberTaskType, SubscriberTaskTypeEnum,
+    SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
+    SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask,
+};
+pub(crate) use system::register_system_task_type;
+pub use system::{
+    EchoTask, OptimizeImageTask, SystemTask, SystemTaskInput, SystemTaskType, SystemTaskTypeEnum,
+    SystemTaskTypeVariant, SystemTaskTypeVariantIter,
+};

View File

@ -0,0 +1,66 @@
macro_rules! register_subscriber_task_type {
(
$(#[$type_meta:meta])*
$task_vis:vis struct $task_name:ident {
$($(#[$field_meta:meta])* pub $field_name:ident: $field_type:ty),* $(,)?
}
) => {
$(#[$type_meta])*
#[derive(typed_builder::TypedBuilder, ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[ts(rename_all = "camelCase")]
$task_vis struct $task_name {
$($(#[$field_meta])* pub $field_name: $field_type,)*
pub subscriber_id: i32,
#[builder(default = None)]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>,
}
paste::paste! {
$(#[$type_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
$task_vis struct [<$task_name Input>] {
$($(#[$field_meta])* pub $field_name: $field_type,)*
#[serde(default, skip_serializing_if = "Option::is_none")]
pub subscriber_id: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>,
}
}
impl $crate::task::SubscriberTaskTrait for $task_name {
paste::paste! {
type InputType = [<$task_name Input>];
}
fn get_subscriber_id(&self) -> i32 {
self.subscriber_id
}
fn get_cron_id(&self) -> Option<i32> {
self.cron_id
}
fn set_subscriber_id(&mut self, subscriber_id: i32) {
self.subscriber_id = subscriber_id;
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
self.cron_id = cron_id;
}
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self {
Self {
$($field_name: input.$field_name,)*
cron_id: input.cron_id,
subscriber_id: input.subscriber_id.unwrap_or(subscriber_id),
}
}
}
}
}
pub(crate) use register_subscriber_task_type;
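
A hedged sketch of how a task module would invoke this macro; PingTask is hypothetical. The macro appends the subscriber_id and cron_id fields and emits a PingTaskInput whose subscriber_id is optional, so from_input can fall back to the authenticated subscriber:

register_subscriber_task_type! {
    #[derive(Clone, Debug, PartialEq)]
    pub struct PingTask {
        pub target: String,
    }
}

// `SubscriberTaskTrait` has `AsyncTaskTrait` as a supertrait, so the task
// body is still written by hand:
#[async_trait::async_trait]
impl crate::task::AsyncTaskTrait for PingTask {
    async fn run_async(
        self,
        _ctx: std::sync::Arc<dyn crate::app::AppContextTrait>,
    ) -> crate::errors::RecorderResult<()> {
        tracing::debug!(ping_target = %self.target, "pong");
        Ok(())
    }
}

// At the API boundary:
// let task = PingTask::from_input(input, authenticated_subscriber_id);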

View File

@ -0,0 +1,165 @@
mod base;
mod subscription;
pub(crate) use base::register_subscriber_task_type;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
pub use subscription::{
SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
SyncOneSubscriptionSourcesTask,
};
macro_rules! register_subscriber_task_types {
(
task_type_enum: {
$(#[$type_enum_meta:meta])*
$type_vis:vis enum $type_enum_name:ident {
$(
$(#[$variant_meta:meta])*
$variant:ident => $string_value:literal
),* $(,)?
}
},
task_enum: {
$(#[$task_enum_meta:meta])*
$task_vis:vis enum $task_enum_name:ident {
$(
$(#[$task_variant_meta:meta])*
$task_variant:ident($task_type:ty)
),* $(,)?
}
}
) => {
$(#[$type_enum_meta])*
#[derive(serde::Serialize, serde::Deserialize)]
#[sea_orm(rs_type = "String", db_type = "Text")]
$type_vis enum $type_enum_name {
$(
$(#[$variant_meta])*
#[serde(rename = $string_value)]
#[sea_orm(string_value = $string_value)]
$variant,
)*
}
$(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(tag = "task_type")]
#[ts(export, rename = "SubscriberTaskType", rename_all = "camelCase", tag = "taskType")]
$task_vis enum $task_enum_name {
$(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant($task_type),
)*
}
paste::paste! {
$(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(tag = "taskType", rename_all = "camelCase")]
#[ts(export, rename_all = "camelCase", tag = "taskType")]
$task_vis enum [<$task_enum_name Input>] {
$(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant(<$task_type as $crate::task::SubscriberTaskTrait>::InputType),
)*
}
}
impl $task_enum_name {
pub fn task_type(&self) -> $type_enum_name {
match self {
$(Self::$task_variant(_) => $type_enum_name::$variant,)*
}
}
}
#[async_trait::async_trait]
impl $crate::task::AsyncTaskTrait for $task_enum_name {
async fn run_async(self, ctx: std::sync::Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::AsyncTaskTrait>::run_async(t, ctx).await,)*
}
}
}
impl $crate::task::SubscriberTaskTrait for $task_enum_name {
paste::paste! {
type InputType = [<$task_enum_name Input>];
}
fn get_subscriber_id(&self) -> i32 {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::SubscriberTaskTrait>::get_subscriber_id(t),)*
}
}
fn get_cron_id(&self) -> Option<i32> {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::SubscriberTaskTrait>::get_cron_id(t),)*
}
}
fn set_subscriber_id(&mut self, subscriber_id: i32) {
match self {
$(Self::$task_variant(t) => t.set_subscriber_id(subscriber_id),)*
}
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
match self {
$(Self::$task_variant(t) => t.set_cron_id(cron_id),)*
}
}
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self {
match input {
$(Self::InputType::$task_variant(t) =>
Self::$task_variant(<$task_type as $crate::task::SubscriberTaskTrait>::from_input(t, subscriber_id)),)*
}
}
}
$(
impl From<$task_type> for $task_enum_name {
fn from(task: $task_type) -> Self {
Self::$task_variant(task)
}
}
)*
};
}
register_subscriber_task_types!(
task_type_enum: {
#[derive(
Clone,
Debug,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
pub enum SubscriberTaskType {
SyncOneSubscriptionFeedsIncremental => "sync_one_subscription_feeds_incremental",
SyncOneSubscriptionFeedsFull => "sync_one_subscription_feeds_full",
SyncOneSubscriptionSources => "sync_one_subscription_sources"
}
},
task_enum: {
#[derive(Clone, Debug, PartialEq, FromJsonQueryResult)]
pub enum SubscriberTask {
SyncOneSubscriptionFeedsIncremental(SyncOneSubscriptionFeedsIncrementalTask),
SyncOneSubscriptionFeedsFull(SyncOneSubscriptionFeedsFullTask),
SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTask),
}
}
);
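
Given #[serde(tag = "task_type")] and the per-variant renames, the generated enum's wire format is flat JSON with a discriminator field. A sketch of decoding one; the value shape is inferred from the macros above, and RecorderError converting from serde_json errors is already relied on elsewhere in this diff:

fn decode_subscriber_task() -> crate::errors::RecorderResult<SubscriberTask> {
    let json = serde_json::json!({
        "task_type": "sync_one_subscription_feeds_incremental",
        "subscription_id": 7,
        "subscriber_id": 42
    });

    // Internally tagged enums inline the variant struct's fields next to
    // the `task_type` discriminator.
    let task: SubscriberTask = serde_json::from_value(json)?;
    assert_eq!(
        task.task_type(),
        SubscriberTaskType::SyncOneSubscriptionFeedsIncremental
    );

    Ok(task)
}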

View File

@ -0,0 +1,66 @@
use sea_orm::prelude::*;
use super::base::register_subscriber_task_type;
use crate::{errors::RecorderResult, models::subscriptions::SubscriptionTrait};
macro_rules! register_subscription_task_type {
(
$(#[$type_meta:meta])* pub struct $task_name:ident {
$($(#[$field_meta:meta])* pub $field_name:ident: $field_type:ty),* $(,)?
} => async |$subscription_param:ident, $ctx_param:ident| -> $task_return_type:ty $method_body:block
) => {
register_subscriber_task_type! {
$(#[$type_meta])*
pub struct $task_name {
$($(#[$field_meta])* pub $field_name: $field_type,)*
pub subscription_id: i32,
}
}
#[async_trait::async_trait]
impl $crate::task::AsyncTaskTrait for $task_name {
async fn run_async(self, ctx: std::sync::Arc<dyn $crate::app::AppContextTrait>) -> $task_return_type {
use $crate::models::subscriptions::{
Entity, Column, Subscription,
};
let subscription_model = Entity::find()
.filter(Column::Id.eq(self.subscription_id))
.filter(Column::SubscriberId.eq(self.subscriber_id))
.one(ctx.db())
.await?
.ok_or_else(|| $crate::errors::RecorderError::from_entity_not_found::<Entity>())?;
let $subscription_param = Subscription::try_from_model(&subscription_model)?;
let $ctx_param = ctx;
$method_body
}
}
}
}
register_subscription_task_type! {
#[derive(Clone, Debug, PartialEq)]
pub struct SyncOneSubscriptionFeedsIncrementalTask {
} => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_feeds_incremental(ctx).await?;
Ok(())
}
}
register_subscription_task_type! {
#[derive(Clone, Debug, PartialEq)]
pub struct SyncOneSubscriptionFeedsFullTask {
} => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_feeds_full(ctx).await?;
Ok(())
}
}
register_subscription_task_type! {
#[derive(Clone, Debug, PartialEq)]
pub struct SyncOneSubscriptionSourcesTask {
} => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_sources(ctx).await?;
Ok(())
}
}
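
A sketch of constructing and enqueueing one of the generated tasks; the TypedBuilder derive comes from register_subscriber_task_type, and add_subscriber_task is the service call used by the cron executor earlier in this diff. Ids are illustrative:

async fn enqueue_incremental_sync(
    ctx: &dyn crate::app::AppContextTrait,
) -> crate::errors::RecorderResult<()> {
    let task = SyncOneSubscriptionFeedsIncrementalTask::builder()
        .subscription_id(7)
        .subscriber_id(42)
        .build();

    // `From<SyncOneSubscriptionFeedsIncrementalTask> for SubscriberTask`
    // is generated by `register_subscriber_task_types!`.
    ctx.task().add_subscriber_task(task.into()).await?;

    Ok(())
}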

View File

@ -1,62 +0,0 @@
use std::sync::Arc;
use sea_orm::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::RecorderResult,
models::subscriptions::{self, SubscriptionTrait},
task::AsyncTaskTrait,
};
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionFeedsIncrementalTask(pub subscriptions::Subscription);
impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsIncrementalTask {
fn from(subscription: subscriptions::Subscription) -> Self {
Self(subscription)
}
}
#[async_trait::async_trait]
impl AsyncTaskTrait for SyncOneSubscriptionFeedsIncrementalTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_feeds_incremental(ctx).await?;
Ok(())
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionFeedsFullTask(pub subscriptions::Subscription);
impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsFullTask {
fn from(subscription: subscriptions::Subscription) -> Self {
Self(subscription)
}
}
#[async_trait::async_trait]
impl AsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_feeds_full(ctx).await?;
Ok(())
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionSourcesTask(pub subscriptions::Subscription);
#[async_trait::async_trait]
impl AsyncTaskTrait for SyncOneSubscriptionSourcesTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_sources(ctx).await?;
Ok(())
}
}
impl From<subscriptions::Subscription> for SyncOneSubscriptionSourcesTask {
fn from(subscription: subscriptions::Subscription) -> Self {
Self(subscription)
}
}

View File

@ -0,0 +1,67 @@
macro_rules! register_system_task_type {
(
$(#[$type_meta:meta])*
$task_vis:vis struct $task_name:ident {
$($(#[$field_meta:meta])* pub $field_name:ident: $field_type:ty),* $(,)?
}
) => {
$(#[$type_meta])*
#[derive(typed_builder::TypedBuilder, ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[ts(rename_all = "camelCase")]
$task_vis struct $task_name {
$($(#[$field_meta])* pub $field_name: $field_type,)*
#[serde(default, skip_serializing_if = "Option::is_none")]
#[builder(default = None)]
pub subscriber_id: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
#[builder(default = None)]
pub cron_id: Option<i32>,
}
paste::paste! {
$(#[$type_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
$task_vis struct [<$task_name Input>] {
$($(#[$field_meta])* pub $field_name: $field_type,)*
#[serde(default, skip_serializing_if = "Option::is_none")]
pub subscriber_id: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>,
}
}
impl $crate::task::SystemTaskTrait for $task_name {
paste::paste! {
type InputType = [<$task_name Input>];
}
fn get_subscriber_id(&self) -> Option<i32> {
self.subscriber_id
}
fn get_cron_id(&self) -> Option<i32> {
self.cron_id
}
fn set_subscriber_id(&mut self, subscriber_id: Option<i32>) {
self.subscriber_id = subscriber_id;
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
self.cron_id = cron_id;
}
fn from_input(input: Self::InputType, subscriber_id: Option<i32>) -> Self {
Self {
$($field_name: input.$field_name,)*
subscriber_id: input.subscriber_id.or(subscriber_id),
cron_id: input.cron_id,
}
}
}
}
}
pub(crate) use register_system_task_type;
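
The system-side twin mirrors the subscriber macro but keeps subscriber_id optional, and the system_tasks model's before_save currently rejects subscriber-created rows. A hedged sketch with a hypothetical task, not part of this diff:

register_system_task_type! {
    #[derive(Clone, Debug, PartialEq)]
    pub struct PruneCacheTask {
        pub older_than_days: u32,
    }
}

#[async_trait::async_trait]
impl crate::task::AsyncTaskTrait for PruneCacheTask {
    async fn run_async(
        self,
        _ctx: std::sync::Arc<dyn crate::app::AppContextTrait>,
    ) -> crate::errors::RecorderResult<()> {
        // prune entries older than `self.older_than_days` here...
        Ok(())
    }
}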

Some files were not shown because too many files have changed in this diff.