Compare commits

18 Commits

SHA1 Message Date
94919878ea fix: fix issues 2025-07-02 01:33:32 +08:00
81bf27ed28 fix: fix 2025-07-08 00:54:34 +08:00
5be5b9f634 fix: fix cron builder 2025-07-07 01:34:56 +08:00
6cdd8c27ce fix: fix typos 2025-07-06 05:05:07 +08:00
4174cea728 fix: fix cron webui 2025-07-06 02:35:55 +08:00
3aad31a36b feat: more cron webui 2025-07-05 04:08:56 +08:00
004fed9b2e feat: init cron webui 2025-07-05 02:08:55 +08:00
a1c2eeded1 temp save 2025-07-04 05:59:56 +08:00
147df00155 build: add prod build 2025-07-04 05:06:45 +08:00
5155c59293 fix: fix migrations 2025-07-04 01:25:07 +08:00
b5b3c77ba3 fix: fix migrations 2025-07-03 04:25:50 +08:00
1d0aa8d7f1 feat: support system tasks 2025-07-03 03:48:23 +08:00
5b001f9584 refactor: refactor graphql 2025-07-02 01:25:44 +08:00
d06acde882 fix: temp save 2025-07-01 03:45:56 +08:00
bacfe99ef2 fix: fix issues 2025-06-30 02:05:23 +08:00
b4090e74c0 fix: fix webui compability 2025-06-29 02:05:44 +08:00
c3e546e256 refactor: refactor graphql more 2025-06-27 05:54:25 +08:00
65505f91b2 refactor: refactor graphql 2025-06-27 04:06:58 +08:00
164 changed files with 10024 additions and 3188 deletions

View File

@@ -41,12 +41,4 @@
],
"rust-analyzer.cargo.features": "all",
"rust-analyzer.testExplorer": true
// https://github.com/rust-lang/rust/issues/141540
// "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
// "rust-analyzer.check.extraEnv": {
// "CARGO_TARGET_DIR": "target/rust-analyzer"
// },
// "rust-analyzer.cargo.extraEnv": {
// "CARGO_TARGET_DIR": "target/analyzer"
// }
}

Cargo.lock (generated, 277 changed lines)
View File

@@ -356,9 +356,9 @@ dependencies = [
[[package]]
name = "async-channel"
version = "2.3.1"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
checksum = "16c74e56284d2188cabb6ad99603d1ace887a5d7e7b695d01b728155ed9ed427"
dependencies = [
"concurrent-queue",
"event-listener-strategy",
@@ -404,7 +404,7 @@ dependencies = [
"futures-util",
"handlebars",
"http",
"indexmap 2.9.0",
"indexmap 2.10.0",
"lru",
"mime",
"multer",
@@ -474,7 +474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ecdaff7c9cffa3614a9f9999bf9ee4c3078fe3ce4d6a6e161736b56febf2de"
dependencies = [
"bytes",
"indexmap 2.9.0",
"indexmap 2.10.0",
"serde",
"serde_json",
]
@@ -551,7 +551,7 @@ dependencies = [
"derive_builder",
"diligent-date-parser",
"never",
"quick-xml",
"quick-xml 0.37.5",
"serde",
]
@@ -592,9 +592,9 @@ dependencies = [
[[package]]
name = "avif-serialize"
version = "0.8.3"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98922d6a4cfbcb08820c69d8eeccc05bb1f29bfa06b4f5b1dbfe9a868bd7608e"
checksum = "19135c0c7a60bfee564dbe44ab5ce0557c6bf3884e5291a50be76a15640c4fbd"
dependencies = [
"arrayvec",
]
@@ -1009,9 +1009,9 @@ checksum = "56ed6191a7e78c36abdb16ab65341eefd73d64d303fffccdbb00d51e4205967b"
[[package]]
name = "bumpalo"
version = "3.18.1"
version = "3.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db76d6187cd04dff33004d8e6c9cc4e05cd330500379d2394209271b4aeee"
checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
[[package]]
name = "bytecheck"
@@ -1260,9 +1260,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.40"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f"
checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
dependencies = [
"clap_builder",
"clap_derive",
@@ -1270,9 +1270,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.40"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e"
checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
dependencies = [
"anstream",
"anstyle",
@@ -1282,9 +1282,9 @@ dependencies = [
[[package]]
name = "clap_derive"
version = "4.5.40"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce"
checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491"
dependencies = [
"heck 0.5.0",
"proc-macro2",
@@ -1672,9 +1672,9 @@ dependencies = [
[[package]]
name = "crunchy"
version = "0.2.3"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929"
checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "crypto-bigint"
@@ -1922,6 +1922,17 @@ dependencies = [
"serde",
]
[[package]]
name = "derivative"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "derive_builder"
version = "0.20.2"
@@ -2332,11 +2343,12 @@ checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
[[package]]
name = "fancy-regex"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
checksum = "d6215aee357f8c7c989ebb4b8466ca4d7dc93b3957039f2fc3ea2ade8ea5f279"
dependencies = [
"bit-set",
"derivative",
"regex-automata 0.4.9",
"regex-syntax 0.8.5",
]
@@ -2781,9 +2793,9 @@ dependencies = [
[[package]]
name = "gif"
version = "0.13.2"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc37f9a2bfe731e69f1e08d29d91d30604b9ce24bcb2880a961e82d89c6ed89"
checksum = "4ae047235e33e2829703574b54fdec96bfbad892062d97fed2f76022287de61b"
dependencies = [
"color_quant",
"weezl",
@@ -2873,9 +2885,9 @@ dependencies = [
[[package]]
name = "h2"
version = "0.4.10"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9421a676d1b147b16b82c9225157dc629087ef8ec4d5e2960f9437a90dac0a5"
checksum = "17da50a276f1e01e0ba6c029e47b7100754904ee8a278f886546e98575380785"
dependencies = [
"atomic-waker",
"bytes",
@@ -2883,7 +2895,7 @@ dependencies = [
"futures-core",
"futures-sink",
"http",
"indexmap 2.9.0",
"indexmap 2.10.0",
"slab",
"tokio",
"tokio-util",
@@ -3847,9 +3859,9 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.9.0"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
dependencies = [
"equivalent",
"hashbrown 0.15.4",
@@ -3967,6 +3979,17 @@ dependencies = [
"smallvec",
]
[[package]]
name = "io-uring"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013"
dependencies = [
"bitflags 2.9.1",
"cfg-if",
"libc",
]
[[package]]
name = "ipnet"
version = "2.11.0"
@@ -4158,9 +4181,9 @@ checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
[[package]]
name = "libfuzzer-sys"
version = "0.4.9"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf78f52d400cf2d84a3a973a78a592b4adc535739e0a5597a0da6f0c357adc75"
checksum = "5037190e1f70cbeef565bd267599242926f724d3b8a9f510fd7e0b540cfa4404"
dependencies = [
"arbitrary",
"cc",
@@ -4174,9 +4197,9 @@ checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
[[package]]
name = "libredox"
version = "0.1.3"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
checksum = "1580801010e535496706ba011c15f8532df6b42297d2e471fec38ceadd8c0638"
dependencies = [
"bitflags 2.9.1",
"libc",
@@ -4308,7 +4331,7 @@ dependencies = [
"dashmap 6.1.0",
"futures",
"hex 0.4.3",
"indexmap 2.9.0",
"indexmap 2.10.0",
"leaky-bucket",
"librqbit-bencode",
"librqbit-clone-to-owned",
@@ -4383,7 +4406,7 @@ dependencies = [
"futures",
"httparse",
"network-interface",
"quick-xml",
"quick-xml 0.37.5",
"reqwest",
"serde",
"tokio",
@@ -4423,9 +4446,9 @@ dependencies = [
[[package]]
name = "lightningcss"
version = "1.0.0-alpha.66"
version = "1.0.0-alpha.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a73ffa17de66534e4b527232f44aa0a89fad22c4f4e0735f9be35494f058e54"
checksum = "798fba4e1205eed356b8ed7754cc3f7f04914e27855ca641409f4a532e992149"
dependencies = [
"ahash 0.8.12",
"bitflags 2.9.1",
@@ -4435,7 +4458,7 @@ dependencies = [
"dashmap 5.5.3",
"data-encoding",
"getrandom 0.2.16",
"indexmap 2.9.0",
"indexmap 2.10.0",
"itertools 0.10.5",
"lazy_static",
"lightningcss-derive",
@@ -4835,15 +4858,6 @@ dependencies = [
"version_check",
]
[[package]]
name = "nanoid"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ffa00dec017b5b1a8b7cf5e2c008bfda1aa7e0697ac1508b491fdf2622fb4d8"
dependencies = [
"rand 0.8.5",
]
[[package]]
name = "native-tls"
version = "0.2.14"
@@ -5164,7 +5178,7 @@ dependencies = [
"itertools 0.14.0",
"parking_lot 0.12.4",
"percent-encoding",
"quick-xml",
"quick-xml 0.37.5",
"rand 0.9.1",
"reqwest",
"ring",
@@ -5217,7 +5231,7 @@ dependencies = [
"log",
"md-5",
"percent-encoding",
"quick-xml",
"quick-xml 0.37.5",
"reqwest",
"serde",
"serde_json",
@@ -5362,9 +5376,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "owo-colors"
version = "4.2.1"
version = "4.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26995317201fa17f3656c36716aed4a7c81743a9634ac4c99c0eeda495db0cec"
checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e"
[[package]]
name = "p256"
@@ -5858,7 +5872,7 @@ dependencies = [
"either",
"hashbrown 0.14.5",
"hashbrown 0.15.4",
"indexmap 2.9.0",
"indexmap 2.10.0",
"itoa",
"num-traits",
"polars-arrow",
@@ -6019,7 +6033,7 @@ dependencies = [
"either",
"hashbrown 0.15.4",
"hex 0.4.3",
"indexmap 2.9.0",
"indexmap 2.10.0",
"libm",
"memchr",
"num-traits",
@@ -6128,7 +6142,7 @@ version = "0.49.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ada7c7e2fbbeffbdd67628cd8a89f02b0a8d21c71d34e297e2463a7c17575203"
dependencies = [
"indexmap 2.9.0",
"indexmap 2.10.0",
"polars-error",
"polars-utils",
"serde",
@@ -6229,7 +6243,7 @@ dependencies = [
"flate2",
"foldhash",
"hashbrown 0.15.4",
"indexmap 2.9.0",
"indexmap 2.10.0",
"libc",
"memmap2 0.9.5",
"num-traits",
@@ -6503,6 +6517,16 @@ dependencies = [
"serde",
]
[[package]]
name = "quick-xml"
version = "0.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8927b0664f5c5a98265138b7e3f90aa19a6b21353182469ace36d4ac527b7b1b"
dependencies = [
"memchr",
"serde",
]
[[package]]
name = "quinn"
version = "0.11.8"
@@ -6755,6 +6779,7 @@ dependencies = [
"base64 0.22.1",
"bytes",
"chrono",
"chrono-tz 0.10.3",
"clap",
"cocoon",
"color-eyre",
@@ -6786,16 +6811,16 @@ dependencies = [
"mime_guess",
"mockito",
"moka",
"nanoid",
"nom 8.0.0",
"num-traits",
"num_cpus",
"once_cell",
"opendal",
"openidconnect",
"paste",
"percent-encoding",
"polars",
"quick-xml",
"quick-xml 0.38.0",
"quirks_path",
"rand 0.9.1",
"regex",
@@ -6807,6 +6832,7 @@ dependencies = [
"sea-orm",
"sea-orm-migration",
"seaography",
"secrecy",
"serde",
"serde_json",
"serde_variant",
@@ -6823,7 +6849,9 @@ dependencies = [
"tracing",
"tracing-appender",
"tracing-subscriber",
"tracing-test",
"tracing-tree",
"ts-rs",
"typed-builder 0.21.0",
"url",
"util",
@@ -6983,9 +7011,9 @@ dependencies = [
[[package]]
name = "reqwest"
version = "0.12.20"
version = "0.12.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eabf4c97d9130e2bf606614eb937e86edac8292eaa6f422f995d7e8de1eb1813"
checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
dependencies = [
"base64 0.22.1",
"bytes",
@@ -7227,7 +7255,7 @@ dependencies = [
"atom_syndication",
"derive_builder",
"never",
"quick-xml",
"quick-xml 0.37.5",
"serde",
]
@@ -7437,6 +7465,18 @@ dependencies = [
"serde_json",
]
[[package]]
name = "schemars"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1375ba8ef45a6f15d83fa8748f1079428295d403d6ea991d09ab100155fbc06d"
dependencies = [
"dyn-clone",
"ref-cast",
"serde",
"serde_json",
]
[[package]]
name = "scoped-tls"
version = "1.0.1"
@@ -7485,9 +7525,9 @@ dependencies = [
[[package]]
name = "sea-orm"
version = "1.1.12"
version = "1.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18b7272b88bd608cd846de24f41b74a0315a135fe761b0aed4ec1ce6a6327a93"
checksum = "560ea59f07472886a236e7919b9425cf16914fee1d663d3c32f1af2e922b83f0"
dependencies = [
"async-stream",
"async-trait",
@@ -7514,9 +7554,9 @@ dependencies = [
[[package]]
name = "sea-orm-cli"
version = "1.1.12"
version = "1.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a4961b0d9098a9dc992d6e75fb761f9e5c442bb46746eeffa08e47b53759fce"
checksum = "00dd755ba3faca11692d8aaca46b68f1b4955c5dfdd6a3f1f9fba3a679a3ec1d"
dependencies = [
"chrono",
"clap",
@@ -7532,9 +7572,9 @@ dependencies = [
[[package]]
name = "sea-orm-macros"
version = "1.1.12"
version = "1.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c38255a6b2e6d1ae2d5df35696507a345f03c036ae32caeb0a3b922dbab610d"
checksum = "70d0ea50bb4317c8a58ed34dc410a79d685128e7b77ddcd9e8b59ae6416a56d9"
dependencies = [
"heck 0.5.0",
"proc-macro-crate",
@@ -7547,9 +7587,9 @@ dependencies = [
[[package]]
name = "sea-orm-migration"
version = "1.1.12"
version = "1.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82f58c3b1dcf6c137f08394f0228f9baf1574a2a799e93dc5da3cd9228bef9c5"
checksum = "3e06e0f3ca090091ad58da2bc02cdb63f9afbd276baf029f065f6ff09e79cbe9"
dependencies = [
"async-trait",
"clap",
@@ -7642,16 +7682,16 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]]
name = "seaography"
version = "1.1.4"
source = "git+https://github.com/dumtruck/seaography.git?rev=a787c3a#a787c3ab83cf1f8275894e1bc1ca3c766b54674b"
source = "git+https://github.com/dumtruck/seaography.git?rev=292cdd2#292cdd248217fdcf81c41aa97fe1c047c9b5f4de"
dependencies = [
"async-graphql",
"fnv",
"heck 0.4.1",
"itertools 0.12.1",
"heck 0.5.0",
"itertools 0.14.0",
"lazy_static",
"sea-orm",
"serde_json",
"thiserror 1.0.69",
"thiserror 2.0.12",
]
[[package]]
@@ -7668,6 +7708,16 @@ dependencies = [
"zeroize",
]
[[package]]
name = "secrecy"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a"
dependencies = [
"serde",
"zeroize",
]
[[package]]
name = "security-framework"
version = "2.11.1"
@@ -7833,16 +7883,17 @@ dependencies = [
[[package]]
name = "serde_with"
version = "3.13.0"
version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf65a400f8f66fb7b0552869ad70157166676db75ed8181f8104ea91cf9d0b42"
checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5"
dependencies = [
"base64 0.22.1",
"chrono",
"hex 0.4.3",
"indexmap 1.9.3",
"indexmap 2.9.0",
"schemars",
"indexmap 2.10.0",
"schemars 0.9.0",
"schemars 1.0.3",
"serde",
"serde_derive",
"serde_json",
@@ -7852,9 +7903,9 @@ dependencies = [
[[package]]
name = "serde_with_macros"
version = "3.13.0"
version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81679d9ed988d5e9a5e6531dc3f2c28efbd639cbd1dfb628df08edea6004da77"
checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f"
dependencies = [
"darling",
"proc-macro2",
@@ -7868,7 +7919,7 @@ version = "0.9.34+deprecated"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
dependencies = [
"indexmap 2.9.0",
"indexmap 2.10.0",
"itoa",
"ryu",
"serde",
@@ -8214,7 +8265,7 @@ dependencies = [
"futures-util",
"hashbrown 0.15.4",
"hashlink",
"indexmap 2.9.0",
"indexmap 2.10.0",
"log",
"memchr",
"once_cell",
@@ -8690,6 +8741,15 @@ dependencies = [
"unic-segment",
]
[[package]]
name = "termcolor"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
dependencies = [
"winapi-util",
]
[[package]]
name = "testcontainers"
version = "0.24.0"
@@ -8870,17 +8930,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
version = "1.45.1"
version = "1.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
checksum = "1140bb80481756a8cbe10541f37433b459c5aa1e727b4c020fbfebdc25bf3ec4"
dependencies = [
"backtrace",
"bytes",
"io-uring",
"libc",
"mio 1.0.4",
"parking_lot 0.12.4",
"pin-project-lite",
"signal-hook-registry",
"slab",
"socket2",
"tokio-macros",
"windows-sys 0.52.0",
@@ -9018,7 +9080,7 @@ version = "0.22.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
dependencies = [
"indexmap 2.9.0",
"indexmap 2.10.0",
"serde",
"serde_spanned",
"toml_datetime",
@@ -9196,6 +9258,27 @@ dependencies = [
"tracing-serde",
]
[[package]]
name = "tracing-test"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68"
dependencies = [
"tracing-core",
"tracing-subscriber",
"tracing-test-macro",
]
[[package]]
name = "tracing-test-macro"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568"
dependencies = [
"quote",
"syn 2.0.104",
]
[[package]]
name = "tracing-tree"
version = "0.4.0"
@@ -9214,6 +9297,28 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "ts-rs"
version = "11.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ef1b7a6d914a34127ed8e1fa927eb7088903787bcded4fa3eef8f85ee1568be"
dependencies = [
"thiserror 2.0.12",
"ts-rs-macros",
]
[[package]]
name = "ts-rs-macros"
version = "11.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9d4ed7b4c18cc150a6a0a1e9ea1ecfa688791220781af6e119f9599a8502a0a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"termcolor",
]
[[package]]
name = "tungstenite"
version = "0.26.2"
@@ -9880,9 +9985,9 @@ dependencies = [
[[package]]
name = "windows-registry"
version = "0.5.2"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3bab093bdd303a1240bb99b8aba8ea8a69ee19d34c9e2ef9594e708a4878820"
checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e"
dependencies = [
"windows-link",
"windows-result",
@@ -10181,9 +10286,9 @@ dependencies = [
[[package]]
name = "xattr"
version = "1.5.0"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e"
checksum = "af3a19837351dc82ba89f8a125e22a3c475f05aba604acc023d62b2739ae2909"
dependencies = [
"libc",
"rustix 1.0.7",
@@ -10356,9 +10461,9 @@ dependencies = [
[[package]]
name = "zune-jpeg"
version = "0.4.18"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7384255a918371b5af158218d131530f694de9ad3815ebdd0453a940485cb0fa"
checksum = "2c9e525af0a6a658e031e95f14b7f889976b74a11ba0eca5a5fc9ac8a1c43a6a"
dependencies = [
"zune-core",
]

View File

@@ -13,9 +13,6 @@ members = [
resolver = "2"
[profile.dev]
debug = 0
# https://github.com/rust-lang/rust/issues/141540
incremental = false
# [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
# codegen-backend = "cranelift"
@@ -34,22 +31,22 @@ reqwest = { version = "0.12.20", features = [
"macos-system-configuration",
"cookies",
] }
moka = "0.12"
futures = "0.3"
quirks_path = "0.1"
snafu = { version = "0.8", features = ["futures"] }
testcontainers = { version = "0.24" }
moka = "0.12.10"
futures = "0.3.31"
quirks_path = "0.1.1"
snafu = { version = "0.8.0", features = ["futures"] }
testcontainers = { version = "0.24.0" }
testcontainers-modules = { version = "0.12.1" }
testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
serde = { version = "1", features = ["derive"] }
tokio = { version = "1.45.1", features = [
serde = { version = "1.0.219", features = ["derive"] }
tokio = { version = "1.46", features = [
"macros",
"fs",
"rt-multi-thread",
"signal",
] }
serde_json = "1"
async-trait = "0.1"
serde_json = "1.0.140"
async-trait = "0.1.88"
tracing = "0.1"
url = "2.5.2"
anyhow = "1"
@@ -67,7 +64,7 @@ convert_case = "0.8"
color-eyre = "0.6.5"
inquire = "0.7.5"
image = "0.25.6"
uuid = { version = "1.6.0", features = ["v4"] }
uuid = { version = "1.6.0", features = ["v7"] }
maplit = "1.0.2"
once_cell = "1.20.2"
rand = "0.9.1"
@@ -80,11 +77,12 @@ http = "1.2.0"
async-stream = "0.3.6"
serde_variant = "0.1.3"
tracing-appender = "0.2.3"
clap = "4.5.40"
clap = "4.5.41"
ipnetwork = "0.21.1"
typed-builder = "0.21.0"
nanoid = "0.4.0"
webp = "0.3.0"
[patch.crates-io]
seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }
seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "292cdd2" }

View File

@@ -0,0 +1,8 @@
```x-forwarded.json
{
"X-Forwarded-Host": "konobangu.com",
"X-Forwarded-Proto": "https"
}
```
^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/$1
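
For context: this rule points https://konobangu.com at the local dev server on 127.0.0.1:5001 and injects the X-Forwarded-Host and X-Forwarded-Proto headers defined in the x-forwarded.json values block above. A minimal Rust sketch (a hypothetical helper with assumed names, not part of this diff) of how a server behind such a proxy could rebuild its public origin from those headers:

```rust
use http::HeaderMap;

// Hypothetical helper, not from this diff: recover the public origin from
// the X-Forwarded-* headers injected by the whistle rule, falling back to
// the local listen address when the headers are absent (direct access).
fn public_origin(headers: &HeaderMap) -> String {
    let proto = headers
        .get("x-forwarded-proto")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("http");
    let host = headers
        .get("x-forwarded-host")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("127.0.0.1:5001");
    format!("{proto}://{host}")
}
```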

View File

@@ -1 +1 @@
{"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""}
{"filesOrder":["konobangu","konobangu-prod","mikan-doppel"],"selectedList":["mikan-doppel","konobangu"],"disabledDefalutRules":true,"defalutRules":""}

View File

@@ -13,7 +13,7 @@ name = "mikan_doppel"
path = "src/bin/mikan_doppel.rs"
[dependencies]
recorder = { workspace = true }
recorder = { workspace = true, features = ["playground"] }
tokio = { workspace = true }
tracing-subscriber = { workspace = true }
tracing = { workspace = true }

View File

@@ -6,7 +6,7 @@ edition = "2024"
[features]
default = ["jxl"]
playground = ["dep:inquire", "dep:color-eyre", "dep:polars"]
playground = ["dep:inquire", "dep:color-eyre", "dep:polars", "test-utils"]
testcontainers = [
"dep:testcontainers",
"dep:testcontainers-modules",
@@ -15,6 +15,7 @@ testcontainers = [
"testcontainers-modules/postgres",
]
jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
test-utils = []
[lib]
name = "recorder"
@@ -96,7 +97,6 @@ tracing-appender = { workspace = true }
clap = { workspace = true }
ipnetwork = { workspace = true }
typed-builder = { workspace = true }
nanoid = { workspace = true }
webp = { workspace = true }
sea-orm = { version = "1.1", features = [
@@ -109,7 +109,7 @@ sea-orm = { version = "1.1", features = [
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
rss = { version = "2", features = ["builders", "with-serde"] }
fancy-regex = "0.14"
fancy-regex = "0.15"
lightningcss = "1.0.0-alpha.66"
html-escape = "0.2.13"
opendal = { version = "0.53", features = ["default", "services-fs"] }
@@ -125,6 +125,7 @@ seaography = { version = "1.1", features = [
"with-bigdecimal",
"with-postgres-array",
"with-json-as-scalar",
"with-custom-as-json",
] }
tower = { version = "0.5.2", features = ["util"] }
tower-http = { version = "0.6", features = [
@@ -159,17 +160,22 @@ polars = { version = "0.49.1", features = [
"lazy",
"diagonal_concat",
], optional = true }
quick-xml = { version = "0.37.5", features = [
quick-xml = { version = "0.38", features = [
"serialize",
"serde-types",
"serde",
] }
croner = "2.2.0"
ts-rs = "11.0.1"
secrecy = { version = "0.10.3", features = ["serde"] }
paste = "1.0.15"
chrono-tz = "0.10.3"
[dev-dependencies]
inquire = { workspace = true }
color-eyre = { workspace = true }
serial_test = "3"
insta = { version = "1", features = ["redactions", "toml", "filters"] }
rstest = "0.25"
ctor = "0.4.0"
tracing-test = "0.2.5"
rstest = "0.25"

View File

@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncOneSubscriptionFeedsFullTaskInput } from "./SyncOneSubscriptionFeedsFullTaskInput";
import type { SyncOneSubscriptionFeedsIncrementalTaskInput } from "./SyncOneSubscriptionFeedsIncrementalTaskInput";
import type { SyncOneSubscriptionSourcesTaskInput } from "./SyncOneSubscriptionSourcesTaskInput";
export type SubscriberTaskInput = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTaskInput | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTaskInput | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTaskInput;
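
This union is the internally tagged encoding that serde and ts-rs produce for a Rust enum. A minimal sketch (derive details and payload fields are assumptions inferred from the generated output, not copied from this diff) of an enum shape that renders this way:

```rust
use serde::{Deserialize, Serialize};
use ts_rs::TS;

// Hypothetical reduced payload; the real input structs appear below.
#[derive(Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct SyncOneSubscriptionSourcesTaskInput {
    pub subscription_id: i32,
}

// Internally tagged on "taskType": each variant flattens its payload and
// adds the tag, yielding the `{ "taskType": ... } & Payload` TS union.
#[derive(Serialize, Deserialize, TS)]
#[serde(tag = "taskType", rename_all = "snake_case")]
pub enum SubscriberTaskInput {
    SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTaskInput),
}
```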

View File

@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncOneSubscriptionFeedsFullTask } from "./SyncOneSubscriptionFeedsFullTask";
import type { SyncOneSubscriptionFeedsIncrementalTask } from "./SyncOneSubscriptionFeedsIncrementalTask";
import type { SyncOneSubscriptionSourcesTask } from "./SyncOneSubscriptionSourcesTask";
export type SubscriberTaskType = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTask | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTask | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTask;

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsFullTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsFullTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsIncrementalTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsIncrementalTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionSourcesTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionSourcesTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };

View File

@@ -0,0 +1,6 @@
{
"name": "recorder",
"version": "0.0.1",
"private": true,
"type": "module"
}

View File

@@ -131,11 +131,12 @@ impl AppBuilder {
}
pub fn working_dir_from_manifest_dir(self) -> Self {
let manifest_dir = if cfg!(debug_assertions) || cfg!(test) || cfg!(feature = "playground") {
env!("CARGO_MANIFEST_DIR")
} else {
"./apps/recorder"
};
#[cfg(any(test, debug_assertions, feature = "test-utils"))]
let manifest_dir = env!("CARGO_MANIFEST_DIR");
#[cfg(not(any(test, debug_assertions, feature = "test-utils")))]
let manifest_dir = "./apps/recorder";
self.working_dir(manifest_dir.to_string())
}
}

View File

@@ -107,7 +107,7 @@ impl App {
Ok::<(), RecorderError>(())
},
async {
task.run(if graceful_shutdown {
task.run_with_signal(if graceful_shutdown {
Some(Self::shutdown_signal)
} else {
None

View File

@@ -18,6 +18,10 @@ use crate::{
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RecorderError {
#[snafu(transparent)]
ChronoTzParseError { source: chrono_tz::ParseError },
#[snafu(transparent)]
SeaographyError { source: seaography::SeaographyError },
#[snafu(transparent)]
CronError { source: croner::errors::CronError },
#[snafu(display(
@@ -192,20 +196,17 @@ impl RecorderError {
}
}
pub fn from_model_not_found_detail<C: Into<Cow<'static, str>>, T: ToString>(
model: C,
detail: T,
) -> Self {
pub fn from_entity_not_found<E: sea_orm::EntityTrait>() -> Self {
Self::ModelEntityNotFound {
entity: model.into(),
detail: Some(detail.to_string()),
entity: std::any::type_name::<E::Model>().into(),
detail: None,
}
}
pub fn from_model_not_found<C: Into<Cow<'static, str>>>(model: C) -> Self {
pub fn from_entity_not_found_detail<E: sea_orm::EntityTrait, T: ToString>(detail: T) -> Self {
Self::ModelEntityNotFound {
entity: model.into(),
detail: None,
entity: std::any::type_name::<E::Model>().into(),
detail: Some(detail.to_string()),
}
}
}
@@ -314,4 +315,10 @@ impl From<http::method::InvalidMethod> for RecorderError {
}
}
impl From<async_graphql::Error> for RecorderError {
fn from(error: async_graphql::Error) -> Self {
seaography::SeaographyError::AsyncGraphQLError(error).into()
}
}
pub type RecorderResult<T> = Result<T, RecorderError>;
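
A quick sketch (hypothetical call site, not from this diff) of what the new From<async_graphql::Error> impl enables: GraphQL errors now bubble up through `?` in any function returning RecorderResult, routed through seaography::SeaographyError::AsyncGraphQLError:

```rust
// Hypothetical illustration: `?` converts async_graphql::Error into
// RecorderError via the From impl above.
fn demo(res: Result<i32, async_graphql::Error>) -> RecorderResult<i32> {
    let value = res?;
    Ok(value)
}
```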

View File

@@ -1,38 +1,4 @@
use chrono::{DateTime, Utc};
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use quirks_path::Path;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};
use crate::{
errors::app_error::{RecorderError, RecorderResult},
extract::defs::SUBTITLE_LANG,
};
lazy_static! {
static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
vec![
FancyRegex::new(
r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)",
)
.unwrap(),
FancyRegex::new(
r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)",
)
.unwrap(),
FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
]
};
static ref GET_FANSUB_SPLIT_RE: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
static ref GET_FANSUB_FULL_MATCH_RE: Regex = Regex::new(r"^\d+$").unwrap();
static ref GET_SEASON_AND_TITLE_SUB_RE: Regex = Regex::new(r"([Ss]|Season )\d{1,3}").unwrap();
static ref GET_SEASON_AND_TITLE_FIND_RE: Regex =
Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
}
#[derive(Clone, Debug)]
pub struct EpisodeEnclosureMeta {
@@ -41,293 +7,3 @@ pub struct EpisodeEnclosureMeta {
pub pub_date: Option<DateTime<Utc>>,
pub content_length: Option<i64>,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeMediaMeta {
pub fansub: Option<String>,
pub title: String,
pub season: i32,
pub episode_index: i32,
pub extname: String,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeSubtitleMeta {
pub media: TorrentEpisodeMediaMeta,
pub lang: Option<String>,
}
fn get_fansub(group_and_title: &str) -> (Option<&str>, &str) {
let n = GET_FANSUB_SPLIT_RE
.split(group_and_title)
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>();
match (n.first(), n.get(1)) {
(None, None) => (None, ""),
(Some(n0), None) => (None, *n0),
(Some(n0), Some(n1)) => {
if GET_FANSUB_FULL_MATCH_RE.is_match(n1) {
(None, group_and_title)
} else {
(Some(*n0), *n1)
}
}
_ => unreachable!("vec contains n1 must contains n0"),
}
}
fn get_season_and_title(season_and_title: &str) -> (String, i32) {
let replaced_title = GET_SEASON_AND_TITLE_SUB_RE.replace_all(season_and_title, "");
let title = replaced_title.trim().to_string();
let season = GET_SEASON_AND_TITLE_FIND_RE
.captures(season_and_title)
.map(|m| {
m.get(2)
.unwrap_or_else(|| unreachable!("season regex should have 2 groups"))
.as_str()
.parse::<i32>()
.unwrap_or_else(|_| unreachable!("season should be a number"))
})
.unwrap_or(1);
(title, season)
}
fn get_subtitle_lang(media_name: &str) -> Option<&str> {
let media_name_lower = media_name.to_lowercase();
for (lang, lang_aliases) in SUBTITLE_LANG.iter() {
if lang_aliases
.iter()
.any(|alias| media_name_lower.contains(alias))
{
return Some(lang);
}
}
None
}
pub fn parse_episode_media_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> RecorderResult<TorrentEpisodeMediaMeta> {
let media_name = torrent_path
.file_name()
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {torrent_path}")
})?;
let mut match_obj = None;
for rule in TORRENT_EP_PARSE_RULES.iter() {
match_obj = if let Some(torrent_name) = torrent_name.as_ref() {
rule.captures(torrent_name)?
} else {
rule.captures(media_name)?
};
if match_obj.is_some() {
break;
}
}
if let Some(match_obj) = match_obj {
let group_season_and_title = match_obj
.get(1)
.whatever_context::<_, RecorderError>("should have 1 group")?
.as_str();
let (fansub, season_and_title) = get_fansub(group_season_and_title);
let (title, season) = if let Some(season) = season {
let (title, _) = get_season_and_title(season_and_title);
(title, season)
} else {
get_season_and_title(season_and_title)
};
let episode_index = match_obj
.get(2)
.whatever_context::<_, RecorderError>("should have 2 group")?
.as_str()
.parse::<i32>()
.unwrap_or(1);
let extname = torrent_path
.extension()
.map(|e| format!(".{e}"))
.unwrap_or_default();
Ok(TorrentEpisodeMediaMeta {
fansub: fansub.map(|s| s.to_string()),
title,
season,
episode_index,
extname,
})
} else {
whatever!(
"failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'",
torrent_path,
torrent_name
)
}
}
pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> RecorderResult<TorrentEpisodeSubtitleMeta> {
let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
let media_name = torrent_path
.file_name()
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {torrent_path}")
})?;
let lang = get_subtitle_lang(media_name);
Ok(TorrentEpisodeSubtitleMeta {
media: media_meta,
lang: lang.map(|s| s.to_string()),
})
}
#[cfg(test)]
mod tests {
use quirks_path::Path;
use super::{
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta, parse_episode_media_meta_from_torrent,
parse_episode_subtitle_meta_from_torrent,
};
#[test]
fn test_lilith_raws_media() {
test_torrent_ep_parser(
r#"[Lilith-Raws] Boku no Kokoro no Yabai Yatsu - 01 [Baha][WEB-DL][1080p][AVC AAC][CHT][MP4].mp4"#,
r#"{"fansub": "Lilith-Raws", "title": "Boku no Kokoro no Yabai Yatsu", "season": 1, "episode_index": 1, "extname": ".mp4"}"#,
);
}
#[test]
fn test_sakurato_media() {
test_torrent_ep_parser(
r#"[Sakurato] Tonikaku Kawaii S2 [03][AVC-8bit 1080p AAC][CHS].mp4"#,
r#"{"fansub": "Sakurato", "title": "Tonikaku Kawaii", "season": 2, "episode_index": 3, "extname": ".mp4"}"#,
)
}
#[test]
fn test_lolihouse_media() {
test_torrent_ep_parser(
r#"[SweetSub&LoliHouse] Heavenly Delusion - 08 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#,
r#"{"fansub": "SweetSub&LoliHouse", "title": "Heavenly Delusion", "season": 1, "episode_index": 8, "extname": ".mkv"}"#,
)
}
#[test]
fn test_sbsub_media() {
test_torrent_ep_parser(
r#"[SBSUB][CONAN][1082][V2][1080P][AVC_AAC][CHS_JP](C1E4E331).mp4"#,
r#"{"fansub": "SBSUB", "title": "CONAN", "season": 1, "episode_index": 1082, "extname": ".mp4"}"#,
)
}
#[test]
fn test_non_fansub_media() {
test_torrent_ep_parser(
r#"海盗战记 (2019) S04E11.mp4"#,
r#"{"title": "海盗战记 (2019)", "season": 4, "episode_index": 11, "extname": ".mp4"}"#,
)
}
#[test]
fn test_non_fansub_media_with_dirname() {
test_torrent_ep_parser(
r#"海盗战记/海盗战记 S01E01.mp4"#,
r#"{"title": "海盗战记", "season": 1, "episode_index": 1, "extname": ".mp4"}"#,
);
}
#[test]
fn test_non_fansub_tc_subtitle() {
test_torrent_ep_parser(
r#"海盗战记 S01E08.zh-tw.ass"#,
r#"{"media": { "title": "海盗战记", "season": 1, "episode_index": 8, "extname": ".ass" }, "lang": "zh-tw"}"#,
);
}
#[test]
fn test_non_fansub_sc_subtitle() {
test_torrent_ep_parser(
r#"海盗战记 S01E01.SC.srt"#,
r#"{ "media": { "title": "海盗战记", "season": 1, "episode_index": 1, "extname": ".srt" }, "lang": "zh" }"#,
)
}
#[test]
fn test_non_fansub_media_with_season_zero() {
test_torrent_ep_parser(
r#"水星的魔女(2022) S00E19.mp4"#,
r#"{"fansub": null,"title": "水星的魔女(2022)","season": 0,"episode_index": 19,"extname": ".mp4"}"#,
)
}
#[test]
fn test_shimian_fansub_media() {
test_torrent_ep_parser(
r#"【失眠搬运组】放学后失眠的你-Kimi wa Houkago Insomnia - 06 [bilibili - 1080p AVC1 CHS-JP].mp4"#,
r#"{"fansub": "失眠搬运组","title": "放学后失眠的你-Kimi wa Houkago Insomnia","season": 1,"episode_index": 6,"extname": ".mp4"}"#,
)
}
pub fn test_torrent_ep_parser(origin_name: &str, expected: &str) {
let extname = Path::new(origin_name)
.extension()
.map(|e| format!(".{e}"))
.unwrap_or_default()
.to_lowercase();
if extname == ".srt" || extname == ".ass" {
let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
let found_raw =
parse_episode_subtitle_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {
if found_raw.is_ok() {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
} else {
println!(
"expected {} and found {:#?} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
found_raw
)
}
}
assert_eq!(expected, found);
} else {
let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
let found_raw =
parse_episode_media_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {
if found_raw.is_ok() {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
} else {
println!(
"expected {} and found {:#?} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
found_raw
)
}
}
assert_eq!(expected, found);
}
}
}

View File

@@ -1,34 +0,0 @@
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use regex::Regex;
const LANG_ZH_TW: &str = "zh-tw";
const LANG_ZH: &str = "zh";
const LANG_EN: &str = "en";
const LANG_JP: &str = "jp";
lazy_static! {
pub static ref SEASON_REGEX: Regex =
Regex::new(r"(S\|[Ss]eason\s+)(\d+)").expect("Invalid regex");
pub static ref TORRENT_PRASE_RULE_REGS: Vec<FancyRegex> = vec![
FancyRegex::new(
r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)"
)
.unwrap(),
FancyRegex::new(
r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)"
)
.unwrap(),
FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
];
pub static ref SUBTITLE_LANG: Vec<(&'static str, Vec<&'static str>)> = {
vec![
(LANG_ZH_TW, vec!["tc", "cht", "繁", "zh-tw"]),
(LANG_ZH, vec!["sc", "chs", "简", "zh", "zh-cn"]),
(LANG_EN, vec!["en", "eng", "英"]),
(LANG_JP, vec!["jp", "jpn", "日"]),
]
};
}

View File

@@ -227,10 +227,12 @@ impl MikanClient {
self.fork_with_userpass_credential(userpass_credential)
.await
} else {
Err(RecorderError::from_model_not_found_detail(
"credential",
format!("credential id {credential_id} not found"),
))
Err(RecorderError::from_entity_not_found_detail::<
credential_3rd::Entity,
_,
>(format!(
"credential id {credential_id} not found"
)))
}
}

View File

@@ -546,14 +546,12 @@ impl MikanBangumiSubscription {
#[cfg(test)]
#[allow(unused_variables)]
mod tests {
use std::sync::Arc;
use rstest::{fixture, rstest};
use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait};
use tracing::Level;
use crate::{
app::AppContextTrait,
errors::RecorderResult,
extract::mikan::{
MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
@@ -564,34 +562,11 @@ mod tests {
subscriptions::{self, SubscriptionTrait},
},
test_utils::{
app::{TestingAppContext, TestingAppContextPreset},
mikan::{MikanMockServer, build_testing_mikan_credential_form},
app::TestingPreset, mikan::build_testing_mikan_credential_form,
tracing::try_init_testing_tracing,
},
};
struct TestingResources {
pub app_ctx: Arc<dyn AppContextTrait>,
pub mikan_server: MikanMockServer,
}
async fn build_testing_app_context() -> RecorderResult<TestingResources> {
let mikan_server = MikanMockServer::new().await?;
let mikan_base_url = mikan_server.base_url().clone();
let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
mikan_base_url: mikan_base_url.to_string(),
database_config: None,
})
.await?;
Ok(TestingResources {
app_ctx,
mikan_server,
})
}
#[fixture]
fn before_each() {
try_init_testing_tracing(Level::DEBUG);
@@ -600,10 +575,10 @@ mod tests {
#[rstest]
#[tokio::test]
async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources {
app_ctx,
mut mikan_server,
} = build_testing_app_context().await?;
let mut preset = TestingPreset::default().await?;
let app_ctx = preset.app_ctx.clone();
let mikan_server = &mut preset.mikan_server;
let _resources_mock = mikan_server.mock_resources_with_doppel();
@@ -662,10 +637,11 @@ mod tests {
#[rstest]
#[tokio::test]
async fn test_mikan_subscriber_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources {
app_ctx,
mut mikan_server,
} = build_testing_app_context().await?;
let mut preset = TestingPreset::default().await?;
let app_ctx = preset.app_ctx.clone();
let mikan_server = &mut preset.mikan_server;
let _resources_mock = mikan_server.mock_resources_with_doppel();
@@ -729,10 +705,11 @@ mod tests {
#[rstest]
#[tokio::test]
async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources {
app_ctx,
mut mikan_server,
} = build_testing_app_context().await?;
let mut preset = TestingPreset::default().await?;
let app_ctx = preset.app_ctx.clone();
let mikan_server = &mut preset.mikan_server;
let _resources_mock = mikan_server.mock_resources_with_doppel();

View File

@@ -26,8 +26,8 @@ use crate::{
MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
MIKAN_YEAR_QUERY_KEY, MikanClient, build_mikan_bangumi_subscription_rss_url,
build_mikan_subscriber_subscription_rss_url,
MIKAN_UNKNOWN_FANSUB_ID, MIKAN_YEAR_QUERY_KEY, MikanClient,
build_mikan_bangumi_subscription_rss_url, build_mikan_subscriber_subscription_rss_url,
},
},
media::{
@@ -35,7 +35,7 @@ use crate::{
EncodeWebpOptions,
},
storage::StorageContentCategory,
task::{OptimizeImageTask, SystemTask},
task::OptimizeImageTask,
};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@@ -564,16 +564,17 @@ pub fn extract_mikan_episode_meta_from_episode_homepage_html(
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
})?;
let fansub_name = html
.select(
let fansub_name = if mikan_fansub_id == MIKAN_UNKNOWN_FANSUB_ID {
MIKAN_UNKNOWN_FANSUB_ID.to_string()
} else {
html.select(
&Selector::parse(".bangumi-info a.magnet-link-wrap[href^='/Home/PublishGroup/']")
.unwrap(),
)
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name"))
})?;
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))?
};
let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
el.value()
@@ -685,6 +686,13 @@ pub fn extract_mikan_fansub_meta_from_bangumi_homepage_html(
html: &Html,
mikan_fansub_id: String,
) -> Option<MikanFansubMeta> {
if mikan_fansub_id == MIKAN_UNKNOWN_FANSUB_ID {
return Some(MikanFansubMeta {
mikan_fansub_id,
fansub: MIKAN_UNKNOWN_FANSUB_ID.to_string(),
});
}
html.select(
&Selector::parse(&format!(
"a.subgroup-name[data-anchor='#{mikan_fansub_id}']"
@@ -818,11 +826,14 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
let webp_storage_path = storage_path.with_extension("webp");
if storage_service.exists(&webp_storage_path).await?.is_none() {
task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
source_path: storage_path.clone().to_string(),
target_path: webp_storage_path.to_string(),
format_options: EncodeImageOptions::Webp(EncodeWebpOptions::default()),
}))
.add_system_task(
OptimizeImageTask::builder()
.source_path(storage_path.clone().to_string())
.target_path(webp_storage_path.to_string())
.format_options(EncodeImageOptions::Webp(EncodeWebpOptions::default()))
.build()
.into(),
)
.await?;
}
}
@@ -830,11 +841,14 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
let avif_storage_path = storage_path.with_extension("avif");
if storage_service.exists(&avif_storage_path).await?.is_none() {
task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
source_path: storage_path.clone().to_string(),
target_path: avif_storage_path.to_string(),
format_options: EncodeImageOptions::Avif(EncodeAvifOptions::default()),
}))
.add_system_task(
OptimizeImageTask::builder()
.source_path(storage_path.clone().to_string())
.target_path(avif_storage_path.to_string())
.format_options(EncodeImageOptions::Avif(EncodeAvifOptions::default()))
.build()
.into(),
)
.await?;
}
}
@@ -842,11 +856,14 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
let jxl_storage_path = storage_path.with_extension("jxl");
if storage_service.exists(&jxl_storage_path).await?.is_none() {
task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
source_path: storage_path.clone().to_string(),
target_path: jxl_storage_path.to_string(),
format_options: EncodeImageOptions::Jxl(EncodeJxlOptions::default()),
}))
.add_system_task(
OptimizeImageTask::builder()
.source_path(storage_path.clone().to_string())
.target_path(jxl_storage_path.to_string())
.format_options(EncodeImageOptions::Jxl(EncodeJxlOptions::default()))
.build()
.into(),
)
.await?;
}
}
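
The three hunks above replace struct-literal construction of OptimizeImageTask with its derived builder. A reduced sketch of the typed-builder pattern being adopted (a hypothetical two-field stand-in for the real task struct, which also carries format_options):

```rust
use typed_builder::TypedBuilder;

// Hypothetical reduced stand-in: the derive generates a compile-time
// checked builder() -> setters -> build() chain, so a missing field
// fails at compile time instead of at runtime.
#[derive(TypedBuilder)]
struct OptimizeImageTask {
    source_path: String,
    target_path: String,
}

fn demo() -> OptimizeImageTask {
    OptimizeImageTask::builder()
        .source_path("poster.jpg".to_string())
        .target_path("poster.webp".to_string())
        .build()
}
```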
@@ -1089,7 +1106,7 @@ mod test {
use super::*;
use crate::test_utils::{
app::{TestingAppContext, TestingAppContextPreset},
app::{TestingAppContext, TestingPreset},
crypto::build_testing_crypto_service,
database::build_testing_database_service,
mikan::{
@@ -1137,17 +1154,13 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_scrape_mikan_poster_meta_from_image_url(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = MikanMockServer::new().await?;
let mut preset = TestingPreset::default().await?;
let mikan_base_url = mikan_server.base_url().clone();
let app_ctx = preset.app_ctx.clone();
let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
mikan_base_url: mikan_base_url.to_string(),
database_config: None,
})
.await?;
let mikan_base_url = preset.mikan_server.base_url().clone();
let resources_mock = mikan_server.mock_resources_with_doppel();
let resources_mock = preset.mikan_server.mock_resources_with_doppel();
let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;

View File

@@ -1,5 +1,4 @@
pub mod bittorrent;
pub mod defs;
pub mod html;
pub mod http;
pub mod media;

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::bangumi};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::bangumi,
};
pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId);
@@ -8,7 +14,6 @@ pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<bangumi::BangumiType>();
seaography::register_entity!(builder, bangumi);
builder
register_entity_default_writable!(builder, bangumi, false)
}

View File

@@ -1,50 +1,28 @@
use std::sync::Arc;
use async_graphql::dynamic::{
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
};
use async_graphql::dynamic::{Field, FieldFuture, FieldValue, Object, TypeRef};
use sea_orm::{EntityTrait, QueryFilter};
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql;
use crate::{
app::AppContextTrait,
auth::AuthUserInfo,
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::crypto::{
register_crypto_column_input_conversion_to_schema_context,
register_crypto_column_output_conversion_to_schema_context,
infra::{
crypto::{
register_crypto_column_input_conversion_to_schema_context,
register_crypto_column_output_conversion_to_schema_context,
},
custom::{generate_entity_filtered_mutation_field, register_entity_default_writable},
name::get_entity_custom_mutation_field_name,
},
},
models::credential_3rd,
};
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
struct Credential3rdCheckAvailableInput {
pub id: i32,
}
impl Credential3rdCheckAvailableInput {
fn input_type_name() -> &'static str {
"Credential3rdCheckAvailableInput"
}
fn arg_name() -> &'static str {
"filter"
}
fn generate_input_object() -> InputObject {
InputObject::new(Self::input_type_name())
.description("The input of the credential3rdCheckAvailable query")
.field(InputValue::new(
Credential3rdCheckAvailableInputFieldEnum::Id.as_str(),
TypeRef::named_nn(TypeRef::INT),
))
}
}
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct Credential3rdCheckAvailableInfo {
pub available: bool,
@@ -117,52 +95,43 @@ pub fn register_credential3rd_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.register_enumeration::<credential_3rd::Credential3rdType>();
seaography::register_entity!(builder, credential_3rd);
builder = register_entity_default_writable!(builder, credential_3rd, false);
builder.schema = builder
.schema
.register(Credential3rdCheckAvailableInput::generate_input_object());
builder.schema = builder
.schema
.register(Credential3rdCheckAvailableInfo::generate_output_object());
builder.queries.push(
Field::new(
"credential3rdCheckAvailable",
TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
let input: Credential3rdCheckAvailableInput = ctx
.args
.get(Credential3rdCheckAvailableInput::arg_name())
.unwrap()
.deserialize()?;
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let builder_context = &builder.context;
{
let check_available_mutation_name = get_entity_custom_mutation_field_name::<
credential_3rd::Entity,
>(builder_context, "CheckAvailable");
let check_available_mutation =
generate_entity_filtered_mutation_field::<credential_3rd::Entity, _, _>(
builder_context,
check_available_mutation_name,
TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let credential_model = credential_3rd::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
input.id,
auth_user_info.subscriber_auth.subscriber_id,
)
.await?
.ok_or_else(|| RecorderError::Credential3rdError {
message: format!("credential = {} not found", input.id),
source: None.into(),
})?;
let credential_model = credential_3rd::Entity::find()
.filter(filters)
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<credential_3rd::Entity>()
})?;
let available = credential_model.check_available(app_ctx.as_ref()).await?;
Ok(Some(FieldValue::owned_any(
Credential3rdCheckAvailableInfo { available },
)))
})
},
)
.argument(InputValue::new(
Credential3rdCheckAvailableInput::arg_name(),
TypeRef::named_nn(Credential3rdCheckAvailableInput::input_type_name()),
)),
);
let available = credential_model.check_available(app_ctx.as_ref()).await?;
Ok(Some(FieldValue::owned_any(
Credential3rdCheckAvailableInfo { available },
)))
})
}),
);
builder.mutations.push(check_available_mutation);
}
builder
}

View File

@@ -0,0 +1,64 @@
use sea_orm::Iterable;
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::{
domains::{
subscriber_tasks::restrict_subscriber_tasks_for_entity,
subscribers::restrict_subscriber_for_entity,
system_tasks::restrict_system_tasks_for_entity,
},
infra::{custom::register_entity_default_writable, name::get_entity_and_column_name},
},
models::cron,
};
fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in cron::Column::iter() {
if matches!(
column,
cron::Column::SubscriberTaskCron
| cron::Column::SystemTaskCron
| cron::Column::CronExpr
| cron::Column::CronTimezone
| cron::Column::Enabled
| cron::Column::TimeoutMs
| cron::Column::MaxAttempts
) {
continue;
}
let entity_column_key = get_entity_and_column_name::<cron::Entity>(context, &column);
context.entity_input.insert_skips.push(entity_column_key);
}
for column in cron::Column::iter() {
if matches!(column, |cron::Column::CronExpr| cron::Column::CronTimezone
| cron::Column::Enabled
| cron::Column::TimeoutMs
| cron::Column::Priority
| cron::Column::MaxAttempts)
{
continue;
}
let entity_column_key = get_entity_and_column_name::<cron::Entity>(context, &column);
context.entity_input.update_skips.push(entity_column_key);
}
}
pub fn register_cron_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<cron::Entity>(context, &cron::Column::SubscriberId);
restrict_subscriber_tasks_for_entity::<cron::Entity>(
context,
&cron::Column::SubscriberTaskCron,
);
restrict_system_tasks_for_entity::<cron::Entity>(context, &cron::Column::SystemTaskCron);
skip_columns_for_entity_input(context);
}
pub fn register_cron_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<cron::CronStatus>();
builder = register_entity_default_writable!(builder, cron, true);
builder
}

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloaders};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::downloaders,
};
pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<downloaders::Entity>(
@@ -11,7 +17,7 @@ pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<downloaders::DownloaderCategory>();
seaography::register_entity!(builder, downloaders);
builder = register_entity_default_writable!(builder, downloaders, false);
builder
}

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloads};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::downloads,
};
pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId);
@@ -9,7 +15,7 @@ pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<downloads::DownloadStatus>();
builder.register_enumeration::<downloads::DownloadMime>();
seaography::register_entity!(builder, downloads);
builder = register_entity_default_writable!(builder, downloads, false);
builder
}

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::episodes};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::episodes,
};
pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId);
@@ -8,7 +14,7 @@ pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<episodes::EpisodeType>();
seaography::register_entity!(builder, episodes);
builder = register_entity_default_writable!(builder, episodes, false);
builder
}

View File

@@ -3,11 +3,18 @@ use std::sync::Arc;
use async_graphql::dynamic::ResolverContext;
use sea_orm::Value as SeaValue;
use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};
use uuid::Uuid;
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::util::{get_entity_column_key, get_entity_key},
infra::{
custom::register_entity_default_writable,
name::{
get_entity_and_column_name, get_entity_create_batch_mutation_field_name,
get_entity_create_one_mutation_field_name,
},
},
},
models::feeds,
};
@@ -15,29 +22,23 @@ use crate::{
pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<feeds::Entity>(context, &feeds::Column::SubscriberId);
{
let entity_column_key =
get_entity_column_key::<feeds::Entity>(context, &feeds::Column::Token);
let entity_key = get_entity_key::<feeds::Entity>(context);
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
let entity_create_one_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name, context.entity_create_one_mutation.mutation_suffix
));
let entity_create_batch_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name,
context.entity_create_batch_mutation.mutation_suffix.clone()
));
let entity_create_one_mutation_field_name = Arc::new(
get_entity_create_one_mutation_field_name::<feeds::Entity>(context),
);
let entity_create_batch_mutation_field_name =
Arc::new(get_entity_create_batch_mutation_field_name::<feeds::Entity>(context));
context.types.input_none_conversions.insert(
entity_column_key,
get_entity_and_column_name::<feeds::Entity>(context, &feeds::Column::Token),
Box::new(
move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
let field_name = context.field().name();
if field_name == entity_create_one_mutation_field_name.as_str()
|| field_name == entity_create_batch_mutation_field_name.as_str()
{
Ok(Some(SeaValue::String(Some(Box::new(nanoid::nanoid!())))))
Ok(Some(SeaValue::String(Some(Box::new(
Uuid::now_v7().to_string(),
)))))
} else {
Ok(None)
}
@@ -50,7 +51,8 @@ pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<feeds::FeedType>();
builder.register_enumeration::<feeds::FeedSource>();
seaography::register_entity!(builder, feeds);
builder = register_entity_default_writable!(builder, feeds, false);
builder
}
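
The none-conversion registered for the feeds token only fires on the create-one and create-batch mutations; everywhere else an absent token stays absent. A sketch of the observable behavior, assuming seaography's default mutation field naming:

// Hypothetical GraphQL call, `token` deliberately omitted from the input:
//   mutation { feedsCreateOne(data: { /* no token */ }) { token } }
// Inside the closure the missing value is then generated as a UUIDv7 string:
let token = uuid::Uuid::now_v7().to_string(); // replaces the previous nanoid::nanoid!() default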

View File

@@ -1,6 +1,7 @@
pub mod credential_3rd;
pub mod bangumi;
pub mod cron;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
@@ -10,3 +11,4 @@ pub mod subscribers;
pub mod subscription_bangumi;
pub mod subscription_episode;
pub mod subscriptions;
pub mod system_tasks;

View File

@@ -1,51 +1,143 @@
use std::{ops::Deref, sync::Arc};
use async_graphql::dynamic::{FieldValue, TypeRef};
use async_graphql::dynamic::{FieldValue, Scalar, TypeRef};
use convert_case::Case;
use sea_orm::{
ColumnTrait, ConnectionTrait, EntityTrait, QueryFilter, QuerySelect, QueryTrait, prelude::Expr,
sea_query::Query,
ActiveModelBehavior, ColumnTrait, ConnectionTrait, EntityTrait, Iterable, QueryFilter,
QuerySelect, QueryTrait, prelude::Expr, sea_query::Query,
};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, EntityDeleteMutationBuilder, EntityObjectBuilder,
EntityQueryFieldBuilder, get_filter_conditions,
Builder as SeaographyBuilder, BuilderContext, SeaographyError, prepare_active_model,
};
use ts_rs::TS;
use crate::{
auth::AuthUserInfo,
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::{
custom::generate_entity_filter_mutation_field,
json::{convert_jsonb_output_case_for_entity, restrict_jsonb_filter_input_for_entity},
custom::{
generate_entity_create_one_mutation_field,
generate_entity_default_basic_entity_object,
generate_entity_default_insert_input_object, generate_entity_delete_mutation_field,
generate_entity_filtered_mutation_field, register_entity_default_readonly,
},
json::{convert_jsonb_output_for_entity, restrict_jsonb_filter_input_for_entity},
name::{
get_entity_and_column_name, get_entity_basic_type_name,
get_entity_custom_mutation_field_name,
},
},
},
migrations::defs::{ApalisJobs, ApalisSchema},
models::subscriber_tasks,
task::{ApalisJobs, ApalisSchema},
task::SubscriberTaskTrait,
};
pub fn register_subscriber_tasks_entity_mutations(
fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in subscriber_tasks::Column::iter() {
if matches!(
column,
subscriber_tasks::Column::Job | subscriber_tasks::Column::SubscriberId
) {
continue;
}
let entity_column_key =
get_entity_and_column_name::<subscriber_tasks::Entity>(context, &column);
context.entity_input.insert_skips.push(entity_column_key);
}
}
pub fn restrict_subscriber_tasks_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_and_column = get_entity_and_column_name::<T>(context, column);
restrict_jsonb_filter_input_for_entity::<T>(context, column);
convert_jsonb_output_for_entity::<T>(context, column, Some(Case::Camel));
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.types.input_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(subscriber_tasks::SubscriberTask::ident().into()),
);
context.types.output_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(subscriber_tasks::SubscriberTask::ident().into()),
);
context.types.input_conversions.insert(
entity_column_name.clone(),
Box::new(move |resolve_context, value_accessor| {
let task: subscriber_tasks::SubscriberTaskInput = value_accessor.deserialize()?;
let subscriber_id = resolve_context
.data::<AuthUserInfo>()?
.subscriber_auth
.subscriber_id;
let task = subscriber_tasks::SubscriberTask::from_input(task, subscriber_id);
let json_value = serde_json::to_value(task).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_name}"),
)
})?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}),
);
context.entity_input.update_skips.push(entity_and_column);
}
pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::SubscriberId,
);
restrict_subscriber_tasks_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
);
skip_columns_for_entity_input(context);
}
pub fn register_subscriber_tasks_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
let context = builder.context;
builder.schema = builder.schema.register(
Scalar::new(subscriber_tasks::SubscriberTask::ident())
.description(subscriber_tasks::SubscriberTask::decl()),
);
builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
builder = register_entity_default_readonly!(builder, subscriber_tasks);
let builder_context = builder.context;
{
let entity_delete_mutation_builder = EntityDeleteMutationBuilder { context };
let delete_mutation = generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
context,
entity_delete_mutation_builder.type_name::<subscriber_tasks::Entity>(),
TypeRef::named_nn(TypeRef::INT),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
resolver_ctx,
context,
filters,
);
builder
.outputs
.push(generate_entity_default_basic_entity_object::<
subscriber_tasks::Entity,
>(builder_context));
}
{
let delete_mutation = generate_entity_delete_mutation_field::<subscriber_tasks::Entity>(
builder_context,
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let select_subquery = subscriber_tasks::Entity::find()
.select_only()
.column(subscriber_tasks::Column::Id)
.filter(filters_condition);
.filter(filters);
let delete_query = Query::delete()
.from_table((ApalisSchema::Schema, ApalisJobs::Table))
@@ -59,41 +151,37 @@ pub fn register_subscriber_tasks_entity_mutations(
let result = db.execute(delete_statement).await?;
Ok::<_, RecorderError>(Some(FieldValue::value(result.rows_affected() as i32)))
Ok::<_, RecorderError>(result.rows_affected())
})
}),
);
builder.mutations.push(delete_mutation);
}
{
let entity_object_builder = EntityObjectBuilder { context };
let entity_query_field = EntityQueryFieldBuilder { context };
let entity_retry_one_mutation_name = format!(
"{}RetryOne",
entity_query_field.type_name::<subscriber_tasks::Entity>()
);
let entity_retry_one_mutation_name = get_entity_custom_mutation_field_name::<
subscriber_tasks::Entity,
>(builder_context, "RetryOne");
let retry_one_mutation =
generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
context,
generate_entity_filtered_mutation_field::<subscriber_tasks::Entity, _, _>(
builder_context,
entity_retry_one_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
resolver_ctx,
context,
filters,
);
TypeRef::named_nn(get_entity_basic_type_name::<subscriber_tasks::Entity>(
builder_context,
)),
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let job_id = subscriber_tasks::Entity::find()
.filter(filters_condition)
.filter(filters)
.select_only()
.column(subscriber_tasks::Column::Id)
.into_tuple::<String>()
.one(db)
.await?
.ok_or_else(|| RecorderError::from_model_not_found("SubscriberTask"))?;
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
})?;
let task = app_ctx.task();
task.retry_subscriber_task(job_id.clone()).await?;
@@ -102,7 +190,9 @@ pub fn register_subscriber_tasks_entity_mutations(
.filter(subscriber_tasks::Column::Id.eq(&job_id))
.one(db)
.await?
.ok_or_else(|| RecorderError::from_model_not_found("SubscriberTask"))?;
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
})?;
Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
})
@@ -110,38 +200,54 @@ pub fn register_subscriber_tasks_entity_mutations(
);
builder.mutations.push(retry_one_mutation);
}
{
builder
.inputs
.push(generate_entity_default_insert_input_object::<
subscriber_tasks::Entity,
>(builder_context));
let create_one_mutation =
generate_entity_create_one_mutation_field::<subscriber_tasks::Entity>(
builder_context,
Arc::new(move |resolver_ctx, app_ctx, input_object| {
Box::pin(async move {
let active_model: Result<subscriber_tasks::ActiveModel, _> =
prepare_active_model(builder_context, &input_object, resolver_ctx);
builder
}
pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::SubscriberId,
);
restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
);
convert_jsonb_output_case_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
);
}
pub fn register_subscriber_tasks_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.register_entity::<subscriber_tasks::Entity>(
<subscriber_tasks::RelatedEntity as sea_orm::Iterable>::iter()
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(subscriber_tasks::Entity, tokio::spawn);
builder =
builder.register_entity_dataloader_one_to_many(subscriber_tasks::Entity, tokio::spawn);
builder = register_subscriber_tasks_entity_mutations(builder);
builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
let task_service = app_ctx.task();
let active_model = active_model?;
let db = app_ctx.db();
let active_model = active_model.before_save(db, true).await?;
let task = active_model.job.unwrap();
let subscriber_id = active_model.subscriber_id.unwrap();
if task.get_subscriber_id() != subscriber_id {
Err(async_graphql::Error::new(
"subscriber_id does not match with job.subscriber_id",
))?;
}
let task_id = task_service.add_subscriber_task(task).await?.to_string();
let db = app_ctx.db();
let task = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(&task_id))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
})?;
Ok::<_, RecorderError>(task)
})
}),
);
builder.mutations.push(create_one_mutation);
}
builder
}
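
The create-one path above is the heart of this rewrite: the default insert machinery is reused via prepare_active_model, but the resolver never inserts a row itself; the typed job goes to the task service, which owns the apalis table, and the resulting row is read back by id. Condensed from the closure above, error plumbing elided:

// Sketch, names as in the diff:
let active_model = active_model?.before_save(db, true).await?;
let task = active_model.job.unwrap(); // ActiveValue::unwrap — set by the Job input conversion
let subscriber_id = active_model.subscriber_id.unwrap();
if task.get_subscriber_id() != subscriber_id {
    // tenancy guard enforced in the resolver, not by a DB constraint
    Err(async_graphql::Error::new("subscriber_id does not match job.subscriber_id"))?;
}
let task_id = task_service.add_subscriber_task(task).await?.to_string();
// the service wrote the row, so fetch it back by id instead of inserting here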

View File

@@ -7,12 +7,22 @@ use sea_orm::{ColumnTrait, Condition, EntityTrait, Iterable, Value as SeaValue};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, FilterInfo,
FilterOperation as SeaographqlFilterOperation, FilterType, FilterTypesMapHelper,
FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult, SeaographyError,
FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult,
};
use crate::{
auth::{AuthError, AuthUserInfo},
graphql::infra::util::{get_column_key, get_entity_column_key, get_entity_key},
graphql::infra::{
custom::register_entity_default_readonly,
name::{
get_column_name, get_entity_and_column_name,
get_entity_create_batch_mutation_data_field_name,
get_entity_create_batch_mutation_field_name,
get_entity_create_one_mutation_data_field_name,
get_entity_create_one_mutation_field_name, get_entity_name,
get_entity_update_mutation_data_field_name, get_entity_update_mutation_field_name,
},
},
models::subscribers,
};
@@ -82,32 +92,19 @@ where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
let column_key = get_column_key::<T>(context, column);
let column_name = Arc::new(context.entity_object.column_name.as_ref()(
&entity_key,
&column_key,
));
let entity_create_one_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name, context.entity_create_one_mutation.mutation_suffix
));
let column_name = Arc::new(get_column_name::<T>(context, column));
let entity_create_one_mutation_field_name =
Arc::new(get_entity_create_one_mutation_field_name::<T>(context));
let entity_create_one_mutation_data_field_name =
Arc::new(context.entity_create_one_mutation.data_field.clone());
let entity_create_batch_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name,
context.entity_create_batch_mutation.mutation_suffix.clone()
));
Arc::new(get_entity_create_one_mutation_data_field_name(context).to_string());
let entity_create_batch_mutation_field_name =
Arc::new(get_entity_create_batch_mutation_field_name::<T>(context));
let entity_create_batch_mutation_data_field_name =
Arc::new(context.entity_create_batch_mutation.data_field.clone());
let entity_update_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name, context.entity_update_mutation.mutation_suffix
));
Arc::new(get_entity_create_batch_mutation_data_field_name(context).to_string());
let entity_update_mutation_field_name =
Arc::new(get_entity_update_mutation_field_name::<T>(context));
let entity_update_mutation_data_field_name =
Arc::new(context.entity_update_mutation.data_field.clone());
Arc::new(get_entity_update_mutation_data_field_name(context).to_string());
Box::new(move |context: &ResolverContext| -> GuardAction {
match context.ctx.data::<AuthUserInfo>() {
@@ -222,11 +219,10 @@ where
if let Some(value) = filter.get("eq") {
let value: i32 = value.i64()?.try_into()?;
if value != subscriber_id {
return Err(SeaographyError::AsyncGraphQLError(
async_graphql::Error::new(
"subscriber_id and auth_info does not match",
),
));
return Err(async_graphql::Error::new(
"subscriber_id and auth_info does not match",
)
.into());
}
}
}
@@ -253,17 +249,10 @@ where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
let entity_create_one_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name, context.entity_create_one_mutation.mutation_suffix
));
let entity_create_batch_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name,
context.entity_create_batch_mutation.mutation_suffix.clone()
));
let entity_create_one_mutation_field_name =
Arc::new(get_entity_create_one_mutation_field_name::<T>(context));
let entity_create_batch_mutation_field_name =
Arc::new(get_entity_create_batch_mutation_field_name::<T>(context));
Box::new(
move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
let field_name = context.field().name();
@@ -289,40 +278,39 @@ where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let entity_column_key = get_entity_column_key::<T>(context, column);
let entity_and_column = get_entity_and_column_name::<T>(context, column);
context.guards.entity_guards.insert(
entity_key.clone(),
get_entity_name::<T>(context),
guard_entity_with_subscriber_id::<T>(context, column),
);
context.guards.field_guards.insert(
entity_column_key.clone(),
get_entity_and_column_name::<T>(context, column),
guard_field_with_subscriber_id::<T>(context, column),
);
context.filter_types.overwrites.insert(
entity_column_key.clone(),
get_entity_and_column_name::<T>(context, column),
Some(FilterType::Custom(
SUBSCRIBER_ID_FILTER_INFO.type_name.clone(),
)),
);
context.filter_types.condition_functions.insert(
entity_column_key.clone(),
entity_and_column.clone(),
generate_subscriber_id_filter_condition::<T>(context, column),
);
context.types.input_none_conversions.insert(
entity_column_key.clone(),
entity_and_column.clone(),
generate_default_subscriber_id_input_conversion::<T>(context, column),
);
context.entity_input.update_skips.push(entity_column_key);
context.entity_input.update_skips.push(entity_and_column);
}
pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscribers::Entity>(context, &subscribers::Column::Id);
for column in subscribers::Column::iter() {
if !matches!(column, subscribers::Column::Id) {
let key = get_entity_column_key::<subscribers::Entity>(context, &column);
let key = get_entity_and_column_name::<subscribers::Entity>(context, &column);
context.filter_types.overwrites.insert(key, None);
}
}
@@ -330,24 +318,14 @@ pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
pub fn register_subscribers_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
{
let filter_types_map_helper = FilterTypesMapHelper {
context: builder.context,
};
builder.schema = builder
.schema
.register(filter_types_map_helper.generate_filter_input(&SUBSCRIBER_ID_FILTER_INFO));
.register(FilterTypesMapHelper::generate_filter_input(
&SUBSCRIBER_ID_FILTER_INFO,
));
}
{
builder.register_entity::<subscribers::Entity>(
<subscribers::RelatedEntity as sea_orm::Iterable>::iter()
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
}
builder = register_entity_default_readonly!(builder, subscribers);
builder
}
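
restrict_subscriber_for_entity is the one-call opt-in that every tenant-scoped domain in this diff uses; for a given entity/column pair it installs the full set of guards and conversions at once. Summarized from the function above:

// restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId)
// installs, keyed by the "Entity.column" names from name.rs:
//   - an entity guard and a field guard tied to the AuthUserInfo in the request context,
//   - a custom filter type that (per the condition function) accepts only `eq` against
//     the session's own subscriber_id,
//   - a none-conversion that injects the session's subscriber_id on create-one/create-batch,
//   - an update skip, so the column can never be rewritten through the update mutation.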

View File

@@ -1,7 +1,11 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_bangumi,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::subscription_bangumi,
};
pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
@@ -14,7 +18,7 @@ pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderCont
pub fn register_subscription_bangumi_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
seaography::register_entity!(builder, subscription_bangumi);
builder = register_entity_default_writable!(builder, subscription_bangumi, false);
builder
}

View File

@@ -1,7 +1,11 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_episode,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::subscription_episode,
};
pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
@@ -14,7 +18,7 @@ pub fn register_subscription_episode_to_schema_context(context: &mut BuilderCont
pub fn register_subscription_episode_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
seaography::register_entity!(builder, subscription_episode);
builder = register_entity_default_writable!(builder, subscription_episode, false);
builder
}

View File

@@ -1,23 +1,11 @@
use std::sync::Arc;
use async_graphql::dynamic::{FieldValue, TypeRef};
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, EntityObjectBuilder, EntityQueryFieldBuilder,
get_filter_conditions,
};
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::generate_entity_filter_mutation_field,
infra::custom::register_entity_default_writable,
},
models::{
subscriber_tasks,
subscriptions::{self, SubscriptionTrait},
},
task::SubscriberTask,
models::subscriptions,
};
pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) {
@@ -31,172 +19,6 @@ pub fn register_subscriptions_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.register_enumeration::<subscriptions::SubscriptionCategory>();
seaography::register_entity!(builder, subscriptions);
let context = builder.context;
let entity_object_builder = EntityObjectBuilder { context };
let entity_query_field = EntityQueryFieldBuilder { context };
{
let sync_one_feeds_incremental_mutation_name = format!(
"{}SyncOneFeedsIncremental",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_feeds_incremental_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_incremental_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::from_model_not_found("Subscription"))?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsIncremental(
subscription.into(),
),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::from_model_not_found("SubscriberTask"))?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_feeds_incremental_mutation);
}
{
let sync_one_feeds_full_mutation_name = format!(
"{}SyncOneFeedsFull",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_feeds_full_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_full_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::from_model_not_found("Subscription"))?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsFull(subscription.into()),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::from_model_not_found("SubscriberTask"))?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_feeds_full_mutation);
}
{
let sync_one_sources_mutation_name = format!(
"{}SyncOneSources",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_sources_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_sources_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::from_model_not_found("Subscription"))?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionSources(subscription.into()),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::from_model_not_found("SubscriberTask"))?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_sources_mutation);
}
builder = register_entity_default_writable!(builder, subscriptions, false);
builder
}
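
With the bespoke SyncOneFeedsIncremental/SyncOneFeedsFull/SyncOneSources mutations deleted, subscriptions expose only the generic writable surface; the same work is presumably enqueued through the subscriber-tasks create-one mutation added earlier in this diff. A hypothetical replacement call (field and input names assumed, not confirmed by the diff):

// mutation {
//   subscriberTasksCreateOne(data: { job: { /* typed SubscriberTaskInput payload */ } }) {
//     id
//     status
//   }
// }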

View File

@@ -0,0 +1,258 @@
use std::{ops::Deref, sync::Arc};
use async_graphql::dynamic::{FieldValue, Scalar, TypeRef};
use convert_case::Case;
use sea_orm::{
ActiveModelBehavior, ColumnTrait, ConnectionTrait, EntityTrait, Iterable, QueryFilter,
QuerySelect, QueryTrait, prelude::Expr, sea_query::Query,
};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, GuardAction, SeaographyError,
prepare_active_model,
};
use ts_rs::TS;
use crate::{
auth::AuthUserInfo,
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::{
custom::{
generate_entity_create_one_mutation_field,
generate_entity_default_basic_entity_object,
generate_entity_default_insert_input_object, generate_entity_delete_mutation_field,
generate_entity_filtered_mutation_field, register_entity_default_readonly,
},
json::{convert_jsonb_output_for_entity, restrict_jsonb_filter_input_for_entity},
name::{
get_entity_and_column_name, get_entity_basic_type_name,
get_entity_custom_mutation_field_name,
},
},
},
migrations::defs::{ApalisJobs, ApalisSchema},
models::system_tasks,
task::SystemTaskTrait,
};
fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in system_tasks::Column::iter() {
if matches!(
column,
system_tasks::Column::Job | system_tasks::Column::SubscriberId
) {
continue;
}
let entity_column_key =
get_entity_and_column_name::<system_tasks::Entity>(context, &column);
context.entity_input.insert_skips.push(entity_column_key);
}
}
pub fn restrict_system_tasks_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_and_column = get_entity_and_column_name::<T>(context, column);
restrict_jsonb_filter_input_for_entity::<T>(context, column);
convert_jsonb_output_for_entity::<T>(context, column, Some(Case::Camel));
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.guards.field_guards.insert(
entity_column_name.clone(),
Box::new(|_resolver_ctx| {
GuardAction::Block(Some(
"SystemTask can not be created by subscribers now".to_string(),
))
}),
);
context.types.input_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(system_tasks::SystemTask::ident().into()),
);
context.types.output_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(system_tasks::SystemTask::ident().into()),
);
context.types.input_conversions.insert(
entity_column_name.clone(),
Box::new(move |resolve_context, value_accessor| {
let task: system_tasks::SystemTaskInput = value_accessor.deserialize()?;
let subscriber_id = resolve_context
.data::<AuthUserInfo>()?
.subscriber_auth
.subscriber_id;
let task = system_tasks::SystemTask::from_input(task, Some(subscriber_id));
let json_value = serde_json::to_value(task).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_name}"),
)
})?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}),
);
context.entity_input.update_skips.push(entity_and_column);
}
pub fn register_system_tasks_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<system_tasks::Entity>(
context,
&system_tasks::Column::SubscriberId,
);
restrict_system_tasks_for_entity::<system_tasks::Entity>(context, &system_tasks::Column::Job);
skip_columns_for_entity_input(context);
}
pub fn register_system_tasks_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.schema = builder.schema.register(
Scalar::new(system_tasks::SystemTask::ident())
.description(system_tasks::SystemTask::decl()),
);
builder.register_enumeration::<system_tasks::SystemTaskType>();
builder.register_enumeration::<system_tasks::SystemTaskStatus>();
builder = register_entity_default_readonly!(builder, system_tasks);
let builder_context = builder.context;
{
builder
.outputs
.push(generate_entity_default_basic_entity_object::<
system_tasks::Entity,
>(builder_context));
}
{
let delete_mutation = generate_entity_delete_mutation_field::<system_tasks::Entity>(
builder_context,
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let select_subquery = system_tasks::Entity::find()
.select_only()
.column(system_tasks::Column::Id)
.filter(filters);
let delete_query = Query::delete()
.from_table((ApalisSchema::Schema, ApalisJobs::Table))
.and_where(
Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()),
)
.to_owned();
let db_backend = db.deref().get_database_backend();
let delete_statement = db_backend.build(&delete_query);
let result = db.execute(delete_statement).await?;
Ok::<_, RecorderError>(result.rows_affected())
})
}),
);
builder.mutations.push(delete_mutation);
}
{
let entity_retry_one_mutation_name = get_entity_custom_mutation_field_name::<
system_tasks::Entity,
>(builder_context, "RetryOne");
let retry_one_mutation =
generate_entity_filtered_mutation_field::<system_tasks::Entity, _, _>(
builder_context,
entity_retry_one_mutation_name,
TypeRef::named_nn(get_entity_basic_type_name::<system_tasks::Entity>(
builder_context,
)),
Arc::new(|_resolver_ctx, app_ctx, filters| {
Box::pin(async move {
let db = app_ctx.db();
let job_id = system_tasks::Entity::find()
.filter(filters)
.select_only()
.column(system_tasks::Column::Id)
.into_tuple::<String>()
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<system_tasks::Entity>()
})?;
let task = app_ctx.task();
task.retry_system_task(job_id.clone()).await?; // assumed: system-task counterpart of retry_subscriber_task
let task_model = system_tasks::Entity::find()
.filter(system_tasks::Column::Id.eq(&job_id))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<system_tasks::Entity>()
})?;
Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(retry_one_mutation);
}
{
builder
.inputs
.push(generate_entity_default_insert_input_object::<
system_tasks::Entity,
>(builder_context));
let create_one_mutation = generate_entity_create_one_mutation_field::<system_tasks::Entity>(
builder_context,
Arc::new(move |resolver_ctx, app_ctx, input_object| {
Box::pin(async move {
let active_model: Result<system_tasks::ActiveModel, _> =
prepare_active_model(builder_context, &input_object, resolver_ctx);
let task_service = app_ctx.task();
let active_model = active_model?;
let db = app_ctx.db();
let active_model = active_model.before_save(db, true).await?;
let task = active_model.job.unwrap();
let subscriber_id = active_model.subscriber_id.unwrap();
if task.get_subscriber_id() != subscriber_id {
Err(async_graphql::Error::new(
"subscriber_id does not match with job.subscriber_id",
))?;
}
let task_id = task_service.add_system_task(task).await?.to_string();
let db = app_ctx.db();
let task = system_tasks::Entity::find()
.filter(system_tasks::Column::Id.eq(&task_id))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<system_tasks::Entity>()
})?;
Ok::<_, RecorderError>(task)
})
}),
);
builder.mutations.push(create_one_mutation);
}
builder
}
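
system_tasks mirrors the subscriber_tasks registration almost line for line; the differences worth noting are the optional tenant on the task input and the blanket field guard. Side by side:

// system task inputs may carry a tenant, subscriber task inputs always do:
let task = system_tasks::SystemTask::from_input(task, Some(subscriber_id));
// vs. subscriber_tasks::SubscriberTask::from_input(task, subscriber_id);
// and the Job column additionally gets a field guard that blocks subscriber access outright.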

View File

@@ -4,10 +4,7 @@ use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::{EntityTrait, Value as SeaValue};
use seaography::{BuilderContext, SeaResult};
use crate::{
app::AppContextTrait,
graphql::infra::util::{get_column_key, get_entity_key},
};
use crate::{app::AppContextTrait, graphql::infra::name::get_entity_and_column_name};
pub fn register_crypto_column_input_conversion_to_schema_context<T>(
context: &mut BuilderContext,
@@ -17,13 +14,8 @@ pub fn register_crypto_column_input_conversion_to_schema_context<T>(
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
context.types.input_conversions.insert(
format!("{entity_name}.{column_name}"),
get_entity_and_column_name::<T>(context, column),
Box::new(
move |_resolve_context: &ResolverContext<'_>,
value: &ValueAccessor|
@@ -44,13 +36,8 @@ pub fn register_crypto_column_output_conversion_to_schema_context<T>(
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
context.types.output_conversions.insert(
format!("{entity_name}.{column_name}"),
get_entity_and_column_name::<T>(context, column),
Box::new(
move |value: &sea_orm::Value| -> SeaResult<async_graphql::Value> {
if let SeaValue::String(s) = value {

View File

@@ -1,53 +1,140 @@
use std::{pin::Pin, sync::Arc};
use std::{iter::FusedIterator, pin::Pin, sync::Arc};
use async_graphql::dynamic::{
Field, FieldFuture, FieldValue, InputValue, ResolverContext, TypeRef, ValueAccessor,
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, ObjectAccessor,
ResolverContext, TypeRef,
};
use sea_orm::{ActiveModelTrait, Condition, EntityTrait, IntoActiveModel};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, EntityCreateBatchMutationBuilder,
EntityCreateOneMutationBuilder, EntityDeleteMutationBuilder, EntityInputBuilder,
EntityObjectBuilder, EntityUpdateMutationBuilder, GuardAction, RelationBuilder,
get_filter_conditions,
};
use sea_orm::EntityTrait;
use seaography::{BuilderContext, EntityObjectBuilder, FilterInputBuilder, GuardAction};
use crate::{app::AppContextTrait, errors::RecorderResult};
use crate::{
app::AppContextTrait,
errors::RecorderResult,
graphql::infra::name::{
get_entity_filter_input_type_name, get_entity_name,
get_entity_renormalized_filter_field_name,
},
};
pub type FilterMutationFn = Arc<
dyn for<'a> Fn(
&ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Option<ValueAccessor<'_>>,
Condition,
) -> Pin<
Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>,
> + Send
+ Sync,
>;
pub fn generate_entity_filter_mutation_field<T, N, R>(
pub type CreateOneMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
ObjectAccessor<'a>,
) -> Pin<Box<dyn Future<Output = RecorderResult<M>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type CreateBatchMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Vec<ObjectAccessor<'a>>,
) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type UpdateMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Condition,
ObjectAccessor<'a>,
) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type DeleteMutationFn = Arc<
dyn for<'a> Fn(
&ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Condition,
) -> Pin<Box<dyn Future<Output = RecorderResult<u64>> + Send + 'a>>
+ Send
+ Sync,
>;
pub fn generate_entity_default_insert_input_object<T>(context: &BuilderContext) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
EntityInputBuilder::insert_input_object::<T>(context)
}
pub fn generate_entity_default_update_input_object<T>(context: &BuilderContext) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
EntityInputBuilder::update_input_object::<T>(context)
}
pub fn generate_entity_default_basic_entity_object<T>(context: &'static BuilderContext) -> Object
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_object_builder = EntityObjectBuilder { context };
entity_object_builder.basic_to_object::<T>()
}
pub fn generate_entity_input_object<T>(
context: &'static BuilderContext,
is_insert: bool,
) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
if is_insert {
EntityInputBuilder::insert_input_object::<T>(context)
} else {
EntityInputBuilder::update_input_object::<T>(context)
}
}
pub fn generate_entity_filtered_mutation_field<E, N, R>(
builder_context: &'static BuilderContext,
field_name: N,
type_ref: R,
mutation_fn: FilterMutationFn,
) -> Field
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
N: Into<String>,
R: Into<TypeRef>,
{
let entity_filter_input_builder = FilterInputBuilder {
context: builder_context,
};
let entity_object_builder = EntityObjectBuilder {
context: builder_context,
};
let object_name: String = entity_object_builder.type_name::<T>();
let context = builder_context;
let object_name: String = get_entity_name::<E>(builder_context);
let guard = builder_context.guards.entity_guards.get(&object_name);
Field::new(field_name, type_ref, move |ctx| {
Field::new(field_name, type_ref, move |resolve_context| {
let mutation_fn = mutation_fn.clone();
FieldFuture::new(async move {
let guard_flag = if let Some(guard) = guard {
(*guard)(&ctx)
(*guard)(&resolve_context)
} else {
GuardAction::Allow
};
@@ -58,19 +145,297 @@ where
));
}
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let filters = resolve_context
.args
.get(get_entity_renormalized_filter_field_name());
let filters = ctx.args.get(&context.entity_delete_mutation.filter_field);
let filters = get_filter_conditions::<E>(&resolve_context, builder_context, filters);
let result = mutation_fn(&ctx, app_ctx.clone(), filters)
.await
.map_err(async_graphql::Error::new_with_source)?;
let app_ctx = resolve_context.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(&resolve_context, app_ctx.clone(), filters).await?;
Ok(result)
})
})
.argument(InputValue::new(
&context.entity_delete_mutation.filter_field,
TypeRef::named(entity_filter_input_builder.type_name(&object_name)),
get_entity_renormalized_filter_field_name(),
TypeRef::named(get_entity_filter_input_type_name::<E>(builder_context)),
))
}
pub fn generate_entity_create_one_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: CreateOneMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_create_one_mutation_builder = EntityCreateOneMutationBuilder {
context: builder_context,
};
entity_create_one_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, input_object| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(resolver_ctx, app_ctx.clone(), input_object).await?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_create_one_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_create_one_mutation_builder = EntityCreateOneMutationBuilder {
context: builder_context,
};
entity_create_one_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_create_batch_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: CreateBatchMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_create_batch_mutation_builder = EntityCreateBatchMutationBuilder {
context: builder_context,
};
entity_create_batch_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, input_objects| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(resolver_ctx, app_ctx.clone(), input_objects).await?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_create_batch_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_create_batch_mutation_builder = EntityCreateBatchMutationBuilder {
context: builder_context,
};
entity_create_batch_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_update_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: UpdateMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_update_mutation_builder = EntityUpdateMutationBuilder {
context: builder_context,
};
entity_update_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, filters, input_object| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(
resolver_ctx,
app_ctx.clone(),
get_filter_conditions::<E>(resolver_ctx, builder_context, filters),
input_object,
)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_update_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_update_mutation_builder = EntityUpdateMutationBuilder {
context: builder_context,
};
entity_update_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_delete_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: DeleteMutationFn,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_delete_mutation_builder = EntityDeleteMutationBuilder {
context: builder_context,
};
entity_delete_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, filters| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(
resolver_ctx,
app_ctx.clone(),
get_filter_conditions::<E>(resolver_ctx, builder_context, filters),
)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_delete_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_delete_mutation_builder = EntityDeleteMutationBuilder {
context: builder_context,
};
entity_delete_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn register_entity_default_mutations<E, A>(
mut builder: SeaographyBuilder,
active_model_hooks: bool,
) -> SeaographyBuilder
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let builder_context = builder.context;
builder
.outputs
.push(generate_entity_default_basic_entity_object::<E>(
builder_context,
));
builder.inputs.extend([
generate_entity_default_insert_input_object::<E>(builder_context),
generate_entity_default_update_input_object::<E>(builder_context),
]);
builder.mutations.extend([
generate_entity_default_create_one_mutation_field::<E, A>(
builder_context,
active_model_hooks,
),
generate_entity_default_create_batch_mutation_field::<E, A>(
builder_context,
active_model_hooks,
),
generate_entity_default_update_mutation_field::<E, A>(builder_context, active_model_hooks),
generate_entity_default_delete_mutation_field::<E, A>(builder_context, active_model_hooks),
]);
builder
}
pub(crate) fn register_entity_default_readonly_impl<T, RE, I>(
mut builder: SeaographyBuilder,
entity: T,
) -> SeaographyBuilder
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
RE: sea_orm::Iterable<Iterator = I> + RelationBuilder,
I: Iterator<Item = RE> + Clone + DoubleEndedIterator + ExactSizeIterator + FusedIterator,
{
builder.register_entity::<T>(
<RE as sea_orm::Iterable>::iter()
.map(|rel| RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(entity, tokio::spawn);
builder = builder.register_entity_dataloader_one_to_many(entity, tokio::spawn);
builder
}
pub(crate) fn register_entity_default_writable_impl<T, RE, A, I>(
mut builder: SeaographyBuilder,
entity: T,
active_model_hooks: bool,
) -> SeaographyBuilder
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = T> + sea_orm::ActiveModelBehavior + std::marker::Send,
RE: sea_orm::Iterable<Iterator = I> + RelationBuilder,
I: Iterator<Item = RE> + Clone + DoubleEndedIterator + ExactSizeIterator + FusedIterator,
{
builder = register_entity_default_readonly_impl::<T, RE, I>(builder, entity);
builder = register_entity_default_mutations::<T, A>(builder, active_model_hooks);
builder
}
macro_rules! register_entity_default_readonly {
($builder:expr, $module_path:ident) => {
$crate::graphql::infra::custom::register_entity_default_readonly_impl::<
$module_path::Entity,
$module_path::RelatedEntity,
_,
>($builder, $module_path::Entity)
};
}
macro_rules! register_entity_default_writable {
($builder:expr, $module_path:ident, $active_model_hooks:expr) => {
$crate::graphql::infra::custom::register_entity_default_writable_impl::<
$module_path::Entity,
$module_path::RelatedEntity,
$module_path::ActiveModel,
_,
>($builder, $module_path::Entity, $active_model_hooks)
};
}
pub(crate) use register_entity_default_readonly;
pub(crate) use register_entity_default_writable;
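
The two macros exist so call sites do not have to spell out the RelatedEntity/ActiveModel plumbing; every domain module in this diff reduces to one line. For reference, the writable form expands (per the definition above) to roughly:

// Call sites:
builder = register_entity_default_writable!(builder, downloads, false); // no ActiveModel hooks
builder = register_entity_default_readonly!(builder, subscriber_tasks); // query surface only
// Expansion of the writable call:
builder = register_entity_default_writable_impl::<
    downloads::Entity,
    downloads::RelatedEntity,
    downloads::ActiveModel,
    _,
>(builder, downloads::Entity, false);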

View File

@@ -17,7 +17,7 @@ use serde::{Serialize, de::DeserializeOwned};
use serde_json::Value as JsonValue;
use crate::{
errors::RecorderResult, graphql::infra::util::get_entity_column_key,
errors::RecorderResult, graphql::infra::name::get_entity_and_column_name,
utils::json::convert_json_keys,
};
@@ -911,18 +911,15 @@ where
Box::new(
move |_resolve_context: &ResolverContext<'_>, condition, filter| {
if let Some(filter) = filter {
let filter_value = to_value(filter.as_index_map()).map_err(|e| {
SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e))
})?;
let filter_value =
to_value(filter.as_index_map()).map_err(GraphqlError::new_with_source)?;
let filter_json: JsonValue = filter_value.into_json().map_err(|e| {
SeaographyError::AsyncGraphQLError(GraphqlError::new(format!("{e:?}")))
})?;
let filter_json: JsonValue = filter_value
.into_json()
.map_err(GraphqlError::new_with_source)?;
let cond_where = prepare_jsonb_filter_input(&Expr::col(column), filter_json)
.map_err(|e| {
SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e))
})?;
.map_err(GraphqlError::new_with_source)?;
let condition = condition.add(cond_where);
Ok(condition)
@@ -946,65 +943,76 @@ where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_column_key = get_entity_column_key::<T>(context, column);
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.filter_types.overwrites.insert(
entity_column_key.clone(),
get_entity_and_column_name::<T>(context, column),
Some(FilterType::Custom(JSONB_FILTER_NAME.to_string())),
);
context.filter_types.condition_functions.insert(
entity_column_name.clone(),
generate_jsonb_filter_condition_function::<T>(context, column),
);
}
pub fn validate_jsonb_input_for_entity<T, S>(context: &mut BuilderContext, column: &T::Column)
where
pub fn try_convert_jsonb_input_for_entity<T, S>(
context: &mut BuilderContext,
column: &T::Column,
case: Option<Case<'static>>,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
S: DeserializeOwned + Serialize,
{
let entity_column_key = get_entity_column_key::<T>(context, column);
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.types.input_conversions.insert(
entity_column_key.clone(),
entity_column_name.clone(),
Box::new(move |_resolve_context, accessor| {
let deserialized = accessor.deserialize::<S>().map_err(|err| {
SeaographyError::TypeConversionError(
err.message,
format!("Json - {entity_column_key}"),
)
})?;
let json_value = serde_json::to_value(deserialized).map_err(|err| {
let mut json_value: serde_json::Value = accessor.deserialize()?;
if let Some(case) = case {
json_value = convert_json_keys(json_value, case);
}
serde_json::from_value::<S>(json_value.clone()).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_key}"),
format!("Json - {entity_column_name}"),
)
})?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}),
);
}
pub fn convert_jsonb_output_case_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
pub fn convert_jsonb_output_for_entity<T>(
context: &mut BuilderContext,
column: &T::Column,
case: Option<Case<'static>>,
) where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_column_key = get_entity_column_key::<T>(context, column);
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.types.output_conversions.insert(
entity_column_key.clone(),
entity_column_name.clone(),
Box::new(move |value| {
if let sea_orm::Value::Json(Some(json)) = value {
let result = async_graphql::Value::from_json(convert_json_keys(
json.as_ref().clone(),
Case::Camel,
))
.map_err(|err| {
let mut json_value = json.as_ref().clone();
if let Some(case) = case {
json_value = convert_json_keys(json_value, case);
}
let result = async_graphql::Value::from_json(json_value).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_key}"),
format!("Json - {entity_column_name}"),
)
})?;
Ok(result)
} else {
Err(SeaographyError::TypeConversionError(
"value should be json".to_string(),
format!("Json - {entity_column_key}"),
format!("Json - {entity_column_name}"),
))
}
}),
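
Both jsonb helpers now take an optional Case and route through the same convert_json_keys utility (a project util, visible here only through its call sites); passing None makes them key-preserving. A sketch of what Some(Case::Camel) does to a value on the way out:

use convert_case::Case;

let stored = serde_json::json!({ "subscriber_id": 1, "task_type": "sync" });
let exposed = convert_json_keys(stored, Case::Camel);
// => {"subscriberId": 1, "taskType": "sync"}
// On input the conversion runs first, then serde_json::from_value::<S> validates the shape.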

View File

@@ -1,4 +1,4 @@
pub mod crypto;
pub mod custom;
pub mod json;
pub mod util;
pub mod name;

View File

@@ -0,0 +1,203 @@
use std::fmt::Display;
use sea_orm::{EntityName, EntityTrait, IdenStatic};
use seaography::BuilderContext;
pub fn get_entity_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let t = T::default();
let name = <T as EntityName>::table_name(&t);
context.entity_object.type_name.as_ref()(name)
}
pub fn get_column_name<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.entity_object.column_name.as_ref()(&entity_name, column.as_str())
}
pub fn get_entity_and_column_name<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
let column_name = get_column_name::<T>(context, column);
format!("{entity_name}.{column_name}")
}
pub fn get_entity_and_column_name_from_column_str<T>(
context: &BuilderContext,
column_str: &str,
) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}.{column_str}")
}
pub fn get_entity_basic_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let t = T::default();
let name = <T as EntityName>::table_name(&t);
format!(
"{}{}",
context.entity_object.type_name.as_ref()(name),
context.entity_object.basic_type_suffix
)
}
pub fn get_entity_query_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.entity_query_field.type_name.as_ref()(&entity_name)
}
pub fn get_entity_filter_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.filter_input.type_name.as_ref()(&entity_name)
}
pub fn get_entity_insert_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}{}", context.entity_input.insert_suffix)
}
pub fn get_entity_update_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}{}", context.entity_input.update_suffix)
}
pub fn get_entity_create_one_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_create_one_mutation.mutation_suffix
)
}
pub fn get_entity_create_batch_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_create_batch_mutation.mutation_suffix
)
}
pub fn get_entity_delete_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_delete_mutation.mutation_suffix
)
}
pub fn get_entity_update_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_update_mutation.mutation_suffix
)
}
pub fn get_entity_custom_mutation_field_name<T>(
context: &BuilderContext,
mutation_suffix: impl Display,
) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!("{query_field_name}{mutation_suffix}")
}
pub fn get_entity_renormalized_filter_field_name() -> &'static str {
"filter"
}
pub fn get_entity_query_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_query_field.filters
}
pub fn get_entity_update_mutation_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_update_mutation.filter_field
}
pub fn get_entity_delete_mutation_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_delete_mutation.filter_field
}
pub fn renormalize_filter_field_names_to_schema_context(context: &mut BuilderContext) {
let renormalized_filter_field_name = get_entity_renormalized_filter_field_name();
context.entity_query_field.filters = renormalized_filter_field_name.to_string();
context.entity_update_mutation.filter_field = renormalized_filter_field_name.to_string();
context.entity_delete_mutation.filter_field = renormalized_filter_field_name.to_string();
}
pub fn get_entity_renormalized_data_field_name() -> &'static str {
"data"
}
pub fn get_entity_create_one_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_create_one_mutation.data_field
}
pub fn get_entity_create_batch_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_create_batch_mutation.data_field
}
pub fn get_entity_update_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_update_mutation.data_field
}
pub fn renormalize_data_field_names_to_schema_context(context: &mut BuilderContext) {
let renormalized_data_field_name = get_entity_renormalized_data_field_name();
context.entity_create_one_mutation.data_field = renormalized_data_field_name.to_string();
context.entity_create_batch_mutation.data_field = renormalized_data_field_name.to_string();
context.entity_update_mutation.data_field = renormalized_data_field_name.to_string();
}

View File

@@ -1,30 +0,0 @@
use sea_orm::{EntityName, EntityTrait, IdenStatic};
use seaography::BuilderContext;
pub fn get_entity_key<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
context.entity_object.type_name.as_ref()(<T as EntityName>::table_name(&T::default()))
}
pub fn get_column_key<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_key::<T>(context);
context.entity_object.column_name.as_ref()(&entity_name, column.as_str())
}
pub fn get_entity_column_key<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
format!("{}.{}", &entity_name, &column_name)
}

View File

@@ -12,6 +12,7 @@ use crate::{
credential_3rd::{
register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context,
},
cron::{register_cron_to_schema_builder, register_cron_to_schema_context},
downloaders::{
register_downloaders_to_schema_builder, register_downloaders_to_schema_context,
},
@@ -38,8 +39,17 @@ use crate::{
subscriptions::{
register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context,
},
system_tasks::{
register_system_tasks_to_schema_builder, register_system_tasks_to_schema_context,
},
},
infra::{
json::register_jsonb_input_filter_to_schema_builder,
name::{
renormalize_data_field_names_to_schema_context,
renormalize_filter_field_names_to_schema_context,
},
},
infra::json::register_jsonb_input_filter_to_schema_builder,
},
};
@@ -55,6 +65,9 @@ pub fn build_schema(
let context = CONTEXT.get_or_init(|| {
let mut context = BuilderContext::default();
renormalize_filter_field_names_to_schema_context(&mut context);
renormalize_data_field_names_to_schema_context(&mut context);
{
// domains
register_feeds_to_schema_context(&mut context);
@@ -68,6 +81,8 @@ pub fn build_schema(
register_subscription_bangumi_to_schema_context(&mut context);
register_subscription_episode_to_schema_context(&mut context);
register_bangumi_to_schema_context(&mut context);
register_cron_to_schema_context(&mut context);
register_system_tasks_to_schema_context(&mut context);
}
context
});
@@ -91,6 +106,8 @@ pub fn build_schema(
builder = register_credential3rd_to_schema_builder(builder);
builder = register_subscriber_tasks_to_schema_builder(builder);
builder = register_bangumi_to_schema_builder(builder);
builder = register_cron_to_schema_builder(builder);
builder = register_system_tasks_to_schema_builder(builder);
}
let schema = builder.schema_builder();
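For orientation, a hedged sketch of the two-step registration pattern the hunks above follow; register_foo_* is a hypothetical placeholder, not a function in this repo.
// Each domain contributes once to the shared BuilderContext (cached in
// CONTEXT) and once to the seaography Builder per schema build.
pub fn register_foo_to_schema_context(context: &mut BuilderContext) {
    // adjust naming, filters, guards, etc. for the foo entity here
    let _ = context;
}

pub fn register_foo_to_schema_builder(builder: seaography::Builder) -> seaography::Builder {
    // entity/type registration for foo would go here
    builder
}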

View File

@@ -12,7 +12,6 @@
)]
#![allow(clippy::enum_variant_names)]
pub use downloader;
pub mod app;
pub mod auth;
pub mod cache;
@@ -28,6 +27,8 @@ pub mod migrations;
pub mod models;
pub mod storage;
pub mod task;
pub mod test_utils;
pub mod utils;
pub mod web;
#[cfg(any(test, feature = "test-utils"))]
pub mod test_utils;

View File

@@ -1,6 +1,8 @@
use serde::{Deserialize, Serialize};
use ts_rs::TS;
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, TS)]
#[ts(rename_all = "camelCase")]
pub enum AutoOptimizeImageFormat {
#[serde(rename = "image/webp")]
Webp,
@@ -10,25 +12,29 @@ pub enum AutoOptimizeImageFormat {
Jxl,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
#[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
#[ts(rename_all = "camelCase")]
pub struct EncodeWebpOptions {
pub quality: Option<f32>,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
#[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
#[ts(rename_all = "camelCase")]
pub struct EncodeAvifOptions {
pub quality: Option<u8>,
pub speed: Option<u8>,
pub threads: Option<u8>,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
#[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
#[ts(rename_all = "camelCase")]
pub struct EncodeJxlOptions {
pub quality: Option<f32>,
pub speed: Option<u8>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
#[derive(Clone, Debug, Serialize, Deserialize, TS, PartialEq)]
#[ts(tag = "mimeType")]
#[serde(tag = "mime_type")]
pub enum EncodeImageOptions {
#[serde(rename = "image/webp")]

View File

@@ -175,10 +175,10 @@ pub enum Feeds {
pub enum Cron {
Table,
Id,
CronSource,
SubscriberId,
SubscriptionId,
CronExpr,
CronTimezone,
NextRun,
LastRun,
LastError,
@@ -190,6 +190,38 @@ pub enum Cron {
MaxAttempts,
Priority,
Status,
SubscriberTaskCron,
SystemTaskCron,
}
#[derive(sea_query::Iden)]
pub enum ApalisSchema {
#[iden = "apalis"]
Schema,
}
#[derive(DeriveIden)]
pub enum ApalisJobs {
#[sea_orm(iden = "jobs")]
Table,
SubscriberId,
SubscriptionId,
Job,
JobType,
Status,
TaskType,
Id,
Attempts,
MaxAttempts,
RunAt,
LastError,
LockAt,
LockBy,
DoneAt,
Priority,
CronId,
}
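A small grounded sketch of how these idens render when spliced into the raw SQL below: sea_query's Iden::to_string yields the overridden value when #[iden]/#[sea_orm(iden)] is present, otherwise the snake_cased variant name.
fn iden_rendering_sketch() {
    assert_eq!(ApalisSchema::Schema.to_string(), "apalis");
    assert_eq!(ApalisJobs::Table.to_string(), "jobs");
    assert_eq!(ApalisJobs::SubscriberId.to_string(), "subscriber_id");
}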
macro_rules! create_postgres_enum_for_active_enum {

View File

@@ -52,8 +52,7 @@ impl MigrationTrait for Migration {
subscriptions::SubscriptionCategoryEnum,
subscriptions::SubscriptionCategory::MikanSubscriber,
subscriptions::SubscriptionCategory::MikanBangumi,
subscriptions::SubscriptionCategory::MikanSeason,
subscriptions::SubscriptionCategory::Manual
subscriptions::SubscriptionCategory::MikanSeason
)
.await?;

View File

@@ -90,6 +90,11 @@ impl MigrationTrait for Migration {
SimpleExpr::from(AuthType::Basic).as_enum(AuthTypeEnum),
seed_subscriber_id.into(),
])
.on_conflict(
OnConflict::columns([Auth::Pid, Auth::AuthType])
.do_nothing()
.to_owned(),
)
.to_owned(),
)
.await?;

View File

@@ -95,6 +95,7 @@ impl MigrationTrait for Migration {
Table::alter()
.table(Subscriptions::Table)
.drop_foreign_key("fk_subscriptions_credential_id")
.drop_column(Subscriptions::CredentialId)
.to_owned(),
)
.await?;

View File

@@ -0,0 +1,221 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};
use super::defs::{ApalisJobs, ApalisSchema};
use crate::{
migrations::defs::{Subscribers, Subscriptions},
task::{
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME, SUBSCRIBER_TASK_APALIS_NAME,
SYSTEM_TASK_APALIS_NAME,
},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.add_column_if_not_exists(integer_null(ApalisJobs::SubscriberId))
.add_column_if_not_exists(integer_null(ApalisJobs::SubscriptionId))
.add_column_if_not_exists(text_null(ApalisJobs::TaskType))
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_subscriber_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::SubscriberId)
.to_tbl(Subscribers::Table)
.to_col(Subscribers::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_subscription_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::SubscriptionId)
.to_tbl(Subscriptions::Table)
.to_col(Subscriptions::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"UPDATE {apalis_schema}.{apalis_table} SET {subscriber_id} = ({job} ->> '{subscriber_id}')::integer, {task_type} = ({job} ->> '{task_type}')::text, {subscription_id} = ({job} ->> '{subscription_id}')::integer"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
job = ApalisJobs::Job.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME}
BEFORE INSERT OR UPDATE ON {apalis_schema}.{apalis_table}
FOR EACH ROW
EXECUTE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}();"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string()
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
.await?;
db.execute_unprepared("DROP VIEW IF EXISTS system_tasks")
.await?;
db.execute_unprepared(&format!(
r#"DROP TRIGGER IF EXISTS {SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME} ON {apalis_schema}.{apalis_table}"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string()
)).await?;
db.execute_unprepared(&format!(
r#"DROP FUNCTION IF EXISTS {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}()"#,
apalis_schema = ApalisSchema::Schema.to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.drop_foreign_key("fk_apalis_jobs_subscriber_id")
.drop_foreign_key("fk_apalis_jobs_subscription_id")
.drop_column(ApalisJobs::SubscriberId)
.drop_column(ApalisJobs::SubscriptionId)
.to_owned(),
)
.await?;
Ok(())
}
}
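A hedged illustration of the data flow this migration sets up; the payload values are illustrative, 'subscriber_task' stands in for SUBSCRIBER_TASK_APALIS_NAME (whose value isn't shown here), and apalis' other required columns (id, attempts, ...) are elided, so treat this as a sketch rather than a runnable test.
use sea_orm::{ConnectionTrait, DbErr};

// The BEFORE INSERT OR UPDATE trigger mirrors job ->> 'subscriber_id',
// 'subscription_id' and 'task_type' into the new typed columns.
async fn trigger_flow_sketch(db: &impl ConnectionTrait) -> Result<(), DbErr> {
    db.execute_unprepared(
        r#"INSERT INTO apalis.jobs (job, job_type)
           VALUES ('{"subscriber_id": 1, "subscription_id": 2, "task_type": "sync_feeds"}'::jsonb,
                   'subscriber_task')"#,
    )
    .await?;
    // After the trigger fires, the row reads subscriber_id = 1,
    // subscription_id = 2, task_type = 'sync_feeds'.
    Ok(())
}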

View File

@@ -1,64 +0,0 @@
use async_trait::async_trait;
use sea_orm_migration::prelude::*;
use crate::task::SUBSCRIBER_TASK_APALIS_NAME;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
job,
job_type,
status,
(job ->> 'subscriber_id'::text)::integer AS subscriber_id,
job ->> 'task_type'::text AS task_type,
id,
attempts,
max_attempts,
run_at,
last_error,
lock_at,
lock_by,
done_at,
priority
FROM apalis.jobs
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscriber_id
ON apalis.jobs (((job -> 'subscriber_id')::integer))
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(
r#"DROP INDEX IF EXISTS idx_apalis_jobs_subscriber_id
ON apalis.jobs"#,
)
.await?;
db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
.await?;
Ok(())
}
}

View File

@@ -15,6 +15,8 @@ pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan).await?;
{
@@ -29,11 +31,17 @@ impl MigrationTrait for Migration {
BangumiTypeEnum,
BangumiType::iden_values(),
))
.drop_column(Bangumi::SavePath)
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"ALTER TABLE {bangumi} DROP COLUMN IF EXISTS {save_path}"#,
bangumi = Bangumi::Table.to_string(),
save_path = Bangumi::SavePath.to_string(),
))
.await?;
manager
.exec_stmt(
UpdateStatement::new()
@@ -83,11 +91,17 @@ impl MigrationTrait for Migration {
.add_column_if_not_exists(big_integer_null(
Episodes::EnclosureContentLength,
))
.drop_column(Episodes::SavePath)
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"ALTER TABLE {episodes} DROP COLUMN IF EXISTS {save_path}"#,
episodes = Episodes::Table.to_string(),
save_path = Episodes::SavePath.to_string(),
))
.await?;
manager
.exec_stmt(
UpdateStatement::new()
@@ -120,10 +134,34 @@ impl MigrationTrait for Migration {
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.alter_table(
Table::alter()
.table(Bangumi::Table)
.add_column_if_not_exists(text_null(Bangumi::SavePath))
.drop_column(Bangumi::BangumiType)
.to_owned(),
)
.await?;
manager
.drop_postgres_enum_for_active_enum(BangumiTypeEnum)
.await?;
manager
.alter_table(
Table::alter()
.table(Episodes::Table)
.add_column_if_not_exists(text_null(Episodes::SavePath))
.drop_column(Episodes::EpisodeType)
.drop_column(Episodes::EnclosureMagnetLink)
.drop_column(Episodes::EnclosureTorrentLink)
.drop_column(Episodes::EnclosurePubDate)
.drop_column(Episodes::EnclosureContentLength)
.to_owned(),
)
.await?;
manager
.drop_postgres_enum_for_active_enum(EpisodeTypeEnum)
.await?;

View File

@@ -1,62 +0,0 @@
use async_trait::async_trait;
use sea_orm_migration::prelude::*;
use crate::task::SUBSCRIBER_TASK_APALIS_NAME;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
job,
job_type,
status,
(job ->> 'subscriber_id'::text)::integer AS subscriber_id,
job ->> 'task_type'::text AS task_type,
id,
attempts,
max_attempts,
run_at,
last_error,
lock_at,
lock_by,
done_at,
priority,
(job ->> 'subscription_id'::text)::integer AS subscription_id
FROM apalis.jobs
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscription_id
ON apalis.jobs (((job -> 'subscription_id')::integer))
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscription_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(
r#"DROP INDEX IF EXISTS idx_apalis_jobs_subscription_id
ON apalis.jobs"#,
)
.await?;
Ok(())
}
}

View File

@@ -4,12 +4,18 @@ use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{
Cron, CustomSchemaManagerExt, GeneralIds, Subscribers, Subscriptions, table_auto_z,
ApalisJobs, ApalisSchema, Cron, CustomSchemaManagerExt, GeneralIds, Subscribers,
Subscriptions, table_auto_z,
},
models::cron::{
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_EVENT, CronSource, CronSourceEnum,
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_DEBUG_EVENT, CRON_DUE_EVENT,
CronStatus, CronStatusEnum, NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME,
NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME,
NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME,
},
task::{
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SUBSCRIBER_TASK_APALIS_NAME,
SYSTEM_TASK_APALIS_NAME,
},
};
@@ -19,16 +25,14 @@ pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
create_postgres_enum_for_active_enum!(manager, CronSourceEnum, CronSource::Subscription)
.await?;
create_postgres_enum_for_active_enum!(
manager,
CronStatusEnum,
CronStatus::Pending,
CronStatus::Running,
CronStatus::Completed,
CronStatus::Failed
CronStatus::Failed,
CronStatus::Disabled
)
.await?;
@@ -37,11 +41,7 @@ impl MigrationTrait for Migration {
table_auto_z(Cron::Table)
.col(pk_auto(Cron::Id))
.col(string(Cron::CronExpr))
.col(enumeration(
Cron::CronSource,
CronSourceEnum,
CronSource::iden_values(),
))
.col(string(Cron::CronTimezone))
.col(integer_null(Cron::SubscriberId))
.col(integer_null(Cron::SubscriptionId))
.col(timestamp_with_time_zone_null(Cron::NextRun))
@@ -50,22 +50,23 @@ impl MigrationTrait for Migration {
.col(boolean(Cron::Enabled).default(true))
.col(string_null(Cron::LockedBy))
.col(timestamp_with_time_zone_null(Cron::LockedAt))
.col(integer_null(Cron::TimeoutMs))
.col(integer(Cron::Attempts))
.col(integer(Cron::MaxAttempts))
.col(integer(Cron::Priority))
.col(enumeration(
Cron::Status,
CronStatusEnum,
CronStatus::iden_values(),
))
.col(integer_null(Cron::TimeoutMs).default(5000))
.col(integer(Cron::Attempts).default(0))
.col(integer(Cron::MaxAttempts).default(1))
.col(integer(Cron::Priority).default(0))
.col(
enumeration(Cron::Status, CronStatusEnum, CronStatus::iden_values())
.default(CronStatus::Pending),
)
.col(json_binary_null(Cron::SubscriberTaskCron))
.col(json_binary_null(Cron::SystemTaskCron))
.foreign_key(
ForeignKey::create()
.name("fk_cron_subscriber_id")
.from(Cron::Table, Cron::SubscriberId)
.to(Subscribers::Table, Subscribers::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Cascade),
.on_update(ForeignKeyAction::Restrict),
)
.foreign_key(
ForeignKey::create()
@@ -73,7 +74,7 @@ impl MigrationTrait for Migration {
.from(Cron::Table, Cron::SubscriptionId)
.to(Subscriptions::Table, Subscriptions::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Cascade),
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
@@ -83,17 +84,6 @@ impl MigrationTrait for Migration {
.create_postgres_auto_update_ts_trigger_for_col(Cron::Table, GeneralIds::UpdatedAt)
.await?;
manager
.create_index(
IndexCreateStatement::new()
.if_not_exists()
.name("idx_cron_cron_source")
.table(Cron::Table)
.col(Cron::CronSource)
.to_owned(),
)
.await?;
manager
.create_index(
IndexCreateStatement::new()
@@ -107,6 +97,43 @@ impl MigrationTrait for Migration {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_subscriber_task_subscriber_id integer;
new_subscriber_task_subscription_id integer;
new_system_task_subscriber_id integer;
BEGIN
new_subscriber_task_subscriber_id = (NEW.{subscriber_task_cron} ->> 'subscriber_id')::integer;
new_subscriber_task_subscription_id = (NEW.{subscriber_task_cron} ->> 'subscription_id')::integer;
new_system_task_subscriber_id = (NEW.{system_task_cron} ->> 'subscriber_id')::integer;
IF new_subscriber_task_subscriber_id IS DISTINCT FROM (OLD.{subscriber_task_cron} ->> 'subscriber_id')::integer AND new_subscriber_task_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_subscriber_task_subscriber_id;
END IF;
IF new_subscriber_task_subscription_id IS DISTINCT FROM (OLD.{subscriber_task_cron} ->> 'subscription_id')::integer AND new_subscriber_task_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_subscriber_task_subscription_id;
END IF;
IF new_system_task_subscriber_id IS DISTINCT FROM (OLD.{system_task_cron} ->> 'subscriber_id')::integer AND new_system_task_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_system_task_subscriber_id;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
subscriber_task_cron = &Cron::SubscriberTaskCron.to_string(),
subscriber_id = &Cron::SubscriberId.to_string(),
subscription_id = &Cron::SubscriptionId.to_string(),
system_task_cron = &Cron::SystemTaskCron.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME}
BEFORE INSERT OR UPDATE ON {table}
FOR EACH ROW
EXECUTE FUNCTION {SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}();"#,
table = &Cron::Table.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME}() RETURNS trigger AS $$
BEGIN
@@ -114,7 +141,7 @@ impl MigrationTrait for Migration {
IF NEW.{next_run} IS NOT NULL
AND NEW.{next_run} <= CURRENT_TIMESTAMP
AND NEW.{enabled} = true
AND NEW.{status} = '{pending}'
AND NEW.{status} = '{pending}'::{status_type}
AND NEW.{attempts} < NEW.{max_attempts}
-- Check if not locked or lock timeout
AND (
@@ -129,8 +156,8 @@ impl MigrationTrait for Migration {
OLD.{next_run} IS NULL
OR OLD.{next_run} > CURRENT_TIMESTAMP
OR OLD.{enabled} = false
OR OLD.{status} != '{pending}'
OR OLD.{attempts} != NEW.{attempts}
OR OLD.{status} IS DISTINCT FROM '{pending}'
OR OLD.{attempts} IS DISTINCT FROM NEW.{attempts}
)
THEN
PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(NEW)::text);
@@ -146,6 +173,7 @@ impl MigrationTrait for Migration {
pending = &CronStatus::Pending.to_value(),
attempts = &Cron::Attempts.to_string(),
max_attempts = &Cron::MaxAttempts.to_string(),
status_type = &CronStatus::name().to_string(),
))
.await?;
@@ -169,7 +197,7 @@ impl MigrationTrait for Migration {
WHERE {next_run} IS NOT NULL
AND {next_run} <= CURRENT_TIMESTAMP
AND {enabled} = true
AND {status} = '{pending}'
AND {status} = '{pending}'::{status_type}
AND {attempts} < {max_attempts}
AND (
{locked_at} IS NULL
@@ -181,9 +209,12 @@ impl MigrationTrait for Migration {
ORDER BY {priority} ASC, {next_run} ASC
FOR UPDATE SKIP LOCKED
LOOP
-- PERFORM pg_notify('{CRON_DUE_DEBUG_EVENT}',format('Found due cron: value=%s; Now time: %s', row_to_json(cron_record)::text, CURRENT_TIMESTAMP));
PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(cron_record)::text);
notification_count := notification_count + 1;
END LOOP;
-- PERFORM pg_notify('{CRON_DUE_DEBUG_EVENT}', format('Notification count: %I; Now time: %s', notification_count, CURRENT_TIMESTAMP));
RETURN notification_count;
END;
$$ LANGUAGE plpgsql;"#,
@@ -197,15 +228,286 @@ impl MigrationTrait for Migration {
priority = &Cron::Priority.to_string(),
attempts = &Cron::Attempts.to_string(),
max_attempts = &Cron::MaxAttempts.to_string(),
status_type = &CronStatus::name().to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.add_column_if_not_exists(integer_null(ApalisJobs::CronId))
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_cron_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::CronId)
.to_tbl(Cron::Table)
.to_col(Cron::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id},
{cron_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{cron_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"
UPDATE {apalis_schema}.{apalis_table} SET {cron_id} = ({job} ->> '{cron_id}')::integer
"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_cron_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_cron_id = (NEW.{job} ->> '{cron_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_cron_id IS DISTINCT FROM (OLD.{job} ->> '{cron_id}')::integer AND new_job_cron_id IS DISTINCT FROM NEW.{cron_id} THEN
NEW.{cron_id} = new_job_cron_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.drop_foreign_key("fk_apalis_jobs_cron_id")
.drop_column(ApalisJobs::CronId)
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"DROP TRIGGER IF EXISTS {NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME} ON {table};"#,
table = &Cron::Table.to_string(),
@@ -231,10 +533,6 @@ impl MigrationTrait for Migration {
)
.await?;
manager
.drop_postgres_enum_for_active_enum(CronSourceEnum)
.await?;
manager
.drop_postgres_enum_for_active_enum(CronStatusEnum)
.await?;
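The migration only emits pg_notify; as a hedged sketch of the consuming side (this repo's actual listener isn't shown in these hunks), using sqlx's PgListener:
use sqlx::postgres::PgListener;

// Subscribe to CRON_DUE_EVENT ("cron_due"); each payload is
// row_to_json(NEW)::text, i.e. a full cron row.
async fn listen_for_due_crons(database_url: &str) -> Result<(), sqlx::Error> {
    let mut listener = PgListener::connect(database_url).await?;
    listener.listen("cron_due").await?;
    loop {
        let notification = listener.recv().await?;
        // e.g. forward to cron::Model::handle_cron_notification(...)
        println!("due cron: {}", notification.payload());
    }
}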

View File

@@ -7,10 +7,9 @@ pub mod m20220101_000001_init;
pub mod m20240224_082543_add_downloads;
pub mod m20241231_000001_auth;
pub mod m20250501_021523_credential_3rd;
pub mod m20250520_021135_subscriber_tasks;
pub mod m20250520_021135_add_tasks;
pub mod m20250622_015618_feeds;
pub mod m20250622_020819_bangumi_and_episode_type;
pub mod m20250625_060701_add_subscription_id_to_subscriber_tasks;
pub mod m20250629_065628_add_cron;
pub struct Migrator;
@@ -23,10 +22,9 @@ impl MigratorTrait for Migrator {
Box::new(m20240224_082543_add_downloads::Migration),
Box::new(m20241231_000001_auth::Migration),
Box::new(m20250501_021523_credential_3rd::Migration),
Box::new(m20250520_021135_subscriber_tasks::Migration),
Box::new(m20250520_021135_add_tasks::Migration),
Box::new(m20250622_015618_feeds::Migration),
Box::new(m20250622_020819_bangumi_and_episode_type::Migration),
Box::new(m20250625_060701_add_subscription_id_to_subscriber_tasks::Migration),
Box::new(m20250629_065628_add_cron::Migration),
]
}

View File

@@ -64,7 +64,9 @@ impl Model {
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_model_not_found_detail("auth", format!("pid {pid} not found"))
RecorderError::from_entity_not_found_detail::<Entity, _>(format!(
"pid {pid} not found"
))
})?;
Ok(subscriber_auth)
}

View File

@@ -1,7 +1,11 @@
pub const CRON_DUE_EVENT: &str = "cron_due";
pub const CRON_DUE_DEBUG_EVENT: &str = "cron_due_debug";
pub const CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME: &str = "check_and_trigger_due_crons";
pub const NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME: &str = "notify_due_cron_when_mutating";
pub const NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME: &str =
"notify_due_cron_when_mutating_trigger";
pub const SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME: &str = "setup_cron_extra_foreign_keys";
pub const SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME: &str =
"setup_cron_extra_foreign_keys_trigger";

View File

@@ -1,16 +1,17 @@
mod core;
mod registry;
pub use core::{
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_EVENT,
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_DEBUG_EVENT, CRON_DUE_EVENT,
NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME, NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME,
};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use chrono_tz::Tz;
use croner::Cron;
use sea_orm::{
ActiveValue::Set,
ActiveValue::{self, Set},
Condition, DeriveActiveEnum, DeriveDisplay, DeriveEntityModel, EnumIter, QuerySelect,
Statement, TransactionTrait,
entity::prelude::*,
@@ -21,20 +22,11 @@ use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
models::subscriptions::{self},
errors::RecorderResult,
models::{subscriber_tasks, system_tasks},
task::{SubscriberTaskTrait, SystemTaskTrait},
};
#[derive(
Debug, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "cron_source")]
#[serde(rename_all = "snake_case")]
pub enum CronSource {
#[sea_orm(string_value = "subscription")]
Subscription,
}
#[derive(
Debug, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay, Serialize, Deserialize,
)]
@@ -49,9 +41,11 @@ pub enum CronStatus {
Completed,
#[sea_orm(string_value = "failed")]
Failed,
#[sea_orm(string_value = "disabled")]
Disabled,
}
#[derive(Debug, Clone, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[derive(Debug, Clone, DeriveEntityModel, PartialEq, Serialize, Deserialize)]
#[sea_orm(table_name = "cron")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
@@ -60,16 +54,17 @@ pub struct Model {
pub updated_at: DateTimeUtc,
#[sea_orm(primary_key)]
pub id: i32,
pub cron_source: CronSource,
pub subscriber_id: Option<i32>,
pub subscription_id: Option<i32>,
pub cron_expr: String,
pub cron_timezone: String,
pub next_run: Option<DateTimeUtc>,
pub last_run: Option<DateTimeUtc>,
pub last_error: Option<String>,
pub locked_by: Option<String>,
pub locked_at: Option<DateTimeUtc>,
pub timeout_ms: i32,
// DB default is 5000 (set in the migration); None means no timeout
pub timeout_ms: Option<i32>,
#[sea_orm(default_expr = "0")]
pub attempts: i32,
#[sea_orm(default_expr = "1")]
@@ -79,6 +74,8 @@ pub struct Model {
pub status: CronStatus,
#[sea_orm(default_expr = "true")]
pub enabled: bool,
pub subscriber_task_cron: Option<subscriber_tasks::SubscriberTask>,
pub system_task_cron: Option<system_tasks::SystemTask>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -88,7 +85,7 @@ pub enum Relation {
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
on_delete = "Restrict"
)]
Subscriber,
#[sea_orm(
@@ -96,9 +93,13 @@ pub enum Relation {
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
on_delete = "Restrict"
)]
Subscription,
#[sea_orm(has_many = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(has_many = "super::system_tasks::Entity")]
SystemTask,
}
impl Related<super::subscribers::Entity> for Entity {
@@ -113,16 +114,96 @@ impl Related<super::subscriptions::Entity> for Entity {
}
}
impl Related<super::subscriber_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriberTask.def()
}
}
impl Related<super::system_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SystemTask.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(entity = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(entity = "super::system_tasks::Entity")]
SystemTask,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
match (
&self.cron_expr as &ActiveValue<String>,
&self.cron_timezone as &ActiveValue<String>,
) {
(ActiveValue::Set(cron_expr), ActiveValue::Set(timezone)) => {
if matches!(
&self.next_run,
ActiveValue::NotSet | ActiveValue::Unchanged(_)
) {
let next_run = Model::calculate_next_run(cron_expr, timezone)
.map_err(|e| DbErr::Custom(e.to_string()))?;
self.next_run = Set(Some(next_run));
}
}
(
ActiveValue::Unchanged(_) | ActiveValue::NotSet,
ActiveValue::Unchanged(_) | ActiveValue::NotSet,
) => {}
(_, _) => {
if matches!(
self.next_run,
ActiveValue::NotSet | ActiveValue::Unchanged(_)
) {
return Err(DbErr::Custom(
"Cron expr and timezone must be insert or update at same time when next \
run is not set"
.to_string(),
));
}
}
};
if let ActiveValue::Set(Some(subscriber_id)) = self.subscriber_id
&& let ActiveValue::Set(Some(ref subscriber_task)) = self.subscriber_task_cron
&& subscriber_task.get_subscriber_id() != subscriber_id
{
return Err(DbErr::Custom(
"Cron subscriber_id does not match subscriber_task_cron.subscriber_id".to_string(),
));
}
if let ActiveValue::Set(Some(subscriber_id)) = self.subscriber_id
&& let ActiveValue::Set(Some(ref system_task)) = self.system_task_cron
&& system_task.get_subscriber_id() != Some(subscriber_id)
{
return Err(DbErr::Custom(
"Cron subscriber_id does not match system_task_cron.subscriber_id".to_string(),
));
}
if let ActiveValue::Set(enabled) = self.enabled
&& !insert
{
if enabled {
self.status = Set(CronStatus::Pending)
} else {
self.status = Set(CronStatus::Disabled)
}
}
Ok(self)
}
}
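A minimal insert sketch of what before_save enforces, with illustrative values: next_run is derived from (cron_expr, cron_timezone) when not set explicitly.
use sea_orm::{ActiveModelTrait, ActiveValue::Set, ConnectionTrait, DbErr};

async fn insert_cron_sketch(db: &impl ConnectionTrait) -> Result<(), DbErr> {
    let cron = ActiveModel {
        cron_expr: Set("0 0 3 * * *".to_string()), // optional seconds field supported
        cron_timezone: Set("Asia/Shanghai".to_string()),
        subscriber_id: Set(Some(1)), // illustrative id
        ..Default::default()
    };
    // before_save computes next_run from the expression and timezone.
    let inserted = cron.insert(db).await?;
    assert!(inserted.next_run.is_some());
    Ok(())
}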
impl Model {
pub async fn handle_cron_notification(
@@ -176,7 +257,10 @@ impl Model {
&& cron.attempts < cron.max_attempts
&& cron.status == CronStatus::Pending
&& (cron.locked_at.is_none_or(|locked_at| {
locked_at + chrono::Duration::milliseconds(cron.timeout_ms as i64) <= Utc::now()
cron.timeout_ms.is_some_and(|cron_timeout_ms| {
locked_at + chrono::Duration::milliseconds(cron_timeout_ms as i64)
<= Utc::now()
})
}))
&& cron.next_run.is_some_and(|next_run| next_run <= Utc::now())
{
@@ -200,19 +284,20 @@ impl Model {
}
async fn exec_cron(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
match self.cron_source {
CronSource::Subscription => {
let subscription_id = self.subscription_id.unwrap_or_else(|| {
unreachable!("Subscription cron must have a subscription id")
});
let subscription = subscriptions::Entity::find_by_id(subscription_id)
.one(ctx.db())
.await?
.ok_or_else(|| RecorderError::from_model_not_found("Subscription"))?;
subscription.exec_cron(ctx).await?;
}
if let Some(subscriber_task) = self.subscriber_task_cron.as_ref() {
let task_service = ctx.task();
let mut new_subscriber_task = subscriber_task.clone();
new_subscriber_task.set_cron_id(Some(self.id));
task_service
.add_subscriber_task(new_subscriber_task)
.await?;
} else if let Some(system_task) = self.system_task_cron.as_ref() {
let task_service = ctx.task();
let mut new_system_task = system_task.clone();
new_system_task.set_cron_id(Some(self.id));
task_service.add_system_task(new_system_task).await?;
} else {
unimplemented!("Cron without unknown task is not supported now");
}
Ok(())
@@ -221,7 +306,7 @@ impl Model {
async fn mark_cron_completed(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
let db = ctx.db();
let next_run = self.calculate_next_run(&self.cron_expr)?;
let next_run = Self::calculate_next_run(&self.cron_expr, &self.cron_timezone)?;
ActiveModel {
id: Set(self.id),
@@ -259,7 +344,10 @@ impl Model {
let next_run = if should_retry {
Some(Utc::now() + retry_duration)
} else {
Some(self.calculate_next_run(&self.cron_expr)?)
Some(Self::calculate_next_run(
&self.cron_expr,
&self.cron_timezone,
)?)
};
ActiveModel {
@@ -328,7 +416,15 @@ impl Model {
locked_cron
.mark_cron_failed(
ctx,
format!("Cron timeout of {}ms", locked_cron.timeout_ms).as_str(),
format!(
"Cron timeout of {}ms",
locked_cron
.timeout_ms
.as_ref()
.map(|s| s.to_string())
.unwrap_or_else(|| "Infinite".to_string())
)
.as_str(),
retry_duration,
)
.await?;
@@ -340,11 +436,17 @@ impl Model {
Ok(())
}
fn calculate_next_run(&self, cron_expr: &str) -> RecorderResult<DateTime<Utc>> {
let cron_expr = Cron::new(cron_expr).parse()?;
pub fn calculate_next_run(cron_expr: &str, timezone: &str) -> RecorderResult<DateTime<Utc>> {
let user_tz = timezone.parse::<Tz>()?;
let next = cron_expr.find_next_occurrence(&Utc::now(), false)?;
let user_tz_now = Utc::now().with_timezone(&user_tz);
Ok(next)
let cron_expr = Cron::new(cron_expr).with_seconds_optional().parse()?;
let next = cron_expr.find_next_occurrence(&user_tz_now, false)?;
let next_utc = next.with_timezone(&Utc);
Ok(next_utc)
}
}
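An illustrative call showing the timezone handling: the next occurrence is found in the user's timezone, then converted back to UTC for storage.
fn next_run_sketch() -> RecorderResult<()> {
    // every day at 09:30 in Asia/Shanghai, stored as 01:30 UTC
    let next = Model::calculate_next_run("0 30 9 * * *", "Asia/Shanghai")?;
    assert!(next > chrono::Utc::now());
    Ok(())
}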

View File

@@ -1 +0,0 @@

View File

@@ -102,7 +102,7 @@ impl ActiveModelBehavior for ActiveModel {
C: ConnectionTrait,
{
if insert && let ActiveValue::NotSet = self.token {
let token = nanoid::nanoid!(10);
let token = Uuid::now_v7().to_string();
self.token = ActiveValue::Set(token);
}
Ok(self)
@@ -122,7 +122,7 @@ impl Model {
.filter(Column::FeedType.eq(FeedType::Rss))
.one(db)
.await?
.ok_or(RecorderError::from_model_not_found("Feed"))?;
.ok_or(RecorderError::from_entity_not_found::<Entity>())?;
let feed = Feed::from_model(ctx, feed_model).await?;

View File

@@ -44,7 +44,7 @@ impl Feed {
.await?;
(subscription, episodes)
} else {
return Err(RecorderError::from_model_not_found("Subscription"));
return Err(RecorderError::from_entity_not_found::<subscriptions::Entity>());
};
Ok(Feed::SubscritpionEpisodes(

View File

@@ -1,6 +1,7 @@
pub mod auth;
pub mod bangumi;
pub mod credential_3rd;
pub mod cron;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
@@ -11,4 +12,4 @@ pub mod subscribers;
pub mod subscription_bangumi;
pub mod subscription_episode;
pub mod subscriptions;
pub mod cron;
pub mod system_tasks;

View File

@@ -1,7 +1,7 @@
use async_trait::async_trait;
use sea_orm::{
ActiveModelTrait, ColumnTrait, ConnectionTrait, DbErr, EntityTrait, Insert, IntoActiveModel,
Iterable, QueryResult, QueryTrait, SelectModel, SelectorRaw, sea_query::Query,
QueryResult, QueryTrait, sea_query::Query,
};
#[async_trait]
@@ -10,13 +10,6 @@ where
<A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait,
{
fn exec_with_returning_models<C>(
self,
db: &C,
) -> SelectorRaw<SelectModel<<A::Entity as EntityTrait>::Model>>
where
C: ConnectionTrait;
async fn exec_with_returning_columns<C, I>(
self,
db: &C,
@@ -33,26 +26,6 @@ where
<A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait + Send,
{
fn exec_with_returning_models<C>(
self,
db: &C,
) -> SelectorRaw<SelectModel<<A::Entity as EntityTrait>::Model>>
where
C: ConnectionTrait,
{
let mut insert_statement = self.into_query();
let db_backend = db.get_database_backend();
let returning = Query::returning().exprs(
<A::Entity as EntityTrait>::Column::iter()
.map(|c| c.select_as(c.into_returning_expr(db_backend))),
);
insert_statement.returning(returning);
let insert_statement = db_backend.build(&insert_statement);
SelectorRaw::<SelectModel<<A::Entity as EntityTrait>::Model>>::from_statement(
insert_statement,
)
}
async fn exec_with_returning_columns<C, I>(
self,
db: &C,

View File

@@ -1,9 +1,10 @@
use async_trait::async_trait;
use sea_orm::entity::prelude::*;
use sea_orm::{ActiveValue, entity::prelude::*};
use crate::task::SubscriberTaskTrait;
pub use crate::task::{
SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant,
SubscriberTaskTypeVariantIter,
SubscriberTask, SubscriberTaskInput, SubscriberTaskType, SubscriberTaskTypeEnum,
SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter,
};
#[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)]
@@ -23,13 +24,14 @@ pub enum SubscriberTaskStatus {
Killed,
}
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "subscriber_tasks")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: String,
pub subscriber_id: i32,
pub subscription_id: Option<i32>,
pub cron_id: Option<i32>,
pub job: SubscriberTask,
pub task_type: SubscriberTaskType,
pub status: SubscriberTaskStatus,
@@ -50,17 +52,25 @@ pub enum Relation {
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
on_delete = "Restrict"
)]
Subscriber,
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id",
on_update = "NoAction",
on_delete = "NoAction"
on_update = "Cascade",
on_delete = "Restrict"
)]
Subscription,
#[sea_orm(
belongs_to = "super::cron::Entity",
from = "Column::CronId",
to = "super::cron::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Cron,
}
impl Related<super::subscribers::Entity> for Entity {
@@ -75,13 +85,36 @@ impl Related<super::subscriptions::Entity> for Entity {
}
}
impl Related<super::cron::Entity> for Entity {
fn to() -> RelationDef {
Relation::Cron.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(entity = "super::cron::Entity")]
Cron,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, _insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
if let ActiveValue::Set(subscriber_id) = self.subscriber_id
&& let ActiveValue::Set(ref job) = self.job
&& job.get_subscriber_id() != subscriber_id
{
return Err(DbErr::Custom(
"SubscriberTask subscriber_id does not match job.subscriber_id".to_string(),
));
}
Ok(self)
}
}
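Illustrative only: the guard above rejects a row whose subscriber_id column disagrees with job.subscriber_id before any SQL is issued.
use sea_orm::{ActiveModelTrait, ActiveValue::Set, ConnectionTrait};

async fn mismatch_is_rejected(db: &impl ConnectionTrait, job: SubscriberTask) {
    let mismatched = ActiveModel {
        subscriber_id: Set(job.get_subscriber_id() + 1), // deliberate mismatch
        job: Set(job),
        ..Default::default()
    };
    // before_save returns DbErr::Custom(...), so insert fails.
    assert!(mismatched.insert(db).await.is_err());
}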

View File

@@ -45,6 +45,8 @@ pub enum Relation {
Feed,
#[sea_orm(has_many = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(has_many = "super::system_tasks::Entity")]
SystemTask,
}
impl Related<super::subscriptions::Entity> for Entity {
@@ -95,6 +97,12 @@ impl Related<super::subscriber_tasks::Entity> for Entity {
}
}
impl Related<super::system_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SystemTask.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscriptions::Entity")]
@@ -111,6 +119,8 @@ pub enum RelatedEntity {
Feed,
#[sea_orm(entity = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(entity = "super::system_tasks::Entity")]
SystemTask,
}
#[derive(Debug, Deserialize, Serialize)]
@@ -131,7 +141,7 @@ impl Model {
let db = ctx.db();
let subscriber = Entity::find_by_id(id).one(db).await?.ok_or_else(|| {
RecorderError::from_model_not_found_detail("subscribers", format!("id {id} not found"))
RecorderError::from_entity_not_found_detail::<Entity, _>(format!("id {id} not found"))
})?;
Ok(subscriber)
}

View File

@@ -11,10 +11,7 @@ pub use registry::{
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
};
use crate::{app::AppContextTrait, errors::RecorderResult};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
@@ -63,6 +60,8 @@ pub enum Relation {
Feed,
#[sea_orm(has_many = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(has_many = "super::cron::Entity")]
Cron,
}
impl Related<super::subscribers::Entity> for Entity {
@@ -129,6 +128,12 @@ impl Related<super::subscriber_tasks::Entity> for Entity {
}
}
impl Related<super::cron::Entity> for Entity {
fn to() -> RelationDef {
Relation::Cron.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
@@ -147,6 +152,8 @@ pub enum RelatedEntity {
Feed,
#[sea_orm(entity = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(entity = "super::cron::Entity")]
Cron,
}
#[async_trait]
@@ -155,50 +162,6 @@ impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {}
impl Model {
pub async fn toggle_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
enabled: bool,
) -> RecorderResult<()> {
let db = ctx.db();
Entity::update_many()
.col_expr(Column::Enabled, Expr::value(enabled))
.filter(Column::Id.is_in(ids))
.exec(db)
.await?;
Ok(())
}
pub async fn delete_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
) -> RecorderResult<()> {
let db = ctx.db();
Entity::delete_many()
.filter(Column::Id.is_in(ids))
.exec(db)
.await?;
Ok(())
}
pub async fn find_by_id_and_subscriber_id(
ctx: &dyn AppContextTrait,
subscriber_id: i32,
subscription_id: i32,
) -> RecorderResult<Self> {
let db = ctx.db();
let subscription_model = Entity::find_by_id(subscription_id)
.one(db)
.await?
.ok_or_else(|| RecorderError::from_model_not_found("Subscription"))?;
if subscription_model.subscriber_id != subscriber_id {
Err(RecorderError::from_model_not_found("Subscription"))?;
}
Ok(subscription_model)
}
pub async fn exec_cron(&self, _ctx: &dyn AppContextTrait) -> RecorderResult<()> {
todo!()
}

View File

@@ -1,129 +1,147 @@
use std::{fmt::Debug, sync::Arc};
use async_trait::async_trait;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter};
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
errors::RecorderResult,
extract::mikan::{
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
},
models::subscriptions::{self, SubscriptionTrait},
};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
#[sea_orm(string_value = "mikan_subscriber")]
MikanSubscriber,
#[sea_orm(string_value = "mikan_season")]
MikanSeason,
#[sea_orm(string_value = "mikan_bangumi")]
MikanBangumi,
#[sea_orm(string_value = "manual")]
Manual,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "category")]
pub enum Subscription {
#[serde(rename = "mikan_subscriber")]
MikanSubscriber(MikanSubscriberSubscription),
#[serde(rename = "mikan_season")]
MikanSeason(MikanSeasonSubscription),
#[serde(rename = "mikan_bangumi")]
MikanBangumi(MikanBangumiSubscription),
#[serde(rename = "manual")]
Manual,
}
impl Subscription {
pub fn category(&self) -> SubscriptionCategory {
match self {
Self::MikanSubscriber(_) => SubscriptionCategory::MikanSubscriber,
Self::MikanSeason(_) => SubscriptionCategory::MikanSeason,
Self::MikanBangumi(_) => SubscriptionCategory::MikanBangumi,
Self::Manual => SubscriptionCategory::Manual,
}
}
}
#[async_trait]
impl SubscriptionTrait for Subscription {
fn get_subscriber_id(&self) -> i32 {
match self {
Self::MikanSubscriber(subscription) => subscription.get_subscriber_id(),
Self::MikanSeason(subscription) => subscription.get_subscriber_id(),
Self::MikanBangumi(subscription) => subscription.get_subscriber_id(),
Self::Manual => unreachable!(),
}
}
fn get_subscription_id(&self) -> i32 {
match self {
Self::MikanSubscriber(subscription) => subscription.get_subscription_id(),
Self::MikanSeason(subscription) => subscription.get_subscription_id(),
Self::MikanBangumi(subscription) => subscription.get_subscription_id(),
Self::Manual => unreachable!(),
}
}
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::Manual => Ok(()),
}
}
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_feeds_full(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_feeds_full(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_feeds_full(ctx).await,
Self::Manual => Ok(()),
}
}
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_sources(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_sources(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_sources(ctx).await,
Self::Manual => Ok(()),
}
}
fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
match model.category {
SubscriptionCategory::MikanSubscriber => {
MikanSubscriberSubscription::try_from_model(model).map(Self::MikanSubscriber)
macro_rules! register_subscription_type {
(
subscription_category_enum: {
$(#[$subscription_category_enum_meta:meta])*
pub enum $type_enum_name:ident {
$(
$(#[$variant_meta:meta])*
$variant:ident => $string_value:literal
),* $(,)?
}
SubscriptionCategory::MikanSeason => {
MikanSeasonSubscription::try_from_model(model).map(Self::MikanSeason)
}$(,)?
subscription_enum: {
$(#[$subscription_enum_meta:meta])*
pub enum $subscription_enum_name:ident {
$(
$subscription_variant:ident($subscription_type:ty)
),* $(,)?
}
SubscriptionCategory::MikanBangumi => {
MikanBangumiSubscription::try_from_model(model).map(Self::MikanBangumi)
}
) => {
$(#[$subscription_category_enum_meta])*
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
pub enum $type_enum_name {
$(
$(#[$variant_meta])*
#[serde(rename = $string_value)]
#[sea_orm(string_value = $string_value)]
$variant,
)*
}
$(#[$subscription_enum_meta])*
#[serde(tag = "category")]
pub enum $subscription_enum_name {
$(
#[serde(rename = $string_value)]
$subscription_variant($subscription_type),
)*
}
impl $subscription_enum_name {
pub fn category(&self) -> $type_enum_name {
match self {
$(Self::$subscription_variant(_) => $type_enum_name::$variant,)*
}
}
SubscriptionCategory::Manual => Ok(Self::Manual),
}
#[async_trait::async_trait]
impl $crate::models::subscriptions::SubscriptionTrait for $subscription_enum_name {
fn get_subscriber_id(&self) -> i32 {
match self {
$(Self::$subscription_variant(subscription) => subscription.get_subscriber_id(),)*
}
}
fn get_subscription_id(&self) -> i32 {
match self {
$(Self::$subscription_variant(subscription) => subscription.get_subscription_id(),)*
}
}
async fn sync_feeds_incremental(&self, ctx: Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$subscription_variant(subscription) => subscription.sync_feeds_incremental(ctx).await,)*
}
}
async fn sync_feeds_full(&self, ctx: Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$subscription_variant(subscription) => subscription.sync_feeds_full(ctx).await,)*
}
}
async fn sync_sources(&self, ctx: Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$subscription_variant(subscription) => subscription.sync_sources(ctx).await,)*
}
}
fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
match model.category {
$($type_enum_name::$variant => {
<$subscription_type as $crate::models::subscriptions::SubscriptionTrait>::try_from_model(model).map(Self::$subscription_variant)
})*
}
}
}
impl TryFrom<&$crate::models::subscriptions::Model> for $subscription_enum_name {
type Error = $crate::errors::RecorderError;
fn try_from(model: &$crate::models::subscriptions::Model) -> Result<Self, Self::Error> {
Self::try_from_model(model)
}
}
};
}
register_subscription_type! {
subscription_category_enum: {
#[derive(
Clone,
Debug,
Serialize,
Deserialize,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
pub enum SubscriptionCategory {
MikanSubscriber => "mikan_subscriber",
MikanSeason => "mikan_season",
MikanBangumi => "mikan_bangumi",
}
}
subscription_enum: {
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum Subscription {
MikanSubscriber(MikanSubscriberSubscription),
MikanSeason(MikanSeasonSubscription),
MikanBangumi(MikanBangumiSubscription)
}
}
}
impl TryFrom<&subscriptions::Model> for Subscription {
type Error = RecorderError;
fn try_from(model: &subscriptions::Model) -> Result<Self, Self::Error> {
Self::try_from_model(model)
}
}
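A minimal usage sketch of the macro-generated enum, assuming a fetched subscriptions::Model and an app context (the sync_one helper is hypothetical):

use std::sync::Arc;

use crate::{
    app::AppContextTrait,
    errors::RecorderResult,
    models::subscriptions::{self, Subscription, SubscriptionTrait},
};

// Resolve the concrete subscription type from model.category through the
// generated TryFrom impl, then fan the call out via SubscriptionTrait.
async fn sync_one(
    ctx: Arc<dyn AppContextTrait>,
    model: &subscriptions::Model,
) -> RecorderResult<()> {
    let subscription = Subscription::try_from(model)?;
    subscription.sync_feeds_incremental(ctx).await
}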

View File

@@ -0,0 +1,99 @@
use async_trait::async_trait;
use sea_orm::{ActiveValue, entity::prelude::*};
pub use crate::task::{
SystemTask, SystemTaskInput, SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant,
SystemTaskTypeVariantIter,
};
#[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum SystemTaskStatus {
#[sea_orm(string_value = "Pending")]
Pending,
#[sea_orm(string_value = "Scheduled")]
Scheduled,
#[sea_orm(string_value = "Running")]
Running,
#[sea_orm(string_value = "Done")]
Done,
#[sea_orm(string_value = "Failed")]
Failed,
#[sea_orm(string_value = "Killed")]
Killed,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "system_tasks")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: String,
pub subscriber_id: Option<i32>,
pub cron_id: Option<i32>,
pub job: SystemTask,
pub task_type: SystemTaskType,
pub status: SystemTaskStatus,
pub attempts: i32,
pub max_attempts: i32,
pub run_at: DateTimeUtc,
pub last_error: Option<String>,
pub lock_at: Option<DateTimeUtc>,
pub lock_by: Option<String>,
pub done_at: Option<DateTimeUtc>,
pub priority: i32,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Subscriber,
#[sea_orm(
belongs_to = "super::cron::Entity",
from = "Column::CronId",
to = "super::cron::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Cron,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::cron::Entity> for Entity {
fn to() -> RelationDef {
Relation::Cron.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::cron::Entity")]
Cron,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, _insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
if let ActiveValue::Set(Some(..)) = self.subscriber_id {
return Err(DbErr::Custom(
"SystemTask cannot be created by subscribers yet".to_string(),
));
}
Ok(self)
}
}
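A short sketch of the before_save guard above, with illustrative values:

use sea_orm::ActiveValue;

// Any system task carrying a subscriber_id is rejected in before_save,
// so this insert fails with DbErr::Custom before reaching postgres.
let rejected = ActiveModel {
    subscriber_id: ActiveValue::Set(Some(42)),
    ..Default::default()
};
// rejected.insert(db).await == Err(DbErr::Custom(..))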

View File

@@ -89,6 +89,13 @@ impl StorageService {
p
}
#[cfg(any(test, feature = "test-utils"))]
pub fn build_test_path(&self, path: impl AsRef<Path>) -> PathBuf {
let mut p = PathBuf::from("/test");
p.push(path);
p
}
pub fn build_public_path(&self, path: impl AsRef<Path>) -> PathBuf {
let mut p = PathBuf::from("/public");
p.push(path);
@@ -271,7 +278,7 @@ impl StorageService {
if let Some(mut ranges) = ranges {
if ranges.len() > 1 {
let boundary = Uuid::new_v4().to_string();
let boundary = Uuid::now_v7().to_string();
let reader = self.reader(storage_path.as_ref()).await?;
let stream: impl Stream<Item = Result<Bytes, RecorderError>> = {
let boundary = boundary.clone();

View File

@@ -14,6 +14,8 @@ pub struct TaskConfig {
pub system_task_reenqueue_orphaned_after: Duration,
#[serde(default = "default_cron_retry_duration")]
pub cron_retry_duration: Duration,
#[serde(default = "default_cron_interval_duration")]
pub cron_interval_duration: Duration,
}
impl Default for TaskConfig {
@@ -25,6 +27,7 @@ impl Default for TaskConfig {
default_subscriber_task_reenqueue_orphaned_after(),
system_task_reenqueue_orphaned_after: default_system_task_reenqueue_orphaned_after(),
cron_retry_duration: default_cron_retry_duration(),
cron_interval_duration: default_cron_interval_duration(),
}
}
}
@@ -45,6 +48,10 @@ pub fn default_system_task_workers() -> u32 {
}
}
pub fn default_cron_interval_duration() -> Duration {
Duration::from_secs(30)
}
pub fn default_subscriber_task_reenqueue_orphaned_after() -> Duration {
Duration::from_secs(3600)
}
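A quick sketch of the new default in effect:

use std::time::Duration;

use crate::task::TaskConfig;

// With no override, the cron scheduler polls for due crons every 30s
// in addition to reacting to postgres NOTIFY events.
let config = TaskConfig::default();
assert_eq!(config.cron_interval_duration, Duration::from_secs(30));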

View File

@@ -1,34 +1,74 @@
use std::sync::Arc;
use futures::Stream;
use async_trait::async_trait;
use futures::{Stream, StreamExt, pin_mut};
use serde::{Serialize, de::DeserializeOwned};
use crate::{app::AppContextTrait, errors::RecorderResult};
pub const SYSTEM_TASK_APALIS_NAME: &str = "system_task";
pub const SUBSCRIBER_TASK_APALIS_NAME: &str = "subscriber_task";
pub const SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME: &str =
"setup_apalis_jobs_extra_foreign_keys";
pub const SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME: &str =
"setup_apalis_jobs_extra_foreign_keys_trigger";
#[async_trait::async_trait]
#[async_trait]
pub trait AsyncTaskTrait: Serialize + DeserializeOwned + Sized {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.run_async(ctx).await?;
Ok(())
}
}
#[async_trait::async_trait]
pub trait StreamTaskTrait: Serialize + DeserializeOwned + Sized {
pub trait StreamTaskTrait {
type Yield: Serialize + DeserializeOwned + Send;
fn run_stream(
self,
ctx: Arc<dyn AppContextTrait>,
) -> impl Stream<Item = RecorderResult<Self::Yield>> + Send;
}
async fn run(self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
unimplemented!()
#[async_trait]
impl<T> AsyncTaskTrait for T
where
T: StreamTaskTrait + Serialize + DeserializeOwned + Sized + Send,
{
async fn run_async(self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
let s = self.run_stream(_ctx);
pin_mut!(s);
while let Some(item) = s.next().await {
item?;
}
Ok(())
}
}
pub trait SystemTaskTrait: AsyncTaskTrait {
type InputType: Serialize + DeserializeOwned + Sized + Send;
fn get_subscriber_id(&self) -> Option<i32>;
fn set_subscriber_id(&mut self, subscriber_id: Option<i32>);
fn get_cron_id(&self) -> Option<i32>;
fn set_cron_id(&mut self, cron_id: Option<i32>);
fn from_input(input: Self::InputType, subscriber_id: Option<i32>) -> Self;
}
pub trait SubscriberTaskTrait: AsyncTaskTrait {
type InputType: Serialize + DeserializeOwned + Sized + Send;
fn get_subscriber_id(&self) -> i32;
fn set_subscriber_id(&mut self, subscriber_id: i32);
fn get_cron_id(&self) -> Option<i32>;
fn set_cron_id(&mut self, cron_id: Option<i32>);
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self;
}
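A minimal sketch of the blanket impl in action, assuming a toy CountTask (hypothetical name):

use std::sync::Arc;

use futures::{Stream, stream};
use serde::{Deserialize, Serialize};

use crate::{
    app::AppContextTrait,
    errors::RecorderResult,
    task::StreamTaskTrait,
};

#[derive(Serialize, Deserialize)]
struct CountTask {
    up_to: u32,
}

impl StreamTaskTrait for CountTask {
    type Yield = u32;

    // Each yielded item is drained by the blanket AsyncTaskTrait impl,
    // which short-circuits on the first Err.
    fn run_stream(
        self,
        _ctx: Arc<dyn AppContextTrait>,
    ) -> impl Stream<Item = RecorderResult<Self::Yield>> + Send {
        stream::iter((0..self.up_to).map(Ok))
    }
}

// CountTask now gets run_async for free:
// CountTask { up_to: 3 }.run_async(ctx).await?;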

View File

@@ -1,16 +0,0 @@
use sea_orm::sea_query;
#[derive(sea_query::Iden)]
pub enum ApalisSchema {
#[iden = "apalis"]
Schema,
}
#[derive(sea_query::Iden)]
pub enum ApalisJobs {
#[iden = "jobs"]
Table,
Id,
}

View File

@@ -1,19 +1,22 @@
mod config;
mod core;
mod r#extern;
mod registry;
mod service;
pub use core::{
AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, StreamTaskTrait,
AsyncTaskTrait, SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME, SUBSCRIBER_TASK_APALIS_NAME,
SYSTEM_TASK_APALIS_NAME, StreamTaskTrait, SubscriberTaskTrait, SystemTaskTrait,
};
pub use config::TaskConfig;
pub use r#extern::{ApalisJobs, ApalisSchema};
pub use registry::{
OptimizeImageTask, SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum,
SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask, SystemTask,
SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant, SystemTaskTypeVariantIter,
EchoTask, OptimizeImageTask, SubscriberTask, SubscriberTaskInput, SubscriberTaskType,
SubscriberTaskTypeEnum, SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter,
SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
SyncOneSubscriptionSourcesTask, SystemTask, SystemTaskInput, SystemTaskType,
SystemTaskTypeEnum, SystemTaskTypeVariant, SystemTaskTypeVariantIter,
};
#[allow(unused_imports)]
pub(crate) use registry::{register_subscriber_task_type, register_system_task_type};
pub use service::TaskService;

View File

@@ -1,18 +1,14 @@
mod media;
mod subscriber;
mod subscription;
mod system;
pub use media::OptimizeImageTask;
pub(crate) use subscriber::register_subscriber_task_type;
pub use subscriber::{
SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant,
SubscriberTaskTypeVariantIter,
};
pub use subscription::{
SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
SyncOneSubscriptionSourcesTask,
SubscriberTask, SubscriberTaskInput, SubscriberTaskType, SubscriberTaskTypeEnum,
SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask,
};
pub(crate) use system::register_system_task_type;
pub use system::{
SystemTask, SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant,
SystemTaskTypeVariantIter,
EchoTask, OptimizeImageTask, SystemTask, SystemTaskInput, SystemTaskType, SystemTaskTypeEnum,
SystemTaskTypeVariant, SystemTaskTypeVariantIter,
};

View File

@@ -1,100 +0,0 @@
use std::sync::Arc;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
models::subscriptions::SubscriptionTrait,
task::{
AsyncTaskTrait,
registry::{
SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
SyncOneSubscriptionSourcesTask,
},
},
};
#[derive(
Clone,
Debug,
Serialize,
Deserialize,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum SubscriberTaskType {
#[serde(rename = "sync_one_subscription_feeds_incremental")]
#[sea_orm(string_value = "sync_one_subscription_feeds_incremental")]
SyncOneSubscriptionFeedsIncremental,
#[serde(rename = "sync_one_subscription_feeds_full")]
#[sea_orm(string_value = "sync_one_subscription_feeds_full")]
SyncOneSubscriptionFeedsFull,
#[serde(rename = "sync_one_subscription_sources")]
#[sea_orm(string_value = "sync_one_subscription_sources")]
SyncOneSubscriptionSources,
}
impl TryFrom<&SubscriberTask> for serde_json::Value {
type Error = RecorderError;
fn try_from(value: &SubscriberTask) -> Result<Self, Self::Error> {
let json_value = serde_json::to_value(value)?;
Ok(match json_value {
serde_json::Value::Object(mut map) => {
map.remove("task_type");
serde_json::Value::Object(map)
}
_ => {
unreachable!("subscriber task must be an json object");
}
})
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, FromJsonQueryResult)]
#[serde(tag = "task_type")]
pub enum SubscriberTask {
#[serde(rename = "sync_one_subscription_feeds_incremental")]
SyncOneSubscriptionFeedsIncremental(SyncOneSubscriptionFeedsIncrementalTask),
#[serde(rename = "sync_one_subscription_feeds_full")]
SyncOneSubscriptionFeedsFull(SyncOneSubscriptionFeedsFullTask),
#[serde(rename = "sync_one_subscription_sources")]
SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTask),
}
impl SubscriberTask {
pub fn get_subscriber_id(&self) -> i32 {
match self {
Self::SyncOneSubscriptionFeedsIncremental(task) => task.0.get_subscriber_id(),
Self::SyncOneSubscriptionFeedsFull(task) => task.0.get_subscriber_id(),
Self::SyncOneSubscriptionSources(task) => task.0.get_subscriber_id(),
}
}
pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::SyncOneSubscriptionFeedsIncremental(task) => task.run(ctx).await,
Self::SyncOneSubscriptionFeedsFull(task) => task.run(ctx).await,
Self::SyncOneSubscriptionSources(task) => task.run(ctx).await,
}
}
pub fn task_type(&self) -> SubscriberTaskType {
match self {
Self::SyncOneSubscriptionFeedsIncremental(_) => {
SubscriberTaskType::SyncOneSubscriptionFeedsIncremental
}
Self::SyncOneSubscriptionFeedsFull(_) => {
SubscriberTaskType::SyncOneSubscriptionFeedsFull
}
Self::SyncOneSubscriptionSources(_) => SubscriberTaskType::SyncOneSubscriptionSources,
}
}
}

View File

@@ -0,0 +1,66 @@
macro_rules! register_subscriber_task_type {
(
$(#[$type_meta:meta])*
$task_vis:vis struct $task_name:ident {
$($(#[$field_meta:meta])* pub $field_name:ident: $field_type:ty),* $(,)?
}
) => {
$(#[$type_meta])*
#[derive(typed_builder::TypedBuilder, ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[ts(rename_all = "camelCase")]
$task_vis struct $task_name {
$($(#[$field_meta])* pub $field_name: $field_type,)*
pub subscriber_id: i32,
#[builder(default = None)]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>,
}
paste::paste! {
$(#[$type_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
$task_vis struct [<$task_name Input>] {
$($(#[$field_meta])* pub $field_name: $field_type,)*
#[serde(default, skip_serializing_if = "Option::is_none")]
pub subscriber_id: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>,
}
}
impl $crate::task::SubscriberTaskTrait for $task_name {
paste::paste! {
type InputType = [<$task_name Input>];
}
fn get_subscriber_id(&self) -> i32 {
self.subscriber_id
}
fn get_cron_id(&self) -> Option<i32> {
self.cron_id
}
fn set_subscriber_id(&mut self, subscriber_id: i32) {
self.subscriber_id = subscriber_id;
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
self.cron_id = cron_id;
}
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self {
Self {
$($field_name: input.$field_name,)*
cron_id: input.cron_id,
subscriber_id: input.subscriber_id.unwrap_or(subscriber_id),
}
}
}
}
}
pub(crate) use register_subscriber_task_type;
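Roughly, an invocation such as the following (PingTask is hypothetical) produces both the task struct and its Input twin:

register_subscriber_task_type! {
    #[derive(Clone, Debug, PartialEq)]
    pub struct PingTask {
        pub target: String,
    }
}

// Expands (abridged) to:
//   pub struct PingTask { pub target: String, pub subscriber_id: i32, pub cron_id: Option<i32> }
//   pub struct PingTaskInput { pub target: String, pub subscriber_id: Option<i32>, pub cron_id: Option<i32> }
// plus a SubscriberTaskTrait impl whose from_input takes the input's
// subscriber_id when present and falls back to the caller's otherwise.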

View File

@@ -0,0 +1,165 @@
mod base;
mod subscription;
pub(crate) use base::register_subscriber_task_type;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
pub use subscription::{
SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
SyncOneSubscriptionSourcesTask,
};
macro_rules! register_subscriber_task_types {
(
task_type_enum: {
$(#[$type_enum_meta:meta])*
$type_vis:vis enum $type_enum_name:ident {
$(
$(#[$variant_meta:meta])*
$variant:ident => $string_value:literal
),* $(,)?
}
},
task_enum: {
$(#[$task_enum_meta:meta])*
$task_vis:vis enum $task_enum_name:ident {
$(
$(#[$task_variant_meta:meta])*
$task_variant:ident($task_type:ty)
),* $(,)?
}
}
) => {
$(#[$type_enum_meta])*
#[derive(serde::Serialize, serde::Deserialize)]
#[sea_orm(rs_type = "String", db_type = "Text")]
$type_vis enum $type_enum_name {
$(
$(#[$variant_meta])*
#[serde(rename = $string_value)]
#[sea_orm(string_value = $string_value)]
$variant,
)*
}
$(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(tag = "task_type")]
#[ts(export, rename = "SubscriberTaskType", rename_all = "camelCase", tag = "taskType")]
$task_vis enum $task_enum_name {
$(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant($task_type),
)*
}
paste::paste! {
$(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(tag = "taskType", rename_all = "camelCase")]
#[ts(export, rename_all = "camelCase", tag = "taskType")]
$task_vis enum [<$task_enum_name Input>] {
$(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant(<$task_type as $crate::task::SubscriberTaskTrait>::InputType),
)*
}
}
impl $task_enum_name {
pub fn task_type(&self) -> $type_enum_name {
match self {
$(Self::$task_variant(_) => $type_enum_name::$variant,)*
}
}
}
#[async_trait::async_trait]
impl $crate::task::AsyncTaskTrait for $task_enum_name {
async fn run_async(self, ctx: std::sync::Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::AsyncTaskTrait>::run_async(t, ctx).await,)*
}
}
}
impl $crate::task::SubscriberTaskTrait for $task_enum_name {
paste::paste! {
type InputType = [<$task_enum_name Input>];
}
fn get_subscriber_id(&self) -> i32 {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::SubscriberTaskTrait>::get_subscriber_id(t),)*
}
}
fn get_cron_id(&self) -> Option<i32> {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::SubscriberTaskTrait>::get_cron_id(t),)*
}
}
fn set_subscriber_id(&mut self, subscriber_id: i32) {
match self {
$(Self::$task_variant(t) => t.set_subscriber_id(subscriber_id),)*
}
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
match self {
$(Self::$task_variant(t) => t.set_cron_id(cron_id),)*
}
}
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self {
match input {
$(Self::InputType::$task_variant(t) =>
Self::$task_variant(<$task_type as $crate::task::SubscriberTaskTrait>::from_input(t, subscriber_id)),)*
}
}
}
$(
impl From<$task_type> for $task_enum_name {
fn from(task: $task_type) -> Self {
Self::$task_variant(task)
}
}
)*
};
}
register_subscriber_task_types!(
task_type_enum: {
#[derive(
Clone,
Debug,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
pub enum SubscriberTaskType {
SyncOneSubscriptionFeedsIncremental => "sync_one_subscription_feeds_incremental",
SyncOneSubscriptionFeedsFull => "sync_one_subscription_feeds_full",
SyncOneSubscriptionSources => "sync_one_subscription_sources"
}
},
task_enum: {
#[derive(Clone, Debug, PartialEq, FromJsonQueryResult)]
pub enum SubscriberTask {
SyncOneSubscriptionFeedsIncremental(SyncOneSubscriptionFeedsIncrementalTask),
SyncOneSubscriptionFeedsFull(SyncOneSubscriptionFeedsFullTask),
SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTask),
}
}
);
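A sketch of the wire shape the task_type tag produces (illustrative values):

use crate::task::{SubscriberTask, SubscriberTaskType};

// With #[serde(tag = "task_type")], the variant's fields sit next to
// the tag in one flat JSON object.
let task: SubscriberTask = serde_json::from_value(serde_json::json!({
    "task_type": "sync_one_subscription_sources",
    "subscription_id": 1,
    "subscriber_id": 7
}))
.expect("a valid subscriber task payload");
assert_eq!(task.task_type(), SubscriberTaskType::SyncOneSubscriptionSources);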

View File

@@ -0,0 +1,66 @@
use sea_orm::prelude::*;
use super::base::register_subscriber_task_type;
use crate::{errors::RecorderResult, models::subscriptions::SubscriptionTrait};
macro_rules! register_subscription_task_type {
(
$(#[$type_meta:meta])* pub struct $task_name:ident {
$($(#[$field_meta:meta])* pub $field_name:ident: $field_type:ty),* $(,)?
} => async |$subscription_param:ident, $ctx_param:ident| -> $task_return_type:ty $method_body:block
) => {
register_subscriber_task_type! {
$(#[$type_meta])*
pub struct $task_name {
$($(#[$field_meta])* pub $field_name: $field_type,)*
pub subscription_id: i32,
}
}
#[async_trait::async_trait]
impl $crate::task::AsyncTaskTrait for $task_name {
async fn run_async(self, ctx: std::sync::Arc<dyn $crate::app::AppContextTrait>) -> $task_return_type {
use $crate::models::subscriptions::{
Entity, Column, Subscription,
};
let subscription_model = Entity::find()
.filter(Column::Id.eq(self.subscription_id))
.filter(Column::SubscriberId.eq(self.subscriber_id))
.one(ctx.db())
.await?
.ok_or_else(|| $crate::errors::RecorderError::from_entity_not_found::<Entity>())?;
let $subscription_param = Subscription::try_from_model(&subscription_model)?;
let $ctx_param = ctx;
$method_body
}
}
}
}
register_subscription_task_type! {
#[derive(Clone, Debug, PartialEq)]
pub struct SyncOneSubscriptionFeedsIncrementalTask {
} => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_feeds_incremental(ctx).await?;
Ok(())
}
}
register_subscription_task_type! {
#[derive(Clone, Debug, PartialEq)]
pub struct SyncOneSubscriptionFeedsFullTask {
} => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_feeds_full(ctx).await?;
Ok(())
}
}
register_subscription_task_type! {
#[derive(Clone, Debug, PartialEq)]
pub struct SyncOneSubscriptionSourcesTask {
} => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_sources(ctx).await?;
Ok(())
}
}
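A construction sketch for one generated task (illustrative ids):

use crate::task::SyncOneSubscriptionFeedsFullTask;

// register_subscription_task_type! appends subscription_id to the
// declared fields and register_subscriber_task_type! appends
// subscriber_id and cron_id, so the builder path is:
let task = SyncOneSubscriptionFeedsFullTask::builder()
    .subscription_id(1)
    .subscriber_id(7)
    .build();
// run_async loads the subscription row scoped to (id, subscriber_id)
// and then calls sync_feeds_full on it.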

View File

@@ -1,62 +0,0 @@
use std::sync::Arc;
use sea_orm::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::RecorderResult,
models::subscriptions::{self, SubscriptionTrait},
task::AsyncTaskTrait,
};
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionFeedsIncrementalTask(pub subscriptions::Subscription);
impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsIncrementalTask {
fn from(subscription: subscriptions::Subscription) -> Self {
Self(subscription)
}
}
#[async_trait::async_trait]
impl AsyncTaskTrait for SyncOneSubscriptionFeedsIncrementalTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_feeds_incremental(ctx).await?;
Ok(())
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionFeedsFullTask(pub subscriptions::Subscription);
impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsFullTask {
fn from(subscription: subscriptions::Subscription) -> Self {
Self(subscription)
}
}
#[async_trait::async_trait]
impl AsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_feeds_full(ctx).await?;
Ok(())
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionSourcesTask(pub subscriptions::Subscription);
#[async_trait::async_trait]
impl AsyncTaskTrait for SyncOneSubscriptionSourcesTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_sources(ctx).await?;
Ok(())
}
}
impl From<subscriptions::Subscription> for SyncOneSubscriptionSourcesTask {
fn from(subscription: subscriptions::Subscription) -> Self {
Self(subscription)
}
}

View File

@@ -1,43 +0,0 @@
use std::sync::Arc;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::RecorderResult,
task::{AsyncTaskTrait, registry::media::OptimizeImageTask},
};
#[derive(
Clone,
Debug,
Serialize,
Deserialize,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum SystemTaskType {
#[serde(rename = "optimize_image")]
#[sea_orm(string_value = "optimize_image")]
OptimizeImage,
}
#[derive(Clone, Debug, Serialize, Deserialize, FromJsonQueryResult)]
pub enum SystemTask {
#[serde(rename = "optimize_image")]
OptimizeImage(OptimizeImageTask),
}
impl SystemTask {
pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::OptimizeImage(task) => task.run(ctx).await,
}
}
}

View File

@@ -0,0 +1,67 @@
macro_rules! register_system_task_type {
(
$(#[$type_meta:meta])*
$task_vis:vis struct $task_name:ident {
$($(#[$field_meta:meta])* pub $field_name:ident: $field_type:ty),* $(,)?
}
) => {
$(#[$type_meta])*
#[derive(typed_builder::TypedBuilder, ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[ts(rename_all = "camelCase")]
$task_vis struct $task_name {
$($(#[$field_meta])* pub $field_name: $field_type,)*
#[serde(default, skip_serializing_if = "Option::is_none")]
#[builder(default = None)]
pub subscriber_id: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
#[builder(default = None)]
pub cron_id: Option<i32>,
}
paste::paste! {
$(#[$type_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
$task_vis struct [<$task_name Input>] {
$($(#[$field_meta])* pub $field_name: $field_type,)*
#[serde(default, skip_serializing_if = "Option::is_none")]
pub subscriber_id: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>,
}
}
impl $crate::task::SystemTaskTrait for $task_name {
paste::paste! {
type InputType = [<$task_name Input>];
}
fn get_subscriber_id(&self) -> Option<i32> {
self.subscriber_id
}
fn get_cron_id(&self) -> Option<i32> {
self.cron_id
}
fn set_subscriber_id(&mut self, subscriber_id: Option<i32>) {
self.subscriber_id = subscriber_id;
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
self.cron_id = cron_id;
}
fn from_input(input: Self::InputType, subscriber_id: Option<i32>) -> Self {
Self {
$($field_name: input.$field_name,)*
subscriber_id: input.subscriber_id.or(subscriber_id),
cron_id: input.cron_id,
}
}
}
}
}
pub(crate) use register_system_task_type;

View File

@@ -1,18 +1,22 @@
use std::sync::Arc;
use quirks_path::Path;
use serde::{Deserialize, Serialize};
use tracing::instrument;
use crate::{
app::AppContextTrait, errors::RecorderResult, media::EncodeImageOptions, task::AsyncTaskTrait,
app::AppContextTrait,
errors::RecorderResult,
media::EncodeImageOptions,
task::{AsyncTaskTrait, register_system_task_type},
};
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OptimizeImageTask {
pub source_path: String,
pub target_path: String,
pub format_options: EncodeImageOptions,
register_system_task_type! {
#[derive(Clone, Debug, PartialEq)]
pub struct OptimizeImageTask {
pub source_path: String,
pub target_path: String,
pub format_options: EncodeImageOptions,
}
}
#[async_trait::async_trait]

View File

@@ -0,0 +1,29 @@
use std::sync::Arc;
use chrono::Utc;
use crate::{
app::AppContextTrait,
errors::RecorderResult,
task::{AsyncTaskTrait, register_system_task_type},
};
register_system_task_type! {
#[derive(Debug, Clone, PartialEq)]
pub struct EchoTask {
pub task_id: String,
}
}
#[async_trait::async_trait]
impl AsyncTaskTrait for EchoTask {
async fn run_async(self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
tracing::info!(
"EchoTask {} start running at {}",
self.task_id,
Utc::now().to_rfc3339()
);
Ok(())
}
}
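The TypedBuilder derive injected by register_system_task_type! keeps construction terse; a sketch:

use crate::task::EchoTask;

// subscriber_id and cron_id default to None via #[builder(default = None)],
// so only task_id must be supplied.
let task = EchoTask::builder().task_id("demo".to_string()).build();
// task.run_async(ctx).await logs the task id and returns Ok(()).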

View File

@@ -0,0 +1,158 @@
mod base;
mod media;
mod misc;
pub(crate) use base::register_system_task_type;
pub use media::OptimizeImageTask;
pub use misc::EchoTask;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
macro_rules! register_system_task_types {
(
task_type_enum: {
$(#[$type_enum_meta:meta])*
$type_vis:vis enum $type_enum_name:ident {
$(
$(#[$variant_meta:meta])*
$variant:ident => $string_value:literal
),* $(,)?
}
},
task_enum: {
$(#[$task_enum_meta:meta])*
$task_vis:vis enum $task_enum_name:ident {
$(
$(#[$task_variant_meta:meta])*
$task_variant:ident($task_type:ty)
),* $(,)?
}
}
) => {
$(#[$type_enum_meta])*
#[derive(serde::Serialize, serde::Deserialize, PartialEq, Eq)]
#[sea_orm(rs_type = "String", db_type = "Text")]
$type_vis enum $type_enum_name {
$(
$(#[$variant_meta])*
#[serde(rename = $string_value)]
#[sea_orm(string_value = $string_value)]
$variant,
)*
}
$(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize, PartialEq)]
#[serde(tag = "task_type")]
#[ts(export, rename = "SystemTaskType", rename_all = "camelCase", tag = "taskType")]
$task_vis enum $task_enum_name {
$(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant($task_type),
)*
}
impl $task_enum_name {
pub fn task_type(&self) -> $type_enum_name {
match self {
$(Self::$task_variant(_) => $type_enum_name::$variant,)*
}
}
}
paste::paste! {
$(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize, PartialEq)]
#[serde(tag = "taskType", rename_all = "camelCase")]
#[ts(export, rename_all = "camelCase", tag = "taskType")]
$task_vis enum [<$task_enum_name Input>] {
$(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant(<$task_type as $crate::task::SystemTaskTrait>::InputType),
)*
}
}
#[async_trait::async_trait]
impl $crate::task::AsyncTaskTrait for $task_enum_name {
async fn run_async(self, ctx: std::sync::Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::AsyncTaskTrait>::run_async(t, ctx).await,)*
}
}
}
impl $crate::task::SystemTaskTrait for $task_enum_name {
paste::paste! {
type InputType = [<$task_enum_name Input>];
}
fn get_subscriber_id(&self) -> Option<i32> {
match self {
$(Self::$task_variant(t) => t.get_subscriber_id(),)*
}
}
fn get_cron_id(&self) -> Option<i32> {
match self {
$(Self::$task_variant(t) => t.get_cron_id(),)*
}
}
fn set_subscriber_id(&mut self, subscriber_id: Option<i32>) {
match self {
$(Self::$task_variant(t) => t.set_subscriber_id(subscriber_id),)*
}
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
match self {
$(Self::$task_variant(t) => t.set_cron_id(cron_id),)*
}
}
fn from_input(input: Self::InputType, subscriber_id: Option<i32>) -> Self {
match input {
$(Self::InputType::$task_variant(t) =>
Self::$task_variant(<$task_type as $crate::task::SystemTaskTrait>::from_input(t, subscriber_id)),)*
}
}
}
$(
impl From<$task_type> for $task_enum_name {
fn from(task: $task_type) -> Self {
Self::$task_variant(task)
}
}
)*
};
}
register_system_task_types! {
task_type_enum: {
#[derive(
Clone,
Debug,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter
)]
pub enum SystemTaskType {
OptimizeImage => "optimize_image",
Test => "test",
}
},
task_enum: {
#[derive(Clone, Debug, FromJsonQueryResult)]
pub enum SystemTask {
OptimizeImage(OptimizeImageTask),
Echo(EchoTask),
}
}
}
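A sketch of the Input conversion for system tasks, assuming the Echo variant's registered string value "test" above (illustrative payload):

use crate::task::{SystemTask, SystemTaskInput, SystemTaskTrait};

// from_input keeps an explicit subscriber_id from the input and
// otherwise falls back to the caller's value (None for system tasks).
let input: SystemTaskInput =
    serde_json::from_str(r#"{"taskType":"test","taskId":"demo"}"#)
        .expect("a valid system task input payload");
let task = SystemTask::from_input(input, None);
assert!(task.get_subscriber_id().is_none());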

View File

@@ -6,15 +6,17 @@ use apalis_sql::{
context::SqlContext,
postgres::{PgListen as ApalisPgListen, PostgresStorage as ApalisPostgresStorage},
};
use sea_orm::sqlx::postgres::PgListener;
use sea_orm::{ActiveModelTrait, sqlx::postgres::PgListener};
use tokio::sync::RwLock;
use uuid::Uuid;
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
models::cron::{self, CRON_DUE_EVENT},
models::cron::{self, CRON_DUE_DEBUG_EVENT, CRON_DUE_EVENT},
task::{
SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, SubscriberTask, TaskConfig,
AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, SubscriberTask,
TaskConfig,
config::{default_subscriber_task_workers, default_system_task_workers},
registry::SystemTask,
},
@@ -52,7 +54,7 @@ impl TaskService {
Ok(Self {
config,
cron_worker_id: nanoid::nanoid!(),
cron_worker_id: Uuid::now_v7().to_string(),
ctx,
subscriber_task_storage: Arc::new(RwLock::new(subscriber_task_storage)),
system_task_storage: Arc::new(RwLock::new(system_task_storage)),
@@ -65,7 +67,7 @@ impl TaskService {
) -> RecorderResult<()> {
let ctx = data.deref().clone();
job.run(ctx).await
job.run_async(ctx).await
}
async fn run_system_task(
@@ -73,7 +75,7 @@ impl TaskService {
data: Data<Arc<dyn AppContextTrait>>,
) -> RecorderResult<()> {
let ctx = data.deref().clone();
job.run(ctx).await
job.run_async(ctx).await
}
pub async fn retry_subscriber_task(&self, job_id: String) -> RecorderResult<()> {
@@ -104,7 +106,6 @@ impl TaskService {
pub async fn add_subscriber_task(
&self,
_subscriber_id: i32,
subscriber_task: SubscriberTask,
) -> RecorderResult<TaskId> {
let task_id = {
@@ -136,83 +137,110 @@ impl TaskService {
Ok(task_id)
}
pub async fn run<F, Fut>(&self, shutdown_signal: Option<F>) -> RecorderResult<()>
pub async fn add_subscriber_task_cron(
&self,
cm: cron::ActiveModel,
) -> RecorderResult<cron::Model> {
let db = self.ctx.db();
let m = cm.insert(db).await?;
Ok(m)
}
pub async fn add_system_task_cron(&self, cm: cron::ActiveModel) -> RecorderResult<cron::Model> {
let db = self.ctx.db();
let m = cm.insert(db).await?;
Ok(m)
}
pub async fn run(&self) -> RecorderResult<()> {
self.run_with_signal(None::<fn() -> std::future::Ready<()>>)
.await
}
pub async fn run_with_signal<F, Fut>(&self, shutdown_signal: Option<F>) -> RecorderResult<()>
where
F: Fn() -> Fut + Send + 'static,
F: FnOnce() -> Fut + Send + 'static,
Fut: Future<Output = ()> + Send,
{
tokio::try_join!(
async {
tokio::select! {
_ = {
let monitor = self.setup_apalis_monitor().await?;
if let Some(shutdown_signal) = shutdown_signal {
monitor
.run_with_signal(async move {
shutdown_signal().await;
tracing::info!("apalis shutting down...");
Ok(())
})
.await?;
} else {
monitor.run().await?;
async move {
if let Some(shutdown_signal) = shutdown_signal {
monitor
.run_with_signal(async move {
shutdown_signal().await;
tracing::info!("apalis shutting down...");
Ok(())
})
.await?;
} else {
monitor.run().await?;
}
Ok::<_, RecorderError>(())
}
Ok::<_, RecorderError>(())
},
async {
} => {}
_ = {
let listener = self.setup_apalis_listener().await?;
tokio::task::spawn(async move {
async move {
if let Err(e) = listener.listen().await {
tracing::error!("Error listening to apalis: {e}");
}
});
Ok::<_, RecorderError>(())
},
async {
let listener = self.setup_cron_due_listening().await?;
let ctx = self.ctx.clone();
Ok::<_, RecorderError>(())
}
} => {},
_ = {
let mut listener = self.setup_cron_due_listening().await?;
let cron_worker_id = self.cron_worker_id.clone();
let retry_duration = chrono::Duration::milliseconds(
self.config.cron_retry_duration.as_millis() as i64,
);
let retry_duration =
chrono::Duration::milliseconds(self.config.cron_retry_duration.as_millis() as i64);
let cron_interval_duration = self.config.cron_interval_duration;
async move {
listener.listen_all([CRON_DUE_EVENT as &str, CRON_DUE_DEBUG_EVENT as &str]).await?;
tokio::task::spawn(async move {
if let Err(e) =
Self::listen_cron_due(listener, ctx, &cron_worker_id, retry_duration).await
{
tracing::error!("Error listening to cron due: {e}");
}
});
Ok::<_, RecorderError>(())
},
async {
let ctx = self.ctx.clone();
let retry_duration = chrono::Duration::milliseconds(
self.config.cron_retry_duration.as_millis() as i64,
);
tokio::task::spawn(async move {
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(60));
loop {
interval.tick().await;
if let Err(e) = cron::Model::check_and_cleanup_expired_cron_locks(
ctx.as_ref(),
retry_duration,
)
.await
tokio::join!(
{
tracing::error!(
"Error checking and cleaning up expired cron locks: {e}"
);
}
if let Err(e) = cron::Model::check_and_trigger_due_crons(ctx.as_ref()).await
let ctx = self.ctx.clone();
async move {
if let Err(e) =
Self::listen_cron_due(listener, ctx, &cron_worker_id, retry_duration)
.await
{
tracing::error!("Error listening to cron due: {e}");
}
}
},
{
tracing::error!("Error checking and triggering due crons: {e}");
}
}
});
let ctx = self.ctx.clone();
let mut interval = tokio::time::interval(cron_interval_duration);
async move {
loop {
interval.tick().await;
if let Err(e) = cron::Model::check_and_cleanup_expired_cron_locks(
ctx.as_ref(),
retry_duration,
)
.await
{
tracing::error!(
"Error checking and cleaning up expired cron locks: {e}"
);
}
if let Err(e) =
cron::Model::check_and_trigger_due_crons(ctx.as_ref()).await
{
tracing::error!("Error checking and triggering due crons: {e}");
}
}
}
}
);
Ok::<_, RecorderError>(())
}
} => {}
};
Ok::<_, RecorderError>(())
}
)?;
Ok(())
}
@@ -267,6 +295,7 @@ impl TaskService {
async fn setup_cron_due_listening(&self) -> RecorderResult<PgListener> {
let pool = self.ctx.db().get_postgres_connection_pool().clone();
let listener = PgListener::connect_with(&pool).await?;
tracing::debug!("Cron due listener connected to postgres");
Ok(listener)
}
@@ -277,20 +306,129 @@ impl TaskService {
worker_id: &str,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
listener.listen(CRON_DUE_EVENT).await?;
loop {
let notification = listener.recv().await?;
if let Err(e) = cron::Model::handle_cron_notification(
ctx.as_ref(),
notification,
worker_id,
retry_duration,
)
.await
if notification.channel() == CRON_DUE_DEBUG_EVENT {
tracing::debug!("Received cron due debug event: {:?}", notification);
continue;
} else if notification.channel() == CRON_DUE_EVENT
&& let Err(e) = cron::Model::handle_cron_notification(
ctx.as_ref(),
notification,
worker_id,
retry_duration,
)
.await
{
tracing::error!("Error handling cron notification: {e}");
}
}
}
}
#[cfg(test)]
#[allow(unused_variables)]
mod tests {
use std::time::Duration;
use chrono::Utc;
use rstest::{fixture, rstest};
use sea_orm::ActiveValue;
use tracing::Level;
use super::*;
use crate::{
models::cron,
task::EchoTask,
test_utils::{
app::{TestingAppContextConfig, TestingPreset},
tracing::try_init_testing_tracing,
},
};
#[fixture]
fn before_each() {
try_init_testing_tracing(Level::DEBUG);
}
#[rstest]
#[tokio::test]
#[tracing_test::traced_test]
async fn test_check_and_trigger_due_crons_with_certain_interval(
before_each: (),
) -> RecorderResult<()> {
let preset = TestingPreset::default_with_config(
TestingAppContextConfig::builder()
.task_config(TaskConfig {
cron_interval_duration: Duration::from_millis(1500),
..Default::default()
})
.build(),
)
.await?;
let app_ctx = preset.app_ctx;
let task_service = app_ctx.task();
let task_id = Uuid::now_v7().to_string();
let echo_cron = cron::ActiveModel {
cron_expr: ActiveValue::Set("*/1 * * * * *".to_string()),
cron_timezone: ActiveValue::Set("Asia/Singapore".to_string()),
system_task_cron: ActiveValue::Set(Some(
EchoTask::builder().task_id(task_id.clone()).build().into(),
)),
..Default::default()
};
task_service.add_system_task_cron(echo_cron).await?;
task_service
.run_with_signal(Some(async move || {
tokio::time::sleep(std::time::Duration::from_secs(2)).await;
}))
.await?;
assert!(logs_contain(&format!(
"EchoTask {task_id} start running at"
)));
Ok(())
}
#[rstest]
#[tokio::test]
#[tracing_test::traced_test]
async fn test_trigger_due_cron_when_mutating(before_each: ()) -> RecorderResult<()> {
let preset = TestingPreset::default().await?;
let app_ctx = preset.app_ctx;
let task_service = app_ctx.task();
let task_id = Uuid::now_v7().to_string();
let echo_cron = cron::ActiveModel {
cron_expr: ActiveValue::Set("* * * */1 * *".to_string()),
cron_timezone: ActiveValue::Set("Asia/Singapore".to_string()),
next_run: ActiveValue::Set(Some(Utc::now() + chrono::Duration::seconds(-10))),
system_task_cron: ActiveValue::Set(Some(
EchoTask::builder().task_id(task_id.clone()).build().into(),
)),
..Default::default()
};
let task_runner = task_service.run_with_signal(Some(async move || {
tokio::time::sleep(std::time::Duration::from_millis(500)).await;
}));
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
task_service.add_system_task_cron(echo_cron).await?;
task_runner.await?;
assert!(logs_contain(&format!(
"EchoTask {task_id} start running at"
)));
Ok(())
}
}
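Outside tests, the same entry points drive the long-running service; a sketch assuming tokio's signal feature is enabled:

// Run until ctrl-c, then let apalis drain via its shutdown signal;
// or call task_service.run().await? to run unconditionally.
task_service
    .run_with_signal(Some(|| async {
        let _ = tokio::signal::ctrl_c().await;
    }))
    .await?;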

View File

@@ -5,11 +5,13 @@ use typed_builder::TypedBuilder;
use crate::{
app::AppContextTrait,
errors::RecorderResult,
task::TaskConfig,
test_utils::{
crypto::build_testing_crypto_service,
database::{TestingDatabaseServiceConfig, build_testing_database_service},
media::build_testing_media_service,
mikan::build_testing_mikan_client,
mikan::{MikanMockServer, build_testing_mikan_client},
storage::build_testing_storage_service,
task::build_testing_task_service,
},
@@ -42,12 +44,11 @@ impl TestingAppContext {
self.task.get_or_init(|| task);
}
pub async fn from_preset(
preset: TestingAppContextPreset,
) -> crate::errors::RecorderResult<Arc<Self>> {
let mikan_client = build_testing_mikan_client(preset.mikan_base_url.clone()).await?;
pub async fn from_config(config: TestingAppContextConfig) -> RecorderResult<Arc<Self>> {
let mikan_base_url = config.mikan_base_url.expect("mikan_base_url is required");
let mikan_client = build_testing_mikan_client(mikan_base_url).await?;
let db_service =
build_testing_database_service(preset.database_config.unwrap_or_default()).await?;
build_testing_database_service(config.database_config.unwrap_or_default()).await?;
let crypto_service = build_testing_crypto_service().await?;
let storage_service = build_testing_storage_service().await?;
let media_service = build_testing_media_service().await?;
@@ -61,7 +62,7 @@ impl TestingAppContext {
.build(),
);
let task_service = build_testing_task_service(app_ctx.clone()).await?;
let task_service = build_testing_task_service(config.task_config, app_ctx.clone()).await?;
app_ctx.set_task(task_service);
@@ -133,7 +134,44 @@ impl AppContextTrait for TestingAppContext {
}
}
pub struct TestingAppContextPreset {
pub mikan_base_url: String,
#[derive(TypedBuilder, Debug)]
#[builder(field_defaults(default, setter(strip_option)))]
pub struct TestingAppContextConfig {
pub mikan_base_url: Option<String>,
pub database_config: Option<TestingDatabaseServiceConfig>,
pub task_config: Option<TaskConfig>,
}
#[derive(TypedBuilder)]
pub struct TestingPreset {
pub mikan_server: MikanMockServer,
pub app_ctx: Arc<dyn AppContextTrait>,
}
impl TestingPreset {
pub async fn default_with_config(config: TestingAppContextConfig) -> RecorderResult<Self> {
let mikan_server = MikanMockServer::new().await?;
let mixed_config = TestingAppContextConfig {
mikan_base_url: Some(mikan_server.base_url().to_string()),
..config
};
let app_ctx = TestingAppContext::from_config(mixed_config).await?;
let preset = Self::builder()
.mikan_server(mikan_server)
.app_ctx(app_ctx)
.build();
Ok(preset)
}
pub async fn default() -> RecorderResult<Self> {
Self::default_with_config(TestingAppContextConfig {
mikan_base_url: None,
database_config: None,
task_config: None,
})
.await
}
}

View File

@@ -3,6 +3,7 @@ use crate::{
errors::RecorderResult,
};
#[derive(Clone, Debug)]
pub struct TestingDatabaseServiceConfig {
pub auto_migrate: bool,
}
@@ -51,7 +52,7 @@ pub async fn build_testing_database_service(
uri: connection_string,
enable_logging: true,
min_connections: 1,
max_connections: 1,
max_connections: 5,
connect_timeout: 5000,
idle_timeout: 10000,
acquire_timeout: None,

View File

@@ -1,5 +1,6 @@
use std::{
collections::HashMap,
fmt::Debug,
ops::{Deref, DerefMut},
path::{self, PathBuf},
};
@@ -148,13 +149,15 @@ impl AsRef<path::Path> for MikanDoppelPath {
}
}
#[cfg(any(test, debug_assertions, feature = "test-utils"))]
lazy_static! {
static ref TEST_RESOURCES_DIR: String =
if cfg!(any(test, debug_assertions, feature = "playground")) {
format!("{}/tests/resources", env!("CARGO_MANIFEST_DIR"))
} else {
"tests/resources".to_string()
};
format!("{}/tests/resources", env!("CARGO_MANIFEST_DIR"));
}
#[cfg(not(any(test, debug_assertions, feature = "test-utils")))]
lazy_static! {
static ref TEST_RESOURCES_DIR: String = "tests/resources".to_string();
}
impl From<Url> for MikanDoppelPath {
@@ -227,6 +230,14 @@ pub struct MikanMockServer {
base_url: Url,
}
impl Debug for MikanMockServer {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("MikanMockServer")
.field("base_url", &self.base_url)
.finish()
}
}
impl MikanMockServer {
pub async fn new_with_port(port: u16) -> RecorderResult<Self> {
let server = mockito::Server::new_with_opts_async(mockito::ServerOpts {

View File

@@ -7,9 +7,11 @@ use crate::{
};
pub async fn build_testing_task_service(
config: Option<TaskConfig>,
ctx: Arc<dyn AppContextTrait>,
) -> RecorderResult<TaskService> {
let config = TaskConfig::default();
let config = config.unwrap_or_default();
let task_service = TaskService::from_config_and_ctx(config, ctx).await?;
Ok(task_service)
}

View File

@@ -110,7 +110,7 @@ fn make_request_id(maybe_request_id: Option<HeaderValue>) -> String {
});
id.filter(|s| !s.is_empty())
})
.unwrap_or_else(|| Uuid::new_v4().to_string())
.unwrap_or_else(|| Uuid::now_v7().to_string())
}
#[cfg(test)]

View File

@@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"rootDir": ".",
"composite": true,
"module": "ESNext",
"moduleResolution": "bundler"
},
"include": ["bindings"]
}

View File

@@ -12,6 +12,13 @@ const config: CodegenConfig = {
},
config: {
enumsAsConst: true,
useTypeImports: true,
scalars: {
SubscriberTaskType: {
input: 'recorder/bindings/SubscriberTaskInput#SubscriberTaskInput',
output: 'recorder/bindings/SubscriberTaskType#SubscriberTaskType',
},
},
},
},
},

View File

@@ -17,6 +17,8 @@
"@corvu/drawer": "^0.2.4",
"@corvu/otp-field": "^0.1.4",
"@corvu/resizable": "^0.2.5",
"@datasert/cronjs-matcher": "^1.4.0",
"@datasert/cronjs-parser": "^1.4.0",
"@graphiql/toolkit": "^0.11.3",
"@hookform/resolvers": "^5.1.1",
"@outposts/injection-js": "^2.5.1",
@@ -49,6 +51,7 @@
"@rsbuild/plugin-react": "^1.3.2",
"@tanstack/react-form": "^1.12.3",
"@tanstack/react-query": "^5.80.7",
"@tanstack/react-router": "^1.121.2",
"@tanstack/react-table": "^8.21.3",
"@tanstack/store": "^0.7.1",
"arktype": "^2.1.20",
@@ -70,14 +73,15 @@
"react-dom": "^19.1.0",
"react-resizable-panels": "^3.0.2",
"recharts": "^2.15.3",
"recorder": "workspace:*",
"rxjs": "^7.8.2",
"sonner": "^2.0.5",
"tailwind-merge": "^3.3.1",
"tailwind-scrollbar": "^4.0.2",
"tailwindcss": "^4.1.10",
"tw-animate-css": "^1.3.4",
"type-fest": "^4.41.0",
"vaul": "^1.1.2",
"@tanstack/react-router": "^1.121.2"
"vaul": "^1.1.2"
},
"devDependencies": {
"@graphql-codegen/cli": "^5.0.7",
@@ -86,13 +90,14 @@
"@graphql-typed-document-node/core": "^3.2.0",
"@parcel/watcher": "^2.5.1",
"@rsbuild/core": "^1.3.22",
"@rsbuild/plugin-type-check": "^1.2.3",
"@tailwindcss/postcss": "^4.1.10",
"@tanstack/router-devtools": "^1.121.5",
"@tanstack/router-plugin": "^1.121.4",
"@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6",
"chalk": "^5.4.1",
"commander": "^14.0.0",
"postcss": "^8.5.5",
"@tanstack/router-devtools": "^1.121.5",
"@tanstack/router-plugin": "^1.121.4"
"postcss": "^8.5.5"
}
}

View File

@@ -1,13 +1,26 @@
import { defineConfig } from '@rsbuild/core';
import { pluginReact } from '@rsbuild/plugin-react';
import { pluginTypeCheck } from '@rsbuild/plugin-type-check';
import { TanStackRouterRspack } from '@tanstack/router-plugin/rspack';
const TS_NO_CHECK_REGEX =
/[\\/]node_modules[\\/]|[\\/]gql[\\/]|[\\/]components[\\/]ui[\\/]/;
export default defineConfig({
html: {
title: 'Konobangu',
favicon: './public/assets/favicon.ico',
},
plugins: [pluginReact()],
plugins: [
pluginReact(),
pluginTypeCheck({
tsCheckerOptions: {
issue: {
exclude: [({ file = '' }) => TS_NO_CHECK_REGEX.test(file)],
},
},
}),
],
tools: {
rspack: {
plugins: [

View File

@@ -145,3 +145,5 @@
cursor: pointer;
}
}
@plugin "tailwind-scrollbar";

View File

@@ -1,4 +1,3 @@
import type { NavMainGroup } from '@/infra/routes/nav';
import {
BookOpen,
Folders,
@@ -9,6 +8,7 @@ import {
Telescope,
Tv,
} from 'lucide-react';
import type { NavMainGroup } from '@/infra/routes/nav';
export const AppNavMainData: NavMainGroup[] = [
{
@@ -49,13 +49,13 @@ export const AppNavMainData: NavMainGroup[] = [
{
title: 'Manage',
link: {
to: '/bangumi/recorder',
to: '/bangumi',
},
},
{
title: 'Feed',
link: {
to: '/bangumi/feed',
to: '/bangumi',
},
},
],
@@ -65,11 +65,17 @@ export const AppNavMainData: NavMainGroup[] = [
icon: ListTodo,
children: [
{
title: 'Manage',
title: 'Tasks',
link: {
to: '/tasks/manage',
},
},
{
title: 'Crons',
link: {
to: '/tasks/cron/manage',
},
},
],
},
{

Some files were not shown because too many files have changed in this diff Show More