Compare commits

...

16 Commits

Author SHA1 Message Date
94919878ea fix: fix issues 2025-07-02 01:33:32 +08:00
81bf27ed28 fix: fix 2025-07-08 00:54:34 +08:00
5be5b9f634 fix: fix cron builder 2025-07-07 01:34:56 +08:00
6cdd8c27ce fix: fix typos 2025-07-06 05:05:07 +08:00
4174cea728 fix: fix cron webui 2025-07-06 02:35:55 +08:00
3aad31a36b feat: more cron webui 2025-07-05 04:08:56 +08:00
004fed9b2e feat: init cron webui 2025-07-05 02:08:55 +08:00
a1c2eeded1 temp save 2025-07-04 05:59:56 +08:00
147df00155 build: add prod build 2025-07-04 05:06:45 +08:00
5155c59293 fix: fix migrations 2025-07-04 01:25:07 +08:00
b5b3c77ba3 fix: fix migrations 2025-07-03 04:25:50 +08:00
1d0aa8d7f1 feat: support system tasks 2025-07-03 03:48:23 +08:00
5b001f9584 refactor: refactor graphql 2025-07-02 01:25:44 +08:00
d06acde882 fix: temp save 2025-07-01 03:45:56 +08:00
bacfe99ef2 fix: fix issues 2025-06-30 02:05:23 +08:00
b4090e74c0 fix: fix webui compability 2025-06-29 02:05:44 +08:00
152 changed files with 8918 additions and 3403 deletions

View File

@@ -41,12 +41,4 @@
], ],
"rust-analyzer.cargo.features": "all", "rust-analyzer.cargo.features": "all",
"rust-analyzer.testExplorer": true "rust-analyzer.testExplorer": true
// https://github.com/rust-lang/rust/issues/141540
// "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
// "rust-analyzer.check.extraEnv": {
// "CARGO_TARGET_DIR": "target/rust-analyzer"
// },
// "rust-analyzer.cargo.extraEnv": {
// "CARGO_TARGET_DIR": "target/analyzer"
// }
} }

245
Cargo.lock generated
View File

@@ -356,9 +356,9 @@ dependencies = [
[[package]] [[package]]
name = "async-channel" name = "async-channel"
version = "2.3.1" version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" checksum = "16c74e56284d2188cabb6ad99603d1ace887a5d7e7b695d01b728155ed9ed427"
dependencies = [ dependencies = [
"concurrent-queue", "concurrent-queue",
"event-listener-strategy", "event-listener-strategy",
@@ -404,7 +404,7 @@ dependencies = [
"futures-util", "futures-util",
"handlebars", "handlebars",
"http", "http",
"indexmap 2.9.0", "indexmap 2.10.0",
"lru", "lru",
"mime", "mime",
"multer", "multer",
@@ -474,7 +474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ecdaff7c9cffa3614a9f9999bf9ee4c3078fe3ce4d6a6e161736b56febf2de" checksum = "34ecdaff7c9cffa3614a9f9999bf9ee4c3078fe3ce4d6a6e161736b56febf2de"
dependencies = [ dependencies = [
"bytes", "bytes",
"indexmap 2.9.0", "indexmap 2.10.0",
"serde", "serde",
"serde_json", "serde_json",
] ]
@@ -551,7 +551,7 @@ dependencies = [
"derive_builder", "derive_builder",
"diligent-date-parser", "diligent-date-parser",
"never", "never",
"quick-xml", "quick-xml 0.37.5",
"serde", "serde",
] ]
@@ -592,9 +592,9 @@ dependencies = [
[[package]] [[package]]
name = "avif-serialize" name = "avif-serialize"
version = "0.8.3" version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98922d6a4cfbcb08820c69d8eeccc05bb1f29bfa06b4f5b1dbfe9a868bd7608e" checksum = "19135c0c7a60bfee564dbe44ab5ce0557c6bf3884e5291a50be76a15640c4fbd"
dependencies = [ dependencies = [
"arrayvec", "arrayvec",
] ]
@@ -1009,9 +1009,9 @@ checksum = "56ed6191a7e78c36abdb16ab65341eefd73d64d303fffccdbb00d51e4205967b"
[[package]] [[package]]
name = "bumpalo" name = "bumpalo"
version = "3.18.1" version = "3.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db76d6187cd04dff33004d8e6c9cc4e05cd330500379d2394209271b4aeee" checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
[[package]] [[package]]
name = "bytecheck" name = "bytecheck"
@@ -1260,9 +1260,9 @@ dependencies = [
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.5.40" version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f" checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
"clap_derive", "clap_derive",
@@ -1270,9 +1270,9 @@ dependencies = [
[[package]] [[package]]
name = "clap_builder" name = "clap_builder"
version = "4.5.40" version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e" checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
dependencies = [ dependencies = [
"anstream", "anstream",
"anstyle", "anstyle",
@@ -1282,9 +1282,9 @@ dependencies = [
[[package]] [[package]]
name = "clap_derive" name = "clap_derive"
version = "4.5.40" version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce" checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491"
dependencies = [ dependencies = [
"heck 0.5.0", "heck 0.5.0",
"proc-macro2", "proc-macro2",
@@ -1672,9 +1672,9 @@ dependencies = [
[[package]] [[package]]
name = "crunchy" name = "crunchy"
version = "0.2.3" version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]] [[package]]
name = "crypto-bigint" name = "crypto-bigint"
@@ -1922,6 +1922,17 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "derivative"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]] [[package]]
name = "derive_builder" name = "derive_builder"
version = "0.20.2" version = "0.20.2"
@@ -2332,11 +2343,12 @@ checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
[[package]] [[package]]
name = "fancy-regex" name = "fancy-regex"
version = "0.14.0" version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298" checksum = "d6215aee357f8c7c989ebb4b8466ca4d7dc93b3957039f2fc3ea2ade8ea5f279"
dependencies = [ dependencies = [
"bit-set", "bit-set",
"derivative",
"regex-automata 0.4.9", "regex-automata 0.4.9",
"regex-syntax 0.8.5", "regex-syntax 0.8.5",
] ]
@@ -2781,9 +2793,9 @@ dependencies = [
[[package]] [[package]]
name = "gif" name = "gif"
version = "0.13.2" version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc37f9a2bfe731e69f1e08d29d91d30604b9ce24bcb2880a961e82d89c6ed89" checksum = "4ae047235e33e2829703574b54fdec96bfbad892062d97fed2f76022287de61b"
dependencies = [ dependencies = [
"color_quant", "color_quant",
"weezl", "weezl",
@@ -2873,9 +2885,9 @@ dependencies = [
[[package]] [[package]]
name = "h2" name = "h2"
version = "0.4.10" version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9421a676d1b147b16b82c9225157dc629087ef8ec4d5e2960f9437a90dac0a5" checksum = "17da50a276f1e01e0ba6c029e47b7100754904ee8a278f886546e98575380785"
dependencies = [ dependencies = [
"atomic-waker", "atomic-waker",
"bytes", "bytes",
@@ -2883,7 +2895,7 @@ dependencies = [
"futures-core", "futures-core",
"futures-sink", "futures-sink",
"http", "http",
"indexmap 2.9.0", "indexmap 2.10.0",
"slab", "slab",
"tokio", "tokio",
"tokio-util", "tokio-util",
@@ -3847,9 +3859,9 @@ dependencies = [
[[package]] [[package]]
name = "indexmap" name = "indexmap"
version = "2.9.0" version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
dependencies = [ dependencies = [
"equivalent", "equivalent",
"hashbrown 0.15.4", "hashbrown 0.15.4",
@@ -3967,6 +3979,17 @@ dependencies = [
"smallvec", "smallvec",
] ]
[[package]]
name = "io-uring"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013"
dependencies = [
"bitflags 2.9.1",
"cfg-if",
"libc",
]
[[package]] [[package]]
name = "ipnet" name = "ipnet"
version = "2.11.0" version = "2.11.0"
@@ -4158,9 +4181,9 @@ checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
[[package]] [[package]]
name = "libfuzzer-sys" name = "libfuzzer-sys"
version = "0.4.9" version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf78f52d400cf2d84a3a973a78a592b4adc535739e0a5597a0da6f0c357adc75" checksum = "5037190e1f70cbeef565bd267599242926f724d3b8a9f510fd7e0b540cfa4404"
dependencies = [ dependencies = [
"arbitrary", "arbitrary",
"cc", "cc",
@@ -4174,9 +4197,9 @@ checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
[[package]] [[package]]
name = "libredox" name = "libredox"
version = "0.1.3" version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" checksum = "1580801010e535496706ba011c15f8532df6b42297d2e471fec38ceadd8c0638"
dependencies = [ dependencies = [
"bitflags 2.9.1", "bitflags 2.9.1",
"libc", "libc",
@@ -4308,7 +4331,7 @@ dependencies = [
"dashmap 6.1.0", "dashmap 6.1.0",
"futures", "futures",
"hex 0.4.3", "hex 0.4.3",
"indexmap 2.9.0", "indexmap 2.10.0",
"leaky-bucket", "leaky-bucket",
"librqbit-bencode", "librqbit-bencode",
"librqbit-clone-to-owned", "librqbit-clone-to-owned",
@@ -4383,7 +4406,7 @@ dependencies = [
"futures", "futures",
"httparse", "httparse",
"network-interface", "network-interface",
"quick-xml", "quick-xml 0.37.5",
"reqwest", "reqwest",
"serde", "serde",
"tokio", "tokio",
@@ -4423,9 +4446,9 @@ dependencies = [
[[package]] [[package]]
name = "lightningcss" name = "lightningcss"
version = "1.0.0-alpha.66" version = "1.0.0-alpha.67"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a73ffa17de66534e4b527232f44aa0a89fad22c4f4e0735f9be35494f058e54" checksum = "798fba4e1205eed356b8ed7754cc3f7f04914e27855ca641409f4a532e992149"
dependencies = [ dependencies = [
"ahash 0.8.12", "ahash 0.8.12",
"bitflags 2.9.1", "bitflags 2.9.1",
@@ -4435,7 +4458,7 @@ dependencies = [
"dashmap 5.5.3", "dashmap 5.5.3",
"data-encoding", "data-encoding",
"getrandom 0.2.16", "getrandom 0.2.16",
"indexmap 2.9.0", "indexmap 2.10.0",
"itertools 0.10.5", "itertools 0.10.5",
"lazy_static", "lazy_static",
"lightningcss-derive", "lightningcss-derive",
@@ -4835,15 +4858,6 @@ dependencies = [
"version_check", "version_check",
] ]
[[package]]
name = "nanoid"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ffa00dec017b5b1a8b7cf5e2c008bfda1aa7e0697ac1508b491fdf2622fb4d8"
dependencies = [
"rand 0.8.5",
]
[[package]] [[package]]
name = "native-tls" name = "native-tls"
version = "0.2.14" version = "0.2.14"
@@ -5164,7 +5178,7 @@ dependencies = [
"itertools 0.14.0", "itertools 0.14.0",
"parking_lot 0.12.4", "parking_lot 0.12.4",
"percent-encoding", "percent-encoding",
"quick-xml", "quick-xml 0.37.5",
"rand 0.9.1", "rand 0.9.1",
"reqwest", "reqwest",
"ring", "ring",
@@ -5217,7 +5231,7 @@ dependencies = [
"log", "log",
"md-5", "md-5",
"percent-encoding", "percent-encoding",
"quick-xml", "quick-xml 0.37.5",
"reqwest", "reqwest",
"serde", "serde",
"serde_json", "serde_json",
@@ -5362,9 +5376,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]] [[package]]
name = "owo-colors" name = "owo-colors"
version = "4.2.1" version = "4.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26995317201fa17f3656c36716aed4a7c81743a9634ac4c99c0eeda495db0cec" checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e"
[[package]] [[package]]
name = "p256" name = "p256"
@@ -5858,7 +5872,7 @@ dependencies = [
"either", "either",
"hashbrown 0.14.5", "hashbrown 0.14.5",
"hashbrown 0.15.4", "hashbrown 0.15.4",
"indexmap 2.9.0", "indexmap 2.10.0",
"itoa", "itoa",
"num-traits", "num-traits",
"polars-arrow", "polars-arrow",
@@ -6019,7 +6033,7 @@ dependencies = [
"either", "either",
"hashbrown 0.15.4", "hashbrown 0.15.4",
"hex 0.4.3", "hex 0.4.3",
"indexmap 2.9.0", "indexmap 2.10.0",
"libm", "libm",
"memchr", "memchr",
"num-traits", "num-traits",
@@ -6128,7 +6142,7 @@ version = "0.49.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ada7c7e2fbbeffbdd67628cd8a89f02b0a8d21c71d34e297e2463a7c17575203" checksum = "ada7c7e2fbbeffbdd67628cd8a89f02b0a8d21c71d34e297e2463a7c17575203"
dependencies = [ dependencies = [
"indexmap 2.9.0", "indexmap 2.10.0",
"polars-error", "polars-error",
"polars-utils", "polars-utils",
"serde", "serde",
@@ -6229,7 +6243,7 @@ dependencies = [
"flate2", "flate2",
"foldhash", "foldhash",
"hashbrown 0.15.4", "hashbrown 0.15.4",
"indexmap 2.9.0", "indexmap 2.10.0",
"libc", "libc",
"memmap2 0.9.5", "memmap2 0.9.5",
"num-traits", "num-traits",
@@ -6503,6 +6517,16 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "quick-xml"
version = "0.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8927b0664f5c5a98265138b7e3f90aa19a6b21353182469ace36d4ac527b7b1b"
dependencies = [
"memchr",
"serde",
]
[[package]] [[package]]
name = "quinn" name = "quinn"
version = "0.11.8" version = "0.11.8"
@@ -6755,6 +6779,7 @@ dependencies = [
"base64 0.22.1", "base64 0.22.1",
"bytes", "bytes",
"chrono", "chrono",
"chrono-tz 0.10.3",
"clap", "clap",
"cocoon", "cocoon",
"color-eyre", "color-eyre",
@@ -6786,16 +6811,16 @@ dependencies = [
"mime_guess", "mime_guess",
"mockito", "mockito",
"moka", "moka",
"nanoid",
"nom 8.0.0", "nom 8.0.0",
"num-traits", "num-traits",
"num_cpus", "num_cpus",
"once_cell", "once_cell",
"opendal", "opendal",
"openidconnect", "openidconnect",
"paste",
"percent-encoding", "percent-encoding",
"polars", "polars",
"quick-xml", "quick-xml 0.38.0",
"quirks_path", "quirks_path",
"rand 0.9.1", "rand 0.9.1",
"regex", "regex",
@@ -6807,6 +6832,7 @@ dependencies = [
"sea-orm", "sea-orm",
"sea-orm-migration", "sea-orm-migration",
"seaography", "seaography",
"secrecy",
"serde", "serde",
"serde_json", "serde_json",
"serde_variant", "serde_variant",
@@ -6823,6 +6849,7 @@ dependencies = [
"tracing", "tracing",
"tracing-appender", "tracing-appender",
"tracing-subscriber", "tracing-subscriber",
"tracing-test",
"tracing-tree", "tracing-tree",
"ts-rs", "ts-rs",
"typed-builder 0.21.0", "typed-builder 0.21.0",
@@ -6984,9 +7011,9 @@ dependencies = [
[[package]] [[package]]
name = "reqwest" name = "reqwest"
version = "0.12.20" version = "0.12.22"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eabf4c97d9130e2bf606614eb937e86edac8292eaa6f422f995d7e8de1eb1813" checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
dependencies = [ dependencies = [
"base64 0.22.1", "base64 0.22.1",
"bytes", "bytes",
@@ -7228,7 +7255,7 @@ dependencies = [
"atom_syndication", "atom_syndication",
"derive_builder", "derive_builder",
"never", "never",
"quick-xml", "quick-xml 0.37.5",
"serde", "serde",
] ]
@@ -7438,6 +7465,18 @@ dependencies = [
"serde_json", "serde_json",
] ]
[[package]]
name = "schemars"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1375ba8ef45a6f15d83fa8748f1079428295d403d6ea991d09ab100155fbc06d"
dependencies = [
"dyn-clone",
"ref-cast",
"serde",
"serde_json",
]
[[package]] [[package]]
name = "scoped-tls" name = "scoped-tls"
version = "1.0.1" version = "1.0.1"
@@ -7486,9 +7525,9 @@ dependencies = [
[[package]] [[package]]
name = "sea-orm" name = "sea-orm"
version = "1.1.12" version = "1.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18b7272b88bd608cd846de24f41b74a0315a135fe761b0aed4ec1ce6a6327a93" checksum = "560ea59f07472886a236e7919b9425cf16914fee1d663d3c32f1af2e922b83f0"
dependencies = [ dependencies = [
"async-stream", "async-stream",
"async-trait", "async-trait",
@@ -7515,9 +7554,9 @@ dependencies = [
[[package]] [[package]]
name = "sea-orm-cli" name = "sea-orm-cli"
version = "1.1.12" version = "1.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a4961b0d9098a9dc992d6e75fb761f9e5c442bb46746eeffa08e47b53759fce" checksum = "00dd755ba3faca11692d8aaca46b68f1b4955c5dfdd6a3f1f9fba3a679a3ec1d"
dependencies = [ dependencies = [
"chrono", "chrono",
"clap", "clap",
@@ -7533,9 +7572,9 @@ dependencies = [
[[package]] [[package]]
name = "sea-orm-macros" name = "sea-orm-macros"
version = "1.1.12" version = "1.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c38255a6b2e6d1ae2d5df35696507a345f03c036ae32caeb0a3b922dbab610d" checksum = "70d0ea50bb4317c8a58ed34dc410a79d685128e7b77ddcd9e8b59ae6416a56d9"
dependencies = [ dependencies = [
"heck 0.5.0", "heck 0.5.0",
"proc-macro-crate", "proc-macro-crate",
@@ -7548,9 +7587,9 @@ dependencies = [
[[package]] [[package]]
name = "sea-orm-migration" name = "sea-orm-migration"
version = "1.1.12" version = "1.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82f58c3b1dcf6c137f08394f0228f9baf1574a2a799e93dc5da3cd9228bef9c5" checksum = "3e06e0f3ca090091ad58da2bc02cdb63f9afbd276baf029f065f6ff09e79cbe9"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"clap", "clap",
@@ -7643,16 +7682,16 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]] [[package]]
name = "seaography" name = "seaography"
version = "1.1.4" version = "1.1.4"
source = "git+https://github.com/dumtruck/seaography.git?rev=a787c3a#a787c3ab83cf1f8275894e1bc1ca3c766b54674b" source = "git+https://github.com/dumtruck/seaography.git?rev=292cdd2#292cdd248217fdcf81c41aa97fe1c047c9b5f4de"
dependencies = [ dependencies = [
"async-graphql", "async-graphql",
"fnv", "fnv",
"heck 0.4.1", "heck 0.5.0",
"itertools 0.12.1", "itertools 0.14.0",
"lazy_static", "lazy_static",
"sea-orm", "sea-orm",
"serde_json", "serde_json",
"thiserror 1.0.69", "thiserror 2.0.12",
] ]
[[package]] [[package]]
@@ -7669,6 +7708,16 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "secrecy"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a"
dependencies = [
"serde",
"zeroize",
]
[[package]] [[package]]
name = "security-framework" name = "security-framework"
version = "2.11.1" version = "2.11.1"
@@ -7834,16 +7883,17 @@ dependencies = [
[[package]] [[package]]
name = "serde_with" name = "serde_with"
version = "3.13.0" version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf65a400f8f66fb7b0552869ad70157166676db75ed8181f8104ea91cf9d0b42" checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5"
dependencies = [ dependencies = [
"base64 0.22.1", "base64 0.22.1",
"chrono", "chrono",
"hex 0.4.3", "hex 0.4.3",
"indexmap 1.9.3", "indexmap 1.9.3",
"indexmap 2.9.0", "indexmap 2.10.0",
"schemars", "schemars 0.9.0",
"schemars 1.0.3",
"serde", "serde",
"serde_derive", "serde_derive",
"serde_json", "serde_json",
@@ -7853,9 +7903,9 @@ dependencies = [
[[package]] [[package]]
name = "serde_with_macros" name = "serde_with_macros"
version = "3.13.0" version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81679d9ed988d5e9a5e6531dc3f2c28efbd639cbd1dfb628df08edea6004da77" checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f"
dependencies = [ dependencies = [
"darling", "darling",
"proc-macro2", "proc-macro2",
@@ -7869,7 +7919,7 @@ version = "0.9.34+deprecated"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
dependencies = [ dependencies = [
"indexmap 2.9.0", "indexmap 2.10.0",
"itoa", "itoa",
"ryu", "ryu",
"serde", "serde",
@@ -8215,7 +8265,7 @@ dependencies = [
"futures-util", "futures-util",
"hashbrown 0.15.4", "hashbrown 0.15.4",
"hashlink", "hashlink",
"indexmap 2.9.0", "indexmap 2.10.0",
"log", "log",
"memchr", "memchr",
"once_cell", "once_cell",
@@ -8880,17 +8930,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.45.1" version = "1.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779" checksum = "1140bb80481756a8cbe10541f37433b459c5aa1e727b4c020fbfebdc25bf3ec4"
dependencies = [ dependencies = [
"backtrace", "backtrace",
"bytes", "bytes",
"io-uring",
"libc", "libc",
"mio 1.0.4", "mio 1.0.4",
"parking_lot 0.12.4", "parking_lot 0.12.4",
"pin-project-lite", "pin-project-lite",
"signal-hook-registry", "signal-hook-registry",
"slab",
"socket2", "socket2",
"tokio-macros", "tokio-macros",
"windows-sys 0.52.0", "windows-sys 0.52.0",
@@ -9028,7 +9080,7 @@ version = "0.22.27"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
dependencies = [ dependencies = [
"indexmap 2.9.0", "indexmap 2.10.0",
"serde", "serde",
"serde_spanned", "serde_spanned",
"toml_datetime", "toml_datetime",
@@ -9206,6 +9258,27 @@ dependencies = [
"tracing-serde", "tracing-serde",
] ]
[[package]]
name = "tracing-test"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68"
dependencies = [
"tracing-core",
"tracing-subscriber",
"tracing-test-macro",
]
[[package]]
name = "tracing-test-macro"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568"
dependencies = [
"quote",
"syn 2.0.104",
]
[[package]] [[package]]
name = "tracing-tree" name = "tracing-tree"
version = "0.4.0" version = "0.4.0"
@@ -9912,9 +9985,9 @@ dependencies = [
[[package]] [[package]]
name = "windows-registry" name = "windows-registry"
version = "0.5.2" version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3bab093bdd303a1240bb99b8aba8ea8a69ee19d34c9e2ef9594e708a4878820" checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e"
dependencies = [ dependencies = [
"windows-link", "windows-link",
"windows-result", "windows-result",
@@ -10213,9 +10286,9 @@ dependencies = [
[[package]] [[package]]
name = "xattr" name = "xattr"
version = "1.5.0" version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e" checksum = "af3a19837351dc82ba89f8a125e22a3c475f05aba604acc023d62b2739ae2909"
dependencies = [ dependencies = [
"libc", "libc",
"rustix 1.0.7", "rustix 1.0.7",
@@ -10388,9 +10461,9 @@ dependencies = [
[[package]] [[package]]
name = "zune-jpeg" name = "zune-jpeg"
version = "0.4.18" version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7384255a918371b5af158218d131530f694de9ad3815ebdd0453a940485cb0fa" checksum = "2c9e525af0a6a658e031e95f14b7f889976b74a11ba0eca5a5fc9ac8a1c43a6a"
dependencies = [ dependencies = [
"zune-core", "zune-core",
] ]

View File

@@ -13,9 +13,6 @@ members = [
resolver = "2" resolver = "2"
[profile.dev] [profile.dev]
debug = 0
# https://github.com/rust-lang/rust/issues/141540
incremental = false
# [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171) # [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
# codegen-backend = "cranelift" # codegen-backend = "cranelift"
@@ -34,22 +31,22 @@ reqwest = { version = "0.12.20", features = [
"macos-system-configuration", "macos-system-configuration",
"cookies", "cookies",
] } ] }
moka = "0.12" moka = "0.12.10"
futures = "0.3" futures = "0.3.31"
quirks_path = "0.1" quirks_path = "0.1.1"
snafu = { version = "0.8", features = ["futures"] } snafu = { version = "0.8.0", features = ["futures"] }
testcontainers = { version = "0.24" } testcontainers = { version = "0.24.0" }
testcontainers-modules = { version = "0.12.1" } testcontainers-modules = { version = "0.12.1" }
testcontainers-ext = { version = "0.1.0", features = ["tracing"] } testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
serde = { version = "1", features = ["derive"] } serde = { version = "1.0.219", features = ["derive"] }
tokio = { version = "1.45.1", features = [ tokio = { version = "1.46", features = [
"macros", "macros",
"fs", "fs",
"rt-multi-thread", "rt-multi-thread",
"signal", "signal",
] } ] }
serde_json = "1" serde_json = "1.0.140"
async-trait = "0.1" async-trait = "0.1.88"
tracing = "0.1" tracing = "0.1"
url = "2.5.2" url = "2.5.2"
anyhow = "1" anyhow = "1"
@@ -67,7 +64,7 @@ convert_case = "0.8"
color-eyre = "0.6.5" color-eyre = "0.6.5"
inquire = "0.7.5" inquire = "0.7.5"
image = "0.25.6" image = "0.25.6"
uuid = { version = "1.6.0", features = ["v4"] } uuid = { version = "1.6.0", features = ["v7"] }
maplit = "1.0.2" maplit = "1.0.2"
once_cell = "1.20.2" once_cell = "1.20.2"
rand = "0.9.1" rand = "0.9.1"
@@ -80,11 +77,12 @@ http = "1.2.0"
async-stream = "0.3.6" async-stream = "0.3.6"
serde_variant = "0.1.3" serde_variant = "0.1.3"
tracing-appender = "0.2.3" tracing-appender = "0.2.3"
clap = "4.5.40" clap = "4.5.41"
ipnetwork = "0.21.1" ipnetwork = "0.21.1"
typed-builder = "0.21.0" typed-builder = "0.21.0"
nanoid = "0.4.0" nanoid = "0.4.0"
webp = "0.3.0" webp = "0.3.0"
[patch.crates-io] [patch.crates-io]
seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" } seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "292cdd2" }

View File

@@ -0,0 +1,8 @@
```x-forwarded.json
{
"X-Forwarded-Host": "konobangu.com",
"X-Forwarded-Proto": "https"
}
```
^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/$1

View File

@@ -1 +1 @@
{"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""} {"filesOrder":["konobangu","konobangu-prod","mikan-doppel"],"selectedList":["mikan-doppel","konobangu"],"disabledDefalutRules":true,"defalutRules":""}

View File

@@ -13,7 +13,7 @@ name = "mikan_doppel"
path = "src/bin/mikan_doppel.rs" path = "src/bin/mikan_doppel.rs"
[dependencies] [dependencies]
recorder = { workspace = true } recorder = { workspace = true, features = ["playground"] }
tokio = { workspace = true } tokio = { workspace = true }
tracing-subscriber = { workspace = true } tracing-subscriber = { workspace = true }
tracing = { workspace = true } tracing = { workspace = true }

View File

@@ -6,7 +6,7 @@ edition = "2024"
[features] [features]
default = ["jxl"] default = ["jxl"]
playground = ["dep:inquire", "dep:color-eyre", "dep:polars"] playground = ["dep:inquire", "dep:color-eyre", "dep:polars", "test-utils"]
testcontainers = [ testcontainers = [
"dep:testcontainers", "dep:testcontainers",
"dep:testcontainers-modules", "dep:testcontainers-modules",
@@ -15,6 +15,7 @@ testcontainers = [
"testcontainers-modules/postgres", "testcontainers-modules/postgres",
] ]
jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"] jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
test-utils = []
[lib] [lib]
name = "recorder" name = "recorder"
@@ -96,7 +97,6 @@ tracing-appender = { workspace = true }
clap = { workspace = true } clap = { workspace = true }
ipnetwork = { workspace = true } ipnetwork = { workspace = true }
typed-builder = { workspace = true } typed-builder = { workspace = true }
nanoid = { workspace = true }
webp = { workspace = true } webp = { workspace = true }
sea-orm = { version = "1.1", features = [ sea-orm = { version = "1.1", features = [
@@ -109,7 +109,7 @@ sea-orm = { version = "1.1", features = [
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] } figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] } sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
rss = { version = "2", features = ["builders", "with-serde"] } rss = { version = "2", features = ["builders", "with-serde"] }
fancy-regex = "0.14" fancy-regex = "0.15"
lightningcss = "1.0.0-alpha.66" lightningcss = "1.0.0-alpha.66"
html-escape = "0.2.13" html-escape = "0.2.13"
opendal = { version = "0.53", features = ["default", "services-fs"] } opendal = { version = "0.53", features = ["default", "services-fs"] }
@@ -125,6 +125,7 @@ seaography = { version = "1.1", features = [
"with-bigdecimal", "with-bigdecimal",
"with-postgres-array", "with-postgres-array",
"with-json-as-scalar", "with-json-as-scalar",
"with-custom-as-json",
] } ] }
tower = { version = "0.5.2", features = ["util"] } tower = { version = "0.5.2", features = ["util"] }
tower-http = { version = "0.6", features = [ tower-http = { version = "0.6", features = [
@@ -159,18 +160,22 @@ polars = { version = "0.49.1", features = [
"lazy", "lazy",
"diagonal_concat", "diagonal_concat",
], optional = true } ], optional = true }
quick-xml = { version = "0.37.5", features = [ quick-xml = { version = "0.38", features = [
"serialize", "serialize",
"serde-types", "serde-types",
"serde", "serde",
] } ] }
croner = "2.2.0" croner = "2.2.0"
ts-rs = "11.0.1" ts-rs = "11.0.1"
secrecy = { version = "0.10.3", features = ["serde"] }
paste = "1.0.15"
chrono-tz = "0.10.3"
[dev-dependencies] [dev-dependencies]
inquire = { workspace = true } inquire = { workspace = true }
color-eyre = { workspace = true } color-eyre = { workspace = true }
serial_test = "3" serial_test = "3"
insta = { version = "1", features = ["redactions", "toml", "filters"] } insta = { version = "1", features = ["redactions", "toml", "filters"] }
rstest = "0.25"
ctor = "0.4.0" ctor = "0.4.0"
tracing-test = "0.2.5"
rstest = "0.25"

View File

@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncOneSubscriptionFeedsFullTaskInput } from "./SyncOneSubscriptionFeedsFullTaskInput";
import type { SyncOneSubscriptionFeedsIncrementalTaskInput } from "./SyncOneSubscriptionFeedsIncrementalTaskInput";
import type { SyncOneSubscriptionSourcesTaskInput } from "./SyncOneSubscriptionSourcesTaskInput";
export type SubscriberTaskInput = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTaskInput | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTaskInput | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTaskInput;

View File

@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncOneSubscriptionFeedsFullTask } from "./SyncOneSubscriptionFeedsFullTask";
import type { SyncOneSubscriptionFeedsIncrementalTask } from "./SyncOneSubscriptionFeedsIncrementalTask";
import type { SyncOneSubscriptionSourcesTask } from "./SyncOneSubscriptionSourcesTask";
export type SubscriberTaskType = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTask | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTask | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTask;

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsFullTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsFullTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsIncrementalTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionFeedsIncrementalTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionSourcesTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SyncOneSubscriptionSourcesTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };

View File

@@ -0,0 +1,6 @@
{
"name": "recorder",
"version": "0.0.1",
"private": true,
"type": "module"
}

View File

@@ -131,11 +131,12 @@ impl AppBuilder {
} }
pub fn working_dir_from_manifest_dir(self) -> Self { pub fn working_dir_from_manifest_dir(self) -> Self {
let manifest_dir = if cfg!(debug_assertions) || cfg!(test) || cfg!(feature = "playground") { #[cfg(any(test, debug_assertions, feature = "test-utils"))]
env!("CARGO_MANIFEST_DIR") let manifest_dir = env!("CARGO_MANIFEST_DIR");
} else {
"./apps/recorder" #[cfg(not(any(test, debug_assertions, feature = "test-utils")))]
}; let manifest_dir = "./apps/recorder";
self.working_dir(manifest_dir.to_string()) self.working_dir(manifest_dir.to_string())
} }
} }

View File

@@ -107,7 +107,7 @@ impl App {
Ok::<(), RecorderError>(()) Ok::<(), RecorderError>(())
}, },
async { async {
task.run(if graceful_shutdown { task.run_with_signal(if graceful_shutdown {
Some(Self::shutdown_signal) Some(Self::shutdown_signal)
} else { } else {
None None

View File

@@ -18,6 +18,8 @@ use crate::{
#[derive(Snafu, Debug)] #[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))] #[snafu(visibility(pub(crate)))]
pub enum RecorderError { pub enum RecorderError {
#[snafu(transparent)]
ChronoTzParseError { source: chrono_tz::ParseError },
#[snafu(transparent)] #[snafu(transparent)]
SeaographyError { source: seaography::SeaographyError }, SeaographyError { source: seaography::SeaographyError },
#[snafu(transparent)] #[snafu(transparent)]
@@ -313,4 +315,10 @@ impl From<http::method::InvalidMethod> for RecorderError {
} }
} }
impl From<async_graphql::Error> for RecorderError {
fn from(error: async_graphql::Error) -> Self {
seaography::SeaographyError::AsyncGraphQLError(error).into()
}
}
pub type RecorderResult<T> = Result<T, RecorderError>; pub type RecorderResult<T> = Result<T, RecorderError>;

View File

@@ -1,38 +1,4 @@
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use quirks_path::Path;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};
use crate::{
errors::app_error::{RecorderError, RecorderResult},
extract::defs::SUBTITLE_LANG,
};
lazy_static! {
static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
vec![
FancyRegex::new(
r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)",
)
.unwrap(),
FancyRegex::new(
r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)",
)
.unwrap(),
FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
]
};
static ref GET_FANSUB_SPLIT_RE: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
static ref GET_FANSUB_FULL_MATCH_RE: Regex = Regex::new(r"^\d+$").unwrap();
static ref GET_SEASON_AND_TITLE_SUB_RE: Regex = Regex::new(r"([Ss]|Season )\d{1,3}").unwrap();
static ref GET_SEASON_AND_TITLE_FIND_RE: Regex =
Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct EpisodeEnclosureMeta { pub struct EpisodeEnclosureMeta {
@@ -41,293 +7,3 @@ pub struct EpisodeEnclosureMeta {
pub pub_date: Option<DateTime<Utc>>, pub pub_date: Option<DateTime<Utc>>,
pub content_length: Option<i64>, pub content_length: Option<i64>,
} }
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeMediaMeta {
pub fansub: Option<String>,
pub title: String,
pub season: i32,
pub episode_index: i32,
pub extname: String,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeSubtitleMeta {
pub media: TorrentEpisodeMediaMeta,
pub lang: Option<String>,
}
fn get_fansub(group_and_title: &str) -> (Option<&str>, &str) {
let n = GET_FANSUB_SPLIT_RE
.split(group_and_title)
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>();
match (n.first(), n.get(1)) {
(None, None) => (None, ""),
(Some(n0), None) => (None, *n0),
(Some(n0), Some(n1)) => {
if GET_FANSUB_FULL_MATCH_RE.is_match(n1) {
(None, group_and_title)
} else {
(Some(*n0), *n1)
}
}
_ => unreachable!("vec contains n1 must contains n0"),
}
}
fn get_season_and_title(season_and_title: &str) -> (String, i32) {
let replaced_title = GET_SEASON_AND_TITLE_SUB_RE.replace_all(season_and_title, "");
let title = replaced_title.trim().to_string();
let season = GET_SEASON_AND_TITLE_FIND_RE
.captures(season_and_title)
.map(|m| {
m.get(2)
.unwrap_or_else(|| unreachable!("season regex should have 2 groups"))
.as_str()
.parse::<i32>()
.unwrap_or_else(|_| unreachable!("season should be a number"))
})
.unwrap_or(1);
(title, season)
}
fn get_subtitle_lang(media_name: &str) -> Option<&str> {
let media_name_lower = media_name.to_lowercase();
for (lang, lang_aliases) in SUBTITLE_LANG.iter() {
if lang_aliases
.iter()
.any(|alias| media_name_lower.contains(alias))
{
return Some(lang);
}
}
None
}
pub fn parse_episode_media_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> RecorderResult<TorrentEpisodeMediaMeta> {
let media_name = torrent_path
.file_name()
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {torrent_path}")
})?;
let mut match_obj = None;
for rule in TORRENT_EP_PARSE_RULES.iter() {
match_obj = if let Some(torrent_name) = torrent_name.as_ref() {
rule.captures(torrent_name)?
} else {
rule.captures(media_name)?
};
if match_obj.is_some() {
break;
}
}
if let Some(match_obj) = match_obj {
let group_season_and_title = match_obj
.get(1)
.whatever_context::<_, RecorderError>("should have 1 group")?
.as_str();
let (fansub, season_and_title) = get_fansub(group_season_and_title);
let (title, season) = if let Some(season) = season {
let (title, _) = get_season_and_title(season_and_title);
(title, season)
} else {
get_season_and_title(season_and_title)
};
let episode_index = match_obj
.get(2)
.whatever_context::<_, RecorderError>("should have 2 group")?
.as_str()
.parse::<i32>()
.unwrap_or(1);
let extname = torrent_path
.extension()
.map(|e| format!(".{e}"))
.unwrap_or_default();
Ok(TorrentEpisodeMediaMeta {
fansub: fansub.map(|s| s.to_string()),
title,
season,
episode_index,
extname,
})
} else {
whatever!(
"failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'",
torrent_path,
torrent_name
)
}
}
pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> RecorderResult<TorrentEpisodeSubtitleMeta> {
let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
let media_name = torrent_path
.file_name()
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {torrent_path}")
})?;
let lang = get_subtitle_lang(media_name);
Ok(TorrentEpisodeSubtitleMeta {
media: media_meta,
lang: lang.map(|s| s.to_string()),
})
}
#[cfg(test)]
mod tests {
use quirks_path::Path;
use super::{
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta, parse_episode_media_meta_from_torrent,
parse_episode_subtitle_meta_from_torrent,
};
#[test]
fn test_lilith_raws_media() {
test_torrent_ep_parser(
r#"[Lilith-Raws] Boku no Kokoro no Yabai Yatsu - 01 [Baha][WEB-DL][1080p][AVC AAC][CHT][MP4].mp4"#,
r#"{"fansub": "Lilith-Raws", "title": "Boku no Kokoro no Yabai Yatsu", "season": 1, "episode_index": 1, "extname": ".mp4"}"#,
);
}
#[test]
fn test_sakurato_media() {
test_torrent_ep_parser(
r#"[Sakurato] Tonikaku Kawaii S2 [03][AVC-8bit 1080p AAC][CHS].mp4"#,
r#"{"fansub": "Sakurato", "title": "Tonikaku Kawaii", "season": 2, "episode_index": 3, "extname": ".mp4"}"#,
)
}
#[test]
fn test_lolihouse_media() {
test_torrent_ep_parser(
r#"[SweetSub&LoliHouse] Heavenly Delusion - 08 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#,
r#"{"fansub": "SweetSub&LoliHouse", "title": "Heavenly Delusion", "season": 1, "episode_index": 8, "extname": ".mkv"}"#,
)
}
#[test]
fn test_sbsub_media() {
test_torrent_ep_parser(
r#"[SBSUB][CONAN][1082][V2][1080P][AVC_AAC][CHS_JP](C1E4E331).mp4"#,
r#"{"fansub": "SBSUB", "title": "CONAN", "season": 1, "episode_index": 1082, "extname": ".mp4"}"#,
)
}
#[test]
fn test_non_fansub_media() {
test_torrent_ep_parser(
r#"海盗战记 (2019) S04E11.mp4"#,
r#"{"title": "海盗战记 (2019)", "season": 4, "episode_index": 11, "extname": ".mp4"}"#,
)
}
#[test]
fn test_non_fansub_media_with_dirname() {
test_torrent_ep_parser(
r#"海盗战记/海盗战记 S01E01.mp4"#,
r#"{"title": "海盗战记", "season": 1, "episode_index": 1, "extname": ".mp4"}"#,
);
}
#[test]
fn test_non_fansub_tc_subtitle() {
test_torrent_ep_parser(
r#"海盗战记 S01E08.zh-tw.ass"#,
r#"{"media": { "title": "海盗战记", "season": 1, "episode_index": 8, "extname": ".ass" }, "lang": "zh-tw"}"#,
);
}
#[test]
fn test_non_fansub_sc_subtitle() {
test_torrent_ep_parser(
r#"海盗战记 S01E01.SC.srt"#,
r#"{ "media": { "title": "海盗战记", "season": 1, "episode_index": 1, "extname": ".srt" }, "lang": "zh" }"#,
)
}
#[test]
fn test_non_fansub_media_with_season_zero() {
test_torrent_ep_parser(
r#"水星的魔女(2022) S00E19.mp4"#,
r#"{"fansub": null,"title": "水星的魔女(2022)","season": 0,"episode_index": 19,"extname": ".mp4"}"#,
)
}
#[test]
fn test_shimian_fansub_media() {
test_torrent_ep_parser(
r#"【失眠搬运组】放学后失眠的你-Kimi wa Houkago Insomnia - 06 [bilibili - 1080p AVC1 CHS-JP].mp4"#,
r#"{"fansub": "失眠搬运组","title": "放学后失眠的你-Kimi wa Houkago Insomnia","season": 1,"episode_index": 6,"extname": ".mp4"}"#,
)
}
pub fn test_torrent_ep_parser(origin_name: &str, expected: &str) {
let extname = Path::new(origin_name)
.extension()
.map(|e| format!(".{e}"))
.unwrap_or_default()
.to_lowercase();
if extname == ".srt" || extname == ".ass" {
let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
let found_raw =
parse_episode_subtitle_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {
if found_raw.is_ok() {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
} else {
println!(
"expected {} and found {:#?} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
found_raw
)
}
}
assert_eq!(expected, found);
} else {
let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
let found_raw =
parse_episode_media_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {
if found_raw.is_ok() {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
} else {
println!(
"expected {} and found {:#?} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
found_raw
)
}
}
assert_eq!(expected, found);
}
}
}

View File

@@ -1,34 +0,0 @@
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use regex::Regex;
const LANG_ZH_TW: &str = "zh-tw";
const LANG_ZH: &str = "zh";
const LANG_EN: &str = "en";
const LANG_JP: &str = "jp";
lazy_static! {
pub static ref SEASON_REGEX: Regex =
Regex::new(r"(S\|[Ss]eason\s+)(\d+)").expect("Invalid regex");
pub static ref TORRENT_PRASE_RULE_REGS: Vec<FancyRegex> = vec![
FancyRegex::new(
r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)"
)
.unwrap(),
FancyRegex::new(
r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)"
)
.unwrap(),
FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
];
pub static ref SUBTITLE_LANG: Vec<(&'static str, Vec<&'static str>)> = {
vec![
(LANG_ZH_TW, vec!["tc", "cht", "", "zh-tw"]),
(LANG_ZH, vec!["sc", "chs", "", "zh", "zh-cn"]),
(LANG_EN, vec!["en", "eng", ""]),
(LANG_JP, vec!["jp", "jpn", ""]),
]
};
}

View File

@@ -546,14 +546,12 @@ impl MikanBangumiSubscription {
#[cfg(test)] #[cfg(test)]
#[allow(unused_variables)] #[allow(unused_variables)]
mod tests { mod tests {
use std::sync::Arc;
use rstest::{fixture, rstest}; use rstest::{fixture, rstest};
use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait}; use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait};
use tracing::Level; use tracing::Level;
use crate::{ use crate::{
app::AppContextTrait,
errors::RecorderResult, errors::RecorderResult,
extract::mikan::{ extract::mikan::{
MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
@@ -564,34 +562,11 @@ mod tests {
subscriptions::{self, SubscriptionTrait}, subscriptions::{self, SubscriptionTrait},
}, },
test_utils::{ test_utils::{
app::{TestingAppContext, TestingAppContextPreset}, app::TestingPreset, mikan::build_testing_mikan_credential_form,
mikan::{MikanMockServer, build_testing_mikan_credential_form},
tracing::try_init_testing_tracing, tracing::try_init_testing_tracing,
}, },
}; };
struct TestingResources {
pub app_ctx: Arc<dyn AppContextTrait>,
pub mikan_server: MikanMockServer,
}
async fn build_testing_app_context() -> RecorderResult<TestingResources> {
let mikan_server = MikanMockServer::new().await?;
let mikan_base_url = mikan_server.base_url().clone();
let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
mikan_base_url: mikan_base_url.to_string(),
database_config: None,
})
.await?;
Ok(TestingResources {
app_ctx,
mikan_server,
})
}
#[fixture] #[fixture]
fn before_each() { fn before_each() {
try_init_testing_tracing(Level::DEBUG); try_init_testing_tracing(Level::DEBUG);
@@ -600,10 +575,10 @@ mod tests {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> { async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources { let mut preset = TestingPreset::default().await?;
app_ctx, let app_ctx = preset.app_ctx.clone();
mut mikan_server,
} = build_testing_app_context().await?; let mikan_server = &mut preset.mikan_server;
let _resources_mock = mikan_server.mock_resources_with_doppel(); let _resources_mock = mikan_server.mock_resources_with_doppel();
@@ -662,10 +637,11 @@ mod tests {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_mikan_subscriber_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> { async fn test_mikan_subscriber_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources { let mut preset = TestingPreset::default().await?;
app_ctx,
mut mikan_server, let app_ctx = preset.app_ctx.clone();
} = build_testing_app_context().await?;
let mikan_server = &mut preset.mikan_server;
let _resources_mock = mikan_server.mock_resources_with_doppel(); let _resources_mock = mikan_server.mock_resources_with_doppel();
@@ -729,10 +705,11 @@ mod tests {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> { async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
let TestingResources { let mut preset = TestingPreset::default().await?;
app_ctx,
mut mikan_server, let app_ctx = preset.app_ctx.clone();
} = build_testing_app_context().await?;
let mikan_server = &mut preset.mikan_server;
let _resources_mock = mikan_server.mock_resources_with_doppel(); let _resources_mock = mikan_server.mock_resources_with_doppel();

View File

@@ -26,8 +26,8 @@ use crate::{
MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY, MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
MIKAN_YEAR_QUERY_KEY, MikanClient, build_mikan_bangumi_subscription_rss_url, MIKAN_UNKNOWN_FANSUB_ID, MIKAN_YEAR_QUERY_KEY, MikanClient,
build_mikan_subscriber_subscription_rss_url, build_mikan_bangumi_subscription_rss_url, build_mikan_subscriber_subscription_rss_url,
}, },
}, },
media::{ media::{
@@ -35,7 +35,7 @@ use crate::{
EncodeWebpOptions, EncodeWebpOptions,
}, },
storage::StorageContentCategory, storage::StorageContentCategory,
task::{OptimizeImageTask, SystemTask}, task::OptimizeImageTask,
}; };
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@@ -564,16 +564,17 @@ pub fn extract_mikan_episode_meta_from_episode_homepage_html(
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id")) RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
})?; })?;
let fansub_name = html let fansub_name = if mikan_fansub_id == MIKAN_UNKNOWN_FANSUB_ID {
.select( MIKAN_UNKNOWN_FANSUB_ID.to_string()
} else {
html.select(
&Selector::parse(".bangumi-info a.magnet-link-wrap[href^='/Home/PublishGroup/']") &Selector::parse(".bangumi-info a.magnet-link-wrap[href^='/Home/PublishGroup/']")
.unwrap(), .unwrap(),
) )
.next() .next()
.map(extract_inner_text_from_element_ref) .map(extract_inner_text_from_element_ref)
.ok_or_else(|| { .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))?
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")) };
})?;
let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| { let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
el.value() el.value()
@@ -685,6 +686,13 @@ pub fn extract_mikan_fansub_meta_from_bangumi_homepage_html(
html: &Html, html: &Html,
mikan_fansub_id: String, mikan_fansub_id: String,
) -> Option<MikanFansubMeta> { ) -> Option<MikanFansubMeta> {
if mikan_fansub_id == MIKAN_UNKNOWN_FANSUB_ID {
return Some(MikanFansubMeta {
mikan_fansub_id,
fansub: MIKAN_UNKNOWN_FANSUB_ID.to_string(),
});
}
html.select( html.select(
&Selector::parse(&format!( &Selector::parse(&format!(
"a.subgroup-name[data-anchor='#{mikan_fansub_id}']" "a.subgroup-name[data-anchor='#{mikan_fansub_id}']"
@@ -818,11 +826,14 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
let webp_storage_path = storage_path.with_extension("webp"); let webp_storage_path = storage_path.with_extension("webp");
if storage_service.exists(&webp_storage_path).await?.is_none() { if storage_service.exists(&webp_storage_path).await?.is_none() {
task_service task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask { .add_system_task(
source_path: storage_path.clone().to_string(), OptimizeImageTask::builder()
target_path: webp_storage_path.to_string(), .source_path(storage_path.clone().to_string())
format_options: EncodeImageOptions::Webp(EncodeWebpOptions::default()), .target_path(webp_storage_path.to_string())
})) .format_options(EncodeImageOptions::Webp(EncodeWebpOptions::default()))
.build()
.into(),
)
.await?; .await?;
} }
} }
@@ -830,11 +841,14 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
let avif_storage_path = storage_path.with_extension("avif"); let avif_storage_path = storage_path.with_extension("avif");
if storage_service.exists(&avif_storage_path).await?.is_none() { if storage_service.exists(&avif_storage_path).await?.is_none() {
task_service task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask { .add_system_task(
source_path: storage_path.clone().to_string(), OptimizeImageTask::builder()
target_path: avif_storage_path.to_string(), .source_path(storage_path.clone().to_string())
format_options: EncodeImageOptions::Avif(EncodeAvifOptions::default()), .target_path(avif_storage_path.to_string())
})) .format_options(EncodeImageOptions::Avif(EncodeAvifOptions::default()))
.build()
.into(),
)
.await?; .await?;
} }
} }
@@ -842,11 +856,14 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
let jxl_storage_path = storage_path.with_extension("jxl"); let jxl_storage_path = storage_path.with_extension("jxl");
if storage_service.exists(&jxl_storage_path).await?.is_none() { if storage_service.exists(&jxl_storage_path).await?.is_none() {
task_service task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask { .add_system_task(
source_path: storage_path.clone().to_string(), OptimizeImageTask::builder()
target_path: jxl_storage_path.to_string(), .source_path(storage_path.clone().to_string())
format_options: EncodeImageOptions::Jxl(EncodeJxlOptions::default()), .target_path(jxl_storage_path.to_string())
})) .format_options(EncodeImageOptions::Jxl(EncodeJxlOptions::default()))
.build()
.into(),
)
.await?; .await?;
} }
} }
@@ -1089,7 +1106,7 @@ mod test {
use super::*; use super::*;
use crate::test_utils::{ use crate::test_utils::{
app::{TestingAppContext, TestingAppContextPreset}, app::{TestingAppContext, TestingPreset},
crypto::build_testing_crypto_service, crypto::build_testing_crypto_service,
database::build_testing_database_service, database::build_testing_database_service,
mikan::{ mikan::{
@@ -1137,17 +1154,13 @@ mod test {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_scrape_mikan_poster_meta_from_image_url(before_each: ()) -> RecorderResult<()> { async fn test_scrape_mikan_poster_meta_from_image_url(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = MikanMockServer::new().await?; let mut preset = TestingPreset::default().await?;
let mikan_base_url = mikan_server.base_url().clone(); let app_ctx = preset.app_ctx.clone();
let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset { let mikan_base_url = preset.mikan_server.base_url().clone();
mikan_base_url: mikan_base_url.to_string(),
database_config: None,
})
.await?;
let resources_mock = mikan_server.mock_resources_with_doppel(); let resources_mock = preset.mikan_server.mock_resources_with_doppel();
let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?; let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;

View File

@@ -1,5 +1,4 @@
pub mod bittorrent; pub mod bittorrent;
pub mod defs;
pub mod html; pub mod html;
pub mod http; pub mod http;
pub mod media; pub mod media;

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::bangumi}; use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::bangumi,
};
pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) { pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId); restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId);
@@ -8,7 +14,6 @@ pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder { pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<bangumi::BangumiType>(); builder.register_enumeration::<bangumi::BangumiType>();
seaography::register_entity!(builder, bangumi);
builder register_entity_default_writable!(builder, bangumi, false)
} }

View File

@@ -2,7 +2,7 @@ use std::sync::Arc;
use async_graphql::dynamic::{Field, FieldFuture, FieldValue, Object, TypeRef}; use async_graphql::dynamic::{Field, FieldFuture, FieldValue, Object, TypeRef};
use sea_orm::{EntityTrait, QueryFilter}; use sea_orm::{EntityTrait, QueryFilter};
use seaography::{Builder as SeaographyBuilder, BuilderContext, get_filter_conditions}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql; use util_derive::DynamicGraphql;
@@ -16,7 +16,7 @@ use crate::{
register_crypto_column_input_conversion_to_schema_context, register_crypto_column_input_conversion_to_schema_context,
register_crypto_column_output_conversion_to_schema_context, register_crypto_column_output_conversion_to_schema_context,
}, },
custom::generate_entity_filtered_mutation_field, custom::{generate_entity_filtered_mutation_field, register_entity_default_writable},
name::get_entity_custom_mutation_field_name, name::get_entity_custom_mutation_field_name,
}, },
}, },
@@ -95,13 +95,13 @@ pub fn register_credential3rd_to_schema_builder(
mut builder: SeaographyBuilder, mut builder: SeaographyBuilder,
) -> SeaographyBuilder { ) -> SeaographyBuilder {
builder.register_enumeration::<credential_3rd::Credential3rdType>(); builder.register_enumeration::<credential_3rd::Credential3rdType>();
seaography::register_entity!(builder, credential_3rd); builder = register_entity_default_writable!(builder, credential_3rd, false);
builder.schema = builder builder.schema = builder
.schema .schema
.register(Credential3rdCheckAvailableInfo::generate_output_object()); .register(Credential3rdCheckAvailableInfo::generate_output_object());
let builder_context = builder.context; let builder_context = &builder.context;
{ {
let check_available_mutation_name = get_entity_custom_mutation_field_name::< let check_available_mutation_name = get_entity_custom_mutation_field_name::<
credential_3rd::Entity, credential_3rd::Entity,
@@ -111,18 +111,12 @@ pub fn register_credential3rd_to_schema_builder(
builder_context, builder_context,
check_available_mutation_name, check_available_mutation_name,
TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()), TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
Arc::new(|resolver_ctx, app_ctx, filters| { Arc::new(|_resolver_ctx, app_ctx, filters| {
let filters_condition = get_filter_conditions::<credential_3rd::Entity>(
resolver_ctx,
builder_context,
filters,
);
Box::pin(async move { Box::pin(async move {
let db = app_ctx.db(); let db = app_ctx.db();
let credential_model = credential_3rd::Entity::find() let credential_model = credential_3rd::Entity::find()
.filter(filters_condition) .filter(filters)
.one(db) .one(db)
.await? .await?
.ok_or_else(|| { .ok_or_else(|| {

View File

@@ -1,36 +1,26 @@
use convert_case::Case;
use sea_orm::Iterable; use sea_orm::Iterable;
use seaography::{Builder as SeaographyBuilder, BuilderContext}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{ use crate::{
graphql::{ graphql::{
domains::subscribers::restrict_subscriber_for_entity, domains::{
infra::{ subscriber_tasks::restrict_subscriber_tasks_for_entity,
custom::{ subscribers::restrict_subscriber_for_entity,
generate_entity_default_create_batch_mutation_field, system_tasks::restrict_system_tasks_for_entity,
generate_entity_default_create_one_mutation_field,
generate_entity_default_delete_mutation_field,
generate_entity_default_insert_input_object,
generate_entity_default_update_input_object,
generate_entity_default_update_mutation_field,
},
json::{
convert_jsonb_output_case_for_entity, restrict_jsonb_filter_input_for_entity,
validate_jsonb_input_for_entity,
},
name::get_entity_and_column_name,
}, },
infra::{custom::register_entity_default_writable, name::get_entity_and_column_name},
}, },
models::{cron, subscriber_tasks}, models::cron,
}; };
fn skip_columns_for_entity_input(context: &mut BuilderContext) { fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in cron::Column::iter() { for column in cron::Column::iter() {
if matches!( if matches!(
column, column,
cron::Column::SubscriberTask cron::Column::SubscriberTaskCron
| cron::Column::Id | cron::Column::SystemTaskCron
| cron::Column::CronExpr | cron::Column::CronExpr
| cron::Column::CronTimezone
| cron::Column::Enabled | cron::Column::Enabled
| cron::Column::TimeoutMs | cron::Column::TimeoutMs
| cron::Column::MaxAttempts | cron::Column::MaxAttempts
@@ -41,7 +31,8 @@ fn skip_columns_for_entity_input(context: &mut BuilderContext) {
context.entity_input.insert_skips.push(entity_column_key); context.entity_input.insert_skips.push(entity_column_key);
} }
for column in cron::Column::iter() { for column in cron::Column::iter() {
if matches!(column, |cron::Column::CronExpr| cron::Column::Enabled if matches!(column, |cron::Column::CronExpr| cron::Column::CronTimezone
| cron::Column::Enabled
| cron::Column::TimeoutMs | cron::Column::TimeoutMs
| cron::Column::Priority | cron::Column::Priority
| cron::Column::MaxAttempts) | cron::Column::MaxAttempts)
@@ -56,72 +47,18 @@ fn skip_columns_for_entity_input(context: &mut BuilderContext) {
pub fn register_cron_to_schema_context(context: &mut BuilderContext) { pub fn register_cron_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<cron::Entity>(context, &cron::Column::SubscriberId); restrict_subscriber_for_entity::<cron::Entity>(context, &cron::Column::SubscriberId);
restrict_jsonb_filter_input_for_entity::<cron::Entity>(context, &cron::Column::SubscriberTask); restrict_subscriber_tasks_for_entity::<cron::Entity>(
convert_jsonb_output_case_for_entity::<cron::Entity>(
context, context,
&cron::Column::SubscriberTask, &cron::Column::SubscriberTaskCron,
Case::Camel,
);
validate_jsonb_input_for_entity::<cron::Entity, Option<subscriber_tasks::SubscriberTask>>(
context,
&cron::Column::SubscriberTask,
); );
restrict_system_tasks_for_entity::<cron::Entity>(context, &cron::Column::SystemTaskCron);
skip_columns_for_entity_input(context); skip_columns_for_entity_input(context);
} }
pub fn register_cron_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder { pub fn register_cron_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_entity::<cron::Entity>(
<cron::RelatedEntity as sea_orm::Iterable>::iter()
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(cron::Entity, tokio::spawn);
builder = builder.register_entity_dataloader_one_to_many(cron::Entity, tokio::spawn);
builder.register_enumeration::<cron::CronStatus>(); builder.register_enumeration::<cron::CronStatus>();
let builder_context = builder.context; builder = register_entity_default_writable!(builder, cron, true);
{
builder
.inputs
.push(generate_entity_default_insert_input_object::<cron::Entity>(
builder_context,
));
builder
.mutations
.push(generate_entity_default_create_one_mutation_field::<
cron::Entity,
_,
>(builder_context, true));
builder
.mutations
.push(generate_entity_default_create_batch_mutation_field::<
cron::Entity,
_,
>(builder_context, true));
}
{
builder
.inputs
.push(generate_entity_default_update_input_object::<cron::Entity>(
builder_context,
));
builder
.mutations
.push(generate_entity_default_update_mutation_field::<
cron::Entity,
_,
>(builder_context, true));
}
{
builder
.mutations
.push(generate_entity_default_delete_mutation_field::<
cron::Entity,
_,
>(builder_context, false));
}
builder builder
} }

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloaders}; use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::downloaders,
};
pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) { pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<downloaders::Entity>( restrict_subscriber_for_entity::<downloaders::Entity>(
@@ -11,7 +17,7 @@ pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder { pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<downloaders::DownloaderCategory>(); builder.register_enumeration::<downloaders::DownloaderCategory>();
seaography::register_entity!(builder, downloaders); builder = register_entity_default_writable!(builder, downloaders, false);
builder builder
} }

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloads}; use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::downloads,
};
pub fn register_downloads_to_schema_context(context: &mut BuilderContext) { pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId); restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId);
@@ -9,7 +15,7 @@ pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder { pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<downloads::DownloadStatus>(); builder.register_enumeration::<downloads::DownloadStatus>();
builder.register_enumeration::<downloads::DownloadMime>(); builder.register_enumeration::<downloads::DownloadMime>();
seaography::register_entity!(builder, downloads); builder = register_entity_default_writable!(builder, downloads, false);
builder builder
} }

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::episodes}; use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::episodes,
};
pub fn register_episodes_to_schema_context(context: &mut BuilderContext) { pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId); restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId);
@@ -8,7 +14,7 @@ pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder { pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<episodes::EpisodeType>(); builder.register_enumeration::<episodes::EpisodeType>();
seaography::register_entity!(builder, episodes); builder = register_entity_default_writable!(builder, episodes, false);
builder builder
} }

View File

@@ -3,13 +3,17 @@ use std::sync::Arc;
use async_graphql::dynamic::ResolverContext; use async_graphql::dynamic::ResolverContext;
use sea_orm::Value as SeaValue; use sea_orm::Value as SeaValue;
use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult}; use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};
use uuid::Uuid;
use crate::{ use crate::{
graphql::{ graphql::{
domains::subscribers::restrict_subscriber_for_entity, domains::subscribers::restrict_subscriber_for_entity,
infra::name::{ infra::{
get_entity_and_column_name, get_entity_create_batch_mutation_field_name, custom::register_entity_default_writable,
get_entity_create_one_mutation_field_name, name::{
get_entity_and_column_name, get_entity_create_batch_mutation_field_name,
get_entity_create_one_mutation_field_name,
},
}, },
}, },
models::feeds, models::feeds,
@@ -32,7 +36,9 @@ pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
if field_name == entity_create_one_mutation_field_name.as_str() if field_name == entity_create_one_mutation_field_name.as_str()
|| field_name == entity_create_batch_mutation_field_name.as_str() || field_name == entity_create_batch_mutation_field_name.as_str()
{ {
Ok(Some(SeaValue::String(Some(Box::new(nanoid::nanoid!()))))) Ok(Some(SeaValue::String(Some(Box::new(
Uuid::now_v7().to_string(),
)))))
} else { } else {
Ok(None) Ok(None)
} }
@@ -45,7 +51,8 @@ pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder { pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<feeds::FeedType>(); builder.register_enumeration::<feeds::FeedType>();
builder.register_enumeration::<feeds::FeedSource>(); builder.register_enumeration::<feeds::FeedSource>();
seaography::register_entity!(builder, feeds);
builder = register_entity_default_writable!(builder, feeds, false);
builder builder
} }

View File

@@ -1,6 +1,7 @@
pub mod credential_3rd; pub mod credential_3rd;
pub mod bangumi; pub mod bangumi;
pub mod cron;
pub mod downloaders; pub mod downloaders;
pub mod downloads; pub mod downloads;
pub mod episodes; pub mod episodes;
@@ -10,4 +11,4 @@ pub mod subscribers;
pub mod subscription_bangumi; pub mod subscription_bangumi;
pub mod subscription_episode; pub mod subscription_episode;
pub mod subscriptions; pub mod subscriptions;
pub mod cron; pub mod system_tasks;

View File

@@ -1,14 +1,15 @@
use std::{ops::Deref, sync::Arc}; use std::{ops::Deref, sync::Arc};
use async_graphql::dynamic::{FieldValue, TypeRef, ValueAccessor}; use async_graphql::dynamic::{FieldValue, Scalar, TypeRef};
use convert_case::Case; use convert_case::Case;
use sea_orm::{ use sea_orm::{
ColumnTrait, ConnectionTrait, EntityTrait, Iterable, QueryFilter, QuerySelect, QueryTrait, ActiveModelBehavior, ColumnTrait, ConnectionTrait, EntityTrait, Iterable, QueryFilter,
prelude::Expr, sea_query::Query, QuerySelect, QueryTrait, prelude::Expr, sea_query::Query,
}; };
use seaography::{ use seaography::{
Builder as SeaographyBuilder, BuilderContext, GuardAction, get_filter_conditions, Builder as SeaographyBuilder, BuilderContext, SeaographyError, prepare_active_model,
}; };
use ts_rs::TS;
use crate::{ use crate::{
auth::AuthUserInfo, auth::AuthUserInfo,
@@ -18,62 +19,27 @@ use crate::{
infra::{ infra::{
custom::{ custom::{
generate_entity_create_one_mutation_field, generate_entity_create_one_mutation_field,
generate_entity_default_insert_input_object, generate_entity_default_basic_entity_object,
generate_entity_filtered_mutation_field, generate_entity_default_insert_input_object, generate_entity_delete_mutation_field,
}, generate_entity_filtered_mutation_field, register_entity_default_readonly,
json::{
convert_jsonb_output_case_for_entity, restrict_jsonb_filter_input_for_entity,
validate_jsonb_input_for_entity,
}, },
json::{convert_jsonb_output_for_entity, restrict_jsonb_filter_input_for_entity},
name::{ name::{
get_column_name, get_entity_and_column_name, get_entity_basic_type_name, get_entity_and_column_name, get_entity_basic_type_name,
get_entity_create_batch_mutation_data_field_name, get_entity_custom_mutation_field_name,
get_entity_create_batch_mutation_field_name,
get_entity_create_one_mutation_data_field_name,
get_entity_create_one_mutation_field_name, get_entity_custom_mutation_field_name,
get_entity_delete_mutation_field_name, get_entity_update_mutation_field_name,
}, },
}, },
}, },
migrations::defs::{ApalisJobs, ApalisSchema},
models::subscriber_tasks, models::subscriber_tasks,
task::{ApalisJobs, ApalisSchema}, task::SubscriberTaskTrait,
}; };
pub fn check_entity_and_task_subscriber_id_matches(
value_accessor: &ValueAccessor<'_>,
subscriber_id: i32,
subscriber_id_column_name: &str,
subscriber_task_column_name: &str,
) -> bool {
value_accessor.object().is_ok_and(|input_object| {
input_object
.get(subscriber_task_column_name)
.and_then(|subscriber_task_value| subscriber_task_value.object().ok())
.and_then(|subscriber_task_object| {
subscriber_task_object
.get("subscriber_id")
.and_then(|job_subscriber_id| job_subscriber_id.i64().ok())
})
.is_some_and(|subscriber_task_subscriber_id| {
subscriber_task_subscriber_id as i32
== input_object
.get(subscriber_id_column_name)
.and_then(|subscriber_id_object| subscriber_id_object.i64().ok())
.map(|subscriber_id| subscriber_id as i32)
.unwrap_or(subscriber_id)
})
})
}
fn skip_columns_for_entity_input(context: &mut BuilderContext) { fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in subscriber_tasks::Column::iter() { for column in subscriber_tasks::Column::iter() {
if matches!( if matches!(
column, column,
subscriber_tasks::Column::Job subscriber_tasks::Column::Job | subscriber_tasks::Column::SubscriberId
| subscriber_tasks::Column::Id
| subscriber_tasks::Column::SubscriberId
| subscriber_tasks::Column::Priority
| subscriber_tasks::Column::MaxAttempts
) { ) {
continue; continue;
} }
@@ -83,182 +49,131 @@ fn skip_columns_for_entity_input(context: &mut BuilderContext) {
} }
} }
pub fn restrict_subscriber_tasks_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_and_column = get_entity_and_column_name::<T>(context, column);
restrict_jsonb_filter_input_for_entity::<T>(context, column);
convert_jsonb_output_for_entity::<T>(context, column, Some(Case::Camel));
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.types.input_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(subscriber_tasks::SubscriberTask::ident().into()),
);
context.types.output_type_overwrites.insert(
entity_column_name.clone(),
TypeRef::Named(subscriber_tasks::SubscriberTask::ident().into()),
);
context.types.input_conversions.insert(
entity_column_name.clone(),
Box::new(move |resolve_context, value_accessor| {
let task: subscriber_tasks::SubscriberTaskInput = value_accessor.deserialize()?;
let subscriber_id = resolve_context
.data::<AuthUserInfo>()?
.subscriber_auth
.subscriber_id;
let task = subscriber_tasks::SubscriberTask::from_input(task, subscriber_id);
let json_value = serde_json::to_value(task).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_name}"),
)
})?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}),
);
context.entity_input.update_skips.push(entity_and_column);
}
pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) { pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscriber_tasks::Entity>( restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
context, context,
&subscriber_tasks::Column::SubscriberId, &subscriber_tasks::Column::SubscriberId,
); );
restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>( restrict_subscriber_tasks_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
);
convert_jsonb_output_case_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
Case::Camel,
);
validate_jsonb_input_for_entity::<subscriber_tasks::Entity, subscriber_tasks::SubscriberTask>(
context, context,
&subscriber_tasks::Column::Job, &subscriber_tasks::Column::Job,
); );
skip_columns_for_entity_input(context); skip_columns_for_entity_input(context);
context.guards.field_guards.insert(
get_entity_and_column_name::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
),
{
let create_one_mutation_field_name =
Arc::new(get_entity_create_one_mutation_field_name::<
subscriber_tasks::Entity,
>(context));
let create_one_mutation_data_field_name =
Arc::new(get_entity_create_one_mutation_data_field_name(context).to_string());
let create_batch_mutation_field_name =
Arc::new(get_entity_create_batch_mutation_field_name::<
subscriber_tasks::Entity,
>(context));
let create_batch_mutation_data_field_name =
Arc::new(get_entity_create_batch_mutation_data_field_name(context).to_string());
let update_mutation_field_name = Arc::new(get_entity_update_mutation_field_name::<
subscriber_tasks::Entity,
>(context));
let job_column_name = Arc::new(get_column_name::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
));
let subscriber_id_column_name = Arc::new(get_column_name::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::SubscriberId,
));
Box::new(move |resolve_context| {
let field_name = resolve_context.field().name();
let subscriber_id = resolve_context
.data_opt::<AuthUserInfo>()
.unwrap()
.subscriber_auth
.subscriber_id;
let matched_subscriber_id = match field_name {
field if field == create_one_mutation_field_name.as_str() => resolve_context
.args
.get(create_one_mutation_data_field_name.as_str())
.is_some_and(|value_accessor| {
check_entity_and_task_subscriber_id_matches(
&value_accessor,
subscriber_id,
subscriber_id_column_name.as_str(),
job_column_name.as_str(),
)
}),
field if field == create_batch_mutation_field_name.as_str() => resolve_context
.args
.get(create_batch_mutation_data_field_name.as_str())
.and_then(|value| value.list().ok())
.is_some_and(|list| {
list.iter().all(|value| {
check_entity_and_task_subscriber_id_matches(
&value,
subscriber_id,
subscriber_id_column_name.as_str(),
job_column_name.as_str(),
)
})
}),
field if field == update_mutation_field_name.as_str() => {
unreachable!("subscriberTask entity do not support update job")
}
_ => true,
};
if matched_subscriber_id {
GuardAction::Allow
} else {
GuardAction::Block(Some(
"subscriber_id mismatch between entity and job".to_string(),
))
}
})
},
);
} }
pub fn register_subscriber_tasks_to_schema_builder( pub fn register_subscriber_tasks_to_schema_builder(
mut builder: SeaographyBuilder, mut builder: SeaographyBuilder,
) -> SeaographyBuilder { ) -> SeaographyBuilder {
builder.register_entity::<subscriber_tasks::Entity>( builder.schema = builder.schema.register(
<subscriber_tasks::RelatedEntity as sea_orm::Iterable>::iter() Scalar::new(subscriber_tasks::SubscriberTask::ident())
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context)) .description(subscriber_tasks::SubscriberTask::decl()),
.collect(),
); );
builder = builder.register_entity_dataloader_one_to_one(subscriber_tasks::Entity, tokio::spawn);
builder =
builder.register_entity_dataloader_one_to_many(subscriber_tasks::Entity, tokio::spawn);
builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>(); builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>(); builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
let context = builder.context; builder = register_entity_default_readonly!(builder, subscriber_tasks);
let builder_context = builder.context;
{ {
let delete_mutation = builder
generate_entity_filtered_mutation_field::<subscriber_tasks::Entity, _, _>( .outputs
context, .push(generate_entity_default_basic_entity_object::<
get_entity_delete_mutation_field_name::<subscriber_tasks::Entity>(context), subscriber_tasks::Entity,
TypeRef::named_nn(TypeRef::INT), >(builder_context));
Arc::new(|resolver_ctx, app_ctx, filters| { }
let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>( {
resolver_ctx, let delete_mutation = generate_entity_delete_mutation_field::<subscriber_tasks::Entity>(
context, builder_context,
filters, Arc::new(|_resolver_ctx, app_ctx, filters| {
); Box::pin(async move {
Box::pin(async move { let db = app_ctx.db();
let db = app_ctx.db();
let select_subquery = subscriber_tasks::Entity::find() let select_subquery = subscriber_tasks::Entity::find()
.select_only() .select_only()
.column(subscriber_tasks::Column::Id) .column(subscriber_tasks::Column::Id)
.filter(filters_condition); .filter(filters);
let delete_query = Query::delete() let delete_query = Query::delete()
.from_table((ApalisSchema::Schema, ApalisJobs::Table)) .from_table((ApalisSchema::Schema, ApalisJobs::Table))
.and_where( .and_where(
Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()), Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()),
) )
.to_owned(); .to_owned();
let db_backend = db.deref().get_database_backend(); let db_backend = db.deref().get_database_backend();
let delete_statement = db_backend.build(&delete_query); let delete_statement = db_backend.build(&delete_query);
let result = db.execute(delete_statement).await?; let result = db.execute(delete_statement).await?;
Ok::<_, RecorderError>(Some(FieldValue::value( Ok::<_, RecorderError>(result.rows_affected())
result.rows_affected() as i32 })
))) }),
}) );
}),
);
builder.mutations.push(delete_mutation); builder.mutations.push(delete_mutation);
} }
{ {
let entity_retry_one_mutation_name = let entity_retry_one_mutation_name = get_entity_custom_mutation_field_name::<
get_entity_custom_mutation_field_name::<subscriber_tasks::Entity>(context, "RetryOne"); subscriber_tasks::Entity,
>(builder_context, "RetryOne");
let retry_one_mutation = let retry_one_mutation =
generate_entity_filtered_mutation_field::<subscriber_tasks::Entity, _, _>( generate_entity_filtered_mutation_field::<subscriber_tasks::Entity, _, _>(
context, builder_context,
entity_retry_one_mutation_name, entity_retry_one_mutation_name,
TypeRef::named_nn(get_entity_basic_type_name::<subscriber_tasks::Entity>( TypeRef::named_nn(get_entity_basic_type_name::<subscriber_tasks::Entity>(
context, builder_context,
)), )),
Arc::new(|resolver_ctx, app_ctx, filters| { Arc::new(|_resolver_ctx, app_ctx, filters| {
let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
resolver_ctx,
context,
filters,
);
Box::pin(async move { Box::pin(async move {
let db = app_ctx.db(); let db = app_ctx.db();
let job_id = subscriber_tasks::Entity::find() let job_id = subscriber_tasks::Entity::find()
.filter(filters_condition) .filter(filters)
.select_only() .select_only()
.column(subscriber_tasks::Column::Id) .column(subscriber_tasks::Column::Id)
.into_tuple::<String>() .into_tuple::<String>()
@@ -290,25 +205,32 @@ pub fn register_subscriber_tasks_to_schema_builder(
.inputs .inputs
.push(generate_entity_default_insert_input_object::< .push(generate_entity_default_insert_input_object::<
subscriber_tasks::Entity, subscriber_tasks::Entity,
>(context)); >(builder_context));
let create_one_mutation = let create_one_mutation =
generate_entity_create_one_mutation_field::<subscriber_tasks::Entity, TypeRef>( generate_entity_create_one_mutation_field::<subscriber_tasks::Entity>(
context, builder_context,
None, Arc::new(move |resolver_ctx, app_ctx, input_object| {
Arc::new(|_resolver_ctx, app_ctx, input_object| {
let job_column_name = get_column_name::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
);
let task = input_object
.get(job_column_name.as_str())
.unwrap()
.deserialize::<subscriber_tasks::SubscriberTask>()
.unwrap();
Box::pin(async move { Box::pin(async move {
let active_model: Result<subscriber_tasks::ActiveModel, _> =
prepare_active_model(builder_context, &input_object, resolver_ctx);
let task_service = app_ctx.task(); let task_service = app_ctx.task();
let active_model = active_model?;
let db = app_ctx.db();
let active_model = active_model.before_save(db, true).await?;
let task = active_model.job.unwrap();
let subscriber_id = active_model.subscriber_id.unwrap();
if task.get_subscriber_id() != subscriber_id {
Err(async_graphql::Error::new(
"subscriber_id does not match with job.subscriber_id",
))?;
}
let task_id = task_service.add_subscriber_task(task).await?.to_string(); let task_id = task_service.add_subscriber_task(task).await?.to_string();
let db = app_ctx.db(); let db = app_ctx.db();

View File

@@ -7,18 +7,21 @@ use sea_orm::{ColumnTrait, Condition, EntityTrait, Iterable, Value as SeaValue};
use seaography::{ use seaography::{
Builder as SeaographyBuilder, BuilderContext, FilterInfo, Builder as SeaographyBuilder, BuilderContext, FilterInfo,
FilterOperation as SeaographqlFilterOperation, FilterType, FilterTypesMapHelper, FilterOperation as SeaographqlFilterOperation, FilterType, FilterTypesMapHelper,
FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult, SeaographyError, FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult,
}; };
use crate::{ use crate::{
auth::{AuthError, AuthUserInfo}, auth::{AuthError, AuthUserInfo},
graphql::infra::name::{ graphql::infra::{
get_column_name, get_entity_and_column_name, custom::register_entity_default_readonly,
get_entity_create_batch_mutation_data_field_name, name::{
get_entity_create_batch_mutation_field_name, get_column_name, get_entity_and_column_name,
get_entity_create_one_mutation_data_field_name, get_entity_create_one_mutation_field_name, get_entity_create_batch_mutation_data_field_name,
get_entity_name, get_entity_update_mutation_data_field_name, get_entity_create_batch_mutation_field_name,
get_entity_update_mutation_field_name, get_entity_create_one_mutation_data_field_name,
get_entity_create_one_mutation_field_name, get_entity_name,
get_entity_update_mutation_data_field_name, get_entity_update_mutation_field_name,
},
}, },
models::subscribers, models::subscribers,
}; };
@@ -216,11 +219,10 @@ where
if let Some(value) = filter.get("eq") { if let Some(value) = filter.get("eq") {
let value: i32 = value.i64()?.try_into()?; let value: i32 = value.i64()?.try_into()?;
if value != subscriber_id { if value != subscriber_id {
return Err(SeaographyError::AsyncGraphQLError( return Err(async_graphql::Error::new(
async_graphql::Error::new( "subscriber_id and auth_info does not match",
"subscriber_id and auth_info does not match", )
), .into());
));
} }
} }
} }
@@ -316,24 +318,14 @@ pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
pub fn register_subscribers_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder { pub fn register_subscribers_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
{ {
let filter_types_map_helper = FilterTypesMapHelper {
context: builder.context,
};
builder.schema = builder builder.schema = builder
.schema .schema
.register(filter_types_map_helper.generate_filter_input(&SUBSCRIBER_ID_FILTER_INFO)); .register(FilterTypesMapHelper::generate_filter_input(
&SUBSCRIBER_ID_FILTER_INFO,
));
} }
{ builder = register_entity_default_readonly!(builder, subscribers);
builder.register_entity::<subscribers::Entity>(
<subscribers::RelatedEntity as sea_orm::Iterable>::iter()
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
}
builder builder
} }

View File

@@ -1,7 +1,11 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{ use crate::{
graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_bangumi, graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::subscription_bangumi,
}; };
pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) { pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
@@ -14,7 +18,7 @@ pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderCont
pub fn register_subscription_bangumi_to_schema_builder( pub fn register_subscription_bangumi_to_schema_builder(
mut builder: SeaographyBuilder, mut builder: SeaographyBuilder,
) -> SeaographyBuilder { ) -> SeaographyBuilder {
seaography::register_entity!(builder, subscription_bangumi); builder = register_entity_default_writable!(builder, subscription_bangumi, false);
builder builder
} }

View File

@@ -1,7 +1,11 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext}; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{ use crate::{
graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_episode, graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::register_entity_default_writable,
},
models::subscription_episode,
}; };
pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) { pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
@@ -14,7 +18,7 @@ pub fn register_subscription_episode_to_schema_context(context: &mut BuilderCont
pub fn register_subscription_episode_to_schema_builder( pub fn register_subscription_episode_to_schema_builder(
mut builder: SeaographyBuilder, mut builder: SeaographyBuilder,
) -> SeaographyBuilder { ) -> SeaographyBuilder {
seaography::register_entity!(builder, subscription_episode); builder = register_entity_default_writable!(builder, subscription_episode, false);
builder builder
} }

View File

@@ -1,23 +1,11 @@
use std::sync::Arc; use seaography::{Builder as SeaographyBuilder, BuilderContext};
use async_graphql::dynamic::{FieldValue, TypeRef};
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
use seaography::{Builder as SeaographyBuilder, BuilderContext, get_filter_conditions};
use crate::{ use crate::{
errors::RecorderError,
graphql::{ graphql::{
domains::subscribers::restrict_subscriber_for_entity, domains::subscribers::restrict_subscriber_for_entity,
infra::{ infra::custom::register_entity_default_writable,
custom::generate_entity_filtered_mutation_field,
name::{get_entity_basic_type_name, get_entity_custom_mutation_field_name},
},
},
models::{subscriber_tasks, subscriptions},
task::{
SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
SyncOneSubscriptionSourcesTask,
}, },
models::subscriptions,
}; };
pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) { pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) {
@@ -31,181 +19,6 @@ pub fn register_subscriptions_to_schema_builder(
mut builder: SeaographyBuilder, mut builder: SeaographyBuilder,
) -> SeaographyBuilder { ) -> SeaographyBuilder {
builder.register_enumeration::<subscriptions::SubscriptionCategory>(); builder.register_enumeration::<subscriptions::SubscriptionCategory>();
seaography::register_entity!(builder, subscriptions); builder = register_entity_default_writable!(builder, subscriptions, false);
let context = builder.context;
{
let sync_one_feeds_incremental_mutation_name = get_entity_custom_mutation_field_name::<
subscriptions::Entity,
>(context, "SyncOneFeedsIncremental");
let sync_one_feeds_incremental_mutation = generate_entity_filtered_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_incremental_mutation_name,
TypeRef::named_nn(get_entity_basic_type_name::<subscriber_tasks::Entity>(
context,
)),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriptions::Entity>()
})?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
SyncOneSubscriptionFeedsIncrementalTask::builder()
.subscriber_id(subscription_model.subscriber_id)
.subscription_id(subscription_model.id)
.build()
.into(),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_feeds_incremental_mutation);
}
{
let sync_one_feeds_full_mutation_name = get_entity_custom_mutation_field_name::<
subscriptions::Entity,
>(builder.context, "SyncOneFeedsFull");
let sync_one_feeds_full_mutation = generate_entity_filtered_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_full_mutation_name,
TypeRef::named_nn(get_entity_basic_type_name::<subscriber_tasks::Entity>(
context,
)),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriptions::Entity>()
})?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
SyncOneSubscriptionFeedsFullTask::builder()
.subscriber_id(subscription_model.subscriber_id)
.subscription_id(subscription_model.id)
.build()
.into(),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_feeds_full_mutation);
}
{
let sync_one_sources_mutation_name = get_entity_custom_mutation_field_name::<
subscriptions::Entity,
>(context, "SyncOneSources");
let sync_one_sources_mutation = generate_entity_filtered_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_sources_mutation_name,
TypeRef::named_nn(get_entity_basic_type_name::<subscriber_tasks::Entity>(
context,
)),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriptions::Entity>()
})?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
SyncOneSubscriptionSourcesTask::builder()
.subscriber_id(subscription_model.subscriber_id)
.subscription_id(subscription_model.id)
.build()
.into(),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| {
RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_sources_mutation);
}
builder builder
} }

View File

@@ -0,0 +1,258 @@
use std::{ops::Deref, sync::Arc};
use async_graphql::dynamic::{FieldValue, Scalar, TypeRef};
use convert_case::Case;
use sea_orm::{
ActiveModelBehavior, ColumnTrait, ConnectionTrait, EntityTrait, Iterable, QueryFilter,
QuerySelect, QueryTrait, prelude::Expr, sea_query::Query,
};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, GuardAction, SeaographyError,
prepare_active_model,
};
use ts_rs::TS;
use crate::{
auth::AuthUserInfo,
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::{
custom::{
generate_entity_create_one_mutation_field,
generate_entity_default_basic_entity_object,
generate_entity_default_insert_input_object, generate_entity_delete_mutation_field,
generate_entity_filtered_mutation_field, register_entity_default_readonly,
},
json::{convert_jsonb_output_for_entity, restrict_jsonb_filter_input_for_entity},
name::{
get_entity_and_column_name, get_entity_basic_type_name,
get_entity_custom_mutation_field_name,
},
},
},
migrations::defs::{ApalisJobs, ApalisSchema},
models::system_tasks,
task::SystemTaskTrait,
};
/// Hide every `system_tasks` column from the generated insert-input object
/// except the task payload (`Job`) and the owner (`SubscriberId`), which are
/// the only fields a caller may supply.
fn skip_columns_for_entity_input(context: &mut BuilderContext) {
    let skipped_columns = system_tasks::Column::iter().filter(|column| {
        !matches!(
            column,
            system_tasks::Column::Job | system_tasks::Column::SubscriberId
        )
    });
    for column in skipped_columns {
        // Resolve the `entity.column` key and register it as skipped.
        let skip_key = get_entity_and_column_name::<system_tasks::Entity>(context, &column);
        context.entity_input.insert_skips.push(skip_key);
    }
}
/// Wire up schema-level handling for the JSONB task-payload column of a
/// system-task-like entity `T`:
/// - restricts the JSONB filter input and camel-cases JSONB output keys,
/// - blocks subscribers from writing the field directly via a field guard,
/// - overrides the GraphQL input/output types with the `SystemTask` scalar,
/// - installs an input conversion that deserializes the payload, stamps it
///   with the authenticated subscriber id, and stores it as a JSON value,
/// - skips the column in update input (the payload is not editable).
pub fn restrict_system_tasks_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    restrict_jsonb_filter_input_for_entity::<T>(context, column);
    convert_jsonb_output_for_entity::<T>(context, column, Some(Case::Camel));
    // The original computed this key twice (`entity_and_column` before the
    // JSONB helpers and `entity_column_name` after); both lookups resolve the
    // same `entity.column` name, so do it once and reuse it everywhere.
    // NOTE(review): assumes the JSONB helpers above do not change entity or
    // column naming in `context` — confirm if that ever becomes configurable.
    let entity_column_name = get_entity_and_column_name::<T>(context, column);
    // Reject any attempt by subscribers to set this field through mutations.
    context.guards.field_guards.insert(
        entity_column_name.clone(),
        Box::new(|_resolver_ctx| {
            GuardAction::Block(Some(
                "SystemTask can not be created by subscribers now".to_string(),
            ))
        }),
    );
    // Expose the column as the ts-rs-generated `SystemTask` scalar on both
    // the input and output sides of the schema.
    context.types.input_type_overwrites.insert(
        entity_column_name.clone(),
        TypeRef::Named(system_tasks::SystemTask::ident().into()),
    );
    context.types.output_type_overwrites.insert(
        entity_column_name.clone(),
        TypeRef::Named(system_tasks::SystemTask::ident().into()),
    );
    // Convert incoming GraphQL input into a database JSON value, forcing the
    // subscriber id from the authenticated user rather than trusting input.
    context.types.input_conversions.insert(entity_column_name.clone(), {
        let entity_column_name = entity_column_name.clone();
        Box::new(move |resolve_context, value_accessor| {
            let task: system_tasks::SystemTaskInput = value_accessor.deserialize()?;
            let subscriber_id = resolve_context
                .data::<AuthUserInfo>()?
                .subscriber_auth
                .subscriber_id;
            let task = system_tasks::SystemTask::from_input(task, Some(subscriber_id));
            let json_value = serde_json::to_value(task).map_err(|err| {
                SeaographyError::TypeConversionError(
                    err.to_string(),
                    format!("Json - {entity_column_name}"),
                )
            })?;
            Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
        })
    });
    // The payload cannot be modified after creation.
    context.entity_input.update_skips.push(entity_column_name);
}
/// Register all `system_tasks` customizations on the shared [`BuilderContext`]:
/// subscriber scoping, JSONB payload handling, and insert-input column skips.
pub fn register_system_tasks_to_schema_context(context: &mut BuilderContext) {
    // Scope every system-task query/mutation to the owning subscriber.
    restrict_subscriber_for_entity::<system_tasks::Entity>(
        context,
        &system_tasks::Column::SubscriberId,
    );
    // Special-case the JSONB `job` payload column (guards, scalar type, input
    // conversion, update skip).
    restrict_system_tasks_for_entity::<system_tasks::Entity>(context, &system_tasks::Column::Job);
    // Only `job` and `subscriber_id` remain accepted on insert input.
    skip_columns_for_entity_input(context);
}
/// Register the `system_tasks` entity with the Seaography schema builder:
/// the `SystemTask` scalar and enums, readonly queries, and three custom
/// mutations (delete, retry-one, create-one).
pub fn register_system_tasks_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    // Register the ts-rs-backed `SystemTask` scalar; its TS declaration is
    // used as the GraphQL description.
    builder.schema = builder.schema.register(
        Scalar::new(system_tasks::SystemTask::ident())
            .description(system_tasks::SystemTask::decl()),
    );
    builder.register_enumeration::<system_tasks::SystemTaskType>();
    builder.register_enumeration::<system_tasks::SystemTaskStatus>();
    // Default read-only query surface; mutations below are all hand-written.
    builder = register_entity_default_readonly!(builder, system_tasks);
    let builder_context = builder.context;
    {
        // Basic output object for the entity.
        builder
            .outputs
            .push(generate_entity_default_basic_entity_object::<
                system_tasks::Entity,
            >(builder_context));
    }
    {
        // Delete mutation: tasks live in the apalis jobs table, so deletion is
        // issued as a raw DELETE against `apalis.jobs` with the filtered ids
        // as a subquery, rather than through the entity's own delete.
        let delete_mutation = generate_entity_delete_mutation_field::<system_tasks::Entity>(
            builder_context,
            Arc::new(|_resolver_ctx, app_ctx, filters| {
                Box::pin(async move {
                    let db = app_ctx.db();
                    // Subquery selecting the ids that match the GraphQL filter.
                    let select_subquery = system_tasks::Entity::find()
                        .select_only()
                        .column(system_tasks::Column::Id)
                        .filter(filters);
                    let delete_query = Query::delete()
                        .from_table((ApalisSchema::Schema, ApalisJobs::Table))
                        .and_where(
                            Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()),
                        )
                        .to_owned();
                    let db_backend = db.deref().get_database_backend();
                    let delete_statement = db_backend.build(&delete_query);
                    let result = db.execute(delete_statement).await?;
                    // Return the number of deleted rows.
                    Ok::<_, RecorderError>(result.rows_affected())
                })
            }),
        );
        builder.mutations.push(delete_mutation);
    }
    {
        // Retry mutation: find one matching task id, ask the task service to
        // retry it, then return the (refreshed) task row.
        let entity_retry_one_mutation_name = get_entity_custom_mutation_field_name::<
            system_tasks::Entity,
        >(builder_context, "RetryOne");
        let retry_one_mutation =
            generate_entity_filtered_mutation_field::<system_tasks::Entity, _, _>(
                builder_context,
                entity_retry_one_mutation_name,
                TypeRef::named_nn(get_entity_basic_type_name::<system_tasks::Entity>(
                    builder_context,
                )),
                Arc::new(|_resolver_ctx, app_ctx, filters| {
                    Box::pin(async move {
                        let db = app_ctx.db();
                        let job_id = system_tasks::Entity::find()
                            .filter(filters)
                            .select_only()
                            .column(system_tasks::Column::Id)
                            .into_tuple::<String>()
                            .one(db)
                            .await?
                            .ok_or_else(|| {
                                RecorderError::from_entity_not_found::<system_tasks::Entity>()
                            })?;
                        let task = app_ctx.task();
                        // NOTE(review): this is the *system*-task module, but the
                        // retry goes through `retry_subscriber_task`. Confirm this
                        // is intentional and not a copy-paste from the subscriber
                        // tasks module (a `retry_system_task` counterpart may be
                        // intended).
                        task.retry_subscriber_task(job_id.clone()).await?;
                        // Re-fetch so the response reflects post-retry state.
                        let task_model = system_tasks::Entity::find()
                            .filter(system_tasks::Column::Id.eq(&job_id))
                            .one(db)
                            .await?
                            .ok_or_else(|| {
                                RecorderError::from_entity_not_found::<system_tasks::Entity>()
                            })?;
                        Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
                    })
                }),
            );
        builder.mutations.push(retry_one_mutation);
    }
    {
        // Create mutation: build the active model from GraphQL input, but
        // enqueue through the task service instead of inserting via the ORM.
        builder
            .inputs
            .push(generate_entity_default_insert_input_object::<
                system_tasks::Entity,
            >(builder_context));
        let create_one_mutation = generate_entity_create_one_mutation_field::<system_tasks::Entity>(
            builder_context,
            Arc::new(move |resolver_ctx, app_ctx, input_object| {
                Box::pin(async move {
                    let active_model: Result<system_tasks::ActiveModel, _> =
                        prepare_active_model(builder_context, &input_object, resolver_ctx);
                    let task_service = app_ctx.task();
                    let active_model = active_model?;
                    let db = app_ctx.db();
                    // `before_save` is invoked manually; the row itself is
                    // created by `add_system_task` below, not by an ORM insert.
                    let active_model = active_model.before_save(db, true).await?;
                    // NOTE(review): `unwrap()` panics if `job`/`subscriber_id`
                    // were not set — presumably guaranteed by the insert input
                    // object (only these two columns are accepted); confirm.
                    let task = active_model.job.unwrap();
                    let subscriber_id = active_model.subscriber_id.unwrap();
                    // The payload embeds its own subscriber id; it must match
                    // the row's subscriber id.
                    if task.get_subscriber_id() != subscriber_id {
                        Err(async_graphql::Error::new(
                            "subscriber_id does not match with job.subscriber_id",
                        ))?;
                    }
                    // Enqueue the task; the service returns the new task id.
                    let task_id = task_service.add_system_task(task).await?.to_string();
                    let db = app_ctx.db();
                    // Fetch the persisted row to return to the caller.
                    let task = system_tasks::Entity::find()
                        .filter(system_tasks::Column::Id.eq(&task_id))
                        .one(db)
                        .await?
                        .ok_or_else(|| {
                            RecorderError::from_entity_not_found::<system_tasks::Entity>()
                        })?;
                    Ok::<_, RecorderError>(task)
                })
            }),
        );
        builder.mutations.push(create_one_mutation);
    }
    builder
}

View File

@@ -1,29 +1,23 @@
use std::{pin::Pin, sync::Arc}; use std::{iter::FusedIterator, pin::Pin, sync::Arc};
use async_graphql::dynamic::{ use async_graphql::dynamic::{
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, ObjectAccessor, Field, FieldFuture, FieldValue, InputObject, InputValue, Object, ObjectAccessor,
ResolverContext, TypeRef, ValueAccessor, ResolverContext, TypeRef,
};
use sea_orm::{
ActiveModelTrait, Condition, EntityTrait, IntoActiveModel, QueryFilter, TransactionTrait,
}; };
use sea_orm::{ActiveModelTrait, Condition, EntityTrait, IntoActiveModel};
use seaography::{ use seaography::{
BuilderContext, GuardAction, SeaographyError, get_filter_conditions, prepare_active_model, Builder as SeaographyBuilder, BuilderContext, EntityCreateBatchMutationBuilder,
EntityCreateOneMutationBuilder, EntityDeleteMutationBuilder, EntityInputBuilder,
EntityObjectBuilder, EntityUpdateMutationBuilder, GuardAction, RelationBuilder,
get_filter_conditions,
}; };
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::RecorderResult, errors::RecorderResult,
graphql::infra::name::{ graphql::infra::name::{
get_entity_and_column_name_from_column_str, get_entity_basic_type_name, get_entity_filter_input_type_name, get_entity_name,
get_entity_create_batch_mutation_data_field_name, get_entity_renormalized_filter_field_name,
get_entity_create_batch_mutation_field_name,
get_entity_create_one_mutation_data_field_name, get_entity_create_one_mutation_field_name,
get_entity_delete_mutation_field_name, get_entity_delete_mutation_filter_field_name,
get_entity_filter_input_type_name, get_entity_insert_data_input_type_name, get_entity_name,
get_entity_renormalized_filter_field_name, get_entity_update_data_input_type_name,
get_entity_update_mutation_data_field_name, get_entity_update_mutation_field_name,
get_entity_update_mutation_filter_field_name,
}, },
}; };
@@ -31,7 +25,7 @@ pub type FilterMutationFn = Arc<
dyn for<'a> Fn( dyn for<'a> Fn(
&ResolverContext<'a>, &ResolverContext<'a>,
Arc<dyn AppContextTrait>, Arc<dyn AppContextTrait>,
Option<ValueAccessor<'_>>, Condition,
) -> Pin< ) -> Pin<
Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>, Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>,
> + Send > + Send
@@ -40,9 +34,9 @@ pub type FilterMutationFn = Arc<
pub type CreateOneMutationFn<M> = Arc< pub type CreateOneMutationFn<M> = Arc<
dyn for<'a> Fn( dyn for<'a> Fn(
&ResolverContext<'a>, &'a ResolverContext<'a>,
Arc<dyn AppContextTrait>, Arc<dyn AppContextTrait>,
ObjectAccessor<'_>, ObjectAccessor<'a>,
) -> Pin<Box<dyn Future<Output = RecorderResult<M>> + Send + 'a>> ) -> Pin<Box<dyn Future<Output = RecorderResult<M>> + Send + 'a>>
+ Send + Send
+ Sync, + Sync,
@@ -50,9 +44,9 @@ pub type CreateOneMutationFn<M> = Arc<
pub type CreateBatchMutationFn<M> = Arc< pub type CreateBatchMutationFn<M> = Arc<
dyn for<'a> Fn( dyn for<'a> Fn(
&ResolverContext<'a>, &'a ResolverContext<'a>,
Arc<dyn AppContextTrait>, Arc<dyn AppContextTrait>,
Vec<ObjectAccessor<'_>>, Vec<ObjectAccessor<'a>>,
) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>> ) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>>
+ Send + Send
+ Sync, + Sync,
@@ -60,10 +54,10 @@ pub type CreateBatchMutationFn<M> = Arc<
pub type UpdateMutationFn<M> = Arc< pub type UpdateMutationFn<M> = Arc<
dyn for<'a> Fn( dyn for<'a> Fn(
&ResolverContext<'a>, &'a ResolverContext<'a>,
Arc<dyn AppContextTrait>, Arc<dyn AppContextTrait>,
Condition, Condition,
ObjectAccessor<'_>, ObjectAccessor<'a>,
) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>> ) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>>
+ Send + Send
+ Sync, + Sync,
@@ -79,48 +73,46 @@ pub type DeleteMutationFn = Arc<
+ Sync, + Sync,
>; >;
pub fn generate_entity_default_insert_input_object<T>( pub fn generate_entity_default_insert_input_object<T>(context: &BuilderContext) -> InputObject
builder_context: &'static BuilderContext,
) -> InputObject
where where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
{ {
let entity_input_builder = seaography::EntityInputBuilder { EntityInputBuilder::insert_input_object::<T>(context)
context: builder_context,
};
entity_input_builder.insert_input_object::<T>()
} }
pub fn generate_entity_default_update_input_object<T>( pub fn generate_entity_default_update_input_object<T>(context: &BuilderContext) -> InputObject
builder_context: &'static BuilderContext,
) -> InputObject
where where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
{ {
let entity_input_builder = seaography::EntityInputBuilder { EntityInputBuilder::update_input_object::<T>(context)
context: builder_context,
};
entity_input_builder.update_input_object::<T>()
} }
pub fn generate_entity_default_basic_entity_object<T>( pub fn generate_entity_default_basic_entity_object<T>(context: &'static BuilderContext) -> Object
builder_context: &'static BuilderContext,
) -> Object
where where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
{ {
let entity_object_builder = seaography::EntityObjectBuilder { let entity_object_builder = EntityObjectBuilder { context };
context: builder_context,
};
entity_object_builder.basic_to_object::<T>() entity_object_builder.basic_to_object::<T>()
} }
pub fn generate_entity_input_object<T>(
context: &'static BuilderContext,
is_insert: bool,
) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
if is_insert {
EntityInputBuilder::insert_input_object::<T>(context)
} else {
EntityInputBuilder::update_input_object::<T>(context)
}
}
pub fn generate_entity_filtered_mutation_field<E, N, R>( pub fn generate_entity_filtered_mutation_field<E, N, R>(
builder_context: &'static BuilderContext, builder_context: &'static BuilderContext,
field_name: N, field_name: N,
@@ -137,11 +129,12 @@ where
let guard = builder_context.guards.entity_guards.get(&object_name); let guard = builder_context.guards.entity_guards.get(&object_name);
Field::new(field_name, type_ref, move |ctx| { Field::new(field_name, type_ref, move |resolve_context| {
let mutation_fn = mutation_fn.clone(); let mutation_fn = mutation_fn.clone();
FieldFuture::new(async move { FieldFuture::new(async move {
let guard_flag = if let Some(guard) = guard { let guard_flag = if let Some(guard) = guard {
(*guard)(&ctx) (*guard)(&resolve_context)
} else { } else {
GuardAction::Allow GuardAction::Allow
}; };
@@ -152,13 +145,15 @@ where
)); ));
} }
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?; let filters = resolve_context
.args
.get(get_entity_renormalized_filter_field_name());
let filters = ctx.args.get(get_entity_renormalized_filter_field_name()); let filters = get_filter_conditions::<E>(&resolve_context, builder_context, filters);
let result = mutation_fn(&ctx, app_ctx.clone(), filters) let app_ctx = resolve_context.data::<Arc<dyn AppContextTrait>>()?;
.await
.map_err(async_graphql::Error::new_with_source)?; let result = mutation_fn(&resolve_context, app_ctx.clone(), filters).await?;
Ok(result) Ok(result)
}) })
@@ -169,314 +164,71 @@ where
)) ))
} }
pub fn generate_entity_create_one_mutation_field<E, ID>( pub fn generate_entity_create_one_mutation_field<E>(
builder_context: &'static BuilderContext, builder_context: &'static BuilderContext,
input_data_type_ref: Option<ID>,
mutation_fn: CreateOneMutationFn<E::Model>, mutation_fn: CreateOneMutationFn<E::Model>,
) -> Field ) -> Field
where where
E: EntityTrait, E: EntityTrait,
<E as EntityTrait>::Model: Sync, <E as EntityTrait>::Model: Sync,
ID: Into<TypeRef>,
{ {
let guard = builder_context let entity_create_one_mutation_builder = EntityCreateOneMutationBuilder {
.guards context: builder_context,
.entity_guards };
.get(&get_entity_name::<E>(builder_context)); entity_create_one_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
let field_guards = &builder_context.guards.field_guards; move |resolver_ctx, input_object| {
Field::new(
get_entity_create_one_mutation_field_name::<E>(builder_context),
TypeRef::named_nn(get_entity_basic_type_name::<E>(builder_context)),
move |ctx| {
let mutation_fn = mutation_fn.clone(); let mutation_fn = mutation_fn.clone();
FieldFuture::new(async move {
let guard_flag = if let Some(guard) = guard {
(*guard)(&ctx)
} else {
GuardAction::Allow
};
if let GuardAction::Block(reason) = guard_flag { Box::pin(async move {
return Err::<Option<_>, async_graphql::Error>(async_graphql::Error::new( let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
reason.unwrap_or("Entity guard triggered.".into()),
));
}
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?; let result = mutation_fn(resolver_ctx, app_ctx.clone(), input_object).await?;
let value_accessor = ctx Ok(result)
.args
.get(get_entity_create_one_mutation_data_field_name(
builder_context,
))
.unwrap();
let input_object = value_accessor.object()?;
for (column, _) in input_object.iter() {
let field_guard = field_guards.get(
&get_entity_and_column_name_from_column_str::<E>(builder_context, column),
);
let field_guard_flag = if let Some(field_guard) = field_guard {
(*field_guard)(&ctx)
} else {
GuardAction::Allow
};
if let GuardAction::Block(reason) = field_guard_flag {
return match reason {
Some(reason) => Err::<Option<_>, async_graphql::Error>(
async_graphql::Error::new(reason),
),
None => Err::<Option<_>, async_graphql::Error>(
async_graphql::Error::new("Field guard triggered."),
),
};
}
}
let result = mutation_fn(&ctx, app_ctx.clone(), input_object)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(Some(FieldValue::owned_any(result)))
}) })
}, },
)
.argument(InputValue::new(
get_entity_create_one_mutation_data_field_name(builder_context),
input_data_type_ref.map(|t| t.into()).unwrap_or_else(|| {
TypeRef::named_nn(get_entity_insert_data_input_type_name::<E>(builder_context))
}),
)) ))
} }
pub fn generate_entity_default_create_one_mutation_fn<T, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> CreateOneMutationFn<T::Model>
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = T> + sea_orm::ActiveModelBehavior + std::marker::Send + 'static,
{
Arc::new(move |resolve_context, app_ctx, input_object| {
let entity_input_builder = seaography::EntityInputBuilder {
context: builder_context,
};
let entity_object_builder = seaography::EntityObjectBuilder {
context: builder_context,
};
let active_model = prepare_active_model::<T, A>(
&entity_input_builder,
&entity_object_builder,
&input_object,
resolve_context,
)
.map_err(SeaographyError::AsyncGraphQLError);
Box::pin(async move {
if active_model_hooks {
let transaction = app_ctx.db().begin().await?;
let active_model = active_model?;
let active_model = active_model.before_save(&transaction, true).await?;
let result: T::Model = active_model.insert(&transaction).await?;
let result = A::after_save(result, &transaction, true).await?;
transaction.commit().await?;
Ok(result)
} else {
let db = app_ctx.db();
let active_model = active_model?;
let result: T::Model = active_model.insert(db).await?;
Ok(result)
}
})
})
}
pub fn generate_entity_default_create_one_mutation_field<E, A>( pub fn generate_entity_default_create_one_mutation_field<E, A>(
builder_context: &'static BuilderContext, builder_context: &'static BuilderContext,
active_model_hooks: bool, active_model_hooks: bool,
) -> Field ) -> Field
where where
E: EntityTrait, E: EntityTrait,
<E as EntityTrait>::Model: Sync, <E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
<E as EntityTrait>::Model: IntoActiveModel<A>, A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send + 'static,
{ {
generate_entity_create_one_mutation_field::<E, TypeRef>( let entity_create_one_mutation_builder = EntityCreateOneMutationBuilder {
builder_context, context: builder_context,
None, };
generate_entity_default_create_one_mutation_fn::<E, A>(builder_context, active_model_hooks), entity_create_one_mutation_builder.to_field::<E, A>(active_model_hooks)
)
} }
pub fn generate_entity_create_batch_mutation_field<E, ID>( pub fn generate_entity_create_batch_mutation_field<E, ID>(
builder_context: &'static BuilderContext, builder_context: &'static BuilderContext,
input_data_type_ref: Option<ID>,
mutation_fn: CreateBatchMutationFn<E::Model>, mutation_fn: CreateBatchMutationFn<E::Model>,
) -> Field ) -> Field
where where
E: EntityTrait, E: EntityTrait,
<E as EntityTrait>::Model: Sync, <E as EntityTrait>::Model: Sync,
ID: Into<TypeRef>,
{ {
let object_name: String = get_entity_name::<E>(builder_context); let entity_create_batch_mutation_builder = EntityCreateBatchMutationBuilder {
let guard = builder_context.guards.entity_guards.get(&object_name); context: builder_context,
let field_guards = &builder_context.guards.field_guards; };
entity_create_batch_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
Field::new( move |resolver_ctx, input_objects| {
get_entity_create_batch_mutation_field_name::<E>(builder_context),
TypeRef::named_nn_list_nn(get_entity_basic_type_name::<E>(builder_context)),
move |ctx| {
let mutation_fn = mutation_fn.clone(); let mutation_fn = mutation_fn.clone();
FieldFuture::new(async move {
let guard_flag = if let Some(guard) = guard {
(*guard)(&ctx)
} else {
GuardAction::Allow
};
if let GuardAction::Block(reason) = guard_flag { Box::pin(async move {
return match reason { let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
Some(reason) => Err::<Option<_>, async_graphql::Error>(
async_graphql::Error::new(reason),
),
None => Err::<Option<_>, async_graphql::Error>(async_graphql::Error::new(
"Entity guard triggered.",
)),
};
}
let mut input_objects: Vec<ObjectAccessor<'_>> = vec![]; let result = mutation_fn(resolver_ctx, app_ctx.clone(), input_objects).await?;
let list = ctx
.args
.get(get_entity_create_batch_mutation_data_field_name(
builder_context,
))
.unwrap()
.list()?;
for input in list.iter() {
let input_object = input.object()?;
for (column, _) in input_object.iter() {
let field_guard =
field_guards.get(&get_entity_and_column_name_from_column_str::<E>(
builder_context,
column,
));
let field_guard_flag = if let Some(field_guard) = field_guard {
(*field_guard)(&ctx)
} else {
GuardAction::Allow
};
if let GuardAction::Block(reason) = field_guard_flag {
return match reason {
Some(reason) => Err::<Option<_>, async_graphql::Error>(
async_graphql::Error::new(reason),
),
None => Err::<Option<_>, async_graphql::Error>(
async_graphql::Error::new("Field guard triggered."),
),
};
}
}
input_objects.push(input_object);
}
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let results = mutation_fn(&ctx, app_ctx.clone(), input_objects)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(Some(FieldValue::list(
results.into_iter().map(FieldValue::owned_any),
)))
})
},
)
.argument(InputValue::new(
get_entity_create_batch_mutation_data_field_name(builder_context),
input_data_type_ref.map(|t| t.into()).unwrap_or_else(|| {
TypeRef::named_nn_list_nn(get_entity_insert_data_input_type_name::<E>(builder_context))
}),
))
}
pub fn generate_entity_default_create_batch_mutation_fn<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> CreateBatchMutationFn<E::Model>
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send + 'static,
{
Arc::new(move |resolve_context, app_ctx, input_objects| {
let entity_input_builder = seaography::EntityInputBuilder {
context: builder_context,
};
let entity_object_builder = seaography::EntityObjectBuilder {
context: builder_context,
};
let active_models = input_objects
.into_iter()
.map(|input_object| {
prepare_active_model::<E, A>(
&entity_input_builder,
&entity_object_builder,
&input_object,
resolve_context,
)
})
.collect::<Result<Vec<_>, _>>()
.map_err(SeaographyError::AsyncGraphQLError);
Box::pin(async move {
if active_model_hooks {
let transaction = app_ctx.db().begin().await?;
let mut before_save_models = vec![];
for active_model in active_models? {
let before_save_model = active_model.before_save(&transaction, false).await?;
before_save_models.push(before_save_model);
}
let models: Vec<E::Model> = E::insert_many(before_save_models)
.exec_with_returning_many(&transaction)
.await?;
let mut result = vec![];
for model in models {
let after_save_model = A::after_save(model, &transaction, false).await?;
result.push(after_save_model);
}
transaction.commit().await?;
Ok(result) Ok(result)
} else { })
let db = app_ctx.db(); },
let active_models = active_models?; ))
let results: Vec<E::Model> = E::insert_many(active_models)
.exec_with_returning_many(db)
.await?;
Ok(results)
}
})
})
} }
pub fn generate_entity_default_create_batch_mutation_field<E, A>( pub fn generate_entity_default_create_batch_mutation_field<E, A>(
@@ -487,177 +239,45 @@ where
E: EntityTrait, E: EntityTrait,
<E as EntityTrait>::Model: Sync, <E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>, <E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send + 'static, A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{ {
generate_entity_create_batch_mutation_field::<E, TypeRef>( let entity_create_batch_mutation_builder = EntityCreateBatchMutationBuilder {
builder_context, context: builder_context,
None, };
generate_entity_default_create_batch_mutation_fn::<E, A>( entity_create_batch_mutation_builder.to_field::<E, A>(active_model_hooks)
builder_context,
active_model_hooks,
),
)
} }
pub fn generate_entity_update_mutation_field<E, I>( pub fn generate_entity_update_mutation_field<E>(
builder_context: &'static BuilderContext, builder_context: &'static BuilderContext,
input_data_type_ref: Option<I>,
mutation_fn: UpdateMutationFn<E::Model>, mutation_fn: UpdateMutationFn<E::Model>,
) -> Field ) -> Field
where where
E: EntityTrait, E: EntityTrait,
<E as EntityTrait>::Model: Sync, <E as EntityTrait>::Model: Sync,
I: Into<TypeRef>,
{ {
let guard = builder_context let entity_update_mutation_builder = EntityUpdateMutationBuilder {
.guards context: builder_context,
.entity_guards };
.get(&get_entity_name::<E>(builder_context)); entity_update_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
let field_guards = &builder_context.guards.field_guards; move |resolver_ctx, filters, input_object| {
Field::new(
get_entity_update_mutation_field_name::<E>(builder_context),
TypeRef::named_nn_list_nn(get_entity_basic_type_name::<E>(builder_context)),
move |ctx| {
let mutation_fn = mutation_fn.clone(); let mutation_fn = mutation_fn.clone();
FieldFuture::new(async move {
let guard_flag = if let Some(guard) = guard {
(*guard)(&ctx)
} else {
GuardAction::Allow
};
if let GuardAction::Block(reason) = guard_flag {
return match reason {
Some(reason) => Err::<Option<_>, async_graphql::Error>(
async_graphql::Error::new(reason),
),
None => Err::<Option<_>, async_graphql::Error>(async_graphql::Error::new(
"Entity guard triggered.",
)),
};
}
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let filters = ctx.args.get(get_entity_update_mutation_filter_field_name(
builder_context,
));
let filter_condition = get_filter_conditions::<E>(&ctx, builder_context, filters);
let value_accessor = ctx
.args
.get(get_entity_update_mutation_data_field_name(builder_context))
.unwrap();
let input_object = value_accessor.object()?;
for (column, _) in input_object.iter() {
let field_guard = field_guards.get(
&get_entity_and_column_name_from_column_str::<E>(builder_context, column),
);
let field_guard_flag = if let Some(field_guard) = field_guard {
(*field_guard)(&ctx)
} else {
GuardAction::Allow
};
if let GuardAction::Block(reason) = field_guard_flag {
return match reason {
Some(reason) => Err::<Option<_>, async_graphql::Error>(
async_graphql::Error::new(reason),
),
None => Err::<Option<_>, async_graphql::Error>(
async_graphql::Error::new("Field guard triggered."),
),
};
}
}
let result = mutation_fn(&ctx, app_ctx.clone(), filter_condition, input_object)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(Some(FieldValue::list(
result.into_iter().map(FieldValue::owned_any),
)))
})
},
)
.argument(InputValue::new(
get_entity_update_mutation_data_field_name(builder_context),
input_data_type_ref.map(|t| t.into()).unwrap_or_else(|| {
TypeRef::named_nn(get_entity_update_data_input_type_name::<E>(builder_context))
}),
))
.argument(InputValue::new(
get_entity_update_mutation_filter_field_name(builder_context),
TypeRef::named(get_entity_filter_input_type_name::<E>(builder_context)),
))
}
pub fn generate_entity_default_update_mutation_fn<T, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> UpdateMutationFn<T::Model>
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = T> + sea_orm::ActiveModelBehavior + std::marker::Send + 'static,
{
Arc::new(
move |resolve_context, app_ctx, filter_condition, input_object| {
let entity_input_builder = seaography::EntityInputBuilder {
context: builder_context,
};
let entity_object_builder = seaography::EntityObjectBuilder {
context: builder_context,
};
let active_model = prepare_active_model::<T, A>(
&entity_input_builder,
&entity_object_builder,
&input_object,
resolve_context,
)
.map_err(SeaographyError::AsyncGraphQLError);
Box::pin(async move { Box::pin(async move {
if active_model_hooks { let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let transaction = app_ctx.db().begin().await?;
let active_model = active_model?; let result = mutation_fn(
resolver_ctx,
app_ctx.clone(),
get_filter_conditions::<E>(resolver_ctx, builder_context, filters),
input_object,
)
.await
.map_err(async_graphql::Error::new_with_source)?;
let active_model = active_model.before_save(&transaction, false).await?; Ok(result)
let models = T::update_many()
.set(active_model)
.filter(filter_condition.clone())
.exec_with_returning(&transaction)
.await?;
let mut result = vec![];
for model in models {
result.push(A::after_save(model, &transaction, false).await?);
}
transaction.commit().await?;
Ok(result)
} else {
let db = app_ctx.db();
let active_model = active_model?;
let result = T::update_many()
.set(active_model)
.filter(filter_condition.clone())
.exec_with_returning(db)
.await?;
Ok(result)
}
}) })
}, },
) ))
} }
pub fn generate_entity_default_update_mutation_field<E, A>( pub fn generate_entity_default_update_mutation_field<E, A>(
@@ -668,13 +288,12 @@ where
E: EntityTrait, E: EntityTrait,
<E as EntityTrait>::Model: Sync, <E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>, <E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send + 'static, A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{ {
generate_entity_update_mutation_field::<E, TypeRef>( let entity_update_mutation_builder = EntityUpdateMutationBuilder {
builder_context, context: builder_context,
None, };
generate_entity_default_update_mutation_fn::<E, A>(builder_context, active_model_hooks), entity_update_mutation_builder.to_field::<E, A>(active_model_hooks)
)
} }
pub fn generate_entity_delete_mutation_field<E>( pub fn generate_entity_delete_mutation_field<E>(
@@ -685,109 +304,138 @@ where
E: EntityTrait, E: EntityTrait,
<E as EntityTrait>::Model: Sync, <E as EntityTrait>::Model: Sync,
{ {
let object_name: String = get_entity_name::<E>(builder_context); let entity_delete_mutation_builder = EntityDeleteMutationBuilder {
let guard = builder_context.guards.entity_guards.get(&object_name); context: builder_context,
};
Field::new( entity_delete_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
get_entity_delete_mutation_field_name::<E>(builder_context), move |resolver_ctx, filters| {
TypeRef::named_nn(TypeRef::INT),
move |ctx| {
let mutation_fn = mutation_fn.clone(); let mutation_fn = mutation_fn.clone();
FieldFuture::new(async move {
let guard_flag = if let Some(guard) = guard {
(*guard)(&ctx)
} else {
GuardAction::Allow
};
if let GuardAction::Block(reason) = guard_flag { Box::pin(async move {
return Err::<Option<_>, async_graphql::Error>(async_graphql::Error::new( let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
reason.unwrap_or("Entity guard triggered.".into()), let result = mutation_fn(
)); resolver_ctx,
} app_ctx.clone(),
get_filter_conditions::<E>(resolver_ctx, builder_context, filters),
)
.await
.map_err(async_graphql::Error::new_with_source)?;
let filters = ctx.args.get(get_entity_delete_mutation_filter_field_name( Ok(result)
builder_context,
));
let filter_condition = get_filter_conditions::<E>(&ctx, builder_context, filters);
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let res = mutation_fn(&ctx, app_ctx.clone(), filter_condition)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(Some(async_graphql::Value::from(res)))
}) })
}, },
)
.argument(InputValue::new(
get_entity_delete_mutation_filter_field_name(builder_context),
TypeRef::named(get_entity_filter_input_type_name::<E>(builder_context)),
)) ))
} }
pub fn generate_entity_default_delete_mutation_fn<E, A>(
_builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> DeleteMutationFn
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send + 'static,
{
Arc::new(move |_resolve_context, app_ctx, filter_condition| {
Box::pin(async move {
if active_model_hooks {
let transaction = app_ctx.db().begin().await?;
let models: Vec<E::Model> = E::find()
.filter(filter_condition.clone())
.all(&transaction)
.await?;
let mut active_models: Vec<A> = vec![];
for model in models {
let active_model = model.into_active_model();
active_models.push(active_model.before_delete(&transaction).await?);
}
let result = E::delete_many()
.filter(filter_condition)
.exec(&transaction)
.await?;
for active_model in active_models {
active_model.after_delete(&transaction).await?;
}
transaction.commit().await?;
Ok(result.rows_affected)
} else {
let db = app_ctx.db();
let result = E::delete_many().filter(filter_condition).exec(db).await?;
Ok(result.rows_affected)
}
})
})
}
pub fn generate_entity_default_delete_mutation_field<E, A>( pub fn generate_entity_default_delete_mutation_field<E, A>(
builder_context: &'static BuilderContext, builder_context: &'static BuilderContext,
active_model_hooks: bool, active_model_hooks: bool,
) -> Field ) -> Field
where where
E: EntityTrait, E: EntityTrait,
<E as EntityTrait>::Model: Sync, <E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
<E as EntityTrait>::Model: IntoActiveModel<A>, A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send + 'static,
{ {
generate_entity_delete_mutation_field::<E>( let entity_delete_mutation_builder = EntityDeleteMutationBuilder {
builder_context, context: builder_context,
generate_entity_default_delete_mutation_fn::<E, A>(builder_context, active_model_hooks), };
) entity_delete_mutation_builder.to_field::<E, A>(active_model_hooks)
} }
pub fn register_entity_default_mutations<E, A>(
mut builder: SeaographyBuilder,
active_model_hooks: bool,
) -> SeaographyBuilder
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let builder_context = builder.context;
builder
.outputs
.push(generate_entity_default_basic_entity_object::<E>(
builder_context,
));
builder.inputs.extend([
generate_entity_default_insert_input_object::<E>(builder_context),
generate_entity_default_update_input_object::<E>(builder_context),
]);
builder.mutations.extend([
generate_entity_default_create_one_mutation_field::<E, A>(
builder_context,
active_model_hooks,
),
generate_entity_default_create_batch_mutation_field::<E, A>(
builder_context,
active_model_hooks,
),
generate_entity_default_update_mutation_field::<E, A>(builder_context, active_model_hooks),
generate_entity_default_delete_mutation_field::<E, A>(builder_context, active_model_hooks),
]);
builder
}
pub(crate) fn register_entity_default_readonly_impl<T, RE, I>(
mut builder: SeaographyBuilder,
entity: T,
) -> SeaographyBuilder
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
RE: sea_orm::Iterable<Iterator = I> + RelationBuilder,
I: Iterator<Item = RE> + Clone + DoubleEndedIterator + ExactSizeIterator + FusedIterator,
{
builder.register_entity::<T>(
<RE as sea_orm::Iterable>::iter()
.map(|rel| RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(entity, tokio::spawn);
builder = builder.register_entity_dataloader_one_to_many(entity, tokio::spawn);
builder
}
pub(crate) fn register_entity_default_writable_impl<T, RE, A, I>(
mut builder: SeaographyBuilder,
entity: T,
active_model_hooks: bool,
) -> SeaographyBuilder
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = T> + sea_orm::ActiveModelBehavior + std::marker::Send,
RE: sea_orm::Iterable<Iterator = I> + RelationBuilder,
I: Iterator<Item = RE> + Clone + DoubleEndedIterator + ExactSizeIterator + FusedIterator,
{
builder = register_entity_default_readonly_impl::<T, RE, I>(builder, entity);
builder = register_entity_default_mutations::<T, A>(builder, active_model_hooks);
builder
}
macro_rules! register_entity_default_readonly {
($builder:expr, $module_path:ident) => {
$crate::graphql::infra::custom::register_entity_default_readonly_impl::<
$module_path::Entity,
$module_path::RelatedEntity,
_,
>($builder, $module_path::Entity)
};
}
macro_rules! register_entity_default_writable {
($builder:expr, $module_path:ident, $active_model_hooks:expr) => {
$crate::graphql::infra::custom::register_entity_default_writable_impl::<
$module_path::Entity,
$module_path::RelatedEntity,
$module_path::ActiveModel,
_,
>($builder, $module_path::Entity, $active_model_hooks)
};
}
pub(crate) use register_entity_default_readonly;
pub(crate) use register_entity_default_writable;

View File

@@ -911,18 +911,15 @@ where
Box::new( Box::new(
move |_resolve_context: &ResolverContext<'_>, condition, filter| { move |_resolve_context: &ResolverContext<'_>, condition, filter| {
if let Some(filter) = filter { if let Some(filter) = filter {
let filter_value = to_value(filter.as_index_map()).map_err(|e| { let filter_value =
SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e)) to_value(filter.as_index_map()).map_err(GraphqlError::new_with_source)?;
})?;
let filter_json: JsonValue = filter_value.into_json().map_err(|e| { let filter_json: JsonValue = filter_value
SeaographyError::AsyncGraphQLError(GraphqlError::new(format!("{e:?}"))) .into_json()
})?; .map_err(GraphqlError::new_with_source)?;
let cond_where = prepare_jsonb_filter_input(&Expr::col(column), filter_json) let cond_where = prepare_jsonb_filter_input(&Expr::col(column), filter_json)
.map_err(|e| { .map_err(GraphqlError::new_with_source)?;
SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e))
})?;
let condition = condition.add(cond_where); let condition = condition.add(cond_where);
Ok(condition) Ok(condition)
@@ -946,14 +943,22 @@ where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
{ {
let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.filter_types.overwrites.insert( context.filter_types.overwrites.insert(
get_entity_and_column_name::<T>(context, column), get_entity_and_column_name::<T>(context, column),
Some(FilterType::Custom(JSONB_FILTER_NAME.to_string())), Some(FilterType::Custom(JSONB_FILTER_NAME.to_string())),
); );
context.filter_types.condition_functions.insert(
entity_column_name.clone(),
generate_jsonb_filter_condition_function::<T>(context, column),
);
} }
pub fn validate_jsonb_input_for_entity<T, S>(context: &mut BuilderContext, column: &T::Column) pub fn try_convert_jsonb_input_for_entity<T, S>(
where context: &mut BuilderContext,
column: &T::Column,
case: Option<Case<'static>>,
) where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
S: DeserializeOwned + Serialize, S: DeserializeOwned + Serialize,
@@ -962,49 +967,52 @@ where
context.types.input_conversions.insert( context.types.input_conversions.insert(
entity_column_name.clone(), entity_column_name.clone(),
Box::new(move |_resolve_context, accessor| { Box::new(move |_resolve_context, accessor| {
let deserialized = accessor.deserialize::<S>().map_err(|err| { let mut json_value: serde_json::Value = accessor.deserialize()?;
SeaographyError::TypeConversionError(
err.message, if let Some(case) = case {
format!("Json - {entity_column_name}"), json_value = convert_json_keys(json_value, case);
) }
})?;
let json_value = serde_json::to_value(deserialized).map_err(|err| { serde_json::from_value::<S>(json_value.clone()).map_err(|err| {
SeaographyError::TypeConversionError( SeaographyError::TypeConversionError(
err.to_string(), err.to_string(),
format!("Json - {entity_column_name}"), format!("Json - {entity_column_name}"),
) )
})?; })?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value)))) Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}), }),
); );
} }
pub fn convert_jsonb_output_case_for_entity<T>( pub fn convert_jsonb_output_for_entity<T>(
context: &mut BuilderContext, context: &mut BuilderContext,
column: &T::Column, column: &T::Column,
case: Case<'static>, case: Option<Case<'static>>,
) where ) where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
{ {
let entity_column_key = get_entity_and_column_name::<T>(context, column); let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.types.output_conversions.insert( context.types.output_conversions.insert(
entity_column_key.clone(), entity_column_name.clone(),
Box::new(move |value| { Box::new(move |value| {
if let sea_orm::Value::Json(Some(json)) = value { if let sea_orm::Value::Json(Some(json)) = value {
let result = let mut json_value = json.as_ref().clone();
async_graphql::Value::from_json(convert_json_keys(json.as_ref().clone(), case)) if let Some(case) = case {
.map_err(|err| { json_value = convert_json_keys(json_value, case);
SeaographyError::TypeConversionError( }
err.to_string(), let result = async_graphql::Value::from_json(json_value).map_err(|err| {
format!("Json - {entity_column_key}"), SeaographyError::TypeConversionError(
) err.to_string(),
})?; format!("Json - {entity_column_name}"),
)
})?;
Ok(result) Ok(result)
} else { } else {
Err(SeaographyError::TypeConversionError( Err(SeaographyError::TypeConversionError(
"value should be json".to_string(), "value should be json".to_string(),
format!("Json - {entity_column_key}"), format!("Json - {entity_column_name}"),
)) ))
} }
}), }),

View File

@@ -78,7 +78,7 @@ where
context.filter_input.type_name.as_ref()(&entity_name) context.filter_input.type_name.as_ref()(&entity_name)
} }
pub fn get_entity_insert_data_input_type_name<T>(context: &BuilderContext) -> String pub fn get_entity_insert_input_type_name<T>(context: &BuilderContext) -> String
where where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
@@ -87,7 +87,7 @@ where
format!("{entity_name}{}", context.entity_input.insert_suffix) format!("{entity_name}{}", context.entity_input.insert_suffix)
} }
pub fn get_entity_update_data_input_type_name<T>(context: &BuilderContext) -> String pub fn get_entity_update_input_type_name<T>(context: &BuilderContext) -> String
where where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,

View File

@@ -12,6 +12,7 @@ use crate::{
credential_3rd::{ credential_3rd::{
register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context, register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context,
}, },
cron::{register_cron_to_schema_builder, register_cron_to_schema_context},
downloaders::{ downloaders::{
register_downloaders_to_schema_builder, register_downloaders_to_schema_context, register_downloaders_to_schema_builder, register_downloaders_to_schema_context,
}, },
@@ -38,6 +39,9 @@ use crate::{
subscriptions::{ subscriptions::{
register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context, register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context,
}, },
system_tasks::{
register_system_tasks_to_schema_builder, register_system_tasks_to_schema_context,
},
}, },
infra::{ infra::{
json::register_jsonb_input_filter_to_schema_builder, json::register_jsonb_input_filter_to_schema_builder,
@@ -77,6 +81,8 @@ pub fn build_schema(
register_subscription_bangumi_to_schema_context(&mut context); register_subscription_bangumi_to_schema_context(&mut context);
register_subscription_episode_to_schema_context(&mut context); register_subscription_episode_to_schema_context(&mut context);
register_bangumi_to_schema_context(&mut context); register_bangumi_to_schema_context(&mut context);
register_cron_to_schema_context(&mut context);
register_system_tasks_to_schema_context(&mut context);
} }
context context
}); });
@@ -100,6 +106,8 @@ pub fn build_schema(
builder = register_credential3rd_to_schema_builder(builder); builder = register_credential3rd_to_schema_builder(builder);
builder = register_subscriber_tasks_to_schema_builder(builder); builder = register_subscriber_tasks_to_schema_builder(builder);
builder = register_bangumi_to_schema_builder(builder); builder = register_bangumi_to_schema_builder(builder);
builder = register_cron_to_schema_builder(builder);
builder = register_system_tasks_to_schema_builder(builder);
} }
let schema = builder.schema_builder(); let schema = builder.schema_builder();

View File

@@ -27,6 +27,8 @@ pub mod migrations;
pub mod models; pub mod models;
pub mod storage; pub mod storage;
pub mod task; pub mod task;
pub mod test_utils;
pub mod utils; pub mod utils;
pub mod web; pub mod web;
#[cfg(any(test, feature = "test-utils"))]
pub mod test_utils;

View File

@@ -1,6 +1,8 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ts_rs::TS;
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, TS)]
#[ts(rename_all = "camelCase")]
pub enum AutoOptimizeImageFormat { pub enum AutoOptimizeImageFormat {
#[serde(rename = "image/webp")] #[serde(rename = "image/webp")]
Webp, Webp,
@@ -10,25 +12,29 @@ pub enum AutoOptimizeImageFormat {
Jxl, Jxl,
} }
#[derive(Clone, Debug, Serialize, Deserialize, Default)] #[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
#[ts(rename_all = "camelCase")]
pub struct EncodeWebpOptions { pub struct EncodeWebpOptions {
pub quality: Option<f32>, pub quality: Option<f32>,
} }
#[derive(Clone, Debug, Serialize, Deserialize, Default)] #[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
#[ts(rename_all = "camelCase")]
pub struct EncodeAvifOptions { pub struct EncodeAvifOptions {
pub quality: Option<u8>, pub quality: Option<u8>,
pub speed: Option<u8>, pub speed: Option<u8>,
pub threads: Option<u8>, pub threads: Option<u8>,
} }
#[derive(Clone, Debug, Serialize, Deserialize, Default)] #[derive(Clone, Debug, Serialize, Deserialize, Default, TS, PartialEq)]
#[ts(rename_all = "camelCase")]
pub struct EncodeJxlOptions { pub struct EncodeJxlOptions {
pub quality: Option<f32>, pub quality: Option<f32>,
pub speed: Option<u8>, pub speed: Option<u8>,
} }
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize, TS, PartialEq)]
#[ts(tag = "mimeType")]
#[serde(tag = "mime_type")] #[serde(tag = "mime_type")]
pub enum EncodeImageOptions { pub enum EncodeImageOptions {
#[serde(rename = "image/webp")] #[serde(rename = "image/webp")]

View File

@@ -178,6 +178,7 @@ pub enum Cron {
SubscriberId, SubscriberId,
SubscriptionId, SubscriptionId,
CronExpr, CronExpr,
CronTimezone,
NextRun, NextRun,
LastRun, LastRun,
LastError, LastError,
@@ -189,7 +190,38 @@ pub enum Cron {
MaxAttempts, MaxAttempts,
Priority, Priority,
Status, Status,
SubscriberTask, SubscriberTaskCron,
SystemTaskCron,
}
#[derive(sea_query::Iden)]
pub enum ApalisSchema {
#[iden = "apalis"]
Schema,
}
#[derive(DeriveIden)]
pub enum ApalisJobs {
#[sea_orm(iden = "jobs")]
Table,
SubscriberId,
SubscriptionId,
Job,
JobType,
Status,
TaskType,
Id,
Attempts,
MaxAttempts,
RunAt,
LastError,
LockAt,
LockBy,
DoneAt,
Priority,
CronId,
} }
macro_rules! create_postgres_enum_for_active_enum { macro_rules! create_postgres_enum_for_active_enum {

View File

@@ -90,6 +90,11 @@ impl MigrationTrait for Migration {
SimpleExpr::from(AuthType::Basic).as_enum(AuthTypeEnum), SimpleExpr::from(AuthType::Basic).as_enum(AuthTypeEnum),
seed_subscriber_id.into(), seed_subscriber_id.into(),
]) ])
.on_conflict(
OnConflict::columns([Auth::Pid, Auth::AuthType])
.do_nothing()
.to_owned(),
)
.to_owned(), .to_owned(),
) )
.await?; .await?;

View File

@@ -95,6 +95,7 @@ impl MigrationTrait for Migration {
Table::alter() Table::alter()
.table(Subscriptions::Table) .table(Subscriptions::Table)
.drop_column(Subscriptions::CredentialId) .drop_column(Subscriptions::CredentialId)
.drop_foreign_key("fk_subscriptions_credential_id")
.to_owned(), .to_owned(),
) )
.await?; .await?;

View File

@@ -0,0 +1,221 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};
use super::defs::{ApalisJobs, ApalisSchema};
use crate::{
migrations::defs::{Subscribers, Subscriptions},
task::{
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME, SUBSCRIBER_TASK_APALIS_NAME,
SYSTEM_TASK_APALIS_NAME,
},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.add_column_if_not_exists(integer_null(ApalisJobs::SubscriberId))
.add_column_if_not_exists(integer_null(ApalisJobs::SubscriptionId))
.add_column_if_not_exists(text_null(ApalisJobs::TaskType))
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_subscriber_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::SubscriberId)
.to_tbl(Subscribers::Table)
.to_col(Subscribers::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_subscription_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::SubscriptionId)
.to_tbl(Subscriptions::Table)
.to_col(Subscriptions::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"UPDATE {apalis_schema}.{apalis_table} SET {subscriber_id} = ({job} ->> '{subscriber_id}')::integer, {task_type} = ({job} ->> '{task_type}')::text, {subscription_id} = ({job} ->> '{subscription_id}')::integer"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
job = ApalisJobs::Job.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME}
BEFORE INSERT OR UPDATE ON {apalis_schema}.{apalis_table}
FOR EACH ROW
EXECUTE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}();"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string()
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
.await?;
db.execute_unprepared("DROP VIEW IF EXISTS system_tasks")
.await?;
db.execute_unprepared(&format!(
r#"DROP TRIGGER IF EXISTS {SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME} ON {apalis_schema}.{apalis_table}"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string()
)).await?;
db.execute_unprepared(&format!(
r#"DROP FUNCTION IF EXISTS {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}()"#,
apalis_schema = ApalisSchema::Schema.to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.drop_foreign_key("fk_apalis_jobs_subscriber_id")
.drop_foreign_key("fk_apalis_jobs_subscription_id")
.drop_column(ApalisJobs::SubscriberId)
.drop_column(ApalisJobs::SubscriptionId)
.to_owned(),
)
.await?;
Ok(())
}
}

View File

@@ -1,64 +0,0 @@
use async_trait::async_trait;
use sea_orm_migration::prelude::*;
use crate::task::SUBSCRIBER_TASK_APALIS_NAME;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
job,
job_type,
status,
(job ->> 'subscriber_id'::text)::integer AS subscriber_id,
job ->> 'task_type'::text AS task_type,
id,
attempts,
max_attempts,
run_at,
last_error,
lock_at,
lock_by,
done_at,
priority
FROM apalis.jobs
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscriber_id
ON apalis.jobs (((job -> 'subscriber_id')::integer))
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(
r#"DROP INDEX IF EXISTS idx_apalis_jobs_subscriber_id
ON apalis.jobs"#,
)
.await?;
db.execute_unprepared("DROP VIEW IF EXISTS subscriber_tasks")
.await?;
Ok(())
}
}

View File

@@ -15,6 +15,8 @@ pub struct Migration;
#[async_trait] #[async_trait]
impl MigrationTrait for Migration { impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan).await?; create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan).await?;
{ {
@@ -29,11 +31,17 @@ impl MigrationTrait for Migration {
BangumiTypeEnum, BangumiTypeEnum,
BangumiType::iden_values(), BangumiType::iden_values(),
)) ))
.drop_column(Bangumi::SavePath)
.to_owned(), .to_owned(),
) )
.await?; .await?;
db.execute_unprepared(&format!(
r#"ALTER TABLE {bangumi} DROP COLUMN IF EXISTS {save_path}"#,
bangumi = Bangumi::Table.to_string(),
save_path = Bangumi::SavePath.to_string(),
))
.await?;
manager manager
.exec_stmt( .exec_stmt(
UpdateStatement::new() UpdateStatement::new()
@@ -83,11 +91,17 @@ impl MigrationTrait for Migration {
.add_column_if_not_exists(big_integer_null( .add_column_if_not_exists(big_integer_null(
Episodes::EnclosureContentLength, Episodes::EnclosureContentLength,
)) ))
.drop_column(Episodes::SavePath)
.to_owned(), .to_owned(),
) )
.await?; .await?;
db.execute_unprepared(&format!(
r#"ALTER TABLE {episodes} DROP COLUMN IF EXISTS {save_path}"#,
episodes = Episodes::Table.to_string(),
save_path = Episodes::SavePath.to_string(),
))
.await?;
manager manager
.exec_stmt( .exec_stmt(
UpdateStatement::new() UpdateStatement::new()
@@ -120,10 +134,34 @@ impl MigrationTrait for Migration {
} }
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.alter_table(
Table::alter()
.table(Bangumi::Table)
.add_column_if_not_exists(text_null(Bangumi::SavePath))
.drop_column(Bangumi::BangumiType)
.to_owned(),
)
.await?;
manager manager
.drop_postgres_enum_for_active_enum(BangumiTypeEnum) .drop_postgres_enum_for_active_enum(BangumiTypeEnum)
.await?; .await?;
manager
.alter_table(
Table::alter()
.table(Episodes::Table)
.add_column_if_not_exists(text_null(Episodes::SavePath))
.drop_column(Episodes::EpisodeType)
.drop_column(Episodes::EnclosureMagnetLink)
.drop_column(Episodes::EnclosureTorrentLink)
.drop_column(Episodes::EnclosurePubDate)
.drop_column(Episodes::EnclosureContentLength)
.to_owned(),
)
.await?;
manager manager
.drop_postgres_enum_for_active_enum(EpisodeTypeEnum) .drop_postgres_enum_for_active_enum(EpisodeTypeEnum)
.await?; .await?;

View File

@@ -1,62 +0,0 @@
use async_trait::async_trait;
use sea_orm_migration::prelude::*;
use crate::task::SUBSCRIBER_TASK_APALIS_NAME;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
job,
job_type,
status,
(job ->> 'subscriber_id')::integer AS subscriber_id,
job ->> 'task_type' AS task_type,
id,
attempts,
max_attempts,
run_at,
last_error,
lock_at,
lock_by,
done_at,
priority,
(job ->> 'subscription_id')::integer AS subscription_id
FROM apalis.jobs
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscription_id
ON apalis.jobs (((job -> 'subscription_id')::integer))
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscription_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(
r#"DROP INDEX IF EXISTS idx_apalis_jobs_subscription_id
ON apalis.jobs"#,
)
.await?;
Ok(())
}
}

View File

@@ -4,12 +4,18 @@ use sea_orm_migration::{prelude::*, schema::*};
use crate::{ use crate::{
migrations::defs::{ migrations::defs::{
Cron, CustomSchemaManagerExt, GeneralIds, Subscribers, Subscriptions, table_auto_z, ApalisJobs, ApalisSchema, Cron, CustomSchemaManagerExt, GeneralIds, Subscribers,
Subscriptions, table_auto_z,
}, },
models::cron::{ models::cron::{
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_EVENT, CronStatus, CronStatusEnum, CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_DEBUG_EVENT, CRON_DUE_EVENT,
NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME, NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME, CronStatus, CronStatusEnum, NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME, NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME,
},
task::{
SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SUBSCRIBER_TASK_APALIS_NAME,
SYSTEM_TASK_APALIS_NAME,
}, },
}; };
@@ -25,7 +31,8 @@ impl MigrationTrait for Migration {
CronStatus::Pending, CronStatus::Pending,
CronStatus::Running, CronStatus::Running,
CronStatus::Completed, CronStatus::Completed,
CronStatus::Failed CronStatus::Failed,
CronStatus::Disabled
) )
.await?; .await?;
@@ -34,6 +41,7 @@ impl MigrationTrait for Migration {
table_auto_z(Cron::Table) table_auto_z(Cron::Table)
.col(pk_auto(Cron::Id)) .col(pk_auto(Cron::Id))
.col(string(Cron::CronExpr)) .col(string(Cron::CronExpr))
.col(string(Cron::CronTimezone))
.col(integer_null(Cron::SubscriberId)) .col(integer_null(Cron::SubscriberId))
.col(integer_null(Cron::SubscriptionId)) .col(integer_null(Cron::SubscriptionId))
.col(timestamp_with_time_zone_null(Cron::NextRun)) .col(timestamp_with_time_zone_null(Cron::NextRun))
@@ -42,16 +50,16 @@ impl MigrationTrait for Migration {
.col(boolean(Cron::Enabled).default(true)) .col(boolean(Cron::Enabled).default(true))
.col(string_null(Cron::LockedBy)) .col(string_null(Cron::LockedBy))
.col(timestamp_with_time_zone_null(Cron::LockedAt)) .col(timestamp_with_time_zone_null(Cron::LockedAt))
.col(integer_null(Cron::TimeoutMs)) .col(integer_null(Cron::TimeoutMs).default(5000))
.col(integer(Cron::Attempts)) .col(integer(Cron::Attempts).default(0))
.col(integer(Cron::MaxAttempts)) .col(integer(Cron::MaxAttempts).default(1))
.col(integer(Cron::Priority)) .col(integer(Cron::Priority).default(0))
.col(enumeration( .col(
Cron::Status, enumeration(Cron::Status, CronStatusEnum, CronStatus::iden_values())
CronStatusEnum, .default(CronStatus::Pending),
CronStatus::iden_values(), )
)) .col(json_binary_null(Cron::SubscriberTaskCron))
.col(json_binary_null(Cron::SubscriberTask)) .col(json_binary_null(Cron::SystemTaskCron))
.foreign_key( .foreign_key(
ForeignKey::create() ForeignKey::create()
.name("fk_cron_subscriber_id") .name("fk_cron_subscriber_id")
@@ -91,19 +99,30 @@ impl MigrationTrait for Migration {
db.execute_unprepared(&format!( db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$ r#"CREATE OR REPLACE FUNCTION {SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_subscriber_task_subscriber_id integer;
new_subscriber_task_subscription_id integer;
new_system_task_subscriber_id integer;
BEGIN BEGIN
IF jsonb_path_exists(NEW.{subscriber_task}, '$.subscriber_id ? (@.type() == "number")') THEN new_subscriber_task_subscriber_id = (NEW.{subscriber_task_cron} ->> 'subscriber_id')::integer;
NEW.{subscriber_id} = (NEW.{subscriber_task} ->> 'subscriber_id')::integer; new_subscriber_task_subscription_id = (NEW.{subscriber_task_cron} ->> 'subscription_id')::integer;
new_system_task_subscriber_id = (NEW.{system_task_cron} ->> 'subscriber_id')::integer;
IF new_subscriber_task_subscriber_id IS DISTINCT FROM (OLD.{subscriber_task_cron} ->> 'subscriber_id')::integer AND new_subscriber_task_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_subscriber_task_subscriber_id;
END IF; END IF;
IF jsonb_path_exists(NEW.{subscriber_task}, '$.subscription_id ? (@.type() == "number")') THEN IF new_subscriber_task_subscription_id IS DISTINCT FROM (OLD.{subscriber_task_cron} ->> 'subscription_id')::integer AND new_subscriber_task_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = (NEW.{subscriber_task} ->> 'subscription_id')::integer; NEW.{subscription_id} = new_subscriber_task_subscription_id;
END IF;
IF new_system_task_subscriber_id IS DISTINCT FROM (OLD.{system_task_cron} ->> 'subscriber_id')::integer AND new_system_task_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_system_task_subscriber_id;
END IF; END IF;
RETURN NEW; RETURN NEW;
END; END;
$$ LANGUAGE plpgsql;"#, $$ LANGUAGE plpgsql;"#,
subscriber_task = &Cron::SubscriberTask.to_string(), subscriber_task_cron = &Cron::SubscriberTaskCron.to_string(),
subscriber_id = &Cron::SubscriberId.to_string(), subscriber_id = &Cron::SubscriberId.to_string(),
subscription_id = &Cron::SubscriptionId.to_string(), subscription_id = &Cron::SubscriptionId.to_string(),
system_task_cron = &Cron::SystemTaskCron.to_string(),
)).await?; )).await?;
db.execute_unprepared(&format!( db.execute_unprepared(&format!(
@@ -122,7 +141,7 @@ impl MigrationTrait for Migration {
IF NEW.{next_run} IS NOT NULL IF NEW.{next_run} IS NOT NULL
AND NEW.{next_run} <= CURRENT_TIMESTAMP AND NEW.{next_run} <= CURRENT_TIMESTAMP
AND NEW.{enabled} = true AND NEW.{enabled} = true
AND NEW.{status} = '{pending}' AND NEW.{status} = '{pending}'::{status_type}
AND NEW.{attempts} < NEW.{max_attempts} AND NEW.{attempts} < NEW.{max_attempts}
-- Check if not locked or lock timeout -- Check if not locked or lock timeout
AND ( AND (
@@ -137,8 +156,8 @@ impl MigrationTrait for Migration {
OLD.{next_run} IS NULL OLD.{next_run} IS NULL
OR OLD.{next_run} > CURRENT_TIMESTAMP OR OLD.{next_run} > CURRENT_TIMESTAMP
OR OLD.{enabled} = false OR OLD.{enabled} = false
OR OLD.{status} != '{pending}' OR OLD.{status} IS DISTINCT FROM '{pending}'
OR OLD.{attempts} != NEW.{attempts} OR OLD.{attempts} IS DISTINCT FROM NEW.{attempts}
) )
THEN THEN
PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(NEW)::text); PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(NEW)::text);
@@ -154,6 +173,7 @@ impl MigrationTrait for Migration {
pending = &CronStatus::Pending.to_value(), pending = &CronStatus::Pending.to_value(),
attempts = &Cron::Attempts.to_string(), attempts = &Cron::Attempts.to_string(),
max_attempts = &Cron::MaxAttempts.to_string(), max_attempts = &Cron::MaxAttempts.to_string(),
status_type = &CronStatus::name().to_string(),
)) ))
.await?; .await?;
@@ -177,7 +197,7 @@ impl MigrationTrait for Migration {
WHERE {next_run} IS NOT NULL WHERE {next_run} IS NOT NULL
AND {next_run} <= CURRENT_TIMESTAMP AND {next_run} <= CURRENT_TIMESTAMP
AND {enabled} = true AND {enabled} = true
AND {status} = '{pending}' AND {status} = '{pending}'::{status_type}
AND {attempts} < {max_attempts} AND {attempts} < {max_attempts}
AND ( AND (
{locked_at} IS NULL {locked_at} IS NULL
@@ -189,9 +209,12 @@ impl MigrationTrait for Migration {
ORDER BY {priority} ASC, {next_run} ASC ORDER BY {priority} ASC, {next_run} ASC
FOR UPDATE SKIP LOCKED FOR UPDATE SKIP LOCKED
LOOP LOOP
-- PERFORM pg_notify('{CRON_DUE_DEBUG_EVENT}',format('Found due cron: value=%s; Now time: %s', row_to_json(cron_record)::text, CURRENT_TIMESTAMP));
PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(cron_record)::text); PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(cron_record)::text);
notification_count := notification_count + 1; notification_count := notification_count + 1;
END LOOP; END LOOP;
-- PERFORM pg_notify('{CRON_DUE_DEBUG_EVENT}', format('Notification count: %I; Now time: %s', notification_count, CURRENT_TIMESTAMP));
RETURN notification_count; RETURN notification_count;
END; END;
$$ LANGUAGE plpgsql;"#, $$ LANGUAGE plpgsql;"#,
@@ -205,15 +228,286 @@ impl MigrationTrait for Migration {
priority = &Cron::Priority.to_string(), priority = &Cron::Priority.to_string(),
attempts = &Cron::Attempts.to_string(), attempts = &Cron::Attempts.to_string(),
max_attempts = &Cron::MaxAttempts.to_string(), max_attempts = &Cron::MaxAttempts.to_string(),
status_type = &CronStatus::name().to_string(),
)) ))
.await?; .await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.add_column_if_not_exists(integer_null(ApalisJobs::CronId))
.add_foreign_key(
TableForeignKey::new()
.name("fk_apalis_jobs_cron_id")
.from_tbl((ApalisSchema::Schema, ApalisJobs::Table))
.from_col(ApalisJobs::CronId)
.to_tbl(Cron::Table)
.to_col(Cron::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id},
{cron_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{cron_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"
UPDATE {apalis_schema}.{apalis_table} SET {cron_id} = ({job} ->> '{cron_id}')::integer
"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_cron_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_cron_id = (NEW.{job} ->> '{cron_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_cron_id IS DISTINCT FROM (OLD.{job} ->> '{cron_id}')::integer AND new_job_cron_id IS DISTINCT FROM NEW.{cron_id} THEN
NEW.{cron_id} = new_job_cron_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
cron_id = ApalisJobs::CronId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
Ok(()) Ok(())
} }
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection(); let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {apalis_schema}.{SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
DECLARE
new_job_subscriber_id integer;
new_job_subscription_id integer;
new_job_task_type text;
BEGIN
new_job_subscriber_id = (NEW.{job} ->> '{subscriber_id}')::integer;
new_job_subscription_id = (NEW.{job} ->> '{subscription_id}')::integer;
new_job_task_type = (NEW.{job} ->> '{task_type}')::text;
IF new_job_subscriber_id IS DISTINCT FROM (OLD.{job} ->> '{subscriber_id}')::integer AND new_job_subscriber_id IS DISTINCT FROM NEW.{subscriber_id} THEN
NEW.{subscriber_id} = new_job_subscriber_id;
END IF;
IF new_job_subscription_id IS DISTINCT FROM (OLD.{job} ->> '{subscription_id}')::integer AND new_job_subscription_id IS DISTINCT FROM NEW.{subscription_id} THEN
NEW.{subscription_id} = new_job_subscription_id;
END IF;
IF new_job_task_type IS DISTINCT FROM (OLD.{job} ->> '{task_type}')::text AND new_job_task_type IS DISTINCT FROM NEW.{task_type} THEN
NEW.{task_type} = new_job_task_type;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
apalis_schema = ApalisSchema::Schema.to_string(),
job = ApalisJobs::Job.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority},
{subscription_id}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{subscriber_id} ? (@.type() == "number")')
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
subscription_id = ApalisJobs::SubscriptionId.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW system_tasks AS
SELECT
{job},
{job_type},
{status},
{subscriber_id},
{task_type},
{id},
{attempts},
{max_attempts},
{run_at},
{last_error},
{lock_at},
{lock_by},
{done_at},
{priority}
FROM {apalis_schema}.{apalis_table}
WHERE {job_type} = '{SYSTEM_TASK_APALIS_NAME}'
AND jsonb_path_exists({job}, '$.{task_type} ? (@.type() == "string")')"#,
apalis_schema = ApalisSchema::Schema.to_string(),
apalis_table = ApalisJobs::Table.to_string(),
job = ApalisJobs::Job.to_string(),
job_type = ApalisJobs::JobType.to_string(),
status = ApalisJobs::Status.to_string(),
subscriber_id = ApalisJobs::SubscriberId.to_string(),
task_type = ApalisJobs::TaskType.to_string(),
id = ApalisJobs::Id.to_string(),
attempts = ApalisJobs::Attempts.to_string(),
max_attempts = ApalisJobs::MaxAttempts.to_string(),
run_at = ApalisJobs::RunAt.to_string(),
last_error = ApalisJobs::LastError.to_string(),
lock_at = ApalisJobs::LockAt.to_string(),
lock_by = ApalisJobs::LockBy.to_string(),
done_at = ApalisJobs::DoneAt.to_string(),
priority = ApalisJobs::Priority.to_string(),
))
.await?;
manager
.alter_table(
TableAlterStatement::new()
.table((ApalisSchema::Schema, ApalisJobs::Table))
.drop_column(ApalisJobs::CronId)
.drop_foreign_key("fk_apalis_jobs_cron_id")
.to_owned(),
)
.await?;
db.execute_unprepared(&format!( db.execute_unprepared(&format!(
r#"DROP TRIGGER IF EXISTS {NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME} ON {table};"#, r#"DROP TRIGGER IF EXISTS {NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME} ON {table};"#,
table = &Cron::Table.to_string(), table = &Cron::Table.to_string(),

View File

@@ -7,10 +7,9 @@ pub mod m20220101_000001_init;
pub mod m20240224_082543_add_downloads; pub mod m20240224_082543_add_downloads;
pub mod m20241231_000001_auth; pub mod m20241231_000001_auth;
pub mod m20250501_021523_credential_3rd; pub mod m20250501_021523_credential_3rd;
pub mod m20250520_021135_subscriber_tasks; pub mod m20250520_021135_add_tasks;
pub mod m20250622_015618_feeds; pub mod m20250622_015618_feeds;
pub mod m20250622_020819_bangumi_and_episode_type; pub mod m20250622_020819_bangumi_and_episode_type;
pub mod m20250625_060701_add_subscription_id_to_subscriber_tasks;
pub mod m20250629_065628_add_cron; pub mod m20250629_065628_add_cron;
pub struct Migrator; pub struct Migrator;
@@ -23,10 +22,9 @@ impl MigratorTrait for Migrator {
Box::new(m20240224_082543_add_downloads::Migration), Box::new(m20240224_082543_add_downloads::Migration),
Box::new(m20241231_000001_auth::Migration), Box::new(m20241231_000001_auth::Migration),
Box::new(m20250501_021523_credential_3rd::Migration), Box::new(m20250501_021523_credential_3rd::Migration),
Box::new(m20250520_021135_subscriber_tasks::Migration), Box::new(m20250520_021135_add_tasks::Migration),
Box::new(m20250622_015618_feeds::Migration), Box::new(m20250622_015618_feeds::Migration),
Box::new(m20250622_020819_bangumi_and_episode_type::Migration), Box::new(m20250622_020819_bangumi_and_episode_type::Migration),
Box::new(m20250625_060701_add_subscription_id_to_subscriber_tasks::Migration),
Box::new(m20250629_065628_add_cron::Migration), Box::new(m20250629_065628_add_cron::Migration),
] ]
} }

View File

@@ -1,4 +1,5 @@
pub const CRON_DUE_EVENT: &str = "cron_due"; pub const CRON_DUE_EVENT: &str = "cron_due";
pub const CRON_DUE_DEBUG_EVENT: &str = "cron_due_debug";
pub const CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME: &str = "check_and_trigger_due_crons"; pub const CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME: &str = "check_and_trigger_due_crons";

View File

@@ -1,14 +1,14 @@
mod core; mod core;
mod registry;
pub use core::{ pub use core::{
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_EVENT, CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_DEBUG_EVENT, CRON_DUE_EVENT,
NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME, NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME, NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME, NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME,
}; };
use async_trait::async_trait; use async_trait::async_trait;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use chrono_tz::Tz;
use croner::Cron; use croner::Cron;
use sea_orm::{ use sea_orm::{
ActiveValue::{self, Set}, ActiveValue::{self, Set},
@@ -21,8 +21,10 @@ use sea_orm::{
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{ use crate::{
app::AppContextTrait, errors::RecorderResult, models::subscriber_tasks, app::AppContextTrait,
task::SubscriberTaskTrait, errors::RecorderResult,
models::{subscriber_tasks, system_tasks},
task::{SubscriberTaskTrait, SystemTaskTrait},
}; };
#[derive( #[derive(
@@ -39,9 +41,11 @@ pub enum CronStatus {
Completed, Completed,
#[sea_orm(string_value = "failed")] #[sea_orm(string_value = "failed")]
Failed, Failed,
#[sea_orm(string_value = "disabled")]
Disabled,
} }
#[derive(Debug, Clone, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)] #[derive(Debug, Clone, DeriveEntityModel, PartialEq, Serialize, Deserialize)]
#[sea_orm(table_name = "cron")] #[sea_orm(table_name = "cron")]
pub struct Model { pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")] #[sea_orm(default_expr = "Expr::current_timestamp()")]
@@ -53,13 +57,14 @@ pub struct Model {
pub subscriber_id: Option<i32>, pub subscriber_id: Option<i32>,
pub subscription_id: Option<i32>, pub subscription_id: Option<i32>,
pub cron_expr: String, pub cron_expr: String,
pub cron_timezone: String,
pub next_run: Option<DateTimeUtc>, pub next_run: Option<DateTimeUtc>,
pub last_run: Option<DateTimeUtc>, pub last_run: Option<DateTimeUtc>,
pub last_error: Option<String>, pub last_error: Option<String>,
pub locked_by: Option<String>, pub locked_by: Option<String>,
pub locked_at: Option<DateTimeUtc>, pub locked_at: Option<DateTimeUtc>,
#[sea_orm(default_expr = "5000")] // default_expr = "5000"
pub timeout_ms: i32, pub timeout_ms: Option<i32>,
#[sea_orm(default_expr = "0")] #[sea_orm(default_expr = "0")]
pub attempts: i32, pub attempts: i32,
#[sea_orm(default_expr = "1")] #[sea_orm(default_expr = "1")]
@@ -69,7 +74,8 @@ pub struct Model {
pub status: CronStatus, pub status: CronStatus,
#[sea_orm(default_expr = "true")] #[sea_orm(default_expr = "true")]
pub enabled: bool, pub enabled: bool,
pub subscriber_task: Option<subscriber_tasks::SubscriberTask>, pub subscriber_task_cron: Option<subscriber_tasks::SubscriberTask>,
pub system_task_cron: Option<system_tasks::SystemTask>,
} }
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -79,7 +85,7 @@ pub enum Relation {
from = "Column::SubscriberId", from = "Column::SubscriberId",
to = "super::subscribers::Column::Id", to = "super::subscribers::Column::Id",
on_update = "Cascade", on_update = "Cascade",
on_delete = "Cascade" on_delete = "Restrict"
)] )]
Subscriber, Subscriber,
#[sea_orm( #[sea_orm(
@@ -87,9 +93,13 @@ pub enum Relation {
from = "Column::SubscriptionId", from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id", to = "super::subscriptions::Column::Id",
on_update = "Cascade", on_update = "Cascade",
on_delete = "Cascade" on_delete = "Restrict"
)] )]
Subscription, Subscription,
#[sea_orm(has_many = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(has_many = "super::system_tasks::Entity")]
SystemTask,
} }
impl Related<super::subscribers::Entity> for Entity { impl Related<super::subscribers::Entity> for Entity {
@@ -104,42 +114,90 @@ impl Related<super::subscriptions::Entity> for Entity {
} }
} }
impl Related<super::subscriber_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriberTask.def()
}
}
impl Related<super::system_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SystemTask.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)] #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity { pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")] #[sea_orm(entity = "super::subscribers::Entity")]
Subscriber, Subscriber,
#[sea_orm(entity = "super::subscriptions::Entity")] #[sea_orm(entity = "super::subscriptions::Entity")]
Subscription, Subscription,
#[sea_orm(entity = "super::subscriber_tasks::Entity")]
SubscriberTask,
#[sea_orm(entity = "super::system_tasks::Entity")]
SystemTask,
} }
#[async_trait] #[async_trait]
impl ActiveModelBehavior for ActiveModel { impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, _insert: bool) -> Result<Self, DbErr> async fn before_save<C>(mut self, _db: &C, insert: bool) -> Result<Self, DbErr>
where where
C: ConnectionTrait, C: ConnectionTrait,
{ {
if let ActiveValue::Set(ref cron_expr) = self.cron_expr match (
&& matches!( &self.cron_expr as &ActiveValue<String>,
self.next_run, &self.cron_timezone as &ActiveValue<String>,
ActiveValue::NotSet | ActiveValue::Unchanged(_) ) {
) (ActiveValue::Set(cron_expr), ActiveValue::Set(timezone)) => {
{ if matches!(
let next_run = &self.next_run,
Model::calculate_next_run(cron_expr).map_err(|e| DbErr::Custom(e.to_string()))?; ActiveValue::NotSet | ActiveValue::Unchanged(_)
self.next_run = Set(Some(next_run)); ) {
} let next_run = Model::calculate_next_run(cron_expr, timezone)
if let ActiveValue::Set(Some(subscriber_id)) = self.subscriber_id { .map_err(|e| DbErr::Custom(e.to_string()))?;
if let ActiveValue::Set(Some(ref subscriber_task)) = self.subscriber_task { self.next_run = Set(Some(next_run));
if subscriber_task.get_subscriber_id() != subscriber_id { }
}
(
ActiveValue::Unchanged(_) | ActiveValue::NotSet,
ActiveValue::Unchanged(_) | ActiveValue::NotSet,
) => {}
(_, _) => {
if matches!(
self.next_run,
ActiveValue::NotSet | ActiveValue::Unchanged(_)
) {
return Err(DbErr::Custom( return Err(DbErr::Custom(
"Subscriber task subscriber_id does not match cron subscriber_id" "Cron expr and timezone must be insert or update at same time when next \
run is not set"
.to_string(), .to_string(),
)); ));
} }
}
};
if let ActiveValue::Set(Some(subscriber_id)) = self.subscriber_id
&& let ActiveValue::Set(Some(ref subscriber_task)) = self.subscriber_task_cron
&& subscriber_task.get_subscriber_id() != subscriber_id
{
return Err(DbErr::Custom(
"Cron subscriber_id does not match subscriber_task_cron.subscriber_id".to_string(),
));
}
if let ActiveValue::Set(Some(subscriber_id)) = self.subscriber_id
&& let ActiveValue::Set(Some(ref system_task)) = self.system_task_cron
&& system_task.get_subscriber_id() != Some(subscriber_id)
{
return Err(DbErr::Custom(
"Cron subscriber_id does not match system_task_cron.subscriber_id".to_string(),
));
}
if let ActiveValue::Set(enabled) = self.enabled
&& !insert
{
if enabled {
self.status = Set(CronStatus::Pending)
} else { } else {
return Err(DbErr::Custom( self.status = Set(CronStatus::Disabled)
"Cron subscriber_id is set but subscriber_task is not set".to_string(),
));
} }
} }
@@ -199,7 +257,10 @@ impl Model {
&& cron.attempts < cron.max_attempts && cron.attempts < cron.max_attempts
&& cron.status == CronStatus::Pending && cron.status == CronStatus::Pending
&& (cron.locked_at.is_none_or(|locked_at| { && (cron.locked_at.is_none_or(|locked_at| {
locked_at + chrono::Duration::milliseconds(cron.timeout_ms as i64) <= Utc::now() cron.timeout_ms.is_some_and(|cron_timeout_ms| {
locked_at + chrono::Duration::milliseconds(cron_timeout_ms as i64)
<= Utc::now()
})
})) }))
&& cron.next_run.is_some_and(|next_run| next_run <= Utc::now()) && cron.next_run.is_some_and(|next_run| next_run <= Utc::now())
{ {
@@ -223,13 +284,20 @@ impl Model {
} }
async fn exec_cron(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> { async fn exec_cron(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
if let Some(subscriber_task) = self.subscriber_task.as_ref() { if let Some(subscriber_task) = self.subscriber_task_cron.as_ref() {
let task_service = ctx.task(); let task_service = ctx.task();
let mut new_subscriber_task = subscriber_task.clone();
new_subscriber_task.set_cron_id(Some(self.id));
task_service task_service
.add_subscriber_task(subscriber_task.clone()) .add_subscriber_task(new_subscriber_task)
.await?; .await?;
} else if let Some(system_task) = self.system_task_cron.as_ref() {
let task_service = ctx.task();
let mut new_system_task = system_task.clone();
new_system_task.set_cron_id(Some(self.id));
task_service.add_system_task(new_system_task).await?;
} else { } else {
unimplemented!("Cron without subscriber task is not supported now"); unimplemented!("Cron without unknown task is not supported now");
} }
Ok(()) Ok(())
@@ -238,7 +306,7 @@ impl Model {
async fn mark_cron_completed(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> { async fn mark_cron_completed(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
let db = ctx.db(); let db = ctx.db();
let next_run = Self::calculate_next_run(&self.cron_expr)?; let next_run = Self::calculate_next_run(&self.cron_expr, &self.cron_timezone)?;
ActiveModel { ActiveModel {
id: Set(self.id), id: Set(self.id),
@@ -276,7 +344,10 @@ impl Model {
let next_run = if should_retry { let next_run = if should_retry {
Some(Utc::now() + retry_duration) Some(Utc::now() + retry_duration)
} else { } else {
Some(Self::calculate_next_run(&self.cron_expr)?) Some(Self::calculate_next_run(
&self.cron_expr,
&self.cron_timezone,
)?)
}; };
ActiveModel { ActiveModel {
@@ -345,7 +416,15 @@ impl Model {
locked_cron locked_cron
.mark_cron_failed( .mark_cron_failed(
ctx, ctx,
format!("Cron timeout of {}ms", locked_cron.timeout_ms).as_str(), format!(
"Cron timeout of {}ms",
locked_cron
.timeout_ms
.as_ref()
.map(|s| s.to_string())
.unwrap_or_else(|| "Infinite".to_string())
)
.as_str(),
retry_duration, retry_duration,
) )
.await?; .await?;
@@ -357,11 +436,17 @@ impl Model {
Ok(()) Ok(())
} }
pub fn calculate_next_run(cron_expr: &str) -> RecorderResult<DateTime<Utc>> { pub fn calculate_next_run(cron_expr: &str, timezone: &str) -> RecorderResult<DateTime<Utc>> {
let cron_expr = Cron::new(cron_expr).parse()?; let user_tz = timezone.parse::<Tz>()?;
let next = cron_expr.find_next_occurrence(&Utc::now(), false)?; let user_tz_now = Utc::now().with_timezone(&user_tz);
Ok(next) let cron_expr = Cron::new(cron_expr).with_seconds_optional().parse()?;
let next = cron_expr.find_next_occurrence(&user_tz_now, false)?;
let next_utc = next.with_timezone(&Utc);
Ok(next_utc)
} }
} }

View File

@@ -1 +0,0 @@

View File

@@ -102,7 +102,7 @@ impl ActiveModelBehavior for ActiveModel {
C: ConnectionTrait, C: ConnectionTrait,
{ {
if insert && let ActiveValue::NotSet = self.token { if insert && let ActiveValue::NotSet = self.token {
let token = nanoid::nanoid!(10); let token = Uuid::now_v7().to_string();
self.token = ActiveValue::Set(token); self.token = ActiveValue::Set(token);
} }
Ok(self) Ok(self)

View File

@@ -1,6 +1,7 @@
pub mod auth; pub mod auth;
pub mod bangumi; pub mod bangumi;
pub mod credential_3rd; pub mod credential_3rd;
pub mod cron;
pub mod downloaders; pub mod downloaders;
pub mod downloads; pub mod downloads;
pub mod episodes; pub mod episodes;
@@ -11,4 +12,4 @@ pub mod subscribers;
pub mod subscription_bangumi; pub mod subscription_bangumi;
pub mod subscription_episode; pub mod subscription_episode;
pub mod subscriptions; pub mod subscriptions;
pub mod cron; pub mod system_tasks;

View File

@@ -1,9 +1,10 @@
use async_trait::async_trait; use async_trait::async_trait;
use sea_orm::entity::prelude::*; use sea_orm::{ActiveValue, entity::prelude::*};
use crate::task::SubscriberTaskTrait;
pub use crate::task::{ pub use crate::task::{
SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant, SubscriberTask, SubscriberTaskInput, SubscriberTaskType, SubscriberTaskTypeEnum,
SubscriberTaskTypeVariantIter, SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter,
}; };
#[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)] #[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)]
@@ -23,13 +24,14 @@ pub enum SubscriberTaskStatus {
Killed, Killed,
} }
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] #[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "subscriber_tasks")] #[sea_orm(table_name = "subscriber_tasks")]
pub struct Model { pub struct Model {
#[sea_orm(primary_key)] #[sea_orm(primary_key)]
pub id: String, pub id: String,
pub subscriber_id: i32, pub subscriber_id: i32,
pub subscription_id: Option<i32>, pub subscription_id: Option<i32>,
pub cron_id: Option<i32>,
pub job: SubscriberTask, pub job: SubscriberTask,
pub task_type: SubscriberTaskType, pub task_type: SubscriberTaskType,
pub status: SubscriberTaskStatus, pub status: SubscriberTaskStatus,
@@ -50,17 +52,25 @@ pub enum Relation {
from = "Column::SubscriberId", from = "Column::SubscriberId",
to = "super::subscribers::Column::Id", to = "super::subscribers::Column::Id",
on_update = "Cascade", on_update = "Cascade",
on_delete = "Cascade" on_delete = "Restrict"
)] )]
Subscriber, Subscriber,
#[sea_orm( #[sea_orm(
belongs_to = "super::subscriptions::Entity", belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId", from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id", to = "super::subscriptions::Column::Id",
on_update = "NoAction", on_update = "Cascade",
on_delete = "NoAction" on_delete = "Restrict"
)] )]
Subscription, Subscription,
#[sea_orm(
belongs_to = "super::cron::Entity",
from = "Column::CronId",
to = "super::cron::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Cron,
} }
impl Related<super::subscribers::Entity> for Entity { impl Related<super::subscribers::Entity> for Entity {
@@ -75,13 +85,36 @@ impl Related<super::subscriptions::Entity> for Entity {
} }
} }
impl Related<super::cron::Entity> for Entity {
fn to() -> RelationDef {
Relation::Cron.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)] #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity { pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")] #[sea_orm(entity = "super::subscribers::Entity")]
Subscriber, Subscriber,
#[sea_orm(entity = "super::subscriptions::Entity")] #[sea_orm(entity = "super::subscriptions::Entity")]
Subscription, Subscription,
#[sea_orm(entity = "super::cron::Entity")]
Cron,
} }
#[async_trait] #[async_trait]
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, _insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
if let ActiveValue::Set(subscriber_id) = self.subscriber_id
&& let ActiveValue::Set(ref job) = self.job
&& job.get_subscriber_id() != subscriber_id
{
return Err(DbErr::Custom(
"SubscriberTask subscriber_id does not match job.subscriber_id".to_string(),
));
}
Ok(self)
}
}

View File

@@ -45,6 +45,8 @@ pub enum Relation {
Feed, Feed,
#[sea_orm(has_many = "super::subscriber_tasks::Entity")] #[sea_orm(has_many = "super::subscriber_tasks::Entity")]
SubscriberTask, SubscriberTask,
#[sea_orm(has_many = "super::system_tasks::Entity")]
SystemTask,
} }
impl Related<super::subscriptions::Entity> for Entity { impl Related<super::subscriptions::Entity> for Entity {
@@ -95,6 +97,12 @@ impl Related<super::subscriber_tasks::Entity> for Entity {
} }
} }
impl Related<super::system_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SystemTask.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)] #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity { pub enum RelatedEntity {
#[sea_orm(entity = "super::subscriptions::Entity")] #[sea_orm(entity = "super::subscriptions::Entity")]
@@ -111,6 +119,8 @@ pub enum RelatedEntity {
Feed, Feed,
#[sea_orm(entity = "super::subscriber_tasks::Entity")] #[sea_orm(entity = "super::subscriber_tasks::Entity")]
SubscriberTask, SubscriberTask,
#[sea_orm(entity = "super::system_tasks::Entity")]
SystemTask,
} }
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]

View File

@@ -60,6 +60,8 @@ pub enum Relation {
Feed, Feed,
#[sea_orm(has_many = "super::subscriber_tasks::Entity")] #[sea_orm(has_many = "super::subscriber_tasks::Entity")]
SubscriberTask, SubscriberTask,
#[sea_orm(has_many = "super::cron::Entity")]
Cron,
} }
impl Related<super::subscribers::Entity> for Entity { impl Related<super::subscribers::Entity> for Entity {
@@ -126,6 +128,12 @@ impl Related<super::subscriber_tasks::Entity> for Entity {
} }
} }
impl Related<super::cron::Entity> for Entity {
fn to() -> RelationDef {
Relation::Cron.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)] #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity { pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")] #[sea_orm(entity = "super::subscribers::Entity")]
@@ -144,6 +152,8 @@ pub enum RelatedEntity {
Feed, Feed,
#[sea_orm(entity = "super::subscriber_tasks::Entity")] #[sea_orm(entity = "super::subscriber_tasks::Entity")]
SubscriberTask, SubscriberTask,
#[sea_orm(entity = "super::cron::Entity")]
Cron,
} }
#[async_trait] #[async_trait]

View File

@@ -0,0 +1,99 @@
use async_trait::async_trait;
use sea_orm::{ActiveValue, entity::prelude::*};
pub use crate::task::{
SystemTask, SystemTaskInput, SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant,
SystemTaskTypeVariantIter,
};
#[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum SystemTaskStatus {
#[sea_orm(string_value = "Pending")]
Pending,
#[sea_orm(string_value = "Scheduled")]
Scheduled,
#[sea_orm(string_value = "Running")]
Running,
#[sea_orm(string_value = "Done")]
Done,
#[sea_orm(string_value = "Failed")]
Failed,
#[sea_orm(string_value = "Killed")]
Killed,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "system_tasks")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: String,
pub subscriber_id: Option<i32>,
pub cron_id: Option<i32>,
pub job: SystemTask,
pub task_type: SystemTaskType,
pub status: SystemTaskStatus,
pub attempts: i32,
pub max_attempts: i32,
pub run_at: DateTimeUtc,
pub last_error: Option<String>,
pub lock_at: Option<DateTimeUtc>,
pub lock_by: Option<String>,
pub done_at: Option<DateTimeUtc>,
pub priority: i32,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Subscriber,
#[sea_orm(
belongs_to = "super::cron::Entity",
from = "Column::CronId",
to = "super::cron::Column::Id",
on_update = "Cascade",
on_delete = "Restrict"
)]
Cron,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::cron::Entity> for Entity {
fn to() -> RelationDef {
Relation::Cron.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::cron::Entity")]
Cron,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, _insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
if let ActiveValue::Set(Some(..)) = self.subscriber_id {
return Err(DbErr::Custom(
"SystemTask can not be created by subscribers now".to_string(),
));
}
Ok(self)
}
}

View File

@@ -89,6 +89,13 @@ impl StorageService {
p p
} }
#[cfg(any(test, feature = "test-utils"))]
pub fn build_test_path(&self, path: impl AsRef<Path>) -> PathBuf {
let mut p = PathBuf::from("/test");
p.push(path);
p
}
pub fn build_public_path(&self, path: impl AsRef<Path>) -> PathBuf { pub fn build_public_path(&self, path: impl AsRef<Path>) -> PathBuf {
let mut p = PathBuf::from("/public"); let mut p = PathBuf::from("/public");
p.push(path); p.push(path);
@@ -271,7 +278,7 @@ impl StorageService {
if let Some(mut ranges) = ranges { if let Some(mut ranges) = ranges {
if ranges.len() > 1 { if ranges.len() > 1 {
let boundary = Uuid::new_v4().to_string(); let boundary = Uuid::now_v7().to_string();
let reader = self.reader(storage_path.as_ref()).await?; let reader = self.reader(storage_path.as_ref()).await?;
let stream: impl Stream<Item = Result<Bytes, RecorderError>> = { let stream: impl Stream<Item = Result<Bytes, RecorderError>> = {
let boundary = boundary.clone(); let boundary = boundary.clone();

View File

@@ -14,6 +14,8 @@ pub struct TaskConfig {
pub system_task_reenqueue_orphaned_after: Duration, pub system_task_reenqueue_orphaned_after: Duration,
#[serde(default = "default_cron_retry_duration")] #[serde(default = "default_cron_retry_duration")]
pub cron_retry_duration: Duration, pub cron_retry_duration: Duration,
#[serde(default = "default_cron_interval_duration")]
pub cron_interval_duration: Duration,
} }
impl Default for TaskConfig { impl Default for TaskConfig {
@@ -25,6 +27,7 @@ impl Default for TaskConfig {
default_subscriber_task_reenqueue_orphaned_after(), default_subscriber_task_reenqueue_orphaned_after(),
system_task_reenqueue_orphaned_after: default_system_task_reenqueue_orphaned_after(), system_task_reenqueue_orphaned_after: default_system_task_reenqueue_orphaned_after(),
cron_retry_duration: default_cron_retry_duration(), cron_retry_duration: default_cron_retry_duration(),
cron_interval_duration: default_cron_interval_duration(),
} }
} }
} }
@@ -45,6 +48,10 @@ pub fn default_system_task_workers() -> u32 {
} }
} }
pub fn default_cron_interval_duration() -> Duration {
Duration::from_secs(30)
}
pub fn default_subscriber_task_reenqueue_orphaned_after() -> Duration { pub fn default_subscriber_task_reenqueue_orphaned_after() -> Duration {
Duration::from_secs(3600) Duration::from_secs(3600)
} }

View File

@@ -2,12 +2,16 @@ use std::sync::Arc;
use async_trait::async_trait; use async_trait::async_trait;
use futures::{Stream, StreamExt, pin_mut}; use futures::{Stream, StreamExt, pin_mut};
use serde::{Deserialize, Serialize, de::DeserializeOwned}; use serde::{Serialize, de::DeserializeOwned};
use crate::{app::AppContextTrait, errors::RecorderResult}; use crate::{app::AppContextTrait, errors::RecorderResult};
pub const SYSTEM_TASK_APALIS_NAME: &str = "system_task"; pub const SYSTEM_TASK_APALIS_NAME: &str = "system_task";
pub const SUBSCRIBER_TASK_APALIS_NAME: &str = "subscriber_task"; pub const SUBSCRIBER_TASK_APALIS_NAME: &str = "subscriber_task";
pub const SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME: &str =
"setup_apalis_jobs_extra_foreign_keys";
pub const SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME: &str =
"setup_apalis_jobs_extra_foreign_keys_trigger";
#[async_trait] #[async_trait]
pub trait AsyncTaskTrait: Serialize + DeserializeOwned + Sized { pub trait AsyncTaskTrait: Serialize + DeserializeOwned + Sized {
@@ -41,16 +45,30 @@ where
} }
} }
pub trait SubscriberTaskTrait: AsyncTaskTrait { pub trait SystemTaskTrait: AsyncTaskTrait {
fn get_subscriber_id(&self) -> i32; type InputType: Serialize + DeserializeOwned + Sized + Send;
fn get_subscriber_id(&self) -> Option<i32>;
fn set_subscriber_id(&mut self, subscriber_id: Option<i32>);
fn get_cron_id(&self) -> Option<i32>; fn get_cron_id(&self) -> Option<i32>;
fn set_cron_id(&mut self, cron_id: Option<i32>);
fn from_input(input: Self::InputType, subscriber_id: Option<i32>) -> Self;
} }
pub trait SystemTaskTrait: AsyncTaskTrait {} pub trait SubscriberTaskTrait: AsyncTaskTrait {
type InputType: Serialize + DeserializeOwned + Sized + Send;
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] fn get_subscriber_id(&self) -> i32;
pub struct SubscriberTaskBase {
pub subscriber_id: i32, fn set_subscriber_id(&mut self, subscriber_id: i32);
pub cron_id: Option<i32>,
fn get_cron_id(&self) -> Option<i32>;
fn set_cron_id(&mut self, cron_id: Option<i32>);
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self;
} }

View File

@@ -1,16 +0,0 @@
use sea_orm::sea_query;
#[derive(sea_query::Iden)]
pub enum ApalisSchema {
#[iden = "apalis"]
Schema,
}
#[derive(sea_query::Iden)]
pub enum ApalisJobs {
#[iden = "jobs"]
Table,
Id,
}

View File

@@ -1,20 +1,22 @@
mod config; mod config;
mod core; mod core;
mod r#extern;
mod registry; mod registry;
mod service; mod service;
pub use core::{ pub use core::{
AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, StreamTaskTrait, AsyncTaskTrait, SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_FUNCTION_NAME,
SubscriberTaskBase, SubscriberTaskTrait, SystemTaskTrait, SETUP_APALIS_JOBS_EXTRA_FOREIGN_KEYS_TRIGGER_NAME, SUBSCRIBER_TASK_APALIS_NAME,
SYSTEM_TASK_APALIS_NAME, StreamTaskTrait, SubscriberTaskTrait, SystemTaskTrait,
}; };
pub use config::TaskConfig; pub use config::TaskConfig;
pub use r#extern::{ApalisJobs, ApalisSchema};
pub use registry::{ pub use registry::{
OptimizeImageTask, SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, EchoTask, OptimizeImageTask, SubscriberTask, SubscriberTaskInput, SubscriberTaskType,
SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter,
SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask, SystemTask, SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant, SystemTaskTypeVariantIter, SyncOneSubscriptionSourcesTask, SystemTask, SystemTaskInput, SystemTaskType,
SystemTaskTypeEnum, SystemTaskTypeVariant, SystemTaskTypeVariantIter,
}; };
#[allow(unused_imports)]
pub(crate) use registry::{register_subscriber_task_type, register_system_task_type};
pub use service::TaskService; pub use service::TaskService;

View File

@@ -1,12 +1,14 @@
mod subscriber; mod subscriber;
mod system; mod system;
pub(crate) use subscriber::register_subscriber_task_type;
pub use subscriber::{ pub use subscriber::{
SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant, SubscriberTask, SubscriberTaskInput, SubscriberTaskType, SubscriberTaskTypeEnum,
SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask, SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask, SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask,
}; };
pub(crate) use system::register_system_task_type;
pub use system::{ pub use system::{
OptimizeImageTask, SystemTask, SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant, EchoTask, OptimizeImageTask, SystemTask, SystemTaskInput, SystemTaskType, SystemTaskTypeEnum,
SystemTaskTypeVariantIter, SystemTaskTypeVariant, SystemTaskTypeVariantIter,
}; };

View File

@@ -6,15 +6,36 @@ macro_rules! register_subscriber_task_type {
} }
) => { ) => {
$(#[$type_meta])* $(#[$type_meta])*
#[derive(typed_builder::TypedBuilder)] #[derive(typed_builder::TypedBuilder, ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[ts(rename_all = "camelCase")]
$task_vis struct $task_name { $task_vis struct $task_name {
$($(#[$field_meta])* pub $field_name: $field_type,)* $($(#[$field_meta])* pub $field_name: $field_type,)*
pub subscriber_id: i32, pub subscriber_id: i32,
#[builder(default = None)] #[builder(default = None)]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>, pub cron_id: Option<i32>,
} }
paste::paste! {
$(#[$type_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
$task_vis struct [<$task_name Input>] {
$($(#[$field_meta])* pub $field_name: $field_type,)*
#[serde(default, skip_serializing_if = "Option::is_none")]
pub subscriber_id: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>,
}
}
impl $crate::task::SubscriberTaskTrait for $task_name { impl $crate::task::SubscriberTaskTrait for $task_name {
paste::paste! {
type InputType = [<$task_name Input>];
}
fn get_subscriber_id(&self) -> i32 { fn get_subscriber_id(&self) -> i32 {
self.subscriber_id self.subscriber_id
} }
@@ -22,6 +43,22 @@ macro_rules! register_subscriber_task_type {
fn get_cron_id(&self) -> Option<i32> { fn get_cron_id(&self) -> Option<i32> {
self.cron_id self.cron_id
} }
fn set_subscriber_id(&mut self, subscriber_id: i32) {
self.subscriber_id = subscriber_id;
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
self.cron_id = cron_id;
}
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self {
Self {
$($field_name: input.$field_name,)*
cron_id: input.cron_id,
subscriber_id: input.subscriber_id.unwrap_or(subscriber_id),
}
}
} }
} }
} }

View File

@@ -1,8 +1,8 @@
mod base; mod base;
mod subscription; mod subscription;
pub(crate) use base::register_subscriber_task_type;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult}; use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
pub use subscription::{ pub use subscription::{
SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
SyncOneSubscriptionSourcesTask, SyncOneSubscriptionSourcesTask,
@@ -12,7 +12,7 @@ macro_rules! register_subscriber_task_types {
( (
task_type_enum: { task_type_enum: {
$(#[$type_enum_meta:meta])* $(#[$type_enum_meta:meta])*
pub enum $type_enum_name:ident { $type_vis:vis enum $type_enum_name:ident {
$( $(
$(#[$variant_meta:meta])* $(#[$variant_meta:meta])*
$variant:ident => $string_value:literal $variant:ident => $string_value:literal
@@ -21,16 +21,18 @@ macro_rules! register_subscriber_task_types {
}, },
task_enum: { task_enum: {
$(#[$task_enum_meta:meta])* $(#[$task_enum_meta:meta])*
pub enum $task_enum_name:ident { $task_vis:vis enum $task_enum_name:ident {
$( $(
$(#[$task_variant_meta:meta])*
$task_variant:ident($task_type:ty) $task_variant:ident($task_type:ty)
),* $(,)? ),* $(,)?
} }
} }
) => { ) => {
$(#[$type_enum_meta])* $(#[$type_enum_meta])*
#[derive(serde::Serialize, serde::Deserialize)]
#[sea_orm(rs_type = "String", db_type = "Text")] #[sea_orm(rs_type = "String", db_type = "Text")]
pub enum $type_enum_name { $type_vis enum $type_enum_name {
$( $(
$(#[$variant_meta])* $(#[$variant_meta])*
#[serde(rename = $string_value)] #[serde(rename = $string_value)]
@@ -41,27 +43,28 @@ macro_rules! register_subscriber_task_types {
$(#[$task_enum_meta])* $(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(tag = "task_type")] #[serde(tag = "task_type")]
pub enum $task_enum_name { #[ts(export, rename = "SubscriberTaskType", rename_all = "camelCase", tag = "taskType")]
$task_vis enum $task_enum_name {
$( $(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant($task_type), $task_variant($task_type),
)* )*
} }
impl TryFrom<$task_enum_name> for serde_json::Value { paste::paste! {
type Error = $crate::errors::RecorderError; $(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
fn try_from(value: $task_enum_name) -> Result<Self, Self::Error> { #[serde(tag = "taskType", rename_all = "camelCase")]
let json_value = serde_json::to_value(value)?; #[ts(export, rename_all = "camelCase", tag = "taskType")]
Ok(match json_value { $task_vis enum [<$task_enum_name Input>] {
serde_json::Value::Object(mut map) => { $(
map.remove("task_type"); $(#[$task_variant_meta])*
serde_json::Value::Object(map) #[serde(rename = $string_value)]
} $task_variant(<$task_type as $crate::task::SubscriberTaskTrait>::InputType),
_ => { )*
unreachable!("subscriber task must be an json object");
}
})
} }
} }
@@ -84,6 +87,10 @@ macro_rules! register_subscriber_task_types {
} }
impl $crate::task::SubscriberTaskTrait for $task_enum_name { impl $crate::task::SubscriberTaskTrait for $task_enum_name {
paste::paste! {
type InputType = [<$task_enum_name Input>];
}
fn get_subscriber_id(&self) -> i32 { fn get_subscriber_id(&self) -> i32 {
match self { match self {
$(Self::$task_variant(t) => $(Self::$task_variant(t) =>
@@ -97,6 +104,26 @@ macro_rules! register_subscriber_task_types {
<$task_type as $crate::task::SubscriberTaskTrait>::get_cron_id(t),)* <$task_type as $crate::task::SubscriberTaskTrait>::get_cron_id(t),)*
} }
} }
fn set_subscriber_id(&mut self, subscriber_id: i32) {
match self {
$(Self::$task_variant(t) => t.set_subscriber_id(subscriber_id),)*
}
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
match self {
$(Self::$task_variant(t) => t.set_cron_id(cron_id),)*
}
}
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self {
match input {
$(Self::InputType::$task_variant(t) =>
Self::$task_variant(<$task_type as $crate::task::SubscriberTaskTrait>::from_input(t, subscriber_id)),)*
}
}
} }
$( $(
@@ -114,8 +141,6 @@ register_subscriber_task_types!(
#[derive( #[derive(
Clone, Clone,
Debug, Debug,
Serialize,
Deserialize,
PartialEq, PartialEq,
Eq, Eq,
Copy, Copy,
@@ -130,7 +155,7 @@ register_subscriber_task_types!(
} }
}, },
task_enum: { task_enum: {
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, FromJsonQueryResult)] #[derive(Clone, Debug, PartialEq, FromJsonQueryResult)]
pub enum SubscriberTask { pub enum SubscriberTask {
SyncOneSubscriptionFeedsIncremental(SyncOneSubscriptionFeedsIncrementalTask), SyncOneSubscriptionFeedsIncremental(SyncOneSubscriptionFeedsIncrementalTask),
SyncOneSubscriptionFeedsFull(SyncOneSubscriptionFeedsFullTask), SyncOneSubscriptionFeedsFull(SyncOneSubscriptionFeedsFullTask),

View File

@@ -1,5 +1,4 @@
use sea_orm::prelude::*; use sea_orm::prelude::*;
use serde::{Deserialize, Serialize};
use super::base::register_subscriber_task_type; use super::base::register_subscriber_task_type;
use crate::{errors::RecorderResult, models::subscriptions::SubscriptionTrait}; use crate::{errors::RecorderResult, models::subscriptions::SubscriptionTrait};
@@ -40,7 +39,7 @@ macro_rules! register_subscription_task_type {
} }
register_subscription_task_type! { register_subscription_task_type! {
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq)]
pub struct SyncOneSubscriptionFeedsIncrementalTask { pub struct SyncOneSubscriptionFeedsIncrementalTask {
} => async |subscription, ctx| -> RecorderResult<()> { } => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_feeds_incremental(ctx).await?; subscription.sync_feeds_incremental(ctx).await?;
@@ -49,7 +48,7 @@ register_subscription_task_type! {
} }
register_subscription_task_type! { register_subscription_task_type! {
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq)]
pub struct SyncOneSubscriptionFeedsFullTask { pub struct SyncOneSubscriptionFeedsFullTask {
} => async |subscription, ctx| -> RecorderResult<()> { } => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_feeds_full(ctx).await?; subscription.sync_feeds_full(ctx).await?;
@@ -58,7 +57,7 @@ register_subscription_task_type! {
} }
register_subscription_task_type! { register_subscription_task_type! {
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq)]
pub struct SyncOneSubscriptionSourcesTask { pub struct SyncOneSubscriptionSourcesTask {
} => async |subscription, ctx| -> RecorderResult<()> { } => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_sources(ctx).await?; subscription.sync_sources(ctx).await?;

View File

@@ -0,0 +1,67 @@
macro_rules! register_system_task_type {
(
$(#[$type_meta:meta])*
$task_vis:vis struct $task_name:ident {
$($(#[$field_meta:meta])* pub $field_name:ident: $field_type:ty),* $(,)?
}
) => {
$(#[$type_meta])*
#[derive(typed_builder::TypedBuilder, ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[ts(rename_all = "camelCase")]
$task_vis struct $task_name {
$($(#[$field_meta])* pub $field_name: $field_type,)*
#[serde(default, skip_serializing_if = "Option::is_none")]
#[builder(default = None)]
pub subscriber_id: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
#[builder(default = None)]
pub cron_id: Option<i32>,
}
paste::paste! {
$(#[$type_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
$task_vis struct [<$task_name Input>] {
$($(#[$field_meta])* pub $field_name: $field_type,)*
#[serde(default, skip_serializing_if = "Option::is_none")]
pub subscriber_id: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>,
}
}
impl $crate::task::SystemTaskTrait for $task_name {
paste::paste! {
type InputType = [<$task_name Input>];
}
fn get_subscriber_id(&self) -> Option<i32> {
self.subscriber_id
}
fn get_cron_id(&self) -> Option<i32> {
self.cron_id
}
fn set_subscriber_id(&mut self, subscriber_id: Option<i32>) {
self.subscriber_id = subscriber_id;
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
self.cron_id = cron_id;
}
fn from_input(input: Self::InputType, subscriber_id: Option<i32>) -> Self {
Self {
$($field_name: input.$field_name,)*
subscriber_id: input.subscriber_id.or(subscriber_id),
cron_id: input.cron_id,
}
}
}
}
}
pub(crate) use register_system_task_type;

View File

@@ -1,18 +1,22 @@
use std::sync::Arc; use std::sync::Arc;
use quirks_path::Path; use quirks_path::Path;
use serde::{Deserialize, Serialize};
use tracing::instrument; use tracing::instrument;
use crate::{ use crate::{
app::AppContextTrait, errors::RecorderResult, media::EncodeImageOptions, task::AsyncTaskTrait, app::AppContextTrait,
errors::RecorderResult,
media::EncodeImageOptions,
task::{AsyncTaskTrait, register_system_task_type},
}; };
#[derive(Clone, Debug, Serialize, Deserialize)] register_system_task_type! {
pub struct OptimizeImageTask { #[derive(Clone, Debug, PartialEq)]
pub source_path: String, pub struct OptimizeImageTask {
pub target_path: String, pub source_path: String,
pub format_options: EncodeImageOptions, pub target_path: String,
pub format_options: EncodeImageOptions,
}
} }
#[async_trait::async_trait] #[async_trait::async_trait]

View File

@@ -0,0 +1,29 @@
use std::sync::Arc;
use chrono::Utc;
use crate::{
app::AppContextTrait,
errors::RecorderResult,
task::{AsyncTaskTrait, register_system_task_type},
};
register_system_task_type! {
#[derive(Debug, Clone, PartialEq)]
pub struct EchoTask {
pub task_id: String,
}
}
#[async_trait::async_trait]
impl AsyncTaskTrait for EchoTask {
async fn run_async(self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
tracing::info!(
"EchoTask {} start running at {}",
self.task_id,
Utc::now().to_rfc3339()
);
Ok(())
}
}

View File

@@ -1,14 +1,17 @@
mod base;
mod media; mod media;
mod misc;
pub(crate) use base::register_system_task_type;
pub use media::OptimizeImageTask; pub use media::OptimizeImageTask;
pub use misc::EchoTask;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult}; use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
macro_rules! register_system_task_types { macro_rules! register_system_task_types {
( (
task_type_enum: { task_type_enum: {
$(#[$type_enum_meta:meta])* $(#[$type_enum_meta:meta])*
pub enum $type_enum_name:ident { $type_vis:vis enum $type_enum_name:ident {
$( $(
$(#[$variant_meta:meta])* $(#[$variant_meta:meta])*
$variant:ident => $string_value:literal $variant:ident => $string_value:literal
@@ -17,16 +20,18 @@ macro_rules! register_system_task_types {
}, },
task_enum: { task_enum: {
$(#[$task_enum_meta:meta])* $(#[$task_enum_meta:meta])*
pub enum $task_enum_name:ident { $task_vis:vis enum $task_enum_name:ident {
$( $(
$(#[$task_variant_meta:meta])*
$task_variant:ident($task_type:ty) $task_variant:ident($task_type:ty)
),* $(,)? ),* $(,)?
} }
} }
) => { ) => {
$(#[$type_enum_meta])* $(#[$type_enum_meta])*
#[derive(serde::Serialize, serde::Deserialize, PartialEq, Eq)]
#[sea_orm(rs_type = "String", db_type = "Text")] #[sea_orm(rs_type = "String", db_type = "Text")]
pub enum $type_enum_name { $type_vis enum $type_enum_name {
$( $(
$(#[$variant_meta])* $(#[$variant_meta])*
#[serde(rename = $string_value)] #[serde(rename = $string_value)]
@@ -37,30 +42,17 @@ macro_rules! register_system_task_types {
$(#[$task_enum_meta])* $(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize, PartialEq)]
#[serde(tag = "task_type")] #[serde(tag = "task_type")]
pub enum $task_enum_name { #[ts(export, rename = "SystemTaskType", rename_all = "camelCase", tag = "taskType")]
$task_vis enum $task_enum_name {
$( $(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant($task_type), $task_variant($task_type),
)* )*
} }
impl TryFrom<$task_enum_name> for serde_json::Value {
type Error = $crate::errors::RecorderError;
fn try_from(value: $task_enum_name) -> Result<Self, Self::Error> {
let json_value = serde_json::to_value(value)?;
Ok(match json_value {
serde_json::Value::Object(mut map) => {
map.remove("task_type");
serde_json::Value::Object(map)
}
_ => {
unreachable!("subscriber task must be an json object");
}
})
}
}
impl $task_enum_name { impl $task_enum_name {
pub fn task_type(&self) -> $type_enum_name { pub fn task_type(&self) -> $type_enum_name {
match self { match self {
@@ -69,6 +61,21 @@ macro_rules! register_system_task_types {
} }
} }
paste::paste! {
$(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize, PartialEq)]
#[serde(tag = "taskType", rename_all = "camelCase")]
#[ts(export, rename_all = "camelCase", tag = "taskType")]
$task_vis enum [<$task_enum_name Input>] {
$(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant(<$task_type as $crate::task::SystemTaskTrait>::InputType),
)*
}
}
#[async_trait::async_trait] #[async_trait::async_trait]
impl $crate::task::AsyncTaskTrait for $task_enum_name { impl $crate::task::AsyncTaskTrait for $task_enum_name {
async fn run_async(self, ctx: std::sync::Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> { async fn run_async(self, ctx: std::sync::Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
@@ -78,6 +85,51 @@ macro_rules! register_system_task_types {
} }
} }
} }
impl $crate::task::SystemTaskTrait for $task_enum_name {
paste::paste! {
type InputType = [<$task_enum_name Input>];
}
fn get_subscriber_id(&self) -> Option<i32> {
match self {
$(Self::$task_variant(t) => t.get_subscriber_id(),)*
}
}
fn get_cron_id(&self) -> Option<i32> {
match self {
$(Self::$task_variant(t) => t.get_cron_id(),)*
}
}
fn set_subscriber_id(&mut self, subscriber_id: Option<i32>) {
match self {
$(Self::$task_variant(t) => t.set_subscriber_id(subscriber_id),)*
}
}
fn set_cron_id(&mut self, cron_id: Option<i32>) {
match self {
$(Self::$task_variant(t) => t.set_cron_id(cron_id),)*
}
}
fn from_input(input: Self::InputType, subscriber_id: Option<i32>) -> Self {
match input {
$(Self::InputType::$task_variant(t) =>
Self::$task_variant(<$task_type as $crate::task::SystemTaskTrait>::from_input(t, subscriber_id)),)*
}
}
}
$(
impl From<$task_type> for $task_enum_name {
fn from(task: $task_type) -> Self {
Self::$task_variant(task)
}
}
)*
}; };
} }
@@ -86,23 +138,21 @@ register_system_task_types! {
#[derive( #[derive(
Clone, Clone,
Debug, Debug,
Serialize,
Deserialize,
PartialEq,
Eq,
Copy, Copy,
DeriveActiveEnum, DeriveActiveEnum,
DeriveDisplay, DeriveDisplay,
EnumIter, EnumIter
)] )]
pub enum SystemTaskType { pub enum SystemTaskType {
OptimizeImage => "optimize_image" OptimizeImage => "optimize_image",
Test => "test",
} }
}, },
task_enum: { task_enum: {
#[derive(Clone, Debug, Serialize, Deserialize, FromJsonQueryResult)] #[derive(Clone, Debug, FromJsonQueryResult)]
pub enum SystemTask { pub enum SystemTask {
OptimizeImage(OptimizeImageTask), OptimizeImage(OptimizeImageTask),
Echo(EchoTask),
} }
} }
} }

View File

@@ -6,13 +6,14 @@ use apalis_sql::{
context::SqlContext, context::SqlContext,
postgres::{PgListen as ApalisPgListen, PostgresStorage as ApalisPostgresStorage}, postgres::{PgListen as ApalisPgListen, PostgresStorage as ApalisPostgresStorage},
}; };
use sea_orm::sqlx::postgres::PgListener; use sea_orm::{ActiveModelTrait, sqlx::postgres::PgListener};
use tokio::sync::RwLock; use tokio::sync::RwLock;
use uuid::Uuid;
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::{RecorderError, RecorderResult}, errors::{RecorderError, RecorderResult},
models::cron::{self, CRON_DUE_EVENT}, models::cron::{self, CRON_DUE_DEBUG_EVENT, CRON_DUE_EVENT},
task::{ task::{
AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, SubscriberTask, AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, SubscriberTask,
TaskConfig, TaskConfig,
@@ -53,7 +54,7 @@ impl TaskService {
Ok(Self { Ok(Self {
config, config,
cron_worker_id: nanoid::nanoid!(), cron_worker_id: Uuid::now_v7().to_string(),
ctx, ctx,
subscriber_task_storage: Arc::new(RwLock::new(subscriber_task_storage)), subscriber_task_storage: Arc::new(RwLock::new(subscriber_task_storage)),
system_task_storage: Arc::new(RwLock::new(system_task_storage)), system_task_storage: Arc::new(RwLock::new(system_task_storage)),
@@ -136,83 +137,110 @@ impl TaskService {
Ok(task_id) Ok(task_id)
} }
pub async fn run<F, Fut>(&self, shutdown_signal: Option<F>) -> RecorderResult<()> pub async fn add_subscriber_task_cron(
&self,
cm: cron::ActiveModel,
) -> RecorderResult<cron::Model> {
let db = self.ctx.db();
let m = cm.insert(db).await?;
Ok(m)
}
pub async fn add_system_task_cron(&self, cm: cron::ActiveModel) -> RecorderResult<cron::Model> {
let db = self.ctx.db();
let m = cm.insert(db).await?;
Ok(m)
}
pub async fn run(&self) -> RecorderResult<()> {
self.run_with_signal(None::<fn() -> std::future::Ready<()>>)
.await
}
pub async fn run_with_signal<F, Fut>(&self, shutdown_signal: Option<F>) -> RecorderResult<()>
where where
F: Fn() -> Fut + Send + 'static, F: FnOnce() -> Fut + Send + 'static,
Fut: Future<Output = ()> + Send, Fut: Future<Output = ()> + Send,
{ {
tokio::try_join!( tokio::select! {
async { _ = {
let monitor = self.setup_apalis_monitor().await?; let monitor = self.setup_apalis_monitor().await?;
if let Some(shutdown_signal) = shutdown_signal { async move {
monitor if let Some(shutdown_signal) = shutdown_signal {
.run_with_signal(async move { monitor
shutdown_signal().await; .run_with_signal(async move {
tracing::info!("apalis shutting down..."); shutdown_signal().await;
Ok(()) tracing::info!("apalis shutting down...");
}) Ok(())
.await?; })
} else { .await?;
monitor.run().await?; } else {
monitor.run().await?;
}
Ok::<_, RecorderError>(())
} }
Ok::<_, RecorderError>(()) } => {}
}, _ = {
async {
let listener = self.setup_apalis_listener().await?; let listener = self.setup_apalis_listener().await?;
tokio::task::spawn(async move { async move {
if let Err(e) = listener.listen().await { if let Err(e) = listener.listen().await {
tracing::error!("Error listening to apalis: {e}"); tracing::error!("Error listening to apalis: {e}");
} }
}); Ok::<_, RecorderError>(())
Ok::<_, RecorderError>(()) }
}, } => {},
async { _ = {
let listener = self.setup_cron_due_listening().await?; let mut listener = self.setup_cron_due_listening().await?;
let ctx = self.ctx.clone();
let cron_worker_id = self.cron_worker_id.clone(); let cron_worker_id = self.cron_worker_id.clone();
let retry_duration = chrono::Duration::milliseconds( let retry_duration =
self.config.cron_retry_duration.as_millis() as i64, chrono::Duration::milliseconds(self.config.cron_retry_duration.as_millis() as i64);
); let cron_interval_duration = self.config.cron_interval_duration;
async move {
listener.listen_all([CRON_DUE_EVENT as &str, CRON_DUE_DEBUG_EVENT as &str]).await?;
tokio::task::spawn(async move { tokio::join!(
if let Err(e) =
Self::listen_cron_due(listener, ctx, &cron_worker_id, retry_duration).await
{
tracing::error!("Error listening to cron due: {e}");
}
});
Ok::<_, RecorderError>(())
},
async {
let ctx = self.ctx.clone();
let retry_duration = chrono::Duration::milliseconds(
self.config.cron_retry_duration.as_millis() as i64,
);
tokio::task::spawn(async move {
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(60));
loop {
interval.tick().await;
if let Err(e) = cron::Model::check_and_cleanup_expired_cron_locks(
ctx.as_ref(),
retry_duration,
)
.await
{ {
tracing::error!( let ctx = self.ctx.clone();
"Error checking and cleaning up expired cron locks: {e}" async move {
); if let Err(e) =
} Self::listen_cron_due(listener, ctx, &cron_worker_id, retry_duration)
if let Err(e) = cron::Model::check_and_trigger_due_crons(ctx.as_ref()).await .await
{
tracing::error!("Error listening to cron due: {e}");
}
}
},
{ {
tracing::error!("Error checking and triggering due crons: {e}"); let ctx = self.ctx.clone();
} let mut interval = tokio::time::interval(cron_interval_duration);
} async move {
}); loop {
interval.tick().await;
if let Err(e) = cron::Model::check_and_cleanup_expired_cron_locks(
ctx.as_ref(),
retry_duration,
)
.await
{
tracing::error!(
"Error checking and cleaning up expired cron locks: {e}"
);
}
if let Err(e) =
cron::Model::check_and_trigger_due_crons(ctx.as_ref()).await
{
tracing::error!("Error checking and triggering due crons: {e}");
}
}
}
}
);
Ok::<_, RecorderError>(())
}
} => {}
};
Ok::<_, RecorderError>(())
}
)?;
Ok(()) Ok(())
} }
@@ -267,6 +295,7 @@ impl TaskService {
async fn setup_cron_due_listening(&self) -> RecorderResult<PgListener> { async fn setup_cron_due_listening(&self) -> RecorderResult<PgListener> {
let pool = self.ctx.db().get_postgres_connection_pool().clone(); let pool = self.ctx.db().get_postgres_connection_pool().clone();
let listener = PgListener::connect_with(&pool).await?; let listener = PgListener::connect_with(&pool).await?;
tracing::debug!("Cron due listener connected to postgres");
Ok(listener) Ok(listener)
} }
@@ -277,20 +306,129 @@ impl TaskService {
worker_id: &str, worker_id: &str,
retry_duration: chrono::Duration, retry_duration: chrono::Duration,
) -> RecorderResult<()> { ) -> RecorderResult<()> {
listener.listen(CRON_DUE_EVENT).await?;
loop { loop {
let notification = listener.recv().await?; let notification = listener.recv().await?;
if let Err(e) = cron::Model::handle_cron_notification( if notification.channel() == CRON_DUE_DEBUG_EVENT {
ctx.as_ref(), tracing::debug!("Received cron due debug event: {:?}", notification);
notification, continue;
worker_id, } else if notification.channel() == CRON_DUE_EVENT
retry_duration, && let Err(e) = cron::Model::handle_cron_notification(
) ctx.as_ref(),
.await notification,
worker_id,
retry_duration,
)
.await
{ {
tracing::error!("Error handling cron notification: {e}"); tracing::error!("Error handling cron notification: {e}");
} }
} }
} }
} }
#[cfg(test)]
#[allow(unused_variables)]
mod tests {
    use std::time::Duration;

    use chrono::Utc;
    use rstest::{fixture, rstest};
    use sea_orm::ActiveValue;
    use tracing::Level;

    use super::*;
    use crate::{
        models::cron,
        task::EchoTask,
        test_utils::{
            app::{TestingAppContextConfig, TestingPreset},
            tracing::try_init_testing_tracing,
        },
    };

    // Shared rstest fixture: initializes test tracing at DEBUG so that
    // `tracing_test::logs_contain` assertions below can observe task output.
    #[fixture]
    fn before_each() {
        try_init_testing_tracing(Level::DEBUG);
    }

    // Verifies the polling path: a cron registered *before* the service starts
    // is picked up by the periodic `check_and_trigger_due_crons` loop, which
    // here ticks every 1500 ms (set via `cron_interval_duration`).
    #[rstest]
    #[tokio::test]
    #[tracing_test::traced_test]
    async fn test_check_and_trigger_due_crons_with_certain_interval(
        before_each: (),
    ) -> RecorderResult<()> {
        let preset = TestingPreset::default_with_config(
            TestingAppContextConfig::builder()
                .task_config(TaskConfig {
                    cron_interval_duration: Duration::from_millis(1500),
                    ..Default::default()
                })
                .build(),
        )
        .await?;

        let app_ctx = preset.app_ctx;

        let task_service = app_ctx.task();

        // Unique marker echoed by the task so the log assertion below can
        // match this run specifically.
        let task_id = Uuid::now_v7().to_string();

        // Fires every second; the timezone only affects schedule evaluation,
        // not the stored timestamps.
        let echo_cron = cron::ActiveModel {
            cron_expr: ActiveValue::Set("*/1 * * * * *".to_string()),
            cron_timezone: ActiveValue::Set("Asia/Singapore".to_string()),
            system_task_cron: ActiveValue::Set(Some(
                EchoTask::builder().task_id(task_id.clone()).build().into(),
            )),
            ..Default::default()
        };

        task_service.add_system_task_cron(echo_cron).await?;

        // Run the service for 2 s — long enough for at least one interval
        // tick (1.5 s) plus task execution — then shut down via the signal.
        task_service
            .run_with_signal(Some(async move || {
                tokio::time::sleep(std::time::Duration::from_secs(2)).await;
            }))
            .await?;

        assert!(logs_contain(&format!(
            "EchoTask {task_id} start running at"
        )));

        Ok(())
    }

    // Verifies the notification path: a cron inserted *while* the service is
    // already running (with `next_run` backdated 10 s so it is immediately
    // due) is triggered by the insert itself, well before any periodic tick.
    #[rstest]
    #[tokio::test]
    #[tracing_test::traced_test]
    async fn test_trigger_due_cron_when_mutating(before_each: ()) -> RecorderResult<()> {
        let preset = TestingPreset::default().await?;

        let app_ctx = preset.app_ctx;

        let task_service = app_ctx.task();

        let task_id = Uuid::now_v7().to_string();

        let echo_cron = cron::ActiveModel {
            cron_expr: ActiveValue::Set("* * * */1 * *".to_string()),
            cron_timezone: ActiveValue::Set("Asia/Singapore".to_string()),
            // Backdated so the cron is due the moment it is inserted.
            next_run: ActiveValue::Set(Some(Utc::now() + chrono::Duration::seconds(-10))),
            system_task_cron: ActiveValue::Set(Some(
                EchoTask::builder().task_id(task_id.clone()).build().into(),
            )),
            ..Default::default()
        };

        // Start the service first (stops after 500 ms), then insert the cron
        // 100 ms in — the trigger must come from the mutation, not startup.
        let task_runner = task_service.run_with_signal(Some(async move || {
            tokio::time::sleep(std::time::Duration::from_millis(500)).await;
        }));

        tokio::time::sleep(std::time::Duration::from_millis(100)).await;

        task_service.add_system_task_cron(echo_cron).await?;

        task_runner.await?;

        assert!(logs_contain(&format!(
            "EchoTask {task_id} start running at"
        )));

        Ok(())
    }
}

View File

@@ -5,11 +5,13 @@ use typed_builder::TypedBuilder;
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::RecorderResult,
task::TaskConfig,
test_utils::{ test_utils::{
crypto::build_testing_crypto_service, crypto::build_testing_crypto_service,
database::{TestingDatabaseServiceConfig, build_testing_database_service}, database::{TestingDatabaseServiceConfig, build_testing_database_service},
media::build_testing_media_service, media::build_testing_media_service,
mikan::build_testing_mikan_client, mikan::{MikanMockServer, build_testing_mikan_client},
storage::build_testing_storage_service, storage::build_testing_storage_service,
task::build_testing_task_service, task::build_testing_task_service,
}, },
@@ -42,12 +44,11 @@ impl TestingAppContext {
self.task.get_or_init(|| task); self.task.get_or_init(|| task);
} }
pub async fn from_preset( pub async fn from_config(config: TestingAppContextConfig) -> RecorderResult<Arc<Self>> {
preset: TestingAppContextPreset, let mikan_base_url = config.mikan_base_url.expect("mikan_base_url is required");
) -> crate::errors::RecorderResult<Arc<Self>> { let mikan_client = build_testing_mikan_client(mikan_base_url).await?;
let mikan_client = build_testing_mikan_client(preset.mikan_base_url.clone()).await?;
let db_service = let db_service =
build_testing_database_service(preset.database_config.unwrap_or_default()).await?; build_testing_database_service(config.database_config.unwrap_or_default()).await?;
let crypto_service = build_testing_crypto_service().await?; let crypto_service = build_testing_crypto_service().await?;
let storage_service = build_testing_storage_service().await?; let storage_service = build_testing_storage_service().await?;
let media_service = build_testing_media_service().await?; let media_service = build_testing_media_service().await?;
@@ -61,7 +62,7 @@ impl TestingAppContext {
.build(), .build(),
); );
let task_service = build_testing_task_service(app_ctx.clone()).await?; let task_service = build_testing_task_service(config.task_config, app_ctx.clone()).await?;
app_ctx.set_task(task_service); app_ctx.set_task(task_service);
@@ -133,7 +134,44 @@ impl AppContextTrait for TestingAppContext {
} }
} }
pub struct TestingAppContextPreset { #[derive(TypedBuilder, Debug)]
pub mikan_base_url: String, #[builder(field_defaults(default, setter(strip_option)))]
pub struct TestingAppContextConfig {
pub mikan_base_url: Option<String>,
pub database_config: Option<TestingDatabaseServiceConfig>, pub database_config: Option<TestingDatabaseServiceConfig>,
pub task_config: Option<TaskConfig>,
}
#[derive(TypedBuilder)]
pub struct TestingPreset {
pub mikan_server: MikanMockServer,
pub app_ctx: Arc<dyn AppContextTrait>,
}
impl TestingPreset {
pub async fn default_with_config(config: TestingAppContextConfig) -> RecorderResult<Self> {
let mikan_server = MikanMockServer::new().await?;
let mixed_config = TestingAppContextConfig {
mikan_base_url: Some(mikan_server.base_url().to_string()),
..config
};
let app_ctx = TestingAppContext::from_config(mixed_config).await?;
let preset = Self::builder()
.mikan_server(mikan_server)
.app_ctx(app_ctx)
.build();
Ok(preset)
}
pub async fn default() -> RecorderResult<Self> {
Self::default_with_config(TestingAppContextConfig {
mikan_base_url: None,
database_config: None,
task_config: None,
})
.await
}
} }

View File

@@ -3,6 +3,7 @@ use crate::{
errors::RecorderResult, errors::RecorderResult,
}; };
#[derive(Clone, Debug)]
pub struct TestingDatabaseServiceConfig { pub struct TestingDatabaseServiceConfig {
pub auto_migrate: bool, pub auto_migrate: bool,
} }
@@ -51,7 +52,7 @@ pub async fn build_testing_database_service(
uri: connection_string, uri: connection_string,
enable_logging: true, enable_logging: true,
min_connections: 1, min_connections: 1,
max_connections: 1, max_connections: 5,
connect_timeout: 5000, connect_timeout: 5000,
idle_timeout: 10000, idle_timeout: 10000,
acquire_timeout: None, acquire_timeout: None,

View File

@@ -1,5 +1,6 @@
use std::{ use std::{
collections::HashMap, collections::HashMap,
fmt::Debug,
ops::{Deref, DerefMut}, ops::{Deref, DerefMut},
path::{self, PathBuf}, path::{self, PathBuf},
}; };
@@ -148,13 +149,15 @@ impl AsRef<path::Path> for MikanDoppelPath {
} }
} }
#[cfg(any(test, debug_assertions, feature = "test-utils"))]
lazy_static! { lazy_static! {
static ref TEST_RESOURCES_DIR: String = static ref TEST_RESOURCES_DIR: String =
if cfg!(any(test, debug_assertions, feature = "playground")) { format!("{}/tests/resources", env!("CARGO_MANIFEST_DIR"));
format!("{}/tests/resources", env!("CARGO_MANIFEST_DIR")) }
} else {
"tests/resources".to_string() #[cfg(not(any(test, debug_assertions, feature = "test-utils")))]
}; lazy_static! {
static ref TEST_RESOURCES_DIR: String = "tests/resources".to_string();
} }
impl From<Url> for MikanDoppelPath { impl From<Url> for MikanDoppelPath {
@@ -227,6 +230,14 @@ pub struct MikanMockServer {
base_url: Url, base_url: Url,
} }
impl Debug for MikanMockServer {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("MikanMockServer")
.field("base_url", &self.base_url)
.finish()
}
}
impl MikanMockServer { impl MikanMockServer {
pub async fn new_with_port(port: u16) -> RecorderResult<Self> { pub async fn new_with_port(port: u16) -> RecorderResult<Self> {
let server = mockito::Server::new_with_opts_async(mockito::ServerOpts { let server = mockito::Server::new_with_opts_async(mockito::ServerOpts {

View File

@@ -7,9 +7,11 @@ use crate::{
}; };
pub async fn build_testing_task_service( pub async fn build_testing_task_service(
config: Option<TaskConfig>,
ctx: Arc<dyn AppContextTrait>, ctx: Arc<dyn AppContextTrait>,
) -> RecorderResult<TaskService> { ) -> RecorderResult<TaskService> {
let config = TaskConfig::default(); let config = config.unwrap_or_default();
let task_service = TaskService::from_config_and_ctx(config, ctx).await?; let task_service = TaskService::from_config_and_ctx(config, ctx).await?;
Ok(task_service) Ok(task_service)
} }

View File

@@ -110,7 +110,7 @@ fn make_request_id(maybe_request_id: Option<HeaderValue>) -> String {
}); });
id.filter(|s| !s.is_empty()) id.filter(|s| !s.is_empty())
}) })
.unwrap_or_else(|| Uuid::new_v4().to_string()) .unwrap_or_else(|| Uuid::now_v7().to_string())
} }
#[cfg(test)] #[cfg(test)]

View File

@@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"rootDir": ".",
"composite": true,
"module": "ESNext",
"moduleResolution": "bundler"
},
"include": ["bindings"]
}

View File

@@ -12,6 +12,13 @@ const config: CodegenConfig = {
}, },
config: { config: {
enumsAsConst: true, enumsAsConst: true,
useTypeImports: true,
scalars: {
SubscriberTaskType: {
input: 'recorder/bindings/SubscriberTaskInput#SubscriberTaskInput',
output: 'recorder/bindings/SubscriberTaskType#SubscriberTaskType',
},
},
}, },
}, },
}, },

View File

@@ -17,6 +17,8 @@
"@corvu/drawer": "^0.2.4", "@corvu/drawer": "^0.2.4",
"@corvu/otp-field": "^0.1.4", "@corvu/otp-field": "^0.1.4",
"@corvu/resizable": "^0.2.5", "@corvu/resizable": "^0.2.5",
"@datasert/cronjs-matcher": "^1.4.0",
"@datasert/cronjs-parser": "^1.4.0",
"@graphiql/toolkit": "^0.11.3", "@graphiql/toolkit": "^0.11.3",
"@hookform/resolvers": "^5.1.1", "@hookform/resolvers": "^5.1.1",
"@outposts/injection-js": "^2.5.1", "@outposts/injection-js": "^2.5.1",
@@ -49,6 +51,7 @@
"@rsbuild/plugin-react": "^1.3.2", "@rsbuild/plugin-react": "^1.3.2",
"@tanstack/react-form": "^1.12.3", "@tanstack/react-form": "^1.12.3",
"@tanstack/react-query": "^5.80.7", "@tanstack/react-query": "^5.80.7",
"@tanstack/react-router": "^1.121.2",
"@tanstack/react-table": "^8.21.3", "@tanstack/react-table": "^8.21.3",
"@tanstack/store": "^0.7.1", "@tanstack/store": "^0.7.1",
"arktype": "^2.1.20", "arktype": "^2.1.20",
@@ -70,14 +73,15 @@
"react-dom": "^19.1.0", "react-dom": "^19.1.0",
"react-resizable-panels": "^3.0.2", "react-resizable-panels": "^3.0.2",
"recharts": "^2.15.3", "recharts": "^2.15.3",
"recorder": "workspace:*",
"rxjs": "^7.8.2", "rxjs": "^7.8.2",
"sonner": "^2.0.5", "sonner": "^2.0.5",
"tailwind-merge": "^3.3.1", "tailwind-merge": "^3.3.1",
"tailwind-scrollbar": "^4.0.2",
"tailwindcss": "^4.1.10", "tailwindcss": "^4.1.10",
"tw-animate-css": "^1.3.4", "tw-animate-css": "^1.3.4",
"type-fest": "^4.41.0", "type-fest": "^4.41.0",
"vaul": "^1.1.2", "vaul": "^1.1.2"
"@tanstack/react-router": "^1.121.2"
}, },
"devDependencies": { "devDependencies": {
"@graphql-codegen/cli": "^5.0.7", "@graphql-codegen/cli": "^5.0.7",
@@ -86,13 +90,14 @@
"@graphql-typed-document-node/core": "^3.2.0", "@graphql-typed-document-node/core": "^3.2.0",
"@parcel/watcher": "^2.5.1", "@parcel/watcher": "^2.5.1",
"@rsbuild/core": "^1.3.22", "@rsbuild/core": "^1.3.22",
"@rsbuild/plugin-type-check": "^1.2.3",
"@tailwindcss/postcss": "^4.1.10", "@tailwindcss/postcss": "^4.1.10",
"@tanstack/router-devtools": "^1.121.5",
"@tanstack/router-plugin": "^1.121.4",
"@types/react": "^19.1.8", "@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6", "@types/react-dom": "^19.1.6",
"chalk": "^5.4.1", "chalk": "^5.4.1",
"commander": "^14.0.0", "commander": "^14.0.0",
"postcss": "^8.5.5", "postcss": "^8.5.5"
"@tanstack/router-devtools": "^1.121.5",
"@tanstack/router-plugin": "^1.121.4"
} }
} }

View File

@@ -1,13 +1,26 @@
import { defineConfig } from '@rsbuild/core'; import { defineConfig } from '@rsbuild/core';
import { pluginReact } from '@rsbuild/plugin-react'; import { pluginReact } from '@rsbuild/plugin-react';
import { pluginTypeCheck } from '@rsbuild/plugin-type-check';
import { TanStackRouterRspack } from '@tanstack/router-plugin/rspack'; import { TanStackRouterRspack } from '@tanstack/router-plugin/rspack';
const TS_NO_CHECK_REGEX =
/[\\/]node_modules[\\/]|[\\/]gql[\\/]|[\\/]components[\\/]ui[\\/]/;
export default defineConfig({ export default defineConfig({
html: { html: {
title: 'Konobangu', title: 'Konobangu',
favicon: './public/assets/favicon.ico', favicon: './public/assets/favicon.ico',
}, },
plugins: [pluginReact()], plugins: [
pluginReact(),
pluginTypeCheck({
tsCheckerOptions: {
issue: {
exclude: [({ file = '' }) => TS_NO_CHECK_REGEX.test(file)],
},
},
}),
],
tools: { tools: {
rspack: { rspack: {
plugins: [ plugins: [

View File

@@ -145,3 +145,5 @@
cursor: pointer; cursor: pointer;
} }
} }
@plugin "tailwind-scrollbar";

View File

@@ -1,4 +1,3 @@
import type { NavMainGroup } from '@/infra/routes/nav';
import { import {
BookOpen, BookOpen,
Folders, Folders,
@@ -9,6 +8,7 @@ import {
Telescope, Telescope,
Tv, Tv,
} from 'lucide-react'; } from 'lucide-react';
import type { NavMainGroup } from '@/infra/routes/nav';
export const AppNavMainData: NavMainGroup[] = [ export const AppNavMainData: NavMainGroup[] = [
{ {
@@ -49,13 +49,13 @@ export const AppNavMainData: NavMainGroup[] = [
{ {
title: 'Manage', title: 'Manage',
link: { link: {
to: '/bangumi/recorder', to: '/bangumi',
}, },
}, },
{ {
title: 'Feed', title: 'Feed',
link: { link: {
to: '/bangumi/feed', to: '/bangumi',
}, },
}, },
], ],
@@ -65,11 +65,17 @@ export const AppNavMainData: NavMainGroup[] = [
icon: ListTodo, icon: ListTodo,
children: [ children: [
{ {
title: 'Manage', title: 'Tasks',
link: { link: {
to: '/tasks/manage', to: '/tasks/manage',
}, },
}, },
{
title: 'Crons',
link: {
to: '/tasks/cron/manage',
},
},
], ],
}, },
{ {

View File

@@ -1,7 +1,7 @@
'use client'; 'use client';
import { useMatches } from '@tanstack/react-router';
import { ChevronRight } from 'lucide-react'; import { ChevronRight } from 'lucide-react';
import { import {
Collapsible, Collapsible,
CollapsibleContent, CollapsibleContent,
@@ -27,13 +27,8 @@ import {
useSidebar, useSidebar,
} from '@/components/ui/sidebar'; } from '@/components/ui/sidebar';
import type { NavMainGroup, NavMainItem } from '@/infra/routes/nav'; import type { NavMainGroup, NavMainItem } from '@/infra/routes/nav';
import { useMatches } from '@tanstack/react-router';
export function NavMain({ export function NavMain({ groups }: { groups: NavMainGroup[] }) {
groups,
}: {
groups: NavMainGroup[];
}) {
const matches = useMatches(); const matches = useMatches();
const { state } = useSidebar(); const { state } = useSidebar();

View File

@@ -1,4 +1,4 @@
import { type VariantProps, cva } from "class-variance-authority"; import { cva, type VariantProps } from "class-variance-authority";
import * as React from "react"; import * as React from "react";
import { cn } from "@/presentation/utils"; import { cn } from "@/presentation/utils";

View File

@@ -0,0 +1,52 @@
import { useCanGoBack, useNavigate, useRouter } from "@tanstack/react-router";
import { ArrowLeft } from "lucide-react";
import { type ReactNode, memo } from "react";
import { Button } from "./button";
export interface ContainerHeaderProps {
  /** Heading text rendered as the page title. */
  title: string;
  /** Supporting copy rendered beneath the title. */
  description: string;
  /** Fallback route used when there is no router history to pop. */
  defaultBackTo?: string;
  /** Optional action elements rendered on the right-hand side. */
  actions?: ReactNode;
}

/**
 * Page header with an optional back button and an actions slot.
 *
 * The back button pops the router history when possible; otherwise it
 * navigates to `defaultBackTo`. When neither applies, it is hidden.
 */
export const ContainerHeader = memo(
  ({ title, description, defaultBackTo, actions }: ContainerHeaderProps) => {
    const navigate = useNavigate();
    const router = useRouter();
    const canGoBack = useCanGoBack();

    // Show the affordance when history can unwind or a fallback route exists.
    const showBackButton = canGoBack || Boolean(defaultBackTo);

    const goBack = () => {
      if (!canGoBack) {
        navigate({ to: defaultBackTo });
        return;
      }
      router.history.back();
    };

    return (
      <div className="mb-6 flex items-center justify-between">
        <div className="flex items-center gap-4">
          {showBackButton && (
            <Button
              variant="ghost"
              size="sm"
              onClick={goBack}
              className="h-8 w-8 p-0"
            >
              <ArrowLeft className="h-4 w-4" />
            </Button>
          )}
          <div>
            <h1 className="font-bold text-2xl">{title}</h1>
            <p className="mt-1 text-muted-foreground">{description}</p>
          </div>
        </div>
        <div className="flex gap-2">{actions}</div>
      </div>
    );
  }
);

View File

@@ -0,0 +1,291 @@
# Cron Components
A comprehensive set of React components for creating, editing, and displaying cron expressions with TypeScript support and shadcn/ui integration.
## Features
- 🎯 **Multiple Input Modes**: Text input, visual builder, or both
- 🔍 **Real-time Validation**: Powered by `@datasert/cronjs-parser`
- **Next Run Preview**: Shows upcoming execution times with `@datasert/cronjs-matcher`
- 🌍 **Timezone Support**: Display times in different timezones
- 📱 **Responsive Design**: Works seamlessly on desktop and mobile
- 🎨 **shadcn/ui Integration**: Consistent with your existing design system
- 🔧 **TypeScript Support**: Full type definitions included
- 🚀 **Customizable**: Extensive props for customization
## Components
### `<Cron />` - Main Component
The primary component that combines all functionality.
```tsx
import { Cron } from '@/components/cron';
function MyScheduler() {
const [cronExpression, setCronExpression] = useState('0 0 9 * * 1-5');
return (
<Cron
value={cronExpression}
onChange={setCronExpression}
mode="both" // 'input' | 'builder' | 'both'
showPreview={true}
showDescription={true}
timezone="UTC"
/>
);
}
```
#### Props
| Prop | Type | Default | Description |
|------|------|---------|-------------|
| `value` | `string` | `''` | Current cron expression |
| `onChange` | `(value: string) => void` | - | Called when expression changes |
| `onValidate` | `(isValid: boolean) => void` | - | Called when validation state changes |
| `mode` | `'input' \| 'builder' \| 'both'` | `'both'` | Display mode |
| `disabled` | `boolean` | `false` | Disable all inputs |
| `placeholder` | `string` | `'0 0 * * * *'` | Input placeholder text |
| `showPreview` | `boolean` | `true` | Show next run times preview |
| `showDescription` | `boolean` | `true` | Show human-readable description |
| `timezone` | `string` | `'UTC'` | Timezone for preview times |
| `error` | `string` | - | External error message |
| `className` | `ClassValue` | - | Additional CSS classes |
### `<CronInput />` - Text Input Component
Simple text input with validation and help text.
```tsx
import { CronInput } from '@/components/cron';
function QuickEntry() {
const [expression, setExpression] = useState('');
const [isValid, setIsValid] = useState(false);
return (
<CronInput
value={expression}
onChange={setExpression}
onValidate={setIsValid}
placeholder="Enter cron expression..."
/>
);
}
```
#### Props
| Prop | Type | Default | Description |
|------|------|---------|-------------|
| `value` | `string` | - | Current expression value |
| `onChange` | `(value: string) => void` | - | Called when input changes |
| `onValidate` | `(isValid: boolean) => void` | - | Called when validation changes |
| `placeholder` | `string` | `'0 0 * * * *'` | Placeholder text |
| `disabled` | `boolean` | `false` | Disable input |
| `readOnly` | `boolean` | `false` | Make input read-only |
| `error` | `string` | - | Error message to display |
| `className` | `ClassValue` | - | Additional CSS classes |
### `<CronBuilder />` - Visual Builder Component
Visual interface for building cron expressions with presets and field editors.
```tsx
import { CronBuilder } from '@/components/cron';
function VisualScheduler() {
const [expression, setExpression] = useState('0 0 * * * *');
return (
<CronBuilder
value={expression}
onChange={setExpression}
showPreview={true}
defaultTab="daily"
allowedPeriods={['hourly', 'daily', 'weekly']}
/>
);
}
```
#### Props
| Prop | Type | Default | Description |
|------|------|---------|-------------|
| `value` | `string` | `'0 0 * * * *'` | Current expression |
| `onChange` | `(value: string) => void` | - | Called when expression changes |
| `disabled` | `boolean` | `false` | Disable all controls |
| `showPreview` | `boolean` | `true` | Show preview section |
| `defaultTab` | `CronPeriod` | `'hourly'` | Default active tab |
| `allowedPeriods` | `CronPeriod[]` | All periods | Which tabs to show |
| `presets` | `CronPreset[]` | Built-in presets | Custom preset list |
| `className` | `ClassValue` | - | Additional CSS classes |
### `<CronDisplay />` - Display Component
Read-only component for displaying cron expression information.
```tsx
import { CronDisplay } from '@/components/cron';
function ScheduleInfo({ schedule }) {
return (
<CronDisplay
expression={schedule.cronExpression}
showNextRuns={true}
showDescription={true}
nextRunsCount={5}
timezone={schedule.timezone}
/>
);
}
```
#### Props
| Prop | Type | Default | Description |
|------|------|---------|-------------|
| `expression` | `string` | - | Cron expression to display |
| `showNextRuns` | `boolean` | `true` | Show upcoming run times |
| `showDescription` | `boolean` | `true` | Show human-readable description |
| `nextRunsCount` | `number` | `5` | Number of future runs to show |
| `timezone` | `string` | `'UTC'` | Timezone for times |
| `className` | `ClassValue` | - | Additional CSS classes |
## Cron Expression Format
The components support 6-field cron expressions with seconds:
```
┌─────────────── second (0-59)
│ ┌───────────── minute (0-59)
│ │ ┌─────────── hour (0-23)
│ │ │ ┌───────── day of month (1-31)
│ │ │ │ ┌─────── month (1-12)
│ │ │ │ │ ┌───── day of week (0-6, Sunday=0)
│ │ │ │ │ │
* * * * * *
```
### Special Characters
| Character | Description | Example |
|-----------|-------------|---------|
| `*` | Any value | `*` = every value |
| `,` | List separator | `1,3,5` = values 1, 3, and 5 |
| `-` | Range | `1-5` = values 1 through 5 |
| `/` | Step values | `*/5` = every 5th value |
| `?` | No specific value | Used when day/weekday conflict |
| `L` | Last | Last day of month/week |
| `W` | Weekday | Nearest weekday |
### Common Examples
| Expression | Description |
|------------|-------------|
| `0 * * * * *` | Every minute |
| `0 */5 * * * *` | Every 5 minutes |
| `0 0 * * * *` | Every hour |
| `0 0 9 * * *` | Daily at 9 AM |
| `0 30 9 * * 1-5` | Weekdays at 9:30 AM |
| `0 0 0 * * 0` | Every Sunday at midnight |
| `0 0 0 1 * *` | First day of every month |
| `0 0 0 1 1 *` | Every January 1st |
## Dependencies
- `@datasert/cronjs-parser` - For parsing and validating cron expressions
- `@datasert/cronjs-matcher` - For calculating next run times
- `@radix-ui/react-*` - UI primitives (via shadcn/ui)
- `lucide-react` - Icons
## Installation
1. Copy the component files to your project
2. Ensure you have the required dependencies:
```bash
npm install @datasert/cronjs-parser @datasert/cronjs-matcher
```
3. Import and use the components:
```tsx
import { Cron } from '@/components/cron';
```
## Customization
### Custom Presets
```tsx
const customPresets = [
{
label: 'Business Hours',
value: '0 0 9-17 * * 1-5',
description: 'Every hour during business hours',
category: 'custom'
},
// ... more presets
];
<CronBuilder presets={customPresets} />
```
### Restricted Periods
```tsx
<CronBuilder
allowedPeriods={['daily', 'weekly']}
defaultTab="daily"
/>
```
### Custom Validation
```tsx
function MyComponent() {
const [expression, setExpression] = useState('');
const [isValid, setIsValid] = useState(false);
const handleValidation = (valid: boolean) => {
setIsValid(valid);
// Custom validation logic
};
return (
<Cron
value={expression}
onChange={setExpression}
onValidate={handleValidation}
error={!isValid ? 'Invalid expression' : undefined}
/>
);
}
```
## TypeScript Support
All components include comprehensive TypeScript definitions:
```tsx
import type {
CronProps,
CronExpression,
CronValidationResult,
CronPeriod
} from '@/components/cron';
```
## Examples
See `CronExample` component for comprehensive usage examples and interactive demos.
## Browser Support
- Modern browsers with ES2015+ support
- React 16.8+ (hooks support required)
- TypeScript 4.0+ recommended

View File

@@ -0,0 +1,743 @@
import { getFutureMatches } from "@datasert/cronjs-matcher";
import { Calendar, Clock, Info, Settings, Zap } from "lucide-react";
import {
type CSSProperties,
type FC,
memo,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
CardDescription,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Separator } from "@/components/ui/separator";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { ToggleGroup, ToggleGroupItem } from "@/components/ui/toggle-group";
import { cn } from "@/presentation/utils";
import {
type CronBuilderProps,
CronField,
type CronFieldConfig,
CronPeriod,
type CronPreset,
} from "./types.js";
// Built-in one-click presets shown in the "Quick Presets" panel.
// Expressions are 6-field ("sec min hour dom month dow"). `category`
// is matched against the active period tab by CronBuilder's
// filteredPresets; presets tagged "common" therefore only surface on
// the "custom" tab (which shows everything).
const CRON_PRESETS: CronPreset[] = [
  {
    label: "Every minute",
    value: "0 * * * * *",
    description: "Runs every minute",
    category: "common",
  },
  {
    label: "Every 5 minutes",
    value: "0 */5 * * * *",
    description: "Runs every 5 minutes",
    category: "common",
  },
  {
    label: "Every 15 minutes",
    value: "0 */15 * * * *",
    description: "Runs every 15 minutes",
    category: "common",
  },
  {
    label: "Every 30 minutes",
    value: "0 */30 * * * *",
    description: "Runs every 30 minutes",
    category: "common",
  },
  {
    label: "Every hour",
    value: "0 0 * * * *",
    description: "Runs at the top of every hour",
    category: "common",
  },
  {
    label: "Every 6 hours",
    value: "0 0 */6 * * *",
    description: "Runs every 6 hours",
    category: "common",
  },
  {
    label: "Daily at midnight",
    value: "0 0 0 * * *",
    description: "Runs once daily at 00:00",
    category: "daily",
  },
  {
    label: "Daily at 9 AM",
    value: "0 0 9 * * *",
    description: "Runs daily at 9:00 AM",
    category: "daily",
  },
  {
    label: "Weekdays at 9 AM",
    value: "0 0 9 * * 1-5",
    description: "Runs Monday to Friday at 9:00 AM",
    category: "weekly",
  },
  {
    label: "Every Sunday",
    value: "0 0 0 * * 0",
    description: "Runs every Sunday at midnight",
    category: "weekly",
  },
  {
    label: "First day of month",
    value: "0 0 0 1 * *",
    description: "Runs on the 1st day of every month",
    category: "monthly",
  },
  {
    label: "Every year",
    value: "0 0 0 1 1 *",
    description: "Runs on January 1st every year",
    category: "yearly",
  },
];
// Numeric range, step, and allowed special tokens for each cron field.
// Fields with an `options` list get a Select-based editor in
// CronFieldItemEditor; the rest get a free-form input.
const FIELD_CONFIGS: Record<CronField, CronFieldConfig> = {
  seconds: {
    min: 0,
    max: 59,
    step: 1,
    allowSpecial: ["*", "?"],
  },
  minutes: {
    min: 0,
    max: 59,
    step: 1,
    allowSpecial: ["*", "?"],
  },
  hours: {
    min: 0,
    max: 23,
    step: 1,
    allowSpecial: ["*", "?"],
  },
  dayOfMonth: {
    min: 1,
    max: 31,
    step: 1,
    // "L" (last day) and "W" (weekday) — Quartz-style extensions,
    // labeled below.
    allowSpecial: ["*", "?", "L", "W"],
    options: [
      { label: "Any day", value: "*" },
      { label: "No specific day", value: "?" },
      { label: "Last day", value: "L" },
      { label: "Weekday", value: "W" },
    ],
  },
  month: {
    min: 1,
    max: 12,
    step: 1,
    allowSpecial: ["*"],
    options: [
      { label: "January", value: 1 },
      { label: "February", value: 2 },
      { label: "March", value: 3 },
      { label: "April", value: 4 },
      { label: "May", value: 5 },
      { label: "June", value: 6 },
      { label: "July", value: 7 },
      { label: "August", value: 8 },
      { label: "September", value: 9 },
      { label: "October", value: 10 },
      { label: "November", value: 11 },
      { label: "December", value: 12 },
    ],
  },
  dayOfWeek: {
    // 0 = Sunday, per the option labels below.
    min: 0,
    max: 6,
    step: 1,
    allowSpecial: ["*", "?"],
    options: [
      { label: "Sunday", value: 0 },
      { label: "Monday", value: 1 },
      { label: "Tuesday", value: 2 },
      { label: "Wednesday", value: 3 },
      { label: "Thursday", value: 4 },
      { label: "Friday", value: 5 },
      { label: "Saturday", value: 6 },
    ],
  },
  year: {
    min: 0,
    max: 9999,
    step: 1,
    allowSpecial: ["*", "?"],
  },
};
// Per-period tab configuration: the template expression applied when the
// tab is selected, and which fields remain editable on that tab.
// `label` holds the CronPeriod value itself; it is rendered with a
// CSS `capitalize` class in the UI.
const PERIOD_CONFIGS = {
  minute: {
    label: CronPeriod.Minute,
    description: "Run every minute",
    template: "0 * * * * *",
    fields: [CronField.Minutes],
  },
  hourly: {
    label: CronPeriod.Hourly,
    description: "Run every hour",
    template: "0 0 * * * *",
    fields: [CronField.Minutes, CronField.Hours],
  },
  daily: {
    label: CronPeriod.Daily,
    description: "Run every day",
    template: "0 0 0 * * *",
    fields: [CronField.Seconds, CronField.Minutes, CronField.Hours],
  },
  weekly: {
    label: CronPeriod.Weekly,
    description: "Run every week",
    template: "0 0 0 * * 0",
    fields: [
      CronField.Seconds,
      CronField.Minutes,
      CronField.Hours,
      CronField.DayOfWeek,
    ],
  },
  monthly: {
    label: CronPeriod.Monthly,
    description: "Run every month",
    template: "0 0 0 1 * *",
    fields: [
      CronField.Seconds,
      CronField.Minutes,
      CronField.Hours,
      CronField.DayOfMonth,
    ],
  },
  yearly: {
    label: CronPeriod.Yearly,
    description: "Run every year",
    template: "0 0 0 1 1 *",
    fields: [
      CronField.Seconds,
      CronField.Minutes,
      CronField.Hours,
      CronField.DayOfMonth,
      CronField.Month,
    ],
  },
  custom: {
    label: CronPeriod.Custom,
    description: "Custom expression",
    template: "0 0 * * * *",
    // Custom exposes every field except year.
    fields: [
      CronField.Seconds,
      CronField.Minutes,
      CronField.Hours,
      CronField.DayOfMonth,
      CronField.Month,
      CronField.DayOfWeek,
    ],
  },
} as const;
/**
 * Interactive cron-expression builder.
 *
 * Shows one tab per entry in `displayPeriods`; selecting a non-custom tab
 * resets the fields to that period's template expression. The current
 * six-field expression ("sec min hour dom month dow") is emitted through
 * `onChange` whenever it changes.
 *
 * Props (all optional):
 * - timezone: IANA zone used for the "Next Runs" preview.
 * - value: controlled 6-field cron expression.
 * - showPreview / showPresets / showGeneratedExpression: toggle UI panels.
 * - withCard: when false, card chrome (borders/padding) is stripped so the
 *   builder can be embedded in an existing surface.
 */
const CronBuilder: FC<CronBuilderProps> = ({
  timezone = "UTC",
  value = "0 0 * * * *",
  onChange,
  className,
  disabled = false,
  showPreview = true,
  showPresets = true,
  displayPeriods = [
    CronPeriod.Custom,
    CronPeriod.Minute,
    CronPeriod.Hourly,
    CronPeriod.Daily,
    CronPeriod.Weekly,
    CronPeriod.Monthly,
    CronPeriod.Yearly,
  ],
  defaultTab = CronPeriod.Custom,
  presets = CRON_PRESETS,
  showGeneratedExpression = true,
  withCard = true,
}) => {
  const [activeTab, setActiveTab] = useState<CronPeriod>(defaultTab);
  // Per-field editable state, derived from the incoming expression.
  const [cronFields, setCronFields] = useState(() =>
    parseCronExpression(value)
  );
  // Reassembled 6-field expression (the parsed `year` field is dropped).
  const currentExpression = useMemo(() => {
    return `${cronFields.seconds} ${cronFields.minutes} ${cronFields.hours} ${cronFields.dayOfMonth} ${cronFields.month} ${cronFields.dayOfWeek}`;
  }, [cronFields]);
  const nextRuns = useMemo(() => {
    if (!showPreview) {
      return [];
    }
    try {
      // The appended " *" supplies a trailing year field for the matcher —
      // presumably required alongside hasSeconds; TODO confirm against
      // @datasert/cronjs-matcher docs.
      const matches = getFutureMatches(`${currentExpression} *`, {
        matchCount: 3,
        timezone,
        formatInTimezone: true,
        hasSeconds: true,
      });
      return matches.map((match) => new Date(match));
    } catch (error) {
      // Invalid intermediate expressions are expected while editing;
      // degrade to an empty preview rather than crashing.
      console.error("Failed to get future matched runs", error);
      return [];
    }
  }, [currentExpression, showPreview, timezone]);
  // Re-parse when the controlled value changes externally.
  useEffect(() => {
    setCronFields(parseCronExpression(value));
  }, [value]);
  // NOTE(review): this also fires on mount, echoing the initial value back
  // through onChange — confirm callers tolerate that.
  useEffect(() => {
    onChange?.(currentExpression);
  }, [currentExpression, onChange]);
  const handlePresetSelect = useCallback((preset: CronPreset) => {
    setCronFields(parseCronExpression(preset.value));
  }, []);
  const handleFieldChange = useCallback(
    (field: CronField, newValue: string) => {
      setCronFields((prev) => ({ ...prev, [field]: newValue }));
    },
    []
  );
  // Switching tabs (except to "custom") resets fields to the period template.
  const handlePeriodChange = useCallback((period: CronPeriod) => {
    setActiveTab(period);
    if (period !== "custom") {
      const config = PERIOD_CONFIGS[period];
      setCronFields(parseCronExpression(config.template));
    }
  }, []);
  // "custom" shows every preset; other tabs show only matching categories.
  const filteredPresets = useMemo(() => {
    return presets.filter((preset) => {
      if (activeTab === "custom") {
        return true;
      }
      return preset.category === activeTab;
    });
  }, [presets, activeTab]);
  return (
    <div className={cn(withCard && "space-y-6", className)}>
      <Tabs
        value={activeTab}
        onValueChange={(v) => handlePeriodChange(v as CronPeriod)}
      >
        <div className="overflow-x-auto">
          {/* NOTE(review): grid-cols-7 is hardcoded even though
              displayPeriods may contain fewer tabs, and --my-grid-cols is
              set to a full declaration string but never consumed by the
              className — verify the intended dynamic column layout. */}
          <TabsList
            className="grid w-(--all-grids-width) grid-cols-7 whitespace-nowrap lg:w-full"
            style={
              {
                "--my-grid-cols": `grid-template-columns: repeat(${displayPeriods.length}, minmax(0, 1fr))`,
                "--all-grids-width":
                  displayPeriods.length > 4
                    ? `${displayPeriods.length * 25 - 20}%`
                    : "100%",
              } as CSSProperties
            }
          >
            {displayPeriods.map((period) => (
              <TabsTrigger
                key={period}
                value={period}
                disabled={disabled}
                className="text-xs capitalize"
              >
                {PERIOD_CONFIGS[period].label}
              </TabsTrigger>
            ))}
          </TabsList>
        </div>
        {displayPeriods.map((period) => (
          <TabsContent
            key={period}
            value={period}
            className={cn(withCard ? "space-y-4" : "px-0")}
          >
            {/* Field editors for the selected period */}
            <Card className={cn(!withCard && "border-none shadow-none")}>
              <CardHeader className={cn("pb-1", !withCard && "px-0")}>
                <CardTitle className="flex items-center gap-2 text-base">
                  <Settings className="h-4 w-4" />
                  <span className="capitalize">
                    {PERIOD_CONFIGS[period].label} Configuration
                  </span>
                </CardTitle>
                <CardDescription>
                  {PERIOD_CONFIGS[period].description}
                </CardDescription>
              </CardHeader>
              <CardContent className={cn("space-y-4", !withCard && "px-0")}>
                <CronFieldEditor
                  period={period}
                  fields={cronFields}
                  onChange={handleFieldChange}
                  disabled={disabled}
                />
              </CardContent>
            </Card>
            {showPresets && filteredPresets.length > 0 && (
              <Card className={cn(!withCard && "border-none shadow-none")}>
                <CardHeader className={cn(!withCard && "px-0")}>
                  <CardTitle className="flex items-center gap-2 text-base">
                    <Zap className="h-4 w-4" />
                    Quick Presets
                  </CardTitle>
                  <CardDescription>
                    Common cron expressions for quick setup
                  </CardDescription>
                </CardHeader>
                <CardContent className={cn(!withCard && "px-0")}>
                  <div className="grid gap-3 sm:grid-cols-1 lg:grid-cols-2 xl:grid-cols-3">
                    {filteredPresets.map((preset, index) => (
                      <Button
                        key={index}
                        variant="outline"
                        className="h-auto justify-start p-4 text-left"
                        onClick={() => handlePresetSelect(preset)}
                        disabled={disabled}
                      >
                        <div className="w-full space-y-2">
                          <div className="font-medium text-sm">
                            {preset.label}
                          </div>
                          <div className="whitespace-normal break-words text-muted-foreground text-xs leading-relaxed">
                            {preset.description}
                          </div>
                          <Badge
                            variant="secondary"
                            className="mt-1 break-all font-mono text-xs"
                          >
                            {preset.value}
                          </Badge>
                        </div>
                      </Button>
                    ))}
                  </div>
                </CardContent>
              </Card>
            )}
          </TabsContent>
        ))}
      </Tabs>
      {/* Current Expression & Preview */}
      {showGeneratedExpression && (
        <Card className={cn(!withCard && "border-none shadow-none")}>
          <CardHeader className={cn(!withCard && "px-0")}>
            <CardTitle className="flex items-center gap-2 text-base">
              <Clock className="h-4 w-4" />
              Generated Expression
            </CardTitle>
          </CardHeader>
          <CardContent className={cn("space-y-4", !withCard && "px-0")}>
            <div className="flex items-center gap-2">
              <Badge variant="outline" className="px-3 py-1 font-mono text-sm">
                {currentExpression}
              </Badge>
            </div>
            {showPreview && nextRuns.length > 0 && (
              <>
                <Separator />
                <div className="space-y-2">
                  <h4 className="flex items-center gap-2 font-medium text-sm">
                    <Calendar className="h-4 w-4" />
                    Next Runs({timezone})
                  </h4>
                  <div className="space-y-1">
                    {nextRuns.map((date, index) => (
                      <div
                        key={index}
                        className="flex items-center justify-between rounded bg-muted/50 px-3 py-2 text-sm"
                      >
                        <span className="font-medium text-muted-foreground">
                          #{index + 1}
                        </span>
                        <span className="font-mono">
                          {date.toLocaleString()}
                        </span>
                      </div>
                    ))}
                  </div>
                </div>
              </>
            )}
          </CardContent>
        </Card>
      )}
    </div>
  );
};
interface CronFieldEditorProps {
period: CronPeriod;
fields: Record<CronField, string>;
onChange: (field: CronField, value: string) => void;
disabled?: boolean;
}
const CronFieldEditor: FC<CronFieldEditorProps> = ({
period,
fields,
onChange,
disabled = false,
}) => {
const relevantFields = [...PERIOD_CONFIGS[period].fields] as CronField[];
return (
<div className="grid gap-4 sm:grid-cols-2 lg:grid-cols-3">
{relevantFields.map((field) => {
const config = FIELD_CONFIGS[field];
const currentValue = fields[field];
return (
<CronFieldItemEditor
key={field}
config={config}
field={field}
value={currentValue}
onChange={onChange}
disabled={disabled}
/>
);
})}
</div>
);
};
// Toggle states for plain numeric cron fields: either the "*" wildcard
// ("any") or a user-supplied concrete value ("specific"). Declared as a
// const object + derived union type rather than an enum.
const CronFieldItemAnyOrSpecificOption = {
  Any: "any",
  Specific: "specific",
} as const;
type CronFieldItemAnyOrSpecificOption =
  (typeof CronFieldItemAnyOrSpecificOption)[keyof typeof CronFieldItemAnyOrSpecificOption];
// Props for the single-field editor below.
interface CronFieldItemEditorProps {
  config: CronFieldConfig;
  field: CronField;
  value: string;
  onChange: (field: CronField, value: string) => void;
  disabled?: boolean;
}
/**
 * Encodes a raw cron field value into a token safe to use as a Select
 * item value: the empty string becomes a sentinel, and values containing
 * spaces are URI-encoded inside a marker wrapper. All other values pass
 * through unchanged. Inverse of decodeCronFieldItem.
 */
function encodeCronFieldItem(value: string): string {
  if (value.length === 0) {
    return "<meta:empty>";
  }
  return value.includes(" ")
    ? `<meta:contains-space:${encodeURIComponent(value)}>`
    : value;
}
/** Token shape produced by encodeCronFieldItem for space-containing values. */
const CRON_CONTAINS_SPACE_TOKEN = /^<meta:contains-space:([^>]+)>$/;

/**
 * Decodes a token produced by encodeCronFieldItem back to the raw cron
 * field value. Unrecognized or malformed tokens are returned unchanged.
 *
 * Fixes two defects in the previous version: the prefix check
 * (`startsWith("<meta:contains")`) was looser than the regex it applied,
 * so near-miss tokens were URI-decoded wholesale, and a malformed
 * percent sequence (e.g. "<meta:contains-space:%>") made
 * decodeURIComponent throw a URIError.
 */
function decodeCronFieldItem(value: string): string {
  if (value === "<meta:empty>") {
    return "";
  }
  const match = CRON_CONTAINS_SPACE_TOKEN.exec(value);
  if (match) {
    try {
      return decodeURIComponent(match[1]);
    } catch {
      // Malformed percent-encoding: fall through to the raw token.
    }
  }
  return value;
}
/**
 * Editor for a single cron field.
 *
 * Rendering depends on the field:
 * - month / dayOfWeek: a Select of named options plus an "Any" (*) entry.
 * - dayOfMonth: a Select combining the special tokens from `config.options`
 *   with the literal days 1-31.
 * - everything else (seconds/minutes/hours/year): an Any/Specific toggle
 *   where "Specific" reveals a free-form text input.
 *
 * Values pass through encode/decodeCronFieldItem so the empty string and
 * space-containing values survive the Select round-trip.
 */
export const CronFieldItemEditor: FC<CronFieldItemEditorProps> = memo(
  ({ field, value, onChange, config, disabled = false }) => {
    // Local mirror of the decoded prop value; keeps typing responsive
    // while the parent state catches up.
    const [innerValue, _setInnerValue] = useState(() =>
      decodeCronFieldItem(value)
    );
    const [anyOrSpecificOption, _setAnyOrSpecificOption] =
      useState<CronFieldItemAnyOrSpecificOption>(() =>
        innerValue === "*"
          ? CronFieldItemAnyOrSpecificOption.Any
          : CronFieldItemAnyOrSpecificOption.Specific
      );
    // Re-sync the mirror when the prop changes externally.
    // biome-ignore lint/correctness/useExhaustiveDependencies: false
    useEffect(() => {
      const nextValue = decodeCronFieldItem(value);
      if (nextValue !== innerValue) {
        _setInnerValue(nextValue);
      }
    }, [value]);
    // Single write path: update the mirror and notify the parent with the
    // encoded form.
    const handleChange = useCallback(
      (v: string) => {
        _setInnerValue(v);
        onChange(field, encodeCronFieldItem(v));
      },
      [field, onChange]
    );
    // "Any" writes the wildcard; "Specific" seeds the field with "0" for
    // the user to edit.
    const setAnyOrSpecificOption = useCallback(
      (v: CronFieldItemAnyOrSpecificOption) => {
        _setAnyOrSpecificOption(v);
        if (v === CronFieldItemAnyOrSpecificOption.Any) {
          handleChange("*");
        } else if (v === CronFieldItemAnyOrSpecificOption.Specific) {
          handleChange("0");
        }
      },
      [handleChange]
    );
    return (
      <div className="space-y-2">
        {/* Camel-case field name rendered as words, e.g. "dayOfMonth" ->
            "day of month" */}
        <Label className="font-medium text-sm capitalize">
          {field.replace(/([A-Z])/g, " $1").toLowerCase()}
        </Label>
        {(field === "month" || field === "dayOfWeek") && (
          <Select
            value={innerValue}
            onValueChange={handleChange}
            disabled={disabled}
          >
            <SelectTrigger>
              <SelectValue />
            </SelectTrigger>
            <SelectContent>
              <SelectItem value="*">Any</SelectItem>
              {config.options?.map((option, index) => (
                <SelectItem key={index} value={option.value.toString()}>
                  {option.label}
                </SelectItem>
              ))}
            </SelectContent>
          </Select>
        )}
        {field === "dayOfMonth" && (
          <div className="space-y-2">
            <Select
              value={innerValue}
              onValueChange={handleChange}
              disabled={disabled}
            >
              <SelectTrigger>
                <SelectValue />
              </SelectTrigger>
              <SelectContent>
                {config.options?.map((option, index) => (
                  <SelectItem key={index} value={option.value.toString()}>
                    {option.label}
                  </SelectItem>
                ))}
                {Array.from({ length: 31 }, (_, i) => i + 1).map((day) => (
                  <SelectItem key={day} value={day.toString()}>
                    {day}
                  </SelectItem>
                ))}
              </SelectContent>
            </Select>
          </div>
        )}
        {!(
          field === "month" ||
          field === "dayOfWeek" ||
          field === "dayOfMonth"
        ) && (
          <div className="space-y-2">
            <ToggleGroup
              type="single"
              value={anyOrSpecificOption}
              onValueChange={setAnyOrSpecificOption}
              disabled={disabled}
            >
              <ToggleGroupItem
                value={CronFieldItemAnyOrSpecificOption.Any}
                className="min-w-fit text-xs"
              >
                Any
              </ToggleGroupItem>
              <ToggleGroupItem
                value={CronFieldItemAnyOrSpecificOption.Specific}
                className="min-w-fit text-xs"
              >
                Specific
              </ToggleGroupItem>
            </ToggleGroup>
            {anyOrSpecificOption ===
              CronFieldItemAnyOrSpecificOption.Specific && (
              <Input
                type="text"
                value={innerValue}
                onChange={(e) => handleChange(e.target.value)}
                placeholder={`0-${config.max}`}
                disabled={disabled}
                className="font-mono text-sm"
              />
            )}
            <div className="text-muted-foreground text-xs">
              <div className="flex items-center gap-1">
                <Info className="h-3 w-3" />
                <span>
                  Range: {config.min}-{config.max}
                </span>
              </div>
              <div className="mt-1">
                Supports: *, numbers, ranges (1-5), lists (1,3,5), steps (*/5)
              </div>
            </div>
          </div>
        )}
      </div>
    );
  }
);
/**
 * Splits a cron expression into its named fields.
 *
 * Accepts 0-7 whitespace-separated parts; missing trailing fields default
 * to "*" (and seconds to "0" for an empty expression). Unlike the previous
 * `split(" ")`, runs of whitespace (double spaces, tabs) no longer produce
 * empty parts that shift every following field.
 */
function parseCronExpression(expression: string): Record<CronField, string> {
  const trimmed = expression.trim();
  // Split on whitespace runs; an empty expression yields no parts.
  const parts = trimmed === "" ? [] : trimmed.split(/\s+/);
  if (parts.length === 0) {
    // Preserve the historical default of "0" seconds for empty input.
    parts.push("0");
  }
  // Ensure we have 6 parts, pad with defaults if needed
  while (parts.length < 6) {
    parts.push("*");
  }
  return {
    seconds: parts[0] || "0",
    minutes: parts[1] || "*",
    hours: parts[2] || "*",
    dayOfMonth: parts[3] || "*",
    month: parts[4] || "*",
    dayOfWeek: parts[5] || "*",
    year: parts[6] || "*",
  };
}
export { CronBuilder };

View File

@@ -0,0 +1,277 @@
import { Badge } from '@/components/ui/badge';
import {
Card,
CardContent,
CardDescription,
CardHeader,
CardTitle,
} from '@/components/ui/card';
import { cn } from '@/presentation/utils';
import { getFutureMatches, isTimeMatches } from '@datasert/cronjs-matcher';
import { parse } from '@datasert/cronjs-parser';
import { AlertCircle, CalendarDays, CheckCircle, Clock } from 'lucide-react';
import { type FC, useMemo } from 'react';
import type {
CronDisplayProps,
CronNextRun,
CronValidationResult,
} from './types.js';
/**
 * Read-only view of a 6-field cron expression: validity badge, generated
 * description, an "Active Now" indicator when the current instant matches,
 * and an optional list of upcoming run times in the given timezone.
 */
const CronDisplay: FC<CronDisplayProps> = ({
  expression,
  className,
  showNextRuns = true,
  nextRunsCount = 5,
  timezone = 'UTC',
  showDescription = true,
  withCard = true,
}) => {
  // Parse-only validation; the trailing " *" supplies a year field —
  // presumably required by the parser alongside hasSeconds, TODO confirm.
  const validationResult = useMemo((): CronValidationResult => {
    if (!expression) {
      return { isValid: false, error: 'No expression provided' };
    }
    try {
      const _parsed = parse(`${expression} *`, { hasSeconds: true });
      return {
        isValid: true,
        description: generateDescription(expression),
      };
    } catch (error) {
      return {
        isValid: false,
        error: error instanceof Error ? error.message : 'Invalid expression',
      };
    }
  }, [expression]);
  // Upcoming run times, formatted in the target timezone.
  const nextRuns = useMemo((): CronNextRun[] => {
    if (!expression || !validationResult.isValid || !showNextRuns) {
      return [];
    }
    try {
      const matches = getFutureMatches(`${expression} *`, {
        matchCount: nextRunsCount,
        timezone,
        formatInTimezone: true,
        hasSeconds: true,
      });
      return matches.map((match) => {
        const date = new Date(match);
        return {
          date,
          timestamp: date.getTime(),
          formatted: date.toLocaleString(),
          relative: getRelativeTime(date),
        };
      });
    } catch (error) {
      // Preview is best-effort; log and degrade to an empty list.
      console.warn('Failed to get future matches:', error);
      return [];
    }
  }, [
    expression,
    validationResult.isValid,
    showNextRuns,
    nextRunsCount,
    timezone,
  ]);
  // Whether the expression matches the instant of the last render.
  const isCurrentTimeMatch = useMemo(() => {
    if (!expression || !validationResult.isValid) {
      return false;
    }
    try {
      return isTimeMatches(
        `${expression} *`,
        new Date().toISOString(),
        timezone
      );
    } catch (_error: unknown) {
      return false;
    }
  }, [expression, validationResult.isValid, timezone]);
  // Empty-state card when no expression is provided.
  if (!expression) {
    return (
      <Card className={cn(className, !withCard && 'border-none shadow-none')}>
        <CardContent className={cn('p-4', !withCard && 'px-0')}>
          <div className="flex items-center gap-2 text-muted-foreground">
            <AlertCircle className="h-4 w-4" />
            <span className="text-sm">No cron expression set</span>
          </div>
        </CardContent>
      </Card>
    );
  }
  return (
    <Card className={cn(className, !withCard && 'border-none shadow-none')}>
      <CardHeader className={cn(!withCard && 'px-0')}>
        <div className="flex items-center justify-between">
          <CardTitle className="flex items-center gap-2 text-base">
            <Clock className="h-4 w-4" />
            Cron Expression
            {isCurrentTimeMatch && (
              <Badge variant="default" className="text-xs">
                <CheckCircle className="mr-1 h-3 w-3" />
                Active Now
              </Badge>
            )}
          </CardTitle>
          <Badge
            variant={validationResult.isValid ? 'secondary' : 'destructive'}
            className="font-mono text-xs"
          >
            {expression}
          </Badge>
        </div>
        {validationResult.isValid &&
          showDescription &&
          validationResult.description && (
            <CardDescription className="text-sm">
              {validationResult.description}
            </CardDescription>
          )}
        {!validationResult.isValid && validationResult.error && (
          <CardDescription className="flex items-center gap-2 text-destructive text-sm">
            <AlertCircle className="h-4 w-4" />
            {validationResult.error}
          </CardDescription>
        )}
      </CardHeader>
      {validationResult.isValid && showNextRuns && nextRuns.length > 0 && (
        <CardContent className={cn('pt-0', !withCard && 'px-0')}>
          <div className="space-y-3">
            <h4 className="flex items-center gap-2 font-medium text-sm">
              <CalendarDays className="h-4 w-4" />
              Next Runs
              <Badge variant="outline" className="text-xs">
                {timezone}
              </Badge>
            </h4>
            <div className="space-y-2">
              {nextRuns.map((run, index) => (
                <div
                  key={index}
                  className="flex items-center justify-between rounded border bg-muted/50 p-2"
                >
                  <div className="flex items-center gap-2">
                    <span className="w-6 font-medium text-muted-foreground text-xs">
                      #{index + 1}
                    </span>
                    <span className="font-mono text-sm">{run.formatted}</span>
                  </div>
                  <span className="text-muted-foreground text-xs">
                    {run.relative}
                  </span>
                </div>
              ))}
            </div>
          </div>
        </CardContent>
      )}
    </Card>
  );
};
/**
 * Produces a human-readable summary of a six-field cron expression
 * ("sec min hour dom month dow").
 *
 * Well-known expressions resolve through a lookup table; anything else
 * falls back to a generated "At minute …, hour …" description. Returns
 * the raw expression unchanged when it does not have exactly six fields.
 */
function generateDescription(expression: string): string {
  const parts = expression.split(' ');
  if (parts.length !== 6) {
    return expression;
  }
  const [sec, min, hour, day, month, weekday] = parts;
  // Fast path: exact matches for common schedules.
  const patterns: Record<string, string> = {
    '* * * * * *': 'Every second',
    '0 * * * * *': 'Every minute',
    '0 0 * * * *': 'Every hour',
    '0 0 0 * * *': 'Daily at midnight',
    '0 0 0 * * 0': 'Every Sunday at midnight',
    '0 0 0 * * 1': 'Every Monday at midnight',
    '0 0 0 * * 2': 'Every Tuesday at midnight',
    '0 0 0 * * 3': 'Every Wednesday at midnight',
    '0 0 0 * * 4': 'Every Thursday at midnight',
    '0 0 0 * * 5': 'Every Friday at midnight',
    '0 0 0 * * 6': 'Every Saturday at midnight',
    '0 0 0 1 * *': 'Monthly on the 1st at midnight',
    '0 0 0 1 1 *': 'Yearly on January 1st at midnight',
    '0 30 9 * * 1-5': 'Weekdays at 9:30 AM',
    '0 0 */6 * * *': 'Every 6 hours',
    '0 */30 * * * *': 'Every 30 minutes',
    '0 */15 * * * *': 'Every 15 minutes',
    '0 */5 * * * *': 'Every 5 minutes',
  };
  if (patterns[expression]) {
    return patterns[expression];
  }
  // Build the dynamic description clause by clause and join with commas.
  // This fixes the dangling separators and trailing spaces the previous
  // string-concatenation approach produced (e.g. "…, on day 5 ").
  const clauses: string[] = [];
  if (sec !== '*' && sec !== '0') {
    clauses.push(`second ${sec}`);
  }
  if (min !== '*') {
    clauses.push(`minute ${min}`);
  }
  if (hour !== '*') {
    clauses.push(`hour ${hour}`);
  }
  if (day !== '*' && weekday !== '*') {
    clauses.push(`on day ${day} and weekday ${weekday}`);
  } else if (day !== '*') {
    clauses.push(`on day ${day}`);
  } else if (weekday !== '*') {
    clauses.push(`on weekday ${weekday}`);
  }
  if (month !== '*') {
    clauses.push(`in month ${month}`);
  }
  // All-wildcard cases are handled by the table, but keep the original
  // fallback for completeness.
  if (clauses.length === 0) {
    return 'Every occurrence';
  }
  return `At ${clauses.join(', ')}`;
}
/**
 * Formats how far in the future `date` lies, relative to now, as a compact
 * string: "in 30s", "in 5m", "in 3h", "in 2d", or "in 1w". Dates at or
 * before the current instant yield "Past".
 */
function getRelativeTime(date: Date): string {
  const millisAhead = date.getTime() - Date.now();
  if (millisAhead < 0) {
    return 'Past';
  }
  // Successively coarser whole units.
  const seconds = Math.floor(millisAhead / 1000);
  const minutes = Math.floor(seconds / 60);
  const hours = Math.floor(minutes / 60);
  const days = Math.floor(hours / 24);
  if (seconds < 60) {
    return `in ${seconds}s`;
  }
  if (minutes < 60) {
    return `in ${minutes}m`;
  }
  if (hours < 24) {
    return `in ${hours}h`;
  }
  if (days < 7) {
    return `in ${days}d`;
  }
  return `in ${Math.floor(days / 7)}w`;
}
export { CronDisplay };

View File

@@ -0,0 +1,413 @@
import { Code2, Play, Settings, Type } from "lucide-react";
import { type FC, useCallback, useState } from "react";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
CardDescription,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import { Separator } from "@/components/ui/separator";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { Cron } from "./cron.jsx";
import { CronBuilder } from "./cron-builder.jsx";
import { CronDisplay } from "./cron-display.jsx";
import { CronInput } from "./cron-input.jsx";
/**
 * Interactive showcase/demo page for the cron component family
 * (Cron, CronInput, CronBuilder, CronDisplay): clickable common
 * expressions, one tab per usage mode, and copy-pasteable code samples.
 */
const CronExample: FC = () => {
  // Independent state per demo tab so examples don't interfere.
  const [inputValue, setInputValue] = useState("0 30 9 * * 1-5");
  const [builderValue, setBuilderValue] = useState("0 0 */6 * * *");
  const [fullValue, setFullValue] = useState("0 */15 * * * *");
  const [displayValue] = useState("0 0 0 * * 0");
  // Gallery of common 6-field expressions shown in the header card.
  const examples = [
    {
      label: "Every minute",
      expression: "0 * * * * *",
      description: "Runs at the start of every minute",
    },
    {
      label: "Every 5 minutes",
      expression: "0 */5 * * * *",
      description: "Runs every 5 minutes",
    },
    {
      label: "Every hour",
      expression: "0 0 * * * *",
      description: "Runs at the start of every hour",
    },
    {
      label: "Daily at 9 AM",
      expression: "0 0 9 * * *",
      description: "Runs every day at 9:00 AM",
    },
    {
      label: "Weekdays at 9:30 AM",
      expression: "0 30 9 * * 1-5",
      description: "Runs Monday through Friday at 9:30 AM",
    },
    {
      label: "Every Sunday",
      expression: "0 0 0 * * 0",
      description: "Runs every Sunday at midnight",
    },
    {
      label: "First day of month",
      expression: "0 0 0 1 * *",
      description: "Runs on the 1st day of every month",
    },
    {
      label: "Every quarter",
      expression: "0 0 0 1 */3 *",
      description: "Runs on the 1st day of every quarter",
    },
  ];
  // Best-effort clipboard copy; clipboard access can fail outside secure
  // contexts, so failures are only logged.
  const handleCopyExample = useCallback(async (expression: string) => {
    try {
      await navigator.clipboard.writeText(expression);
    } catch (error) {
      console.warn("Failed to copy to clipboard:", error);
    }
  }, []);
  return (
    <div className="space-y-8">
      {/* Header */}
      <div className="space-y-2">
        <h1 className="font-bold text-3xl">Cron Expression Components</h1>
        <p className="text-lg text-muted-foreground">
          A comprehensive set of components for creating and managing cron
          expressions.
        </p>
      </div>
      {/* Quick Examples */}
      <Card>
        <CardHeader>
          <CardTitle className="flex items-center gap-2">
            <Code2 className="h-5 w-5" />
            Common Examples
          </CardTitle>
          <CardDescription>
            Click any example to copy the expression to your clipboard
          </CardDescription>
        </CardHeader>
        <CardContent>
          <div className="grid gap-3 sm:grid-cols-2 lg:grid-cols-4">
            {examples.map((example, index) => (
              <Button
                key={index}
                variant="outline"
                className="h-auto flex-col items-start p-4 text-left"
                onClick={() => handleCopyExample(example.expression)}
              >
                <div className="w-full space-y-2">
                  <div className="font-medium text-sm">{example.label}</div>
                  <Badge variant="secondary" className="font-mono text-xs">
                    {example.expression}
                  </Badge>
                  <div className="text-muted-foreground text-xs">
                    {example.description}
                  </div>
                </div>
              </Button>
            ))}
          </div>
        </CardContent>
      </Card>
      <Separator />
      {/* Component Examples */}
      <div className="space-y-8">
        <div className="space-y-2">
          <h2 className="font-semibold text-2xl">Component Examples</h2>
          <p className="text-muted-foreground">
            Interactive examples showing different ways to use the cron
            components.
          </p>
        </div>
        <Tabs defaultValue="full" className="space-y-6">
          <TabsList className="grid w-full grid-cols-4">
            <TabsTrigger value="full">Complete</TabsTrigger>
            <TabsTrigger value="input">Input Only</TabsTrigger>
            <TabsTrigger value="builder">Builder Only</TabsTrigger>
            <TabsTrigger value="display">Display Only</TabsTrigger>
          </TabsList>
          {/* Full-featured Cron component demo */}
          <TabsContent value="full" className="space-y-4">
            <Card>
              <CardHeader>
                <CardTitle className="flex items-center gap-2">
                  <Settings className="h-5 w-5" />
                  Complete Cron Component
                </CardTitle>
                <CardDescription>
                  Full-featured component with both input and visual builder
                  modes, validation, preview, and help documentation.
                </CardDescription>
              </CardHeader>
              <CardContent>
                <div className="space-y-4">
                  <Cron
                    value={fullValue}
                    onChange={setFullValue}
                    mode="both"
                    showPreview={true}
                    showDescription={true}
                    timezone="UTC"
                  />
                  <div className="rounded bg-muted p-4">
                    <h4 className="mb-2 font-medium text-sm">Current Value:</h4>
                    <Badge variant="outline" className="font-mono">
                      {fullValue || "No expression set"}
                    </Badge>
                  </div>
                </div>
              </CardContent>
            </Card>
          </TabsContent>
          {/* Text-input demos */}
          <TabsContent value="input" className="space-y-4">
            <Card>
              <CardHeader>
                <CardTitle className="flex items-center gap-2">
                  <Type className="h-5 w-5" />
                  Text Input Component
                </CardTitle>
                <CardDescription>
                  Simple text input with validation, help text, and real-time
                  feedback.
                </CardDescription>
              </CardHeader>
              <CardContent>
                <div className="space-y-4">
                  <CronInput
                    value={inputValue}
                    onChange={setInputValue}
                    placeholder="Enter cron expression..."
                  />
                  <div className="rounded bg-muted p-4">
                    <h4 className="mb-2 font-medium text-sm">Current Value:</h4>
                    <Badge variant="outline" className="font-mono">
                      {inputValue || "No expression set"}
                    </Badge>
                  </div>
                </div>
              </CardContent>
            </Card>
            <Card>
              <CardHeader>
                <CardTitle>Input-Only Mode</CardTitle>
                <CardDescription>
                  Using the main Cron component in input-only mode with preview.
                </CardDescription>
              </CardHeader>
              <CardContent>
                <Cron
                  value={inputValue}
                  onChange={setInputValue}
                  mode="input"
                  showPreview={true}
                />
              </CardContent>
            </Card>
          </TabsContent>
          {/* Visual-builder demos */}
          <TabsContent value="builder" className="space-y-4">
            <Card>
              <CardHeader>
                <CardTitle className="flex items-center gap-2">
                  <Settings className="h-5 w-5" />
                  Visual Builder Component
                </CardTitle>
                <CardDescription>
                  Visual interface for building cron expressions with presets
                  and field editors.
                </CardDescription>
              </CardHeader>
              <CardContent>
                <div className="space-y-4">
                  <CronBuilder
                    value={builderValue}
                    onChange={setBuilderValue}
                    showPreview={true}
                  />
                  <div className="rounded bg-muted p-4">
                    <h4 className="mb-2 font-medium text-sm">Current Value:</h4>
                    <Badge variant="outline" className="font-mono">
                      {builderValue || "No expression set"}
                    </Badge>
                  </div>
                </div>
              </CardContent>
            </Card>
            <Card>
              <CardHeader>
                <CardTitle>Builder-Only Mode</CardTitle>
                <CardDescription>
                  Using the main Cron component in builder-only mode.
                </CardDescription>
              </CardHeader>
              <CardContent>
                <Cron
                  value={builderValue}
                  onChange={setBuilderValue}
                  mode="builder"
                  showPreview={false}
                />
              </CardContent>
            </Card>
          </TabsContent>
          {/* Read-only display demos */}
          <TabsContent value="display" className="space-y-4">
            <Card>
              <CardHeader>
                <CardTitle className="flex items-center gap-2">
                  <Play className="h-5 w-5" />
                  Display Component
                </CardTitle>
                <CardDescription>
                  Read-only component that shows cron expression details,
                  description, and next run times.
                </CardDescription>
              </CardHeader>
              <CardContent>
                <div className="space-y-4">
                  <CronDisplay
                    expression={displayValue}
                    showNextRuns={true}
                    showDescription={true}
                    nextRunsCount={5}
                    timezone="UTC"
                  />
                </div>
              </CardContent>
            </Card>
            <Card>
              <CardHeader>
                <CardTitle>Multiple Timezone Display</CardTitle>
                <CardDescription>
                  Same expression displayed in different timezones.
                </CardDescription>
              </CardHeader>
              <CardContent>
                <div className="grid gap-4 lg:grid-cols-2">
                  <div>
                    <h4 className="mb-2 font-medium text-sm">UTC</h4>
                    <CronDisplay
                      expression="0 0 12 * * *"
                      showNextRuns={true}
                      nextRunsCount={3}
                      timezone="UTC"
                    />
                  </div>
                  <div>
                    <h4 className="mb-2 font-medium text-sm">
                      America/New_York
                    </h4>
                    <CronDisplay
                      expression="0 0 12 * * *"
                      showNextRuns={true}
                      nextRunsCount={3}
                      timezone="America/New_York"
                    />
                  </div>
                </div>
              </CardContent>
            </Card>
          </TabsContent>
        </Tabs>
      </div>
      {/* Usage Examples */}
      <Card>
        <CardHeader>
          <CardTitle>Usage Examples</CardTitle>
          <CardDescription>
            Code examples showing how to integrate these components into your
            application.
          </CardDescription>
        </CardHeader>
        <CardContent>
          <div className="space-y-6">
            <div>
              <h4 className="mb-2 font-medium text-sm">Basic Usage</h4>
              <div className="rounded bg-muted p-4 font-mono text-sm">
                <pre>{`import { Cron } from '@/components/cron';
function MyComponent() {
  const [cronExpression, setCronExpression] = useState('0 0 * * * *');
  return (
    <Cron
      value={cronExpression}
      onChange={setCronExpression}
      mode="both"
      showPreview={true}
    />
  );
}`}</pre>
              </div>
            </div>
            <div>
              <h4 className="mb-2 font-medium text-sm">
                Input Only with Validation
              </h4>
              <div className="rounded bg-muted p-4 font-mono text-sm">
                <pre>{`import { CronInput } from '@/components/cron';
function ScheduleForm() {
  const [expression, setExpression] = useState('');
  const [isValid, setIsValid] = useState(false);
  return (
    <CronInput
      value={expression}
      onChange={setExpression}
      onValidate={setIsValid}
      placeholder="0 0 * * * *"
    />
  );
}`}</pre>
              </div>
            </div>
            <div>
              <h4 className="mb-2 font-medium text-sm">
                Display Schedule Information
              </h4>
              <div className="rounded bg-muted p-4 font-mono text-sm">
                <pre>{`import { CronDisplay } from '@/components/cron';
function SchedulePreview({ schedule }) {
  return (
    <CronDisplay
      expression={schedule.cronExpression}
      showNextRuns={true}
      showDescription={true}
      timezone={schedule.timezone}
    />
  );
}`}</pre>
              </div>
            </div>
          </div>
        </CardContent>
      </Card>
    </div>
  );
};
export { CronExample };

View File

@@ -0,0 +1,190 @@
import { Badge } from '@/components/ui/badge';
import { Input } from '@/components/ui/input';
import { cn } from '@/presentation/utils';
import { parse } from '@datasert/cronjs-parser';
import { AlertCircle, CheckCircle, Info } from 'lucide-react';
import {
type ChangeEvent,
forwardRef,
useCallback,
useEffect,
useMemo,
useState,
} from 'react';
import type { CronInputProps, CronValidationResult } from './types.js';
/**
 * Text input for a 6-field (seconds-first) cron expression with inline
 * validation, a trailing status icon, and contextual help while focused.
 *
 * Mirrors `value` into internal state so it works controlled or
 * uncontrolled; reports keystrokes via `onChange` and validity changes
 * via `onValidate`. An external `error` string overrides parse errors.
 */
const CronInput = forwardRef<HTMLInputElement, CronInputProps>(
  (
    {
      value,
      onChange,
      onValidate,
      placeholder = '0 0 * * * *',
      className,
      disabled,
      readOnly,
      error,
      ...props
    },
    ref
  ) => {
    const [internalValue, setInternalValue] = useState(value || '');
    const [isFocused, setIsFocused] = useState(false);
    // Re-validate on every edit. The appended ` *` appears to stand for the
    // optional year field expected by @datasert/cronjs-parser when
    // hasSeconds is true — TODO confirm against the parser docs.
    const validationResult = useMemo((): CronValidationResult => {
      if (!internalValue.trim()) {
        return {
          isValid: false,
          error: 'Expression is required',
          isEmpty: true,
        };
      }
      try {
        parse(`${internalValue} *`, { hasSeconds: true });
        return { isValid: true };
      } catch (parseError) {
        return {
          isValid: false,
          error:
            parseError instanceof Error
              ? parseError.message
              : 'Invalid cron expression',
        };
      }
    }, [internalValue]);
    // Keep internal state in sync when the parent drives `value`.
    useEffect(() => {
      setInternalValue(value || '');
    }, [value]);
    useEffect(() => {
      onValidate?.(validationResult.isValid);
    }, [validationResult.isValid, onValidate]);
    const handleChange = useCallback(
      (e: ChangeEvent<HTMLInputElement>) => {
        const newValue = e.target.value;
        setInternalValue(newValue);
        onChange?.(newValue);
      },
      [onChange]
    );
    const handleFocus = useCallback(() => {
      setIsFocused(true);
    }, []);
    const handleBlur = useCallback(() => {
      setIsFocused(false);
    }, []);
    // Coerce to real booleans (`!!`) so truthy strings never leak into the
    // cn()/ARIA expressions below; matches the coercion style already used
    // by the sibling `Cron` component.
    const hasError =
      !!error || (!validationResult.isValid && !!internalValue.trim());
    const showSuccess =
      validationResult.isValid && !!internalValue.trim() && !isFocused;
    return (
      <div className="space-y-2">
        <div className="relative">
          <Input
            ref={ref}
            type="text"
            value={internalValue}
            onChange={handleChange}
            onFocus={handleFocus}
            onBlur={handleBlur}
            placeholder={placeholder}
            className={cn(
              'pr-10 font-mono text-sm',
              hasError && 'border-destructive focus-visible:ring-destructive',
              showSuccess && 'border-success focus-visible:ring-success',
              className
            )}
            disabled={disabled}
            readOnly={readOnly}
            aria-invalid={hasError ? 'true' : 'false'}
            {...props}
          />
          {/* Status icon */}
          <div className="-translate-y-1/2 absolute top-1/2 right-3">
            {hasError && <AlertCircle className="h-4 w-4 text-destructive" />}
            {showSuccess && <CheckCircle className="h-4 w-4 text-success" />}
          </div>
        </div>
        {/* Error message */}
        {hasError && (
          <div className="flex items-center gap-2 text-destructive text-sm">
            <AlertCircle className="h-4 w-4" />
            <span>{error || validationResult.error}</span>
          </div>
        )}
        {/* Help text when focused */}
        {isFocused && !hasError && (
          <div className="space-y-2 text-muted-foreground text-sm">
            <div className="flex items-center gap-2">
              <Info className="h-4 w-4" />
              <span>Format: second minute hour day month weekday</span>
            </div>
            <div className="grid grid-cols-2 gap-2 text-xs">
              <div className="flex items-center gap-1">
                <Badge variant="outline" className="font-mono text-xs">
                  *
                </Badge>
                <span>any value</span>
              </div>
              <div className="flex items-center gap-1">
                <Badge variant="outline" className="font-mono text-xs">
                  ,
                </Badge>
                <span>list separator</span>
              </div>
              <div className="flex items-center gap-1">
                <Badge variant="outline" className="font-mono text-xs">
                  -
                </Badge>
                <span>range</span>
              </div>
              <div className="flex items-center gap-1">
                <Badge variant="outline" className="font-mono text-xs">
                  /
                </Badge>
                <span>step value</span>
              </div>
            </div>
            <div className="mt-2 space-y-1">
              <div className="font-medium text-xs">Examples:</div>
              <div className="space-y-1 text-xs">
                <div className="flex items-center justify-between">
                  <Badge variant="secondary" className="font-mono text-xs">
                    0 * * * * *
                  </Badge>
                  <span>Every minute</span>
                </div>
                <div className="flex items-center justify-between">
                  <Badge variant="secondary" className="font-mono text-xs">
                    0 0 * * * *
                  </Badge>
                  <span>Every hour</span>
                </div>
                <div className="flex items-center justify-between">
                  <Badge variant="secondary" className="font-mono text-xs">
                    0 30 9 * * 1-5
                  </Badge>
                  <span>Weekdays at 9:30 AM</span>
                </div>
              </div>
            </div>
          </div>
        )}
      </div>
    );
  }
);
// forwardRef wrappers are anonymous; set an explicit displayName for DevTools.
CronInput.displayName = 'CronInput';
export { CronInput };

View File

@@ -0,0 +1,512 @@
import { parse } from "@datasert/cronjs-parser";
import {
AlertCircle,
Bolt,
Check,
Code2,
Copy,
Settings,
Type,
} from "lucide-react";
import { type FC, useCallback, useEffect, useMemo, useState } from "react";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
CardDescription,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import { Separator } from "@/components/ui/separator";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { cn } from "@/presentation/utils";
import { CronBuilder } from "./cron-builder.js";
import { CronDisplay } from "./cron-display.js";
import { CronInput } from "./cron-input.js";
import {
CronMode,
type CronPrimitiveMode,
type CronProps,
type CronValidationResult,
} from "./types.js";
// Default 6-field (seconds-first) expression: top of every hour.
const PLACEHOLDER = "0 0 * * * *";
/**
 * Composite cron-expression editor.
 *
 * mode === "input"   → bare text input (plus optional live preview).
 * mode === "builder" → visual builder (plus optional live preview).
 * mode === "both"    → card with "Text Input" / "Visual Build" tabs, plus
 *                      optional preview and format-help sections.
 *
 * Mirrors `value` into internal state so it works controlled or
 * uncontrolled; validity changes are reported through `onValidate`.
 */
const Cron: FC<CronProps> = ({
  value = "",
  onChange,
  activeMode = "input",
  onActiveModeChange,
  onValidate,
  className,
  mode = "both",
  disabled = false,
  placeholder = PLACEHOLDER,
  showPreview = true,
  showDescription = true,
  timezone = "UTC",
  error,
  children,
  showHelp = true,
  displayPeriods,
  defaultTab,
  presets,
  showPresets,
  withCard = true,
  isFirstSibling = false,
  // biome-ignore lint/complexity/noExcessiveCognitiveComplexity: false
}) => {
  const [internalValue, setInternalValue] = useState(value || "");
  // Active tab; only changeable when mode === "both", otherwise pinned to `mode`.
  const [internalActiveMode, setInternalActiveMode] =
    useState<CronPrimitiveMode>(
      mode === CronMode.Both ? activeMode : (mode as CronPrimitiveMode)
    );
  const [copied, setCopied] = useState(false);
  // Same validation as CronInput: the appended ` *` appears to stand for the
  // optional year field expected by @datasert/cronjs-parser when hasSeconds
  // is true — TODO confirm against the parser docs.
  const validationResult = useMemo((): CronValidationResult => {
    if (!internalValue.trim()) {
      return { isValid: false, error: "Expression is required", isEmpty: true };
    }
    try {
      parse(`${internalValue} *`, { hasSeconds: true });
      return { isValid: true };
    } catch (parseError) {
      return {
        isValid: false,
        error:
          parseError instanceof Error
            ? parseError.message
            : "Invalid cron expression",
      };
    }
  }, [internalValue]);
  // Keep internal state in sync when the parent drives `value`.
  useEffect(() => {
    setInternalValue(value || "");
  }, [value]);
  useEffect(() => {
    onValidate?.(validationResult.isValid);
  }, [validationResult.isValid, onValidate]);
  // Parent-driven tab changes only apply in the tabbed ("both") layout.
  useEffect(() => {
    if (mode === "both") {
      setInternalActiveMode(activeMode);
    }
  }, [activeMode, mode]);
  const handleChange = useCallback(
    (newValue: string) => {
      setInternalValue(newValue);
      onChange?.(newValue);
    },
    [onChange]
  );
  const handleActiveModeChange = useCallback(
    (m: CronPrimitiveMode) => {
      setInternalActiveMode(m);
      onActiveModeChange?.(m);
    },
    [onActiveModeChange]
  );
  // Best-effort clipboard copy; the "copied" checkmark resets after 2s.
  const handleCopy = useCallback(async () => {
    if (!internalValue) {
      return;
    }
    try {
      await navigator.clipboard.writeText(internalValue);
      setCopied(true);
      setTimeout(() => setCopied(false), 2000);
    } catch (e) {
      console.warn("Failed to copy to clipboard:", e);
    }
  }, [internalValue]);
  const hasError =
    !!error || !!(!validationResult.isValid && internalValue.trim());
  // Single-surface layouts: render the bare input or builder and return early.
  if (mode === "input") {
    return (
      <div className={cn(withCard && "space-y-4", className)}>
        <CronInput
          value={internalValue}
          onChange={handleChange}
          onValidate={onValidate}
          placeholder={placeholder}
          disabled={disabled}
          error={error}
        />
        {showPreview &&
          (validationResult.isValid || validationResult.isEmpty) && (
            <CronDisplay
              expression={
                validationResult.isEmpty ? placeholder : internalValue
              }
              showNextRuns={true}
              showDescription={showDescription}
              timezone={timezone}
              nextRunsCount={3}
              withCard={withCard}
            />
          )}
        {children}
      </div>
    );
  }
  if (mode === "builder") {
    return (
      <div className={cn(withCard && "space-y-4", className)}>
        <CronBuilder
          value={internalValue}
          onChange={handleChange}
          disabled={disabled}
          showPreview={showPreview}
          displayPeriods={displayPeriods}
          defaultTab={defaultTab}
          presets={presets}
          showPresets={showPresets}
          showGeneratedExpression={true}
          timezone={timezone}
          withCard={withCard}
        />
        {children}
      </div>
    );
  }
  // mode === "both": tabbed card combining text input and visual builder.
  return (
    <div className={cn(withCard && "space-y-6", className)}>
      <Card
        className={cn(
          !withCard && "border-none shadow-none",
          !withCard && isFirstSibling && "pt-0"
        )}
      >
        <CardHeader className={cn(!withCard && "px-0")}>
          <div className="flex items-center justify-between">
            <div>
              <CardTitle className="flex items-center gap-2 text-base">
                <Bolt className="h-4 w-4" />
                Cron Expression Builder
              </CardTitle>
              <CardDescription className="text-sm">
                Create and validate cron expressions using visual builder or
                text input
              </CardDescription>
            </div>
            {internalValue && (
              <div className="flex items-center gap-2">
                <Badge
                  variant={
                    validationResult.isValid ? "secondary" : "destructive"
                  }
                  className="font-mono text-sm"
                >
                  {internalValue}
                </Badge>
                <Button
                  variant="outline"
                  size="sm"
                  onClick={handleCopy}
                  disabled={!internalValue || hasError}
                  className="h-8 px-2"
                >
                  {copied ? (
                    <Check className="h-4 w-4" />
                  ) : (
                    <Copy className="h-4 w-4" />
                  )}
                </Button>
              </div>
            )}
          </div>
          {hasError && (
            <div className="mt-3 flex items-center gap-2 text-destructive text-sm">
              <AlertCircle className="h-4 w-4" />
              <span>{error || validationResult.error}</span>
            </div>
          )}
        </CardHeader>
        <CardContent className={cn(!withCard && "px-0")}>
          <Tabs
            value={internalActiveMode}
            onValueChange={(v) =>
              handleActiveModeChange(v as "input" | "builder")
            }
          >
            <TabsList className="grid w-full grid-cols-2">
              <TabsTrigger
                value="input"
                className="flex min-w-fit items-center gap-1"
              >
                <Type className="h-4 w-4" />
                Text Input
              </TabsTrigger>
              <TabsTrigger
                value="builder"
                className="flex min-w-fit items-center gap-1"
              >
                <Settings className="h-4 w-4" />
                Visual Build
              </TabsTrigger>
            </TabsList>
            <TabsContent value="input" className="mt-6 space-y-4">
              <CronInput
                value={internalValue}
                onChange={handleChange}
                onValidate={onValidate}
                placeholder={placeholder}
                disabled={disabled}
                error={error}
              />
            </TabsContent>
            <TabsContent value="builder" className="mt-6">
              <CronBuilder
                value={internalValue}
                onChange={handleChange}
                disabled={disabled}
                showPreview={false}
                displayPeriods={displayPeriods}
                defaultTab={defaultTab}
                presets={presets}
                showPresets={showPresets}
                showGeneratedExpression={false}
                timezone={timezone}
                withCard={withCard}
              />
            </TabsContent>
          </Tabs>
        </CardContent>
      </Card>
      {/* Preview Section */}
      {showPreview &&
        (validationResult.isValid || validationResult.isEmpty) && (
          <>
            {!withCard && <Separator />}
            <CronDisplay
              expression={
                validationResult.isEmpty ? placeholder : internalValue
              }
              showNextRuns={true}
              showDescription={showDescription}
              timezone={timezone}
              nextRunsCount={3}
              withCard={withCard}
            />
          </>
        )}
      {/* Help Section */}
      {showHelp && (
        <>
          {!withCard && <Separator />}
          <Card className={cn(!withCard && "border-none shadow-none")}>
            <CardHeader className={cn(!withCard && "px-0")}>
              <CardTitle className="flex items-center gap-2 text-base">
                <Code2 className="h-4 w-4" />
                Cron Expression Format
              </CardTitle>
            </CardHeader>
            <CardContent className={cn(!withCard && "px-0")}>
              <div className="space-y-4">
                <div className="grid grid-cols-6 gap-2 text-center text-sm">
                  <div className="space-y-1">
                    <div className="font-medium font-mono text-muted-foreground">
                      Second
                    </div>
                    <div className="text-xs">0-59</div>
                  </div>
                  <div className="space-y-1">
                    <div className="font-medium font-mono text-muted-foreground">
                      Minute
                    </div>
                    <div className="text-xs">0-59</div>
                  </div>
                  <div className="space-y-1">
                    <div className="font-medium font-mono text-muted-foreground">
                      Hour
                    </div>
                    <div className="text-xs">0-23</div>
                  </div>
                  <div className="space-y-1">
                    <div className="font-medium font-mono text-muted-foreground">
                      Day
                    </div>
                    <div className="text-xs">1-31</div>
                  </div>
                  <div className="space-y-1">
                    <div className="font-medium font-mono text-muted-foreground">
                      Month
                    </div>
                    <div className="text-xs">1-12</div>
                  </div>
                  <div className="space-y-1">
                    <div className="font-medium font-mono text-muted-foreground">
                      Weekday
                    </div>
                    <div className="text-xs">0-6</div>
                  </div>
                </div>
                <Separator />
                <div className="grid gap-3 sm:grid-cols-2 lg:grid-cols-4">
                  <div className="space-y-1">
                    <div className="flex items-center gap-2">
                      <Badge variant="outline" className="font-mono">
                        *
                      </Badge>
                      <span className="text-sm">Any value</span>
                    </div>
                    <div className="text-muted-foreground text-xs">
                      Matches all possible values
                    </div>
                  </div>
                  <div className="space-y-1">
                    <div className="flex items-center gap-2">
                      <Badge variant="outline" className="font-mono">
                        5
                      </Badge>
                      <span className="text-sm">Specific value</span>
                    </div>
                    <div className="text-muted-foreground text-xs">
                      Matches exactly this value
                    </div>
                  </div>
                  <div className="space-y-1">
                    <div className="flex items-center gap-2">
                      <Badge variant="outline" className="font-mono">
                        1-5
                      </Badge>
                      <span className="text-sm">Range</span>
                    </div>
                    <div className="text-muted-foreground text-xs">
                      Matches values 1 through 5
                    </div>
                  </div>
                  <div className="space-y-1">
                    <div className="flex items-center gap-2">
                      <Badge variant="outline" className="font-mono">
                        1,3,5
                      </Badge>
                      <span className="text-sm">List</span>
                    </div>
                    <div className="text-muted-foreground text-xs">
                      Matches values 1, 3, and 5
                    </div>
                  </div>
                  <div className="space-y-1">
                    <div className="flex items-center gap-2">
                      <Badge variant="outline" className="font-mono">
                        */5
                      </Badge>
                      <span className="text-sm">Step</span>
                    </div>
                    <div className="text-muted-foreground text-xs">
                      Every 5th value
                    </div>
                  </div>
                  <div className="space-y-1">
                    <div className="flex items-center gap-2">
                      <Badge variant="outline" className="font-mono">
                        0-10/2
                      </Badge>
                      <span className="text-sm">Range + Step</span>
                    </div>
                    <div className="text-muted-foreground text-xs">
                      Even values 0-10
                    </div>
                  </div>
                  <div className="space-y-1">
                    <div className="flex items-center gap-2">
                      <Badge variant="outline" className="font-mono">
                        ?
                      </Badge>
                      <span className="text-sm">No specific</span>
                    </div>
                    <div className="text-muted-foreground text-xs">
                      Used when day/weekday conflicts
                    </div>
                  </div>
                  <div className="space-y-1">
                    <div className="flex items-center gap-2">
                      <Badge variant="outline" className="font-mono">
                        L
                      </Badge>
                      <span className="text-sm">Last</span>
                    </div>
                    <div className="text-muted-foreground text-xs">
                      Last day of month/week
                    </div>
                  </div>
                </div>
                <Separator />
                <div className="space-y-2">
                  <h4 className="font-medium text-sm">Common Examples:</h4>
                  <div className="grid gap-2 text-sm">
                    <div className="flex items-center justify-between">
                      <Badge variant="secondary" className="font-mono text-xs">
                        0 0 * * * *
                      </Badge>
                      <span className="text-muted-foreground">Every hour</span>
                    </div>
                    <div className="flex items-center justify-between">
                      <Badge variant="secondary" className="font-mono text-xs">
                        0 */15 * * * *
                      </Badge>
                      <span className="text-muted-foreground">
                        Every 15 minutes
                      </span>
                    </div>
                    <div className="flex items-center justify-between">
                      <Badge variant="secondary" className="font-mono text-xs">
                        0 0 0 * * *
                      </Badge>
                      <span className="text-muted-foreground">
                        Daily at midnight
                      </span>
                    </div>
                    <div className="flex items-center justify-between">
                      <Badge variant="secondary" className="font-mono text-xs">
                        0 30 9 * * 1-5
                      </Badge>
                      <span className="text-muted-foreground">
                        Weekdays at 9:30 AM
                      </span>
                    </div>
                  </div>
                </div>
              </div>
            </CardContent>
          </Card>
        </>
      )}
      {children}
    </div>
  );
};

View File

@@ -0,0 +1,20 @@
// Barrel module for the cron component family: re-exports the public
// components and the shared types/const objects from types.ts.
export { Cron } from "./cron.js";
export { CronBuilder } from "./cron-builder.js";
export { CronDisplay } from "./cron-display.js";
export { CronExample } from "./cron-example.js";
export { CronInput } from "./cron-input.js";
export {
  type CronBuilderProps,
  type CronDisplayProps,
  type CronExpression,
  CronField,
  type CronFieldConfig,
  type CronInputProps,
  type CronNextRun,
  CronPeriod,
  type CronPreset,
  type CronProps,
  type CronValidationResult,
  type PeriodConfig,
} from "./types.js";

View File

@@ -0,0 +1,163 @@
// Shared types and runtime constants for the cron component family.
import type { ClassValue } from 'clsx';
import type { ReactNode } from 'react';
/** A cron expression decomposed into its individual (optional) fields. */
export interface CronExpression {
  seconds?: string;
  minutes?: string;
  hours?: string;
  dayOfMonth?: string;
  month?: string;
  dayOfWeek?: string;
  year?: string;
}
/** Props for CronDisplay: read-only rendering of an expression. */
export interface CronDisplayProps {
  expression: string;
  className?: ClassValue;
  showNextRuns?: boolean;
  nextRunsCount?: number;
  timezone?: string;
  showDescription?: boolean;
  withCard?: boolean;
}
/** Props for CronInput: validated free-text editing of an expression. */
export interface CronInputProps {
  value?: string;
  onChange?: (value: string) => void;
  onValidate?: (isValid: boolean) => void;
  placeholder?: string;
  className?: ClassValue;
  disabled?: boolean;
  readOnly?: boolean;
  error?: string;
}
/** Props for CronBuilder: visual, period-based expression construction. */
export interface CronBuilderProps {
  value?: string;
  onChange?: (value: string) => void;
  className?: ClassValue;
  disabled?: boolean;
  showPreview?: boolean;
  defaultTab?: CronPeriod;
  displayPeriods?: CronPeriod[];
  presets?: CronPreset[];
  showPresets?: boolean;
  showGeneratedExpression?: boolean;
  timezone?: string;
  withCard?: boolean;
}
/** Concrete editing surface: free-text input or visual builder. */
export const CronPrimitiveMode = {
  Input: 'input',
  Builder: 'builder',
} as const;
// Const-object + derived-union pattern used instead of `enum`.
export type CronPrimitiveMode =
  (typeof CronPrimitiveMode)[keyof typeof CronPrimitiveMode];
/** Layout mode for the composite Cron component; 'both' renders tabs. */
export const CronMode = {
  Input: 'input',
  Builder: 'builder',
  Both: 'both',
} as const;
export type CronMode = (typeof CronMode)[keyof typeof CronMode];
/** Props for the composite Cron component (input + builder + preview + help). */
export interface CronProps {
  value?: string;
  onChange?: (value: string) => void;
  activeMode?: CronPrimitiveMode;
  onActiveModeChange?: (mode: CronPrimitiveMode) => void;
  onValidate?: (isValid: boolean) => void;
  className?: ClassValue;
  mode?: CronMode;
  disabled?: boolean;
  placeholder?: string;
  showPreview?: boolean;
  showDescription?: boolean;
  timezone?: string;
  error?: string;
  children?: ReactNode;
  defaultTab?: CronPeriod;
  displayPeriods?: CronPeriod[];
  presets?: CronPreset[];
  showHelp?: boolean;
  showPresets?: boolean;
  withCard?: boolean;
  isFirstSibling?: boolean;
}
/** Schedule granularity tabs offered by the builder. */
export const CronPeriod = {
  Minute: 'minute',
  Hourly: 'hourly',
  Daily: 'daily',
  Weekly: 'weekly',
  Monthly: 'monthly',
  Yearly: 'yearly',
  Custom: 'custom',
} as const;
export type CronPeriod = (typeof CronPeriod)[keyof typeof CronPeriod];
/** Props for a single period-specific field editor within the builder. */
export interface CronFieldProps {
  period: CronPeriod;
  value: string;
  onChange: (value: string) => void;
  disabled?: boolean;
  className?: ClassValue;
}
/** A named, ready-made expression shown in the presets list. */
export interface CronPreset {
  label: string;
  value: string;
  description: string;
  category?: string;
}
/** Outcome of validating an expression string. */
export interface CronValidationResult {
  isValid: boolean;
  error?: string;
  description?: string;
  // True when the input was blank (distinct from a parse failure).
  isEmpty?: boolean;
}
/** One computed upcoming run of an expression. */
export interface CronNextRun {
  date: Date;
  timestamp: number;
  formatted: string;
  relative: string;
}
/** Builder configuration for a given CronPeriod tab. */
export interface PeriodConfig {
  label: string;
  description: string;
  defaultValue: string;
  // Which expression fields this period exposes for editing.
  fields: {
    seconds?: boolean;
    minutes?: boolean;
    hours?: boolean;
    dayOfMonth?: boolean;
    month?: boolean;
    dayOfWeek?: boolean;
  };
}
/** Names of the individual cron expression fields. */
export const CronField = {
  Seconds: 'seconds',
  Minutes: 'minutes',
  Hours: 'hours',
  DayOfMonth: 'dayOfMonth',
  Month: 'month',
  DayOfWeek: 'dayOfWeek',
  Year: 'year',
} as const;
export type CronField = (typeof CronField)[keyof typeof CronField];
/** Value constraints and UI options for a single cron field. */
export interface CronFieldConfig {
  min: number;
  max: number;
  step?: number;
  options?: Array<{ label: string; value: number | string }>;
  allowSpecial?: string[];
}

View File

@@ -1,5 +1,5 @@
import { Card, CardContent, CardHeader } from './ui/card'; import { Card, CardContent, CardHeader } from "./card";
import { Skeleton } from './ui/skeleton'; import { Skeleton } from "./skeleton";
export function DetailCardSkeleton() { export function DetailCardSkeleton() {
return ( return (

Some files were not shown because too many files have changed in this diff Show More