feat: add mikan and rawer parsers

This commit is contained in:
master 2024-02-29 23:59:00 +08:00
parent 686faaf060
commit 050998b09e
60 changed files with 2370 additions and 164 deletions

4
.gitignore vendored
View File

@ -214,9 +214,11 @@ index.d.ts.map
# Added by cargo
/target
/examples/*
!/examples/.gitkeep
/.env
/.env.bk
/.angular
/*.session.sql
/temp
/rustc-ice-*

3
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,3 @@
{
"rust-analyzer.showUnlinkedFileNotification": false
}

544
Cargo.lock generated
View File

@ -403,12 +403,27 @@ dependencies = [
"thiserror",
]
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64-simd"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "781dd20c3aff0bd194fe7d2a977dd92f21c173891f3a03b677359e5fa457e5d5"
dependencies = [
"simd-abstraction",
]
[[package]]
name = "base64ct"
version = "1.6.0"
@ -439,6 +454,21 @@ dependencies = [
"num-traits",
]
[[package]]
name = "bit-set"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
dependencies = [
"bit-vec",
]
[[package]]
name = "bit-vec"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
[[package]]
name = "bitflags"
version = "1.3.2"
@ -671,7 +701,7 @@ checksum = "d59ae0466b83e838b81a54256c39d5d7c20b9d7daa10510a242d9b75abd5936e"
dependencies = [
"chrono",
"chrono-tz-build",
"phf",
"phf 0.11.2",
]
[[package]]
@ -681,8 +711,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "433e39f13c9a060046954e0592a8d0a4bcb1040125cbf91cb8ee58964cfb350f"
dependencies = [
"parse-zoneinfo",
"phf",
"phf_codegen",
"phf 0.11.2",
"phf_codegen 0.11.2",
]
[[package]]
@ -783,6 +813,26 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "const-str"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21077772762a1002bb421c3af42ac1725fa56066bfc53d9a55bb79905df2aaf3"
dependencies = [
"const-str-proc-macro",
]
[[package]]
name = "const-str-proc-macro"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e1e0fdd2e5d3041e530e1b21158aeeef8b5d0e306bc5c1e3d6cf0930d10e25a"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "cookie"
version = "0.18.0"
@ -933,14 +983,56 @@ dependencies = [
"typenum",
]
[[package]]
name = "cssparser"
version = "0.33.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9be934d936a0fbed5bcdc01042b770de1398bf79d0e192f49fa7faea0e99281e"
dependencies = [
"cssparser-macros",
"dtoa-short",
"itoa",
"phf 0.11.2",
"smallvec",
]
[[package]]
name = "cssparser-color"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "556c099a61d85989d7af52b692e35a8d68a57e7df8c6d07563dc0778b3960c9f"
dependencies = [
"cssparser",
]
[[package]]
name = "cssparser-macros"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331"
dependencies = [
"quote",
"syn 2.0.50",
]
[[package]]
name = "darling"
version = "0.14.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850"
dependencies = [
"darling_core",
"darling_macro",
"darling_core 0.14.4",
"darling_macro 0.14.4",
]
[[package]]
name = "darling"
version = "0.20.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391"
dependencies = [
"darling_core 0.20.8",
"darling_macro 0.20.8",
]
[[package]]
@ -957,17 +1049,42 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "darling_core"
version = "0.20.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f"
dependencies = [
"fnv",
"ident_case",
"proc-macro2",
"quote",
"strsim 0.10.0",
"syn 2.0.50",
]
[[package]]
name = "darling_macro"
version = "0.14.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e"
dependencies = [
"darling_core",
"darling_core 0.14.4",
"quote",
"syn 1.0.109",
]
[[package]]
name = "darling_macro"
version = "0.20.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f"
dependencies = [
"darling_core 0.20.8",
"quote",
"syn 2.0.50",
]
[[package]]
name = "dashmap"
version = "5.5.3"
@ -981,6 +1098,21 @@ dependencies = [
"parking_lot_core",
]
[[package]]
name = "data-encoding"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
[[package]]
name = "data-url"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a30bfce702bcfa94e906ef82421f2c0e61c076ad76030c16ee5d2e9a32fe193"
dependencies = [
"matches",
]
[[package]]
name = "der"
version = "0.7.8"
@ -1028,7 +1160,7 @@ version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c11bdc11a0c47bc7d37d582b5285da6849c96681023680b906673c5707af7b0f"
dependencies = [
"darling",
"darling 0.14.4",
"proc-macro2",
"quote",
"syn 1.0.109",
@ -1110,6 +1242,21 @@ version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
[[package]]
name = "dtoa"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653"
[[package]]
name = "dtoa-short"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbaceec3c6e4211c79e7b1800fb9680527106beb2f9c51904a3210c03a448c74"
dependencies = [
"dtoa",
]
[[package]]
name = "duct"
version = "0.13.7"
@ -1137,7 +1284,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbfb21b9878cf7a348dcb8559109aabc0ec40d69924bd706fa5149846c4fef75"
dependencies = [
"base64",
"base64 0.21.7",
"memchr",
]
@ -1205,6 +1352,17 @@ dependencies = [
"once_cell",
]
[[package]]
name = "fancy-regex"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2"
dependencies = [
"bit-set",
"regex-automata 0.4.5",
"regex-syntax 0.8.2",
]
[[package]]
name = "fastrand"
version = "2.0.1"
@ -1389,6 +1547,15 @@ dependencies = [
"slab",
]
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
name = "generic-array"
version = "0.14.7"
@ -1470,7 +1637,7 @@ dependencies = [
"futures-sink",
"futures-util",
"http 0.2.11",
"indexmap",
"indexmap 2.2.3",
"slab",
"tokio",
"tokio-util",
@ -1489,7 +1656,7 @@ dependencies = [
"futures-sink",
"futures-util",
"http 1.0.0",
"indexmap",
"indexmap 2.2.3",
"slab",
"tokio",
"tokio-util",
@ -1583,6 +1750,15 @@ dependencies = [
"winapi",
]
[[package]]
name = "html-escape"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d1ad449764d627e22bfd7cd5e8868264fc9236e07c752972b4080cd351cb476"
dependencies = [
"utf8-width",
]
[[package]]
name = "http"
version = "0.2.11"
@ -1846,6 +2022,17 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "indexmap"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown 0.12.3",
"serde",
]
[[package]]
name = "indexmap"
version = "2.2.3"
@ -1901,6 +2088,15 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "itertools"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.12.1"
@ -1931,7 +2127,7 @@ version = "9.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c7ea04a7c5c055c175f189b6dc6ba036fd62306b58c66c9f6389036c503a3f4"
dependencies = [
"base64",
"base64 0.21.7",
"js-sys",
"pem",
"ring",
@ -1956,7 +2152,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357ff5edb6d8326473a64c82cf41ddf78ab116f89668c50c4fac1b321e5e80f4"
dependencies = [
"async-trait",
"base64",
"base64 0.21.7",
"chumsky",
"email-encoding",
"email_address",
@ -2013,6 +2209,31 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "lightningcss"
version = "1.0.0-alpha.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07d306844e5af1753490c420c0d6ae3d814b00725092d106332762827ca8f0fe"
dependencies = [
"ahash 0.8.9",
"bitflags 2.4.2",
"const-str",
"cssparser",
"cssparser-color",
"dashmap",
"data-encoding",
"getrandom",
"itertools 0.10.5",
"lazy_static",
"parcel_selectors",
"parcel_sourcemap",
"paste",
"pathdiff",
"rayon",
"serde",
"smallvec",
]
[[package]]
name = "linked-hash-map"
version = "0.5.6"
@ -2094,6 +2315,12 @@ version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
[[package]]
name = "maplit"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
[[package]]
name = "match_cfg"
version = "0.1.0"
@ -2109,6 +2336,12 @@ dependencies = [
"regex-automata 0.1.10",
]
[[package]]
name = "matches"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5"
[[package]]
name = "matchit"
version = "0.7.3"
@ -2174,6 +2407,12 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "mod_use"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d95ee98a292cf91c2f5b3f35424773af16842a68b3be33b389137606b2633539"
[[package]]
name = "native-tls"
version = "0.2.11"
@ -2323,7 +2562,7 @@ dependencies = [
"chrono",
"futures",
"humantime",
"itertools",
"itertools 0.12.1",
"parking_lot",
"percent-encoding",
"snafu",
@ -2383,6 +2622,15 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "ordered-float"
version = "2.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c"
dependencies = [
"num-traits",
]
[[package]]
name = "ordered-float"
version = "3.9.2"
@ -2426,12 +2674,48 @@ dependencies = [
"syn 2.0.50",
]
[[package]]
name = "outref"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f222829ae9293e33a9f5e9f440c6760a3d450a64affe1846486b140db81c1f4"
[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "parcel_selectors"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05d74befe2d076330d9a58bf9ca2da424568724ab278adf15fb5718253133887"
dependencies = [
"bitflags 2.4.2",
"cssparser",
"fxhash",
"log",
"phf 0.10.1",
"phf_codegen 0.10.0",
"precomputed-hash",
"smallvec",
]
[[package]]
name = "parcel_sourcemap"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "485b74d7218068b2b7c0e3ff12fbc61ae11d57cb5d8224f525bd304c6be05bbb"
dependencies = [
"base64-simd",
"data-url",
"rkyv",
"serde",
"serde_json",
"vlq",
]
[[package]]
name = "parking_lot"
version = "0.12.1"
@ -2481,13 +2765,19 @@ version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
[[package]]
name = "pathdiff"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"
[[package]]
name = "pem"
version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b8fcc794035347fb64beda2d3b462595dd2753e3f268d89c5aae77e8cf2c310"
dependencies = [
"base64",
"base64 0.21.7",
"serde",
]
@ -2551,13 +2841,33 @@ dependencies = [
"sha2",
]
[[package]]
name = "phf"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259"
dependencies = [
"phf_shared 0.10.0",
]
[[package]]
name = "phf"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc"
dependencies = [
"phf_shared",
"phf_macros",
"phf_shared 0.11.2",
]
[[package]]
name = "phf_codegen"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd"
dependencies = [
"phf_generator 0.10.0",
"phf_shared 0.10.0",
]
[[package]]
@ -2566,8 +2876,18 @@ version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a"
dependencies = [
"phf_generator",
"phf_shared",
"phf_generator 0.11.2",
"phf_shared 0.11.2",
]
[[package]]
name = "phf_generator"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6"
dependencies = [
"phf_shared 0.10.0",
"rand",
]
[[package]]
@ -2576,10 +2896,32 @@ version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0"
dependencies = [
"phf_shared",
"phf_shared 0.11.2",
"rand",
]
[[package]]
name = "phf_macros"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b"
dependencies = [
"phf_generator 0.11.2",
"phf_shared 0.11.2",
"proc-macro2",
"quote",
"syn 2.0.50",
]
[[package]]
name = "phf_shared"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096"
dependencies = [
"siphasher",
]
[[package]]
name = "phf_shared"
version = "0.11.2"
@ -2660,6 +3002,12 @@ version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "precomputed-hash"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]]
name = "pretty_assertions"
version = "1.4.0"
@ -2741,6 +3089,26 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "qbit-rs"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "167a5e28adf918639d3b180cfe5c919ed38200d5517c88f9b132a2e54a995468"
dependencies = [
"mod_use",
"reqwest",
"serde",
"serde-value",
"serde_json",
"serde_repr",
"serde_with",
"tap",
"thiserror",
"tracing",
"typed-builder",
"url",
]
[[package]]
name = "quick-xml"
version = "0.30.0"
@ -2802,6 +3170,26 @@ dependencies = [
"getrandom",
]
[[package]]
name = "rayon"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4963ed1bc86e4f3ee217022bd855b297cef07fb9eac5dfa1f788b220b49b3bd"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]]
name = "recorder"
version = "0.1.0"
@ -2811,11 +3199,18 @@ dependencies = [
"bytes",
"chrono",
"eyre",
"fancy-regex",
"futures",
"html-escape",
"include_dir",
"insta",
"itertools",
"itertools 0.12.1",
"lazy_static",
"lightningcss",
"loco-rs",
"maplit",
"qbit-rs",
"regex",
"reqwest",
"rss",
"rstest",
@ -2825,9 +3220,12 @@ dependencies = [
"serde_json",
"serial_test",
"thiserror",
"tl",
"tokio",
"tracing",
"tracing-subscriber",
"uni-path",
"url",
"uuid",
"validator",
]
@ -2963,7 +3361,7 @@ version = "0.11.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6920094eb85afde5e4a138be3f2de8bbdf28000f0029e72c45025a56b042251"
dependencies = [
"base64",
"base64 0.21.7",
"bytes",
"encoding_rs",
"futures-core",
@ -3222,7 +3620,7 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
dependencies = [
"base64",
"base64 0.21.7",
]
[[package]]
@ -3231,7 +3629,7 @@ version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c333bb734fcdedcea57de1602543590f545f127dc8b533324318fd492c5c70b"
dependencies = [
"base64",
"base64 0.21.7",
"rustls-pki-types",
]
@ -3433,7 +3831,7 @@ dependencies = [
"chrono",
"derivative",
"inherent",
"ordered-float",
"ordered-float 3.9.2",
"rust_decimal",
"sea-query-derive",
"serde_json",
@ -3540,6 +3938,16 @@ dependencies = [
"serde_derive",
]
[[package]]
name = "serde-value"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c"
dependencies = [
"ordered-float 2.10.1",
"serde",
]
[[package]]
name = "serde_derive"
version = "1.0.197"
@ -3582,6 +3990,17 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_repr"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b2e6b945e9d3df726b65d6ee24060aff8e3533d431f677a9695db04eff9dfdb"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.50",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
@ -3603,13 +4022,41 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_with"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07ff71d2c147a7b57362cead5e22f772cd52f6ab31cfcd9edcd7f6aeb2a0afbe"
dependencies = [
"base64 0.13.1",
"chrono",
"hex",
"indexmap 1.9.3",
"serde",
"serde_json",
"serde_with_macros",
"time",
]
[[package]]
name = "serde_with_macros"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f"
dependencies = [
"darling 0.20.8",
"proc-macro2",
"quote",
"syn 2.0.50",
]
[[package]]
name = "serde_yaml"
version = "0.9.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fd075d994154d4a774f95b51fb96bdc2832b0ea48425c92546073816cda1f2f"
dependencies = [
"indexmap",
"indexmap 2.2.3",
"itoa",
"ryu",
"serde",
@ -3734,6 +4181,15 @@ dependencies = [
"rand_core",
]
[[package]]
name = "simd-abstraction"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cadb29c57caadc51ff8346233b5cec1d240b68ce55cf1afc764818791876987"
dependencies = [
"outref",
]
[[package]]
name = "simdutf8"
version = "0.1.4"
@ -3877,7 +4333,7 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c"
dependencies = [
"itertools",
"itertools 0.12.1",
"nom",
"unicode_categories",
]
@ -3919,7 +4375,7 @@ dependencies = [
"futures-util",
"hashlink",
"hex",
"indexmap",
"indexmap 2.2.3",
"log",
"memchr",
"once_cell",
@ -3990,7 +4446,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e37195395df71fd068f6e2082247891bc11e3289624bbc776a0cdfa1ca7f1ea4"
dependencies = [
"atoi",
"base64",
"base64 0.21.7",
"bigdecimal",
"bitflags 2.4.2",
"byteorder",
@ -4037,7 +4493,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6ac0ac3b7ccd10cc96c7ab29791a7dd236bd94021f31eec7ba3d46a74aa1c24"
dependencies = [
"atoi",
"base64",
"base64 0.21.7",
"bigdecimal",
"bitflags 2.4.2",
"byteorder",
@ -4354,6 +4810,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tl"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b130bd8a58c163224b44e217b4239ca7b927d82bf6cc2fea1fc561d15056e3f7"
[[package]]
name = "tokio"
version = "1.36.0"
@ -4442,7 +4904,7 @@ version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1"
dependencies = [
"indexmap",
"indexmap 2.2.3",
"toml_datetime",
"winnow",
]
@ -4583,6 +5045,17 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "typed-builder"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64cba322cb9b7bc6ca048de49e83918223f35e7a86311267013afff257004870"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "typenum"
version = "1.17.0"
@ -4595,6 +5068,12 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9"
[[package]]
name = "uni-path"
version = "1.51.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25e328d505b1f855c20e7358711b7ec6398524181664f016dd15cfb36c3a6275"
[[package]]
name = "unic-char-property"
version = "0.9.0"
@ -4720,6 +5199,7 @@ dependencies = [
"form_urlencoded",
"idna 0.5.0",
"percent-encoding",
"serde",
]
[[package]]
@ -4810,6 +5290,12 @@ version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "vlq"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65dd7eed29412da847b0f78bcec0ac98588165988a8cfe41d4ea1d429f8ccfff"
[[package]]
name = "walkdir"
version = "2.4.0"

View File

@ -36,6 +36,16 @@ rss = "2.0.7"
bytes = "1.5.0"
futures = "0.3.30"
itertools = "0.12.1"
qbit-rs = "0.4.1"
url = "2.5.0"
fancy-regex = "0.13.0"
regex = "1.10.3"
lazy_static = "1.4.0"
maplit = "1.0.2"
uni-path = "1.51.1"
tl = { version = "0.7.8", features = ["simd"] }
lightningcss = "1.0.0-alpha.54"
html-escape = "0.2.13"
[lib]
name = "recorder"

View File

@ -1,8 +1,8 @@
#![allow(unused_imports)]
use eyre::Context;
#[allow(unused_imports)]
use loco_rs::{cli::playground, prelude::*};
async fn fetch_and_parse_rss_demo () -> eyre::Result<()> {
async fn fetch_and_parse_rss_demo() -> eyre::Result<()> {
let url =
"https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";

View File

@ -14,7 +14,7 @@ use loco_rs::{
use sea_orm::DatabaseConnection;
use crate::{
controllers, migrations::Migrator, models::_entities::subscribers,
controllers, migrations::Migrator, models::entities::subscribers,
workers::downloader::DownloadWorker,
};

View File

@ -1,6 +1,6 @@
use loco_rs::prelude::*;
use crate::{models::_entities::subscribers, views::subscribers::CurrentResponse};
use crate::{models::entities::subscribers, views::subscribers::CurrentResponse};
async fn current(State(ctx): State<AppContext>) -> Result<Json<CurrentResponse>> {
let subscriber = subscribers::Model::find_root(&ctx.db).await?;

View File

@ -1,6 +0,0 @@
use bytes::Bytes;
/// Fetch a URL with reqwest's default client and return the raw body bytes.
/// (Removed in this commit in favor of the generic, UA-setting version.)
pub async fn download_bytes (url: &str) -> eyre::Result<Bytes> {
    let bytes = reqwest::get(url).await?.bytes().await?;
    Ok(bytes)
}

View File

@ -1 +0,0 @@
pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";

View File

@ -1,4 +0,0 @@
pub mod aria;
pub mod qbitorrent;
pub mod defs;
pub mod bytes;

View File

@ -0,0 +1,67 @@
#![allow(unused_variables)]
use super::{
defs::{Torrent, TorrentFilter, TorrentSources},
torrent_downloader::TorrentDownloader,
};
use crate::path::{VFSPathBuf, VFSSubPath};
/// Placeholder aria2 backend. Every trait method currently panics via
/// `unimplemented!()`; the type exists so the downloader abstraction has a
/// second implementor while aria2 support is developed.
#[derive(Debug)]
pub struct AriaDownloader {}
#[async_trait::async_trait]
impl TorrentDownloader for AriaDownloader {
    /// List torrents matching the filter/category/tag. Unimplemented.
    async fn get_torrents_info(
        &self,
        status_filter: TorrentFilter,
        category: String,
        tag: Option<String>,
    ) -> eyre::Result<Vec<Torrent>> {
        unimplemented!()
    }
    /// Add torrents from URLs or raw files. Unimplemented.
    async fn add_torrents(
        &self,
        source: TorrentSources,
        save_path: String,
        category: Option<String>,
    ) -> eyre::Result<()> {
        unimplemented!()
    }
    /// Delete torrents by hash. Unimplemented.
    async fn delete_torrents(&self, hashes: Vec<String>) -> eyre::Result<()> {
        unimplemented!()
    }
    /// Rename a file inside a torrent. Unimplemented.
    async fn rename_torrent_file(
        &self,
        hash: &str,
        old_path: &str,
        new_path: &str,
    ) -> eyre::Result<()> {
        unimplemented!()
    }
    /// Move torrents to a new location. Unimplemented.
    async fn move_torrents(&self, hashes: Vec<String>, new_path: &str) -> eyre::Result<()> {
        unimplemented!()
    }
    /// Resolve the save path for the given hashes. Unimplemented.
    async fn get_torrent_path(&self, hashes: String) -> eyre::Result<Option<String>> {
        unimplemented!()
    }
    /// Probe connectivity to the backend. Unimplemented.
    async fn check_connection(&self) -> eyre::Result<()> {
        unimplemented!()
    }
    /// Assign a category to torrents. Unimplemented.
    async fn set_torrents_category(&self, hashes: Vec<String>, category: &str) -> eyre::Result<()> {
        unimplemented!()
    }
    /// Tag torrents. Unimplemented.
    async fn add_torrent_tags(&self, hashes: Vec<String>, tags: Vec<String>) -> eyre::Result<()> {
        unimplemented!()
    }
    /// Map a virtual sub-path to the backend save path. Unimplemented.
    fn get_save_path(&self, sub_path: &VFSSubPath) -> VFSPathBuf {
        unimplemented!()
    }
}

View File

@ -0,0 +1,12 @@
use bytes::Bytes;
use reqwest::IntoUrl;
use super::defs::DEFAULT_USER_AEGNT;
/// Fetch `url` and return the raw response body.
///
/// Builds a one-shot client carrying the crate-wide default User-Agent
/// (`DEFAULT_USER_AEGNT`) rather than using reqwest's global default.
pub async fn download_bytes<T: IntoUrl>(url: T) -> eyre::Result<Bytes> {
    let client = reqwest::Client::builder()
        .user_agent(DEFAULT_USER_AEGNT)
        .build()?;
    Ok(client.get(url).send().await?.bytes().await?)
}

View File

@ -0,0 +1,89 @@
pub use qbit_rs::model::{
Torrent as QbitTorrent, TorrentContent as QbitTorrentContent,
TorrentFilter as QbitTorrentFilter, TorrentSource as QbitTorrentSource,
};
use serde::{Deserialize, Serialize};
use url::Url;
/// MIME type of `.torrent` payloads.
pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
/// Wget-style User-Agent string sent by the download helpers.
/// NOTE(review): "AEGNT" is a typo for "AGENT"; it is referenced from several
/// sibling modules, so renaming is flagged here rather than done in place.
pub const DEFAULT_USER_AEGNT: &str = "Wget/1.13.4 (linux-gnu)";
/// Torrent status filter, mirroring qBittorrent's WebAPI filter set
/// (see the `From` impl below for the 1:1 mapping).
/// Serialized as snake_case so it round-trips through serde cleanly.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum TorrentFilter {
    All,
    Downloading,
    Completed,
    Paused,
    Active,
    Inactive,
    Resumed,
    Stalled,
    StalledUploading,
    StalledDownloading,
    Errored,
}
/// 1:1 lowering into the qbit-rs filter type. Kept as an exhaustive match
/// (no `_` arm) so adding a variant to either enum forces an update here.
impl From<TorrentFilter> for QbitTorrentFilter {
    fn from(val: TorrentFilter) -> Self {
        match val {
            TorrentFilter::All => QbitTorrentFilter::All,
            TorrentFilter::Downloading => QbitTorrentFilter::Downloading,
            TorrentFilter::Completed => QbitTorrentFilter::Completed,
            TorrentFilter::Paused => QbitTorrentFilter::Paused,
            TorrentFilter::Active => QbitTorrentFilter::Active,
            TorrentFilter::Inactive => QbitTorrentFilter::Inactive,
            TorrentFilter::Resumed => QbitTorrentFilter::Resumed,
            TorrentFilter::Stalled => QbitTorrentFilter::Stalled,
            TorrentFilter::StalledUploading => QbitTorrentFilter::StalledUploading,
            TorrentFilter::StalledDownloading => QbitTorrentFilter::StalledDownloading,
            TorrentFilter::Errored => QbitTorrentFilter::Errored,
        }
    }
}
/// Backend-agnostic torrent input: remote links or raw `.torrent` bytes.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TorrentSources {
    /// Links to add (magnet/HTTP), already parsed as `Url`s.
    Urls { urls: Vec<Url> },
    /// Raw `.torrent` file contents as a single byte buffer.
    /// NOTE(review): single `Vec<u8>` despite the plural name — confirm
    /// whether multiple files are intended to be supported.
    TorrentFiles { torrents: Vec<u8> },
}
/// Lower our backend-agnostic source into the qbit-rs API type.
impl From<TorrentSources> for QbitTorrentSource {
    fn from(source: TorrentSources) -> Self {
        match source {
            TorrentSources::TorrentFiles { torrents } => {
                QbitTorrentSource::TorrentFiles { torrents }
            }
            TorrentSources::Urls { urls } => {
                // qbit-rs expects the URL list wrapped in its `Sep` joiner.
                QbitTorrentSource::Urls {
                    urls: qbit_rs::model::Sep::from(urls),
                }
            }
        }
    }
}
/// Minimal read-only view over a single file inside a torrent,
/// shared across downloader backends.
pub trait TorrentContent {
    /// File name/path as reported by the backend.
    fn get_name(&self) -> &str;
}
impl TorrentContent for QbitTorrentContent {
    /// The file path/name as reported by qBittorrent.
    fn get_name(&self) -> &str {
        // Deref-coerce the owned `String` field straight to `&str`.
        &self.name
    }
}
/// A torrent together with its file listing, tagged by originating backend.
pub enum Torrent {
    Qbit {
        /// The torrent's metadata from the qBittorrent list API.
        torrent: QbitTorrent,
        /// Per-file contents fetched separately from the contents API.
        contents: Vec<QbitTorrentContent>,
    },
}
impl Torrent {
    /// Iterate over this torrent's files as trait objects, hiding the
    /// backend-specific content type from callers.
    pub fn iter_files(&self) -> impl Iterator<Item = &dyn TorrentContent> {
        // `Torrent` has a single variant, so an irrefutable `let`
        // destructure replaces the one-arm `match`.
        let Torrent::Qbit { contents, .. } = self;
        contents.iter().map(|c| c as &dyn TorrentContent)
    }
}

View File

@ -0,0 +1,9 @@
use thiserror::Error;
/// Errors surfaced by downloader backends.
#[derive(Error, Debug)]
pub enum DownloaderError {
    // NOTE(review): also used for downloader-*category* mismatches (see
    // `QBittorrentDownloader::from_downloader_model`), not only MIME types;
    // consider a dedicated variant.
    #[error("Invalid mime (expected {expected:?}, got {found:?})")]
    InvalidMime { expected: String, found: String },
    // `#[from]` lets `?` convert `url::ParseError` automatically.
    #[error("Invalid url format")]
    InvalidUrlFormat(#[from] url::ParseError),
}

View File

@ -0,0 +1,11 @@
use reqwest::IntoUrl;
use super::defs::DEFAULT_USER_AEGNT;
/// Fetch `url` and return the response body decoded as text.
///
/// Uses a one-shot client with the crate-wide default User-Agent
/// (`DEFAULT_USER_AEGNT`).
pub async fn download_html<U: IntoUrl>(url: U) -> eyre::Result<String> {
    let response = reqwest::Client::builder()
        .user_agent(DEFAULT_USER_AEGNT)
        .build()?
        .get(url)
        .send()
        .await?;
    Ok(response.text().await?)
}

View File

@ -0,0 +1,8 @@
use bytes::Bytes;
use reqwest::IntoUrl;
use super::bytes::download_bytes;
/// Download an image as raw bytes. Currently a thin alias for
/// `download_bytes`; kept as a separate entry point so image-specific
/// handling (e.g. MIME validation) can be added without touching callers.
pub async fn download_image<U: IntoUrl>(url: U) -> eyre::Result<Bytes> {
    download_bytes(url).await
}

View File

@ -0,0 +1,8 @@
// Downloader backends and shared helpers, alphabetically ordered.
// (`qbitorrent` is a typo for `qbittorrent`, but renaming the module is a
// cross-file change and is only flagged here.)
pub mod aria;
pub mod bytes;
pub mod defs;
pub mod error;
pub mod html;
pub mod image;
pub mod qbitorrent;
pub mod torrent_downloader;

View File

@ -0,0 +1,181 @@
use std::fmt::Debug;
use eyre::OptionExt;
use futures::future::try_join_all;
use qbit_rs::{
model::{AddTorrentArg, Credential, GetTorrentListArg, NonEmptyStr},
Qbit,
};
use url::Url;
use super::{
defs::{Torrent, TorrentFilter, TorrentSources},
error::DownloaderError,
torrent_downloader::TorrentDownloader,
};
use crate::{
models::{entities::downloaders, prelude::DownloaderCategory},
path::{VFSPathBuf, VFSSubPath},
};
/// qBittorrent-backed downloader, talking to the WebAPI via `qbit_rs::Qbit`.
pub struct QBittorrentDownloader {
    /// Owning subscriber's id, copied from the database row.
    pub subscriber_id: i32,
    /// Base URL of the qBittorrent WebAPI endpoint.
    pub endpoint_url: Url,
    /// Authenticated API client.
    pub client: Qbit,
    /// Save directory taken from the model's `download_path`.
    pub save_path: String,
}
impl QBittorrentDownloader {
    /// Build a client from a persisted downloader row.
    ///
    /// # Errors
    /// - `DownloaderError::InvalidMime` when the row's category is not
    ///   qBittorrent (NOTE(review): the variant name says "mime" but it is
    ///   used here for a category mismatch).
    /// - `DownloaderError::InvalidUrlFormat` when the stored endpoint URL
    ///   fails to parse.
    pub fn from_downloader_model(model: downloaders::Model) -> Result<Self, DownloaderError> {
        if model.category != DownloaderCategory::QBittorrent {
            return Err(DownloaderError::InvalidMime {
                expected: DownloaderCategory::QBittorrent.to_string(),
                found: model.category.to_string(),
            });
        }
        // `InvalidUrlFormat` carries `#[from] url::ParseError`, so `?`
        // converts directly — the explicit `map_err` was redundant.
        let endpoint_url = model.endpoint_url()?;
        let credential = Credential::new(model.username, model.password);
        let client = Qbit::new(endpoint_url.clone(), credential);
        Ok(Self {
            client,
            endpoint_url,
            subscriber_id: model.subscriber_id,
            save_path: model.download_path,
        })
    }
    /// Fetch the remote WebAPI version; used as a cheap connectivity probe.
    async fn api_version(&self) -> eyre::Result<String> {
        let result = self.client.get_webapi_version().await?;
        Ok(result)
    }
}
#[async_trait::async_trait]
impl TorrentDownloader for QBittorrentDownloader {
    /// List torrents matching `status_filter`/`category` (and optional `tag`),
    /// pairing each torrent with its file contents.
    async fn get_torrents_info(
        &self,
        status_filter: TorrentFilter,
        category: String,
        tag: Option<String>,
    ) -> eyre::Result<Vec<Torrent>> {
        let arg = GetTorrentListArg {
            filter: Some(status_filter.into()),
            category: Some(category),
            tag,
            ..Default::default()
        };
        let torrent_list = self.client.get_torrent_list(arg).await?;
        // Fetch per-torrent file listings concurrently; a torrent without a
        // hash contributes an empty list instead of failing the whole batch.
        let torrent_contents = try_join_all(torrent_list.iter().map(|s| async {
            if let Some(hash) = &s.hash {
                self.client.get_torrent_contents(hash as &str, None).await
            } else {
                Ok(vec![])
            }
        }))
        .await?;
        // try_join_all preserves input order, so zip realigns each torrent
        // with its own contents.
        Ok(torrent_list
            .into_iter()
            .zip(torrent_contents)
            .map(|(torrent, contents)| Torrent::Qbit { torrent, contents })
            .collect::<Vec<_>>())
    }

    /// Submit torrents to qBittorrent. Auto torrent management is disabled so
    /// the explicit `save_path` is honored.
    async fn add_torrents(
        &self,
        source: TorrentSources,
        save_path: String,
        category: Option<String>,
    ) -> eyre::Result<()> {
        let arg = AddTorrentArg {
            source: source.into(),
            savepath: Some(save_path),
            category,
            auto_torrent_management: Some(false),
            ..Default::default()
        };
        self.client.add_torrent(arg).await?;
        Ok(())
    }

    /// Delete the given torrents. The `None` second argument means torrent
    /// data files are left on disk (only the torrents are removed).
    async fn delete_torrents(&self, hashes: Vec<String>) -> eyre::Result<()> {
        self.client.delete_torrents(hashes, None).await?;
        Ok(())
    }

    /// Rename a single file inside the torrent identified by `hash`.
    async fn rename_torrent_file(
        &self,
        hash: &str,
        old_path: &str,
        new_path: &str,
    ) -> eyre::Result<()> {
        self.client.rename_file(hash, old_path, new_path).await?;
        Ok(())
    }

    /// Change the on-disk location of the given torrents.
    async fn move_torrents(&self, hashes: Vec<String>, new_path: &str) -> eyre::Result<()> {
        self.client.set_torrent_location(hashes, new_path).await?;
        Ok(())
    }

    /// Look up the save path of the first torrent matching `hashes`
    /// (qBittorrent accepts a `|`-separated hash list in this field).
    async fn get_torrent_path(&self, hashes: String) -> eyre::Result<Option<String>> {
        let mut torrent_list = self
            .client
            .get_torrent_list(GetTorrentListArg {
                hashes: Some(hashes),
                ..Default::default()
            })
            .await?;
        let torrent = torrent_list.first_mut().ok_or_eyre("No torrent found")?;
        // take() moves the path out without cloning the rest of the entry.
        Ok(torrent.save_path.take())
    }

    /// Connectivity check: any successful WebAPI round-trip will do.
    async fn check_connection(&self) -> eyre::Result<()> {
        self.api_version().await?;
        Ok(())
    }

    /// Assign `category` to the torrents, auto-creating the category (rooted
    /// at `save_path`) if qBittorrent reports it does not exist yet.
    async fn set_torrents_category(&self, hashes: Vec<String>, category: &str) -> eyre::Result<()> {
        if category.is_empty() {
            return Err(eyre::anyhow!("Category cannot be empty"));
        }
        let result = self
            .client
            .set_torrent_category(hashes.clone(), category)
            .await;
        // First attempt failed specifically because the category is missing:
        // create it, then retry once. Any other error is propagated as-is.
        if let Err(qbit_rs::Error::ApiError(qbit_rs::ApiError::CategoryNotFound)) = result {
            self.client
                .add_category(
                    // Safe: emptiness was rejected above.
                    NonEmptyStr::new(category)
                        .unwrap_or_else(|| unreachable!("Category cannot be empty")),
                    self.save_path.as_str(),
                )
                .await?;
            self.client.set_torrent_category(hashes, category).await?;
        } else {
            result?;
        }
        Ok(())
    }

    /// Attach tags to the given torrents.
    async fn add_torrent_tags(&self, hashes: Vec<String>, tags: Vec<String>) -> eyre::Result<()> {
        self.client.add_torrent_tags(hashes, tags).await?;
        Ok(())
    }

    /// Join `sub_path` onto this downloader's configured save path.
    fn get_save_path(&self, sub_path: &VFSSubPath) -> VFSPathBuf {
        VFSPathBuf::new(self.save_path.clone(), sub_path.to_path_buf())
    }
}
impl Debug for QBittorrentDownloader {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("QBittorrentDownloader")
.field("subscriber_id", &self.subscriber_id)
.field("client", &self.endpoint_url.as_str())
.finish()
}
}

View File

@ -0,0 +1,109 @@
use downloaders::DownloaderCategory;
use sea_orm::{ActiveModelTrait, ActiveValue, DatabaseConnection, IntoActiveModel};
use url::Url;
use super::{
bytes::download_bytes,
defs::{Torrent, TorrentFilter, TorrentSources},
qbitorrent::QBittorrentDownloader,
};
use crate::{
models::{bangumi, downloaders, downloads},
path::{torrent_path::gen_bangumi_sub_path, VFSPathBuf, VFSSubPath},
};
/// Backend-agnostic torrent downloader contract implemented by each concrete
/// client (currently qBittorrent).
#[async_trait::async_trait]
pub trait TorrentDownloader {
    /// List torrents by status filter, category, and optional tag.
    async fn get_torrents_info(
        &self,
        status_filter: TorrentFilter,
        category: String,
        tag: Option<String>,
    ) -> eyre::Result<Vec<Torrent>>;

    /// Submit torrents to be downloaded under `save_path`.
    async fn add_torrents(
        &self,
        source: TorrentSources,
        save_path: String,
        category: Option<String>,
    ) -> eyre::Result<()>;

    /// Remove the given torrents from the client.
    async fn delete_torrents(&self, hashes: Vec<String>) -> eyre::Result<()>;

    /// Rename one file inside the torrent identified by `hash`.
    async fn rename_torrent_file(
        &self,
        hash: &str,
        old_path: &str,
        new_path: &str,
    ) -> eyre::Result<()>;

    /// Move the given torrents' data to `new_path`.
    async fn move_torrents(&self, hashes: Vec<String>, new_path: &str) -> eyre::Result<()>;

    /// Fetch the save path of the torrent(s) matching `hashes`.
    async fn get_torrent_path(&self, hashes: String) -> eyre::Result<Option<String>>;

    /// Cheap reachability probe against the backend.
    async fn check_connection(&self) -> eyre::Result<()>;

    /// Assign a (non-empty) category to the given torrents.
    async fn set_torrents_category(&self, hashes: Vec<String>, category: &str) -> eyre::Result<()>;

    /// Attach tags to the given torrents.
    async fn add_torrent_tags(&self, hashes: Vec<String>, tags: Vec<String>) -> eyre::Result<()>;

    /// Resolve `sub_path` against the downloader's configured save root.
    fn get_save_path(&self, sub_path: &VFSSubPath) -> VFSPathBuf;

    /// Queue every download of a bangumi, persisting a generated sub path on
    /// the bangumi row the first time one is needed.
    // NOTE(review): method name keeps the existing "downlods" misspelling for
    // API compatibility with current callers.
    async fn add_downlods_for_bangumi<'a, 'b>(
        &self,
        db: &'a DatabaseConnection,
        downloads: &[&downloads::Model],
        mut bangumi: bangumi::Model,
    ) -> eyre::Result<bangumi::Model> {
        // Generate and persist the sub path exactly once per bangumi.
        if bangumi.sub_path.is_none() {
            let gen_sub_path = gen_bangumi_sub_path(&bangumi);
            let mut bangumi_active = bangumi.into_active_model();
            bangumi_active.sub_path = ActiveValue::Set(Some(gen_sub_path.to_string()));
            bangumi = bangumi_active.update(db).await?;
        }
        let sub_path = bangumi
            .sub_path
            .as_ref()
            .unwrap_or_else(|| unreachable!("must have a sub path"));
        // Fail fast on the first malformed URL instead of hand-rolling a
        // push loop with an odd `&m.url as &str` cast.
        let torrent_urls = downloads
            .iter()
            .map(|m| Url::parse(m.url.as_str()))
            .collect::<Result<Vec<_>, _>>()?;
        let source = build_torrent_source_from_urls(torrent_urls.into_iter()).await?;
        self.add_torrents(source, sub_path.to_string(), Some("bangumi".to_string()))
            .await?;
        Ok(bangumi)
    }
}
/// Instantiate the concrete downloader described by a `downloaders` row,
/// boxed behind the [`TorrentDownloader`] trait.
pub fn build_torrent_downloader_from_downloader_model(
    model: downloaders::Model,
) -> eyre::Result<Box<dyn TorrentDownloader>> {
    let downloader = match &model.category {
        DownloaderCategory::QBittorrent => QBittorrentDownloader::from_downloader_model(model)?,
    };
    Ok(Box::new(downloader))
}
/// Turn a single URL into a torrent source: magnet links are passed through
/// as URLs, anything else is fetched and submitted as raw torrent bytes.
pub async fn build_torrent_source_from_url(url: Url) -> eyre::Result<TorrentSources> {
    match url.scheme() {
        "magnet" => Ok(TorrentSources::Urls { urls: vec![url] }),
        _ => {
            let bytes = download_bytes(url).await?;
            Ok(TorrentSources::TorrentFiles {
                torrents: bytes.into(),
            })
        }
    }
}
/// Collect many URLs into a single URL-based torrent source.
pub async fn build_torrent_source_from_urls<IU: Iterator<Item = Url>>(
    urls: IU,
) -> eyre::Result<TorrentSources> {
    let collected: Vec<Url> = urls.collect();
    Ok(TorrentSources::Urls { urls: collected })
}

View File

@ -1,8 +1,11 @@
#![feature(async_closure)]
pub mod app;
pub mod controllers;
pub mod downloader;
pub mod downloaders;
pub mod migrations;
pub mod models;
pub mod parsers;
pub mod path;
pub mod subscriptions;
pub mod tasks;
pub mod views;

View File

@ -1,9 +1,7 @@
use std::{collections::HashSet};
use std::fmt::Display;
use std::{collections::HashSet, fmt::Display};
use sea_orm::{DeriveIden, Statement};
use sea_orm_migration::prelude::*;
use sea_orm_migration::prelude::extension::postgres::IntoTypeRef;
use sea_orm_migration::prelude::{extension::postgres::IntoTypeRef, *};
use crate::migrations::extension::postgres::Type;
@ -19,6 +17,7 @@ pub enum Subscribers {
Id,
Pid,
DisplayName,
DownloaderId,
}
#[derive(DeriveIden)]
@ -65,6 +64,18 @@ pub enum Downloads {
Url,
}
#[derive(DeriveIden)]
pub enum Downloaders {
Table,
Id,
Category,
Endpoint,
Password,
Username,
SubscriberId,
DownloadPath,
}
#[async_trait::async_trait]
pub trait CustomSchemaManagerExt {
async fn create_postgres_auto_update_ts_fn(&self, col_name: &str) -> Result<(), DbErr>;
@ -141,7 +152,7 @@ pub trait CustomSchemaManagerExt {
async fn create_postgres_enum_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send,
I: IntoIterator<Item=T> + Send,
I: IntoIterator<Item = T> + Send,
>(
&self,
enum_name: E,
@ -151,7 +162,7 @@ pub trait CustomSchemaManagerExt {
async fn add_postgres_enum_values_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send,
I: IntoIterator<Item=T> + Send,
I: IntoIterator<Item = T> + Send,
>(
&self,
enum_name: E,
@ -229,7 +240,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
async fn create_postgres_enum_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send,
I: IntoIterator<Item=T> + Send,
I: IntoIterator<Item = T> + Send,
>(
&self,
enum_name: E,
@ -241,12 +252,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
.into_iter()
.map(|v| Alias::new(v.to_string()))
.collect::<Vec<_>>();
self.create_type(
Type::create()
.as_enum(enum_name)
.values(idents)
.to_owned(),
)
self.create_type(Type::create().as_enum(enum_name).values(idents).to_owned())
.await?;
} else {
self.add_postgres_enum_values_for_active_enum(enum_name, values)
@ -258,7 +264,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
async fn add_postgres_enum_values_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send,
I: IntoIterator<Item=T> + Send,
I: IntoIterator<Item = T> + Send,
>(
&self,
enum_name: E,

View File

@ -65,7 +65,9 @@ impl MigrationTrait for Migration {
ForeignKey::create()
.name("fk_subscription_subscriber_id")
.from(Subscriptions::Table, Subscriptions::SubscriberId)
.to(Subscribers::Table, Subscribers::Id),
.to(Subscribers::Table, Subscribers::Id)
.on_update(ForeignKeyAction::Restrict)
.on_delete(ForeignKeyAction::Cascade),
)
.to_owned(),
)
@ -88,7 +90,9 @@ impl MigrationTrait for Migration {
ForeignKey::create()
.name("fk_bangumi_subscription_id")
.from(Bangumi::Table, Bangumi::SubscriptionId)
.to(Subscriptions::Table, Subscriptions::Id),
.to(Subscriptions::Table, Subscriptions::Id)
.on_update(ForeignKeyAction::Restrict)
.on_delete(ForeignKeyAction::Cascade),
)
.to_owned(),
)
@ -109,7 +113,9 @@ impl MigrationTrait for Migration {
ForeignKey::create()
.name("fk_episode_bangumi_id")
.from(Episodes::Table, Episodes::BangumiId)
.to(Bangumi::Table, Bangumi::Id),
.to(Bangumi::Table, Bangumi::Id)
.on_update(ForeignKeyAction::Restrict)
.on_delete(ForeignKeyAction::Cascade),
)
.to_owned(),
)

View File

@ -2,8 +2,10 @@ use loco_rs::schema::table_auto;
use sea_orm_migration::{prelude::*, schema::*};
use super::defs::*;
use crate::models::prelude::{DownloadMime, DownloadStatus};
use crate::models::prelude::downloads::{DownloadMimeEnum, DownloadStatusEnum};
use crate::models::prelude::{
downloads::{DownloadMimeEnum, DownloadStatusEnum},
DownloadMime, DownloadStatus,
};
#[derive(DeriveMigrationName)]
pub struct Migration;
@ -53,13 +55,18 @@ impl MigrationTrait for Migration {
.col(big_unsigned(Downloads::CurrSize))
.col(text(Downloads::Url))
.index(
Index::create().table(Downloads::Table).col(Downloads::Url).name("idx_download_url")
Index::create()
.table(Downloads::Table)
.col(Downloads::Url)
.name("idx_download_url"),
)
.foreign_key(
ForeignKey::create()
.name("fk_download_subscription_id")
.from(Downloads::Table, Downloads::SubscriptionId)
.to(Subscriptions::Table, Subscriptions::Id),
.to(Subscriptions::Table, Subscriptions::Id)
.on_update(ForeignKeyAction::Restrict)
.on_delete(ForeignKeyAction::Cascade),
)
.to_owned(),
)
@ -73,14 +80,16 @@ impl MigrationTrait for Migration {
.alter_table(
Table::alter()
.table(Episodes::Table)
.add_column_if_not_exists(integer(Episodes::DownloadId))
.add_column_if_not_exists(integer_null(Episodes::DownloadId))
.add_foreign_key(
TableForeignKey::new()
.name("fk_episode_download_id")
.from_tbl(Episodes::Table)
.from_col(Episodes::DownloadId)
.to_tbl(Downloads::Table)
.to_col(Downloads::Id),
.to_col(Downloads::Id)
.on_update(ForeignKeyAction::Restrict)
.on_delete(ForeignKeyAction::SetNull),
)
.to_owned(),
)

View File

@ -0,0 +1,102 @@
use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{CustomSchemaManagerExt, Downloaders, GeneralIds, Subscribers},
models::{downloaders::DownloaderCategoryEnum, prelude::DownloaderCategory},
};
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the `downloaders` table (with its Postgres enum) and add an
    /// optional `downloader_id` back-reference on `subscribers`.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // The enum type must exist before any column can reference it.
        manager
            .create_postgres_enum_for_active_enum(
                DownloaderCategoryEnum,
                &[DownloaderCategory::QBittorrent],
            )
            .await?;
        manager
            .create_table(
                table_auto(Downloaders::Table)
                    .col(pk_auto(Downloaders::Id))
                    .col(text(Downloaders::Endpoint))
                    .col(string_null(Downloaders::Username))
                    .col(string_null(Downloaders::Password))
                    .col(enumeration(
                        Downloaders::Category,
                        DownloaderCategoryEnum,
                        DownloaderCategory::iden_values(),
                    ))
                    .col(text(Downloaders::DownloadPath))
                    .col(integer(Downloaders::SubscriberId))
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_downloader_subscriber_id")
                            .from_tbl(Downloaders::Table)
                            .from_col(Downloaders::SubscriberId)
                            .to_tbl(Subscribers::Table)
                            .to_col(Subscribers::Id)
                            // Removing a subscriber removes its downloaders.
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Restrict),
                    )
                    .to_owned(),
            )
            .await?;
        // Keep updated_at maintained by trigger, like the other tables.
        manager
            .create_postgres_auto_update_ts_trigger_for_col(
                Downloaders::Table,
                GeneralIds::UpdatedAt,
            )
            .await?;
        // Optional default-downloader reference on subscribers.
        manager
            .alter_table(
                Table::alter()
                    .table(Subscribers::Table)
                    .add_column_if_not_exists(integer_null(Subscribers::DownloaderId))
                    .add_foreign_key(
                        TableForeignKey::new()
                            .name("fk_subscriber_downloader_id")
                            .from_tbl(Subscribers::Table)
                            .from_col(Subscribers::DownloaderId)
                            .to_tbl(Downloaders::Table)
                            .to_col(Downloaders::Id)
                            // Losing the downloader only clears the reference.
                            .on_delete(ForeignKeyAction::SetNull)
                            .on_update(ForeignKeyAction::Restrict),
                    )
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Revert `up` in reverse order: FK/column first, then trigger, table,
    /// and finally the enum type.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Subscribers::Table)
                    .drop_foreign_key(Alias::new("fk_subscriber_downloader_id"))
                    .drop_column(Subscribers::DownloaderId)
                    .to_owned(),
            )
            .await?;
        manager
            .drop_postgres_auto_update_ts_trigger_for_col(Downloaders::Table, GeneralIds::UpdatedAt)
            .await?;
        manager
            .drop_table(Table::drop().table(Downloaders::Table).to_owned())
            .await?;
        manager
            .drop_postgres_enum_for_active_enum(DownloaderCategoryEnum)
            .await?;
        Ok(())
    }
}

View File

@ -3,6 +3,7 @@ pub use sea_orm_migration::prelude::*;
pub mod defs;
pub mod m20220101_000001_init;
pub mod m20240224_082543_add_downloads;
pub mod m20240225_060853_subscriber_add_downloader;
pub struct Migrator;
@ -12,6 +13,7 @@ impl MigratorTrait for Migrator {
vec![
Box::new(m20220101_000001_init::Migration),
Box::new(m20240224_082543_add_downloads::Migration),
Box::new(m20240225_060853_subscriber_add_downloader::Migration),
]
}
}

View File

@ -1,12 +0,0 @@
pub use super::{
bangumi,
bangumi::Entity as Bangumi,
downloads,
downloads::{DownloadMime, DownloadStatus, Entity as Download},
episodes,
episodes::Entity as Episode,
subscribers,
subscribers::Entity as Subscriber,
subscriptions,
subscriptions::{Entity as Subscription, SubscriptionCategory},
};

View File

@ -1,6 +1,6 @@
use sea_orm::entity::prelude::*;
pub use super::_entities::bangumi::{self, ActiveModel, Entity, Model};
pub use super::entities::bangumi::*;
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

View File

@ -0,0 +1,14 @@
use sea_orm::prelude::*;
use url::Url;
pub use crate::models::entities::downloaders::*;
// Default hooks are sufficient; no custom save/delete behavior needed.
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl Model {
    /// Parse the stored `endpoint` column into a typed [`Url`].
    pub fn endpoint_url(&self) -> Result<Url, url::ParseError> {
        Url::parse(self.endpoint.as_str())
    }
}

View File

@ -1,9 +1,10 @@
use sea_orm::{prelude::*, ActiveValue, Condition, QuerySelect, QueryOrder};
use sea_orm::sea_query::OnConflict;
use sea_orm::{prelude::*, sea_query::OnConflict, ActiveValue, Condition, QueryOrder, QuerySelect};
use crate::models::_entities::downloads::*;
use crate::models::prelude::{SubscriptionCategory, subscriptions};
use crate::subscriptions::mikan::{MikanSubscriptionEngine, MikanSubscriptionItem};
pub use crate::models::entities::downloads::*;
use crate::{
models::subscriptions::{self, SubscriptionCategory},
subscriptions::mikan::{MikanSubscriptionEngine, MikanSubscriptionItem},
};
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
@ -17,8 +18,9 @@ impl ActiveModel {
status: ActiveValue::Set(DownloadStatus::Pending),
mime: ActiveValue::Set(DownloadMime::BitTorrent),
url: ActiveValue::Set(m.url),
all_size: ActiveValue::Set(m.content_length.unwrap_or_default()),
curr_size: ActiveValue::Set(0),
curr_size: ActiveValue::Set(m.content_length.as_ref().map(|_| 0)),
all_size: ActiveValue::Set(m.content_length),
homepage: ActiveValue::Set(m.homepage),
..Default::default()
}
}
@ -32,8 +34,8 @@ impl Model {
match &item.category {
SubscriptionCategory::Mikan => {
let items =
MikanSubscriptionEngine::subscription_items_from_rss_url(&item.source_url).
await?;
MikanSubscriptionEngine::subscription_items_from_rss_url(&item.source_url)
.await?;
let all_items = items.collect::<Vec<_>>();
let last_old_id = {
@ -42,23 +44,21 @@ impl Model {
.column(Column::Id)
.order_by_desc(Column::Id)
.filter(Column::SubscriptionId.eq(item.id))
.one(db).await?
}.map(|i| i.id);
.one(db)
.await?
}
.map(|i| i.id);
if all_items.is_empty() {
return Ok(vec![]);
}
let new_items = all_items.into_iter().map(|i| {
ActiveModel::from_mikan_subscription_item(i, item.id)
});
let new_items = all_items
.into_iter()
.map(|i| ActiveModel::from_mikan_subscription_item(i, item.id));
let insert_result = Entity::insert_many(new_items)
.on_conflict(
OnConflict::column(Column::Url)
.do_nothing()
.to_owned()
)
.on_conflict(OnConflict::column(Column::Url).do_nothing().to_owned())
.exec(db)
.await?;
@ -71,9 +71,7 @@ impl Model {
.add(Column::Id.lte(insert_result.last_insert_id));
if let Some(last_old_id) = last_old_id {
cond = cond.add(
Column::Id.gt(last_old_id)
)
cond = cond.add(Column::Id.gt(last_old_id))
}
cond

View File

@ -1,8 +1,12 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2
use sea_orm::entity::prelude::*;
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct BangumiFilter {
pub name: Option<Vec<String>>,
pub group: Option<Vec<String>>,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "bangumi")]
pub struct Model {
@ -12,6 +16,19 @@ pub struct Model {
pub id: i32,
pub display_name: String,
pub subscription_id: i32,
pub official_title: String,
pub season: i32,
pub season_raw: Option<String>,
pub group_name: Option<String>,
pub resolution: Option<String>,
pub source: Option<String>,
pub filter: Option<BangumiFilter>,
pub subtitle: Option<String>,
pub rss_link: Option<String>,
pub poster_link: Option<String>,
pub rule_name: Option<String>,
pub sub_path: Option<String>,
pub deleted: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@ -0,0 +1,45 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// Supported downloader backends, stored as the Postgres enum
/// `downloader_type` (values serialized in snake_case).
#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "downloader_type")]
#[serde(rename_all = "snake_case")]
pub enum DownloaderCategory {
    // Stored/serialized as "qbittorrent".
    #[sea_orm(string_value = "qbittorrent")]
    QBittorrent,
}
/// Row of the `downloaders` table: connection details for one external
/// download client owned by a subscriber.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloaders")]
pub struct Model {
    #[sea_orm(column_type = "Timestamp")]
    pub created_at: DateTime,
    #[sea_orm(column_type = "Timestamp")]
    pub updated_at: DateTime,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Which client this row describes (currently only qBittorrent).
    pub category: DownloaderCategory,
    // Client endpoint as a raw string; see `Model::endpoint_url` for parsing.
    pub endpoint: String,
    pub password: String,
    pub username: String,
    // Owning subscriber (FK to `subscribers.id`).
    pub subscriber_id: i32,
    // Root save path used for downloads on the client host.
    pub download_path: String,
}
/// Entity relations: each downloader belongs to exactly one subscriber.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id"
    )]
    Subscriber,
}

impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}

View File

@ -2,7 +2,7 @@ use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_status")]
#[serde(rename_all = "snake_case")]
@ -22,7 +22,7 @@ pub enum DownloadStatus {
}
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_mime")]
pub enum DownloadMime {
@ -47,8 +47,9 @@ pub struct Model {
pub status: DownloadStatus,
pub mime: DownloadMime,
pub url: String,
pub all_size: u64,
pub curr_size: u64,
pub all_size: Option<u64>,
pub curr_size: Option<u64>,
pub homepage: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@ -1,8 +1,25 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2
use sea_orm::entity::prelude::*;
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct EpisodeFileMeta {
pub media_path: String,
pub group: Option<String>,
pub title: String,
pub season: i32,
pub episode_index: Option<i32>,
pub extension: String,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubtitleFileMeta {
pub episode_file_meta: EpisodeFileMeta,
pub extension: String,
pub lang: Option<String>,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "episodes")]
pub struct Model {

View File

@ -1,9 +1,7 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.4
pub mod prelude;
pub mod bangumi;
pub mod downloads;
pub mod episodes;
pub mod subscribers;
pub mod subscriptions;
pub mod downloaders;

View File

@ -1,8 +1,13 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2
use sea_orm::entity::prelude::*;
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscriberBangumiConfig {
pub leading_group_tag: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscribers")]
pub struct Model {
@ -13,12 +18,20 @@ pub struct Model {
#[sea_orm(unique)]
pub pid: String,
pub display_name: String,
pub downloader_id: Option<i32>,
pub bangumi_conf: SubscriberBangumiConfig,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(
belongs_to = "super::downloaders::Entity",
from = "Column::DownloaderId",
to = "super::downloaders::Column::Id"
)]
Downloader,
}
impl Related<super::subscriptions::Entity> for Entity {
@ -26,3 +39,9 @@ impl Related<super::subscriptions::Entity> for Entity {
Relation::Subscription.def()
}
}
impl Related<super::downloaders::Entity> for Entity {
fn to() -> RelationDef {
Relation::Downloader.def()
}
}

View File

@ -1,15 +1,13 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {

View File

@ -1,6 +1,6 @@
use sea_orm::entity::prelude::*;
pub use super::_entities::episodes::{self, ActiveModel, Entity, Model};
pub use super::entities::episodes::*;
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

View File

@ -1,8 +1,9 @@
pub mod _entities;
pub mod bangumi;
pub mod downloaders;
pub mod downloads;
pub mod entities;
pub mod episodes;
pub mod notifications;
pub mod prelude;
pub mod subscribers;
pub mod subscriptions;
pub use _entities::prelude;

View File

@ -0,0 +1,9 @@
use serde::{Deserialize, Serialize};
/// Payload for a bangumi-update notification message.
// NOTE(review): fields are private and no constructor is visible in this
// file — confirm the intended construction path with the caller.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Notification {
    official_title: String,
    season: i32,
    // Number of episodes covered by this notification.
    episode_size: u32,
    poster_url: Option<String>,
}

View File

@ -0,0 +1,8 @@
pub use super::{
bangumi::{self, Entity as Bangumi},
downloaders::{self, DownloaderCategory, Entity as Downloader},
downloads::{self, DownloadMime, DownloadStatus, Entity as Download},
episodes::{self, Entity as Episode},
subscribers::{self, Entity as Subscriber},
subscriptions::{self, Entity as Subscription, SubscriptionCategory},
};

View File

@ -2,7 +2,7 @@ use loco_rs::model::{ModelError, ModelResult};
use sea_orm::{entity::prelude::*, ActiveValue, TransactionTrait};
use serde::{Deserialize, Serialize};
pub use super::_entities::subscribers::{self, ActiveModel, Entity, Model};
pub use super::entities::subscribers::*;
pub const ROOT_SUBSCRIBER: &str = "konobangu";
@ -36,7 +36,7 @@ impl Model {
pub async fn find_by_pid(db: &DatabaseConnection, pid: &str) -> ModelResult<Self> {
let parse_uuid = Uuid::parse_str(pid).map_err(|e| ModelError::Any(e.into()))?;
let subscriber = Entity::find()
.filter(subscribers::Column::Pid.eq(parse_uuid))
.filter(Column::Pid.eq(parse_uuid))
.one(db)
.await?;
subscriber.ok_or_else(|| ModelError::EntityNotFound)

View File

@ -1,6 +1,6 @@
use sea_orm::{entity::prelude::*, ActiveValue};
pub use super::_entities::subscriptions::{self, *};
pub use super::entities::subscriptions::{self, *};
use crate::subscriptions::defs::RssCreateDto;
#[async_trait::async_trait]
@ -27,7 +27,7 @@ impl Model {
pub async fn toggle_iters(
db: &DatabaseConnection,
ids: impl Iterator<Item=i32>,
ids: impl Iterator<Item = i32>,
enabled: bool,
) -> eyre::Result<()> {
Entity::update_many()
@ -40,7 +40,7 @@ impl Model {
pub async fn delete_iters(
db: &DatabaseConnection,
ids: impl Iterator<Item=i32>,
ids: impl Iterator<Item = i32>,
) -> eyre::Result<()> {
Entity::delete_many()
.filter(Column::Id.is_in(ids))

View File

@ -0,0 +1,7 @@
use crate::parsers::errors::ParseError;
/// Parse a bangumi season string such as `"2"` into its numeric value.
///
/// Generalized to tolerate surrounding whitespace (e.g. `" 2 "`), which raw
/// feed fields can carry; plain digit strings behave exactly as before.
///
/// # Errors
/// Returns `ParseError::BangumiSeasonError` when the trimmed string is not a
/// valid `i32`.
pub fn parse_bangumi_season(season_str: &str) -> Result<i32, ParseError> {
    season_str
        .trim()
        .parse::<i32>()
        .map_err(ParseError::BangumiSeasonError)
}

View File

@ -0,0 +1,73 @@
use std::collections::{BTreeMap, HashMap};
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use maplit::{btreemap, hashmap};
use regex::Regex;
// Canonical subtitle language codes shared by the parsers.
const LANG_ZH_TW: &str = "zh-tw";
const LANG_ZH: &str = "zh";
const LANG_EN: &str = "en";
const LANG_JP: &str = "jp";

lazy_static! {
    /// Captures "S<digits>" or "Season <digits>"; group 2 is the number.
    // Fix: the previous pattern escaped the pipe (`S\|`), which turned the
    // alternation into a literal "|" and could never match e.g. "S2".
    pub static ref SEASON_REGEX: Regex =
        Regex::new(r"(S|[Ss]eason\s+)(\d+)").expect("Invalid regex");
    /// Ordered fallback rules for splitting a raw torrent title into
    /// (prefix, episode number, suffix).
    pub static ref TORRENT_PRASE_RULE_REGS: Vec<FancyRegex> = vec![
        FancyRegex::new(
            r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)"
        )
        .unwrap(),
        FancyRegex::new(
            r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)"
        )
        .unwrap(),
        FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
        FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
        FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
    ];
    /// Filename markers used to detect a subtitle language, keyed by
    /// canonical code.
    // NOTE(review): the single-character CJK markers were stripped to empty
    // strings by an encoding loss; they are reconstructed here ("繁" =
    // traditional, "简" = simplified, "英" English, "日" Japanese) — confirm
    // against the original rule set.
    pub static ref SUBTITLE_LANG: BTreeMap<&'static str, Vec<&'static str>> = {
        btreemap! {
            LANG_ZH_TW => vec!["tc", "cht", "繁", "zh-tw"],
            LANG_ZH => vec!["sc", "chs", "简", "zh", "zh-cn"],
            LANG_EN => vec!["en", "eng", "英"],
            LANG_JP => vec!["jp", "jpn", "日"],
        }
    };
    /// All bracket characters (ASCII and full-width) in one class.
    pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
    /// One or more ASCII digits.
    pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
    /// Chinese numerals (plain and financial forms) mapped to their values.
    // Fix: the keys had been stripped to empty "" literals (duplicate keys
    // silently overwriting each other); reconstructed to match the character
    // class of `ZH_NUM_RE` below, in the same order.
    pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
        hashmap! {
            "〇" => 0,
            "一" => 1,
            "二" => 2,
            "三" => 3,
            "四" => 4,
            "五" => 5,
            "六" => 6,
            "七" => 7,
            "八" => 8,
            "九" => 9,
            "十" => 10,
            "廿" => 20,
            "百" => 100,
            "千" => 1000,
            "零" => 0,
            "壹" => 1,
            "贰" => 2,
            "叁" => 3,
            "肆" => 4,
            "伍" => 5,
            "陆" => 6,
            "柒" => 7,
            "捌" => 8,
            "玖" => 9,
            "拾" => 10,
            "念" => 20,
            "佰" => 100,
            "仟" => 1000,
        }
    };
    /// Any single Chinese numeral recognized by `ZH_NUM_MAP`.
    pub static ref ZH_NUM_RE: Regex =
        Regex::new(r"[〇一二三四五六七八九十廿百千零壹贰叁肆伍陆柒捌玖拾念佰仟]").unwrap();
}

View File

@ -0,0 +1,9 @@
use thiserror::Error;
/// Errors produced by the parsers module, with automatic `From` conversions
/// via `#[from]`.
#[derive(Error, Debug)]
pub enum ParseError {
    // Wraps the std integer-parse failure from season strings.
    #[error("Parse bangumi season error: {0}")]
    BangumiSeasonError(#[from] std::num::ParseIntError),
    // Wraps URL parse failures for file links.
    #[error("Parse file url error: {0}")]
    FileUrlError(#[from] url::ParseError),
}

View File

@ -0,0 +1,34 @@
use lightningcss::declaration::DeclarationBlock;
/// Return the first DOM node matching `selector`, or `None` when the
/// selector is invalid or matches nothing.
pub fn query_selector_first<'a>(
    dom: &'a tl::VDom<'a>,
    selector: &'a str,
    parser: &'a tl::Parser<'a>,
) -> Option<&'a tl::Node<'a>> {
    let mut matches = dom.query_selector(selector)?;
    let handle = matches.next()?;
    handle.get(parser)
}
/// Like [`query_selector_first`], but only yields the node when it is an
/// HTML tag (text/comment nodes are dropped).
pub fn query_selector_first_tag<'a>(
    dom: &'a tl::VDom<'a>,
    selector: &'a str,
    parser: &'a tl::Parser<'a>,
) -> Option<&'a tl::HTMLTag<'a>> {
    match query_selector_first(dom, selector, parser) {
        Some(node) => node.as_tag(),
        None => None,
    }
}
/// Parse an inline `style="…"` attribute into CSS declarations; `None` on
/// any parse failure.
pub fn parse_style_attr(style_attr: &str) -> Option<DeclarationBlock> {
    DeclarationBlock::parse_string(style_attr, Default::default()).ok()
}
pub fn get_tag_style<'a>(tag: &'a tl::HTMLTag<'a>) -> Option<DeclarationBlock<'a>> {
let style_attr = tag
.attributes()
.get("style")
.flatten()
.and_then(|s| std::str::from_utf8(s.as_bytes()).ok());
style_attr.and_then(parse_style_attr)
}

View File

@ -0,0 +1,127 @@
use bytes::Bytes;
use html_escape::decode_html_entities;
use lazy_static::lazy_static;
use lightningcss::{properties::Property, values::image::Image};
use regex::Regex;
use url::Url;
use crate::{
downloaders::{html::download_html, image::download_image},
parsers::html_parser::{get_tag_style, query_selector_first_tag},
};
/// Episode metadata scraped from a Mikan episode homepage.
pub struct MikanEpisodeMeta {
    /// The episode page URL this meta was scraped from.
    pub homepage: Url,
    /// Absolute poster image URL (query string stripped), when found.
    pub poster_src: Option<Url>,
    /// Downloaded poster bytes; `None` when there is no poster or the
    /// download failed.
    pub poster_data: Option<Bytes>,
    /// Bangumi title with any trailing "第…季" season suffix removed.
    pub official_title: String,
}
lazy_static! {
    /// Matches the "第…季" (season) fragment so it can be stripped from the
    /// page title.
    pub static ref MIKAN_TITLE_SEASON: Regex = Regex::new("第.*季").unwrap();
}
/// Scrape a Mikan episode homepage for its poster and official title.
///
/// Returns `Ok(None)` when no usable title is found on the page; network or
/// HTML-parse failures are propagated as errors. The poster is best-effort:
/// a failed poster download leaves `poster_data` as `None`.
pub async fn parse_episode_meta_from_mikan_homepage(
    url: Url,
) -> eyre::Result<Option<MikanEpisodeMeta>> {
    // Origin (scheme + host) used to absolutize relative poster paths.
    let url_host = url.origin().unicode_serialization();
    let content = download_html(url.as_str()).await?;
    let dom = tl::parse(&content, tl::ParserOptions::default())?;
    let parser = dom.parser();
    let poster_node = query_selector_first_tag(&dom, r"div.bangumi-poster", parser);
    let official_title_node = query_selector_first_tag(&dom, r"p.bangumi-title", parser);
    let mut poster_src = None;
    // The poster URL lives in the node's inline CSS, either as
    // `background-image` or inside a `background` shorthand.
    if let Some(style) = poster_node.and_then(get_tag_style) {
        for (prop, _) in style.iter() {
            match prop {
                Property::BackgroundImage(images) => {
                    if let Some(Image::Url(path)) = images.first() {
                        if let Ok(url) = Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                        {
                            poster_src = Some(url);
                        }
                    }
                }
                Property::Background(backgrounds) => {
                    for bg in backgrounds {
                        if let Image::Url(path) = &bg.image {
                            if let Ok(url) =
                                Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                            {
                                poster_src = Some(url);
                            }
                        }
                    }
                }
                _ => {}
            }
        }
    };
    // Drop any query string (e.g. cache-busting params) from the poster URL.
    poster_src = poster_src.map(|mut p| {
        p.set_query(None);
        p
    });
    // Best-effort download: errors degrade to None rather than failing.
    let poster_data = if let Some(p) = poster_src.as_ref() {
        download_image(p.as_str()).await.ok()
    } else {
        None
    };
    let meta = official_title_node
        .map(|s| s.inner_text(parser))
        .and_then(|official_title| {
            // Strip the season suffix and HTML entities; an empty result
            // means the page had no usable title.
            let title = MIKAN_TITLE_SEASON
                .replace(&decode_html_entities(&official_title), "")
                .trim()
                .to_string();
            if title.is_empty() {
                None
            } else {
                Some(title)
            }
        })
        .map(|title| MikanEpisodeMeta {
            homepage: url,
            poster_src,
            official_title: title,
            poster_data,
        });
    Ok(meta)
}
#[cfg(test)]
mod test {
    use url::Url;

    use crate::parsers::mikan_ep_parser::parse_episode_meta_from_mikan_homepage;

    // NOTE(review): this is a live-network test against mikanani.me — it
    // will fail offline or if the page content changes.
    #[tokio::test]
    async fn test_parse_mikan() {
        let test_fn = async || -> eyre::Result<()> {
            let url_str =
                "https://mikanani.me/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a";
            let url = Url::parse(url_str)?;

            if let Some(ep_meta) = parse_episode_meta_from_mikan_homepage(url.clone()).await? {
                assert_eq!(ep_meta.homepage, url);
                assert_eq!(
                    ep_meta.poster_src,
                    Some(Url::parse(
                        "https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
                    )?)
                );
                assert_eq!(ep_meta.official_title, "葬送的芙莉莲");
                let u8_data = ep_meta.poster_data.expect("should have poster data");
                // JPEG files begin with the FF D8 FF E0 magic bytes.
                assert!(
                    u8_data.starts_with(&[255, 216, 255, 224]),
                    "should start with valid jpeg data magic number"
                );
            } else {
                panic!("can not find mikan episode title")
            }
            Ok(())
        };

        test_fn().await.expect("test parse mikan failed");
    }
}

View File

@ -0,0 +1,8 @@
pub mod bangumi_parser;
pub mod defs;
pub mod errors;
pub mod html_parser;
pub mod mikan_ep_parser;
pub mod raw_ep_parser;
pub mod title_parser;
pub mod torrent_parser;

View File

@ -0,0 +1,394 @@
use std::borrow::Cow;
use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
use super::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE};
lazy_static! {
    // Splits a raw title into (leading group/name, episode marker, rest).
    static ref TITLE_RE: Regex = Regex::new(
        r#"(.*|\[.*])( -? \d+|\[\d+]|\[\d+.?[vV]\d]|第\d+[话話集]|\[第?\d+[话話集]]|\[\d+.?END]|[Ee][Pp]?\d+)(.*)"#
    ).unwrap();
    // Common video resolutions appearing in titles.
    static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
    // Known release sources / platforms.
    static ref SOURCE_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|Web|WebRip").unwrap();
    // Subtitle markers (CJK and common abbreviations).
    static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
    // Anything that is not word-like, CJK, kana, or a hyphen — used to strip prefixes.
    static ref PREFIX_RE: Regex =
        Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
    // ASCII square brackets, used to split off the fansub group segment.
    static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
    static ref MAIN_TITLE_PREFIX_PROCESS_RE1: Regex = Regex::new(r"新番|月?番").unwrap();
    static ref MAIN_TITLE_PREFIX_PROCESS_RE2: Regex = Regex::new(r"[港澳台]{1,3}地区").unwrap();
    // Season markers in both English (S2 / Season 2 / 2nd) and Chinese (第…季/期).
    static ref SEASON_EXTRACT_SEASON_ALL_RE: Regex = Regex::new(r"S\d{1,2}|Season \d{1,2}|[第].[季期]|1st|2nd|3rd|\d{1,2}th").unwrap();
    static ref SEASON_EXTRACT_SEASON_EN_PREFIX_RE: Regex = Regex::new(r"Season|S").unwrap();
    static ref SEASON_EXTRACT_SEASON_EN_NTH_RE: Regex = Regex::new(r"1st|2nd|3rd|\d{1,2}th").unwrap();
    static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_RE: Regex = Regex::new(r"[第 ].*[季期(部分)]|部分").unwrap();
    static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE: Regex = Regex::new(r"[第季期 ]").unwrap();
    static ref NAME_EXTRACT_REMOVE_RE: Regex = Regex::new(r"[(]仅限[港澳台]{1,3}地区[)]").unwrap();
    // Delimiters separating the multilingual names inside one title.
    static ref NAME_EXTRACT_SPLIT_RE: Regex = Regex::new(r"/|\s{2}|-\s{2}").unwrap();
    // Script-based language detectors for each name candidate.
    static ref NAME_JP_TEST: Regex = Regex::new(r"[\p{scx=Hira}\p{scx=Kana}]{2,}").unwrap();
    static ref NAME_ZH_TEST: Regex = Regex::new(r"[\p{scx=Han}]{2,}").unwrap();
    static ref NAME_EN_TEST: Regex = Regex::new(r"[a-zA-Z]{3,}").unwrap();
    static ref TAGS_EXTRACT_SPLIT_RE: Regex = Regex::new(r"[\[\]()]").unwrap();
    // Container-format suffixes to remove from subtitle tags.
    static ref CLEAR_SUB_RE: Regex = Regex::new(r"_MP4|_MKV").unwrap();
}
/// Episode metadata parsed out of a raw release filename by
/// `parse_episode_meta_from_raw_name`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct RawEpisodeMeta {
    // English title, possibly still carrying a season suffix (e.g. "Kingdom S5").
    name_en: Option<String>,
    // English title with the season marker stripped.
    name_en_no_season: Option<String>,
    // Japanese title (kana detected), with/without season marker.
    name_jp: Option<String>,
    name_jp_no_season: Option<String>,
    // Chinese title (Han chars detected), with/without season marker.
    name_zh: Option<String>,
    name_zh_no_season: Option<String>,
    // Season number; defaults to 1 when the title carries no season marker.
    season: i32,
    // Season marker exactly as it appeared (e.g. "2nd", "第五季"), if any.
    season_raw: Option<String>,
    // Episode number; 0 when it could not be parsed.
    episode_index: i32,
    // Subtitle tag (e.g. "简繁内封字幕"), cleaned of "_MP4"/"_MKV" suffixes.
    sub: Option<String>,
    // Source tag (e.g. "WebRip", "Baha").
    source: Option<String>,
    // Fansub group name taken from the leading bracket.
    fansub: Option<String>,
    // Resolution tag (e.g. "1080p").
    resolution: Option<String>,
}
/// Returns the fansub group name: the content of the first `[...]` bracket
/// pair in `raw_name`, if any.
fn extract_fansub(raw_name: &str) -> Option<&str> {
    // Splitting "[group] rest" on bracket chars yields ["", "group", " rest"],
    // so the second fragment is the bracketed group name.
    EN_BRACKET_SPLIT_RE.splitn(raw_name, 3).nth(1)
}
/// Normalizes fullwidth CJK brackets `【`/`】` to ASCII `[`/`]` so the
/// bracket-based regexes can treat both styles uniformly.
fn replace_ch_bracket_to_en(raw_name: &str) -> String {
    raw_name
        .chars()
        .map(|c| match c {
            '【' => '[',
            '】' => ']',
            other => other,
        })
        .collect()
}
fn title_body_prefix_process(title_body: &str, fansub: Option<&str>) -> eyre::Result<String> {
let raw_without_fansub = if let Some(fansub) = fansub {
let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
fan_sub_re.replace_all(title_body, "")
} else {
Cow::Borrowed(title_body)
};
let raw_with_prefix_replaced = PREFIX_RE.replace_all(&raw_without_fansub, "/");
let mut arg_group = raw_with_prefix_replaced
.split('/')
.map(|s| s.trim())
.collect::<Vec<_>>();
if arg_group.len() == 1 {
arg_group = arg_group.first_mut().unwrap().split(' ').collect();
}
let mut raw = raw_without_fansub.to_string();
for arg in arg_group.iter() {
if (arg_group.len() <= 5 && MAIN_TITLE_PREFIX_PROCESS_RE1.is_match(arg))
|| (MAIN_TITLE_PREFIX_PROCESS_RE2.is_match(arg))
{
let sub = Regex::new(&format!(".{arg}."))?;
raw = sub.replace_all(&raw, "").to_string();
}
}
Ok(raw.to_string())
}
/// Extracts the season number from a title body.
///
/// Returns `(title with season markers removed, raw season marker, season)`.
/// Season defaults to 1 when no marker is present. Only the first season
/// marker found is considered (`find` returns the first match).
fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
    let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
    let seasons = SEASON_EXTRACT_SEASON_ALL_RE
        .find(&name_and_season)
        .into_iter()
        .map(|s| s.as_str())
        .collect_vec();
    if seasons.is_empty() {
        return (title_body.to_string(), None, 1);
    }
    let mut season = 1;
    let mut season_raw = None;
    let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");
    for s in seasons {
        season_raw = Some(s);
        // "S2" / "Season 2" style: strip the English prefix from the marker
        // and parse the remaining digits. (The previous code applied the
        // replacement to the prefix match itself — "S"/"Season" — which never
        // yielded a number, so this branch could never fire.)
        if SEASON_EXTRACT_SEASON_EN_PREFIX_RE.is_match(s) {
            if let Ok(parsed) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE
                .replace_all(s, "")
                .trim()
                .parse::<i32>()
            {
                season = parsed;
                break;
            }
        }
        // Ordinal style ("1st", "2nd", "12th"): read the leading digits.
        if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s) {
            if let Some(parsed) = DIGIT_1PLUS_REG
                .find(m.as_str())
                .and_then(|s| s.as_str().parse::<i32>().ok())
            {
                season = parsed;
                break;
            }
        }
        // Chinese style ("第2季", "第二季"): strip the CJK wrappers, try an
        // arabic number first, then a Chinese numeral via the lookup table.
        if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
            if let Ok(parsed) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
                .replace(m.as_str(), "")
                .parse::<i32>()
            {
                season = parsed;
                break;
            }
            if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
                season = ZH_NUM_MAP[m.as_str()];
                break;
            }
        }
    }
    (name.to_string(), season_raw.map(|s| s.to_string()), season)
}
/// Splits a title-body name section into `(English, Chinese, Japanese)`
/// names, detecting the language of each "/"-separated segment.
fn extract_name_from_title_body_name_section(
    title_body_name_section: &str,
) -> (Option<String>, Option<String>, Option<String>) {
    let mut name_en = None;
    let mut name_zh = None;
    let mut name_jp = None;
    let replaced = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
    let trimed = replaced.trim();
    let mut split = NAME_EXTRACT_SPLIT_RE
        .split(trimed)
        .map(|s| s.trim())
        .filter(|s| !s.is_empty())
        .map(|s| s.to_string())
        .collect_vec();
    // A single segment may still mix languages (e.g. "弱角友崎同学 2nd STAGE"):
    // try to peel a Chinese name off either end of the space-separated words.
    if split.len() == 1 {
        let mut split_space = split[0].split(' ').collect_vec();
        let mut search_indices = vec![0];
        if split_space.len() > 1 {
            // Also check the LAST word. (Was `search_indices.len() - 1`,
            // which pushed index 0 twice and never looked at the tail.)
            search_indices.push(split_space.len() - 1);
        }
        for i in search_indices {
            if NAME_ZH_TEST.is_match(split_space[i]) {
                let chs = split_space[i];
                split_space.remove(i);
                split = vec![chs.to_string(), split_space.join(" ")];
                break;
            }
        }
    }
    // First segment matching each language test wins.
    for item in split {
        if NAME_JP_TEST.is_match(&item) && name_jp.is_none() {
            name_jp = Some(item);
        } else if NAME_ZH_TEST.is_match(&item) && name_zh.is_none() {
            name_zh = Some(item);
        } else if NAME_EN_TEST.is_match(&item) && name_en.is_none() {
            name_en = Some(item);
        }
    }
    (name_en, name_zh, name_jp)
}
/// Parses the first run of digits in the episode marker as the episode
/// number; `None` when there are no digits or they overflow `i32`.
fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
    let digits = DIGIT_1PLUS_REG.find(title_episode)?;
    digits.as_str().parse().ok()
}
/// Strips container-format suffixes ("_MP4"/"_MKV") from a subtitle tag,
/// passing `None` straight through.
fn clear_sub(sub: Option<String>) -> Option<String> {
    let value = sub?;
    Some(CLEAR_SUB_RE.replace_all(&value, "").into_owned())
}
/// Tokenizes the trailing tag section and classifies each token, returning
/// `(subtitle tag, resolution, source)`. When several tokens match the same
/// category, the last one wins.
fn extract_tags_from_title_extra(
    title_extra: &str,
) -> (Option<String>, Option<String>, Option<String>) {
    let normalized = TAGS_EXTRACT_SPLIT_RE.replace_all(title_extra, " ");
    let mut sub = None;
    let mut resolution = None;
    let mut source = None;
    for token in normalized
        .split(' ')
        .map(str::trim)
        .filter(|t| !t.is_empty())
    {
        if SUB_RE.is_match(token) {
            sub = Some(token.to_string());
        } else if RESOLUTION_RE.is_match(token) {
            resolution = Some(token.to_string());
        } else if SOURCE_RE.is_match(token) {
            source = Some(token.to_string());
        }
    }
    (clear_sub(sub), resolution, source)
}
/// Parses episode metadata out of a raw release filename.
///
/// # Errors
/// Returns an error when the filename does not match the expected
/// `<body> <episode> <extra>` title pattern, or when a dynamically built
/// regex fails to compile during prefix processing.
pub fn parse_episode_meta_from_raw_name(s: &str) -> eyre::Result<RawEpisodeMeta> {
    let trimmed = s.trim();
    let normalized = replace_ch_bracket_to_en(trimmed);
    let fansub = extract_fansub(&normalized);
    let captures = TITLE_RE
        .captures(&normalized)
        .ok_or_else(|| eyre::eyre!("Can not parse episode meta from raw filename"))?;
    // All three groups are non-optional in TITLE_RE, so a successful match
    // always carries them.
    let capture = |idx: usize| {
        captures
            .get(idx)
            .map(|m| m.as_str().trim())
            .unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"))
    };
    let title_body = title_body_prefix_process(capture(1), fansub)?;
    let title_episode = capture(2);
    let title_extra = capture(3);
    let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
    let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
    let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
        extract_name_from_title_body_name_section(&name_without_season);
    let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(0);
    let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
    Ok(RawEpisodeMeta {
        name_en,
        name_en_no_season,
        name_jp,
        name_jp_no_season,
        name_zh,
        name_zh_no_season,
        season,
        season_raw,
        episode_index,
        sub,
        source,
        fansub: fansub.map(|s| s.to_string()),
        resolution,
    })
}
#[cfg(test)]
mod tests {
    use super::{parse_episode_meta_from_raw_name, RawEpisodeMeta};

    /// A raw filename (`source`) paired with the JSON serialization of the
    /// `RawEpisodeMeta` it should parse into (`expected`).
    struct TestCase {
        source: &'static str,
        expected: &'static str,
    }

    // Table-driven test: each case exercises one title shape (versioned
    // episodes, pure-English titles, multiple Chinese titles, mixed
    // languages, ordinal and Chinese season markers).
    #[test]
    fn test_parse_episode_meta_from_raw_name() {
        let test_cases = vec![
            TestCase {
                // ep+version case
                source: r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
                expected: r#"{
                    "name_en": "Shin no Nakama 2nd",
                    "name_en_no_season": "Shin no Nakama",
                    "name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
                    "name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
                    "season": 2,
                    "season_raw": "2nd",
                    "episode_index": 8,
                    "sub": "简繁内封字幕",
                    "source": "WebRip",
                    "fansub": "LoliHouse",
                    "resolution": "1080p"
                }"#,
            },
            TestCase {
                // pure english title case
                source: r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
                expected: r#"{
                    "name_en": "THE MARGINAL SERVICE",
                    "name_en_no_season": "THE MARGINAL SERVICE",
                    "season": 1,
                    "episode_index": 8,
                    "sub": "简繁内封字幕",
                    "source": "WebRip",
                    "fansub": "动漫国字幕组&LoliHouse",
                    "resolution": "1080p"
                }"#,
            },
            TestCase {
                // two zh titles case
                source: r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
                expected: r#"{
                    "name_en": "Nozomanu Fushi no Boukensha",
                    "name_en_no_season": "Nozomanu Fushi no Boukensha",
                    "name_zh": "事与愿违的不死冒险者",
                    "name_zh_no_season": "事与愿违的不死冒险者",
                    "season": 1,
                    "season_raw": null,
                    "episode_index": 1,
                    "sub": "简繁内封字幕",
                    "source": "WebRip",
                    "fansub": "LoliHouse",
                    "resolution": "1080p"
                }"#,
            },
            TestCase {
                // en+zh+jp case
                source: r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
                expected: r#"{
                    "name_en": "Pon no Michi",
                    "name_jp": "ぽんのみち",
                    "name_zh": "碰之道",
                    "name_en_no_season": "Pon no Michi",
                    "name_jp_no_season": "ぽんのみち",
                    "name_zh_no_season": "碰之道",
                    "season": 1,
                    "season_raw": null,
                    "episode_index": 7,
                    "sub": "简繁日内封字幕",
                    "source": "WebRip",
                    "fansub": "喵萌奶茶屋&LoliHouse",
                    "resolution": "1080p"
                }"#,
            },
            TestCase {
                // season nth case
                source: r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
                expected: r#"{
                    "name_en": "Yowai Character Tomozakikun",
                    "name_en_no_season": "Yowai Character Tomozakikun",
                    "name_zh": "弱角友崎同学 2nd STAGE",
                    "name_zh_no_season": "弱角友崎同学",
                    "season": 2,
                    "season_raw": "2nd",
                    "episode_index": 9,
                    "sub": "CHT",
                    "source": "Baha",
                    "fansub": "ANi",
                    "resolution": "1080P"
                }"#,
            },
            TestCase {
                // season en + season zh case
                source: r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
                expected: r#"{
                    "name_en": "Kingdom S5",
                    "name_en_no_season": "Kingdom",
                    "name_zh": "王者天下 第五季",
                    "name_zh_no_season": "王者天下",
                    "season": 5,
                    "season_raw": "第五季",
                    "episode_index": 7,
                    "sub": "简繁外挂字幕",
                    "source": "WebRip",
                    "fansub": "豌豆字幕组&LoliHouse",
                    "resolution": "1080p"
                }"#,
            },
        ];
        for case in test_cases {
            let expected: Option<RawEpisodeMeta> = serde_json::from_str(case.expected).unwrap();
            let found = parse_episode_meta_from_raw_name(case.source).ok();
            // Pretty-print both sides before asserting so a mismatch is
            // readable in the test output.
            if expected != found {
                println!(
                    "expected {} and found {} are not equal",
                    serde_json::to_string_pretty(&expected).unwrap(),
                    serde_json::to_string_pretty(&found).unwrap()
                )
            }
            assert_eq!(expected, found);
        }
    }
}

View File

@ -0,0 +1,9 @@
use crate::path::VFSPath;
/// Placeholder entry point for torrent-title parsing.
// NOTE(review): empty stub — the parameters are accepted but nothing is
// parsed or returned yet; presumably to be wired to the torrent_parser
// module. Confirm intended return type before implementing.
pub fn parse_torrent_title(
    torrent_path: VFSPath<'_>,
    torrent_name: Option<&str>,
    season: Option<i32>,
    file_type: &str,
) {
}

View File

@ -0,0 +1,90 @@
use super::defs::{
BRACKETS_REG, DIGIT_1PLUS_REG, SEASON_REGEX, SUBTITLE_LANG, TORRENT_PRASE_RULE_REGS,
};
use crate::path::VFSPath;
/// Returns the final component (file name) of `path`, or `""` when the path
/// has none — a thin wrapper over `VFSPath::basename`.
pub fn get_path_basename<'a>(path: &'a VFSPath) -> &'a str {
    path.basename()
}
/// Splits a `[group] title` string into its fansub group and title parts.
///
/// Returns `(None, …)` when there is no bracketed group, or when the part
/// right after the first bracket is numeric (then the bracket is likely an
/// episode/tag marker rather than a group name).
pub fn get_group(group_and_title: &str) -> (Option<&str>, &str) {
    let parts = BRACKETS_REG
        .split(group_and_title)
        .map(|s| s.trim())
        .filter(|s| !s.is_empty())
        .collect::<Vec<_>>();
    match parts[..] {
        // Input consisted solely of brackets/whitespace; the previous code
        // indexed `parts[0]` here and panicked.
        [] => (None, group_and_title),
        [only] => (None, only),
        [first, second, ..] => {
            if DIGIT_1PLUS_REG.is_match(second) {
                (None, group_and_title)
            } else {
                (Some(first), second)
            }
        }
    }
}
/// Removes the season marker from `season_and_title` and returns the cleaned
/// title together with the season number (1 when no marker is present).
pub fn get_season_and_title(season_and_title: &str) -> (String, i32) {
    let stripped = SEASON_REGEX.replace(season_and_title, "");
    let title = stripped.trim().to_string();
    let season = SEASON_REGEX
        .captures(season_and_title)
        .map(|caps| {
            caps.get(2)
                .unwrap_or_else(|| unreachable!("season regex should have 2 groups"))
                .as_str()
                .parse::<i32>()
                .unwrap_or_else(|_| unreachable!("season should be a number"))
        })
        .unwrap_or(1);
    (title, season)
}
/// Finds the subtitle language whose keyword list matches `subtitle_name`
/// (case-insensitively); `None` when nothing matches.
pub fn get_subtitle_lang(subtitle_name: &str) -> Option<&'static str> {
    let lowered = subtitle_name.to_lowercase();
    for (lang, keywords) in SUBTITLE_LANG.iter() {
        if keywords.iter().any(|kw| lowered.contains(kw)) {
            return Some(lang);
        }
    }
    None
}
/// Work-in-progress torrent parser: tries each rule regex in
/// `TORRENT_PRASE_RULE_REGS` against `torrent_name` (falling back to the
/// path's basename) and extracts group / title / season / episode.
// NOTE(review): the matched arm ends in `todo!()` — the extracted values are
// computed but never returned, and `file_type` is unused. Incomplete.
pub fn parse_torrent<'a>(
    torrent_path: &'a VFSPath<'a>,
    torrent_name: Option<&str>,
    season: Option<i32>,
    file_type: Option<&str>,
) {
    let media_name = get_path_basename(torrent_path);
    for rule in TORRENT_PRASE_RULE_REGS.iter() {
        // Prefer the explicit torrent name; fall back to the file's basename.
        let match_obj = if let Some(torrent_name) = torrent_name {
            rule.captures(torrent_name)
        } else {
            rule.captures(media_name)
        };
        if let Ok(Some(match_obj)) = match_obj {
            let group_and_title = match_obj
                .get(1)
                .unwrap_or_else(|| unreachable!("should have 1 group"))
                .as_str();
            let (group, title) = get_group(group_and_title);
            // An explicitly supplied season overrides the one parsed from
            // the title.
            let season_and_title = get_season_and_title(title);
            let season = season.unwrap_or(season_and_title.1);
            let title = season_and_title.0;
            let episode = match_obj
                .get(2)
                .unwrap_or_else(|| unreachable!("should have 2 group"))
                .as_str()
                .parse::<i32>()
                .unwrap_or_else(|_| unreachable!("episode should be a number"));
            let extension = media_name;
            todo!()
        }
    }
}

View File

@ -0,0 +1,4 @@
pub mod torrent_path;
pub mod vfs_path;
pub use vfs_path::{VFSComponent, VFSComponents, VFSPath, VFSPathBuf, VFSSubPath, VFSSubPathBuf};

View File

@ -0,0 +1,79 @@
use std::collections::HashSet;
use crate::{
downloaders::defs::Torrent,
models::{bangumi, subscribers},
parsers::{bangumi_parser::parse_bangumi_season, defs::SEASON_REGEX},
path::{VFSPath, VFSSubPathBuf},
};
/// Partitions a torrent's files into `(media files, subtitle files)` by
/// file extension; anything else is ignored.
pub fn check_files(info: &Torrent) -> (Vec<VFSSubPathBuf>, Vec<VFSSubPathBuf>) {
    let mut media_list = vec![];
    let mut subtitle_list = vec![];
    for f in info.iter_files() {
        let file_name = VFSSubPathBuf::from(f.get_name());
        // `extension()` yields the extension *without* the leading dot
        // ("mp4", not ".mp4") — matching the std::path::Path convention —
        // so the arms must not include the dot; the previous ".mp4"-style
        // arms could never match.
        let extension = file_name.extension().unwrap_or_default().to_lowercase();
        match extension.as_str() {
            "mp4" | "mkv" => {
                media_list.push(file_name);
            }
            "ass" | "srt" => subtitle_list.push(file_name),
            _ => {}
        }
    }
    (media_list, subtitle_list)
}
/// Recovers `(bangumi folder name, season)` from a download save path.
///
/// Each component of `save_path` matching `SEASON_REGEX` sets the season;
/// any component that is not part of `downloader_path` is taken as the
/// bangumi folder name (the last such component wins). Returns `None`
/// unless both were found.
pub fn path_to_bangumi<'a>(
    save_path: VFSPath<'a>,
    downloader_path: VFSPath<'a>,
) -> Option<(&'a str, i32)> {
    // Components of the downloader root, used to exclude them from the
    // candidate bangumi-name components below.
    let downloader_parts = downloader_path
        .components()
        .map(|s| s.as_str())
        .collect::<HashSet<_>>();
    let mut season = None;
    let mut bangumi_name = None;
    for part in save_path.components().map(|s| s.as_str()) {
        if let Some(match_result) = SEASON_REGEX.captures(part) {
            season = Some(
                parse_bangumi_season(
                    match_result
                        .get(2)
                        .unwrap_or_else(|| unreachable!("must have a season"))
                        .as_str(),
                )
                .unwrap_or_else(|e| unreachable!("{}", e.to_string())),
            );
        } else if !downloader_parts.contains(part) {
            bangumi_name = Some(part);
        }
    }
    match (season, bangumi_name) {
        (Some(season), Some(bangumi_name)) => Some((bangumi_name, season)),
        _ => None,
    }
}
/// Number of components in the path's sub path, used as a nesting depth.
pub fn file_depth(path: &VFSPath<'_>) -> usize {
    path.components().count()
}
/// Treats a file at depth <= 2 (at most one directory deep) as an episode
/// file.
pub fn is_ep(path: &VFSPath<'_>) -> bool {
    file_depth(path) <= 2
}
/// Builds the storage sub-path `<official_title>/Season <n>` for a bangumi.
pub fn gen_bangumi_sub_path(data: &bangumi::Model) -> VFSSubPathBuf {
    VFSSubPathBuf::from(data.official_title.to_string()).join(format!("Season {}", data.season))
}
/// Builds the downloader rule name for a bangumi: `"<title> S<season>"`,
/// prefixed with `[<group>]` when the subscriber enables `leading_group_tag`
/// and the bangumi has a group name.
pub fn rule_name(bgm: &bangumi::Model, conf: &subscribers::SubscriberBangumiConfig) -> String {
    match (conf.leading_group_tag, &bgm.group_name) {
        (Some(true), Some(group_name)) => {
            format!("[{}] {} S{}", group_name, bgm.official_title, bgm.season)
        }
        _ => format!("{} S{}", bgm.official_title, bgm.season),
    }
}

View File

@ -0,0 +1,113 @@
use std::path::{Path, PathBuf};
use bytes::Buf;
use lazy_static::lazy_static;
pub use uni_path::{Path as VFSSubPath, PathBuf as VFSSubPathBuf};
use crate::parsers::errors::ParseError;
// Sentinel returned by `VFSPath::basename`/`extname` when the path has no
// file name or extension.
const VFS_EMPTY_STR: &str = "";
lazy_static! {
    // The virtual-filesystem sub-path root ("/") and a borrowed view of it,
    // used as the fallback parent in `VFSPath::dirname`.
    pub static ref VFS_SUB_ROOT_BUF: VFSSubPathBuf = VFSSubPathBuf::from("/");
    pub static ref VFS_SUB_ROOT: &'static VFSSubPath = &VFS_SUB_ROOT_BUF.as_path();
}
// Re-exported component iterator/item types from `uni_path`.
pub type VFSComponents<'a> = uni_path::Components<'a>;
pub type VFSComponent<'a> = uni_path::Component<'a>;
pub struct VFSPath<'a> {
pub root: &'a str,
pub sub: &'a VFSSubPath,
}
impl<'a> VFSPath<'a> {
    /// Creates a borrowed VFS path from a root string and a sub path.
    pub fn new(root: &'a str, sub: &'a VFSSubPath) -> VFSPath<'a> {
        Self { root, sub }
    }
    /// Final component of the sub path, if any.
    pub fn file_name(&self) -> Option<&str> {
        self.sub.file_name()
    }
    /// Parent of the sub path under the same root; `None` at the root.
    pub fn parent(&self) -> Option<VFSPath> {
        self.sub.parent().map(|p| Self::new(self.root, p))
    }
    /// Like `parent`, but falls back to the VFS root instead of `None`.
    // NOTE(review): `&'a self` ties the borrow of `self` to the path's
    // lifetime, which is stricter than callers may expect — confirm before
    // relaxing.
    pub fn dirname(&'a self) -> VFSPath<'a> {
        self.parent()
            .unwrap_or_else(|| Self::new(self.root, &VFS_SUB_ROOT))
    }
    /// File name, or `""` when there is none.
    pub fn basename(&self) -> &str {
        self.file_name().unwrap_or(VFS_EMPTY_STR)
    }
    /// Iterator over the components of the sub path.
    pub fn components(&self) -> VFSComponents<'a> {
        self.sub.components()
    }
    /// Returns an owned path with `path` appended to the sub path.
    pub fn join<P: AsRef<VFSSubPath>>(&self, path: P) -> VFSPathBuf {
        VFSPathBuf::new(self.root, self.sub.join(path))
    }
    /// Extension of the file name (without the leading dot, assuming
    /// `uni_path` mirrors `std::path` semantics), if any.
    pub fn extension(&self) -> Option<&str> {
        self.sub.extension()
    }
    /// Extension, or `""` when there is none.
    pub fn extname(&self) -> &str {
        self.extension().unwrap_or_default()
    }
    /// Converts to a real `std::path::PathBuf` by joining `sub` onto `root`.
    pub fn to_std_path_buf(&self) -> PathBuf {
        PathBuf::from(self.root).join(self.sub.as_str())
    }
}
/// An owned virtual-filesystem path: a real `root` prefix plus an owned
/// `sub` path relative to it; the owned counterpart of `VFSPath`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct VFSPathBuf {
    pub root: String,
    pub sub: VFSSubPathBuf,
}
impl VFSPathBuf {
    /// Creates an owned VFS path from a root and a sub path.
    pub fn new<R: Into<String>, S: Into<VFSSubPathBuf>>(root: R, sub: S) -> Self {
        Self {
            root: root.into(),
            sub: sub.into(),
        }
    }
    /// Creates a path pointing at the VFS root ("/") of `root`.
    // NOTE(review): this never returns `Err` in the visible code; the
    // `Result` appears reserved for future root validation — confirm.
    pub fn from_root(root: &str) -> Result<Self, ParseError> {
        Ok(Self {
            root: root.to_string(),
            sub: VFS_SUB_ROOT_BUF.clone(),
        })
    }
    /// Borrowed view of this path.
    pub fn as_path(&self) -> VFSPath {
        VFSPath::new(&self.root as &str, self.sub.as_path())
    }
    /// Appends `path` to the sub path.
    pub fn push<P: AsRef<VFSSubPath>>(&mut self, path: P) {
        self.sub.push(path);
    }
    /// Removes the last component of the sub path; delegates to the sub
    /// path's `pop` for the return value.
    pub fn pop(&mut self) -> bool {
        self.sub.pop()
    }
    /// Replaces the extension of the file name.
    pub fn set_extension<S: AsRef<str>>(&mut self, ext: S) {
        self.sub.set_extension(ext);
    }
    /// Replaces the file name.
    pub fn set_file_name<S: AsRef<str>>(&mut self, file_name: S) {
        self.sub.set_file_name(file_name);
    }
}
impl Into<PathBuf> for VFSPathBuf {
fn into(self) -> PathBuf {
let root = self.root;
PathBuf::from(root).join(self.sub.as_str())
}
}

View File

@ -1,31 +1,37 @@
use crate::downloader::bytes::download_bytes;
use crate::downloader::defs::BITTORRENT_MIME_TYPE;
use chrono::DateTime;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone)]
use crate::downloaders::{bytes::download_bytes, defs::BITTORRENT_MIME_TYPE};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriptionItem {
pub title: String,
pub home_page: Option<String>,
pub homepage: Option<String>,
pub url: String,
pub content_length: Option<u64>,
pub mime: String,
pub pub_date: Option<String>,
pub pub_date: Option<i64>,
}
impl MikanSubscriptionItem {
pub fn from_rss_item(item: rss::Item) -> Option<Self> {
let mime_match = item.enclosure()
let mime_match = item
.enclosure()
.map(|x| x.mime_type == BITTORRENT_MIME_TYPE)
.unwrap_or_default();
if mime_match {
let enclosure = item.enclosure.unwrap();
let content_length = enclosure.length.parse().ok();
Some(MikanSubscriptionItem {
title: item.title.unwrap_or_default(),
home_page: item.link,
homepage: item.link,
url: enclosure.url,
content_length,
content_length: enclosure.length.parse().ok(),
mime: enclosure.mime_type,
pub_date: item.pub_date,
pub_date: item
.pub_date
.and_then(|s| DateTime::parse_from_rfc2822(&s).ok())
.map(|s| s.timestamp_millis()),
})
} else {
None
@ -37,12 +43,42 @@ pub struct MikanSubscriptionEngine;
impl MikanSubscriptionEngine {
pub async fn subscription_items_from_rss_url(
url: &str
) -> eyre::Result<impl Iterator<Item=MikanSubscriptionItem>> {
url: &str,
) -> eyre::Result<impl Iterator<Item = MikanSubscriptionItem>> {
let bytes = download_bytes(url).await?;
let channel = rss::Channel::read_from(&bytes[..])?;
Ok(channel.items.into_iter().flat_map(MikanSubscriptionItem::from_rss_item))
Ok(channel
.items
.into_iter()
.flat_map(MikanSubscriptionItem::from_rss_item))
}
}
#[cfg(test)]
mod tests {
    use crate::downloaders::defs::BITTORRENT_MIME_TYPE;

    /// End-to-end check against the live mikanani.me RSS feed: items should
    /// carry the bittorrent MIME type, an episode homepage, and the expected
    /// series title. Requires network access.
    #[tokio::test]
    pub async fn test_mikan_subscription_items_from_rss_url() {
        let url = "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370";
        let items = super::MikanSubscriptionEngine::subscription_items_from_rss_url(url)
            .await
            .expect("should get subscription items from rss url")
            .collect::<Vec<_>>();
        let first_sub_item = items
            .first()
            .expect("mikan subscriptions should have at least one subs");
        assert_eq!(first_sub_item.mime, BITTORRENT_MIME_TYPE);
        let homepage = first_sub_item
            .homepage
            .as_ref()
            .expect("mikan subscription item should have home page");
        assert!(homepage.starts_with("https://mikanani.me/Home/Episode"));
        let name = first_sub_item.title.as_str();
        assert!(name.contains("葬送的芙莉莲"));
    }
}

View File

@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize};
use crate::models::_entities::subscribers;
use crate::models::entities::subscribers;
#[derive(Debug, Deserialize, Serialize)]
pub struct CurrentResponse {

View File

@ -1,2 +1 @@
mod notes;
mod subscribers;

View File

@ -16,8 +16,7 @@
use std::collections::BTreeMap;
use loco_rs::{db, prelude::*};
use migration::Migrator;
use recorder::app::App;
use recorder::{app::App, migrations::Migrator};
#[allow(clippy::module_name_repetitions)]
pub struct SeedData;