refactor: split modules

This commit is contained in:
master 2025-04-08 02:12:06 +08:00
parent 376d2b28d3
commit 2686fa1d76
94 changed files with 1125 additions and 580 deletions

View File

@ -2,7 +2,7 @@
recorder-playground = "run -p recorder --example playground -- --environment development" recorder-playground = "run -p recorder --example playground -- --environment development"
[build] [build]
rustflags = ["-Zthreads=8"] rustflags = ["-Zthreads=8", "--cfg", "feature=\"testcontainers\""]
[target.x86_64-unknown-linux-gnu] [target.x86_64-unknown-linux-gnu]
linker = "clang" linker = "clang"

View File

@ -5,6 +5,7 @@
"unifiedjs.vscode-mdx", "unifiedjs.vscode-mdx",
"mikestead.dotenv", "mikestead.dotenv",
"christian-kohler.npm-intellisense", "christian-kohler.npm-intellisense",
"skellock.just" "skellock.just",
"zerotaskx.rust-extension-pack"
] ]
} }

417
Cargo.lock generated
View File

@ -14,19 +14,13 @@ dependencies = [
[[package]] [[package]]
name = "addr2line" name = "addr2line"
version = "0.21.0" version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
dependencies = [ dependencies = [
"gimli", "gimli",
] ]
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]] [[package]]
name = "adler2" name = "adler2"
version = "2.0.0" version = "2.0.0"
@ -201,6 +195,33 @@ dependencies = [
"const_panic", "const_panic",
] ]
[[package]]
name = "async-backtrace"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dcb391558246d27a13f195c1e3a53eda422270fdd452bd57a5aa9c1da1bb198"
dependencies = [
"async-backtrace-attributes",
"dashmap 5.5.3",
"futures",
"loom 0.5.6",
"once_cell",
"pin-project-lite",
"rustc-hash 1.1.0",
"static_assertions",
]
[[package]]
name = "async-backtrace-attributes"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "affbba0d438add06462a0371997575927bc05052f7ec486e7a4ca405c956c3d7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.100",
]
[[package]] [[package]]
name = "async-compression" name = "async-compression"
version = "0.4.22" version = "0.4.22"
@ -238,7 +259,7 @@ dependencies = [
"futures-util", "futures-util",
"handlebars", "handlebars",
"http", "http",
"indexmap 2.8.0", "indexmap 2.9.0",
"lru", "lru",
"mime", "mime",
"multer", "multer",
@ -307,7 +328,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0cde74de18e3a00c5dd5cfa002ab6f532e1a06c2a79ee6671e2fc353b400b92" checksum = "d0cde74de18e3a00c5dd5cfa002ab6f532e1a06c2a79ee6671e2fc353b400b92"
dependencies = [ dependencies = [
"bytes", "bytes",
"indexmap 2.8.0", "indexmap 2.9.0",
"serde", "serde",
"serde_json", "serde_json",
] ]
@ -375,7 +396,7 @@ dependencies = [
"derive_builder", "derive_builder",
"diligent-date-parser", "diligent-date-parser",
"never", "never",
"quick-xml 0.37.3", "quick-xml 0.37.4",
] ]
[[package]] [[package]]
@ -406,6 +427,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de45108900e1f9b9242f7f2e254aa3e2c029c921c258fe9e6b4217eeebd54288" checksum = "de45108900e1f9b9242f7f2e254aa3e2c029c921c258fe9e6b4217eeebd54288"
dependencies = [ dependencies = [
"axum-core", "axum-core",
"axum-macros",
"base64 0.22.1", "base64 0.22.1",
"bytes", "bytes",
"form_urlencoded", "form_urlencoded",
@ -478,6 +500,17 @@ dependencies = [
"tower-service", "tower-service",
] ]
[[package]]
name = "axum-macros"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.100",
]
[[package]] [[package]]
name = "backoff" name = "backoff"
version = "0.4.0" version = "0.4.0"
@ -502,17 +535,17 @@ dependencies = [
[[package]] [[package]]
name = "backtrace" name = "backtrace"
version = "0.3.71" version = "0.3.74"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
dependencies = [ dependencies = [
"addr2line", "addr2line",
"cc",
"cfg-if", "cfg-if",
"libc", "libc",
"miniz_oxide 0.7.4", "miniz_oxide",
"object", "object",
"rustc-demangle", "rustc-demangle",
"windows-targets 0.52.6",
] ]
[[package]] [[package]]
@ -550,9 +583,9 @@ checksum = "89e25b6adfb930f02d1981565a6e5d9c547ac15a96606256d3b59040e5cd4ca3"
[[package]] [[package]]
name = "bigdecimal" name = "bigdecimal"
version = "0.4.7" version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f31f3af01c5c65a07985c804d3366560e6fa7883d640a122819b14ec327482c" checksum = "1a22f228ab7a1b23027ccc6c350b72868017af7ea8356fbdf19f8d991c690013"
dependencies = [ dependencies = [
"autocfg", "autocfg",
"libm", "libm",
@ -805,9 +838,9 @@ dependencies = [
[[package]] [[package]]
name = "cc" name = "cc"
version = "1.2.17" version = "1.2.18"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a" checksum = "525046617d8376e3db1deffb079e91cef90a89fc3ca5c185bbf8c9ecdd15cd5c"
dependencies = [ dependencies = [
"jobserver", "jobserver",
"libc", "libc",
@ -875,9 +908,9 @@ dependencies = [
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.5.34" version = "4.5.35"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e958897981290da2a852763fe9cdb89cd36977a5d729023127095fa94d95e2ff" checksum = "d8aa86934b44c19c50f87cc2790e19f54f7a67aedb64101c2e1a2e5ecfb73944"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
"clap_derive", "clap_derive",
@ -885,9 +918,9 @@ dependencies = [
[[package]] [[package]]
name = "clap_builder" name = "clap_builder"
version = "4.5.34" version = "4.5.35"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83b0f35019843db2160b5bb19ae09b4e6411ac33fc6a712003c33e03090e2489" checksum = "2414dbb2dd0695280da6ea9261e327479e9d37b0630f6b53ba2a11c60c679fd9"
dependencies = [ dependencies = [
"anstream", "anstream",
"anstyle", "anstyle",
@ -1352,9 +1385,9 @@ dependencies = [
[[package]] [[package]]
name = "deranged" name = "deranged"
version = "0.4.1" version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28cfac68e08048ae1883171632c2aef3ebc555621ae56fbccce1cbf22dd7f058" checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
dependencies = [ dependencies = [
"powerfmt", "powerfmt",
"serde", "serde",
@ -1493,6 +1526,38 @@ version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
[[package]]
name = "downloader"
version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
"bytes",
"chrono",
"fetch",
"futures",
"itertools 0.14.0",
"librqbit",
"librqbit-core",
"merge-struct",
"qbit-rs",
"quirks_path",
"reqwest",
"serde",
"serde-value",
"serde_json",
"snafu",
"testcontainers",
"testcontainers-ext",
"testcontainers-modules",
"testing-torrents",
"tokio",
"tracing",
"tracing-subscriber",
"url",
"util",
]
[[package]] [[package]]
name = "dtoa" name = "dtoa"
version = "1.0.10" version = "1.0.10"
@ -1626,9 +1691,9 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]] [[package]]
name = "errno" name = "errno"
version = "0.3.10" version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys 0.59.0", "windows-sys 0.59.0",
@ -1692,6 +1757,33 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "fetch"
version = "0.1.0"
dependencies = [
"async-trait",
"axum",
"axum-extra",
"bytes",
"cookie",
"fastrand",
"http-cache",
"http-cache-reqwest",
"http-cache-semantics",
"lazy_static",
"leaky-bucket",
"moka",
"reqwest",
"reqwest-middleware",
"reqwest-retry",
"reqwest-tracing",
"serde",
"serde_json",
"serde_with",
"snafu",
"url",
]
[[package]] [[package]]
name = "ff" name = "ff"
version = "0.13.1" version = "0.13.1"
@ -1738,12 +1830,12 @@ dependencies = [
[[package]] [[package]]
name = "flate2" name = "flate2"
version = "1.1.0" version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece"
dependencies = [ dependencies = [
"crc32fast", "crc32fast",
"miniz_oxide 0.8.5", "miniz_oxide",
] ]
[[package]] [[package]]
@ -1793,6 +1885,15 @@ dependencies = [
"percent-encoding", "percent-encoding",
] ]
[[package]]
name = "fsevent-sys"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "funty" name = "funty"
version = "2.0.0" version = "2.0.0"
@ -1924,6 +2025,19 @@ dependencies = [
"byteorder", "byteorder",
] ]
[[package]]
name = "generator"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cc16584ff22b460a382b7feec54b23d2908d858152e5739a120b949293bd74e"
dependencies = [
"cc",
"libc",
"log",
"rustversion",
"windows 0.48.0",
]
[[package]] [[package]]
name = "generator" name = "generator"
version = "0.8.4" version = "0.8.4"
@ -1995,9 +2109,9 @@ dependencies = [
[[package]] [[package]]
name = "gimli" name = "gimli"
version = "0.28.1" version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
[[package]] [[package]]
name = "glob" name = "glob"
@ -2087,7 +2201,7 @@ dependencies = [
"futures-core", "futures-core",
"futures-sink", "futures-sink",
"http", "http",
"indexmap 2.8.0", "indexmap 2.9.0",
"slab", "slab",
"tokio", "tokio",
"tokio-util", "tokio-util",
@ -2428,9 +2542,9 @@ dependencies = [
[[package]] [[package]]
name = "hyper-util" name = "hyper-util"
version = "0.1.10" version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2"
dependencies = [ dependencies = [
"bytes", "bytes",
"futures-channel", "futures-channel",
@ -2438,6 +2552,7 @@ dependencies = [
"http", "http",
"http-body", "http-body",
"hyper", "hyper",
"libc",
"pin-project-lite", "pin-project-lite",
"socket2", "socket2",
"tokio", "tokio",
@ -2462,9 +2577,9 @@ dependencies = [
[[package]] [[package]]
name = "iana-time-zone" name = "iana-time-zone"
version = "0.1.62" version = "0.1.63"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2fd658b06e56721792c5df4475705b6cda790e9298d19d2f8af083457bcd127" checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8"
dependencies = [ dependencies = [
"android_system_properties", "android_system_properties",
"core-foundation-sys", "core-foundation-sys",
@ -2472,7 +2587,7 @@ dependencies = [
"js-sys", "js-sys",
"log", "log",
"wasm-bindgen", "wasm-bindgen",
"windows-core 0.52.0", "windows-core 0.61.0",
] ]
[[package]] [[package]]
@ -2668,9 +2783,9 @@ dependencies = [
[[package]] [[package]]
name = "indexmap" name = "indexmap"
version = "2.8.0" version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058" checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
dependencies = [ dependencies = [
"equivalent", "equivalent",
"hashbrown 0.15.2", "hashbrown 0.15.2",
@ -2694,6 +2809,26 @@ version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb"
[[package]]
name = "inotify"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdd168d97690d0b8c412d6b6c10360277f4d7ee495c5d0d5d5fe0854923255cc"
dependencies = [
"bitflags 1.3.2",
"inotify-sys",
"libc",
]
[[package]]
name = "inotify-sys"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "insta" name = "insta"
version = "1.42.2" version = "1.42.2"
@ -2776,10 +2911,11 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]] [[package]]
name = "jobserver" name = "jobserver"
version = "0.1.32" version = "0.1.33"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a"
dependencies = [ dependencies = [
"getrandom 0.3.2",
"libc", "libc",
] ]
@ -2967,6 +3103,26 @@ dependencies = [
"mutate_once", "mutate_once",
] ]
[[package]]
name = "kqueue"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c"
dependencies = [
"kqueue-sys",
"libc",
]
[[package]]
name = "kqueue-sys"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b"
dependencies = [
"bitflags 1.3.2",
"libc",
]
[[package]] [[package]]
name = "lazy_static" name = "lazy_static"
version = "1.5.0" version = "1.5.0"
@ -3007,7 +3163,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [ dependencies = [
"bitflags 2.9.0", "bitflags 2.9.0",
"libc", "libc",
"redox_syscall 0.5.10", "redox_syscall 0.5.11",
] ]
[[package]] [[package]]
@ -3018,6 +3174,7 @@ checksum = "75b2acdb059277c92c39628b9b78b7cb4fe1148877150d2d63e21c52b35b1d08"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap", "arc-swap",
"async-backtrace",
"async-stream", "async-stream",
"async-trait", "async-trait",
"backoff", "backoff",
@ -3043,6 +3200,7 @@ dependencies = [
"librqbit-upnp", "librqbit-upnp",
"memmap2 0.9.5", "memmap2 0.9.5",
"mime_guess", "mime_guess",
"notify",
"parking_lot 0.12.3", "parking_lot 0.12.3",
"rand 0.8.5", "rand 0.8.5",
"regex", "regex",
@ -3137,7 +3295,7 @@ dependencies = [
"dashmap 6.1.0", "dashmap 6.1.0",
"futures", "futures",
"hex 0.4.3", "hex 0.4.3",
"indexmap 2.8.0", "indexmap 2.9.0",
"leaky-bucket", "leaky-bucket",
"librqbit-bencode", "librqbit-bencode",
"librqbit-clone-to-owned", "librqbit-clone-to-owned",
@ -3214,7 +3372,7 @@ dependencies = [
"futures", "futures",
"httparse", "httparse",
"network-interface", "network-interface",
"quick-xml 0.37.3", "quick-xml 0.37.4",
"reqwest", "reqwest",
"serde", "serde",
"tokio", "tokio",
@ -3247,7 +3405,7 @@ dependencies = [
"dashmap 5.5.3", "dashmap 5.5.3",
"data-encoding", "data-encoding",
"getrandom 0.2.15", "getrandom 0.2.15",
"indexmap 2.8.0", "indexmap 2.9.0",
"itertools 0.10.5", "itertools 0.10.5",
"lazy_static", "lazy_static",
"lightningcss-derive", "lightningcss-derive",
@ -3312,6 +3470,19 @@ version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "loom"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff50ecb28bb86013e935fb6683ab1f6d3a20016f123c76fd4c27470076ac30f5"
dependencies = [
"cfg-if",
"generator 0.7.5",
"scoped-tls",
"tracing",
"tracing-subscriber",
]
[[package]] [[package]]
name = "loom" name = "loom"
version = "0.7.2" version = "0.7.2"
@ -3319,7 +3490,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"generator", "generator 0.8.4",
"scoped-tls", "scoped-tls",
"tracing", "tracing",
"tracing-subscriber", "tracing-subscriber",
@ -3477,18 +3648,9 @@ dependencies = [
[[package]] [[package]]
name = "miniz_oxide" name = "miniz_oxide"
version = "0.7.4" version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" checksum = "ff70ce3e48ae43fa075863cef62e8b43b71a4f2382229920e0df362592919430"
dependencies = [
"adler",
]
[[package]]
name = "miniz_oxide"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
dependencies = [ dependencies = [
"adler2", "adler2",
] ]
@ -3500,6 +3662,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd"
dependencies = [ dependencies = [
"libc", "libc",
"log",
"wasi 0.11.0+wasi-snapshot-preview1", "wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
@ -3546,7 +3709,7 @@ dependencies = [
"crossbeam-utils", "crossbeam-utils",
"event-listener", "event-listener",
"futures-util", "futures-util",
"loom", "loom 0.7.2",
"parking_lot 0.12.3", "parking_lot 0.12.3",
"portable-atomic", "portable-atomic",
"rustc_version", "rustc_version",
@ -3641,6 +3804,34 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21"
[[package]]
name = "notify"
version = "7.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c533b4c39709f9ba5005d8002048266593c1cfaf3c5f0739d5b8ab0c6c504009"
dependencies = [
"bitflags 2.9.0",
"filetime",
"fsevent-sys",
"inotify",
"kqueue",
"libc",
"log",
"mio",
"notify-types",
"walkdir",
"windows-sys 0.52.0",
]
[[package]]
name = "notify-types"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "585d3cb5e12e01aed9e8a1f70d5c6b5e86fe2a6e48fc8cd0b3e0b8df6f6eb174"
dependencies = [
"instant",
]
[[package]] [[package]]
name = "nu-ansi-term" name = "nu-ansi-term"
version = "0.46.0" version = "0.46.0"
@ -3770,9 +3961,9 @@ dependencies = [
[[package]] [[package]]
name = "object" name = "object"
version = "0.32.2" version = "0.36.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
dependencies = [ dependencies = [
"memchr", "memchr",
] ]
@ -3843,9 +4034,9 @@ dependencies = [
[[package]] [[package]]
name = "openssl" name = "openssl"
version = "0.10.71" version = "0.10.72"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e14130c6a98cd258fdcb0fb6d744152343ff729cbfcb28c656a9d12b999fbcd" checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da"
dependencies = [ dependencies = [
"bitflags 2.9.0", "bitflags 2.9.0",
"cfg-if", "cfg-if",
@ -3875,9 +4066,9 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
[[package]] [[package]]
name = "openssl-sys" name = "openssl-sys"
version = "0.9.106" version = "0.9.107"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8bb61ea9811cc39e3c2069f40b8b8e2e70d8569b361f879786cc7ed48b777cdd" checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07"
dependencies = [ dependencies = [
"cc", "cc",
"libc", "libc",
@ -3981,7 +4172,7 @@ dependencies = [
"phf", "phf",
"phf_codegen", "phf_codegen",
"precomputed-hash", "precomputed-hash",
"rustc-hash", "rustc-hash 2.1.1",
"smallvec", "smallvec",
] ]
@ -4048,7 +4239,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"redox_syscall 0.5.10", "redox_syscall 0.5.11",
"smallvec", "smallvec",
"windows-targets 0.52.6", "windows-targets 0.52.6",
] ]
@ -4484,9 +4675,9 @@ dependencies = [
[[package]] [[package]]
name = "quick-xml" name = "quick-xml"
version = "0.37.3" version = "0.37.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf763ab1c7a3aa408be466efc86efe35ed1bd3dd74173ed39d6b0d0a6f0ba148" checksum = "a4ce8c88de324ff838700f36fb6ab86c96df0e3c4ab6ef3a9b2044465cce1369"
dependencies = [ dependencies = [
"encoding_rs", "encoding_rs",
"memchr", "memchr",
@ -4504,7 +4695,7 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
"quinn-proto", "quinn-proto",
"quinn-udp", "quinn-udp",
"rustc-hash", "rustc-hash 2.1.1",
"rustls", "rustls",
"socket2", "socket2",
"thiserror 2.0.12", "thiserror 2.0.12",
@ -4523,7 +4714,7 @@ dependencies = [
"getrandom 0.3.2", "getrandom 0.3.2",
"rand 0.9.0", "rand 0.9.0",
"ring", "ring",
"rustc-hash", "rustc-hash 2.1.1",
"rustls", "rustls",
"rustls-pki-types", "rustls-pki-types",
"slab", "slab",
@ -4674,7 +4865,6 @@ dependencies = [
name = "recorder" name = "recorder"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"async-graphql", "async-graphql",
"async-graphql-axum", "async-graphql-axum",
"async-stream", "async-stream",
@ -4685,43 +4875,30 @@ dependencies = [
"bytes", "bytes",
"chrono", "chrono",
"clap", "clap",
"cookie",
"ctor", "ctor",
"dotenv", "dotenv",
"downloader",
"fancy-regex", "fancy-regex",
"fastrand", "fetch",
"figment", "figment",
"futures", "futures",
"futures-util",
"html-escape", "html-escape",
"http", "http",
"http-cache",
"http-cache-reqwest",
"http-cache-semantics",
"insta", "insta",
"ipnetwork", "ipnetwork",
"itertools 0.14.0", "itertools 0.14.0",
"jwt-authorizer", "jwt-authorizer",
"lazy_static", "lazy_static",
"leaky-bucket",
"librqbit",
"librqbit-core",
"lightningcss", "lightningcss",
"log", "log",
"maplit", "maplit",
"merge-struct",
"mockito", "mockito",
"moka", "moka",
"once_cell", "once_cell",
"opendal", "opendal",
"openidconnect", "openidconnect",
"qbit-rs",
"quirks_path", "quirks_path",
"regex", "regex",
"reqwest",
"reqwest-middleware",
"reqwest-retry",
"reqwest-tracing",
"rss", "rss",
"rstest", "rstest",
"scraper", "scraper",
@ -4729,7 +4906,6 @@ dependencies = [
"sea-orm-migration", "sea-orm-migration",
"seaography", "seaography",
"serde", "serde",
"serde-value",
"serde_json", "serde_json",
"serde_variant", "serde_variant",
"serde_with", "serde_with",
@ -4740,7 +4916,6 @@ dependencies = [
"testcontainers", "testcontainers",
"testcontainers-ext", "testcontainers-ext",
"testcontainers-modules", "testcontainers-modules",
"testing-torrents",
"tokio", "tokio",
"tower", "tower",
"tower-http", "tower-http",
@ -4749,6 +4924,7 @@ dependencies = [
"tracing-subscriber", "tracing-subscriber",
"typed-builder 0.21.0", "typed-builder 0.21.0",
"url", "url",
"util",
"uuid", "uuid",
"zune-image", "zune-image",
] ]
@ -4773,9 +4949,9 @@ dependencies = [
[[package]] [[package]]
name = "redox_syscall" name = "redox_syscall"
version = "0.5.10" version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1" checksum = "d2f103c6d277498fbceb16e84d317e2a400f160f46904d5f5410848c829511a3"
dependencies = [ dependencies = [
"bitflags 2.9.0", "bitflags 2.9.0",
] ]
@ -5070,7 +5246,7 @@ dependencies = [
"atom_syndication", "atom_syndication",
"derive_builder", "derive_builder",
"never", "never",
"quick-xml 0.37.3", "quick-xml 0.37.4",
] ]
[[package]] [[package]]
@ -5125,6 +5301,12 @@ version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]] [[package]]
name = "rustc-hash" name = "rustc-hash"
version = "2.1.1" version = "2.1.1"
@ -5142,9 +5324,9 @@ dependencies = [
[[package]] [[package]]
name = "rustix" name = "rustix"
version = "1.0.3" version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e56a18552996ac8d29ecc3b190b4fdbb2d91ca4ec396de7bbffaf43f3d637e96" checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf"
dependencies = [ dependencies = [
"bitflags 2.9.0", "bitflags 2.9.0",
"errno", "errno",
@ -5649,7 +5831,7 @@ dependencies = [
"chrono", "chrono",
"hex 0.4.3", "hex 0.4.3",
"indexmap 1.9.3", "indexmap 1.9.3",
"indexmap 2.8.0", "indexmap 2.9.0",
"serde", "serde",
"serde_derive", "serde_derive",
"serde_json", "serde_json",
@ -5675,7 +5857,7 @@ version = "0.9.34+deprecated"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
dependencies = [ dependencies = [
"indexmap 2.8.0", "indexmap 2.9.0",
"itoa", "itoa",
"ryu", "ryu",
"serde", "serde",
@ -5859,9 +6041,9 @@ dependencies = [
[[package]] [[package]]
name = "smallvec" name = "smallvec"
version = "1.14.0" version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
dependencies = [ dependencies = [
"serde", "serde",
] ]
@ -5959,7 +6141,7 @@ dependencies = [
"futures-util", "futures-util",
"hashbrown 0.15.2", "hashbrown 0.15.2",
"hashlink", "hashlink",
"indexmap 2.8.0", "indexmap 2.9.0",
"log", "log",
"memchr", "memchr",
"once_cell", "once_cell",
@ -6555,9 +6737,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.44.1" version = "1.44.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f382da615b842244d4b8738c82ed1275e6c5dd90c459a30941cd07080b06c91a" checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48"
dependencies = [ dependencies = [
"backtrace", "backtrace",
"bytes", "bytes",
@ -6694,7 +6876,7 @@ version = "0.22.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474"
dependencies = [ dependencies = [
"indexmap 2.8.0", "indexmap 2.9.0",
"serde", "serde",
"serde_spanned", "serde_spanned",
"toml_datetime", "toml_datetime",
@ -7083,6 +7265,17 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "util"
version = "0.1.0"
dependencies = [
"anyhow",
"bytes",
"futures",
"quirks_path",
"snafu",
]
[[package]] [[package]]
name = "uuid" name = "uuid"
version = "1.16.0" version = "1.16.0"
@ -7291,7 +7484,7 @@ version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7"
dependencies = [ dependencies = [
"redox_syscall 0.5.10", "redox_syscall 0.5.11",
"wasite", "wasite",
] ]
@ -7326,6 +7519,15 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f"
dependencies = [
"windows-targets 0.48.5",
]
[[package]] [[package]]
name = "windows" name = "windows"
version = "0.58.0" version = "0.58.0"
@ -7358,15 +7560,6 @@ dependencies = [
"windows-core 0.61.0", "windows-core 0.61.0",
] ]
[[package]]
name = "windows-core"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
dependencies = [
"windows-targets 0.52.6",
]
[[package]] [[package]]
name = "windows-core" name = "windows-core"
version = "0.58.0" version = "0.58.0"

View File

@ -1,9 +1,52 @@
[workspace] [workspace]
members = ["apps/recorder", "packages/testing-torrents"] members = [
"packages/testing-torrents",
"packages/util",
"packages/fetch",
"packages/downloader",
"apps/recorder",
]
resolver = "2" resolver = "2"
[workspace.dependencies]
moka = "0.12"
futures = "0.3"
futures-util = "0.3"
quirks_path = "0.1"
snafu = { version = "0.8", features = ["futures"] }
testcontainers = { version = "0.23.3" }
testcontainers-modules = { version = "0.11.4" }
testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
serde = { version = "1", features = ["derive"] }
tokio = { version = "1", features = ["macros", "fs", "rt-multi-thread"] }
serde_json = "1"
async-trait = "0.1"
tracing = "0.1"
url = "2.5"
anyhow = "1"
itertools = "0.14"
chrono = "0.4"
bytes = "1"
serde_with = "3"
regex = "1.11"
lazy_static = "1.5"
axum = { version = "0.8.3", features = ["macros"] }
reqwest = { version = "0.12", default-features = false, features = [
"charset",
"http2",
"json",
"macos-system-configuration",
"rustls-tls",
"cookies",
] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
axum-extra = "0.10"
testing-torrents = { path = "./packages/testing-torrents" }
util = { path = "./packages/util" }
fetch = { path = "./packages/fetch" }
downloader = { path = "./packages/downloader" }
recorder = { path = "./apps/recorder" }
[patch.crates-io] [patch.crates-io]
jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" } jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
# [patch."https://github.com/lonelyhentxi/qbit.git"]
# qbit-rs = { path = "./patches/qbit-rs" }

View File

@ -19,17 +19,35 @@ testcontainers = [
"dep:testcontainers", "dep:testcontainers",
"dep:testcontainers-modules", "dep:testcontainers-modules",
"dep:testcontainers-ext", "dep:testcontainers-ext",
"dep:testing-torrents",
] ]
[dependencies] [dependencies]
serde = { workspace = true }
tokio = { workspace = true }
serde_json = { workspace = true }
async-trait = { workspace = true }
testcontainers = { workspace = true, optional = true }
testcontainers-modules = { workspace = true, optional = true }
testcontainers-ext = { workspace = true, optional = true, features = [
"tracing",
] }
tracing = { workspace = true }
axum = { workspace = true }
axum-extra = { workspace = true }
snafu = { workspace = true }
itertools = { workspace = true }
url = { workspace = true }
regex = { workspace = true }
lazy_static = { workspace = true }
quirks_path = { workspace = true }
futures = { workspace = true }
bytes = { workspace = true }
serde_with = { workspace = true }
moka = { workspace = true }
chrono = { workspace = true }
tracing-subscriber = { workspace = true }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
async-trait = "0.1.83"
tracing = "0.1"
chrono = "0.4"
sea-orm = { version = "1.1", features = [ sea-orm = { version = "1.1", features = [
"sqlx-sqlite", "sqlx-sqlite",
"sqlx-postgres", "sqlx-postgres",
@ -38,58 +56,25 @@ sea-orm = { version = "1.1", features = [
"debug-print", "debug-print",
] } ] }
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] } figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
axum = "0.8"
uuid = { version = "1.6.0", features = ["v4"] } uuid = { version = "1.6.0", features = ["v4"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio-rustls"] } sea-orm-migration = { version = "1.1", features = ["runtime-tokio-rustls"] }
reqwest = { version = "0.12", default-features = false, features = [
"charset",
"http2",
"json",
"macos-system-configuration",
"rustls-tls",
"cookies",
] }
rss = "2" rss = "2"
bytes = "1.9"
itertools = "0.14"
url = "2.5"
fancy-regex = "0.14" fancy-regex = "0.14"
regex = "1.11"
lazy_static = "1.5"
maplit = "1.0.2" maplit = "1.0.2"
lightningcss = "1.0.0-alpha.61" lightningcss = "1.0.0-alpha.61"
html-escape = "0.2.13" html-escape = "0.2.13"
opendal = { version = "0.51.0", features = ["default", "services-fs"] } opendal = { version = "0.51.0", features = ["default", "services-fs"] }
zune-image = "0.4.15" zune-image = "0.4.15"
once_cell = "1.20.2" once_cell = "1.20.2"
reqwest-middleware = "0.4.0"
reqwest-retry = "0.7.0"
reqwest-tracing = "0.5.5"
scraper = "0.23" scraper = "0.23"
leaky-bucket = "1.1.2"
serde_with = "3"
jwt-authorizer = "0.15.0" jwt-authorizer = "0.15.0"
futures = "0.3.31"
librqbit-core = "4"
qbit-rs = { git = "https://github.com/lonelyhentxi/qbit.git", rev = "72d53138ebe", features = [
"default",
"builder",
] }
testcontainers = { version = "0.23.3", optional = true }
testcontainers-modules = { version = "0.11.4", optional = true }
testcontainers-ext = { version = "0.1.0", optional = true, features = [
"tracing",
] }
log = "0.4.22" log = "0.4.22"
async-graphql = { version = "7", features = [] } async-graphql = { version = "7", features = [] }
async-graphql-axum = "7" async-graphql-axum = "7"
fastrand = "2.3.0"
seaography = { version = "1.1" } seaography = { version = "1.1" }
quirks_path = "0.1.1"
base64 = "0.22.1" base64 = "0.22.1"
tower = "0.5.2" tower = "0.5.2"
axum-extra = "0.10"
tower-http = { version = "0.6", features = [ tower-http = { version = "0.6", features = [
"trace", "trace",
"catch-panic", "catch-panic",
@ -102,34 +87,19 @@ tower-http = { version = "0.6", features = [
] } ] }
tera = "1.20.0" tera = "1.20.0"
openidconnect = { version = "4", features = ["rustls-tls"] } openidconnect = { version = "4", features = ["rustls-tls"] }
http-cache-reqwest = { version = "0.15", features = [
"manager-cacache",
"manager-moka",
] }
moka = "0.12.10"
http-cache = { version = "0.20.0", features = [
"cacache-tokio",
"manager-cacache",
"manager-moka",
], default-features = false }
http-cache-semantics = "2.1.0"
dotenv = "0.15.0" dotenv = "0.15.0"
http = "1.2.0" http = "1.2.0"
cookie = "0.18.1"
async-stream = "0.3.6" async-stream = "0.3.6"
serde_variant = "0.1.3" serde_variant = "0.1.3"
tracing-appender = "0.2.3" tracing-appender = "0.2.3"
clap = "4.5.31" clap = "4.5.31"
futures-util = "0.3.31"
ipnetwork = "0.21.1" ipnetwork = "0.21.1"
librqbit = "8.0.0"
typed-builder = "0.21.0" typed-builder = "0.21.0"
snafu = { version = "0.8.5", features = ["futures"] }
anyhow = "1.0.97"
serde_yaml = "0.9.34" serde_yaml = "0.9.34"
merge-struct = "0.1.0"
serde-value = "0.7.0" downloader = { workspace = true }
testing-torrents = { path = "../../packages/testing-torrents", optional = true } util = { workspace = true }
fetch = { workspace = true }
[dev-dependencies] [dev-dependencies]
serial_test = "3" serial_test = "3"

View File

@ -1,8 +1,8 @@
use recorder::errors::app_error::RResult; use recorder::errors::RecorderResult;
// #![allow(unused_imports)] // #![allow(unused_imports)]
// use recorder::{ // use recorder::{
// app::{AppContext, AppContextTrait}, // app::{AppContext, AppContextTrait},
// errors::RResult, // errors::RecorderResult,
// migrations::Migrator, // migrations::Migrator,
// models::{ // models::{
// subscribers::SEED_SUBSCRIBER, // subscribers::SEED_SUBSCRIBER,
@ -12,7 +12,7 @@ use recorder::errors::app_error::RResult;
// use sea_orm::{ColumnTrait, EntityTrait, QueryFilter}; // use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
// use sea_orm_migration::MigratorTrait; // use sea_orm_migration::MigratorTrait;
// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RResult<()> { // async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RecorderResult<()> {
// let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370"; // let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
// // let rss_link = // // let rss_link =
@ -44,13 +44,13 @@ use recorder::errors::app_error::RResult;
// } // }
// #[tokio::main] // #[tokio::main]
// async fn main() -> RResult<()> { // async fn main() -> RecorderResult<()> {
// pull_mikan_bangumi_rss(&ctx).await?; // pull_mikan_bangumi_rss(&ctx).await?;
// Ok(()) // Ok(())
// } // }
#[tokio::main] #[tokio::main]
async fn main() -> RResult<()> { async fn main() -> RecorderResult<()> {
Ok(()) Ok(())
} }

View File

@ -3,7 +3,7 @@ use std::sync::Arc;
use clap::{Parser, command}; use clap::{Parser, command};
use super::{AppContext, core::App, env::Environment}; use super::{AppContext, core::App, env::Environment};
use crate::{app::config::AppConfig, errors::app_error::RResult}; use crate::{app::config::AppConfig, errors::RecorderResult};
#[derive(Parser, Debug)] #[derive(Parser, Debug)]
#[command(version, about, long_about = None)] #[command(version, about, long_about = None)]
@ -33,7 +33,7 @@ pub struct AppBuilder {
} }
impl AppBuilder { impl AppBuilder {
pub async fn from_main_cli(environment: Option<Environment>) -> RResult<Self> { pub async fn from_main_cli(environment: Option<Environment>) -> RecorderResult<Self> {
let args = MainCliArgs::parse(); let args = MainCliArgs::parse();
let environment = environment.unwrap_or_else(|| { let environment = environment.unwrap_or_else(|| {
@ -68,7 +68,7 @@ impl AppBuilder {
Ok(builder) Ok(builder)
} }
pub async fn build(self) -> RResult<App> { pub async fn build(self) -> RecorderResult<App> {
AppConfig::load_dotenv( AppConfig::load_dotenv(
&self.environment, &self.environment,
&self.working_dir, &self.working_dir,

View File

@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
use super::env::Environment; use super::env::Environment;
use crate::{ use crate::{
auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::app_error::RResult, auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::RecorderResult,
extract::mikan::MikanConfig, graphql::GraphQLConfig, logger::LoggerConfig, extract::mikan::MikanConfig, graphql::GraphQLConfig, logger::LoggerConfig,
storage::StorageConfig, web::WebServerConfig, storage::StorageConfig, web::WebServerConfig,
}; };
@ -64,7 +64,7 @@ impl AppConfig {
fig: Figment, fig: Figment,
filepath: impl AsRef<Path>, filepath: impl AsRef<Path>,
ext: &str, ext: &str,
) -> RResult<Figment> { ) -> RecorderResult<Figment> {
let content = fs::read_to_string(filepath)?; let content = fs::read_to_string(filepath)?;
let rendered = tera::Tera::one_off( let rendered = tera::Tera::one_off(
@ -85,7 +85,7 @@ impl AppConfig {
environment: &Environment, environment: &Environment,
working_dir: &str, working_dir: &str,
dotenv_file: Option<&str>, dotenv_file: Option<&str>,
) -> RResult<()> { ) -> RecorderResult<()> {
let try_dotenv_file_or_dirs = if dotenv_file.is_some() { let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
vec![dotenv_file] vec![dotenv_file]
} else { } else {
@ -124,7 +124,7 @@ impl AppConfig {
environment: &Environment, environment: &Environment,
working_dir: &str, working_dir: &str,
config_file: Option<&str>, config_file: Option<&str>,
) -> RResult<AppConfig> { ) -> RecorderResult<AppConfig> {
let try_config_file_or_dirs = if config_file.is_some() { let try_config_file_or_dirs = if config_file.is_some() {
vec![config_file] vec![config_file]
} else { } else {

View File

@ -1,6 +1,6 @@
use super::{Environment, config::AppConfig}; use super::{Environment, config::AppConfig};
use crate::{ use crate::{
auth::AuthService, cache::CacheService, database::DatabaseService, errors::app_error::RResult, auth::AuthService, cache::CacheService, database::DatabaseService, errors::RecorderResult,
extract::mikan::MikanClient, graphql::GraphQLService, logger::LoggerService, extract::mikan::MikanClient, graphql::GraphQLService, logger::LoggerService,
storage::StorageService, storage::StorageService,
}; };
@ -36,7 +36,7 @@ impl AppContext {
environment: Environment, environment: Environment,
config: AppConfig, config: AppConfig,
working_dir: impl ToString, working_dir: impl ToString,
) -> RResult<Self> { ) -> RecorderResult<Self> {
let config_cloned = config.clone(); let config_cloned = config.clone();
let logger = LoggerService::from_config(config.logger).await?; let logger = LoggerService::from_config(config.logger).await?;

View File

@ -1,12 +1,11 @@
use std::{net::SocketAddr, sync::Arc}; use std::{net::SocketAddr, sync::Arc};
use axum::Router; use axum::Router;
use futures::try_join;
use tokio::signal; use tokio::signal;
use super::{builder::AppBuilder, context::AppContextTrait}; use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{ use crate::{
errors::app_error::RResult, errors::RecorderResult,
web::{ web::{
controller::{self, core::ControllerTrait}, controller::{self, core::ControllerTrait},
middleware::default_middleware_stack, middleware::default_middleware_stack,
@ -23,7 +22,7 @@ impl App {
AppBuilder::default() AppBuilder::default()
} }
pub async fn serve(&self) -> RResult<()> { pub async fn serve(&self) -> RecorderResult<()> {
let context = &self.context; let context = &self.context;
let config = context.config(); let config = context.config();
let listener = tokio::net::TcpListener::bind(&format!( let listener = tokio::net::TcpListener::bind(&format!(
@ -34,7 +33,7 @@ impl App {
let mut router = Router::<Arc<dyn AppContextTrait>>::new(); let mut router = Router::<Arc<dyn AppContextTrait>>::new();
let (graphql_c, oidc_c, metadata_c) = try_join!( let (graphql_c, oidc_c, metadata_c) = futures::try_join!(
controller::graphql::create(context.clone()), controller::graphql::create(context.clone()),
controller::oidc::create(context.clone()), controller::oidc::create(context.clone()),
controller::metadata::create(context.clone()) controller::metadata::create(context.clone())

View File

@ -1,7 +1,7 @@
use async_trait::async_trait; use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts}; use axum::http::{HeaderValue, request::Parts};
use base64::{self, Engine}; use base64::{self, Engine};
use reqwest::header::AUTHORIZATION; use http::header::AUTHORIZATION;
use super::{ use super::{
config::BasicAuthConfig, config::BasicAuthConfig,

View File

@ -4,6 +4,7 @@ use axum::{
http::StatusCode, http::StatusCode,
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use fetch::HttpClientError;
use openidconnect::{ use openidconnect::{
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError, ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
StandardErrorResponse, core::CoreErrorResponseType, StandardErrorResponse, core::CoreErrorResponseType,
@ -11,7 +12,7 @@ use openidconnect::{
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use snafu::prelude::*; use snafu::prelude::*;
use crate::{fetch::HttpClientError, models::auth::AuthType}; use crate::models::auth::AuthType;
#[derive(Debug, Snafu)] #[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))] #[snafu(visibility(pub(crate)))]

View File

@ -1,10 +1,17 @@
use std::{ use std::{
collections::{HashMap, HashSet}, collections::{HashMap, HashSet},
future::Future,
ops::Deref,
pin::Pin,
sync::Arc, sync::Arc,
}; };
use async_trait::async_trait; use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts}; use axum::{
http,
http::{HeaderValue, request::Parts},
};
use fetch::{HttpClient, client::HttpClientError};
use itertools::Itertools; use itertools::Itertools;
use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer}; use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer};
use moka::future::Cache; use moka::future::Cache;
@ -24,9 +31,49 @@ use super::{
errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu}, errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
service::{AuthServiceTrait, AuthUserInfo}, service::{AuthServiceTrait, AuthUserInfo},
}; };
use crate::{ use crate::{app::AppContextTrait, errors::RecorderError, models::auth::AuthType};
app::AppContextTrait, errors::app_error::RError, fetch::HttpClient, models::auth::AuthType,
}; pub struct OidcHttpClient(pub Arc<HttpClient>);
impl<'a> Deref for OidcHttpClient {
type Target = HttpClient;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<'c> openidconnect::AsyncHttpClient<'c> for OidcHttpClient {
type Error = HttpClientError;
#[cfg(target_arch = "wasm32")]
type Future =
Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + 'c>>;
#[cfg(not(target_arch = "wasm32"))]
type Future =
Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;
fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
Box::pin(async move {
let response = self.execute(request.try_into()?).await?;
let mut builder = http::Response::builder().status(response.status());
#[cfg(not(target_arch = "wasm32"))]
{
builder = builder.version(response.version());
}
for (name, value) in response.headers().iter() {
builder = builder.header(name, value);
}
builder
.body(response.bytes().await?.to_vec())
.map_err(HttpClientError::from)
})
}
}
#[derive(Deserialize, Serialize, Clone, Debug)] #[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OidcAuthClaims { pub struct OidcAuthClaims {
@ -118,18 +165,19 @@ pub struct OidcAuthCallbackPayload {
pub struct OidcAuthService { pub struct OidcAuthService {
pub config: OidcAuthConfig, pub config: OidcAuthConfig,
pub api_authorizer: Authorizer<OidcAuthClaims>, pub api_authorizer: Authorizer<OidcAuthClaims>,
pub oidc_provider_client: HttpClient, pub oidc_provider_client: Arc<HttpClient>,
pub oidc_request_cache: Cache<String, OidcAuthRequest>, pub oidc_request_cache: Cache<String, OidcAuthRequest>,
} }
impl OidcAuthService { impl OidcAuthService {
pub async fn build_authorization_request( pub async fn build_authorization_request<'a>(
&self, &'a self,
redirect_uri: &str, redirect_uri: &str,
) -> Result<OidcAuthRequest, AuthError> { ) -> Result<OidcAuthRequest, AuthError> {
let oidc_provider_client = OidcHttpClient(self.oidc_provider_client.clone());
let provider_metadata = CoreProviderMetadata::discover_async( let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?, IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
&self.oidc_provider_client, &oidc_provider_client,
) )
.await?; .await?;
@ -199,10 +247,11 @@ impl OidcAuthService {
Ok(result) Ok(result)
} }
pub async fn extract_authorization_request_callback( pub async fn extract_authorization_request_callback<'a>(
&self, &'a self,
query: OidcAuthCallbackQuery, query: OidcAuthCallbackQuery,
) -> Result<OidcAuthCallbackPayload, AuthError> { ) -> Result<OidcAuthCallbackPayload, AuthError> {
let oidc_http_client = OidcHttpClient(self.oidc_provider_client.clone());
let csrf_token = query.state.ok_or(AuthError::OidcInvalidStateError)?; let csrf_token = query.state.ok_or(AuthError::OidcInvalidStateError)?;
let code = query.code.ok_or(AuthError::OidcInvalidCodeError)?; let code = query.code.ok_or(AuthError::OidcInvalidCodeError)?;
@ -211,7 +260,7 @@ impl OidcAuthService {
let provider_metadata = CoreProviderMetadata::discover_async( let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?, IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
&self.oidc_provider_client, &oidc_http_client,
) )
.await?; .await?;
@ -227,7 +276,7 @@ impl OidcAuthService {
let token_response = oidc_client let token_response = oidc_client
.exchange_code(AuthorizationCode::new(code))? .exchange_code(AuthorizationCode::new(code))?
.set_pkce_verifier(pkce_verifier) .set_pkce_verifier(pkce_verifier)
.request_async(&HttpClient::default()) .request_async(&oidc_http_client)
.await?; .await?;
let id_token = token_response let id_token = token_response
@ -312,7 +361,7 @@ impl AuthServiceTrait for OidcAuthService {
} }
} }
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await { let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(RError::DbError { Err(RecorderError::DbError {
source: DbErr::RecordNotFound(..), source: DbErr::RecordNotFound(..),
}) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await, }) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
r => r, r => r,

View File

@ -6,9 +6,13 @@ use axum::{
http::request::Parts, http::request::Parts,
response::{IntoResponse as _, Response}, response::{IntoResponse as _, Response},
}; };
use fetch::{
HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
};
use http::header::HeaderValue;
use jwt_authorizer::{JwtAuthorizer, Validation}; use jwt_authorizer::{JwtAuthorizer, Validation};
use moka::future::Cache; use moka::future::Cache;
use reqwest::header::HeaderValue;
use snafu::prelude::*; use snafu::prelude::*;
use super::{ use super::{
@ -17,14 +21,7 @@ use super::{
errors::{AuthError, OidcProviderHttpClientSnafu}, errors::{AuthError, OidcProviderHttpClientSnafu},
oidc::{OidcAuthClaims, OidcAuthService}, oidc::{OidcAuthClaims, OidcAuthService},
}; };
use crate::{ use crate::{app::AppContextTrait, models::auth::AuthType};
app::AppContextTrait,
fetch::{
HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
},
models::auth::AuthType,
};
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct AuthUserInfo { pub struct AuthUserInfo {
@ -89,7 +86,7 @@ impl AuthService {
AuthService::Oidc(Box::new(OidcAuthService { AuthService::Oidc(Box::new(OidcAuthService {
config, config,
api_authorizer, api_authorizer,
oidc_provider_client, oidc_provider_client: Arc::new(oidc_provider_client),
oidc_request_cache: Cache::builder() oidc_request_cache: Cache::builder()
.time_to_live(Duration::from_mins(5)) .time_to_live(Duration::from_mins(5))
.name("oidc_request_cache") .name("oidc_request_cache")

View File

@ -1,7 +1,7 @@
use recorder::{app::AppBuilder, errors::app_error::RResult}; use recorder::{app::AppBuilder, errors::RecorderResult};
#[tokio::main] #[tokio::main]
async fn main() -> RResult<()> { async fn main() -> RecorderResult<()> {
let builder = AppBuilder::from_main_cli(None).await?; let builder = AppBuilder::from_main_cli(None).await?;
let app = builder.build().await?; let app = builder.build().await?;

View File

@ -1,10 +1,10 @@
use super::CacheConfig; use super::CacheConfig;
use crate::errors::app_error::RResult; use crate::errors::RecorderResult;
pub struct CacheService {} pub struct CacheService {}
impl CacheService { impl CacheService {
pub async fn from_config(_config: CacheConfig) -> RResult<Self> { pub async fn from_config(_config: CacheConfig) -> RecorderResult<Self> {
Ok(Self {}) Ok(Self {})
} }
} }

View File

@ -7,14 +7,14 @@ use sea_orm::{
use sea_orm_migration::MigratorTrait; use sea_orm_migration::MigratorTrait;
use super::DatabaseConfig; use super::DatabaseConfig;
use crate::{errors::app_error::RResult, migrations::Migrator}; use crate::{errors::RecorderResult, migrations::Migrator};
pub struct DatabaseService { pub struct DatabaseService {
connection: DatabaseConnection, connection: DatabaseConnection,
} }
impl DatabaseService { impl DatabaseService {
pub async fn from_config(config: DatabaseConfig) -> RResult<Self> { pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> {
let mut opt = ConnectOptions::new(&config.uri); let mut opt = ConnectOptions::new(&config.uri);
opt.max_connections(config.max_connections) opt.max_connections(config.max_connections)
.min_connections(config.min_connections) .min_connections(config.min_connections)

View File

@ -1 +0,0 @@

View File

@ -4,6 +4,7 @@ use axum::{
Json, Json,
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use fetch::{FetchError, HttpClientError};
use http::StatusCode; use http::StatusCode;
use serde::{Deserialize, Deserializer, Serialize}; use serde::{Deserialize, Deserializer, Serialize};
use snafu::Snafu; use snafu::Snafu;
@ -12,12 +13,11 @@ use crate::{
auth::AuthError, auth::AuthError,
downloader::DownloaderError, downloader::DownloaderError,
errors::{OptDynErr, response::StandardErrorResponse}, errors::{OptDynErr, response::StandardErrorResponse},
fetch::HttpClientError,
}; };
#[derive(Snafu, Debug)] #[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))] #[snafu(visibility(pub(crate)))]
pub enum RError { pub enum RecorderError {
#[snafu(transparent, context(false))] #[snafu(transparent, context(false))]
FancyRegexError { FancyRegexError {
#[snafu(source(from(fancy_regex::Error, Box::new)))] #[snafu(source(from(fancy_regex::Error, Box::new)))]
@ -53,8 +53,6 @@ pub enum RError {
IOError { source: std::io::Error }, IOError { source: std::io::Error },
#[snafu(transparent)] #[snafu(transparent)]
DbError { source: sea_orm::DbErr }, DbError { source: sea_orm::DbErr },
#[snafu(transparent)]
CookieParseError { source: cookie::ParseError },
#[snafu(transparent, context(false))] #[snafu(transparent, context(false))]
FigmentError { FigmentError {
#[snafu(source(from(figment::Error, Box::new)))] #[snafu(source(from(figment::Error, Box::new)))]
@ -63,10 +61,6 @@ pub enum RError {
#[snafu(transparent)] #[snafu(transparent)]
SerdeJsonError { source: serde_json::Error }, SerdeJsonError { source: serde_json::Error },
#[snafu(transparent)] #[snafu(transparent)]
ReqwestMiddlewareError { source: reqwest_middleware::Error },
#[snafu(transparent)]
ReqwestError { source: reqwest::Error },
#[snafu(transparent)]
ParseUrlError { source: url::ParseError }, ParseUrlError { source: url::ParseError },
#[snafu(display("{source}"), context(false))] #[snafu(display("{source}"), context(false))]
OpenDALError { OpenDALError {
@ -106,6 +100,8 @@ pub enum RError {
}, },
#[snafu(display("Model Entity {entity} not found"))] #[snafu(display("Model Entity {entity} not found"))]
ModelEntityNotFound { entity: Cow<'static, str> }, ModelEntityNotFound { entity: Cow<'static, str> },
#[snafu(transparent)]
FetchError { source: FetchError },
#[snafu(display("{message}"))] #[snafu(display("{message}"))]
Whatever { Whatever {
message: String, message: String,
@ -114,7 +110,7 @@ pub enum RError {
}, },
} }
impl RError { impl RecorderError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self { pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError { Self::MikanMetaMissingFieldError {
field, field,
@ -146,7 +142,7 @@ impl RError {
} }
} }
impl snafu::FromString for RError { impl snafu::FromString for RecorderError {
type Source = Box<dyn std::error::Error + Send + Sync>; type Source = Box<dyn std::error::Error + Send + Sync>;
fn without_source(message: String) -> Self { fn without_source(message: String) -> Self {
@ -164,7 +160,7 @@ impl snafu::FromString for RError {
} }
} }
impl IntoResponse for RError { impl IntoResponse for RecorderError {
fn into_response(self) -> Response { fn into_response(self) -> Response {
match self { match self {
Self::AuthError { source: auth_error } => auth_error.into_response(), Self::AuthError { source: auth_error } => auth_error.into_response(),
@ -177,7 +173,7 @@ impl IntoResponse for RError {
} }
} }
impl Serialize for RError { impl Serialize for RecorderError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: serde::Serializer, S: serde::Serializer,
@ -186,7 +182,7 @@ impl Serialize for RError {
} }
} }
impl<'de> Deserialize<'de> for RError { impl<'de> Deserialize<'de> for RecorderError {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where where
D: Deserializer<'de>, D: Deserializer<'de>,
@ -199,4 +195,4 @@ impl<'de> Deserialize<'de> for RError {
} }
} }
pub type RResult<T> = Result<T, RError>; pub type RecorderResult<T> = Result<T, RecorderError>;

View File

@ -1,9 +1 @@
pub trait RAnyhowResultExt<T>: snafu::ResultExt<T, anyhow::Error> {
fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>>;
}
impl<T> RAnyhowResultExt<T> for Result<T, anyhow::Error> {
fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>> {
self.map_err(|e| e.into())
}
}

View File

@ -1,9 +1,7 @@
pub mod alias;
pub mod app_error; pub mod app_error;
pub mod ext; pub mod ext;
pub mod response; pub mod response;
pub use alias::OptDynErr; pub use app_error::{RecorderError, RecorderResult};
pub use app_error::*;
pub use ext::RAnyhowResultExt;
pub use response::StandardErrorResponse; pub use response::StandardErrorResponse;
pub use util::errors::OptDynErr;

View File

@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever}; use snafu::{OptionExt, whatever};
use crate::{ use crate::{
errors::app_error::{RError, RResult}, errors::app_error::{RecorderError, RecorderResult},
extract::defs::SUBTITLE_LANG, extract::defs::SUBTITLE_LANG,
}; };
@ -104,10 +104,10 @@ pub fn parse_episode_media_meta_from_torrent(
torrent_path: &Path, torrent_path: &Path,
torrent_name: Option<&str>, torrent_name: Option<&str>,
season: Option<i32>, season: Option<i32>,
) -> RResult<TorrentEpisodeMediaMeta> { ) -> RecorderResult<TorrentEpisodeMediaMeta> {
let media_name = torrent_path let media_name = torrent_path
.file_name() .file_name()
.with_whatever_context::<_, _, RError>(|| { .with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {}", torrent_path) format!("failed to get file name of {}", torrent_path)
})?; })?;
let mut match_obj = None; let mut match_obj = None;
@ -124,7 +124,7 @@ pub fn parse_episode_media_meta_from_torrent(
if let Some(match_obj) = match_obj { if let Some(match_obj) = match_obj {
let group_season_and_title = match_obj let group_season_and_title = match_obj
.get(1) .get(1)
.whatever_context::<_, RError>("should have 1 group")? .whatever_context::<_, RecorderError>("should have 1 group")?
.as_str(); .as_str();
let (fansub, season_and_title) = get_fansub(group_season_and_title); let (fansub, season_and_title) = get_fansub(group_season_and_title);
let (title, season) = if let Some(season) = season { let (title, season) = if let Some(season) = season {
@ -135,7 +135,7 @@ pub fn parse_episode_media_meta_from_torrent(
}; };
let episode_index = match_obj let episode_index = match_obj
.get(2) .get(2)
.whatever_context::<_, RError>("should have 2 group")? .whatever_context::<_, RecorderError>("should have 2 group")?
.as_str() .as_str()
.parse::<i32>() .parse::<i32>()
.unwrap_or(1); .unwrap_or(1);
@ -163,11 +163,11 @@ pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &Path, torrent_path: &Path,
torrent_name: Option<&str>, torrent_name: Option<&str>,
season: Option<i32>, season: Option<i32>,
) -> RResult<TorrentEpisodeSubtitleMeta> { ) -> RecorderResult<TorrentEpisodeSubtitleMeta> {
let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?; let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
let media_name = torrent_path let media_name = torrent_path
.file_name() .file_name()
.with_whatever_context::<_, _, RError>(|| { .with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {}", torrent_path) format!("failed to get file name of {}", torrent_path)
})?; })?;

View File

@ -1,6 +1,3 @@
pub mod core;
pub mod extract; pub mod extract;
pub use core::{BITTORRENT_MIME_TYPE, MAGNET_SCHEMA};
pub use extract::*; pub use extract::*;

View File

@ -1,15 +1,11 @@
use std::{fmt::Debug, ops::Deref}; use std::{fmt::Debug, ops::Deref};
use reqwest_middleware::ClientWithMiddleware; use fetch::{FetchError, HttpClient, HttpClientTrait, client::HttpClientCookiesAuth};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use url::Url; use url::Url;
use super::MikanConfig; use super::MikanConfig;
use crate::{ use crate::errors::RecorderError;
errors::app_error::RError,
fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth},
};
#[derive(Default, Clone, Deserialize, Serialize)] #[derive(Default, Clone, Deserialize, Serialize)]
pub struct MikanAuthSecrecy { pub struct MikanAuthSecrecy {
pub cookie: String, pub cookie: String,
@ -26,8 +22,10 @@ impl Debug for MikanAuthSecrecy {
} }
impl MikanAuthSecrecy { impl MikanAuthSecrecy {
pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RError> { pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RecorderError> {
HttpClientCookiesAuth::from_cookies(&self.cookie, url, self.user_agent) HttpClientCookiesAuth::from_cookies(&self.cookie, url, self.user_agent)
.map_err(FetchError::from)
.map_err(RecorderError::from)
} }
} }
@ -38,7 +36,7 @@ pub struct MikanClient {
} }
impl MikanClient { impl MikanClient {
pub async fn from_config(config: MikanConfig) -> Result<Self, RError> { pub async fn from_config(config: MikanConfig) -> Result<Self, RecorderError> {
let http_client = HttpClient::from_config(config.http_client)?; let http_client = HttpClient::from_config(config.http_client)?;
let base_url = config.base_url; let base_url = config.base_url;
Ok(Self { Ok(Self {
@ -47,7 +45,7 @@ impl MikanClient {
}) })
} }
pub fn fork_with_auth(&self, secrecy: Option<MikanAuthSecrecy>) -> Result<Self, RError> { pub fn fork_with_auth(&self, secrecy: Option<MikanAuthSecrecy>) -> Result<Self, RecorderError> {
let mut fork = self.http_client.fork(); let mut fork = self.http_client.fork();
if let Some(secrecy) = secrecy { if let Some(secrecy) = secrecy {
@ -71,10 +69,10 @@ impl MikanClient {
} }
impl Deref for MikanClient { impl Deref for MikanClient {
type Target = ClientWithMiddleware; type Target = fetch::reqwest_middleware::ClientWithMiddleware;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
self.http_client.deref() &self.http_client
} }
} }

View File

@ -1,8 +1,7 @@
use fetch::HttpClientConfig;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use url::Url; use url::Url;
use crate::fetch::HttpClientConfig;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct MikanConfig { pub struct MikanConfig {
pub http_client: HttpClientConfig, pub http_client: HttpClientConfig,

View File

@ -1,22 +1,19 @@
use std::borrow::Cow; use std::borrow::Cow;
use chrono::DateTime; use chrono::DateTime;
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
use fetch::{FetchError, IntoUrl, bytes::fetch_bytes};
use itertools::Itertools; use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::instrument; use tracing::instrument;
use url::Url; use url::Url;
use crate::{ use crate::{
errors::app_error::{RError, RResult}, errors::app_error::{RecorderError, RecorderResult},
extract::{ extract::mikan::{
bittorrent::BITTORRENT_MIME_TYPE,
mikan::{
MikanClient, MikanClient,
web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage}, web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
}, },
},
fetch::bytes::fetch_bytes,
}; };
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@ -102,28 +99,28 @@ impl MikanRssChannel {
} }
impl TryFrom<rss::Item> for MikanRssItem { impl TryFrom<rss::Item> for MikanRssItem {
type Error = RError; type Error = RecorderError;
fn try_from(item: rss::Item) -> Result<Self, Self::Error> { fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
let enclosure = item let enclosure = item.enclosure.ok_or_else(|| {
.enclosure RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure"))
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure")))?; })?;
let mime_type = enclosure.mime_type; let mime_type = enclosure.mime_type;
if mime_type != BITTORRENT_MIME_TYPE { if mime_type != BITTORRENT_MIME_TYPE {
return Err(RError::MimeError { return Err(RecorderError::MimeError {
expected: String::from(BITTORRENT_MIME_TYPE), expected: String::from(BITTORRENT_MIME_TYPE),
found: mime_type.to_string(), found: mime_type.to_string(),
desc: String::from("MikanRssItem"), desc: String::from("MikanRssItem"),
}); });
} }
let title = item let title = item.title.ok_or_else(|| {
.title RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title"))
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?; })?;
let enclosure_url = Url::parse(&enclosure.url).map_err(|err| { let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
RError::from_mikan_rss_invalid_field_and_source( RecorderError::from_mikan_rss_invalid_field_and_source(
"enclosure_url:enclosure.link".into(), "enclosure_url:enclosure.link".into(),
err, err,
) )
@ -132,12 +129,14 @@ impl TryFrom<rss::Item> for MikanRssItem {
let homepage = item let homepage = item
.link .link
.and_then(|link| Url::parse(&link).ok()) .and_then(|link| Url::parse(&link).ok())
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link")))?; .ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link"))
})?;
let MikanEpisodeHomepage { let MikanEpisodeHomepage {
mikan_episode_id, .. mikan_episode_id, ..
} = extract_mikan_episode_id_from_homepage(&homepage).ok_or_else(|| { } = extract_mikan_episode_id_from_homepage(&homepage).ok_or_else(|| {
RError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id")) RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
})?; })?;
Ok(MikanRssItem { Ok(MikanRssItem {
@ -170,8 +169,8 @@ pub fn build_mikan_bangumi_rss_link(
mikan_base_url: impl IntoUrl, mikan_base_url: impl IntoUrl,
mikan_bangumi_id: &str, mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>, mikan_fansub_id: Option<&str>,
) -> RResult<Url> { ) -> RecorderResult<Url> {
let mut url = mikan_base_url.into_url()?; let mut url = mikan_base_url.into_url().map_err(FetchError::from)?;
url.set_path("/RSS/Bangumi"); url.set_path("/RSS/Bangumi");
url.query_pairs_mut() url.query_pairs_mut()
.append_pair("bangumiId", mikan_bangumi_id); .append_pair("bangumiId", mikan_bangumi_id);
@ -185,7 +184,7 @@ pub fn build_mikan_bangumi_rss_link(
pub fn build_mikan_subscriber_aggregation_rss_link( pub fn build_mikan_subscriber_aggregation_rss_link(
mikan_base_url: &str, mikan_base_url: &str,
mikan_aggregation_id: &str, mikan_aggregation_id: &str,
) -> RResult<Url> { ) -> RecorderResult<Url> {
let mut url = Url::parse(mikan_base_url)?; let mut url = Url::parse(mikan_base_url)?;
url.set_path("/RSS/MyBangumi"); url.set_path("/RSS/MyBangumi");
url.query_pairs_mut() url.query_pairs_mut()
@ -227,7 +226,7 @@ pub fn extract_mikan_subscriber_aggregation_id_from_rss_link(
pub async fn extract_mikan_rss_channel_from_rss_link( pub async fn extract_mikan_rss_channel_from_rss_link(
http_client: &MikanClient, http_client: &MikanClient,
channel_rss_link: impl IntoUrl, channel_rss_link: impl IntoUrl,
) -> RResult<MikanRssChannel> { ) -> RecorderResult<MikanRssChannel> {
let bytes = fetch_bytes(http_client, channel_rss_link.as_str()).await?; let bytes = fetch_bytes(http_client, channel_rss_link.as_str()).await?;
let channel = rss::Channel::read_from(&bytes[..])?; let channel = rss::Channel::read_from(&bytes[..])?;
@ -326,7 +325,7 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
}, },
)) ))
} else { } else {
Err(RError::MikanRssInvalidFormatError).inspect_err(|error| { Err(RecorderError::MikanRssInvalidFormatError).inspect_err(|error| {
tracing::warn!(error = %error); tracing::warn!(error = %error);
}) })
} }
@ -336,24 +335,22 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
mod tests { mod tests {
use std::assert_matches::assert_matches; use std::assert_matches::assert_matches;
use downloader::bittorrent::BITTORRENT_MIME_TYPE;
use rstest::rstest; use rstest::rstest;
use url::Url; use url::Url;
use crate::{ use crate::{
errors::app_error::RResult, errors::RecorderResult,
extract::{ extract::mikan::{
bittorrent::BITTORRENT_MIME_TYPE,
mikan::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel, MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
extract_mikan_rss_channel_from_rss_link, extract_mikan_rss_channel_from_rss_link,
}, },
},
test_utils::mikan::build_testing_mikan_client, test_utils::mikan::build_testing_mikan_client,
}; };
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_parse_mikan_rss_channel_from_rss_link() -> RResult<()> { async fn test_parse_mikan_rss_channel_from_rss_link() -> RecorderResult<()> {
let mut mikan_server = mockito::Server::new_async().await; let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_base_url = Url::parse(&mikan_server.url())?;

View File

@ -2,6 +2,7 @@ use std::{borrow::Cow, sync::Arc};
use async_stream::try_stream; use async_stream::try_stream;
use bytes::Bytes; use bytes::Bytes;
use fetch::{html::fetch_html, image::fetch_image};
use futures::Stream; use futures::Stream;
use itertools::Itertools; use itertools::Itertools;
use scraper::{Html, Selector}; use scraper::{Html, Selector};
@ -15,12 +16,11 @@ use super::{
}; };
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::app_error::{RError, RResult}, errors::app_error::{RecorderResult, RecorderError},
extract::{ extract::{
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref}, html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str, media::extract_image_src_from_str,
}, },
fetch::{html::fetch_html, image::fetch_image},
storage::StorageContentCategory, storage::StorageContentCategory,
}; };
@ -115,7 +115,7 @@ pub fn extract_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeH
pub async fn extract_mikan_poster_meta_from_src( pub async fn extract_mikan_poster_meta_from_src(
http_client: &MikanClient, http_client: &MikanClient,
origin_poster_src_url: Url, origin_poster_src_url: Url,
) -> Result<MikanBangumiPosterMeta, RError> { ) -> Result<MikanBangumiPosterMeta, RecorderError> {
let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?; let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?;
Ok(MikanBangumiPosterMeta { Ok(MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url, origin_poster_src: origin_poster_src_url,
@ -128,7 +128,7 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
origin_poster_src_url: Url, origin_poster_src_url: Url,
subscriber_id: i32, subscriber_id: i32,
) -> RResult<MikanBangumiPosterMeta> { ) -> RecorderResult<MikanBangumiPosterMeta> {
let dal_client = ctx.storage(); let dal_client = ctx.storage();
let mikan_client = ctx.mikan(); let mikan_client = ctx.mikan();
if let Some(poster_src) = dal_client if let Some(poster_src) = dal_client
@ -170,7 +170,7 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
pub async fn extract_mikan_episode_meta_from_episode_homepage( pub async fn extract_mikan_episode_meta_from_episode_homepage(
http_client: &MikanClient, http_client: &MikanClient,
mikan_episode_homepage_url: Url, mikan_episode_homepage_url: Url,
) -> Result<MikanEpisodeMeta, RError> { ) -> Result<MikanEpisodeMeta, RecorderError> {
let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?; let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?; let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?;
@ -186,7 +186,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
.select(bangumi_title_selector) .select(bangumi_title_selector)
.next() .next()
.map(extract_inner_text_from_element_ref) .map(extract_inner_text_from_element_ref)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title"))) .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.inspect_err(|error| { .inspect_err(|error| {
tracing::warn!(error = %error); tracing::warn!(error = %error);
})?; })?;
@ -201,18 +201,22 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
.and_then(|el| el.value().attr("href")) .and_then(|el| el.value().attr("href"))
.and_then(|s| mikan_episode_homepage_url.join(s).ok()) .and_then(|s| mikan_episode_homepage_url.join(s).ok())
.and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url)) .and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))) .ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
})
.inspect_err(|error| tracing::error!(error = %error))?; .inspect_err(|error| tracing::error!(error = %error))?;
let mikan_fansub_id = mikan_fansub_id let mikan_fansub_id = mikan_fansub_id
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id"))) .ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id"))
})
.inspect_err(|error| tracing::error!(error = %error))?; .inspect_err(|error| tracing::error!(error = %error))?;
let episode_title = html let episode_title = html
.select(&Selector::parse("title").unwrap()) .select(&Selector::parse("title").unwrap())
.next() .next()
.map(extract_inner_text_from_element_ref) .map(extract_inner_text_from_element_ref)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title"))) .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
.inspect_err(|error| { .inspect_err(|error| {
tracing::warn!(error = %error); tracing::warn!(error = %error);
})?; })?;
@ -220,7 +224,9 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
let MikanEpisodeHomepage { let MikanEpisodeHomepage {
mikan_episode_id, .. mikan_episode_id, ..
} = extract_mikan_episode_id_from_homepage(&mikan_episode_homepage_url) } = extract_mikan_episode_id_from_homepage(&mikan_episode_homepage_url)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))) .ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
})
.inspect_err(|error| { .inspect_err(|error| {
tracing::warn!(error = %error); tracing::warn!(error = %error);
})?; })?;
@ -232,7 +238,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
) )
.next() .next()
.map(extract_inner_text_from_element_ref) .map(extract_inner_text_from_element_ref)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name"))) .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
.inspect_err(|error| { .inspect_err(|error| {
tracing::warn!(error = %error); tracing::warn!(error = %error);
})?; })?;
@ -275,7 +281,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage( pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
http_client: &MikanClient, http_client: &MikanClient,
mikan_bangumi_homepage_url: Url, mikan_bangumi_homepage_url: Url,
) -> Result<MikanBangumiMeta, RError> { ) -> Result<MikanBangumiMeta, RecorderError> {
let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?; let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?; let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?;
let html = Html::parse_document(&content); let html = Html::parse_document(&content);
@ -289,7 +295,7 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
.select(bangumi_title_selector) .select(bangumi_title_selector)
.next() .next()
.map(extract_inner_text_from_element_ref) .map(extract_inner_text_from_element_ref)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title"))) .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.inspect_err(|error| tracing::warn!(error = %error))?; .inspect_err(|error| tracing::warn!(error = %error))?;
let mikan_bangumi_id = html let mikan_bangumi_id = html
@ -303,7 +309,9 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
mikan_bangumi_id, .. mikan_bangumi_id, ..
}| mikan_bangumi_id, }| mikan_bangumi_id,
) )
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))) .ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
})
.inspect_err(|error| tracing::error!(error = %error))?; .inspect_err(|error| tracing::error!(error = %error))?;
let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| { let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
@ -353,8 +361,8 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
context: Arc<dyn AppContextTrait>, context: Arc<dyn AppContextTrait>,
my_bangumi_page_url: Url, my_bangumi_page_url: Url,
auth_secrecy: Option<MikanAuthSecrecy>, auth_secrecy: Option<MikanAuthSecrecy>,
history: &[Arc<RResult<MikanBangumiMeta>>], history: &[Arc<RecorderResult<MikanBangumiMeta>>],
) -> impl Stream<Item = RResult<MikanBangumiMeta>> { ) -> impl Stream<Item = RecorderResult<MikanBangumiMeta>> {
try_stream! { try_stream! {
let http_client = &context.mikan().fork_with_auth(auth_secrecy.clone())?; let http_client = &context.mikan().fork_with_auth(auth_secrecy.clone())?;
@ -511,7 +519,7 @@ mod test {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_extract_mikan_poster_from_src(before_each: ()) -> RResult<()> { async fn test_extract_mikan_poster_from_src(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = mockito::Server::new_async().await; let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?; let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -542,7 +550,7 @@ mod test {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_extract_mikan_episode(before_each: ()) -> RResult<()> { async fn test_extract_mikan_episode(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = mockito::Server::new_async().await; let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?; let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -582,7 +590,7 @@ mod test {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RResult<()> { async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = mockito::Server::new_async().await; let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?; let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -619,7 +627,7 @@ mod test {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RResult<()> { async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = mockito::Server::new_async().await; let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_base_url = Url::parse(&mikan_server.url())?;

View File

@ -10,7 +10,7 @@ use serde::{Deserialize, Serialize};
use snafu::whatever; use snafu::whatever;
use crate::{ use crate::{
errors::app_error::RResult, errors::RecorderResult,
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE}, extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
}; };
@ -75,7 +75,7 @@ fn replace_ch_bracket_to_en(raw_name: &str) -> String {
raw_name.replace('【', "[").replace('】', "]") raw_name.replace('【', "[").replace('】', "]")
} }
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RResult<String> { fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RecorderResult<String> {
let raw_without_fansub = if let Some(fansub) = fansub { let raw_without_fansub = if let Some(fansub) = fansub {
let fan_sub_re = Regex::new(&format!(".{fansub}."))?; let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
fan_sub_re.replace_all(title_body, "") fan_sub_re.replace_all(title_body, "")
@ -263,7 +263,7 @@ pub fn check_is_movie(title: &str) -> bool {
MOVIE_TITLE_RE.is_match(title) MOVIE_TITLE_RE.is_match(title)
} }
pub fn parse_episode_meta_from_raw_name(s: &str) -> RResult<RawEpisodeMeta> { pub fn parse_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
let raw_title = s.trim(); let raw_title = s.trim();
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title); let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
let fansub = extract_fansub(&raw_title_without_ch_brackets); let fansub = extract_fansub(&raw_title_without_ch_brackets);

View File

@ -1,36 +0,0 @@
use std::{future::Future, pin::Pin};
use axum::http;
use super::{HttpClient, client::HttpClientError};
impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
type Error = HttpClientError;
#[cfg(target_arch = "wasm32")]
type Future = Pin<Box<dyn Future<Output = Result<HttpResponse, Self::Error>> + 'c>>;
#[cfg(not(target_arch = "wasm32"))]
type Future =
Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;
fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
Box::pin(async move {
let response = self.execute(request.try_into()?).await?;
let mut builder = http::Response::builder().status(response.status());
#[cfg(not(target_arch = "wasm32"))]
{
builder = builder.version(response.version());
}
for (name, value) in response.headers().iter() {
builder = builder.header(name, value);
}
builder
.body(response.bytes().await?.to_vec())
.map_err(HttpClientError::from)
})
}
}

View File

@ -2,7 +2,7 @@ use async_graphql::dynamic::Schema;
use sea_orm::DatabaseConnection; use sea_orm::DatabaseConnection;
use super::{config::GraphQLConfig, schema_root}; use super::{config::GraphQLConfig, schema_root};
use crate::errors::app_error::RResult; use crate::errors::RecorderResult;
#[derive(Debug)] #[derive(Debug)]
pub struct GraphQLService { pub struct GraphQLService {
@ -13,7 +13,7 @@ impl GraphQLService {
pub async fn from_config_and_database( pub async fn from_config_and_database(
config: GraphQLConfig, config: GraphQLConfig,
db: DatabaseConnection, db: DatabaseConnection,
) -> RResult<Self> { ) -> RecorderResult<Self> {
let schema = schema_root::schema( let schema = schema_root::schema(
db, db,
config.depth_limit.and_then(|l| l.into()), config.depth_limit.and_then(|l| l.into()),

View File

@ -5,19 +5,18 @@
impl_trait_in_bindings, impl_trait_in_bindings,
iterator_try_collect, iterator_try_collect,
async_fn_traits, async_fn_traits,
let_chains, error_generic_member_access,
error_generic_member_access associated_type_defaults,
let_chains
)] )]
#![feature(associated_type_defaults)] pub use downloader;
pub mod app; pub mod app;
pub mod auth; pub mod auth;
pub mod cache; pub mod cache;
pub mod database; pub mod database;
pub mod downloader;
pub mod errors; pub mod errors;
pub mod extract; pub mod extract;
pub mod fetch;
pub mod graphql; pub mod graphql;
pub mod logger; pub mod logger;
pub mod migrations; pub mod migrations;
@ -26,5 +25,4 @@ pub mod storage;
pub mod tasks; pub mod tasks;
#[cfg(test)] #[cfg(test)]
pub mod test_utils; pub mod test_utils;
pub mod utils;
pub mod web; pub mod web;

View File

@ -10,7 +10,7 @@ use tracing_subscriber::{
}; };
use super::{LogFormat, LogLevel, LogRotation, LoggerConfig}; use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
use crate::errors::app_error::RResult; use crate::errors::RecorderResult;
// Function to initialize the logger based on the provided configuration // Function to initialize the logger based on the provided configuration
const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"]; const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];
@ -74,7 +74,7 @@ impl LoggerService {
.expect("logger initialization failed") .expect("logger initialization failed")
} }
pub async fn from_config(config: LoggerConfig) -> RResult<Self> { pub async fn from_config(config: LoggerConfig) -> RecorderResult<Self> {
let mut layers: Vec<Box<dyn Layer<Registry> + Sync + Send>> = Vec::new(); let mut layers: Vec<Box<dyn Layer<Registry> + Sync + Send>> = Vec::new();
if let Some(file_appender_config) = config.file_appender.as_ref() { if let Some(file_appender_config) = config.file_appender.as_ref() {

View File

@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use super::subscribers::{self, SEED_SUBSCRIBER}; use super::subscribers::{self, SEED_SUBSCRIBER};
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::app_error::{RError, RResult}, errors::app_error::{RecorderError, RecorderResult},
}; };
#[derive( #[derive(
@ -57,17 +57,17 @@ impl Related<super::subscribers::Entity> for Entity {
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {}
impl Model { impl Model {
pub async fn find_by_pid(ctx: &dyn AppContextTrait, pid: &str) -> RResult<Self> { pub async fn find_by_pid(ctx: &dyn AppContextTrait, pid: &str) -> RecorderResult<Self> {
let db = ctx.db(); let db = ctx.db();
let subscriber_auth = Entity::find() let subscriber_auth = Entity::find()
.filter(Column::Pid.eq(pid)) .filter(Column::Pid.eq(pid))
.one(db) .one(db)
.await? .await?
.ok_or_else(|| RError::from_db_record_not_found("auth::find_by_pid"))?; .ok_or_else(|| RecorderError::from_db_record_not_found("auth::find_by_pid"))?;
Ok(subscriber_auth) Ok(subscriber_auth)
} }
pub async fn create_from_oidc(ctx: &dyn AppContextTrait, sub: String) -> RResult<Self> { pub async fn create_from_oidc(ctx: &dyn AppContextTrait, sub: String) -> RecorderResult<Self> {
let db = ctx.db(); let db = ctx.db();
let txn = db.begin().await?; let txn = db.begin().await?;

View File

@ -4,7 +4,7 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::O
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::subscription_bangumi; use super::subscription_bangumi;
use crate::{app::AppContextTrait, errors::app_error::RResult}; use crate::{app::AppContextTrait, errors::RecorderResult};
#[derive( #[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject, Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
@ -119,9 +119,9 @@ impl Model {
mikan_bangumi_id: String, mikan_bangumi_id: String,
mikan_fansub_id: String, mikan_fansub_id: String,
f: F, f: F,
) -> RResult<Model> ) -> RecorderResult<Model>
where where
F: AsyncFnOnce(&mut ActiveModel) -> RResult<()>, F: AsyncFnOnce(&mut ActiveModel) -> RecorderResult<()>,
{ {
let db = ctx.db(); let db = ctx.db();
if let Some(existed) = Entity::find() if let Some(existed) = Entity::find()

View File

@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use super::{bangumi, query::InsertManyReturningExt, subscription_episode}; use super::{bangumi, query::InsertManyReturningExt, subscription_episode};
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::app_error::RResult, errors::RecorderResult,
extract::{ extract::{
mikan::{MikanEpisodeMeta, build_mikan_episode_homepage}, mikan::{MikanEpisodeMeta, build_mikan_episode_homepage},
rawname::parse_episode_meta_from_raw_name, rawname::parse_episode_meta_from_raw_name,
@ -140,7 +140,7 @@ impl Model {
subscriber_id: i32, subscriber_id: i32,
subscription_id: i32, subscription_id: i32,
creations: impl IntoIterator<Item = MikanEpsiodeCreation>, creations: impl IntoIterator<Item = MikanEpsiodeCreation>,
) -> RResult<()> { ) -> RecorderResult<()> {
let db = ctx.db(); let db = ctx.db();
let new_episode_active_modes = creations let new_episode_active_modes = creations
.into_iter() .into_iter()
@ -191,7 +191,7 @@ impl ActiveModel {
pub fn from_mikan_episode_meta( pub fn from_mikan_episode_meta(
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
creation: MikanEpsiodeCreation, creation: MikanEpsiodeCreation,
) -> RResult<Self> { ) -> RecorderResult<Self> {
let item = creation.episode; let item = creation.episode;
let bgm = creation.bangumi; let bgm = creation.bangumi;
let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title) let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title)

View File

@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::app_error::{RError, RResult}, errors::app_error::{RecorderResult, RecorderError},
}; };
pub const SEED_SUBSCRIBER: &str = "konobangu"; pub const SEED_SUBSCRIBER: &str = "konobangu";
@ -95,22 +95,22 @@ pub struct SubscriberIdParams {
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {}
impl Model { impl Model {
pub async fn find_seed_subscriber_id(ctx: &dyn AppContextTrait) -> RResult<i32> { pub async fn find_seed_subscriber_id(ctx: &dyn AppContextTrait) -> RecorderResult<i32> {
let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER).await?; let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER).await?;
Ok(subscriber_auth.subscriber_id) Ok(subscriber_auth.subscriber_id)
} }
pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RResult<Self> { pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RecorderResult<Self> {
let db = ctx.db(); let db = ctx.db();
let subscriber = Entity::find_by_id(id) let subscriber = Entity::find_by_id(id)
.one(db) .one(db)
.await? .await?
.ok_or_else(|| RError::from_db_record_not_found("subscriptions::find_by_id"))?; .ok_or_else(|| RecorderError::from_db_record_not_found("subscriptions::find_by_id"))?;
Ok(subscriber) Ok(subscriber)
} }
pub async fn create_root(ctx: &dyn AppContextTrait) -> RResult<Self> { pub async fn create_root(ctx: &dyn AppContextTrait) -> RecorderResult<Self> {
let db = ctx.db(); let db = ctx.db();
let txn = db.begin().await?; let txn = db.begin().await?;

View File

@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize};
use super::{bangumi, episodes, query::filter_values_in}; use super::{bangumi, episodes, query::filter_values_in};
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::app_error::RResult, errors::RecorderResult,
extract::{ extract::{
mikan::{ mikan::{
build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link, build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link,
@ -182,7 +182,7 @@ impl Model {
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
create_dto: SubscriptionCreateDto, create_dto: SubscriptionCreateDto,
subscriber_id: i32, subscriber_id: i32,
) -> RResult<Self> { ) -> RecorderResult<Self> {
let db = ctx.db(); let db = ctx.db();
let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id); let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id);
@ -193,7 +193,7 @@ impl Model {
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>, ids: impl Iterator<Item = i32>,
enabled: bool, enabled: bool,
) -> RResult<()> { ) -> RecorderResult<()> {
let db = ctx.db(); let db = ctx.db();
Entity::update_many() Entity::update_many()
.col_expr(Column::Enabled, Expr::value(enabled)) .col_expr(Column::Enabled, Expr::value(enabled))
@ -206,7 +206,7 @@ impl Model {
pub async fn delete_with_ids( pub async fn delete_with_ids(
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>, ids: impl Iterator<Item = i32>,
) -> RResult<()> { ) -> RecorderResult<()> {
let db = ctx.db(); let db = ctx.db();
Entity::delete_many() Entity::delete_many()
.filter(Column::Id.is_in(ids)) .filter(Column::Id.is_in(ids))
@ -215,7 +215,7 @@ impl Model {
Ok(()) Ok(())
} }
pub async fn pull_subscription(&self, ctx: &dyn AppContextTrait) -> RResult<()> { pub async fn pull_subscription(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
match &self.category { match &self.category {
SubscriptionCategory::Mikan => { SubscriptionCategory::Mikan => {
let mikan_client = ctx.mikan(); let mikan_client = ctx.mikan();
@ -287,7 +287,7 @@ impl Model {
self.id, self.id,
mikan_bangumi_id.to_string(), mikan_bangumi_id.to_string(),
mikan_fansub_id.to_string(), mikan_fansub_id.to_string(),
async |am| -> RResult<()> { async |am| -> RecorderResult<()> {
let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage( let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
mikan_client, mikan_client,
bgm_homepage.clone(), bgm_homepage.clone(),

View File

@ -2,7 +2,7 @@ use async_trait::async_trait;
use sea_orm::{QuerySelect, entity::prelude::*}; use sea_orm::{QuerySelect, entity::prelude::*};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{app::AppContextTrait, errors::app_error::RResult}; use crate::{app::AppContextTrait, errors::RecorderResult};
#[derive( #[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize, Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
@ -76,7 +76,7 @@ impl Model {
pub async fn find_stream_task_by_id( pub async fn find_stream_task_by_id(
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
task_id: i32, task_id: i32,
) -> RResult<Option<(Model, Vec<super::task_stream_item::Model>)>> { ) -> RecorderResult<Option<(Model, Vec<super::task_stream_item::Model>)>> {
let db = ctx.db(); let db = ctx.db();
let res = Entity::find() let res = Entity::find()
.filter(Column::Id.eq(task_id)) .filter(Column::Id.eq(task_id))

View File

@ -8,7 +8,7 @@ use url::Url;
use uuid::Uuid; use uuid::Uuid;
use super::StorageConfig; use super::StorageConfig;
use crate::errors::app_error::{RError, RResult}; use crate::errors::app_error::{RecorderError, RecorderResult};
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
@ -50,7 +50,7 @@ pub struct StorageService {
} }
impl StorageService { impl StorageService {
pub async fn from_config(config: StorageConfig) -> RResult<Self> { pub async fn from_config(config: StorageConfig) -> RecorderResult<Self> {
Ok(Self { Ok(Self {
data_dir: config.data_dir.to_string(), data_dir: config.data_dir.to_string(),
}) })
@ -71,7 +71,7 @@ impl StorageService {
bucket: Option<&str>, bucket: Option<&str>,
filename: &str, filename: &str,
data: Bytes, data: Bytes,
) -> Result<StorageStoredUrl, RError> { ) -> Result<StorageStoredUrl, RecorderError> {
match content_category { match content_category {
StorageContentCategory::Image => { StorageContentCategory::Image => {
let fullname = [ let fullname = [
@ -108,7 +108,7 @@ impl StorageService {
subscriber_id: i32, subscriber_id: i32,
bucket: Option<&str>, bucket: Option<&str>,
filename: &str, filename: &str,
) -> Result<Option<StorageStoredUrl>, RError> { ) -> Result<Option<StorageStoredUrl>, RecorderError> {
match content_category { match content_category {
StorageContentCategory::Image => { StorageContentCategory::Image => {
let fullname = [ let fullname = [
@ -142,7 +142,7 @@ impl StorageService {
subscriber_pid: &str, subscriber_pid: &str,
bucket: Option<&str>, bucket: Option<&str>,
filename: &str, filename: &str,
) -> RResult<Buffer> { ) -> RecorderResult<Buffer> {
match content_category { match content_category {
StorageContentCategory::Image => { StorageContentCategory::Image => {
let fullname = [ let fullname = [

View File

@ -7,7 +7,7 @@ use tokio::sync::{RwLock, mpsc};
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::app_error::{RError, RResult}, errors::app_error::{RecorderError, RecorderResult},
models, models,
}; };
@ -103,41 +103,41 @@ pub trait StreamTaskCoreTrait: Sized {
} }
pub trait StreamTaskReplayLayoutTrait: StreamTaskCoreTrait { pub trait StreamTaskReplayLayoutTrait: StreamTaskCoreTrait {
fn history(&self) -> &[Arc<RResult<Self::Item>>]; fn history(&self) -> &[Arc<RecorderResult<Self::Item>>];
fn resume_from_model( fn resume_from_model(
task: models::tasks::Model, task: models::tasks::Model,
stream_items: Vec<models::task_stream_item::Model>, stream_items: Vec<models::task_stream_item::Model>,
) -> RResult<Self>; ) -> RecorderResult<Self>;
fn running_receiver( fn running_receiver(
&self, &self,
) -> impl Future<Output = Option<mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>>>; ) -> impl Future<Output = Option<mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>>>;
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
fn init_receiver( fn init_receiver(
&self, &self,
) -> impl Future< ) -> impl Future<
Output = ( Output = (
mpsc::UnboundedSender<Arc<RResult<Self::Item>>>, mpsc::UnboundedSender<Arc<RecorderResult<Self::Item>>>,
mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>, mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>,
), ),
>; >;
fn serialize_request(request: Self::Request) -> RResult<serde_json::Value> { fn serialize_request(request: Self::Request) -> RecorderResult<serde_json::Value> {
serde_json::to_value(request).map_err(RError::from) serde_json::to_value(request).map_err(RecorderError::from)
} }
fn serialize_item(item: RResult<Self::Item>) -> RResult<serde_json::Value> { fn serialize_item(item: RecorderResult<Self::Item>) -> RecorderResult<serde_json::Value> {
serde_json::to_value(item).map_err(RError::from) serde_json::to_value(item).map_err(RecorderError::from)
} }
fn deserialize_request(request: serde_json::Value) -> RResult<Self::Request> { fn deserialize_request(request: serde_json::Value) -> RecorderResult<Self::Request> {
serde_json::from_value(request).map_err(RError::from) serde_json::from_value(request).map_err(RecorderError::from)
} }
fn deserialize_item(item: serde_json::Value) -> RResult<RResult<Self::Item>> { fn deserialize_item(item: serde_json::Value) -> RecorderResult<RecorderResult<Self::Item>> {
serde_json::from_value(item).map_err(RError::from) serde_json::from_value(item).map_err(RecorderError::from)
} }
} }
@ -145,15 +145,15 @@ pub trait StreamTaskRunnerTrait: StreamTaskCoreTrait {
fn run( fn run(
context: Arc<dyn AppContextTrait>, context: Arc<dyn AppContextTrait>,
request: &Self::Request, request: &Self::Request,
history: &[Arc<RResult<Self::Item>>], history: &[Arc<RecorderResult<Self::Item>>],
) -> impl Stream<Item = RResult<Self::Item>>; ) -> impl Stream<Item = RecorderResult<Self::Item>>;
} }
pub trait StreamTaskReplayRunnerTrait: StreamTaskRunnerTrait + StreamTaskReplayLayoutTrait { pub trait StreamTaskReplayRunnerTrait: StreamTaskRunnerTrait + StreamTaskReplayLayoutTrait {
fn run_shared( fn run_shared(
&self, &self,
context: Arc<dyn AppContextTrait>, context: Arc<dyn AppContextTrait>,
) -> impl Stream<Item = Arc<RResult<Self::Item>>> { ) -> impl Stream<Item = Arc<RecorderResult<Self::Item>>> {
stream! { stream! {
if let Some(mut receiver) = self.running_receiver().await { if let Some(mut receiver) = self.running_receiver().await {
while let Some(item) = receiver.recv().await { while let Some(item) = receiver.recv().await {
@ -185,9 +185,9 @@ where
{ {
pub meta: TaskMeta, pub meta: TaskMeta,
pub request: Request, pub request: Request,
pub history: Vec<Arc<RResult<Item>>>, pub history: Vec<Arc<RecorderResult<Item>>>,
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
pub channel: Arc<RwLock<Option<ReplayChannel<Arc<RResult<Item>>>>>>, pub channel: Arc<RwLock<Option<ReplayChannel<Arc<RecorderResult<Item>>>>>>,
} }
impl<Request, Item> StreamTaskCoreTrait for StandardStreamTaskReplayLayout<Request, Item> impl<Request, Item> StreamTaskCoreTrait for StandardStreamTaskReplayLayout<Request, Item>
@ -225,14 +225,14 @@ where
Request: Serialize + DeserializeOwned, Request: Serialize + DeserializeOwned,
Item: Serialize + DeserializeOwned + Sync + Send + 'static, Item: Serialize + DeserializeOwned + Sync + Send + 'static,
{ {
fn history(&self) -> &[Arc<RResult<Self::Item>>] { fn history(&self) -> &[Arc<RecorderResult<Self::Item>>] {
&self.history &self.history
} }
fn resume_from_model( fn resume_from_model(
task: models::tasks::Model, task: models::tasks::Model,
stream_items: Vec<models::task_stream_item::Model>, stream_items: Vec<models::task_stream_item::Model>,
) -> RResult<Self> { ) -> RecorderResult<Self> {
Ok(Self { Ok(Self {
meta: TaskMeta { meta: TaskMeta {
task_id: task.id, task_id: task.id,
@ -243,12 +243,14 @@ where
history: stream_items history: stream_items
.into_iter() .into_iter()
.map(|m| Self::deserialize_item(m.item).map(Arc::new)) .map(|m| Self::deserialize_item(m.item).map(Arc::new))
.collect::<RResult<Vec<_>>>()?, .collect::<RecorderResult<Vec<_>>>()?,
channel: Arc::new(RwLock::new(None)), channel: Arc::new(RwLock::new(None)),
}) })
} }
async fn running_receiver(&self) -> Option<mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>> { async fn running_receiver(
&self,
) -> Option<mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>> {
if let Some(channel) = self.channel.read().await.as_ref() { if let Some(channel) = self.channel.read().await.as_ref() {
Some(channel.receiver().await) Some(channel.receiver().await)
} else { } else {
@ -259,8 +261,8 @@ where
async fn init_receiver( async fn init_receiver(
&self, &self,
) -> ( ) -> (
mpsc::UnboundedSender<Arc<RResult<Self::Item>>>, mpsc::UnboundedSender<Arc<RecorderResult<Self::Item>>>,
mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>, mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>,
) { ) {
let channel = ReplayChannel::new(self.history.clone()); let channel = ReplayChannel::new(self.history.clone());
let rx = channel.receiver().await; let rx = channel.receiver().await;

View File

@ -6,7 +6,7 @@ use url::Url;
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::app_error::RResult, errors::RecorderResult,
extract::mikan::{MikanAuthSecrecy, MikanBangumiMeta, web_extract}, extract::mikan::{MikanAuthSecrecy, MikanBangumiMeta, web_extract},
tasks::core::{StandardStreamTaskReplayLayout, StreamTaskRunnerTrait}, tasks::core::{StandardStreamTaskReplayLayout, StreamTaskRunnerTrait},
}; };
@ -24,8 +24,8 @@ impl StreamTaskRunnerTrait for ExtractMikanBangumisMetaFromMyBangumiTask {
fn run( fn run(
context: Arc<dyn AppContextTrait>, context: Arc<dyn AppContextTrait>,
request: &Self::Request, request: &Self::Request,
history: &[Arc<RResult<Self::Item>>], history: &[Arc<RecorderResult<Self::Item>>],
) -> impl Stream<Item = RResult<Self::Item>> { ) -> impl Stream<Item = RecorderResult<Self::Item>> {
let context = context.clone(); let context = context.clone();
web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page( web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page(
context, context,

View File

@ -1,6 +0,0 @@
use crate::{errors::app_error::RResult, fetch::HttpClient};
pub fn build_testing_http_client() -> RResult<HttpClient> {
let mikan_client = HttpClient::default();
Ok(mikan_client)
}

View File

@ -1,17 +1,18 @@
use reqwest::IntoUrl; use fetch::{FetchError, HttpClientConfig, IntoUrl};
use crate::{ use crate::{
errors::app_error::RResult, errors::RecorderResult,
extract::mikan::{MikanClient, MikanConfig}, extract::mikan::{MikanClient, MikanConfig},
fetch::HttpClientConfig,
}; };
pub async fn build_testing_mikan_client(base_mikan_url: impl IntoUrl) -> RResult<MikanClient> { pub async fn build_testing_mikan_client(
base_mikan_url: impl IntoUrl,
) -> RecorderResult<MikanClient> {
let mikan_client = MikanClient::from_config(MikanConfig { let mikan_client = MikanClient::from_config(MikanConfig {
http_client: HttpClientConfig { http_client: HttpClientConfig {
..Default::default() ..Default::default()
}, },
base_url: base_mikan_url.into_url()?, base_url: base_mikan_url.into_url().map_err(FetchError::from)?,
}) })
.await?; .await?;
Ok(mikan_client) Ok(mikan_client)

View File

@ -1,4 +1,3 @@
pub mod app; pub mod app;
pub mod fetch;
pub mod mikan; pub mod mikan;
pub mod tracing; pub mod tracing;

View File

@ -1 +0,0 @@

View File

@ -7,7 +7,7 @@ use super::core::Controller;
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
auth::{AuthUserInfo, header_www_authenticate_middleware}, auth::{AuthUserInfo, header_www_authenticate_middleware},
errors::app_error::RResult, errors::RecorderResult,
}; };
pub const CONTROLLER_PREFIX: &str = "/api/graphql"; pub const CONTROLLER_PREFIX: &str = "/api/graphql";
@ -25,7 +25,7 @@ async fn graphql_handler(
graphql_service.schema.execute(req).await.into() graphql_service.schema.execute(req).await.into()
} }
pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RResult<Controller> { pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
let router = Router::<Arc<dyn AppContextTrait>>::new() let router = Router::<Arc<dyn AppContextTrait>>::new()
.route("/", post(graphql_handler)) .route("/", post(graphql_handler))
.layer(from_fn_with_state(ctx, header_www_authenticate_middleware)); .layer(from_fn_with_state(ctx, header_www_authenticate_middleware));

View File

@ -3,7 +3,7 @@ use std::sync::Arc;
use axum::{Json, Router, extract::State, routing::get}; use axum::{Json, Router, extract::State, routing::get};
use serde::Serialize; use serde::Serialize;
use crate::{app::AppContextTrait, errors::app_error::RResult, web::controller::Controller}; use crate::{app::AppContextTrait, errors::RecorderResult, web::controller::Controller};
pub const CONTROLLER_PREFIX: &str = "/api/metadata"; pub const CONTROLLER_PREFIX: &str = "/api/metadata";
@ -13,7 +13,9 @@ pub struct StandardResponse {
pub message: String, pub message: String,
} }
async fn health(State(ctx): State<Arc<dyn AppContextTrait>>) -> RResult<Json<StandardResponse>> { async fn health(
State(ctx): State<Arc<dyn AppContextTrait>>,
) -> RecorderResult<Json<StandardResponse>> {
ctx.db().ping().await.inspect_err( ctx.db().ping().await.inspect_err(
|err| tracing::error!(err.msg = %err, err.detail = ?err, "health check database ping error"), |err| tracing::error!(err.msg = %err, err.detail = ?err, "health check database ping error"),
)?; )?;
@ -31,7 +33,7 @@ async fn ping() -> Json<StandardResponse> {
}) })
} }
pub async fn create(_context: Arc<dyn AppContextTrait>) -> RResult<Controller> { pub async fn create(_context: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
let router = Router::<Arc<dyn AppContextTrait>>::new() let router = Router::<Arc<dyn AppContextTrait>>::new()
.route("/health", get(health)) .route("/health", get(health))
.route("/ping", get(ping)); .route("/ping", get(ping));

View File

@ -2,13 +2,11 @@ use std::sync::Arc;
use axum::{ use axum::{
Json, Router, Json, Router,
extract::{Query, State}, extract::{Query, Request, State},
http::request::Parts,
routing::get, routing::get,
}; };
use snafu::prelude::*; use snafu::ResultExt;
use super::core::Controller;
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
auth::{ auth::{
@ -16,9 +14,10 @@ use crate::{
errors::OidcRequestRedirectUriSnafu, errors::OidcRequestRedirectUriSnafu,
oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest}, oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest},
}, },
errors::app_error::RResult, errors::RecorderResult,
extract::http::ForwardedRelatedInfo, extract::http::ForwardedRelatedInfo,
models::auth::AuthType, models::auth::AuthType,
web::controller::core::Controller,
}; };
pub const CONTROLLER_PREFIX: &str = "/api/oidc"; pub const CONTROLLER_PREFIX: &str = "/api/oidc";
@ -43,10 +42,11 @@ async fn oidc_callback(
async fn oidc_auth( async fn oidc_auth(
State(ctx): State<Arc<dyn AppContextTrait>>, State(ctx): State<Arc<dyn AppContextTrait>>,
parts: Parts, request: Request,
) -> Result<Json<OidcAuthRequest>, AuthError> { ) -> Result<Json<OidcAuthRequest>, AuthError> {
let auth_service = ctx.auth(); let auth_service = ctx.auth();
if let AuthService::Oidc(oidc_auth_service) = auth_service { if let AuthService::Oidc(oidc_auth_service) = auth_service {
let (parts, _) = request.into_parts();
let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts) let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts)
.resolved_origin() .resolved_origin()
.ok_or(url::ParseError::EmptyHost) .ok_or(url::ParseError::EmptyHost)
@ -73,7 +73,7 @@ async fn oidc_auth(
} }
} }
pub async fn create(_context: Arc<dyn AppContextTrait>) -> RResult<Controller> { pub async fn create(_context: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
let router = Router::<Arc<dyn AppContextTrait>>::new() let router = Router::<Arc<dyn AppContextTrait>>::new()
.route("/auth", get(oidc_auth)) .route("/auth", get(oidc_auth))
.route("/callback", get(oidc_callback)); .route("/callback", get(oidc_callback));

View File

@ -12,7 +12,7 @@ use http::StatusCode;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tower_http::catch_panic::CatchPanicLayer; use tower_http::catch_panic::CatchPanicLayer;
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer}; use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CatchPanic { pub struct CatchPanic {
@ -55,7 +55,7 @@ impl MiddlewareLayer for CatchPanic {
fn apply( fn apply(
&self, &self,
app: Router<Arc<dyn AppContextTrait>>, app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> { ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(CatchPanicLayer::custom(handle_panic))) Ok(app.layer(CatchPanicLayer::custom(handle_panic)))
} }
} }

View File

@ -11,7 +11,7 @@ use axum::Router;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tower_http::compression::CompressionLayer; use tower_http::compression::CompressionLayer;
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer}; use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Compression { pub struct Compression {
@ -38,7 +38,7 @@ impl MiddlewareLayer for Compression {
fn apply( fn apply(
&self, &self,
app: Router<Arc<dyn AppContextTrait>>, app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> { ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(CompressionLayer::new())) Ok(app.layer(CompressionLayer::new()))
} }
} }

View File

@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use tower_http::cors::{self, Any}; use tower_http::cors::{self, Any};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer}; use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
/// CORS middleware configuration /// CORS middleware configuration
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
@ -80,7 +80,7 @@ impl Cors {
/// ///
/// In all of these cases, the error returned will be the result of the /// In all of these cases, the error returned will be the result of the
/// `parse` method of the corresponding type. /// `parse` method of the corresponding type.
pub fn cors(&self) -> RResult<cors::CorsLayer> { pub fn cors(&self) -> RecorderResult<cors::CorsLayer> {
let mut cors: cors::CorsLayer = cors::CorsLayer::new(); let mut cors: cors::CorsLayer = cors::CorsLayer::new();
// testing CORS, assuming https://example.com in the allow list: // testing CORS, assuming https://example.com in the allow list:
@ -160,7 +160,7 @@ impl MiddlewareLayer for Cors {
fn apply( fn apply(
&self, &self,
app: Router<Arc<dyn AppContextTrait>>, app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> { ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(self.cors()?)) Ok(app.layer(self.cors()?))
} }
} }

View File

@ -21,11 +21,11 @@ use axum::{
}, },
response::Response, response::Response,
}; };
use futures_util::future::BoxFuture; use futures::future::BoxFuture;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tower::{Layer, Service}; use tower::{Layer, Service};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer}; use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Etag { pub struct Etag {
@ -52,7 +52,7 @@ impl MiddlewareLayer for Etag {
fn apply( fn apply(
&self, &self,
app: Router<Arc<dyn AppContextTrait>>, app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> { ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(EtagLayer)) Ok(app.layer(EtagLayer))
} }
} }

View File

@ -1,4 +1,3 @@
//! Detect a content type and format and responds accordingly
use axum::{ use axum::{
extract::FromRequestParts, extract::FromRequestParts,
http::{ http::{
@ -8,7 +7,7 @@ use axum::{
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::errors::app_error::RError as Error; use crate::errors::RecorderError as Error;
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
pub struct Format(pub RespondTo); pub struct Format(pub RespondTo);

View File

@ -15,7 +15,7 @@ use tower_http::{add_extension::AddExtensionLayer, trace::TraceLayer};
use crate::{ use crate::{
app::{AppContextTrait, Environment}, app::{AppContextTrait, Environment},
errors::app_error::RResult, errors::RecorderResult,
web::middleware::{MiddlewareLayer, request_id::LocoRequestId}, web::middleware::{MiddlewareLayer, request_id::LocoRequestId},
}; };
@ -70,7 +70,7 @@ impl MiddlewareLayer for Middleware {
fn apply( fn apply(
&self, &self,
app: Router<Arc<dyn AppContextTrait>>, app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> { ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app Ok(app
.layer( .layer(
TraceLayer::new_for_http().make_span_with(|request: &http::Request<_>| { TraceLayer::new_for_http().make_span_with(|request: &http::Request<_>| {

View File

@ -14,7 +14,7 @@ use std::sync::Arc;
use axum::Router; use axum::Router;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{app::AppContextTrait, errors::app_error::RResult}; use crate::{app::AppContextTrait, errors::RecorderResult};
/// Trait representing the behavior of middleware components in the application. /// Trait representing the behavior of middleware components in the application.
/// When implementing a new middleware, make sure to go over this checklist: /// When implementing a new middleware, make sure to go over this checklist:
@ -55,7 +55,7 @@ pub trait MiddlewareLayer {
fn apply( fn apply(
&self, &self,
app: Router<Arc<dyn AppContextTrait>>, app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>>; ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>>;
} }
#[allow(clippy::unnecessary_lazy_evaluations)] #[allow(clippy::unnecessary_lazy_evaluations)]

View File

@ -24,7 +24,7 @@ use axum::{
http::{header::HeaderMap, request::Parts}, http::{header::HeaderMap, request::Parts},
response::Response, response::Response,
}; };
use futures_util::future::BoxFuture; use futures::future::BoxFuture;
use ipnetwork::IpNetwork; use ipnetwork::IpNetwork;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use snafu::ResultExt; use snafu::ResultExt;
@ -33,7 +33,7 @@ use tracing::error;
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
errors::app_error::{RError, RResult}, errors::app_error::{RecorderError, RecorderResult},
web::middleware::MiddlewareLayer, web::middleware::MiddlewareLayer,
}; };
@ -127,7 +127,7 @@ impl MiddlewareLayer for RemoteIpMiddleware {
fn apply( fn apply(
&self, &self,
app: Router<Arc<dyn AppContextTrait>>, app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> { ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(RemoteIPLayer::new(self)?)) Ok(app.layer(RemoteIPLayer::new(self)?))
} }
} }
@ -225,7 +225,7 @@ impl RemoteIPLayer {
/// ///
/// # Errors /// # Errors
/// Fails if invalid header values found /// Fails if invalid header values found
pub fn new(config: &RemoteIpMiddleware) -> RResult<Self> { pub fn new(config: &RemoteIpMiddleware) -> RecorderResult<Self> {
Ok(Self { Ok(Self {
trusted_proxies: config trusted_proxies: config
.trusted_proxies .trusted_proxies
@ -236,14 +236,14 @@ impl RemoteIPLayer {
.map(|proxy| { .map(|proxy| {
IpNetwork::from_str(proxy) IpNetwork::from_str(proxy)
.boxed() .boxed()
.with_whatever_context::<_, _, RError>(|_| { .with_whatever_context::<_, _, RecorderError>(|_| {
format!( format!(
"remote ip middleare cannot parse trusted proxy \ "remote ip middleare cannot parse trusted proxy \
configuration: `{proxy}`" configuration: `{proxy}`"
) )
}) })
}) })
.collect::<RResult<Vec<_>>>() .collect::<RecorderResult<Vec<_>>>()
}) })
.transpose()?, .transpose()?,
}) })

View File

@ -18,7 +18,7 @@ const MAX_LEN: usize = 255;
use std::sync::{Arc, OnceLock}; use std::sync::{Arc, OnceLock};
use crate::errors::app_error::RResult; use crate::errors::RecorderResult;
static ID_CLEANUP: OnceLock<Regex> = OnceLock::new(); static ID_CLEANUP: OnceLock<Regex> = OnceLock::new();
@ -57,7 +57,7 @@ impl MiddlewareLayer for RequestId {
fn apply( fn apply(
&self, &self,
app: Router<Arc<dyn AppContextTrait>>, app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> { ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(axum::middleware::from_fn(request_id_middleware))) Ok(app.layer(axum::middleware::from_fn(request_id_middleware)))
} }
} }

View File

@ -15,13 +15,13 @@ use axum::{
http::{HeaderName, HeaderValue, Request}, http::{HeaderName, HeaderValue, Request},
response::Response, response::Response,
}; };
use futures_util::future::BoxFuture; use futures::future::BoxFuture;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::{self, json}; use serde_json::{self, json};
use snafu::whatever; use snafu::whatever;
use tower::{Layer, Service}; use tower::{Layer, Service};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer}; use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
static PRESETS: OnceLock<HashMap<String, BTreeMap<String, String>>> = OnceLock::new(); static PRESETS: OnceLock<HashMap<String, BTreeMap<String, String>>> = OnceLock::new();
fn get_presets() -> &'static HashMap<String, BTreeMap<String, String>> { fn get_presets() -> &'static HashMap<String, BTreeMap<String, String>> {
@ -115,7 +115,7 @@ impl MiddlewareLayer for SecureHeader {
fn apply( fn apply(
&self, &self,
app: Router<Arc<dyn AppContextTrait>>, app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> { ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(SecureHeaders::new(self)?)) Ok(app.layer(SecureHeaders::new(self)?))
} }
} }
@ -124,7 +124,7 @@ impl SecureHeader {
/// Converts the configuration into a list of headers. /// Converts the configuration into a list of headers.
/// ///
/// Applies the preset headers and any custom overrides. /// Applies the preset headers and any custom overrides.
fn as_headers(&self) -> RResult<Vec<(HeaderName, HeaderValue)>> { fn as_headers(&self) -> RecorderResult<Vec<(HeaderName, HeaderValue)>> {
let mut headers = vec![]; let mut headers = vec![];
let preset = &self.preset; let preset = &self.preset;
@ -147,7 +147,7 @@ impl SecureHeader {
fn push_headers( fn push_headers(
headers: &mut Vec<(HeaderName, HeaderValue)>, headers: &mut Vec<(HeaderName, HeaderValue)>,
hm: &BTreeMap<String, String>, hm: &BTreeMap<String, String>,
) -> RResult<()> { ) -> RecorderResult<()> {
for (k, v) in hm { for (k, v) in hm {
headers.push(( headers.push((
HeaderName::from_bytes(k.clone().as_bytes())?, HeaderName::from_bytes(k.clone().as_bytes())?,
@ -171,7 +171,7 @@ impl SecureHeaders {
/// ///
/// # Errors /// # Errors
/// Returns an error if any header values are invalid. /// Returns an error if any header values are invalid.
pub fn new(config: &SecureHeader) -> RResult<Self> { pub fn new(config: &SecureHeader) -> RecorderResult<Self> {
Ok(Self { Ok(Self {
headers: config.as_headers()?, headers: config.as_headers()?,
}) })

View File

@ -16,7 +16,7 @@ use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use tower_http::timeout::TimeoutLayer; use tower_http::timeout::TimeoutLayer;
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer}; use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
/// Timeout middleware configuration /// Timeout middleware configuration
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
@ -61,7 +61,7 @@ impl MiddlewareLayer for TimeOut {
fn apply( fn apply(
&self, &self,
app: Router<Arc<dyn AppContextTrait>>, app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> { ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(TimeoutLayer::new(Duration::from_millis(self.timeout)))) Ok(app.layer(TimeoutLayer::new(Duration::from_millis(self.timeout))))
} }
} }

View File

@ -0,0 +1,49 @@
[package]
name = "downloader"
version = "0.1.0"
edition = "2024"
[features]
default = []
testcontainers = [
"dep:testcontainers",
"dep:testcontainers-modules",
"dep:testcontainers-ext",
"dep:testing-torrents",
]
[dependencies]
futures = { workspace = true }
testcontainers = { workspace = true, optional = true }
testcontainers-modules = { workspace = true, optional = true }
testcontainers-ext = { workspace = true, optional = true }
tokio = { workspace = true }
serde_json = { workspace = true }
async-trait = { workspace = true }
tracing = { workspace = true }
snafu = { workspace = true }
url = { workspace = true }
serde = { workspace = true }
anyhow = { workspace = true }
quirks_path = { workspace = true }
itertools = { workspace = true }
chrono = { workspace = true }
bytes = { workspace = true }
serde-value = "0.7"
qbit-rs = { git = "https://github.com/lonelyhentxi/qbit.git", rev = "72d53138ebe", features = [
"default",
"builder",
] }
merge-struct = "0.1"
librqbit-core = "4"
librqbit = { version = "8", features = ["async-bt", "watch"] }
util = { workspace = true }
testing-torrents = { workspace = true, optional = true }
fetch = { workspace = true }
[dev-dependencies]
reqwest = { workspace = true }
tracing-subscriber = { workspace = true }

View File

@ -1,6 +1,6 @@
use async_trait::async_trait; use async_trait::async_trait;
use crate::downloader::{ use crate::{
DownloaderError, DownloaderError,
bittorrent::task::{ bittorrent::task::{
TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait, TorrentTaskTrait, TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait, TorrentTaskTrait,
@ -21,7 +21,7 @@ where
async fn pause_downloads( async fn pause_downloads(
&self, &self,
selector: Self::Selector, selector: <Self as DownloaderTrait>::Selector,
) -> Result<Self::IdSelector, DownloaderError> { ) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?; let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.pause_torrents(hashes).await self.pause_torrents(hashes).await
@ -29,14 +29,14 @@ where
async fn resume_downloads( async fn resume_downloads(
&self, &self,
selector: Self::Selector, selector: <Self as DownloaderTrait>::Selector,
) -> Result<Self::IdSelector, DownloaderError> { ) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?; let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.resume_torrents(hashes).await self.resume_torrents(hashes).await
} }
async fn remove_downloads( async fn remove_downloads(
&self, &self,
selector: Self::Selector, selector: <Self as DownloaderTrait>::Selector,
) -> Result<Self::IdSelector, DownloaderError> { ) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?; let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.remove_torrents(hashes).await self.remove_torrents(hashes).await
@ -44,7 +44,7 @@ where
async fn query_torrent_hashes( async fn query_torrent_hashes(
&self, &self,
selector: Self::Selector, selector: <Self as DownloaderTrait>::Selector,
) -> Result<Self::IdSelector, DownloaderError> { ) -> Result<Self::IdSelector, DownloaderError> {
let hashes = match selector.try_into_ids_only() { let hashes = match selector.try_into_ids_only() {
Ok(hashes) => Self::IdSelector::from_iter(hashes), Ok(hashes) => Self::IdSelector::from_iter(hashes),

View File

@ -0,0 +1,6 @@
//! Shared bittorrent abstractions (tasks, sources, downloader traits)
//! implemented by the concrete backends (qBittorrent, rqbit).

pub mod defs;
pub mod downloader;
pub mod source;
pub mod task;

// Re-export the most commonly used constants at the module root.
pub use defs::{BITTORRENT_MIME_TYPE, MAGNET_SCHEMA};

View File

@ -4,18 +4,14 @@ use std::{
}; };
use bytes::Bytes; use bytes::Bytes;
use fetch::{bytes::fetch_bytes, client::core::HttpClientTrait};
use librqbit_core::{magnet::Magnet, torrent_metainfo, torrent_metainfo::TorrentMetaV1Owned}; use librqbit_core::{magnet::Magnet, torrent_metainfo, torrent_metainfo::TorrentMetaV1Owned};
use snafu::ResultExt; use snafu::ResultExt;
use url::Url; use url::Url;
use util::errors::AnyhowResultExt;
use crate::{ use super::defs::MAGNET_SCHEMA;
downloader::errors::{ use crate::errors::{DownloadFetchSnafu, DownloaderError, MagnetFormatSnafu, TorrentMetaSnafu};
DownloadFetchSnafu, DownloaderError, MagnetFormatSnafu, TorrentMetaSnafu,
},
errors::RAnyhowResultExt,
extract::bittorrent::core::MAGNET_SCHEMA,
fetch::{bytes::fetch_bytes, client::core::HttpClientTrait},
};
pub trait HashTorrentSourceTrait: Sized { pub trait HashTorrentSourceTrait: Sized {
fn hash_info(&self) -> Cow<'_, str>; fn hash_info(&self) -> Cow<'_, str>;
@ -90,7 +86,7 @@ impl TorrentUrlSource {
pub struct TorrentFileSource { pub struct TorrentFileSource {
pub url: Option<String>, pub url: Option<String>,
pub payload: Bytes, pub payload: Bytes,
pub meta: TorrentMetaV1Owned, pub meta: Box<TorrentMetaV1Owned>,
pub filename: String, pub filename: String,
} }
@ -114,7 +110,7 @@ impl TorrentFileSource {
Ok(TorrentFileSource { Ok(TorrentFileSource {
url, url,
payload: bytes, payload: bytes,
meta, meta: Box::new(meta),
filename, filename,
}) })
} }

View File

@ -2,7 +2,7 @@ use std::{borrow::Cow, hash::Hash};
use quirks_path::{Path, PathBuf}; use quirks_path::{Path, PathBuf};
use crate::downloader::{ use crate::{
bittorrent::source::HashTorrentSource, bittorrent::source::HashTorrentSource,
core::{DownloadCreationTrait, DownloadIdTrait, DownloadStateTrait, DownloadTaskTrait}, core::{DownloadCreationTrait, DownloadIdTrait, DownloadStateTrait, DownloadTaskTrait},
}; };
@ -11,6 +11,12 @@ pub const TORRENT_TAG_NAME: &str = "konobangu";
pub trait TorrentHashTrait: DownloadIdTrait + Send + Hash {} pub trait TorrentHashTrait: DownloadIdTrait + Send + Hash {}
pub type SimpleTorrentHash = String;
impl DownloadIdTrait for SimpleTorrentHash {}
impl TorrentHashTrait for SimpleTorrentHash {}
pub trait TorrentStateTrait: DownloadStateTrait {} pub trait TorrentStateTrait: DownloadStateTrait {}
pub trait TorrentTaskTrait: DownloadTaskTrait pub trait TorrentTaskTrait: DownloadTaskTrait

View File

@ -1,8 +1,7 @@
use std::{borrow::Cow, time::Duration}; use std::{borrow::Cow, time::Duration};
use snafu::prelude::*; use snafu::prelude::*;
use util::errors::OptDynErr;
use crate::errors::OptDynErr;
#[derive(Snafu, Debug)] #[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))] #[snafu(visibility(pub(crate)))]

View File

@ -8,7 +8,6 @@ use std::{
use async_trait::async_trait; use async_trait::async_trait;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use futures_util::future::try_join_all;
use itertools::Itertools; use itertools::Itertools;
use merge_struct::merge; use merge_struct::merge;
use qbit_rs::{ use qbit_rs::{
@ -27,7 +26,7 @@ use tokio::{
use tracing::instrument; use tracing::instrument;
use url::Url; use url::Url;
use crate::downloader::{ use crate::{
DownloaderError, DownloaderError,
bittorrent::{ bittorrent::{
downloader::TorrentDownloaderTrait, downloader::TorrentDownloaderTrait,
@ -409,8 +408,8 @@ impl DownloaderTrait for QBittorrentDownloader {
async fn add_downloads( async fn add_downloads(
&self, &self,
creation: Self::Creation, creation: <Self as DownloaderTrait>::Creation,
) -> Result<HashSet<Self::Id>, DownloaderError> { ) -> Result<HashSet<<Self as DownloaderTrait>::Id>, DownloaderError> {
let tags = { let tags = {
let mut tags = vec![TORRENT_TAG_NAME.to_string()]; let mut tags = vec![TORRENT_TAG_NAME.to_string()];
tags.extend(creation.tags); tags.extend(creation.tags);
@ -506,21 +505,21 @@ impl DownloaderTrait for QBittorrentDownloader {
async fn pause_downloads( async fn pause_downloads(
&self, &self,
selector: Self::Selector, selector: <Self as DownloaderTrait>::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> { ) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::pause_downloads(self, selector).await <Self as TorrentDownloaderTrait>::pause_downloads(self, selector).await
} }
async fn resume_downloads( async fn resume_downloads(
&self, &self,
selector: Self::Selector, selector: <Self as DownloaderTrait>::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> { ) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::resume_downloads(self, selector).await <Self as TorrentDownloaderTrait>::resume_downloads(self, selector).await
} }
async fn remove_downloads( async fn remove_downloads(
&self, &self,
selector: Self::Selector, selector: <Self as DownloaderTrait>::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> { ) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::remove_downloads(self, selector).await <Self as TorrentDownloaderTrait>::remove_downloads(self, selector).await
} }
@ -528,7 +527,7 @@ impl DownloaderTrait for QBittorrentDownloader {
async fn query_downloads( async fn query_downloads(
&self, &self,
selector: QBittorrentSelector, selector: QBittorrentSelector,
) -> Result<Vec<Self::Task>, DownloaderError> { ) -> Result<Vec<<Self as DownloaderTrait>::Task>, DownloaderError> {
let selector = match selector { let selector = match selector {
QBittorrentSelector::Hash(h) => h.into(), QBittorrentSelector::Hash(h) => h.into(),
QBittorrentSelector::Complex(c) => c, QBittorrentSelector::Complex(c) => c,
@ -536,7 +535,7 @@ impl DownloaderTrait for QBittorrentDownloader {
let torrent_list = self.client.get_torrent_list(selector.query).await?; let torrent_list = self.client.get_torrent_list(selector.query).await?;
let torrent_contents = try_join_all(torrent_list.iter().map(|s| async { let torrent_contents = futures::future::try_join_all(torrent_list.iter().map(|s| async {
if let Some(hash) = &s.hash { if let Some(hash) = &s.hash {
self.client.get_torrent_contents(hash as &str, None).await self.client.get_torrent_contents(hash as &str, None).await
} else { } else {
@ -557,11 +556,12 @@ impl DownloaderTrait for QBittorrentDownloader {
#[async_trait] #[async_trait]
impl TorrentDownloaderTrait for QBittorrentDownloader { impl TorrentDownloaderTrait for QBittorrentDownloader {
type IdSelector = DownloadIdSelector<Self::Task>; type IdSelector = DownloadIdSelector<Self::Task>;
#[instrument(level = "debug", skip(self))] #[instrument(level = "debug", skip(self))]
async fn pause_torrents( async fn pause_torrents(
&self, &self,
hashes: Self::IdSelector, hashes: <Self as TorrentDownloaderTrait>::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> { ) -> Result<<Self as TorrentDownloaderTrait>::IdSelector, DownloaderError> {
self.client.pause_torrents(hashes.clone()).await?; self.client.pause_torrents(hashes.clone()).await?;
Ok(hashes) Ok(hashes)
} }
@ -569,8 +569,8 @@ impl TorrentDownloaderTrait for QBittorrentDownloader {
#[instrument(level = "debug", skip(self))] #[instrument(level = "debug", skip(self))]
async fn resume_torrents( async fn resume_torrents(
&self, &self,
hashes: Self::IdSelector, hashes: <Self as TorrentDownloaderTrait>::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> { ) -> Result<<Self as TorrentDownloaderTrait>::IdSelector, DownloaderError> {
self.client.resume_torrents(hashes.clone()).await?; self.client.resume_torrents(hashes.clone()).await?;
Ok(hashes) Ok(hashes)
} }
@ -578,8 +578,8 @@ impl TorrentDownloaderTrait for QBittorrentDownloader {
#[instrument(level = "debug", skip(self))] #[instrument(level = "debug", skip(self))]
async fn remove_torrents( async fn remove_torrents(
&self, &self,
hashes: Self::IdSelector, hashes: <Self as TorrentDownloaderTrait>::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> { ) -> Result<<Self as TorrentDownloaderTrait>::IdSelector, DownloaderError> {
self.client self.client
.delete_torrents(hashes.clone(), Some(true)) .delete_torrents(hashes.clone(), Some(true))
.await?; .await?;

View File

@ -6,30 +6,28 @@ use qbit_rs::model::{
}; };
use quirks_path::{Path, PathBuf}; use quirks_path::{Path, PathBuf};
use crate::downloader::{ use crate::{
DownloaderError, DownloaderError,
bittorrent::{ bittorrent::{
source::HashTorrentSource, source::HashTorrentSource,
task::{TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait, TorrentTaskTrait}, task::{SimpleTorrentHash, TorrentCreationTrait, TorrentStateTrait, TorrentTaskTrait},
}, },
core::{ core::{
DownloadCreationTrait, DownloadIdSelector, DownloadIdTrait, DownloadSelectorTrait, DownloadCreationTrait, DownloadIdSelector, DownloadSelectorTrait, DownloadStateTrait,
DownloadStateTrait, DownloadTaskTrait, DownloadTaskTrait,
}, },
}; };
pub type QBittorrentHash = String; pub type QBittorrentHash = SimpleTorrentHash;
impl DownloadIdTrait for QBittorrentHash {}
impl TorrentHashTrait for QBittorrentHash {}
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] #[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct QBittorrentState(Option<State>); pub struct QBittorrentState(Option<State>);
impl DownloadStateTrait for QBittorrentState {} impl From<State> for QBittorrentState {
fn from(value: State) -> Self {
impl TorrentStateTrait for QBittorrentState {} Self(Some(value))
}
}
impl From<Option<State>> for QBittorrentState { impl From<Option<State>> for QBittorrentState {
fn from(value: Option<State>) -> Self { fn from(value: Option<State>) -> Self {
@ -37,6 +35,10 @@ impl From<Option<State>> for QBittorrentState {
} }
} }
impl DownloadStateTrait for QBittorrentState {}
impl TorrentStateTrait for QBittorrentState {}
#[derive(Debug)] #[derive(Debug)]
pub struct QBittorrentTask { pub struct QBittorrentTask {
pub hash_info: QBittorrentHash, pub hash_info: QBittorrentHash,

View File

@ -3,10 +3,9 @@ use std::time::Duration;
use chrono::Utc; use chrono::Utc;
use qbit_rs::model::{GetTorrentListArg, TorrentFilter as QbitTorrentFilter}; use qbit_rs::model::{GetTorrentListArg, TorrentFilter as QbitTorrentFilter};
use quirks_path::Path; use quirks_path::Path;
use snafu::{OptionExt, ResultExt}; use snafu::OptionExt;
use crate::{ use crate::{
downloader::{
DownloaderError, DownloaderError,
bittorrent::{ bittorrent::{
downloader::TorrentDownloaderTrait, source::HashTorrentSource, task::TorrentTaskTrait, downloader::TorrentDownloaderTrait, source::HashTorrentSource, task::TorrentTaskTrait,
@ -20,9 +19,6 @@ use crate::{
}, },
}, },
utils::path_equals_as_file_url, utils::path_equals_as_file_url,
},
errors::{RError, RResult},
test_utils::fetch::build_testing_http_client,
}; };
fn get_tmp_qbit_test_folder() -> &'static str { fn get_tmp_qbit_test_folder() -> &'static str {
@ -35,7 +31,7 @@ fn get_tmp_qbit_test_folder() -> &'static str {
#[cfg(feature = "testcontainers")] #[cfg(feature = "testcontainers")]
pub async fn create_qbit_testcontainers() pub async fn create_qbit_testcontainers()
-> RResult<testcontainers::ContainerRequest<testcontainers::GenericImage>> { -> anyhow::Result<testcontainers::ContainerRequest<testcontainers::GenericImage>> {
use testcontainers::{ use testcontainers::{
GenericImage, GenericImage,
core::{ core::{
@ -66,13 +62,13 @@ pub async fn create_qbit_testcontainers()
#[tokio::test] #[tokio::test]
async fn test_qbittorrent_downloader() { async fn test_qbittorrent_downloader() {
let hash = "47ee2d69e7f19af783ad896541a07b012676f858".to_string(); let hash = "47ee2d69e7f19af783ad896541a07b012676f858".to_string();
let torrent_url = "https://mikanani.me/Download/20240301/{}.torrent"; let torrent_url = format!("https://mikanani.me/Download/20240301/{}.torrent", hash);
let _ = test_qbittorrent_downloader_impl(torrent_url, hash, None, None).await; let _ = test_qbittorrent_downloader_impl(torrent_url, hash, None, None).await;
} }
#[cfg(feature = "testcontainers")] #[cfg(feature = "testcontainers")]
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
async fn test_qbittorrent_downloader() -> RResult<()> { async fn test_qbittorrent_downloader() -> anyhow::Result<()> {
use testcontainers::runners::AsyncRunner; use testcontainers::runners::AsyncRunner;
use testing_torrents::{TestTorrentRequest, TestTorrentResponse, TestingTorrentFileItem}; use testing_torrents::{TestTorrentRequest, TestTorrentResponse, TestingTorrentFileItem};
use tokio::io::AsyncReadExt; use tokio::io::AsyncReadExt;
@ -155,8 +151,8 @@ async fn test_qbittorrent_downloader_impl(
torrent_hash: String, torrent_hash: String,
username: Option<&str>, username: Option<&str>,
password: Option<&str>, password: Option<&str>,
) -> RResult<()> { ) -> anyhow::Result<()> {
let http_client = build_testing_http_client()?; let http_client = fetch::test_util::build_testing_http_client()?;
let base_save_path = Path::new(get_tmp_qbit_test_folder()); let base_save_path = Path::new(get_tmp_qbit_test_folder());
let downloader = QBittorrentDownloader::from_creation(QBittorrentDownloaderCreation { let downloader = QBittorrentDownloader::from_creation(QBittorrentDownloaderCreation {
@ -255,9 +251,7 @@ async fn test_qbittorrent_downloader_impl(
assert!( assert!(
path_equals_as_file_url(actual_content_path, moved_torrent_path) path_equals_as_file_url(actual_content_path, moved_torrent_path)
.whatever_context::<_, RError>( .expect("failed to compare actual torrent path and found expected torrent path")
"failed to compare actual torrent path and found expected torrent path"
)?
); );
downloader downloader

View File

@ -0,0 +1 @@
/// Creation parameters for constructing an rqbit downloader instance.
///
/// Currently empty — the rqbit backend is a stub and takes no configuration
/// yet. Kept as a braced struct so fields can be added later without
/// changing construction syntax at existing call sites.
///
/// Derives match the sibling creation type (`RqbitCreation`) for
/// consistency across the module.
#[derive(Debug, Clone, Default)]
pub struct RqbitDownloaderCreation {}

View File

@ -1,3 +1,4 @@
pub mod downloader; pub mod downloader;
pub mod source;
pub mod task; pub mod task;
#[cfg(test)]
mod test;

View File

@ -0,0 +1,215 @@
use std::{borrow::Cow, time::Duration};
use itertools::Itertools;
use qbit_rs::model::{
GetTorrentListArg, State, Torrent as QbitTorrent, TorrentContent as QbitTorrentContent,
};
use quirks_path::{Path, PathBuf};
use crate::{
DownloaderError,
bittorrent::{
source::HashTorrentSource,
task::{SimpleTorrentHash, TorrentCreationTrait, TorrentStateTrait, TorrentTaskTrait},
},
core::{
DownloadCreationTrait, DownloadIdSelector, DownloadSelectorTrait, DownloadStateTrait,
DownloadTaskTrait,
},
};
/// Info-hash identifier for rqbit-managed torrents (string form).
pub type RqbitHash = SimpleTorrentHash;

/// Download state of an rqbit task; `None` means the backend has not
/// reported a state.
///
/// NOTE(review): this wraps the qbit_rs `State` model even though the module
/// targets rqbit — presumably a placeholder until rqbit-native state types
/// exist; confirm before extending.
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct RqbitState(Option<State>);

impl DownloadStateTrait for RqbitState {}

impl TorrentStateTrait for RqbitState {}

impl From<Option<State>> for RqbitState {
    fn from(value: Option<State>) -> Self {
        Self(value)
    }
}
/// A single torrent download tracked by the rqbit downloader, pairing the
/// backend's torrent record with its file listing and a parsed state.
///
/// NOTE(review): fields reuse qbit_rs models (`QbitTorrent`,
/// `QbitTorrentContent`) — presumably a stub mirroring the qBittorrent
/// implementation; verify once a real rqbit client is wired in.
#[derive(Debug)]
pub struct RqbitTask {
    // Info hash; also serves as the task id (see `DownloadTaskTrait::id`).
    pub hash_info: RqbitHash,
    // Raw torrent record returned by the backend query.
    pub torrent: QbitTorrent,
    // Per-file contents of the torrent.
    pub contents: Vec<QbitTorrentContent>,
    // Parsed download state (inner `None` when unreported).
    pub state: RqbitState,
}
impl RqbitTask {
pub fn from_query(
torrent: QbitTorrent,
contents: Vec<QbitTorrentContent>,
) -> Result<Self, DownloaderError> {
let hash = torrent
.hash
.clone()
.ok_or_else(|| DownloaderError::TorrentMetaError {
message: "missing hash".to_string(),
source: None.into(),
})?;
let state = RqbitState::from(torrent.state.clone());
Ok(Self {
hash_info: hash,
contents,
state,
torrent,
})
}
}
impl DownloadTaskTrait for RqbitTask {
    type State = RqbitState;
    type Id = RqbitHash;

    fn id(&self) -> &Self::Id {
        &self.hash_info
    }

    fn into_id(self) -> Self::Id {
        self.hash_info
    }

    // Prefer the backend-reported name.
    //
    // NOTE(review): the fallback `DownloadTaskTrait::name(self)` dispatches
    // back to this very override (Rust has no way to call a trait's default
    // body from an overriding impl), so a torrent with no name would recurse
    // infinitely. Likely intended to fall back to the hash or the trait's
    // default — confirm against `DownloadTaskTrait`'s definition and fix.
    fn name(&self) -> Cow<'_, str> {
        self.torrent
            .name
            .as_deref()
            .map(Cow::Borrowed)
            .unwrap_or_else(|| DownloadTaskTrait::name(self))
    }

    // Download speed in bytes/s; negative backend values become `None`.
    fn speed(&self) -> Option<u64> {
        self.torrent.dlspeed.and_then(|s| u64::try_from(s).ok())
    }

    fn state(&self) -> &Self::State {
        &self.state
    }

    // Bytes downloaded so far; negative values map to `None` via `try_from`.
    fn dl_bytes(&self) -> Option<u64> {
        self.torrent.downloaded.and_then(|v| u64::try_from(v).ok())
    }

    // Total payload size in bytes.
    fn total_bytes(&self) -> Option<u64> {
        self.torrent.size.and_then(|v| u64::try_from(v).ok())
    }

    // Bytes still to download.
    fn left_bytes(&self) -> Option<u64> {
        self.torrent.amount_left.and_then(|v| u64::try_from(v).ok())
    }

    // Elapsed active time, seconds granularity.
    fn et(&self) -> Option<Duration> {
        self.torrent
            .time_active
            .and_then(|v| u64::try_from(v).ok())
            .map(Duration::from_secs)
    }

    // Estimated time remaining, seconds granularity.
    fn eta(&self) -> Option<Duration> {
        self.torrent
            .eta
            .and_then(|v| u64::try_from(v).ok())
            .map(Duration::from_secs)
    }

    // Completion fraction; backend reports a wider float, narrowed to f32.
    fn progress(&self) -> Option<f32> {
        self.torrent.progress.as_ref().map(|s| *s as f32)
    }
}
impl TorrentTaskTrait for RqbitTask {
    /// The torrent's info hash, borrowed from the task id.
    fn hash_info(&self) -> &str {
        self.hash_info.as_str()
    }

    /// Tags as a lazy iterator over the backend's comma-separated field:
    /// each entry trimmed, empty entries dropped.
    fn tags(&self) -> impl Iterator<Item = Cow<'_, str>> {
        let raw = self.torrent.tags.as_deref().unwrap_or("");
        raw.split(',').filter_map(|piece| {
            let tag = piece.trim();
            if tag.is_empty() {
                None
            } else {
                Some(Cow::Borrowed(tag))
            }
        })
    }

    /// Backend-assigned category, if any.
    fn category(&self) -> Option<Cow<'_, str>> {
        self.torrent.category.as_deref().map(Cow::Borrowed)
    }
}
/// Parameters for creating new downloads on the rqbit downloader.
#[derive(Debug, Clone, Default)]
pub struct RqbitCreation {
    // Directory the torrent payload is saved into.
    pub save_path: PathBuf,
    // Extra tags to attach to the created downloads.
    pub tags: Vec<String>,
    // Optional category to file the downloads under.
    pub category: Option<String>,
    // Torrent sources (magnets, .torrent payloads, …) to add.
    pub sources: Vec<HashTorrentSource>,
}

impl DownloadCreationTrait for RqbitCreation {
    type Task = RqbitTask;
}

impl TorrentCreationTrait for RqbitCreation {
    fn save_path(&self) -> &Path {
        self.save_path.as_ref()
    }

    fn save_path_mut(&mut self) -> &mut PathBuf {
        &mut self.save_path
    }

    fn sources_mut(&mut self) -> &mut Vec<HashTorrentSource> {
        &mut self.sources
    }
}
/// Selector addressing tasks directly by their info hashes.
pub type RqbitHashSelector = DownloadIdSelector<RqbitTask>;

/// Selector expressed as a full backend list query.
pub struct RqbitComplexSelector {
    // Raw query forwarded to the backend's torrent-list endpoint.
    pub query: GetTorrentListArg,
}

impl From<RqbitHashSelector> for RqbitComplexSelector {
    // Lower a hash selector into a list query; the backend expects the
    // hashes joined with '|'.
    fn from(value: RqbitHashSelector) -> Self {
        Self {
            query: GetTorrentListArg {
                hashes: Some(value.ids.join("|")),
                ..Default::default()
            },
        }
    }
}

impl DownloadSelectorTrait for RqbitComplexSelector {
    type Id = RqbitHash;
    type Task = RqbitTask;
}
/// Either form of task selection supported by the rqbit downloader.
pub enum RqbitSelector {
    Hash(RqbitHashSelector),
    Complex(RqbitComplexSelector),
}

impl DownloadSelectorTrait for RqbitSelector {
    type Id = RqbitHash;
    type Task = RqbitTask;

    /// Extracts the plain id list when possible.
    ///
    /// A hash selector always reduces to its ids; a complex selector only
    /// does so when its query is hash-only, otherwise it is returned intact
    /// (rewrapped) so the caller can fall back to a full query.
    fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
        match self {
            RqbitSelector::Complex(c) => c.try_into_ids_only().map_err(RqbitSelector::Complex),
            // An id selector is by definition ids-only, so this cannot fail.
            // The previous `.into_iter()` + `.collect_vec()` round-trip on
            // the already-materialized Vec was a no-op and has been removed.
            RqbitSelector::Hash(h) => Ok(h
                .try_into_ids_only()
                .unwrap_or_else(|_| unreachable!("hash selector must contain hashes"))),
        }
    }
}

View File

35
packages/fetch/Cargo.toml Normal file
View File

@ -0,0 +1,35 @@
[package]
name = "fetch"
version = "0.1.0"
edition = "2024"
[dependencies]
snafu = { workspace = true }
bytes = { workspace = true }
url = { workspace = true }
serde = { workspace = true }
serde_with = { workspace = true }
lazy_static = { workspace = true }
serde_json = { workspace = true }
axum = { workspace = true }
axum-extra = { workspace = true }
async-trait = { workspace = true }
moka = { workspace = true }
reqwest = { workspace = true }
leaky-bucket = "1.1"
cookie = "0.18"
http-cache-reqwest = { version = "0.15", features = [
"manager-cacache",
"manager-moka",
] }
http-cache-semantics = "2.1"
fastrand = "2.3"
reqwest-middleware = "0.4"
reqwest-retry = "0.7"
reqwest-tracing = "0.5"
http-cache = { version = "0.20", features = [
"cacache-tokio",
"manager-cacache",
"manager-moka",
], default-features = false }

View File

@ -2,12 +2,12 @@ use bytes::Bytes;
use reqwest::IntoUrl; use reqwest::IntoUrl;
use super::client::HttpClientTrait; use super::client::HttpClientTrait;
use crate::errors::app_error::RError; use crate::FetchError;
pub async fn fetch_bytes<T: IntoUrl, H: HttpClientTrait>( pub async fn fetch_bytes<T: IntoUrl, H: HttpClientTrait>(
client: &H, client: &H,
url: T, url: T,
) -> Result<Bytes, RError> { ) -> Result<Bytes, FetchError> {
let bytes = client let bytes = client
.get(url) .get(url)
.send() .send()

View File

@ -17,7 +17,7 @@ use serde_with::serde_as;
use snafu::Snafu; use snafu::Snafu;
use super::HttpClientSecrecyDataTrait; use super::HttpClientSecrecyDataTrait;
use crate::fetch::get_random_mobile_ua; use crate::get_random_mobile_ua;
pub struct RateLimiterMiddleware { pub struct RateLimiterMiddleware {
rate_limiter: RateLimiter, rate_limiter: RateLimiter,

View File

@ -4,7 +4,7 @@ use cookie::Cookie;
use reqwest::{ClientBuilder, cookie::Jar}; use reqwest::{ClientBuilder, cookie::Jar};
use url::Url; use url::Url;
use crate::errors::app_error::RError; use crate::FetchError;
pub trait HttpClientSecrecyDataTrait { pub trait HttpClientSecrecyDataTrait {
fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder { fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
@ -23,9 +23,9 @@ impl HttpClientCookiesAuth {
cookies: &str, cookies: &str,
url: &Url, url: &Url,
user_agent: Option<String>, user_agent: Option<String>,
) -> Result<Self, RError> { ) -> Result<Self, FetchError> {
let cookie_jar = Arc::new(Jar::default()); let cookie_jar = Arc::new(Jar::default());
for cookie in Cookie::split_parse(cookies).try_collect::<Vec<_>>()? { for cookie in Cookie::split_parse(cookies).collect::<Result<Vec<Cookie<'_>>, _>>()? {
cookie_jar.add_cookie_str(&cookie.to_string(), url); cookie_jar.add_cookie_str(&cookie.to_string(), url);
} }

View File

@ -0,0 +1,12 @@
use snafu::Snafu;
/// Unified error type for the `fetch` crate's HTTP helpers.
///
/// Every variant is `#[snafu(transparent)]`, so this enum adds no message of
/// its own — it forwards `Display`/`source` from the underlying error and
/// gives callers a single type to match on.
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub enum FetchError {
    /// Failed to parse a cookie string (e.g. in `HttpClientCookiesAuth`).
    #[snafu(transparent)]
    CookieParseError { source: cookie::ParseError },
    /// Error surfaced directly by `reqwest`.
    #[snafu(transparent)]
    ReqwestError { source: reqwest::Error },
    /// Error from the `reqwest-middleware` stack.
    #[snafu(transparent)]
    RequestMiddlewareError { source: reqwest_middleware::Error },
}

View File

@ -1,12 +1,12 @@
use reqwest::IntoUrl; use reqwest::IntoUrl;
use super::client::HttpClientTrait; use super::client::HttpClientTrait;
use crate::errors::app_error::RError; use crate::FetchError;
pub async fn fetch_html<T: IntoUrl, H: HttpClientTrait>( pub async fn fetch_html<T: IntoUrl, H: HttpClientTrait>(
client: &H, client: &H,
url: T, url: T,
) -> Result<String, RError> { ) -> Result<String, FetchError> {
let content = client let content = client
.get(url) .get(url)
.send() .send()

View File

@ -2,11 +2,11 @@ use bytes::Bytes;
use reqwest::IntoUrl; use reqwest::IntoUrl;
use super::{bytes::fetch_bytes, client::HttpClientTrait}; use super::{bytes::fetch_bytes, client::HttpClientTrait};
use crate::errors::app_error::RError; use crate::FetchError;
pub async fn fetch_image<T: IntoUrl, H: HttpClientTrait>( pub async fn fetch_image<T: IntoUrl, H: HttpClientTrait>(
client: &H, client: &H,
url: T, url: T,
) -> Result<Bytes, RError> { ) -> Result<Bytes, FetchError> {
fetch_bytes(client, url).await fetch_bytes(client, url).await
} }

View File

@ -1,9 +1,10 @@
pub mod bytes; pub mod bytes;
pub mod client; pub mod client;
pub mod core; pub mod core;
pub mod errors;
pub mod html; pub mod html;
pub mod image; pub mod image;
pub mod oidc; pub mod test_util;
pub use core::get_random_mobile_ua; pub use core::get_random_mobile_ua;
@ -12,5 +13,8 @@ pub use client::{
HttpClient, HttpClientConfig, HttpClientCookiesAuth, HttpClientError, HttpClient, HttpClientConfig, HttpClientCookiesAuth, HttpClientError,
HttpClientSecrecyDataTrait, HttpClientTrait, HttpClientSecrecyDataTrait, HttpClientTrait,
}; };
pub use errors::FetchError;
pub use html::fetch_html; pub use html::fetch_html;
pub use image::fetch_image; pub use image::fetch_image;
pub use reqwest::{self, IntoUrl};
pub use reqwest_middleware;

View File

@ -0,0 +1,6 @@
use crate::{FetchError, HttpClient};
/// Returns a default-configured [`HttpClient`] for use in tests.
///
/// Wrapped in `Result` so call sites read like fallible client
/// construction, even though building the default client cannot fail today.
pub fn build_testing_http_client() -> Result<HttpClient, FetchError> {
    Ok(HttpClient::default())
}

View File

@ -4,7 +4,7 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
serde = { version = "1", features = ["derive"] } testcontainers = { workspace = true }
testcontainers = { version = "0.23.3" } testcontainers-modules = { workspace = true }
testcontainers-modules = { version = "0.11.4" } testcontainers-ext = { workspace = true }
testcontainers-ext = { version = "0.1.0", features = ["tracing"] } serde = { workspace = true }

11
packages/util/Cargo.toml Normal file
View File

@ -0,0 +1,11 @@
[package]
name = "util"
version = "0.1.0"
edition = "2024"
[dependencies]
futures = { workspace = true }
quirks_path = { workspace = true }
snafu = { workspace = true }
anyhow = { workspace = true }
bytes = { workspace = true }

View File

@ -53,3 +53,13 @@ impl From<Box<dyn std::error::Error + Send + Sync>> for OptDynErr {
Self::some(value) Self::some(value)
} }
} }
/// Extension for `Result<T, anyhow::Error>` that erases the error into a
/// boxed `dyn Error`, the form expected by snafu `source` fields.
pub trait AnyhowResultExt<T>: snafu::ResultExt<T, anyhow::Error> {
    /// Converts the `anyhow::Error` into a `Box<dyn Error + Send + Sync>`.
    fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>>;
}

impl<T> AnyhowResultExt<T> for Result<T, anyhow::Error> {
    fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>> {
        // `anyhow::Error: Into<Box<dyn Error + Send + Sync>>`.
        self.map_err(Into::into)
    }
}

3
packages/util/src/lib.rs Normal file
View File

@ -0,0 +1,3 @@
//! Small shared utilities for the workspace (currently error helpers).

pub mod errors;

// Re-export the most commonly used helper at the crate root.
pub use errors::OptDynErr;

View File

@ -1,4 +1,4 @@
[toolchain] [toolchain]
channel = "nightly" channel = "nightly-2025-02-20"
components = ["rustfmt", "clippy"] components = ["rustfmt", "clippy"]
profile = "default" profile = "default"