refactor: remove loco-rs

This commit is contained in: master
Committed on 2025-02-28 03:19:48 +08:00
parent c0707d17bb
commit a68aab1452
66 changed files with 1321 additions and 829 deletions

Cargo.lock (generated)

@ -33,6 +33,48 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
[[package]]
name = "adler32"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234"
[[package]]
name = "aead"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0"
dependencies = [
"crypto-common",
"generic-array",
]
[[package]]
name = "aes"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
dependencies = [
"cfg-if",
"cipher",
"cpufeatures",
]
[[package]]
name = "aes-gcm-siv"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae0784134ba9375416d469ec31e7c5f9fa94405049cf08c5ce5b4698be673e0d"
dependencies = [
"aead",
"aes",
"cipher",
"ctr",
"polyval",
"subtle",
"zeroize",
]
[[package]]
name = "ahash"
version = "0.7.8"
@ -164,6 +206,30 @@ version = "1.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4"
[[package]]
name = "apache-avro"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1aef82843a0ec9f8b19567445ad2421ceeb1d711514384bdd3d49fe37102ee13"
dependencies = [
"bigdecimal",
"digest",
"libflate",
"log",
"num-bigint",
"quad-rand",
"rand 0.8.5",
"regex-lite",
"serde",
"serde_bytes",
"serde_json",
"strum",
"strum_macros",
"thiserror 1.0.69",
"typed-builder 0.19.1",
"uuid",
]
[[package]]
name = "argon2"
version = "0.5.3"
@ -980,6 +1046,16 @@ dependencies = [
"stacker",
]
[[package]]
name = "cipher"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
dependencies = [
"crypto-common",
"inout",
]
[[package]]
name = "clap"
version = "4.5.31"
@ -1172,6 +1248,15 @@ dependencies = [
"unicode-segmentation",
]
[[package]]
name = "convert_case"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "cookie"
version = "0.18.1"
@ -1227,6 +1312,15 @@ version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "core2"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505"
dependencies = [
"memchr",
]
[[package]]
name = "cpufeatures"
version = "0.2.17"
@ -1271,6 +1365,17 @@ dependencies = [
"once_cell",
]
[[package]]
name = "cron"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5877d3fbf742507b66bc2a1945106bd30dd8504019d596901ddd012a4dd01740"
dependencies = [
"chrono",
"once_cell",
"winnow 0.6.26",
]
[[package]]
name = "cron_clock"
version = "0.8.0"
@ -1364,6 +1469,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"rand_core 0.6.4",
"typenum",
]
@ -1424,6 +1530,36 @@ dependencies = [
"syn 2.0.98",
]
[[package]]
name = "csv"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf"
dependencies = [
"csv-core",
"itoa",
"ryu",
"serde",
]
[[package]]
name = "csv-core"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d02f3b0da4c6504f86e9cd789d8dbafab48c2321be74e9987593de5a894d93d"
dependencies = [
"memchr",
]
[[package]]
name = "ctr"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835"
dependencies = [
"cipher",
]
[[package]]
name = "curve25519-dalek"
version = "4.1.3"
@ -1486,6 +1622,12 @@ dependencies = [
"syn 2.0.98",
]
[[package]]
name = "dary_heap"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04d2cd9c18b9f454ed67da600630b021a8a80bf33f8c95896ab33aaf1c26b728"
[[package]]
name = "dashmap"
version = "5.5.3"
@ -1616,7 +1758,16 @@ version = "5.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35"
dependencies = [
"dirs-sys",
"dirs-sys 0.4.1",
]
[[package]]
name = "dirs"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e"
dependencies = [
"dirs-sys 0.5.0",
]
[[package]]
@ -1637,10 +1788,22 @@ checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
dependencies = [
"libc",
"option-ext",
"redox_users",
"redox_users 0.4.6",
"windows-sys 0.48.0",
]
[[package]]
name = "dirs-sys"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab"
dependencies = [
"libc",
"option-ext",
"redox_users 0.5.0",
"windows-sys 0.59.0",
]
[[package]]
name = "dirs-sys-next"
version = "0.1.2"
@ -1648,7 +1811,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
dependencies = [
"libc",
"redox_users",
"redox_users 0.4.6",
"winapi",
]
@ -1886,6 +2049,16 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "etag"
version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b3d0661a2ccddc26cba0b834e9b717959ed6fdd76c7129ee159c170a875bf44"
dependencies = [
"str-buf",
"xxhash-rust",
]
[[package]]
name = "etcetera"
version = "0.8.0"
@ -1948,6 +2121,16 @@ dependencies = [
"ascii_utils",
]
[[package]]
name = "faster-hex"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7223ae2d2f179b803433d9c830478527e92b8117eab39460edae7f1614d9fb73"
dependencies = [
"heapless",
"serde",
]
[[package]]
name = "fastrand"
version = "2.3.0"
@ -2361,6 +2544,15 @@ dependencies = [
"thiserror 1.0.69",
]
[[package]]
name = "hash32"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47d60b12902ba28e2730cd37e95b8c9223af2808df9e902d4df49588d1470606"
dependencies = [
"byteorder",
]
[[package]]
name = "hashbrown"
version = "0.12.3"
@ -2424,6 +2616,16 @@ dependencies = [
"http",
]
[[package]]
name = "heapless"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bfb9eb618601c89945a70e254898da93b13be0388091d42117462b265bb3fad"
dependencies = [
"hash32",
"stable_deref_trait",
]
[[package]]
name = "heck"
version = "0.4.1"
@ -3045,6 +3247,15 @@ version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb"
[[package]]
name = "inout"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
dependencies = [
"generic-array",
]
[[package]]
name = "insta"
version = "1.42.1"
@ -3410,6 +3621,30 @@ version = "0.2.170"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828"
[[package]]
name = "libflate"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45d9dfdc14ea4ef0900c1cddbc8dcd553fbaacd8a4a282cf4018ae9dd04fb21e"
dependencies = [
"adler32",
"core2",
"crc32fast",
"dary_heap",
"libflate_lz77",
]
[[package]]
name = "libflate_lz77"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6e0d73b369f386f1c44abd9c570d5318f55ccde816ff4b562fa452e5182863d"
dependencies = [
"core2",
"hashbrown 0.14.5",
"rle-decode-fast",
]
[[package]]
name = "libm"
version = "0.2.11"
@ -3540,7 +3775,7 @@ version = "1.0.0-alpha.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84c12744d1279367caed41739ef094c325d53fb0ffcd4f9b84a368796f870252"
dependencies = [
"convert_case",
"convert_case 0.6.0",
"proc-macro2",
"quote",
"syn 1.0.109",
@ -3930,6 +4165,8 @@ dependencies = [
"httparse",
"memchr",
"mime",
"serde",
"serde_json",
"spin",
"version_check",
]
@ -4006,6 +4243,7 @@ checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
dependencies = [
"num-integer",
"num-traits",
"serde",
]
[[package]]
@ -4082,6 +4320,15 @@ dependencies = [
"libc",
]
[[package]]
name = "num_threads"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9"
dependencies = [
"libc",
]
[[package]]
name = "oauth2"
version = "5.0.0"
@ -4117,6 +4364,12 @@ version = "1.20.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e"
[[package]]
name = "opaque-debug"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381"
[[package]]
name = "opendal"
version = "0.50.2"
@ -4701,6 +4954,18 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
[[package]]
name = "polyval"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25"
dependencies = [
"cfg-if",
"cpufeatures",
"opaque-debug",
"universal-hash",
]
[[package]]
name = "portable-atomic"
version = "1.11.0"
@ -4861,10 +5126,16 @@ dependencies = [
"tap",
"thiserror 2.0.11",
"tracing",
"typed-builder",
"typed-builder 0.20.0",
"url",
]
[[package]]
name = "quad-rand"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a651516ddc9168ebd67b24afd085a718be02f8858fe406591b013d101ce2f40"
[[package]]
name = "quick-error"
version = "1.2.3"
@ -5131,6 +5402,7 @@ dependencies = [
"tracing-subscriber",
"url",
"uuid",
"zino",
"zune-image",
]
@ -5192,6 +5464,17 @@ dependencies = [
"thiserror 1.0.69",
]
[[package]]
name = "redox_users"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b"
dependencies = [
"getrandom 0.2.15",
"libredox",
"thiserror 2.0.11",
]
[[package]]
name = "reflink-copy"
version = "0.1.24"
@ -5236,6 +5519,12 @@ dependencies = [
"regex-syntax 0.8.5",
]
[[package]]
name = "regex-lite"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a"
[[package]]
name = "regex-syntax"
version = "0.6.29"
@ -5269,11 +5558,13 @@ version = "0.12.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da"
dependencies = [
"async-compression",
"base64 0.22.1",
"bytes",
"cookie",
"cookie_store",
"encoding_rs",
"futures-channel",
"futures-core",
"futures-util",
"h2",
@ -5454,6 +5745,12 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "rle-decode-fast"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422"
[[package]]
name = "rrgen"
version = "0.5.5"
@ -5672,7 +5969,7 @@ dependencies = [
"async-trait",
"bb8",
"chrono",
"convert_case",
"convert_case 0.6.0",
"cron_clock",
"gethostname",
"hex 0.4.3",
@ -6071,6 +6368,15 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_bytes"
version = "0.11.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "387cc504cb06bb40a96c8e04e951fe01854cf6bc921053c954e4a606d9675c6a"
dependencies = [
"serde",
]
[[package]]
name = "serde_derive"
version = "1.0.218"
@ -6113,6 +6419,17 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_qs"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd34f36fe4c5ba9654417139a9b3a20d2e1de6012ee678ad14d240c22c78d8d6"
dependencies = [
"percent-encoding",
"serde",
"thiserror 1.0.69",
]
[[package]]
name = "serde_regex"
version = "1.1.0"
@ -6712,6 +7029,12 @@ version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7beae5182595e9a8b683fa98c4317f956c9a2dec3b9716990d20023cc60c766"
[[package]]
name = "str-buf"
version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ceb97b7225c713c2fd4db0153cb6b3cab244eb37900c3f634ed4d43310d8c34"
[[package]]
name = "string_cache"
version = "0.8.8"
@ -7045,7 +7368,9 @@ checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21"
dependencies = [
"deranged",
"itoa",
"libc",
"num-conv",
"num_threads",
"powerfmt",
"serde",
"time-core",
@ -7118,7 +7443,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2594dd7c2abbbafbb1c78d167fd10860dc7bd75f814cb051a1e0d3e796b9702"
dependencies = [
"chrono",
"cron",
"cron 0.12.1",
"num-derive",
"num-traits",
"tokio",
@ -7240,7 +7565,7 @@ dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"winnow",
"winnow 0.7.3",
]
[[package]]
@ -7329,6 +7654,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf"
dependencies = [
"crossbeam-channel",
"parking_lot 0.12.3",
"thiserror 1.0.69",
"time",
"tracing-subscriber",
@ -7395,12 +7721,14 @@ dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"parking_lot 0.12.3",
"regex",
"serde",
"serde_json",
"sharded-slab",
"smallvec",
"thread_local",
"time",
"tracing",
"tracing-core",
"tracing-log",
@ -7430,13 +7758,33 @@ dependencies = [
"utf-8",
]
[[package]]
name = "typed-builder"
version = "0.19.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a06fbd5b8de54c5f7c91f6fe4cebb949be2125d7758e630bb58b1d831dbce600"
dependencies = [
"typed-builder-macro 0.19.1",
]
[[package]]
name = "typed-builder"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e14ed59dc8b7b26cacb2a92bad2e8b1f098806063898ab42a3bd121d7d45e75"
dependencies = [
"typed-builder-macro",
"typed-builder-macro 0.20.0",
]
[[package]]
name = "typed-builder-macro"
version = "0.19.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9534daa9fd3ed0bd911d462a37f172228077e7abf18c18a5f67199d959205f8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.98",
]
[[package]]
@ -7576,6 +7924,16 @@ version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "universal-hash"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea"
dependencies = [
"crypto-common",
"subtle",
]
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
@ -7630,6 +7988,44 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "utoipa"
version = "5.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "435c6f69ef38c9017b4b4eea965dfb91e71e53d869e896db40d1cf2441dd75c0"
dependencies = [
"indexmap 2.7.1",
"serde",
"serde_json",
"utoipa-gen",
]
[[package]]
name = "utoipa-gen"
version = "5.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a77d306bc75294fd52f3e99b13ece67c02c1a2789190a6f31d32f736624326f7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.98",
"ulid",
"url",
"uuid",
]
[[package]]
name = "utoipa-rapidoc"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5f8f5abd341cce16bb4f09a8bafc087d4884a004f25fb980e538d51d6501dab"
dependencies = [
"axum",
"serde",
"serde_json",
"utoipa",
]
[[package]]
name = "uuid"
version = "1.15.0"
@ -8253,6 +8649,15 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winnow"
version = "0.6.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e90edd2ac1aa278a5c4599b1d89cf03074b610800f866d4026dc199d7929a28"
dependencies = [
"memchr",
]
[[package]]
name = "winnow"
version = "0.7.3"
@ -8439,6 +8844,156 @@ dependencies = [
"syn 2.0.98",
]
[[package]]
name = "zino"
version = "0.33.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "265118c999bdb42b8dbdfeb3a5f3375128b90ab2f0758bc626887fffaacdefe6"
dependencies = [
"cfg-if",
"serde_json",
"zino-axum",
"zino-core",
"zino-http",
"zino-openapi",
"zino-storage",
]
[[package]]
name = "zino-axum"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5862b177fabf349df302aaa9cda10ed6bdd125b6e07deb60b10a35ddc525457c"
dependencies = [
"axum",
"futures",
"http",
"tokio",
"tower 0.5.2",
"tower-http",
"tracing",
"utoipa-rapidoc",
"zino-core",
"zino-http",
"zino-openapi",
]
[[package]]
name = "zino-channel"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee50afd3b09309f256260362a2e13a96da50a6e1e59c14635942cb9a36aff22d"
dependencies = [
"serde",
"serde_json",
"zino-core",
]
[[package]]
name = "zino-core"
version = "0.33.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7ce0b6105ded8ff1c3719b5c055e141427239fbc1443ecc460720ebbe00208c"
dependencies = [
"aes-gcm-siv",
"ahash 0.8.11",
"apache-avro",
"argon2",
"base64 0.22.1",
"cfg-if",
"chrono",
"convert_case 0.7.1",
"cron 0.15.0",
"csv",
"dirs",
"faster-hex",
"hkdf",
"hmac",
"http",
"parking_lot 0.12.3",
"rand 0.9.0",
"regex",
"reqwest",
"reqwest-middleware",
"reqwest-tracing",
"rust_decimal",
"serde",
"serde_json",
"serde_qs",
"sha1",
"sha2",
"smallvec",
"sqlx",
"toml",
"tracing",
"tracing-appender",
"tracing-log",
"tracing-subscriber",
"url",
"uuid",
]
[[package]]
name = "zino-http"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14d87dedb04d606d64da351c92c14fa33ab04777c8a20ec32c4302298512776c"
dependencies = [
"bytes",
"cfg-if",
"etag",
"futures",
"http",
"mime_guess",
"multer",
"percent-encoding",
"regex",
"reqwest",
"ryu",
"serde",
"serde_json",
"serde_qs",
"smallvec",
"toml",
"tracing",
"url",
"zino-channel",
"zino-core",
"zino-storage",
]
[[package]]
name = "zino-openapi"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c40508d23d8aab6753093ddd60eee9866fc86b03109e8d682d1af70aa20e55bc"
dependencies = [
"ahash 0.8.11",
"convert_case 0.7.1",
"serde_json",
"toml",
"tracing",
"utoipa",
"zino-core",
]
[[package]]
name = "zino-storage"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ef47e6602c68087e9018cc1cfccd852468525b0ba59ac4fb439855f41992325"
dependencies = [
"bytes",
"etag",
"md-5",
"mime_guess",
"multer",
"reqwest",
"toml",
"tracing",
"zino-core",
]
[[package]]
name = "zstd"
version = "0.13.3"


@ -3,9 +3,6 @@ members = ["apps/recorder"]
resolver = "2"
[patch.crates-io]
# loco-rs = { git = "https://github.com/lonelyhentxi/loco.git", rev = "beb890e" }
# loco-rs = { git = "https://github.com/loco-rs/loco.git" }
# loco-rs = { path = "./patches/loco" }
jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
# [patch."https://github.com/lonelyhentxi/qbit.git"]


@ -23,6 +23,7 @@ testcontainers = [
[dependencies]
loco-rs = { version = "0.14" }
zino = { version = "0.33", features = ["axum"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }


@ -1,65 +1,68 @@
#![allow(unused_imports)]
use color_eyre::eyre::Context;
use itertools::Itertools;
use loco_rs::{
app::Hooks,
boot::{BootResult, StartMode},
environment::Environment,
prelude::*,
};
use recorder::{
app::App,
migrations::Migrator,
models::{
subscribers::SEED_SUBSCRIBER,
subscriptions::{self, SubscriptionCreateFromRssDto},
},
};
use sea_orm_migration::MigratorTrait;
// #![allow(unused_imports)]
// use color_eyre::eyre::Context;
// use itertools::Itertools;
// use loco_rs::{
// app::Hooks,
// boot::{BootResult, StartMode},
// environment::Environment,
// prelude::AppContext as LocoContext,
// };
// use recorder::{
// app::{App1, AppContext},
// errors::RResult,
// migrations::Migrator,
// models::{
// subscribers::SEED_SUBSCRIBER,
// subscriptions::{self, SubscriptionCreateFromRssDto},
// },
// };
// use sea_orm::ColumnTrait;
// use sea_orm_migration::MigratorTrait;
async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> color_eyre::eyre::Result<()> {
let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
// async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> RResult<()> {
// let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
// let rss_link =
// "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
let subscription = if let Some(subscription) = subscriptions::Entity::find()
.filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
.one(&ctx.db)
.await?
{
subscription
} else {
subscriptions::Model::add_subscription(
ctx,
subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
rss_link: rss_link.to_string(),
display_name: String::from("Mikan Project - 我的番组"),
enabled: Some(true),
}),
1,
)
.await?
};
// // let rss_link =
// // "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
// let subscription = if let Some(subscription) =
// subscriptions::Entity::find()
// .filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
// .one(&ctx.db)
// .await?
// {
// subscription
// } else {
// subscriptions::Model::add_subscription(
// ctx,
//
// subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
// rss_link: rss_link.to_string(),
// display_name: String::from("Mikan Project - 我的番组"),
// enabled: Some(true),
// }),
// 1,
// )
// .await?
// };
subscription.pull_subscription(ctx).await?;
// subscription.pull_subscription(ctx).await?;
Ok(())
}
// Ok(())
// }
async fn init() -> color_eyre::eyre::Result<AppContext> {
color_eyre::install()?;
let ctx = loco_rs::cli::playground::<App>().await?;
let BootResult {
app_context: ctx, ..
} = loco_rs::boot::run_app::<App>(&StartMode::ServerOnly, ctx).await?;
Migrator::up(&ctx.db, None).await?;
Ok(ctx)
}
// async fn init() -> RResult<LocoContext> {
// let ctx = loco_rs::cli::playground::<App1>().await?;
// let BootResult {
// app_context: ctx, ..
// } = loco_rs::boot::run_app::<App1>(&StartMode::ServerOnly, ctx).await?;
// Migrator::up(&ctx.db, None).await?;
// Ok(ctx)
// }
#[tokio::main]
async fn main() -> color_eyre::eyre::Result<()> {
let ctx = init().await?;
pull_mikan_bangumi_rss(&ctx).await?;
// #[tokio::main]
// async fn main() -> color_eyre::eyre::Result<()> {
// pull_mikan_bangumi_rss(&ctx).await?;
Ok(())
}
// Ok(())
// }
fn main() {}


@ -0,0 +1,156 @@
use std::{path::Path, sync::Arc};
use figment::Figment;
use itertools::Itertools;
use super::{core::App, env::Enviornment};
use crate::{
app::{config::AppConfig, context::create_context, router::create_router},
errors::RResult,
};
pub struct AppBuilder {
dotenv_file: Option<String>,
config_file: Option<String>,
working_dir: String,
enviornment: Enviornment,
}
impl AppBuilder {
pub async fn load_dotenv(&self) -> RResult<()> {
let try_dotenv_file_or_dirs = if self.dotenv_file.is_some() {
vec![self.dotenv_file.as_deref()]
} else {
vec![Some(&self.working_dir as &str)]
};
let priority_suffix = &AppConfig::priority_suffix(&self.enviornment);
let dotenv_prefix = AppConfig::dotenv_prefix();
let try_filenames = priority_suffix
.iter()
.map(|ps| format!("{}{}", &dotenv_prefix, ps))
.collect_vec();
for try_dotenv_file_or_dir in try_dotenv_file_or_dirs.into_iter().flatten() {
let try_dotenv_file_or_dir_path = Path::new(try_dotenv_file_or_dir);
if try_dotenv_file_or_dir_path.exists() {
if try_dotenv_file_or_dir_path.is_dir() {
for f in try_filenames.iter() {
let p = try_dotenv_file_or_dir_path.join(f);
if p.exists() && p.is_file() {
dotenv::from_path(p)?;
break;
}
}
} else if try_dotenv_file_or_dir_path.is_file() {
dotenv::from_path(try_dotenv_file_or_dir_path)?;
break;
}
}
}
Ok(())
}
pub async fn build_config(&self) -> RResult<AppConfig> {
let try_config_file_or_dirs = if self.config_file.is_some() {
vec![self.config_file.as_deref()]
} else {
vec![Some(&self.working_dir as &str)]
};
let allowed_extensions = &AppConfig::allowed_extension();
let priority_suffix = &AppConfig::priority_suffix(&self.enviornment);
let convention_prefix = &AppConfig::config_prefix();
let try_filenames = priority_suffix
.iter()
.flat_map(|ps| {
allowed_extensions
.iter()
.map(move |ext| (format!("{}{}{}", convention_prefix, ps, ext), ext))
})
.collect_vec();
let mut fig = Figment::from(AppConfig::default_provider());
for try_config_file_or_dir in try_config_file_or_dirs.into_iter().flatten() {
let try_config_file_or_dir_path = Path::new(try_config_file_or_dir);
if try_config_file_or_dir_path.exists() {
if try_config_file_or_dir_path.is_dir() {
for (f, ext) in try_filenames.iter() {
let p = try_config_file_or_dir_path.join(f);
if p.exists() && p.is_file() {
fig = AppConfig::merge_provider_from_file(fig, &p, ext)?;
break;
}
}
} else if let Some(ext) = try_config_file_or_dir_path
.extension()
.and_then(|s| s.to_str())
&& try_config_file_or_dir_path.is_file()
{
fig =
AppConfig::merge_provider_from_file(fig, try_config_file_or_dir_path, ext)?;
break;
}
}
}
let app_config: AppConfig = fig.extract()?;
Ok(app_config)
}
pub async fn build(self) -> RResult<App> {
let _app_name = env!("CARGO_CRATE_NAME");
let _app_version = format!(
"{} ({})",
env!("CARGO_PKG_VERSION"),
option_env!("BUILD_SHA")
.or(option_env!("GITHUB_SHA"))
.unwrap_or("dev")
);
self.load_dotenv().await?;
let config = self.build_config().await?;
let app_context = Arc::new(create_context(config).await?);
let router = create_router(app_context.clone()).await?;
Ok(App {
context: app_context,
router,
builder: self,
})
}
pub fn set_working_dir(self, working_dir: String) -> Self {
let mut ret = self;
ret.working_dir = working_dir;
ret
}
pub fn set_working_dir_to_manifest_dir(self) -> Self {
let manifest_dir = if cfg!(debug_assertions) {
env!("CARGO_MANIFEST_DIR")
} else {
"./apps/recorder"
};
self.set_working_dir(manifest_dir.to_string())
}
}
impl Default for AppBuilder {
fn default() -> Self {
Self {
enviornment: Enviornment::Production,
dotenv_file: None,
config_file: None,
working_dir: String::from("."),
}
}
}
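
For orientation, here is a minimal usage sketch of this builder. It is an assumption, not part of this commit: the binary entry point, the listen address, and the premise that AppRouter::root comes back from create_router ready to serve are all illustrative.

// Hedged sketch, not part of this commit: drive AppBuilder from a binary and
// serve the assembled axum router. The listener address is an assumption.
#[tokio::main]
async fn main() -> RResult<()> {
    let app = App::builder()
        .set_working_dir_to_manifest_dir()
        .build()
        .await?;
    // AppRouter::root is assumed to be a ready-to-serve axum Router here.
    let listener = tokio::net::TcpListener::bind("0.0.0.0:5001").await?;
    axum::serve(listener, app.router.root).await?;
    Ok(())
}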


@ -0,0 +1,16 @@
[storage]
data_dir = "./data"
[mikan]
base_url = "https://mikanani.me/"
[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 0
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500
[graphql]
depth_limit = inf
complexity_limit = inf


@ -0,0 +1,78 @@
use std::{fs, path::Path, str};
use figment::{
Figment, Provider,
providers::{Format, Json, Toml, Yaml},
};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use super::env::Enviornment;
use crate::{
auth::AuthConfig, errors::RResult, extract::mikan::AppMikanConfig,
graphql::config::GraphQLConfig, storage::StorageConfig,
};
const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
const CONFIG_ALLOWED_EXTENSIONS: &[&str] = &[".toml", ".json", ".yaml", ".yml"];
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AppConfig {
pub auth: AuthConfig,
pub dal: StorageConfig,
pub mikan: AppMikanConfig,
pub graphql: GraphQLConfig,
}
impl AppConfig {
pub fn config_prefix() -> String {
format!("{}.config", env!("CARGO_PKG_NAME"))
}
pub fn dotenv_prefix() -> String {
String::from(".env")
}
pub fn allowed_extension() -> Vec<String> {
CONFIG_ALLOWED_EXTENSIONS
.iter()
.map(|s| s.to_string())
.collect_vec()
}
pub fn priority_suffix(enviornment: &Enviornment) -> Vec<String> {
vec![
format!(".{}.local", enviornment.full_name()),
format!(".{}.local", enviornment.short_name()),
String::from(".local"),
enviornment.full_name().to_string(),
enviornment.short_name().to_string(),
String::from(""),
]
}
pub fn default_provider() -> impl Provider {
Toml::string(DEFAULT_CONFIG_MIXIN)
}
pub fn merge_provider_from_file(
fig: Figment,
filepath: impl AsRef<Path>,
ext: &str,
) -> RResult<Figment> {
let content = fs::read_to_string(filepath)?;
let rendered = tera::Tera::one_off(
&content,
&tera::Context::from_value(serde_json::json!({}))?,
false,
)?;
Ok(match ext {
".toml" => fig.merge(Toml::string(&rendered)),
".json" => fig.merge(Json::string(&rendered)),
".yaml" | ".yml" => fig.merge(Yaml::string(&rendered)),
_ => unreachable!("unsupported config extension"),
})
}
}
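
To make the lookup order concrete, the sketch below reproduces the candidate-name pairing that AppBuilder::build_config performs with these helpers. Assuming the package is named recorder (so config_prefix() yields "recorder.config"), the development environment probes recorder.config.development.local.toml first and plain recorder.config.toml last, for each allowed extension.

// Illustration only: mirrors the priority_suffix x allowed_extension pairing
// used by AppBuilder::build_config when scanning a directory for config files.
fn candidate_config_names(environment: &Enviornment) -> Vec<String> {
    let prefix = AppConfig::config_prefix();
    let mut names = Vec::new();
    for suffix in AppConfig::priority_suffix(environment) {
        for ext in AppConfig::allowed_extension() {
            names.push(format!("{prefix}{suffix}{ext}"));
        }
    }
    names
}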


@ -0,0 +1,22 @@
use sea_orm::DatabaseConnection;
use super::config::AppConfig;
use crate::{
auth::AuthService, cache::CacheService, errors::RResult, extract::mikan::MikanClient,
graphql::GraphQLService, storage::StorageService,
};
pub struct AppContext {
pub db: DatabaseConnection,
pub config: AppConfig,
pub cache: CacheService,
pub mikan: MikanClient,
pub auth: AuthService,
pub graphql: GraphQLService,
pub storage: StorageService,
pub working_dir: String,
}
pub async fn create_context(_config: AppConfig) -> RResult<AppContext> {
todo!()
}


@ -0,0 +1,15 @@
use std::sync::Arc;
use super::{builder::AppBuilder, context::AppContext, router::AppRouter};
pub struct App {
pub context: Arc<AppContext>,
pub builder: AppBuilder,
pub router: AppRouter,
}
impl App {
pub fn builder() -> AppBuilder {
AppBuilder::default()
}
}


@ -0,0 +1,23 @@
pub enum Enviornment {
Development,
Production,
Testing,
}
impl Enviornment {
pub fn full_name(&self) -> &'static str {
match &self {
Self::Development => "development",
Self::Production => "production",
Self::Testing => "testing",
}
}
pub fn short_name(&self) -> &'static str {
match &self {
Self::Development => "dev",
Self::Production => "prod",
Self::Testing => "test",
}
}
}
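
How an Enviornment gets selected is not shown in this commit. A plausible sketch, purely an assumption, mirrors the NODE_ENV probe in the AppContextExt::get_node_env helper removed in the file that follows.

// Assumption, not part of this commit: pick the environment from NODE_ENV,
// defaulting to Development, the way the removed loco-based helper did.
impl Enviornment {
    pub fn from_env_var() -> Self {
        match std::env::var("NODE_ENV").as_deref() {
            Ok("production") => Self::Production,
            Ok("test") | Ok("testing") => Self::Testing,
            _ => Self::Development,
        }
    }
}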


@ -1,34 +0,0 @@
use loco_rs::{app::AppContext, environment::Environment};
use crate::{
auth::service::AppAuthService, dal::AppDalClient, extract::mikan::AppMikanClient,
graphql::service::AppGraphQLService,
};
pub trait AppContextExt {
fn get_dal_client(&self) -> &AppDalClient {
AppDalClient::app_instance()
}
fn get_mikan_client(&self) -> &AppMikanClient {
AppMikanClient::app_instance()
}
fn get_auth_service(&self) -> &AppAuthService {
AppAuthService::app_instance()
}
fn get_graphql_service(&self) -> &AppGraphQLService {
AppGraphQLService::app_instance()
}
fn get_node_env(&self) -> Environment {
let node_env = std::env::var("NODE_ENV");
match node_env.as_deref() {
Ok("production") => Environment::Production,
_ => Environment::Development,
}
}
}
impl AppContextExt for AppContext {}


@ -1,62 +1,33 @@
pub mod ext;
pub mod builder;
pub mod config;
pub mod context;
pub mod core;
pub mod env;
pub mod router;
use std::{
fs,
path::{self, Path, PathBuf},
sync::Arc,
};
pub use core::App;
use std::path::Path;
use async_trait::async_trait;
pub use ext::AppContextExt;
use itertools::Itertools;
pub use context::AppContext;
use loco_rs::{
Result,
app::{AppContext, Hooks},
app::{AppContext as LocoAppContext, Hooks},
boot::{BootResult, StartMode, create_app},
cache,
config::Config,
controller::{AppRoutes, middleware, middleware::MiddlewareLayer},
controller::AppRoutes,
db::truncate_table,
environment::Environment,
prelude::*,
task::Tasks,
};
use once_cell::sync::OnceCell;
use crate::{
auth::service::AppAuthServiceInitializer,
controllers::{self},
dal::AppDalInitalizer,
extract::mikan::client::AppMikanClientInitializer,
graphql::service::AppGraphQLServiceInitializer,
migrations::Migrator,
models::subscribers,
workers::subscription_worker::SubscriptionWorker,
};
use crate::{migrations::Migrator, models::subscribers};
pub const WORKING_ROOT_VAR_NAME: &str = "WORKING_ROOT";
static APP_WORKING_ROOT: OnceCell<quirks_path::PathBuf> = OnceCell::new();
pub struct App;
impl App {
pub fn set_working_root(path: PathBuf) {
APP_WORKING_ROOT.get_or_init(|| {
quirks_path::PathBuf::from(path.as_os_str().to_string_lossy().to_string())
});
}
pub fn get_working_root() -> &'static quirks_path::Path {
APP_WORKING_ROOT
.get()
.map(|p| p.as_path())
.expect("working root not set")
}
}
pub struct App1;
#[async_trait]
impl Hooks for App {
impl Hooks for App1 {
fn app_version() -> String {
format!(
"{} ({})",
@ -79,130 +50,28 @@ impl Hooks for App {
create_app::<Self, Migrator>(mode, environment, config).await
}
async fn load_config(env: &Environment) -> Result<Config> {
let working_roots_to_search = [
std::env::var(WORKING_ROOT_VAR_NAME).ok(),
Some(String::from("./apps/recorder")),
Some(String::from(".")),
]
.into_iter()
.flatten()
.collect_vec();
for working_root in working_roots_to_search.iter() {
let working_root = PathBuf::from(working_root);
for env_file in [
working_root.join(format!(".env.{env}.local")),
working_root.join(format!(".env.{env}")),
working_root.join(".env.local"),
working_root.join(".env"),
] {
tracing::info!(env_file =? env_file);
if env_file.exists() && env_file.is_file() {
dotenv::from_path(&env_file).map_err(loco_rs::Error::wrap)?;
tracing::info!("loaded env from {} success.", env_file.to_string_lossy());
}
}
}
for working_root in working_roots_to_search.iter() {
let working_root = PathBuf::from(working_root);
let config_dir = working_root.as_path().join("config");
for config_file in [
config_dir.join(format!("{env}.local.yaml")),
config_dir.join(format!("{env}.yaml")),
] {
if config_file.exists() && config_file.is_file() {
let content = fs::read_to_string(config_file.clone())?;
let rendered = tera::Tera::one_off(
&content,
&tera::Context::from_value(serde_json::json!({}))?,
false,
)?;
App::set_working_root(working_root);
let config_file = &config_file.to_string_lossy();
let res = serde_yaml::from_str(&rendered)
.map_err(|err| loco_rs::Error::YAMLFile(err, config_file.to_string()))?;
tracing::info!("loading config from {} success", config_file);
return Ok(res);
}
}
}
Err(loco_rs::Error::Message(format!(
"no configuration file found in search paths: {}",
working_roots_to_search
.iter()
.flat_map(|p| path::absolute(PathBuf::from(p)))
.map(|p| p.to_string_lossy().to_string())
.join(",")
)))
}
async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
let initializers: Vec<Box<dyn Initializer>> = vec![
Box::new(AppDalInitalizer),
Box::new(AppMikanClientInitializer),
Box::new(AppGraphQLServiceInitializer),
Box::new(AppAuthServiceInitializer),
];
async fn initializers(_ctx: &LocoAppContext) -> Result<Vec<Box<dyn Initializer>>> {
let initializers: Vec<Box<dyn Initializer>> = vec![];
Ok(initializers)
}
fn routes(ctx: &AppContext) -> AppRoutes {
let ctx = Arc::new(ctx.clone());
fn routes(_ctx: &LocoAppContext) -> AppRoutes {
AppRoutes::with_default_routes()
.prefix("/api")
.add_route(controllers::graphql::routes(ctx.clone()))
}
fn middlewares(ctx: &AppContext) -> Vec<Box<dyn MiddlewareLayer>> {
use loco_rs::controller::middleware::static_assets::{FolderConfig, StaticAssets};
let mut middlewares = middleware::default_middleware_stack(ctx);
middlewares.push(Box::new(StaticAssets {
enable: true,
must_exist: true,
folder: FolderConfig {
uri: String::from("/api/static"),
path: App::get_working_root().join("public").into(),
},
fallback: App::get_working_root()
.join("public/assets/404.html")
.into(),
precompressed: false,
}));
middlewares
}
async fn after_context(ctx: AppContext) -> Result<AppContext> {
Ok(AppContext {
cache: cache::Cache::new(cache::drivers::inmem::new()).into(),
..ctx
})
}
async fn connect_workers(ctx: &AppContext, queue: &Queue) -> Result<()> {
queue.register(SubscriptionWorker::build(ctx)).await?;
Ok(())
}
fn register_tasks(_tasks: &mut Tasks) {}
async fn truncate(ctx: &AppContext) -> Result<()> {
async fn truncate(ctx: &LocoAppContext) -> Result<()> {
truncate_table(&ctx.db, subscribers::Entity).await?;
Ok(())
}
async fn seed(_ctx: &AppContext, _base: &Path) -> Result<()> {
async fn seed(_ctx: &LocoAppContext, _base: &Path) -> Result<()> {
Ok(())
}
async fn connect_workers(_ctx: &LocoAppContext, _queue: &Queue) -> Result<()> {
Ok(())
}
}

View File

@ -0,0 +1,31 @@
use std::sync::Arc;
use axum::Router;
use futures::try_join;
use crate::{
app::AppContext,
controllers::{self, core::ControllerTrait},
errors::RResult,
};
pub struct AppRouter {
pub root: Router<Arc<AppContext>>,
}
pub async fn create_router(context: Arc<AppContext>) -> RResult<AppRouter> {
let mut root_router = Router::<Arc<AppContext>>::new();
let (graphqlc, oidcc) = try_join!(
controllers::graphql::create(context.clone()),
controllers::oidc::create(context.clone()),
)?;
for c in [graphqlc, oidcc] {
root_router = c.apply_to(root_router);
}
root_router = root_router.with_state(context);
Ok(AppRouter { root: root_router })
}


@ -1,15 +1,17 @@
use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts};
use base64::{self, Engine};
use loco_rs::app::AppContext;
use reqwest::header::AUTHORIZATION;
use super::{
config::BasicAuthConfig,
errors::AuthError,
service::{AuthService, AuthUserInfo},
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{
app::AppContext,
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
};
use crate::models::{auth::AuthType, subscribers::SEED_SUBSCRIBER};
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct AuthBasic {
@ -59,7 +61,7 @@ pub struct BasicAuthService {
}
#[async_trait]
impl AuthService for BasicAuthService {
impl AuthServiceTrait for BasicAuthService {
async fn extract_user_info(
&self,
ctx: &AppContext,


@ -1,6 +1,6 @@
use jwt_authorizer::OneOrArray;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, NoneAsEmptyString};
use serde_with::{NoneAsEmptyString, serde_as};
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct BasicAuthConfig {
@ -33,7 +33,7 @@ pub struct OidcAuthConfig {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "auth_type", rename_all = "snake_case")]
pub enum AppAuthConfig {
pub enum AuthConfig {
Basic(BasicAuthConfig),
Oidc(OidcAuthConfig),
}


@ -1,9 +1,4 @@
import { InjectionToken } from '@outposts/injection-js';
import {
type EventTypes,
LogLevel,
type OpenIdConfiguration,
} from 'oidc-client-rx';
import { LogLevel, type OpenIdConfiguration } from 'oidc-client-rx';
export const isBasicAuth = process.env.AUTH_TYPE === 'basic';


@ -6,7 +6,6 @@ use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
use loco_rs::model::ModelError;
use openidconnect::{
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
StandardErrorResponse, core::CoreErrorResponseType,
@ -14,7 +13,7 @@ use openidconnect::{
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{fetch::HttpClientError, models::auth::AuthType};
use crate::{errors::RError, fetch::HttpClientError, models::auth::AuthType};
#[derive(Debug, Error)]
pub enum AuthError {
@ -24,7 +23,7 @@ pub enum AuthError {
current: AuthType,
},
#[error("Failed to find auth record")]
FindAuthRecordError(ModelError),
FindAuthRecordError(RError),
#[error("Invalid credentials")]
BasicInvalidCredentials,
#[error(transparent)]


@ -6,16 +6,15 @@ use axum::{
middleware::Next,
response::{IntoResponse, Response},
};
use loco_rs::prelude::AppContext;
use crate::{app::AppContextExt, auth::AuthService};
use crate::{app::AppContext, auth::AuthServiceTrait};
pub async fn api_auth_middleware(
pub async fn header_www_authenticate_middleware(
State(ctx): State<Arc<AppContext>>,
request: Request,
next: Next,
) -> Response {
let auth_service = ctx.get_auth_service();
let auth_service = &ctx.auth;
let (mut parts, body) = request.into_parts();


@ -5,7 +5,7 @@ pub mod middleware;
pub mod oidc;
pub mod service;
pub use config::{AppAuthConfig, BasicAuthConfig, OidcAuthConfig};
pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig};
pub use errors::AuthError;
pub use middleware::api_auth_middleware;
pub use service::{AppAuthService, AuthService, AuthUserInfo};
pub use middleware::header_www_authenticate_middleware;
pub use service::{AuthService, AuthServiceTrait, AuthUserInfo};


@ -7,13 +7,13 @@ use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts};
use itertools::Itertools;
use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer};
use loco_rs::{app::AppContext, model::ModelError};
use moka::future::Cache;
use openidconnect::{
AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse,
core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
};
use sea_orm::DbErr;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use url::Url;
@ -21,9 +21,9 @@ use url::Url;
use super::{
config::OidcAuthConfig,
errors::AuthError,
service::{AuthService, AuthUserInfo},
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{fetch::HttpClient, models::auth::AuthType};
use crate::{app::AppContext, errors::RError, fetch::HttpClient, models::auth::AuthType};
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OidcAuthClaims {
@ -258,7 +258,7 @@ impl OidcAuthService {
}
#[async_trait]
impl AuthService for OidcAuthService {
impl AuthServiceTrait for OidcAuthService {
async fn extract_user_info(
&self,
ctx: &AppContext,
@ -306,7 +306,7 @@ impl AuthService for OidcAuthService {
}
}
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(ModelError::EntityNotFound) => {
Err(RError::DbError(DbErr::RecordNotFound(..))) => {
crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
}
r => r,


@ -7,20 +7,17 @@ use axum::{
response::{IntoResponse as _, Response},
};
use jwt_authorizer::{JwtAuthorizer, Validation};
use loco_rs::app::{AppContext, Initializer};
use moka::future::Cache;
use once_cell::sync::OnceCell;
use reqwest::header::HeaderValue;
use super::{
AppAuthConfig,
AuthConfig,
basic::BasicAuthService,
errors::AuthError,
oidc::{OidcAuthClaims, OidcAuthService},
};
use crate::{
app::AppContextExt as _,
config::AppConfigExt,
app::AppContext,
fetch::{
HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
@ -41,7 +38,7 @@ impl FromRequestParts<AppContext> for AuthUserInfo {
parts: &mut Parts,
state: &AppContext,
) -> Result<Self, Self::Rejection> {
let auth_service = state.get_auth_service();
let auth_service = &state.auth;
auth_service
.extract_user_info(state, parts)
@ -51,7 +48,7 @@ impl FromRequestParts<AppContext> for AuthUserInfo {
}
#[async_trait]
pub trait AuthService {
pub trait AuthServiceTrait {
async fn extract_user_info(
&self,
ctx: &AppContext,
@ -61,24 +58,16 @@ pub trait AuthService {
fn auth_type(&self) -> AuthType;
}
pub enum AppAuthService {
pub enum AuthService {
Basic(BasicAuthService),
Oidc(OidcAuthService),
}
static APP_AUTH_SERVICE: OnceCell<AppAuthService> = OnceCell::new();
impl AppAuthService {
pub fn app_instance() -> &'static Self {
APP_AUTH_SERVICE
.get()
.expect("AppAuthService is not initialized")
}
pub async fn from_conf(config: AppAuthConfig) -> Result<Self, AuthError> {
impl AuthService {
pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> {
let result = match config {
AppAuthConfig::Basic(config) => AppAuthService::Basic(BasicAuthService { config }),
AppAuthConfig::Oidc(config) => {
AuthConfig::Basic(config) => AuthService::Basic(BasicAuthService { config }),
AuthConfig::Oidc(config) => {
let validation = Validation::new()
.iss(&[&config.issuer])
.aud(&[&config.audience]);
@ -96,7 +85,7 @@ impl AppAuthService {
.build()
.await?;
AppAuthService::Oidc(OidcAuthService {
AuthService::Oidc(OidcAuthService {
config,
api_authorizer,
oidc_provider_client,
@ -112,50 +101,29 @@ impl AppAuthService {
}
#[async_trait]
impl AuthService for AppAuthService {
impl AuthServiceTrait for AuthService {
async fn extract_user_info(
&self,
ctx: &AppContext,
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> {
match self {
AppAuthService::Basic(service) => service.extract_user_info(ctx, request).await,
AppAuthService::Oidc(service) => service.extract_user_info(ctx, request).await,
AuthService::Basic(service) => service.extract_user_info(ctx, request).await,
AuthService::Oidc(service) => service.extract_user_info(ctx, request).await,
}
}
fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
match self {
AppAuthService::Basic(service) => service.www_authenticate_header_value(),
AppAuthService::Oidc(service) => service.www_authenticate_header_value(),
AuthService::Basic(service) => service.www_authenticate_header_value(),
AuthService::Oidc(service) => service.www_authenticate_header_value(),
}
}
fn auth_type(&self) -> AuthType {
match self {
AppAuthService::Basic(service) => service.auth_type(),
AppAuthService::Oidc(service) => service.auth_type(),
AuthService::Basic(service) => service.auth_type(),
AuthService::Oidc(service) => service.auth_type(),
}
}
}
pub struct AppAuthServiceInitializer;
#[async_trait]
impl Initializer for AppAuthServiceInitializer {
fn name(&self) -> String {
String::from("AppAuthServiceInitializer")
}
async fn before_run(&self, ctx: &AppContext) -> Result<(), loco_rs::Error> {
let auth_conf = ctx.config.get_app_conf()?.auth;
let service = AppAuthService::from_conf(auth_conf)
.await
.map_err(loco_rs::Error::wrap)?;
APP_AUTH_SERVICE.get_or_init(|| service);
Ok(())
}
}


@ -1,9 +1,9 @@
use loco_rs::cli;
use recorder::{app::App, migrations::Migrator};
use recorder::{app::App1, migrations::Migrator};
#[tokio::main]
async fn main() -> color_eyre::eyre::Result<()> {
color_eyre::install()?;
cli::main::<App, Migrator>().await?;
cli::main::<App1, Migrator>().await?;
Ok(())
}

apps/recorder/src/cache/config.rs (vendored, new file)

@ -0,0 +1 @@
pub struct CacheConfig {}

apps/recorder/src/cache/mod.rs (vendored, new file)

@ -0,0 +1,5 @@
pub mod config;
pub mod service;
pub use config::CacheConfig;
pub use service::CacheService;

apps/recorder/src/cache/service.rs (vendored, new file)

@ -0,0 +1 @@
pub struct CacheService {}


@ -1,74 +0,0 @@
use figment::{
Figment,
providers::{Format, Json, Yaml},
};
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use crate::{
auth::AppAuthConfig, dal::config::AppDalConfig, errors::RecorderError,
extract::mikan::AppMikanConfig, graphql::config::AppGraphQLConfig,
};
const DEFAULT_APP_SETTINGS_MIXIN: &str = include_str!("./settings_mixin.yaml");
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AppConfig {
pub auth: AppAuthConfig,
pub dal: AppDalConfig,
pub mikan: AppMikanConfig,
pub graphql: AppGraphQLConfig,
}
pub fn deserialize_key_path_from_json_value<T: DeserializeOwned>(
value: &serde_json::Value,
key_path: &[&str],
) -> Result<Option<T>, loco_rs::Error> {
let mut stack = vec![("", value)];
for key in key_path {
let current = stack.last().unwrap().1;
if let Some(v) = current.get(key) {
stack.push((key, v));
} else {
return Ok(None);
}
}
let result: T = serde_json::from_value(stack.pop().unwrap().1.clone())?;
Ok(Some(result))
}
pub fn deserialize_key_path_from_app_config<T: DeserializeOwned>(
app_config: &loco_rs::config::Config,
key_path: &[&str],
) -> Result<Option<T>, loco_rs::Error> {
let settings = app_config.settings.as_ref();
if let Some(settings) = settings {
deserialize_key_path_from_json_value(settings, key_path)
} else {
Ok(None)
}
}
pub trait AppConfigExt {
fn get_root_conf(&self) -> &loco_rs::config::Config;
fn get_app_conf(&self) -> Result<AppConfig, RecorderError> {
let settings_str = self
.get_root_conf()
.settings
.as_ref()
.map(serde_json::to_string)
.unwrap_or_else(|| Ok(String::new()))?;
let app_config = Figment::from(Json::string(&settings_str))
.merge(Yaml::string(DEFAULT_APP_SETTINGS_MIXIN))
.extract()?;
Ok(app_config)
}
}
impl AppConfigExt for loco_rs::config::Config {
fn get_root_conf(&self) -> &loco_rs::config::Config {
self
}
}


@ -1,15 +0,0 @@
# dal:
# data_dir: ./data
# mikan:
# http_client:
# exponential_backoff_max_retries: 3
# leaky_bucket_max_tokens: 2
# leaky_bucket_initial_tokens: 0
# leaky_bucket_refill_tokens: 1
# leaky_bucket_refill_interval: 500
# base_url: "https://mikanani.me/"
# graphql:
# depth_limit: null
# complexity_limit: null


@ -0,0 +1,50 @@
use std::{borrow::Cow, sync::Arc};
use axum::Router;
use crate::app::AppContext;
pub trait ControllerTrait: Sized {
fn apply_to(self, router: Router<Arc<AppContext>>) -> Router<Arc<AppContext>>;
}
pub struct PrefixController {
prefix: Cow<'static, str>,
router: Router<Arc<AppContext>>,
}
impl PrefixController {
pub fn new(prefix: impl Into<Cow<'static, str>>, router: Router<Arc<AppContext>>) -> Self {
Self {
prefix: prefix.into(),
router,
}
}
}
impl ControllerTrait for PrefixController {
fn apply_to(self, router: Router<Arc<AppContext>>) -> Router<Arc<AppContext>> {
router.nest(&self.prefix, self.router)
}
}
pub enum Controller {
Prefix(PrefixController),
}
impl Controller {
pub fn from_prefix(
prefix: impl Into<Cow<'static, str>>,
router: Router<Arc<AppContext>>,
) -> Self {
Self::Prefix(PrefixController::new(prefix, router))
}
}
impl ControllerTrait for Controller {
fn apply_to(self, router: Router<Arc<AppContext>>) -> Router<Arc<AppContext>> {
match self {
Self::Prefix(p) => p.apply_to(router),
}
}
}
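
A hypothetical extra controller shows how the trait composes; the /api/healthz prefix and the handler below are assumptions for illustration, not part of this commit.

// Illustrative only: a controller registered through the same ControllerTrait.
use axum::routing::get;

async fn healthz() -> &'static str {
    "ok"
}

pub fn healthz_controller() -> Controller {
    let router = Router::<Arc<AppContext>>::new().route("/", get(healthz));
    Controller::from_prefix("/api/healthz", router)
}

// In create_router it would then be applied like the existing controllers:
// root_router = healthz_controller().apply_to(root_router);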


@ -1,20 +1,23 @@
use std::sync::Arc;
use async_graphql_axum::{GraphQLRequest, GraphQLResponse};
use axum::{extract::State, middleware::from_fn_with_state, routing::post, Extension};
use loco_rs::{app::AppContext, prelude::Routes};
use axum::{Extension, Router, extract::State, middleware::from_fn_with_state, routing::post};
use super::core::Controller;
use crate::{
app::AppContextExt,
auth::{api_auth_middleware, AuthUserInfo},
app::AppContext,
auth::{AuthUserInfo, header_www_authenticate_middleware},
errors::RResult,
};
pub const CONTROLLER_PREFIX: &str = "/api/graphql";
async fn graphql_handler(
State(ctx): State<AppContext>,
State(ctx): State<Arc<AppContext>>,
Extension(auth_user_info): Extension<AuthUserInfo>,
req: GraphQLRequest,
) -> GraphQLResponse {
let graphql_service = ctx.get_graphql_service();
let graphql_service = &ctx.graphql;
let mut req = req.into_inner();
req = req.data(auth_user_info);
@ -22,9 +25,9 @@ async fn graphql_handler(
graphql_service.schema.execute(req).await.into()
}
pub fn routes(ctx: Arc<AppContext>) -> Routes {
Routes::new().prefix("/graphql").add(
"/",
post(graphql_handler).layer(from_fn_with_state(ctx, api_auth_middleware)),
)
pub async fn create(ctx: Arc<AppContext>) -> RResult<Controller> {
let router = Router::<Arc<AppContext>>::new()
.route("/", post(graphql_handler))
.layer(from_fn_with_state(ctx, header_www_authenticate_middleware));
Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))
}


@ -1,2 +1,3 @@
pub mod core;
pub mod graphql;
pub mod oidc;


@ -1,24 +1,32 @@
use std::sync::Arc;
use axum::{extract::Query, http::request::Parts};
use loco_rs::prelude::*;
use axum::{
Json, Router,
extract::{Query, State},
http::request::Parts,
routing::get,
};
use super::core::Controller;
use crate::{
app::AppContextExt,
app::AppContext,
auth::{
AuthError, AuthService, AuthServiceTrait,
oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest},
AppAuthService, AuthError, AuthService,
},
errors::RResult,
extract::http::ForwardedRelatedInfo,
models::auth::AuthType,
};
pub const CONTROLLER_PREFIX: &str = "/api/oidc";
async fn oidc_callback(
State(ctx): State<Arc<AppContext>>,
Query(query): Query<OidcAuthCallbackQuery>,
) -> Result<Json<OidcAuthCallbackPayload>, AuthError> {
let auth_service = ctx.get_auth_service();
if let AppAuthService::Oidc(oidc_auth_service) = auth_service {
let auth_service = &ctx.auth;
if let AuthService::Oidc(oidc_auth_service) = auth_service {
let response = oidc_auth_service
.extract_authorization_request_callback(query)
.await?;
@ -35,13 +43,13 @@ async fn oidc_auth(
State(ctx): State<Arc<AppContext>>,
parts: Parts,
) -> Result<Json<OidcAuthRequest>, AuthError> {
let auth_service = ctx.get_auth_service();
if let AppAuthService::Oidc(oidc_auth_service) = auth_service {
let auth_service = &ctx.auth;
if let AuthService::Oidc(oidc_auth_service) = auth_service {
let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts)
.resolved_origin()
.ok_or_else(|| AuthError::OidcRequestRedirectUriError(url::ParseError::EmptyHost))?;
redirect_uri.set_path("/api/oidc/callback");
redirect_uri.set_path(&format!("{CONTROLLER_PREFIX}/callback"));
let auth_request = oidc_auth_service
.build_authorization_request(redirect_uri.as_str())
@ -62,9 +70,10 @@ async fn oidc_auth(
}
}
pub fn routes(state: Arc<AppContext>) -> Routes {
Routes::new()
.prefix("/oidc")
.add("/auth", get(oidc_auth).with_state(state.clone()))
.add("/callback", get(oidc_callback).with_state(state))
pub async fn create(_context: Arc<AppContext>) -> RResult<Controller> {
let router = Router::<Arc<AppContext>>::new()
.route("/auth", get(oidc_auth))
.route("/callback", get(oidc_callback));
Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))
}


@ -1,4 +0,0 @@
pub mod client;
pub mod config;
pub use client::{AppDalClient, AppDalInitalizer, DalContentCategory};
pub use config::AppDalConfig;


@ -5,7 +5,17 @@ use thiserror::Error as ThisError;
use crate::fetch::HttpClientError;
#[derive(ThisError, Debug)]
pub enum RecorderError {
pub enum RError {
#[error(transparent)]
RSSError(#[from] rss::Error),
#[error(transparent)]
DotEnvError(#[from] dotenv::Error),
#[error(transparent)]
TeraError(#[from] tera::Error),
#[error(transparent)]
IOError(#[from] std::io::Error),
#[error(transparent)]
DbError(#[from] sea_orm::DbErr),
#[error(transparent)]
CookieParseError(#[from] cookie::ParseError),
#[error(transparent)]
@ -44,9 +54,11 @@ pub enum RecorderError {
#[source]
source: Option<Box<dyn StdError + Send + Sync>>,
},
#[error("Model Entity {entity} not found")]
ModelEntityNotFound { entity: Cow<'static, str> },
}
impl RecorderError {
impl RError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
@ -70,10 +82,10 @@ impl RecorderError {
source: Some(source),
}
}
}
impl From<RecorderError> for loco_rs::Error {
fn from(error: RecorderError) -> Self {
Self::wrap(error)
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
Self::DbError(sea_orm::DbErr::RecordNotFound(detail.to_string()))
}
}
pub type RResult<T> = Result<T, RError>;
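
With loco's ModelError::EntityNotFound gone, call sites are expected to converge on sea_orm::DbErr::RecordNotFound wrapped in RError, as the OIDC service above now does. A small hedged sketch of that pattern (the helper name is illustrative):

// Sketch only: detect the "record not found" case produced either by sea-orm
// itself or by RError::from_db_record_not_found.
fn is_record_not_found<T>(result: &RResult<T>) -> bool {
    matches!(
        result,
        Err(RError::DbError(sea_orm::DbErr::RecordNotFound(_)))
    )
}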


@ -1,21 +1,15 @@
use std::ops::Deref;
use async_trait::async_trait;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use reqwest_middleware::ClientWithMiddleware;
use secrecy::{ExposeSecret, SecretString};
use url::Url;
use super::AppMikanConfig;
use crate::{
config::AppConfigExt,
errors::RecorderError,
errors::RError,
fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth},
};
static APP_MIKAN_CLIENT: OnceCell<AppMikanClient> = OnceCell::new();
#[derive(Debug, Default, Clone)]
pub struct MikanAuthSecrecy {
pub cookie: SecretString,
@ -23,19 +17,19 @@ pub struct MikanAuthSecrecy {
}
impl MikanAuthSecrecy {
pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RecorderError> {
pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RError> {
HttpClientCookiesAuth::from_cookies(self.cookie.expose_secret(), url, self.user_agent)
}
}
#[derive(Debug)]
pub struct AppMikanClient {
pub struct MikanClient {
http_client: HttpClient,
base_url: Url,
}
impl AppMikanClient {
pub fn new(config: AppMikanConfig) -> Result<Self, RecorderError> {
impl MikanClient {
pub fn new(config: AppMikanConfig) -> Result<Self, RError> {
let http_client = HttpClient::from_config(config.http_client)?;
let base_url = config.base_url;
Ok(Self {
@ -44,7 +38,7 @@ impl AppMikanClient {
})
}
pub fn fork_with_auth(&self, secrecy: MikanAuthSecrecy) -> Result<Self, RecorderError> {
pub fn fork_with_auth(&self, secrecy: MikanAuthSecrecy) -> Result<Self, RError> {
let cookie_auth = secrecy.into_cookie_auth(&self.base_url)?;
let fork = self.http_client.fork().attach_secrecy(cookie_auth);
@ -54,12 +48,6 @@ impl AppMikanClient {
})
}
pub fn app_instance() -> &'static AppMikanClient {
APP_MIKAN_CLIENT
.get()
.expect("AppMikanClient is not initialized")
}
pub fn base_url(&self) -> &Url {
&self.base_url
}
@ -69,7 +57,7 @@ impl AppMikanClient {
}
}
impl Deref for AppMikanClient {
impl Deref for MikanClient {
type Target = ClientWithMiddleware;
fn deref(&self) -> &Self::Target {
@ -77,22 +65,4 @@ impl Deref for AppMikanClient {
}
}
impl HttpClientTrait for AppMikanClient {}
pub struct AppMikanClientInitializer;
#[async_trait]
impl Initializer for AppMikanClientInitializer {
fn name(&self) -> String {
"AppMikanClientInitializer".to_string()
}
async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
let config = &app_context.config;
let app_mikan_conf = config.get_app_conf()?.mikan;
APP_MIKAN_CLIENT.get_or_try_init(|| AppMikanClient::new(app_mikan_conf))?;
Ok(())
}
}
impl HttpClientTrait for MikanClient {}
View File
@ -4,7 +4,7 @@ pub mod constants;
pub mod rss_extract;
pub mod web_extract;
pub use client::{AppMikanClient, AppMikanClientInitializer, MikanAuthSecrecy};
pub use client::{MikanAuthSecrecy, MikanClient};
pub use config::AppMikanConfig;
pub use constants::MIKAN_BUCKET_KEY;
pub use rss_extract::{
View File
@ -1,7 +1,6 @@
use std::borrow::Cow;
use chrono::DateTime;
use color_eyre::eyre;
use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
@ -9,9 +8,9 @@ use tracing::instrument;
use url::Url;
use crate::{
errors::RecorderError,
errors::{RError, RResult},
extract::mikan::{
AppMikanClient,
MikanClient,
web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
},
fetch::bytes::fetch_bytes,
@ -101,28 +100,28 @@ impl MikanRssChannel {
}
impl TryFrom<rss::Item> for MikanRssItem {
type Error = RecorderError;
type Error = RError;
fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
let enclosure = item.enclosure.ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure"))
})?;
let enclosure = item
.enclosure
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure")))?;
let mime_type = enclosure.mime_type;
if mime_type != BITTORRENT_MIME_TYPE {
return Err(RecorderError::MimeError {
return Err(RError::MimeError {
expected: String::from(BITTORRENT_MIME_TYPE),
found: mime_type.to_string(),
desc: String::from("MikanRssItem"),
});
}
let title = item.title.ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title"))
})?;
let title = item
.title
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?;
let enclosure_url = Url::parse(&enclosure.url).map_err(|inner| {
RecorderError::from_mikan_rss_invalid_field_and_source(
RError::from_mikan_rss_invalid_field_and_source(
Cow::Borrowed("enclosure_url:enclosure.link"),
Box::new(inner),
)
@ -131,14 +130,12 @@ impl TryFrom<rss::Item> for MikanRssItem {
let homepage = item
.link
.and_then(|link| Url::parse(&link).ok())
.ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link"))
})?;
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link")))?;
let MikanEpisodeHomepage {
mikan_episode_id, ..
} = extract_mikan_episode_id_from_homepage(&homepage).ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
RError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
})?;
Ok(MikanRssItem {
@ -171,7 +168,7 @@ pub fn build_mikan_bangumi_rss_link(
mikan_base_url: impl IntoUrl,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> eyre::Result<Url> {
) -> RResult<Url> {
let mut url = mikan_base_url.into_url()?;
url.set_path("/RSS/Bangumi");
url.query_pairs_mut()
@ -186,7 +183,7 @@ pub fn build_mikan_bangumi_rss_link(
pub fn build_mikan_subscriber_aggregation_rss_link(
mikan_base_url: &str,
mikan_aggregation_id: &str,
) -> eyre::Result<Url> {
) -> RResult<Url> {
let mut url = Url::parse(mikan_base_url)?;
url.set_path("/RSS/MyBangumi");
url.query_pairs_mut()
@ -226,9 +223,9 @@ pub fn extract_mikan_subscriber_aggregation_id_from_rss_link(
#[instrument(skip_all, fields(channel_rss_link = channel_rss_link.as_str()))]
pub async fn extract_mikan_rss_channel_from_rss_link(
http_client: &AppMikanClient,
http_client: &MikanClient,
channel_rss_link: impl IntoUrl,
) -> eyre::Result<MikanRssChannel> {
) -> RResult<MikanRssChannel> {
let bytes = fetch_bytes(http_client, channel_rss_link.as_str()).await?;
let channel = rss::Channel::read_from(&bytes[..])?;
@ -327,11 +324,9 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
},
))
} else {
Err(RecorderError::MikanRssInvalidFormatError)
.inspect_err(|error| {
Err(RError::MikanRssInvalidFormatError).inspect_err(|error| {
tracing::warn!(error = %error);
})
.map_err(|error| error.into())
}
}
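A usage sketch for the link builder now that it returns `RResult<Url>` instead of an eyre result; the base URL and the bangumi/fansub ids are illustrative values only.
```rust
// Illustrative values only; errors now propagate as RError instead of eyre.
use url::Url;

use crate::{errors::RResult, extract::mikan::rss_extract::build_mikan_bangumi_rss_link};

fn bangumi_rss_example() -> RResult<Url> {
    // The fansub id stays optional, mirroring the Option<&str> parameter above.
    build_mikan_bangumi_rss_link("https://mikanani.me/", "3141", Some("370"))
}
```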
View File
@ -4,23 +4,22 @@ use async_stream::try_stream;
use bytes::Bytes;
use futures::Stream;
use itertools::Itertools;
use loco_rs::app::AppContext;
use scraper::{Html, Selector};
use tracing::instrument;
use url::Url;
use super::{
AppMikanClient, MIKAN_BUCKET_KEY, MikanBangumiRssLink, extract_mikan_bangumi_id_from_rss_link,
MIKAN_BUCKET_KEY, MikanBangumiRssLink, MikanClient, extract_mikan_bangumi_id_from_rss_link,
};
use crate::{
app::AppContextExt,
dal::DalContentCategory,
errors::RecorderError,
app::AppContext,
errors::{RError, RResult},
extract::{
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str,
},
fetch::{html::fetch_html, image::fetch_image},
storage::StorageContentCategory,
};
#[derive(Clone, Debug, PartialEq)]
@ -112,9 +111,9 @@ pub fn extract_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeH
}
pub async fn extract_mikan_poster_meta_from_src(
http_client: &AppMikanClient,
http_client: &MikanClient,
origin_poster_src_url: Url,
) -> Result<MikanBangumiPosterMeta, RecorderError> {
) -> Result<MikanBangumiPosterMeta, RError> {
let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?;
Ok(MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
@ -127,12 +126,12 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
ctx: &AppContext,
origin_poster_src_url: Url,
subscriber_id: i32,
) -> Result<MikanBangumiPosterMeta, RecorderError> {
let dal_client = ctx.get_dal_client();
let mikan_client = ctx.get_mikan_client();
) -> RResult<MikanBangumiPosterMeta> {
let dal_client = &ctx.storage;
let mikan_client = &ctx.mikan;
if let Some(poster_src) = dal_client
.exists_object(
DalContentCategory::Image,
StorageContentCategory::Image,
subscriber_id,
Some(MIKAN_BUCKET_KEY),
&origin_poster_src_url.path().replace("/images/Bangumi/", ""),
@ -150,7 +149,7 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
let poster_str = dal_client
.store_object(
DalContentCategory::Image,
StorageContentCategory::Image,
subscriber_id,
Some(MIKAN_BUCKET_KEY),
&origin_poster_src_url.path().replace("/images/Bangumi/", ""),
@ -167,9 +166,9 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
#[instrument(skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))]
pub async fn extract_mikan_episode_meta_from_episode_homepage(
http_client: &AppMikanClient,
http_client: &MikanClient,
mikan_episode_homepage_url: Url,
) -> Result<MikanEpisodeMeta, RecorderError> {
) -> Result<MikanEpisodeMeta, RError> {
let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?;
@ -185,7 +184,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
.select(bangumi_title_selector)
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -200,22 +199,18 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
.and_then(|el| el.value().attr("href"))
.and_then(|s| mikan_episode_homepage_url.join(s).ok())
.and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
})
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
.inspect_err(|error| tracing::error!(error = %error))?;
let mikan_fansub_id = mikan_fansub_id
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id"))
})
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id")))
.inspect_err(|error| tracing::error!(error = %error))?;
let episode_title = html
.select(&Selector::parse("title").unwrap())
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -223,9 +218,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
let MikanEpisodeHomepage {
mikan_episode_id, ..
} = extract_mikan_episode_id_from_homepage(&mikan_episode_homepage_url)
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
})
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -237,7 +230,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
)
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -278,9 +271,9 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
#[instrument(skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))]
pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
http_client: &AppMikanClient,
http_client: &MikanClient,
mikan_bangumi_homepage_url: Url,
) -> Result<MikanBangumiMeta, RecorderError> {
) -> Result<MikanBangumiMeta, RError> {
let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?;
let html = Html::parse_document(&content);
@ -294,7 +287,7 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
.select(bangumi_title_selector)
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.inspect_err(|error| tracing::warn!(error = %error))?;
let mikan_bangumi_id = html
@ -308,9 +301,7 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
mikan_bangumi_id, ..
}| mikan_bangumi_id,
)
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
})
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
.inspect_err(|error| tracing::error!(error = %error))?;
let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
@ -360,9 +351,9 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
*/
#[instrument(skip_all, fields(my_bangumi_page_url = my_bangumi_page_url.as_str()))]
pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
http_client: &AppMikanClient,
http_client: &MikanClient,
my_bangumi_page_url: Url,
) -> impl Stream<Item = Result<MikanBangumiMeta, RecorderError>> {
) -> impl Stream<Item = Result<MikanBangumiMeta, RError>> {
try_stream! {
let mikan_base_url = Url::parse(&my_bangumi_page_url.origin().unicode_serialization())?;
View File
@ -2,12 +2,12 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
use crate::errors::RecorderError;
use crate::errors::RError;
pub async fn fetch_bytes<T: IntoUrl, H: HttpClientTrait>(
client: &H,
url: T,
) -> Result<Bytes, RecorderError> {
) -> Result<Bytes, RError> {
let bytes = client
.get(url)
.send()
View File
@ -3,7 +3,7 @@ use std::{fmt::Debug, ops::Deref, sync::Arc, time::Duration};
use async_trait::async_trait;
use axum::http::{self, Extensions};
use http_cache_reqwest::{
CACacheManager, Cache, CacheManager, CacheMode, HttpCache, HttpCacheOptions, MokaManager,
Cache, CacheManager, CacheMode, HttpCache, HttpCacheOptions, MokaManager,
};
use leaky_bucket::RateLimiter;
use reqwest::{ClientBuilder, Request, Response};
@ -17,7 +17,7 @@ use serde_with::serde_as;
use thiserror::Error;
use super::HttpClientSecrecyDataTrait;
use crate::{app::App, fetch::get_random_mobile_ua};
use crate::fetch::get_random_mobile_ua;
pub struct RateLimiterMiddleware {
rate_limiter: RateLimiter,
@ -40,7 +40,6 @@ impl Middleware for RateLimiterMiddleware {
#[serde(rename_all = "snake_case", tag = "type")]
pub enum HttpClientCacheBackendConfig {
Moka { cache_size: u64 },
CACache { cache_path: String },
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
@ -238,12 +237,6 @@ impl HttpClient {
.cache_backend
.as_ref()
.map(|b| match b {
HttpClientCacheBackendConfig::CACache { cache_path } => {
let path = std::path::PathBuf::from(
App::get_working_root().join(cache_path).as_str(),
);
CacheBackend::new(CACacheManager { path })
}
HttpClientCacheBackendConfig::Moka { cache_size } => {
CacheBackend::new(MokaManager {
cache: Arc::new(moka::future::Cache::new(*cache_size)),
View File
@ -5,7 +5,7 @@ use reqwest::{ClientBuilder, cookie::Jar};
use secrecy::zeroize::Zeroize;
use url::Url;
use crate::errors::RecorderError;
use crate::errors::RError;
pub trait HttpClientSecrecyDataTrait: Zeroize {
fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
@ -24,7 +24,7 @@ impl HttpClientCookiesAuth {
cookies: &str,
url: &Url,
user_agent: Option<String>,
) -> Result<Self, RecorderError> {
) -> Result<Self, RError> {
let cookie_jar = Arc::new(Jar::default());
for cookie in Cookie::split_parse(cookies).try_collect::<Vec<_>>()? {
cookie_jar.add_cookie_str(&cookie.to_string(), url);
View File
@ -1,12 +1,12 @@
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
use crate::errors::RecorderError;
use crate::errors::RError;
pub async fn fetch_html<T: IntoUrl, H: HttpClientTrait>(
client: &H,
url: T,
) -> Result<String, RecorderError> {
) -> Result<String, RError> {
let content = client
.get(url)
.send()
View File
@ -2,11 +2,11 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::{bytes::fetch_bytes, client::HttpClientTrait};
use crate::errors::RecorderError;
use crate::errors::RError;
pub async fn fetch_image<T: IntoUrl, H: HttpClientTrait>(
client: &H,
url: T,
) -> Result<Bytes, RecorderError> {
) -> Result<Bytes, RError> {
fetch_bytes(client, url).await
}
View File
@ -1,7 +1,7 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct AppGraphQLConfig {
pub struct GraphQLConfig {
pub depth_limit: Option<usize>,
pub complexity_limit: Option<usize>,
}
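A minimal wiring sketch for the renamed config and service types, using the module layout shown in graphql/mod.rs below; the limit values are arbitrary examples.
```rust
// Illustrative: wiring the renamed GraphQLConfig into the renamed GraphQLService.
use async_graphql::dynamic::SchemaError;
use sea_orm::DatabaseConnection;

use crate::graphql::{GraphQLService, config::GraphQLConfig};

fn build_graphql_service(db: DatabaseConnection) -> Result<GraphQLService, SchemaError> {
    let config = GraphQLConfig {
        depth_limit: Some(10),       // arbitrary example limit
        complexity_limit: Some(200), // arbitrary example limit
    };
    GraphQLService::new(config, db)
}
```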
View File
@ -1,8 +1,9 @@
pub mod config;
pub mod filter;
pub mod guard;
pub mod schema_root;
pub mod service;
pub mod util;
pub mod filter;
pub use schema_root::schema;
pub use service::GraphQLService;
View File
@ -1,51 +1,16 @@
use async_graphql::dynamic::{Schema, SchemaError};
use async_trait::async_trait;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use sea_orm::DatabaseConnection;
use super::{config::AppGraphQLConfig, schema_root};
use crate::config::AppConfigExt;
static APP_GRAPHQL_SERVICE: OnceCell<AppGraphQLService> = OnceCell::new();
use super::{config::GraphQLConfig, schema_root};
#[derive(Debug)]
pub struct AppGraphQLService {
pub struct GraphQLService {
pub schema: Schema,
}
impl AppGraphQLService {
pub fn new(config: AppGraphQLConfig, db: DatabaseConnection) -> Result<Self, SchemaError> {
impl GraphQLService {
pub fn new(config: GraphQLConfig, db: DatabaseConnection) -> Result<Self, SchemaError> {
let schema = schema_root::schema(db, config.depth_limit, config.complexity_limit)?;
Ok(Self { schema })
}
pub fn app_instance() -> &'static Self {
APP_GRAPHQL_SERVICE
.get()
.expect("AppGraphQLService is not initialized")
}
}
#[derive(Debug, Clone)]
pub struct AppGraphQLServiceInitializer;
#[async_trait]
impl Initializer for AppGraphQLServiceInitializer {
fn name(&self) -> String {
String::from("AppGraphQLServiceInitializer")
}
async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
APP_GRAPHQL_SERVICE.get_or_try_init(|| {
let config = app_context
.config
.get_app_conf()
.map_err(loco_rs::Error::wrap)?
.graphql;
let db = &app_context.db;
AppGraphQLService::new(config, db.clone()).map_err(loco_rs::Error::wrap)
})?;
Ok(())
}
}
View File
@ -3,23 +3,24 @@
assert_matches,
unboxed_closures,
impl_trait_in_bindings,
iterator_try_collect
iterator_try_collect,
async_fn_traits,
let_chains
)]
pub mod app;
pub mod auth;
pub mod config;
pub mod cache;
pub mod controllers;
pub mod dal;
pub mod errors;
pub mod extract;
pub mod fetch;
pub mod graphql;
pub mod migrations;
pub mod models;
pub mod storage;
pub mod sync;
pub mod tasks;
#[cfg(test)]
pub mod test_utils;
pub mod views;
pub mod workers;
View File
@ -1,5 +1,4 @@
use async_trait::async_trait;
use loco_rs::schema::table_auto;
use sea_orm_migration::{prelude::*, schema::*};
use super::defs::*;
View File
@ -1,12 +1,12 @@
use async_trait::async_trait;
use loco_rs::{
app::AppContext,
model::{ModelError, ModelResult},
};
use sea_orm::{Set, TransactionTrait, entity::prelude::*};
use serde::{Deserialize, Serialize};
use super::subscribers::{self, SEED_SUBSCRIBER};
use crate::{
app::AppContext,
errors::{RError, RResult},
};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
@ -57,17 +57,17 @@ impl Related<super::subscribers::Entity> for Entity {
impl ActiveModelBehavior for ActiveModel {}
impl Model {
pub async fn find_by_pid(ctx: &AppContext, pid: &str) -> ModelResult<Self> {
pub async fn find_by_pid(ctx: &AppContext, pid: &str) -> RResult<Self> {
let db = &ctx.db;
let subscriber_auth = Entity::find()
.filter(Column::Pid.eq(pid))
.one(db)
.await?
.ok_or_else(|| ModelError::EntityNotFound)?;
.ok_or_else(|| RError::from_db_record_not_found("auth::find_by_pid"))?;
Ok(subscriber_auth)
}
pub async fn create_from_oidc(ctx: &AppContext, sub: String) -> ModelResult<Self> {
pub async fn create_from_oidc(ctx: &AppContext, sub: String) -> RResult<Self> {
let db = &ctx.db;
let txn = db.begin().await?;
View File
@ -1,10 +1,10 @@
use async_graphql::SimpleObject;
use async_trait::async_trait;
use loco_rs::app::AppContext;
use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::OnConflict};
use serde::{Deserialize, Serialize};
use super::subscription_bangumi;
use crate::{app::AppContext, errors::RResult};
#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
@ -119,9 +119,9 @@ impl Model {
mikan_bangumi_id: String,
mikan_fansub_id: String,
f: F,
) -> color_eyre::eyre::Result<Model>
) -> RResult<Model>
where
F: AsyncFnOnce(&mut ActiveModel) -> color_eyre::eyre::Result<()>,
F: AsyncFnOnce(&mut ActiveModel) -> RResult<()>,
{
let db = &ctx.db;
if let Some(existed) = Entity::find()
View File
@ -1,13 +1,13 @@
use std::sync::Arc;
use async_trait::async_trait;
use loco_rs::app::AppContext;
use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::OnConflict};
use serde::{Deserialize, Serialize};
use super::{bangumi, query::InsertManyReturningExt, subscription_episode};
use crate::{
app::AppContextExt,
app::AppContext,
errors::RResult,
extract::{
mikan::{MikanEpisodeMeta, build_mikan_episode_homepage},
rawname::parse_episode_meta_from_raw_name,
@ -140,7 +140,7 @@ impl Model {
subscriber_id: i32,
subscription_id: i32,
creations: impl IntoIterator<Item = MikanEpsiodeCreation>,
) -> color_eyre::eyre::Result<()> {
) -> RResult<()> {
let db = &ctx.db;
let new_episode_active_modes = creations
.into_iter()
@ -200,10 +200,8 @@ impl ActiveModel {
})
.ok()
.unwrap_or_default();
let homepage = build_mikan_episode_homepage(
ctx.get_mikan_client().base_url().clone(),
&item.mikan_episode_id,
);
let homepage =
build_mikan_episode_homepage(ctx.mikan.base_url().clone(), &item.mikan_episode_id);
Ok(Self {
mikan_episode_id: ActiveValue::Set(Some(item.mikan_episode_id)),
View File
@ -1,12 +1,13 @@
use async_graphql::SimpleObject;
use async_trait::async_trait;
use loco_rs::{
app::AppContext,
model::{ModelError, ModelResult},
};
use sea_orm::{ActiveValue, FromJsonQueryResult, TransactionTrait, entity::prelude::*};
use serde::{Deserialize, Serialize};
use crate::{
app::AppContext,
errors::{RError, RResult},
};
pub const SEED_SUBSCRIBER: &str = "konobangu";
#[derive(
@ -94,22 +95,22 @@ pub struct SubscriberIdParams {
impl ActiveModelBehavior for ActiveModel {}
impl Model {
pub async fn find_seed_subscriber_id(ctx: &AppContext) -> ModelResult<i32> {
pub async fn find_seed_subscriber_id(ctx: &AppContext) -> RResult<i32> {
let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER).await?;
Ok(subscriber_auth.subscriber_id)
}
pub async fn find_by_id(ctx: &AppContext, id: i32) -> ModelResult<Self> {
pub async fn find_by_id(ctx: &AppContext, id: i32) -> RResult<Self> {
let db = &ctx.db;
let subscriber = Entity::find_by_id(id)
.one(db)
.await?
.ok_or_else(|| ModelError::EntityNotFound)?;
.ok_or_else(|| RError::from_db_record_not_found("subscriptions::find_by_id"))?;
Ok(subscriber)
}
pub async fn create_root(ctx: &AppContext) -> ModelResult<Self> {
pub async fn create_root(ctx: &AppContext) -> RResult<Self> {
let db = &ctx.db;
let txn = db.begin().await?;
View File
@ -2,13 +2,13 @@ use std::{collections::HashSet, sync::Arc};
use async_trait::async_trait;
use itertools::Itertools;
use loco_rs::app::AppContext;
use sea_orm::{ActiveValue, entity::prelude::*};
use serde::{Deserialize, Serialize};
use super::{bangumi, episodes, query::filter_values_in};
use crate::{
app::AppContextExt,
app::AppContext,
errors::RResult,
extract::{
mikan::{
build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link,
@ -182,7 +182,7 @@ impl Model {
ctx: &AppContext,
create_dto: SubscriptionCreateDto,
subscriber_id: i32,
) -> color_eyre::eyre::Result<Self> {
) -> RResult<Self> {
let db = &ctx.db;
let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id);
@ -193,7 +193,7 @@ impl Model {
ctx: &AppContext,
ids: impl Iterator<Item = i32>,
enabled: bool,
) -> color_eyre::eyre::Result<()> {
) -> RResult<()> {
let db = &ctx.db;
Entity::update_many()
.col_expr(Column::Enabled, Expr::value(enabled))
@ -203,10 +203,7 @@ impl Model {
Ok(())
}
pub async fn delete_with_ids(
ctx: &AppContext,
ids: impl Iterator<Item = i32>,
) -> color_eyre::eyre::Result<()> {
pub async fn delete_with_ids(ctx: &AppContext, ids: impl Iterator<Item = i32>) -> RResult<()> {
let db = &ctx.db;
Entity::delete_many()
.filter(Column::Id.is_in(ids))
@ -215,10 +212,10 @@ impl Model {
Ok(())
}
pub async fn pull_subscription(&self, ctx: &AppContext) -> color_eyre::eyre::Result<()> {
pub async fn pull_subscription(&self, ctx: &AppContext) -> RResult<()> {
match &self.category {
SubscriptionCategory::Mikan => {
let mikan_client = ctx.get_mikan_client();
let mikan_client = &ctx.mikan;
let channel =
extract_mikan_rss_channel_from_rss_link(mikan_client, &self.source_url).await?;
@ -269,7 +266,7 @@ impl Model {
for ((mikan_bangumi_id, mikan_fansub_id), new_ep_metas) in new_mikan_bangumi_groups
{
let mikan_base_url = ctx.get_mikan_client().base_url();
let mikan_base_url = ctx.mikan.base_url();
let bgm_homepage = build_mikan_bangumi_homepage(
mikan_base_url.clone(),
&mikan_bangumi_id,
@ -287,7 +284,7 @@ impl Model {
self.id,
mikan_bangumi_id.to_string(),
mikan_fansub_id.to_string(),
async |am| -> color_eyre::eyre::Result<()> {
async |am| -> RResult<()> {
let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
mikan_client,
bgm_homepage.clone(),
View File
@ -1,26 +1,22 @@
use std::fmt;
use async_trait::async_trait;
use bytes::Bytes;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use opendal::{Buffer, Operator, layers::LoggingLayer, services::Fs};
use quirks_path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use url::Url;
use uuid::Uuid;
use super::AppDalConfig;
use crate::{app::App, config::AppConfigExt, errors::RecorderError};
use super::StorageConfig;
use crate::errors::RError;
// TODO: wait for app-context-trait to integrate
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum DalContentCategory {
pub enum StorageContentCategory {
Image,
}
impl AsRef<str> for DalContentCategory {
impl AsRef<str> for StorageContentCategory {
fn as_ref(&self) -> &str {
match self {
Self::Image => "image",
@ -28,14 +24,12 @@ impl AsRef<str> for DalContentCategory {
}
}
static APP_DAL_CLIENT: OnceCell<AppDalClient> = OnceCell::new();
pub enum DalStoredUrl {
pub enum StorageStoredUrl {
RelativePath { path: String },
Absolute { url: Url },
}
impl AsRef<str> for DalStoredUrl {
impl AsRef<str> for StorageStoredUrl {
fn as_ref(&self) -> &str {
match &self {
Self::Absolute { url } => url.as_str(),
@ -44,32 +38,24 @@ impl AsRef<str> for DalStoredUrl {
}
}
impl fmt::Display for DalStoredUrl {
impl fmt::Display for StorageStoredUrl {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.as_ref())
}
}
#[derive(Debug, Clone)]
pub struct AppDalClient {
pub struct StorageService {
pub data_dir: String,
}
impl AppDalClient {
pub fn new(config: AppDalConfig) -> Self {
impl StorageService {
pub fn from_config(config: StorageConfig) -> Self {
Self {
data_dir: App::get_working_root()
.join(config.data_dir.as_deref().unwrap_or("./data"))
.to_string(),
data_dir: config.data_dir,
}
}
pub fn app_instance() -> &'static AppDalClient {
APP_DAL_CLIENT
.get()
.expect("AppDalClient is not initialized")
}
pub fn get_fs(&self) -> Fs {
Fs::default().root(&self.data_dir)
}
@ -80,14 +66,14 @@ impl AppDalClient {
pub async fn store_object(
&self,
content_category: DalContentCategory,
content_category: StorageContentCategory,
subscriber_id: i32,
bucket: Option<&str>,
filename: &str,
data: Bytes,
) -> Result<DalStoredUrl, RecorderError> {
) -> Result<StorageStoredUrl, RError> {
match content_category {
DalContentCategory::Image => {
StorageContentCategory::Image => {
let fullname = [
&subscriber_id.to_string(),
content_category.as_ref(),
@ -109,7 +95,7 @@ impl AppDalClient {
fs_op.write(fullname.as_str(), data).await?;
Ok(DalStoredUrl::RelativePath {
Ok(StorageStoredUrl::RelativePath {
path: fullname.to_string(),
})
}
@ -118,13 +104,13 @@ impl AppDalClient {
pub async fn exists_object(
&self,
content_category: DalContentCategory,
content_category: StorageContentCategory,
subscriber_id: i32,
bucket: Option<&str>,
filename: &str,
) -> Result<Option<DalStoredUrl>, RecorderError> {
) -> Result<Option<StorageStoredUrl>, RError> {
match content_category {
DalContentCategory::Image => {
StorageContentCategory::Image => {
let fullname = [
&subscriber_id.to_string(),
content_category.as_ref(),
@ -140,7 +126,7 @@ impl AppDalClient {
.finish();
if fs_op.exists(fullname.as_str()).await? {
Ok(Some(DalStoredUrl::RelativePath {
Ok(Some(StorageStoredUrl::RelativePath {
path: fullname.to_string(),
}))
} else {
@ -152,13 +138,13 @@ impl AppDalClient {
pub async fn load_object(
&self,
content_category: DalContentCategory,
content_category: StorageContentCategory,
subscriber_pid: &str,
bucket: Option<&str>,
filename: &str,
) -> color_eyre::eyre::Result<Buffer> {
match content_category {
DalContentCategory::Image => {
StorageContentCategory::Image => {
let fullname = [
subscriber_pid,
content_category.as_ref(),
@ -180,21 +166,3 @@ impl AppDalClient {
}
}
}
pub struct AppDalInitalizer;
#[async_trait]
impl Initializer for AppDalInitalizer {
fn name(&self) -> String {
String::from("AppDalInitalizer")
}
async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
let config = &app_context.config;
let app_dal_conf = config.get_app_conf()?.dal;
APP_DAL_CLIENT.get_or_init(|| AppDalClient::new(app_dal_conf));
Ok(())
}
}
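A usage sketch of the new `StorageService` replacing the removed global `AppDalClient`, mirroring the `store_object` signature shown above; the `data_dir` and filename values are placeholders.
```rust
// Illustrative values only; mirrors the store_object signature shown above.
use bytes::Bytes;

use crate::{
    errors::RResult,
    extract::mikan::MIKAN_BUCKET_KEY,
    storage::{StorageConfig, StorageContentCategory, StorageService},
};

async fn store_example_poster(subscriber_id: i32, data: Bytes) -> RResult<()> {
    let storage = StorageService::from_config(StorageConfig {
        data_dir: "./data".to_string(), // placeholder path
    });
    // The local Fs backend returns StorageStoredUrl::RelativePath.
    let stored = storage
        .store_object(
            StorageContentCategory::Image,
            subscriber_id,
            Some(MIKAN_BUCKET_KEY),
            "example.webp", // placeholder filename
            data,
        )
        .await?;
    tracing::debug!(stored_url = %stored, "poster stored");
    Ok(())
}
```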
View File
@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct AppDalConfig {
pub data_dir: Option<String>,
pub struct StorageConfig {
pub data_dir: String,
}
View File
@ -0,0 +1,4 @@
pub mod client;
pub mod config;
pub use client::{StorageContentCategory, StorageService};
pub use config::StorageConfig;
View File
@ -0,0 +1,16 @@
use std::borrow::Cow;
use async_trait::async_trait;
use crate::{app::AppContext, errors::RResult};
pub struct TaskVars {}
#[async_trait]
pub trait Task: Send + Sync {
fn task_name() -> Cow<'static, str>;
fn task_id(&self) -> &str;
async fn run(&self, app_context: &AppContext, vars: &TaskVars) -> RResult<()>;
}
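A sketch of how a caller could drive the new trait; no runner ships in this hunk, so `run_task` is a hypothetical helper that assumes only the trait surface defined above.
```rust
// Hypothetical driver; only the Task/TaskVars definitions above are assumed.
use crate::{
    app::AppContext,
    errors::RResult,
    tasks::core::{Task, TaskVars},
};

async fn run_task<T: Task>(ctx: &AppContext, task: &T) -> RResult<()> {
    tracing::info!(task_name = %T::task_name(), task_id = task.task_id(), "running task");
    task.run(ctx, &TaskVars {}).await
}
```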
View File
@ -1,50 +1,47 @@
use futures::{TryStreamExt, pin_mut};
use loco_rs::prelude::*;
use std::borrow::Cow;
use futures::{TryStreamExt, pin_mut};
use super::core::{Task, TaskVars};
use crate::{
app::AppContext,
errors::RResult,
extract::mikan::{
MikanAuthSecrecy
MikanAuthSecrecy, web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page,
},
};
#[derive(Debug)]
pub struct CreateMikanRSSFromMyBangumiTask {
subscriber_id: i32,
task_id: String,
auth_secrecy: MikanAuthSecrecy,
pub subscriber_id: i32,
pub task_id: String,
pub auth_secrecy: MikanAuthSecrecy,
}
#[async_trait::async_trait]
impl Task for CreateMikanRSSFromMyBangumiTask {
fn task(&self) -> TaskInfo {
TaskInfo {
name: format!(
"create-mikan-rss-from-my-bangumi-{}-{}",
self.subscriber_id, self.task_id
),
detail: "create mikan rss from my bangumi page for {} {}".to_string(),
}
fn task_name() -> Cow<'static, str> {
Cow::Borrowed("create-mikan-rss-from-my-bangumi")
}
async fn run(&self, app_context: &AppContext, _vars: &task::Vars) -> Result<()> {
fn task_id(&self) -> &str {
&self.task_id
}
async fn run(&self, app_context: &AppContext, _vars: &TaskVars) -> RResult<()> {
let mikan_client = app_context
.get_mikan_client()
.mikan
.fork_with_auth(self.auth_secrecy.clone())?;
{
let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
&mikan_client,
mikan_client
.base_url()
.join("/Home/MyBangumi")
.map_err(loco_rs::Error::wrap)?,
mikan_client.base_url().join("/Home/MyBangumi")?,
);
pin_mut!(bangumi_metas);
let bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
tokio::sync::broadcast::
let _bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
}
Ok(())
View File
@ -1 +1,2 @@
pub mod core;
pub mod create_mikan_bangumi_subscriptions_from_my_bangumi_page;
View File
@ -2,12 +2,12 @@ use color_eyre::eyre;
use reqwest::IntoUrl;
use crate::{
extract::mikan::{AppMikanClient, AppMikanConfig},
extract::mikan::{AppMikanConfig, MikanClient},
fetch::HttpClientConfig,
};
pub fn build_testing_mikan_client(base_mikan_url: impl IntoUrl) -> eyre::Result<AppMikanClient> {
let mikan_client = AppMikanClient::new(AppMikanConfig {
pub fn build_testing_mikan_client(base_mikan_url: impl IntoUrl) -> eyre::Result<MikanClient> {
let mikan_client = MikanClient::new(AppMikanConfig {
http_client: HttpClientConfig {
..Default::default()
},
View File
@ -1 +0,0 @@
pub mod subscription_worker;
View File
@ -1,31 +0,0 @@
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use crate::models::subscriptions;
pub struct SubscriptionWorker {
pub ctx: AppContext,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct SubscriptionWorkerArgs {
pub subscription: subscriptions::Model,
}
#[async_trait]
impl BackgroundWorker<SubscriptionWorkerArgs> for SubscriptionWorker {
fn build(ctx: &AppContext) -> Self {
Self { ctx: ctx.clone() }
}
async fn perform(&self, _args: SubscriptionWorkerArgs) -> Result<()> {
println!("================================================");
let _db = &self.ctx.db;
let _storage = &self.ctx.storage;
println!("================================================");
Ok(())
}
}
View File
@ -1,3 +1,2 @@
mod models;
mod requests;
mod tasks;
View File
@ -1,5 +1,4 @@
// use insta::assert_debug_snapshot;
// use loco_rs::testing;
// use recorder::{app::App, models::subscribers::Model};
use serial_test::serial;
View File
@ -1,7 +1,6 @@
#![allow(unused_imports)]
use insta::{assert_debug_snapshot, with_settings};
use loco_rs::testing;
use recorder::app::App;
use recorder::app::App1;
use serial_test::serial;
macro_rules! configure_insta {
@ -17,15 +16,4 @@ macro_rules! configure_insta {
#[serial]
async fn can_get_current_user() {
configure_insta!();
// testing::request::<App, _, _>(|request, _ctx| async move {
// let response = request.get("/api/user/current").await;
// with_settings!({
// filters => testing::cleanup_user_model()
// }, {
// assert_debug_snapshot!((response.status_code(),
// response.text())); });
// })
// .await;
}
View File
@ -1 +0,0 @@
pub mod seed;
View File
@ -1,42 +0,0 @@
//! This task implements data seeding functionality for initializing new
//! development/demo environments.
//!
//! # Example
//!
//! Run the task with the following command:
//! ```sh
//! cargo run task
//! ```
//!
//! To override existing data and reset the data structure, use the following
//! command with the `refresh:true` argument:
//! ```sh
//! cargo run task seed_data refresh:true
//! ```
#![allow(unused_imports)]
use loco_rs::{db, prelude::*};
use recorder::{app::App, migrations::Migrator};
#[allow(clippy::module_name_repetitions)]
pub struct SeedData;
#[async_trait]
impl Task for SeedData {
fn task(&self) -> TaskInfo {
TaskInfo {
name: "seed_data".to_string(),
detail: "Task for seeding data".to_string(),
}
}
async fn run(&self, _app_context: &AppContext, _vars: &task::Vars) -> Result<()> {
// let refresh = vars.cli.get("refresh").is_some_and(|refresh| refresh ==
// "true");
//
// if refresh {
// db::reset::<Migrator>(&app_context.db).await?;
// }
// let path = std::path::Path::new("src/fixtures");
// db::run_app_seed::<App>(&app_context.db, path).await?;
Ok(())
}
}