feat: add opendal-based storage layer, async downloader construction, and parser module refactor

This commit is contained in:
master 2024-03-01 23:59:00 +08:00
parent 019fef9a7b
commit 01f1e31e5c
38 changed files with 853 additions and 264 deletions

2
.gitignore vendored
View File

@ -112,7 +112,7 @@ coverage
# nyc tests coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)

314
Cargo.lock generated
View File

@ -17,6 +17,17 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "aes"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
dependencies = [
"cfg-if",
"cipher",
"cpufeatures",
]
[[package]]
name = "ahash"
version = "0.7.8"
@ -232,7 +243,7 @@ dependencies = [
"derive_builder",
"diligent-date-parser",
"never",
"quick-xml",
"quick-xml 0.30.0",
]
[[package]]
@ -376,6 +387,18 @@ dependencies = [
"url",
]
[[package]]
name = "backon"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c79c8ef183b8b663e8cb19cf92fb7d98c56739977bd47eae2de2717bd5de2c2c"
dependencies = [
"fastrand",
"futures-core",
"pin-project",
"tokio",
]
[[package]]
name = "backtrace"
version = "0.3.69"
@ -514,6 +537,15 @@ dependencies = [
"generic-array",
]
[[package]]
name = "block-padding"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93"
dependencies = [
"generic-array",
]
[[package]]
name = "borsh"
version = "1.3.1"
@ -657,6 +689,15 @@ dependencies = [
"thiserror",
]
[[package]]
name = "cbc"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6"
dependencies = [
"cipher",
]
[[package]]
name = "cc"
version = "1.0.86"
@ -725,6 +766,16 @@ dependencies = [
"stacker",
]
[[package]]
name = "cipher"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
dependencies = [
"crypto-common",
"inout",
]
[[package]]
name = "clap"
version = "4.5.1"
@ -813,6 +864,26 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "const-random"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aaf16c9c2c612020bcfd042e170f6e32de9b9d75adb5277cdbbd2e2c8c8299a"
dependencies = [
"const-random-macro",
]
[[package]]
name = "const-random-macro"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e"
dependencies = [
"getrandom",
"once_cell",
"tiny-keccak",
]
[[package]]
name = "const-str"
version = "0.3.2"
@ -973,6 +1044,12 @@ dependencies = [
"regex",
]
[[package]]
name = "crunchy"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
[[package]]
name = "crypto-common"
version = "0.1.6"
@ -1230,6 +1307,15 @@ dependencies = [
"winapi",
]
[[package]]
name = "dlv-list"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f"
dependencies = [
"const-random",
]
[[package]]
name = "doc-comment"
version = "0.3.3"
@ -1375,6 +1461,12 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6"
[[package]]
name = "flagset"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d52a7e408202050813e6f1d9addadcaafef3dca7530c7ddfb005d4081cce6779"
[[package]]
name = "flate2"
version = "1.0.28"
@ -1893,6 +1985,20 @@ dependencies = [
"want",
]
[[package]]
name = "hyper-rustls"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
dependencies = [
"futures-util",
"http 0.2.11",
"hyper 0.14.28",
"rustls 0.21.10",
"tokio",
"tokio-rustls 0.24.1",
]
[[package]]
name = "hyper-tls"
version = "0.5.0"
@ -2054,6 +2160,16 @@ dependencies = [
"syn 2.0.50",
]
[[package]]
name = "inout"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5"
dependencies = [
"block-padding",
"generic-array",
]
[[package]]
name = "insta"
version = "1.35.1"
@ -2170,7 +2286,7 @@ dependencies = [
"rustls-pemfile 2.1.0",
"socket2",
"tokio",
"tokio-rustls",
"tokio-rustls 0.25.0",
"url",
"webpki-roots 0.26.1",
]
@ -2578,6 +2694,36 @@ version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "opendal"
version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3350be0d4ba326017ce22c98a9e94d21b069160fcd95bbe6c2555dac4e93c47a"
dependencies = [
"anyhow",
"async-trait",
"backon",
"base64 0.21.7",
"bytes",
"chrono",
"flagset",
"futures",
"getrandom",
"http 0.2.11",
"log",
"md-5",
"once_cell",
"percent-encoding",
"quick-xml 0.30.0",
"reqsign",
"reqwest",
"serde",
"serde_json",
"sha2",
"tokio",
"uuid",
]
[[package]]
name = "openssl"
version = "0.10.64"
@ -2640,6 +2786,16 @@ dependencies = [
"num-traits",
]
[[package]]
name = "ordered-multimap"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4d6a8c22fc714f0c2373e6091bf6f5e9b37b1bc0b1184874b7e0a4e303d318f"
dependencies = [
"dlv-list",
"hashbrown 0.14.3",
]
[[package]]
name = "os_pipe"
version = "1.1.5"
@ -2771,6 +2927,16 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"
[[package]]
name = "pbkdf2"
version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
dependencies = [
"digest",
"hmac",
]
[[package]]
name = "pem"
version = "3.0.3"
@ -2974,6 +3140,21 @@ dependencies = [
"spki",
]
[[package]]
name = "pkcs5"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e847e2c91a18bfa887dd028ec33f2fe6f25db77db3619024764914affe8b69a6"
dependencies = [
"aes",
"cbc",
"der",
"pbkdf2",
"scrypt",
"sha2",
"spki",
]
[[package]]
name = "pkcs8"
version = "0.10.2"
@ -2981,6 +3162,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
dependencies = [
"der",
"pkcs5",
"rand_core",
"spki",
]
@ -3117,6 +3300,17 @@ checksum = "eff6510e86862b57b210fd8cbe8ed3f0d7d600b9c2863cd4549a2e033c66e956"
dependencies = [
"encoding_rs",
"memchr",
"serde",
]
[[package]]
name = "quick-xml"
version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"
dependencies = [
"memchr",
"serde",
]
[[package]]
@ -3209,6 +3403,7 @@ dependencies = [
"lightningcss",
"loco-rs",
"maplit",
"opendal",
"qbit-rs",
"regex",
"reqwest",
@ -3329,6 +3524,37 @@ dependencies = [
"bytecheck",
]
[[package]]
name = "reqsign"
version = "0.14.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43e319d9de9ff4d941abf4ac718897118b0fe04577ea3f8e0f5788971784eef5"
dependencies = [
"anyhow",
"async-trait",
"base64 0.21.7",
"chrono",
"form_urlencoded",
"getrandom",
"hex",
"hmac",
"home",
"http 0.2.11",
"jsonwebtoken",
"log",
"once_cell",
"percent-encoding",
"quick-xml 0.31.0",
"rand",
"reqwest",
"rsa",
"rust-ini",
"serde",
"serde_json",
"sha1",
"sha2",
]
[[package]]
name = "requestty"
version = "0.5.0"
@ -3370,6 +3596,7 @@ dependencies = [
"http 0.2.11",
"http-body 0.4.6",
"hyper 0.14.28",
"hyper-rustls",
"hyper-tls",
"ipnet",
"js-sys",
@ -3379,6 +3606,8 @@ dependencies = [
"once_cell",
"percent-encoding",
"pin-project-lite",
"rustls 0.21.10",
"rustls-native-certs",
"rustls-pemfile 1.0.4",
"serde",
"serde_json",
@ -3387,10 +3616,13 @@ dependencies = [
"system-configuration",
"tokio",
"tokio-native-tls",
"tokio-rustls 0.24.1",
"tokio-util",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-streams",
"web-sys",
"winreg",
]
@ -3482,6 +3714,7 @@ dependencies = [
"pkcs1",
"pkcs8",
"rand_core",
"sha2",
"signature",
"spki",
"subtle",
@ -3497,7 +3730,7 @@ dependencies = [
"atom_syndication",
"derive_builder",
"never",
"quick-xml",
"quick-xml 0.30.0",
]
[[package]]
@ -3529,6 +3762,16 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "rust-ini"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e0698206bcb8882bf2a9ecb4c1e7785db57ff052297085a6efd4fe42302068a"
dependencies = [
"cfg-if",
"ordered-multimap",
]
[[package]]
name = "rust-multipart-rfc7578_2"
version = "0.6.1"
@ -3595,6 +3838,7 @@ version = "0.21.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba"
dependencies = [
"log",
"ring",
"rustls-webpki 0.101.7",
"sct",
@ -3614,6 +3858,18 @@ dependencies = [
"zeroize",
]
[[package]]
name = "rustls-native-certs"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00"
dependencies = [
"openssl-probe",
"rustls-pemfile 1.0.4",
"schannel",
"security-framework",
]
[[package]]
name = "rustls-pemfile"
version = "1.0.4"
@ -3698,6 +3954,15 @@ version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "salsa20"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213"
dependencies = [
"cipher",
]
[[package]]
name = "same-file"
version = "1.0.6"
@ -3722,6 +3987,17 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "scrypt"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0516a385866c09368f0b5bcd1caff3366aace790fcd46e2bb032697bb172fd1f"
dependencies = [
"pbkdf2",
"salsa20",
"sha2",
]
[[package]]
name = "sct"
version = "0.7.1"
@ -4795,6 +5071,15 @@ dependencies = [
"time-core",
]
[[package]]
name = "tiny-keccak"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"
dependencies = [
"crunchy",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
@ -4856,6 +5141,16 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
dependencies = [
"rustls 0.21.10",
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.25.0"
@ -5387,6 +5682,19 @@ version = "0.2.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
[[package]]
name = "wasm-streams"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129"
dependencies = [
"futures-util",
"js-sys",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
]
[[package]]
name = "web-sys"
version = "0.3.68"

View File

@ -46,6 +46,7 @@ uni-path = "1.51.1"
tl = { version = "0.7.8", features = ["simd"] }
lightningcss = "1.0.0-alpha.54"
html-escape = "0.2.13"
opendal = "0.45.0"
[lib]
name = "recorder"

View File

@ -1,6 +1,7 @@
use std::path::Path;
use async_trait::async_trait;
use axum::Router;
use loco_rs::{
app::{AppContext, Hooks},
boot::{create_app, BootResult, StartMode},
@ -15,7 +16,7 @@ use sea_orm::DatabaseConnection;
use crate::{
controllers, migrations::Migrator, models::entities::subscribers,
workers::downloader::DownloadWorker,
workers::subscription_worker::SubscriptionWorker,
};
pub struct App;
@ -47,7 +48,7 @@ impl Hooks for App {
}
fn connect_workers<'a>(p: &'a mut Processor, ctx: &'a AppContext) {
p.register(DownloadWorker::build(ctx));
p.register(SubscriptionWorker::build(ctx));
}
fn register_tasks(_tasks: &mut Tasks) {}

View File

@ -0,0 +1,10 @@
use serde::{Deserialize, Serialize};
/// Default filesystem root used by the DAL when no value is configured.
pub fn default_app_dal_fs_root() -> String {
    "data".to_owned()
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct AppDalConf {
pub fs_root: String,
}

View File

@ -0,0 +1,44 @@
pub mod dal_conf;
pub use dal_conf::AppDalConf;
use eyre::OptionExt;
use itertools::Itertools;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
pub const DAL_CONF_KEY: &str = "dal";
/// Root of the app's custom configuration as stored in the loco-rs settings
/// JSON. NOTE(review): only the `dal` section is modeled so far.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct AppCustomConf {
    // Data-access-layer settings, expected under the `DAL_CONF_KEY` ("dal") key.
    pub dal: AppDalConf,
}
/// Walk `key_path` into `value` and deserialize the reached subtree into `T`.
///
/// # Errors
/// Returns an error naming the full dotted path up to and including the key
/// that was missing, or a `serde_json` error if the subtree does not match `T`.
pub fn deserialize_key_path_from_json_value<T: DeserializeOwned>(
    key_path: &[&str],
    value: &serde_json::Value,
) -> eyre::Result<T> {
    let mut current = value;
    for (depth, key) in key_path.iter().enumerate() {
        current = current.get(key).ok_or_else(|| {
            // Include the failing key itself (the old code joined only the
            // already-traversed keys, plus a spurious empty root segment).
            let failed_key_path = key_path[..=depth].join(".");
            eyre::eyre!("can not find config key {} of settings", failed_key_path)
        })?;
    }
    // Clone only the reached subtree, not the whole settings value.
    let result: T = serde_json::from_value(current.clone())?;
    Ok(result)
}
/// Deserialize `T` from `key_path` inside the loco-rs `settings` block.
///
/// # Errors
/// Fails when the config has no `settings` section, or when the key path /
/// deserialization fails inside [`deserialize_key_path_from_json_value`].
pub fn deserialize_key_path_from_loco_rs_config<T: DeserializeOwned>(
    key_path: &[&str],
    app_config: &loco_rs::config::Config,
) -> eyre::Result<T> {
    match app_config.settings.as_ref() {
        Some(settings) => deserialize_key_path_from_json_value(key_path, settings),
        None => Err(eyre::eyre!("App config setting not set")),
    }
}

View File

@ -0,0 +1,76 @@
use bytes::Bytes;
use opendal::{layers::LoggingLayer, services, Operator};
use serde::{Deserialize, Serialize};
use url::Url;
use uuid::Uuid;
use crate::{
config::AppDalConf,
path::{VFSSubPath, VFSSubPathBuf},
};
/// Category of content stored through the DAL; doubles as a directory name
/// segment (see `store_blob`).
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum AppDalContentCategory {
    Poster,
}

impl AsRef<str> for AppDalContentCategory {
    // Directory-name form of the category.
    fn as_ref(&self) -> &str {
        match self {
            Self::Poster => "poster",
        }
    }
}
/// Runtime handle for the data-access layer; currently only carries the
/// configuration needed to build an opendal operator per request.
#[derive(Debug, Clone)]
pub struct AppDalContext {
    pub config: AppDalConf,
}
/// Where a stored blob ended up: a path relative to the DAL root, or an
/// absolute URL (e.g. for a remote backend).
// Public type previously had no derives; Debug/Clone are additive and both
// `String` and `Url` support them.
#[derive(Debug, Clone)]
pub enum DalStoredUrl {
    RelativePath { path: String },
    Absolute { url: Url },
}
impl AppDalContext {
    /// Build a DAL context from its configuration.
    pub fn new(app_dal_conf: AppDalConf) -> Self {
        Self {
            config: app_dal_conf,
        }
    }

    /// Persist `data` under `<fs_root>/<subscriber_pid>/<category>/<uuid><extname>`
    /// and return the stored location as a root-relative path.
    ///
    /// `extname` is appended verbatim to the generated UUID basename, so it is
    /// expected to include its leading dot.
    pub async fn store_blob(
        &self,
        content_category: AppDalContentCategory,
        extname: &str,
        data: Bytes,
        subscriber_pid: &str,
    ) -> eyre::Result<DalStoredUrl> {
        // Random basename avoids collisions between blobs of the same category.
        let basename = format!("{}{}", Uuid::new_v4(), extname);
        let mut dirname = [subscriber_pid, content_category.as_ref()]
            .into_iter()
            .map(VFSSubPath::new)
            .collect::<VFSSubPathBuf>();

        // A fresh operator is built per call; cheap for the Fs backend.
        let mut fs_builder = services::Fs::default();
        fs_builder.root(self.config.fs_root.as_str());

        let fs_op = Operator::new(fs_builder)?
            .layer(LoggingLayer::default())
            .finish();

        // NOTE(review): opendal treats only paths ending in '/' as directories —
        // confirm `VFSSubPathBuf::as_str` yields a trailing slash here.
        fs_op.create_dir(dirname.as_str()).await?;

        // Reuse the dir buffer to build the full file path.
        let fullname = {
            dirname.push(basename);
            dirname
        };

        fs_op.write_with(fullname.as_str(), data).await?;

        Ok(DalStoredUrl::RelativePath {
            path: fullname.to_string(),
        })
    }
}

View File

@ -6,4 +6,6 @@ pub enum DownloaderError {
InvalidMime { expected: String, found: String },
#[error("Invalid url format")]
InvalidUrlFormat(#[from] url::ParseError),
#[error("QBit api error: {0:?}")]
QBitAPIError(#[from] qbit_rs::Error),
}

View File

@ -26,7 +26,7 @@ pub struct QBittorrentDownloader {
}
impl QBittorrentDownloader {
pub fn from_downloader_model(model: downloaders::Model) -> Result<Self, DownloaderError> {
pub async fn from_downloader_model(model: downloaders::Model) -> Result<Self, DownloaderError> {
if model.category != DownloaderCategory::QBittorrent {
return Err(DownloaderError::InvalidMime {
expected: DownloaderCategory::QBittorrent.to_string(),
@ -40,16 +40,21 @@ impl QBittorrentDownloader {
let credential = Credential::new(model.username, model.password);
let client = Qbit::new(endpoint_url.clone(), credential);
client
.login(false)
.await
.map_err(DownloaderError::QBitAPIError)?;
Ok(Self {
client,
endpoint_url,
subscriber_id: model.subscriber_id,
save_path: model.download_path,
save_path: model.save_path,
})
}
async fn api_version(&self) -> eyre::Result<String> {
let result = self.client.get_webapi_version().await?;
let result = self.client.get_version().await?;
Ok(result)
}
}
@ -179,3 +184,38 @@ impl Debug for QBittorrentDownloader {
.finish()
}
}
#[cfg(test)]
mod tests {
    use super::*;

    // Platform-specific scratch directory for qBittorrent downloads in tests.
    fn get_tmp_qbit_test_folder() -> &'static str {
        if cfg!(windows) {
            "~/AppData/Local/Temp/konobangu/qbit"
        } else {
            "/tmp/konobangu/qbit"
        }
    }

    // Integration test: requires a live qBittorrent Web UI at 127.0.0.1:8080.
    // NOTE(review): empty username/password assumes localhost auth bypass is
    // enabled in qBittorrent — confirm in the test environment.
    #[tokio::test]
    async fn test_add_torrents() {
        let downloader = QBittorrentDownloader::from_downloader_model(downloaders::Model {
            created_at: Default::default(),
            updated_at: Default::default(),
            id: 0,
            category: DownloaderCategory::QBittorrent,
            endpoint: "http://127.0.0.1:8080".to_string(),
            password: "".to_string(),
            username: "".to_string(),
            subscriber_id: 0,
            save_path: get_tmp_qbit_test_folder().to_string(),
        })
        .await
        .expect("should create downloader success");

        downloader
            .check_connection()
            .await
            .expect("should check connection success");
    }
}

View File

@ -55,15 +55,15 @@ pub trait TorrentDownloader {
downloads: &[&downloads::Model],
mut bangumi: bangumi::Model,
) -> eyre::Result<bangumi::Model> {
if bangumi.sub_path.is_none() {
if bangumi.save_path.is_none() {
let gen_sub_path = gen_bangumi_sub_path(&bangumi);
let mut bangumi_active = bangumi.into_active_model();
bangumi_active.sub_path = ActiveValue::Set(Some(gen_sub_path.to_string()));
bangumi_active.save_path = ActiveValue::Set(Some(gen_sub_path.to_string()));
bangumi = bangumi_active.update(db).await?;
}
let sub_path = bangumi
.sub_path
.save_path
.as_ref()
.unwrap_or_else(|| unreachable!("must have a sub path"));
@ -81,11 +81,13 @@ pub trait TorrentDownloader {
}
}
pub fn build_torrent_downloader_from_downloader_model(
pub async fn build_torrent_downloader_from_downloader_model(
model: downloaders::Model,
) -> eyre::Result<Box<dyn TorrentDownloader>> {
Ok(Box::new(match &model.category {
DownloaderCategory::QBittorrent => QBittorrentDownloader::from_downloader_model(model)?,
DownloaderCategory::QBittorrent => {
QBittorrentDownloader::from_downloader_model(model).await?
}
}))
}

View File

@ -1,12 +1,13 @@
#![feature(async_closure)]
pub mod app;
pub mod config;
pub mod controllers;
pub mod dal;
pub mod downloaders;
pub mod migrations;
pub mod models;
pub mod parsers;
pub mod path;
pub mod subscriptions;
pub mod tasks;
pub mod views;
pub mod workers;

View File

@ -1,17 +1,18 @@
use loco_rs::app::AppContext;
use sea_orm::{prelude::*, sea_query::OnConflict, ActiveValue, Condition, QueryOrder, QuerySelect};
pub use crate::models::entities::downloads::*;
use crate::{
models::subscriptions::{self, SubscriptionCategory},
subscriptions::mikan::{MikanSubscriptionEngine, MikanSubscriptionItem},
parsers::mikan::{parse_mikan_rss_items_from_rss_link, MikanRssItem},
};
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {
pub fn from_mikan_subscription_item(m: MikanSubscriptionItem, subscription_id: i32) -> Self {
Self {
pub fn from_mikan_rss_item(m: MikanRssItem, subscription_id: i32) -> Self {
let _ = Self {
origin_name: ActiveValue::Set(m.title.clone()),
display_name: ActiveValue::Set(m.title),
subscription_id: ActiveValue::Set(subscription_id),
@ -22,20 +23,20 @@ impl ActiveModel {
all_size: ActiveValue::Set(m.content_length),
homepage: ActiveValue::Set(m.homepage),
..Default::default()
}
};
todo!()
}
}
impl Model {
pub async fn pull_subscription(
db: &DatabaseConnection,
ctx: AppContext,
item: &subscriptions::Model,
) -> eyre::Result<Vec<i32>> {
let db = &ctx.db;
match &item.category {
SubscriptionCategory::Mikan => {
let items =
MikanSubscriptionEngine::subscription_items_from_rss_url(&item.source_url)
.await?;
let items = parse_mikan_rss_items_from_rss_link(&item.source_url).await?;
let all_items = items.collect::<Vec<_>>();
let last_old_id = {
@ -55,7 +56,7 @@ impl Model {
let new_items = all_items
.into_iter()
.map(|i| ActiveModel::from_mikan_subscription_item(i, item.id));
.map(|i| ActiveModel::from_mikan_rss_item(i, item.id));
let insert_result = Entity::insert_many(new_items)
.on_conflict(OnConflict::column(Column::Url).do_nothing().to_owned())

View File

@ -19,15 +19,14 @@ pub struct Model {
pub official_title: String,
pub season: i32,
pub season_raw: Option<String>,
pub group_name: Option<String>,
pub fansub: Option<String>,
pub resolution: Option<String>,
pub source: Option<String>,
pub filter: Option<BangumiFilter>,
pub subtitle: Option<String>,
pub rss_link: Option<String>,
pub poster_link: Option<String>,
pub rule_name: Option<String>,
pub sub_path: Option<String>,
pub save_path: Option<String>,
pub deleted: bool,
}

View File

@ -25,7 +25,7 @@ pub struct Model {
pub password: String,
pub username: String,
pub subscriber_id: i32,
pub download_path: String,
pub save_path: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@ -27,10 +27,11 @@ pub struct Model {
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub raw_name: String,
pub display_name: String,
pub bangumi_id: i32,
pub output_name: String,
pub download_id: i32,
pub save_path: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@ -1,26 +1,58 @@
use sea_orm::{entity::prelude::*, ActiveValue};
use serde::{Deserialize, Serialize};
pub use super::entities::subscriptions::{self, *};
use crate::subscriptions::defs::RssCreateDto;
/// Payload for creating an RSS-backed subscription.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct SubscriptionCreateFromRssDto {
    pub rss_link: String,
    pub display_name: String,
    pub aggregate: bool,
    // `None` is treated as disabled (`unwrap_or(false)` on insert).
    pub enabled: Option<bool>,
}
/// Subscription-creation DTO, discriminated by a `"category"` JSON tag.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(tag = "category")]
pub enum SubscriptionCreateDto {
    Mikan(SubscriptionCreateFromRssDto),
}
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl Model {
pub async fn add_rss(
db: &DatabaseConnection,
create_dto: RssCreateDto,
impl ActiveModel {
pub fn from_create_dto(create_dto: SubscriptionCreateDto, subscriber_id: i32) -> Self {
match create_dto {
SubscriptionCreateDto::Mikan(create_dto) => {
Self::from_rss_create_dto(SubscriptionCategory::Mikan, create_dto, subscriber_id)
}
}
}
fn from_rss_create_dto(
category: SubscriptionCategory,
create_dto: SubscriptionCreateFromRssDto,
subscriber_id: i32,
) -> eyre::Result<Self> {
let subscription = ActiveModel {
) -> Self {
Self {
display_name: ActiveValue::Set(create_dto.display_name),
enabled: ActiveValue::Set(create_dto.enabled.unwrap_or(false)),
aggregate: ActiveValue::Set(create_dto.aggregate),
subscriber_id: ActiveValue::Set(subscriber_id),
category: ActiveValue::Set(SubscriptionCategory::Mikan),
category: ActiveValue::Set(category),
source_url: ActiveValue::Set(create_dto.rss_link),
..Default::default()
};
}
}
}
impl Model {
pub async fn add_subscription(
db: &DatabaseConnection,
create_dto: SubscriptionCreateDto,
subscriber_id: i32,
) -> eyre::Result<Self> {
let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id);
Ok(subscription.insert(db).await?)
}

View File

@ -1,7 +0,0 @@
use crate::parsers::errors::ParseError;
pub fn parse_bangumi_season(season_str: &str) -> Result<i32, ParseError> {
season_str
.parse::<i32>()
.map_err(ParseError::BangumiSeasonError)
}

View File

@ -6,4 +6,10 @@ pub enum ParseError {
BangumiSeasonError(#[from] std::num::ParseIntError),
#[error("Parse file url error: {0}")]
FileUrlError(#[from] url::ParseError),
#[error("Parse {desc} with mime error, expected {expected}, but got {found}")]
MimeError {
desc: String,
expected: String,
found: String,
},
}

View File

@ -0,0 +1,3 @@
pub mod html_parser_utils;
pub use html_parser_utils::{get_tag_style, query_selector_first_tag};

View File

@ -7,18 +7,18 @@ use url::Url;
use crate::{
downloaders::{html::download_html, image::download_image},
parsers::html_parser::{get_tag_style, query_selector_first_tag},
parsers::html::{get_tag_style, query_selector_first_tag},
};
pub struct MikanEpisodeMeta {
pub homepage: Url,
pub poster_src: Option<Url>,
pub poster_data: Option<Bytes>,
pub origin_poster_src: Option<Url>,
pub official_title: String,
}
lazy_static! {
pub static ref MIKAN_TITLE_SEASON: Regex = Regex::new("第.*季").unwrap();
static ref MIKAN_TITLE_SEASON: Regex = Regex::new("第.*季").unwrap();
}
pub async fn parse_episode_meta_from_mikan_homepage(
@ -30,7 +30,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
let parser = dom.parser();
let poster_node = query_selector_first_tag(&dom, r"div.bangumi-poster", parser);
let official_title_node = query_selector_first_tag(&dom, r"p.bangumi-title", parser);
let mut poster_src = None;
let mut origin_poster_src = None;
if let Some(style) = poster_node.and_then(get_tag_style) {
for (prop, _) in style.iter() {
match prop {
@ -38,7 +38,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
if let Some(Image::Url(path)) = images.first() {
if let Ok(url) = Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
{
poster_src = Some(url);
origin_poster_src = Some(url);
}
}
}
@ -48,7 +48,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
if let Ok(url) =
Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
{
poster_src = Some(url);
origin_poster_src = Some(url);
}
}
}
@ -57,12 +57,12 @@ pub async fn parse_episode_meta_from_mikan_homepage(
}
}
};
poster_src = poster_src.map(|mut p| {
origin_poster_src = origin_poster_src.map(|mut p| {
p.set_query(None);
p
});
let poster_data = if let Some(p) = poster_src.as_ref() {
download_image(p.as_str()).await.ok()
let poster_data = if let Some(p) = origin_poster_src.as_ref() {
download_image(p.clone()).await.ok()
} else {
None
};
@ -81,9 +81,9 @@ pub async fn parse_episode_meta_from_mikan_homepage(
})
.map(|title| MikanEpisodeMeta {
homepage: url,
poster_src,
official_title: title,
poster_data,
official_title: title,
origin_poster_src,
});
Ok(meta)
}
@ -92,7 +92,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
mod test {
use url::Url;
use crate::parsers::mikan_ep_parser::parse_episode_meta_from_mikan_homepage;
use super::parse_episode_meta_from_mikan_homepage;
#[tokio::test]
async fn test_parse_mikan() {
@ -103,13 +103,13 @@ mod test {
if let Some(ep_meta) = parse_episode_meta_from_mikan_homepage(url.clone()).await? {
assert_eq!(ep_meta.homepage, url);
assert_eq!(ep_meta.official_title, "葬送的芙莉莲");
assert_eq!(
ep_meta.poster_src,
ep_meta.origin_poster_src,
Some(Url::parse(
"https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
)?)
);
assert_eq!(ep_meta.official_title, "葬送的芙莉莲");
let u8_data = ep_meta.poster_data.expect("should have poster data");
assert!(
u8_data.starts_with(&[255, 216, 255, 224]),

View File

@ -1,10 +1,14 @@
use chrono::DateTime;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
use crate::downloaders::{bytes::download_bytes, defs::BITTORRENT_MIME_TYPE};
use crate::{
downloaders::{bytes::download_bytes, defs::BITTORRENT_MIME_TYPE},
parsers::errors::ParseError,
};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriptionItem {
pub struct MikanRssItem {
pub title: String,
pub homepage: Option<String>,
pub url: String,
@ -13,16 +17,18 @@ pub struct MikanSubscriptionItem {
pub pub_date: Option<i64>,
}
impl MikanSubscriptionItem {
pub fn from_rss_item(item: rss::Item) -> Option<Self> {
let mime_match = item
impl TryFrom<rss::Item> for MikanRssItem {
type Error = ParseError;
fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
let mime_type = item
.enclosure()
.map(|x| x.mime_type == BITTORRENT_MIME_TYPE)
.map(|x| x.mime_type.to_string())
.unwrap_or_default();
if mime_match {
if mime_type == BITTORRENT_MIME_TYPE {
let enclosure = item.enclosure.unwrap();
Some(MikanSubscriptionItem {
Ok(MikanRssItem {
title: item.title.unwrap_or_default(),
homepage: item.link,
url: enclosure.url,
@ -34,36 +40,36 @@ impl MikanSubscriptionItem {
.map(|s| s.timestamp_millis()),
})
} else {
None
Err(ParseError::MimeError {
expected: String::from(BITTORRENT_MIME_TYPE),
found: mime_type,
desc: String::from("MikanRssItem"),
})
}
}
}
pub struct MikanSubscriptionEngine;
pub async fn parse_mikan_rss_items_from_rss_link(
url: impl IntoUrl,
) -> eyre::Result<impl Iterator<Item = MikanRssItem>> {
let bytes = download_bytes(url).await?;
impl MikanSubscriptionEngine {
pub async fn subscription_items_from_rss_url(
url: &str,
) -> eyre::Result<impl Iterator<Item = MikanSubscriptionItem>> {
let bytes = download_bytes(url).await?;
let channel = rss::Channel::read_from(&bytes[..])?;
let channel = rss::Channel::read_from(&bytes[..])?;
Ok(channel
.items
.into_iter()
.flat_map(MikanSubscriptionItem::from_rss_item))
}
Ok(channel.items.into_iter().flat_map(MikanRssItem::try_from))
}
#[cfg(test)]
mod tests {
use url::Url;
use super::parse_mikan_rss_items_from_rss_link;
use crate::downloaders::defs::BITTORRENT_MIME_TYPE;
#[tokio::test]
pub async fn test_mikan_subscription_items_from_rss_url() {
let url = "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370";
let items = super::MikanSubscriptionEngine::subscription_items_from_rss_url(url)
let items = parse_mikan_rss_items_from_rss_link(url)
.await
.expect("should get subscription items from rss url")
.collect::<Vec<_>>();

View File

@ -0,0 +1,5 @@
pub mod mikan_ep_parser;
pub mod mikan_rss_parser;
pub use mikan_ep_parser::{parse_episode_meta_from_mikan_homepage, MikanEpisodeMeta};
pub use mikan_rss_parser::{parse_mikan_rss_items_from_rss_link, MikanRssItem};

View File

@ -1,8 +1,8 @@
pub mod bangumi_parser;
pub mod defs;
pub mod errors;
pub mod html_parser;
pub mod mikan_ep_parser;
pub mod raw_ep_parser;
pub mod html;
pub mod mikan;
pub mod raw;
pub mod title_parser;
pub mod torrent;
pub mod torrent_parser;

View File

@ -0,0 +1,3 @@
pub mod raw_ep_parser;
pub use raw_ep_parser::{parse_episode_meta_from_raw_name, RawEpisodeMeta};

View File

@ -5,7 +5,7 @@ use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
use super::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE};
use crate::parsers::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE};
const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";
@ -95,7 +95,10 @@ fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> eyre::Resul
}
if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw) {
if m.len() as f32 > (raw.len() as f32) * 0.5 {
let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1.replace(&raw, "").chars().collect_vec();
let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
.replace(&raw, "")
.chars()
.collect_vec();
while let Some(ch) = raw1.pop() {
if ch == ']' {
break;
@ -168,7 +171,8 @@ fn extract_name_from_title_body_name_section(
let mut name_zh = None;
let mut name_jp = None;
let replaced1 = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE.replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE
.replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
let trimmed = replaced2.trim();
let mut split = NAME_EXTRACT_SPLIT_RE
.split(trimmed)
@ -256,11 +260,15 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> eyre::Result<RawEpisodeMeta>
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
let fansub = extract_fansub(&raw_title_without_ch_brackets);
let is_movie = check_is_movie(&raw_title_without_ch_brackets);
if let Some(title_re_match_obj) = MOVIE_TITLE_RE.captures(&raw_title_without_ch_brackets).or(TITLE_RE.captures(&raw_title_without_ch_brackets)) {
if let Some(title_re_match_obj) = MOVIE_TITLE_RE
.captures(&raw_title_without_ch_brackets)
.or(TITLE_RE.captures(&raw_title_without_ch_brackets))
{
let mut title_body = title_re_match_obj
.get(1)
.map(|s| s.as_str().trim())
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups")).to_string();
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"))
.to_string();
let mut title_episode = title_re_match_obj
.get(2)
.map(|s| s.as_str().trim())
@ -306,18 +314,25 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> eyre::Result<RawEpisodeMeta>
mod tests {
use super::{parse_episode_meta_from_raw_name, RawEpisodeMeta};
struct TestCase {
source: &'static str,
expected: &'static str,
/// Shared assertion helper for the raw-episode-name parser tests.
///
/// `expected` is a JSON encoding of `Option<RawEpisodeMeta>`; a parse
/// failure of `raw_name` is represented as `None`. On mismatch, the panic
/// message includes the offending input and pretty-printed JSON of both
/// sides so failures are diagnosable straight from the test output.
fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
    let expected: Option<RawEpisodeMeta> =
        serde_json::from_str(expected).expect("test fixture `expected` should be valid JSON");
    let found = parse_episode_meta_from_raw_name(raw_name).ok();
    assert_eq!(
        expected,
        found,
        "parsing {:?}: expected {} but found {}",
        raw_name,
        serde_json::to_string_pretty(&expected).unwrap(),
        serde_json::to_string_pretty(&found).unwrap()
    );
}
#[test]
fn test_parse_episode_meta_from_raw_name() {
let test_cases = vec![
// all field wrapped by []
TestCase {
source: r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
expected: r#"{
fn test_parse_ep_with_all_parts_wrapped() {
test_raw_ep_parser_case(
r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
r#"{
"name_zh": "我心里危险的东西",
"name_zh_no_season": "我心里危险的东西",
"season": 2,
@ -328,11 +343,14 @@ mod tests {
"fansub": "新Sub",
"resolution": "1080P"
}"#,
},
// title wrap with []
TestCase {
source: r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_title_wrapped_by_one_square_bracket_and_season_prefix() {
test_raw_ep_parser_case(
r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
r#"{
"name_en": "Boku no Kokoro no Yabai Yatsu",
"name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
"name_zh": "我内心的糟糕念头",
@ -345,11 +363,14 @@ mod tests {
"fansub": "喵萌奶茶屋",
"resolution": "1080p"
}"#,
},
TestCase {
// ep+version case
source: r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
expected: r#"{
);
}
#[test]
fn test_parse_ep_with_ep_and_version() {
test_raw_ep_parser_case(
r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{
"name_en": "Shin no Nakama 2nd",
"name_en_no_season": "Shin no Nakama",
"name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
@ -362,11 +383,14 @@ mod tests {
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
},
TestCase {
// pure english title case
source: r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_en_title_only() {
test_raw_ep_parser_case(
r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
r#"{
"name_en": "THE MARGINAL SERVICE",
"name_en_no_season": "THE MARGINAL SERVICE",
"season": 1,
@ -376,11 +400,14 @@ mod tests {
"fansub": "动漫国字幕组&LoliHouse",
"resolution": "1080p"
}"#,
},
TestCase {
// two zh titles case
source: r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_two_zh_title() {
test_raw_ep_parser_case(
r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{
"name_en": "Nozomanu Fushi no Boukensha",
"name_en_no_season": "Nozomanu Fushi no Boukensha",
"name_zh": "事与愿违的不死冒险者",
@ -393,11 +420,14 @@ mod tests {
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
},
TestCase {
// en+zh+jp case
source: r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_en_zh_jp_titles() {
test_raw_ep_parser_case(
r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
r#"{
"name_en": "Pon no Michi",
"name_jp": "ぽんのみち",
"name_zh": "碰之道",
@ -412,11 +442,14 @@ mod tests {
"fansub": "喵萌奶茶屋&LoliHouse",
"resolution": "1080p"
}"#,
},
TestCase {
// season nth case
source: r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_nth_season() {
test_raw_ep_parser_case(
r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_en": "Yowai Character Tomozakikun",
"name_en_no_season": "Yowai Character Tomozakikun",
"name_zh": "弱角友崎同学 2nd STAGE",
@ -429,11 +462,14 @@ mod tests {
"fansub": "ANi",
"resolution": "1080P"
}"#,
},
TestCase {
// season en + season zh case
source: r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_season_en_and_season_zh() {
test_raw_ep_parser_case(
r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
r#"{
"name_en": "Kingdom S5",
"name_en_no_season": "Kingdom",
"name_zh": "王者天下 第五季",
@ -446,11 +482,14 @@ mod tests {
"fansub": "豌豆字幕组&LoliHouse",
"resolution": "1080p"
}"#,
},
// ad-hoc cases for 千夏字幕组 _sep style
TestCase {
source: r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_airota_fansub_style_case1() {
test_raw_ep_parser_case(
r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
r#"{
"name_en": "Alice to Therese no Maboroshi Koujou",
"name_en_no_season": "Alice to Therese no Maboroshi Koujou",
"name_zh": "爱丽丝与特蕾丝的虚幻工厂",
@ -462,11 +501,14 @@ mod tests {
"fansub": "千夏字幕组",
"resolution": "1080p"
}"#,
},
// ad-hoc cases for 千夏字幕组 _sep style starting with ") "
TestCase {
source: r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_airota_fansub_style_case2() {
test_raw_ep_parser_case(
r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
r#"{
"name_en": "Yuru Camp Movie",
"name_en_no_season": "Yuru Camp Movie",
"name_jp": null,
@ -481,11 +523,14 @@ mod tests {
"fansub": "千夏字幕组&喵萌奶茶屋",
"resolution": "2160p"
}"#,
},
// title split by ][
TestCase {
source: r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_many_square_brackets_split_title() {
test_raw_ep_parser_case(
r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
r#"{
"name_en": "Yuru Camp",
"name_en_no_season": "Yuru Camp",
"name_zh": "剧场版-摇曳露营",
@ -496,11 +541,14 @@ mod tests {
"fansub": "MCE汉化组",
"resolution": "1080P"
}"#,
},
// single title block split by space + netflex
TestCase {
source: r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
expected: r#"
)
}
#[test]
fn test_parse_ep_with_square_brackets_wrapped_and_space_split() {
test_raw_ep_parser_case(
r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
r#"
{
"name_en": "Delicious in Dungeon",
"name_en_no_season": "Delicious in Dungeon",
@ -514,11 +562,14 @@ mod tests {
"resolution": "1080P"
}
"#,
},
// start with season like 1月新番
TestCase {
source: r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_start_with_brackets_wrapped_season_info_prefix() {
test_raw_ep_parser_case(
r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
r#"{
"name_en": "Dungeon Meshi",
"name_en_no_season": "Dungeon Meshi",
"name_zh": "迷宫饭",
@ -529,11 +580,14 @@ mod tests {
"fansub": "爱恋字幕社",
"resolution": "1080P"
}"#,
},
// prevent [ ] pair to small, chars size in biggest [ ] in title should greater than len(title_body) * 0.5
TestCase {
source: r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
expected: r#"{
)
}
#[test]
fn test_parse_ep_with_small_no_title_extra_brackets_case() {
test_raw_ep_parser_case(
r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_en": "Mahou Shoujo ni Akogarete",
"name_en_no_season": "Mahou Shoujo ni Akogarete",
"name_zh": "梦想成为魔法少女 [年龄限制版]",
@ -545,11 +599,15 @@ mod tests {
"fansub": "ANi",
"resolution": "1080P"
}"#,
},
// TODO: failed case, can not find capture point
TestCase {
source: r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
expected: r#"{
)
}
// TODO: FIXME
#[test]
fn test_bad_case() {
test_raw_ep_parser_case(
r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
r#"{
"name_zh": "摇曳露营△剧场版",
"name_zh_no_season": "摇曳露营△剧场版",
"season": 1,
@ -560,21 +618,6 @@ mod tests {
"fansub": "7³ACG x 桜都字幕组",
"resolution": "1080p"
}"#,
},
];
for case in test_cases {
let expected: Option<RawEpisodeMeta> = serde_json::from_str(case.expected).unwrap();
let found = parse_episode_meta_from_raw_name(case.source).ok();
if expected != found {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
}
assert_eq!(expected, found);
}
)
}
}

View File

@ -1,9 +1 @@
use crate::path::VFSPath;
/// Stub: intended to parse episode/title metadata out of a torrent file path.
///
/// Currently a no-op placeholder. Parameters are underscore-prefixed to
/// silence unused-variable warnings until the implementation lands.
/// NOTE(review): `file_type` is a stringly-typed mode flag — consider an
/// enum once the set of file types is known.
pub fn parse_torrent_title(
    _torrent_path: VFSPath<'_>,
    _torrent_name: Option<&str>,
    _season: Option<i32>,
    _file_type: &str,
) {
}

View File

@ -0,0 +1 @@
mod torrent_ep_parser;

View File

@ -0,0 +1,45 @@
use serde::{Deserialize, Serialize};
/// Episode metadata extracted from a media (video) file inside a torrent.
/// Placeholder: no fields designed yet — TODO fill in once the parser lands.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeMediaMeta {}
/// Episode metadata extracted from a subtitle file inside a torrent.
/// Placeholder: no fields designed yet — TODO fill in once the parser lands.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeSubtitleMeta {}
/// Parses episode metadata for a media (video) file contained in a torrent.
///
/// Stub — not implemented yet; calling it panics via `todo!()`.
/// NOTE(review): `torrent_name` and `season` look like optional overrides
/// for values otherwise derived from `torrent_path` — confirm when implemented.
pub fn parse_episode_media_meta_from_torrent(
torrent_path: &str,
torrent_name: Option<&str>,
season: Option<i32>,
) -> eyre::Result<TorrentEpisodeMediaMeta> {
todo!()
}
pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &str,
torrent_name: Option<&str>,
season: Option<i32>,
) -> eyre::Result<TorrentEpisodeMediaMeta> {
todo!()
}
#[cfg(test)]
mod tests {
use super::{
parse_episode_media_meta_from_torrent, parse_episode_subtitle_meta_from_torrent,
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta,
};
// Shared assertion helper: `expected` is a JSON encoding of
// `Option<TorrentEpisodeMediaMeta>`; a parse failure of `raw_name`
// is represented as `None`.
// NOTE(review): only exercises the media-meta parser; the subtitle
// imports above are currently unused — presumably a subtitle helper
// is planned. TODO confirm.
pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) {
let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).unwrap();
let found = parse_episode_media_meta_from_torrent(raw_name, None, None).ok();
// Pretty-print both sides before asserting so mismatches are readable.
if expected != found {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
}
assert_eq!(expected, found);
}
}

View File

@ -7,7 +7,7 @@ pub fn get_path_basename<'a>(path: &'a VFSPath) -> &'a str {
path.basename()
}
pub fn get_group(group_and_title: &str) -> (Option<&str>, &str) {
pub fn get_fansub(group_and_title: &str) -> (Option<&str>, &str) {
let n = BRACKETS_REG
.split(group_and_title)
.map(|s| s.trim())
@ -72,7 +72,7 @@ pub fn parse_torrent<'a>(
.get(1)
.unwrap_or_else(|| unreachable!("should have 1 group"))
.as_str();
let (group, title) = get_group(group_and_title);
let (group, title) = get_fansub(group_and_title);
let season_and_title = get_season_and_title(title);
let season = season.unwrap_or(season_and_title.1);
let title = season_and_title.0;

View File

@ -3,7 +3,7 @@ use std::collections::HashSet;
use crate::{
downloaders::defs::Torrent,
models::{bangumi, subscribers},
parsers::{bangumi_parser::parse_bangumi_season, defs::SEASON_REGEX},
parsers::defs::SEASON_REGEX,
path::{VFSPath, VFSSubPathBuf},
};
@ -40,13 +40,12 @@ pub fn path_to_bangumi<'a>(
for part in save_path.components().map(|s| s.as_str()) {
if let Some(match_result) = SEASON_REGEX.captures(part) {
season = Some(
parse_bangumi_season(
match_result
.get(2)
.unwrap_or_else(|| unreachable!("must have a season"))
.as_str(),
)
.unwrap_or_else(|e| unreachable!("{}", e.to_string())),
match_result
.get(2)
.unwrap_or_else(|| unreachable!("must have a season"))
.as_str()
.parse::<i32>()
.unwrap_or_else(|e| unreachable!("{}", e.to_string())),
);
} else if !downloader_parts.contains(part) {
bangumi_name = Some(part);
@ -71,7 +70,7 @@ pub fn gen_bangumi_sub_path(data: &bangumi::Model) -> VFSSubPathBuf {
}
pub fn rule_name(bgm: &bangumi::Model, conf: &subscribers::SubscriberBangumiConfig) -> String {
if let (Some(true), Some(group_name)) = (conf.leading_group_tag, &bgm.group_name) {
if let (Some(true), Some(group_name)) = (conf.leading_group_tag, &bgm.fansub) {
format!("[{}] {} S{}", group_name, bgm.official_title, bgm.season)
} else {
format!("{} S{}", bgm.official_title, bgm.season)

View File

@ -1,6 +1,5 @@
use std::path::{Path, PathBuf};
use std::path::PathBuf;
use bytes::Buf;
use lazy_static::lazy_static;
pub use uni_path::{Path as VFSSubPath, PathBuf as VFSSubPathBuf};

View File

@ -1,9 +0,0 @@
use crate::models::prelude::*;
/// Data required to create a new RSS subscription.
pub struct RssCreateDto {
// Feed URL to poll.
pub rss_link: String,
// Human-readable name shown for this subscription.
pub display_name: String,
// Aggregation flag — exact semantics not visible here; TODO confirm.
pub aggregate: bool,
// Kind of subscription source; see `SubscriptionCategory`.
pub category: SubscriptionCategory,
// Enabled flag; `None` presumably falls back to a default — TODO confirm.
pub enabled: Option<bool>,
}

View File

@ -1,3 +0,0 @@
pub mod defs;
pub mod bangumi;
pub mod mikan;

View File

@ -1,43 +0,0 @@
use std::time::Duration;
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use tokio::time::sleep;
use crate::models::subscribers;
/// Background worker that runs the (demo) download/report job.
pub struct DownloadWorker {
// Owned clone of the application context (DB handle, config, queues).
pub ctx: AppContext,
}
/// Job payload for `DownloadWorker`.
#[derive(Deserialize, Debug, Serialize)]
pub struct DownloadWorkerArgs {
// Identifier of the user to process; presumably a GUID string — TODO confirm.
pub user_guid: String,
}
impl worker::AppWorker<DownloadWorkerArgs> for DownloadWorker {
// Clone the shared application context so the worker owns its handle.
fn build(ctx: &AppContext) -> Self {
Self { ctx: ctx.clone() }
}
}
#[async_trait]
impl worker::Worker<DownloadWorkerArgs> for DownloadWorker {
// Demo/placeholder job: prints a banner, sleeps 2s, then lists every
// subscriber id from the database.
async fn perform(&self, args: DownloadWorkerArgs) -> worker::Result<()> {
// TODO: Some actual work goes here...
println!("================================================");
println!("Sending payment report to user {}", args.user_guid);
// Simulated work; tokio's sleep keeps the async runtime unblocked.
sleep(Duration::from_millis(2000)).await;
// Load all subscriber rows; DB errors are boxed into the worker error type.
let all = subscribers::Entity::find()
.all(&self.ctx.db)
.await
.map_err(Box::from)?;
for user in &all {
println!("user: {}", user.id);
}
println!("================================================");
Ok(())
}
}

View File

@ -1 +1 @@
pub mod downloader;
pub mod subscription_worker;

View File

@ -0,0 +1,32 @@
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use crate::models::subscriptions;
/// Background worker that processes a single subscription job.
pub struct SubscriptionWorker {
// Owned clone of the application context (DB handle, storage, etc.).
pub ctx: AppContext,
}
/// Job payload for `SubscriptionWorker`.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct SubscriptionWorkerArgs {
// Snapshot of the subscription row to process, taken at enqueue time.
pub subscription: subscriptions::Model,
}
impl worker::AppWorker<SubscriptionWorkerArgs> for SubscriptionWorker {
fn build(ctx: &AppContext) -> Self {
Self { ctx: ctx.clone() }
}
}
#[async_trait]
impl worker::Worker<SubscriptionWorkerArgs> for SubscriptionWorker {
    /// Processes one subscription job.
    ///
    /// Work-in-progress skeleton: it currently only prints marker banners.
    /// The unused argument and context handles are underscore-prefixed so
    /// the stub compiles without unused-variable warnings; the banners are
    /// kept byte-identical to preserve current observable behavior.
    async fn perform(&self, _args: SubscriptionWorkerArgs) -> worker::Result<()> {
        println!("================================================");
        // TODO: use the DB and storage handles to fetch and persist updates
        // for `_args.subscription`.
        let _db = &self.ctx.db;
        let _storage = &self.ctx.storage;
        println!("================================================");
        Ok(())
    }
}