diff --git a/.cargo/config.toml b/.cargo/config.toml index 408636c..0d82fd2 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -2,9 +2,4 @@ recorder-playground = "run -p recorder --example playground -- --environment development" [build] -rustflags = [ - "-Zthreads=8", - "--cfg", - "feature=\"testcontainers\"", - "-Zshare-generics=y", -] +rustflags = ["-Zthreads=8", "-Zshare-generics=y"] diff --git a/Cargo.lock b/Cargo.lock index c4be0e6..2471dc9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1709,7 +1709,6 @@ dependencies = [ "async-trait", "bytes", "chrono", - "dashmap 6.1.0", "fetch", "futures", "itertools 0.14.0", @@ -1721,7 +1720,6 @@ dependencies = [ "reqwest", "serde", "serde-value", - "serde_json", "snafu", "testcontainers", "testcontainers-ext", @@ -5166,7 +5164,6 @@ dependencies = [ "sea-orm", "sea-orm-migration", "seaography", - "secrecy", "serde", "serde_json", "serde_variant", @@ -5174,7 +5171,6 @@ dependencies = [ "serde_yaml", "serial_test", "snafu", - "string-interner", "tera", "testcontainers", "testcontainers-ext", @@ -5934,15 +5930,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "secrecy" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a" -dependencies = [ - "zeroize", -] - [[package]] name = "security-framework" version = "2.11.1" @@ -6651,16 +6638,6 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7beae5182595e9a8b683fa98c4317f956c9a2dec3b9716990d20023cc60c766" -[[package]] -name = "string-interner" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23de088478b31c349c9ba67816fa55d9355232d63c3afea8bf513e31f0f1d2c0" -dependencies = [ - "hashbrown 0.15.2", - "serde", -] - [[package]] name = "string_cache" version = "0.8.9" diff --git a/apps/recorder/.gitignore b/apps/recorder/.gitignore index b1a4dba..1b5bbe1 100644 --- a/apps/recorder/.gitignore +++ b/apps/recorder/.gitignore @@ -25,4 +25,5 @@ Cargo.lock # Dist node_modules dist/ -temp/ +temp/* +!temp/.gitkeep diff --git a/apps/recorder/Cargo.toml b/apps/recorder/Cargo.toml index e31fa77..008bd05 100644 --- a/apps/recorder/Cargo.toml +++ b/apps/recorder/Cargo.toml @@ -19,6 +19,8 @@ testcontainers = [ "dep:testcontainers", "dep:testcontainers-modules", "dep:testcontainers-ext", + "downloader/testcontainers", + "testcontainers-modules/postgres", ] [dependencies] @@ -108,12 +110,11 @@ apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] } apalis-sql = { version = "0.7", features = ["postgres"] } cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] } rand = "0.9.1" +reqwest_cookie_store = "0.8.0" + downloader = { workspace = true } util = { workspace = true } fetch = { workspace = true } -string-interner = "0.19.0" -secrecy = "0.10.3" -reqwest_cookie_store = "0.8.0" [dev-dependencies] serial_test = "3" diff --git a/apps/recorder/src/app/config/mod.rs b/apps/recorder/src/app/config/mod.rs index 47dd5a7..232a29b 100644 --- a/apps/recorder/src/app/config/mod.rs +++ b/apps/recorder/src/app/config/mod.rs @@ -142,7 +142,7 @@ impl AppConfig { .flat_map(|ps| { allowed_extensions .iter() - .map(move |ext| (format!("{}{}{}", convention_prefix, ps, ext), ext)) + .map(move |ext| (format!("{convention_prefix}{ps}{ext}"), ext)) }) .collect_vec(); diff --git a/apps/recorder/src/app/context.rs b/apps/recorder/src/app/context.rs index d616d16..cc7aade 100644 --- 
a/apps/recorder/src/app/context.rs +++ b/apps/recorder/src/app/context.rs @@ -4,9 +4,16 @@ use tokio::sync::OnceCell; use super::{Environment, config::AppConfig}; use crate::{ - auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService, - errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService, - logger::LoggerService, storage::StorageService, tasks::TaskService, + auth::AuthService, + cache::CacheService, + crypto::CryptoService, + database::DatabaseService, + errors::RecorderResult, + extract::mikan::MikanClient, + graphql::GraphQLService, + logger::LoggerService, + storage::{StorageService, StorageServiceTrait}, + tasks::TaskService, }; pub trait AppContextTrait: Send + Sync + Debug { @@ -17,7 +24,7 @@ pub trait AppContextTrait: Send + Sync + Debug { fn mikan(&self) -> &MikanClient; fn auth(&self) -> &AuthService; fn graphql(&self) -> &GraphQLService; - fn storage(&self) -> &StorageService; + fn storage(&self) -> &dyn StorageServiceTrait; fn working_dir(&self) -> &String; fn environment(&self) -> &Environment; fn crypto(&self) -> &CryptoService; @@ -109,7 +116,7 @@ impl AppContextTrait for AppContext { fn graphql(&self) -> &GraphQLService { &self.graphql } - fn storage(&self) -> &StorageService { + fn storage(&self) -> &dyn StorageServiceTrait { &self.storage } fn working_dir(&self) -> &String { diff --git a/apps/recorder/src/auth/basic.rs b/apps/recorder/src/auth/basic.rs index 0627f02..827ad81 100644 --- a/apps/recorder/src/auth/basic.rs +++ b/apps/recorder/src/auth/basic.rs @@ -71,18 +71,16 @@ impl AuthServiceTrait for BasicAuthService { user: found_user, password: found_password, }) = AuthBasic::decode_request_parts(request) + && self.config.user == found_user + && self.config.password == found_password.unwrap_or_default() { - if self.config.user == found_user - && self.config.password == found_password.unwrap_or_default() - { - let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER) - .await - .map_err(|_| AuthError::FindAuthRecordError)?; - return Ok(AuthUserInfo { - subscriber_auth, - auth_type: AuthType::Basic, - }); - } + let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER) + .await + .map_err(|_| AuthError::FindAuthRecordError)?; + return Ok(AuthUserInfo { + subscriber_auth, + auth_type: AuthType::Basic, + }); } Err(AuthError::BasicInvalidCredentials) } diff --git a/apps/recorder/src/auth/oidc.rs b/apps/recorder/src/auth/oidc.rs index 0820967..cdb933a 100644 --- a/apps/recorder/src/auth/oidc.rs +++ b/apps/recorder/src/auth/oidc.rs @@ -297,10 +297,10 @@ impl OidcAuthService { id_token.signing_key(id_token_verifier)?, )?; - if let Some(expected_access_token_hash) = claims.access_token_hash() { - if actual_access_token_hash != *expected_access_token_hash { - return Err(AuthError::OidcInvalidAccessTokenError); - } + if let Some(expected_access_token_hash) = claims.access_token_hash() + && actual_access_token_hash != *expected_access_token_hash + { + return Err(AuthError::OidcInvalidAccessTokenError); } Ok(OidcAuthCallbackPayload { @@ -350,14 +350,14 @@ impl AuthServiceTrait for OidcAuthService { if !claims.has_claim(key) { return Err(AuthError::OidcExtraClaimMissingError { claim: key.clone() }); } - if let Some(value) = config.extra_claim_value.as_ref() { - if claims.get_claim(key).is_none_or(|v| &v != value) { - return Err(AuthError::OidcExtraClaimMatchError { - expected: value.clone(), - found: claims.get_claim(key).unwrap_or_default().to_string(), - key: key.clone(), - 
}); - } + if let Some(value) = config.extra_claim_value.as_ref() + && claims.get_claim(key).is_none_or(|v| &v != value) + { + return Err(AuthError::OidcExtraClaimMatchError { + expected: value.clone(), + found: claims.get_claim(key).unwrap_or_default().to_string(), + key: key.clone(), + }); } } let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await { diff --git a/apps/recorder/src/database/service.rs b/apps/recorder/src/database/service.rs index 4961b33..12b1b9a 100644 --- a/apps/recorder/src/database/service.rs +++ b/apps/recorder/src/database/service.rs @@ -9,8 +9,15 @@ use sea_orm_migration::MigratorTrait; use super::DatabaseConfig; use crate::{errors::RecorderResult, migrations::Migrator}; +pub trait DatabaseServiceConnectionTrait { + fn get_database_connection(&self) -> &DatabaseConnection; +} + pub struct DatabaseService { connection: DatabaseConnection, + #[cfg(all(test, feature = "testcontainers"))] + pub container: + Option<testcontainers::ContainerAsync<testcontainers_modules::postgres::Postgres>>, } impl DatabaseService { @@ -48,7 +55,11 @@ impl DatabaseService { Migrator::up(&db, None).await?; } - Ok(Self { connection: db }) + Ok(Self { + connection: db, + #[cfg(all(test, feature = "testcontainers"))] + container: None, + }) } } diff --git a/apps/recorder/src/extract/bittorrent/extract.rs b/apps/recorder/src/extract/bittorrent/extract.rs index 3d6a5d8..d122b44 100644 --- a/apps/recorder/src/extract/bittorrent/extract.rs +++ b/apps/recorder/src/extract/bittorrent/extract.rs @@ -108,7 +108,7 @@ pub fn parse_episode_media_meta_from_torrent( let media_name = torrent_path .file_name() .with_whatever_context::<_, _, RecorderError>(|| { - format!("failed to get file name of {}", torrent_path) + format!("failed to get file name of {torrent_path}") })?; let mut match_obj = None; for rule in TORRENT_EP_PARSE_RULES.iter() { @@ -141,7 +141,7 @@ pub fn parse_episode_media_meta_from_torrent( .unwrap_or(1); let extname = torrent_path .extension() - .map(|e| format!(".{}", e)) + .map(|e| format!(".{e}")) .unwrap_or_default(); Ok(TorrentEpisodeMediaMeta { fansub: fansub.map(|s| s.to_string()), @@ -168,7 +168,7 @@ pub fn parse_episode_subtitle_meta_from_torrent( let media_name = torrent_path .file_name() .with_whatever_context::<_, _, RecorderError>(|| { - format!("failed to get file name of {}", torrent_path) + format!("failed to get file name of {torrent_path}") })?; let lang = get_subtitle_lang(media_name); @@ -271,7 +271,7 @@ mod tests { pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) { let extname = Path::new(raw_name) .extension() - .map(|e| format!(".{}", e)) + .map(|e| format!(".{e}")) .unwrap_or_default() .to_lowercase(); diff --git a/apps/recorder/src/extract/html/styles.rs b/apps/recorder/src/extract/html/styles.rs index f6909c9..be03d92 100644 --- a/apps/recorder/src/extract/html/styles.rs +++ b/apps/recorder/src/extract/html/styles.rs @@ -19,21 +19,19 @@ pub fn extract_background_image_src_from_style_attr( match prop { Property::BackgroundImage(images) => { for img in images { - if let CSSImage::Url(path) = img { - if let Some(url) = extract_image_src_from_str(path.url.trim(), base_url) - { - return Some(url); - } + if let CSSImage::Url(path) = img + && let Some(url) = extract_image_src_from_str(path.url.trim(), base_url) + { + return Some(url); } } } Property::Background(backgrounds) => { for bg in backgrounds { - if let CSSImage::Url(path) = &bg.image { - if let Some(url) = extract_image_src_from_str(path.url.trim(), base_url) - { - return Some(url); - } + if let CSSImage::Url(path) = &bg.image + && let
Some(url) = extract_image_src_from_str(path.url.trim(), base_url) + { + return Some(url); } } } diff --git a/apps/recorder/src/extract/http.rs b/apps/recorder/src/extract/http.rs index 4470ba9..ab711a2 100644 --- a/apps/recorder/src/extract/http.rs +++ b/apps/recorder/src/extract/http.rs @@ -1,4 +1,4 @@ -use axum::http::{header, request::Parts, HeaderName, HeaderValue, Uri}; +use axum::http::{HeaderName, HeaderValue, Uri, header, request::Parts}; use itertools::Itertools; use url::Url; @@ -121,11 +121,7 @@ impl ForwardedRelatedInfo { .and_then(|s| s.to_str().ok()) .and_then(|s| { let l = s.split(",").map(|s| s.trim().to_string()).collect_vec(); - if l.is_empty() { - None - } else { - Some(l) - } + if l.is_empty() { None } else { Some(l) } }); let host = headers @@ -165,7 +161,7 @@ impl ForwardedRelatedInfo { pub fn resolved_origin(&self) -> Option<Url> { if let (Some(protocol), Some(host)) = (self.resolved_protocol(), self.resolved_host()) { - let origin = format!("{}://{}", protocol, host); + let origin = format!("{protocol}://{host}"); Url::parse(&origin).ok() } else { None diff --git a/apps/recorder/src/extract/media/mod.rs b/apps/recorder/src/extract/media/mod.rs index 1812c18..429d703 100644 --- a/apps/recorder/src/extract/media/mod.rs +++ b/apps/recorder/src/extract/media/mod.rs @@ -3,7 +3,7 @@ use url::Url; pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> { let mut image_url = base_url.join(image_src).ok()?; if let Some((_, value)) = image_url.query_pairs().find(|(key, _)| key == "webp") { - image_url.set_query(Some(&format!("webp={}", value))); + image_url.set_query(Some(&format!("webp={value}"))); } else { image_url.set_query(None); } diff --git a/apps/recorder/src/extract/mikan/client.rs b/apps/recorder/src/extract/mikan/client.rs index eee6216..11bfd6b 100644 --- a/apps/recorder/src/extract/mikan/client.rs +++ b/apps/recorder/src/extract/mikan/client.rs @@ -3,7 +3,6 @@ use std::{fmt::Debug, ops::Deref, sync::Arc}; use fetch::{HttpClient, HttpClientTrait}; use maplit::hashmap; use sea_orm::DbErr; -use secrecy::SecretBox; use serde::{Deserialize, Serialize}; use url::Url; use util::OptDynErr; @@ -23,8 +22,6 @@ pub struct MikanCredentialForm { pub user_agent: String, } -pub type MikanAuthSecrecy = SecretBox<MikanCredentialForm>; - impl Debug for MikanCredentialForm { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("MikanCredentialForm") @@ -72,7 +69,7 @@ impl MikanClient { Ok(false) } else { Err(RecorderError::Credential3rdError { - message: format!("mikan account check has login failed, status = {}", status), + message: format!("mikan account check has login failed, status = {status}"), source: None.into(), }) } @@ -189,7 +186,7 @@ impl MikanClient { userpass_credential_opt = Some(userpass_credential); } else { return Err(RecorderError::from_db_record_not_found( - DbErr::RecordNotFound(format!("credential={} not found", credential_id)), + DbErr::RecordNotFound(format!("credential={credential_id} not found")), )); } } diff --git a/apps/recorder/src/extract/mikan/constants.rs b/apps/recorder/src/extract/mikan/constants.rs index 193af75..243a006 100644 --- a/apps/recorder/src/extract/mikan/constants.rs +++ b/apps/recorder/src/extract/mikan/constants.rs @@ -1,4 +1,4 @@ -pub const MIKAN_BUCKET_KEY: &str = "mikan"; +pub const MIKAN_POSTER_BUCKET_KEY: &str = "mikan_poster"; pub const MIKAN_UNKNOWN_FANSUB_NAME: &str = "生肉/不明字幕"; pub const MIKAN_UNKNOWN_FANSUB_ID: &str = "202"; pub const MIKAN_LOGIN_PAGE_PATH: &str = "/Account/Login"; diff --git
a/apps/recorder/src/extract/mikan/mod.rs b/apps/recorder/src/extract/mikan/mod.rs index fc35a2f..cb82374 100644 --- a/apps/recorder/src/extract/mikan/mod.rs +++ b/apps/recorder/src/extract/mikan/mod.rs @@ -1,23 +1,31 @@ -pub mod client; -pub mod config; -pub mod constants; -pub mod rss_extract; -pub mod web_extract; +mod client; +mod config; +mod constants; +mod rss; +mod web; pub use client::{MikanClient, MikanCredentialForm}; pub use config::MikanConfig; -pub use constants::MIKAN_BUCKET_KEY; -pub use rss_extract::{ - MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanBangumiRssUrlMeta, - MikanRssChannel, MikanRssItem, MikanSubscriberAggregationRssChannel, - MikanSubscriberAggregationRssUrlMeta, build_mikan_bangumi_rss_url, - build_mikan_subscriber_aggregation_rss_url, extract_mikan_bangumi_id_from_rss_url, - extract_mikan_rss_channel_from_rss_link, extract_mikan_subscriber_aggregation_id_from_rss_link, +pub use constants::{ + MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH, + MIKAN_POSTER_BUCKET_KEY, MIKAN_UNKNOWN_FANSUB_ID, MIKAN_UNKNOWN_FANSUB_NAME, }; -pub use web_extract::{ - MikanBangumiMeta, MikanEpisodeMeta, MikanSeasonStr, build_mikan_bangumi_homepage_url, - build_mikan_episode_homepage_url, build_mikan_season_flow_url, - extract_mikan_bangumi_indices_meta_from_season_flow_fragment, - extract_mikan_bangumi_meta_from_bangumi_homepage, - extract_mikan_episode_meta_from_episode_homepage, +pub use rss::{ + MikanBangumiIndexRssChannel, MikanBangumiRssChannel, MikanBangumiRssUrlMeta, MikanRssChannel, + MikanRssItem, MikanSubscriberAggregationRssUrlMeta, MikanSubscriberStreamRssChannel, + build_mikan_bangumi_rss_url, build_mikan_subscriber_aggregation_rss_url, + extract_mikan_bangumi_id_from_rss_url, extract_mikan_rss_channel_from_rss_link, + extract_mikan_subscriber_aggregation_id_from_rss_link, +}; +pub use web::{ + MikanBangumiHomepageUrlMeta, MikanBangumiIndexHomepageUrlMeta, MikanBangumiIndexMeta, + MikanBangumiMeta, MikanBangumiPosterMeta, MikanEpisodeHomepageUrlMeta, MikanEpisodeMeta, + MikanSeasonFlowUrlMeta, MikanSeasonStr, build_mikan_bangumi_expand_subscribed_url, + build_mikan_bangumi_homepage_url, build_mikan_episode_homepage_url, + build_mikan_season_flow_url, extract_mikan_bangumi_index_meta_list_from_season_flow_fragment, + extract_mikan_episode_meta_from_episode_homepage_html, + scrape_mikan_bangumi_meta_from_bangumi_homepage_url, + scrape_mikan_bangumi_meta_list_from_season_flow_url, + scrape_mikan_episode_meta_from_episode_homepage_url, scrape_mikan_poster_data_from_image_url, + scrape_mikan_poster_meta_from_image_url, }; diff --git a/apps/recorder/src/extract/mikan/rss_extract.rs b/apps/recorder/src/extract/mikan/rss.rs similarity index 85% rename from apps/recorder/src/extract/mikan/rss_extract.rs rename to apps/recorder/src/extract/mikan/rss.rs index fcfc2bc..88436b3 100644 --- a/apps/recorder/src/extract/mikan/rss_extract.rs +++ b/apps/recorder/src/extract/mikan/rss.rs @@ -10,10 +10,7 @@ use url::Url; use crate::{ errors::app_error::{RecorderError, RecorderResult}, - extract::mikan::{ - MikanClient, - web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage_url}, - }, + extract::mikan::{MikanClient, MikanEpisodeHomepageUrlMeta}, }; #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] @@ -37,7 +34,7 @@ pub struct MikanBangumiRssChannel { } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] -pub struct MikanBangumiAggregationRssChannel { +pub struct MikanBangumiIndexRssChannel { pub 
name: String, pub url: Url, pub mikan_bangumi_id: String, @@ -45,7 +42,7 @@ } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] -pub struct MikanSubscriberAggregationRssChannel { +pub struct MikanSubscriberStreamRssChannel { pub mikan_aggregation_id: String, pub url: Url, pub items: Vec<MikanRssItem>, @@ -54,46 +51,40 @@ #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] pub enum MikanRssChannel { Bangumi(MikanBangumiRssChannel), - BangumiAggregation(MikanBangumiAggregationRssChannel), - SubscriberAggregation(MikanSubscriberAggregationRssChannel), + BangumiIndex(MikanBangumiIndexRssChannel), + SubscriberStream(MikanSubscriberStreamRssChannel), } impl MikanRssChannel { pub fn items(&self) -> &[MikanRssItem] { match &self { Self::Bangumi(MikanBangumiRssChannel { items, .. }) - | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { items, .. }) - | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { items, .. }) => { - items - } + | Self::BangumiIndex(MikanBangumiIndexRssChannel { items, .. }) + | Self::SubscriberStream(MikanSubscriberStreamRssChannel { items, .. }) => items, } } pub fn into_items(self) -> Vec<MikanRssItem> { match self { Self::Bangumi(MikanBangumiRssChannel { items, .. }) - | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { items, .. }) - | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { items, .. }) => { - items - } + | Self::BangumiIndex(MikanBangumiIndexRssChannel { items, .. }) + | Self::SubscriberStream(MikanSubscriberStreamRssChannel { items, .. }) => items, } } pub fn name(&self) -> Option<&str> { match &self { Self::Bangumi(MikanBangumiRssChannel { name, .. }) - | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { name, .. }) => { - Some(name.as_str()) - } - Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { .. }) => None, + | Self::BangumiIndex(MikanBangumiIndexRssChannel { name, .. }) => Some(name.as_str()), + Self::SubscriberStream(MikanSubscriberStreamRssChannel { .. }) => None, } } pub fn url(&self) -> &Url { match &self { Self::Bangumi(MikanBangumiRssChannel { url, .. }) - | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { url, .. }) - | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { url, .. }) => url, + | Self::BangumiIndex(MikanBangumiIndexRssChannel { url, .. }) + | Self::SubscriberStream(MikanSubscriberStreamRssChannel { url, .. }) => url, } } } @@ -133,9 +124,9 @@ impl TryFrom<rss::Item> for MikanRssItem { RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link")) })?; - let MikanEpisodeHomepage { + let MikanEpisodeHomepageUrlMeta { mikan_episode_id, ..
- } = extract_mikan_episode_id_from_homepage_url(&homepage).ok_or_else(|| { + } = MikanEpisodeHomepageUrlMeta::parse_url(&homepage).ok_or_else(|| { RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id")) })?; @@ -278,17 +269,15 @@ pub async fn extract_mikan_rss_channel_from_rss_link( channel_name, channel_link = channel_link.as_str(), mikan_bangumi_id, - "MikanBangumiAggregationRssChannel extracted" + "MikanBangumiIndexRssChannel extracted" ); - Ok(MikanRssChannel::BangumiAggregation( - MikanBangumiAggregationRssChannel { - name: channel_name, - mikan_bangumi_id, - url: channel_link, - items, - }, - )) + Ok(MikanRssChannel::BangumiIndex(MikanBangumiIndexRssChannel { + name: channel_name, + mikan_bangumi_id, + url: channel_link, + items, + })) } } else if let Some(MikanSubscriberAggregationRssUrlMeta { mikan_aggregation_id, @@ -317,8 +306,8 @@ pub async fn extract_mikan_rss_channel_from_rss_link( "MikanSubscriberAggregationRssChannel extracted" ); - Ok(MikanRssChannel::SubscriberAggregation( - MikanSubscriberAggregationRssChannel { + Ok(MikanRssChannel::SubscriberStream( + MikanSubscriberStreamRssChannel { mikan_aggregation_id, items, url: channel_link, @@ -342,7 +331,7 @@ mod tests { use crate::{ errors::RecorderResult, extract::mikan::{ - MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel, + MikanBangumiIndexRssChannel, MikanBangumiRssChannel, MikanRssChannel, extract_mikan_rss_channel_from_rss_link, }, test_utils::mikan::build_testing_mikan_client, @@ -413,7 +402,7 @@ mod tests { assert_matches!( &channel, - MikanRssChannel::BangumiAggregation(MikanBangumiAggregationRssChannel { .. }) + MikanRssChannel::BangumiIndex(MikanBangumiIndexRssChannel { .. }) ); assert_matches!(&channel.name(), Some("叹气的亡灵想隐退")); diff --git a/apps/recorder/src/extract/mikan/web_extract.rs b/apps/recorder/src/extract/mikan/web.rs similarity index 57% rename from apps/recorder/src/extract/mikan/web_extract.rs rename to apps/recorder/src/extract/mikan/web.rs index de296c5..be8535b 100644 --- a/apps/recorder/src/extract/mikan/web_extract.rs +++ b/apps/recorder/src/extract/mikan/web.rs @@ -1,15 +1,17 @@ -use std::{borrow::Cow, fmt}; +use std::{borrow::Cow, fmt, sync::Arc}; use bytes::Bytes; use fetch::{html::fetch_html, image::fetch_image}; use html_escape::decode_html_entities; use scraper::{Html, Selector}; use serde::{Deserialize, Serialize}; +use snafu::OptionExt; use tracing::instrument; use url::Url; use super::{ - MIKAN_BUCKET_KEY, MikanBangumiRssUrlMeta, MikanClient, extract_mikan_bangumi_id_from_rss_url, + MIKAN_POSTER_BUCKET_KEY, MikanBangumiRssUrlMeta, MikanClient, + extract_mikan_bangumi_id_from_rss_url, }; use crate::{ app::AppContextTrait, @@ -18,10 +20,124 @@ use crate::{ html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref}, media::extract_image_src_from_str, }, - storage::StorageContentCategory, + storage::{StorageContentCategory, StorageServiceTrait}, }; -#[derive(Clone, Debug, Copy, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct MikanBangumiIndexMeta { + pub homepage: Url, + pub origin_poster_src: Option<Url>, + pub bangumi_title: String, + pub mikan_bangumi_id: String, +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct MikanFansubMeta { + pub mikan_fansub_id: String, + pub fansub: String, +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct MikanBangumiMeta { + pub homepage: Url, + pub origin_poster_src: Option<Url>, + pub
bangumi_title: String, + pub mikan_bangumi_id: String, + pub mikan_fansub_id: String, + pub fansub: String, +} + +impl MikanBangumiMeta { + pub fn from_bangumi_index_and_fansub_meta( + bangumi_index_meta: MikanBangumiIndexMeta, + fansub_meta: MikanFansubMeta, + ) -> Self { + Self { + homepage: bangumi_index_meta.homepage, + origin_poster_src: bangumi_index_meta.origin_poster_src, + bangumi_title: bangumi_index_meta.bangumi_title, + mikan_bangumi_id: bangumi_index_meta.mikan_bangumi_id, + mikan_fansub_id: fansub_meta.mikan_fansub_id, + fansub: fansub_meta.fansub, + } + } +} + +#[derive(Clone, Debug, PartialEq)] +pub struct MikanEpisodeMeta { + pub homepage: Url, + pub origin_poster_src: Option<Url>, + pub bangumi_title: String, + pub episode_title: String, + pub fansub: String, + pub mikan_bangumi_id: String, + pub mikan_fansub_id: String, + pub mikan_episode_id: String, +} + +#[derive(Clone, Debug, PartialEq)] +pub struct MikanBangumiPosterMeta { + pub origin_poster_src: Url, + pub poster_src: Option<String>, +} + +#[derive(Clone, Debug, PartialEq)] +pub struct MikanBangumiIndexHomepageUrlMeta { + pub mikan_bangumi_id: String, +} + +impl MikanBangumiIndexHomepageUrlMeta { + pub fn parse_url(url: &Url) -> Option<Self> { + if url.path().starts_with("/Home/Bangumi/") { + let mikan_bangumi_id = url.path().replace("/Home/Bangumi/", ""); + + Some(Self { mikan_bangumi_id }) + } else { + None + } + } } + +#[derive(Clone, Debug, PartialEq)] +pub struct MikanBangumiHomepageUrlMeta { + pub mikan_bangumi_id: String, + pub mikan_fansub_id: String, +} + +impl MikanBangumiHomepageUrlMeta { + pub fn from_url(url: &Url) -> Option<Self> { + if url.path().starts_with("/Home/Bangumi/") { + let mikan_bangumi_id = url.path().replace("/Home/Bangumi/", ""); + + let url_fragment = url.fragment()?; + + Some(Self { + mikan_bangumi_id, + mikan_fansub_id: String::from(url_fragment), + }) + } else { + None + } + } } + +#[derive(Clone, Debug, PartialEq)] +pub struct MikanEpisodeHomepageUrlMeta { + pub mikan_episode_id: String, +} + +impl MikanEpisodeHomepageUrlMeta { + pub fn parse_url(url: &Url) -> Option<Self> { + if url.path().starts_with("/Home/Episode/") { + let mikan_episode_id = url.path().replace("/Home/Episode/", ""); + Some(Self { mikan_episode_id }) + } else { + None + } + } } + +#[derive(Clone, Debug, Copy, Serialize, Deserialize, PartialEq, Eq)] pub enum MikanSeasonStr { #[serde(rename = "春")] Spring, @@ -45,64 +161,9 @@ impl fmt::Display for MikanSeasonStr { } #[derive(Clone, Debug, PartialEq)] -pub struct MikanEpisodeMeta { - pub homepage: Url, - pub origin_poster_src: Option<Url>, - pub bangumi_title: String, - pub episode_title: String, - pub fansub: String, - pub mikan_bangumi_id: String, - pub mikan_fansub_id: String, - pub mikan_episode_id: String, -} - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct MikanBangumiIndexMeta { - pub homepage: Url, - pub origin_poster_src: Option<Url>, - pub bangumi_title: String, - pub mikan_bangumi_id: String, -} - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct MikanBangumiMeta { - pub homepage: Url, - pub origin_poster_src: Option<Url>, - pub bangumi_title: String, - pub mikan_bangumi_id: String, - pub mikan_fansub_id: Option<String>, - pub fansub: Option<String>, -} - -#[derive(Clone, Debug, PartialEq)] -pub struct MikanBangumiPosterMeta { - pub origin_poster_src: Url, - pub poster_data: Option<Bytes>, - pub poster_src: Option<String>, -} - -impl From<MikanBangumiIndexMeta> for MikanBangumiMeta { - fn from(index_meta: MikanBangumiIndexMeta) -> Self { - MikanBangumiMeta { - homepage: index_meta.homepage, -
origin_poster_src: index_meta.origin_poster_src, - bangumi_title: index_meta.bangumi_title, - mikan_bangumi_id: index_meta.mikan_bangumi_id, - mikan_fansub_id: None, - fansub: None, - } - } -} - -#[derive(Clone, Debug, PartialEq)] -pub struct MikanEpisodeHomepage { - pub mikan_episode_id: String, -} - -#[derive(Clone, Debug, PartialEq)] -pub struct MikanBangumiHomepage { - pub mikan_bangumi_id: String, - pub mikan_fansub_id: Option<String>, +pub struct MikanSeasonFlowUrlMeta { + pub year: i32, + pub season_str: MikanSeasonStr, } pub fn build_mikan_bangumi_homepage_url( @@ -135,7 +196,7 @@ pub fn build_mikan_episode_homepage_url(mikan_base_url: Url, mikan_episode_id: & url } -pub fn build_mikan_bangumi_expand_subscribed_fragment_url( +pub fn build_mikan_bangumi_expand_subscribed_url( mikan_base_url: Url, mikan_bangumi_id: &str, ) -> Url { @@ -147,92 +208,12 @@ pub fn build_mikan_bangumi_expand_subscribed_fragment_url( url } -pub fn extract_mikan_bangumi_id_from_homepage_url(url: &Url) -> Option<MikanBangumiHomepage> { - if url.path().starts_with("/Home/Bangumi/") { - let mikan_bangumi_id = url.path().replace("/Home/Bangumi/", ""); - - Some(MikanBangumiHomepage { - mikan_bangumi_id, - mikan_fansub_id: url.fragment().map(String::from), - }) - } else { - None - } -} - -pub fn extract_mikan_episode_id_from_homepage_url(url: &Url) -> Option<MikanEpisodeHomepage> { - if url.path().starts_with("/Home/Episode/") { - let mikan_episode_id = url.path().replace("/Home/Episode/", ""); - Some(MikanEpisodeHomepage { mikan_episode_id }) - } else { - None - } -} - -pub async fn extract_mikan_poster_meta_from_src( - http_client: &MikanClient, - origin_poster_src_url: Url, -) -> Result<MikanBangumiPosterMeta, RecorderError> { - let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?; - Ok(MikanBangumiPosterMeta { - origin_poster_src: origin_poster_src_url, - poster_data: Some(poster_data), - poster_src: None, - }) -} - -pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache( - ctx: &dyn AppContextTrait, - origin_poster_src_url: Url, - subscriber_id: i32, -) -> RecorderResult<MikanBangumiPosterMeta> { - let dal_client = ctx.storage(); - let mikan_client = ctx.mikan(); - if let Some(poster_src) = dal_client - .exists_object( - StorageContentCategory::Image, - subscriber_id, - Some(MIKAN_BUCKET_KEY), - &origin_poster_src_url.path().replace("/images/Bangumi/", ""), - ) - .await?
- { - return Ok(MikanBangumiPosterMeta { - origin_poster_src: origin_poster_src_url, - poster_data: None, - poster_src: Some(poster_src.to_string()), - }); - } - - let poster_data = fetch_image(mikan_client, origin_poster_src_url.clone()).await?; - - let poster_str = dal_client - .store_object( - StorageContentCategory::Image, - subscriber_id, - Some(MIKAN_BUCKET_KEY), - &origin_poster_src_url.path().replace("/images/Bangumi/", ""), - poster_data.clone(), - ) - .await?; - - Ok(MikanBangumiPosterMeta { - origin_poster_src: origin_poster_src_url, - poster_data: Some(poster_data), - poster_src: Some(poster_str.to_string()), - }) -} - -#[instrument(skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))] -pub async fn extract_mikan_episode_meta_from_episode_homepage( - http_client: &MikanClient, +#[instrument(err, skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))] +pub fn extract_mikan_episode_meta_from_episode_homepage_html( + html: &Html, + mikan_base_url: Url, mikan_episode_homepage_url: Url, -) -> Result<MikanEpisodeMeta, RecorderError> { - let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?; - let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?; - - let html = Html::parse_document(&content); - +) -> RecorderResult<MikanEpisodeMeta> { let bangumi_title_selector = &Selector::parse(".bangumi-title > a[href^='/Home/Bangumi/']").unwrap(); let mikan_bangumi_id_selector = @@ -243,9 +224,8 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage( .select(bangumi_title_selector) .next() .map(extract_inner_text_from_element_ref) - .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title"))) - .inspect_err(|error| { - tracing::warn!(error = %error); + .ok_or_else(|| { + RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")) })?; let MikanBangumiRssUrlMeta { @@ -260,33 +240,25 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage( .and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_url(&rss_link_url)) .ok_or_else(|| { RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")) - }) - .inspect_err(|error| tracing::error!(error = %error))?; + })?; - let mikan_fansub_id = mikan_fansub_id - .ok_or_else(|| { - RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id")) - }) - .inspect_err(|error| tracing::error!(error = %error))?; + let mikan_fansub_id = mikan_fansub_id.ok_or_else(|| { + RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id")) + })?; let episode_title = html .select(&Selector::parse("title").unwrap()) .next() .map(extract_inner_text_from_element_ref) - .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title"))) - .inspect_err(|error| { - tracing::warn!(error = %error); + .ok_or_else(|| { + RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")) })?; - let MikanEpisodeHomepage { + let MikanEpisodeHomepageUrlMeta { mikan_episode_id, ..
- } = extract_mikan_episode_id_from_homepage_url(&mikan_episode_homepage_url) - .ok_or_else(|| { - RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id")) - }) - .inspect_err(|error| { - tracing::warn!(error = %error); - })?; + } = MikanEpisodeHomepageUrlMeta::parse_url(&mikan_episode_homepage_url).ok_or_else(|| { + RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id")) + })?; let fansub_name = html .select( @@ -295,9 +267,8 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage( ) .next() .map(extract_inner_text_from_element_ref) - .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name"))) - .inspect_err(|error| { - tracing::warn!(error = %error); + .ok_or_else(|| { + RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")) })?; let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| { @@ -334,15 +305,28 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage( }) } -#[instrument(skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))] -pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage( +#[instrument(skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))] +pub async fn scrape_mikan_episode_meta_from_episode_homepage_url( http_client: &MikanClient, - mikan_bangumi_homepage_url: Url, -) -> Result<MikanBangumiMeta, RecorderError> { - let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?; - let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?; + mikan_episode_homepage_url: Url, +) -> RecorderResult<MikanEpisodeMeta> { + let mikan_base_url = http_client.base_url().clone(); + let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?; + let html = Html::parse_document(&content); + extract_mikan_episode_meta_from_episode_homepage_html( + &html, + mikan_base_url, + mikan_episode_homepage_url, + ) +} + +pub fn extract_mikan_bangumi_index_meta_from_bangumi_homepage_html( + html: &Html, + mikan_bangumi_homepage_url: Url, + mikan_base_url: &Url, +) -> RecorderResult<MikanBangumiIndexMeta> { let bangumi_title_selector = &Selector::parse(".bangumi-title").unwrap(); let mikan_bangumi_id_selector = &Selector::parse(".bangumi-title > .mikan-rss[data-original-title='RSS']").unwrap(); @@ -352,8 +336,9 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage( .select(bangumi_title_selector) .next() .map(extract_inner_text_from_element_ref) - .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title"))) - .inspect_err(|error| tracing::warn!(error = %error))?; + .ok_or_else(|| { + RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")) + })?; let mikan_bangumi_id = html .select(mikan_bangumi_id_selector) .next() .and_then(|el| el.value().attr("href")) .and_then(|s| mikan_base_url.join(s).ok()) .and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_url(&rss_link_url)) .ok_or_else(|| { RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")) - }) - .inspect_err(|error| tracing::error!(error = %error))?; + })?; let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| { el.value() .attr("data-src") - .and_then(|data_src| extract_image_src_from_str(data_src, &mikan_base_url)) + .and_then(|data_src| extract_image_src_from_str(data_src, mikan_base_url)) .or_else(|| { el.value().attr("style").and_then(|style| { - extract_background_image_src_from_style_attr(style, &mikan_base_url) + extract_background_image_src_from_style_attr(style, mikan_base_url) }) }) }); -
let (mikan_fansub_id, fansub) = mikan_bangumi_homepage_url - .fragment() - .and_then(|id| { - html.select( - &Selector::parse(&format!("a.subgroup-name[data-anchor='#{}']", id)).unwrap(), - ) - .next() - .map(extract_inner_text_from_element_ref) - .map(|fansub_name| (id.to_string(), fansub_name)) - }) - .unzip(); - tracing::trace!( bangumi_title, mikan_bangumi_id, origin_poster_src = origin_poster_src.as_ref().map(|url| url.as_str()), - fansub, - mikan_fansub_id, - "mikan bangumi meta extracted" + "mikan bangumi index meta extracted" ); - Ok(MikanBangumiMeta { + Ok(MikanBangumiIndexMeta { homepage: mikan_bangumi_homepage_url, bangumi_title, origin_poster_src, mikan_bangumi_id, - fansub, - mikan_fansub_id, }) } -#[instrument] -pub fn extract_mikan_bangumi_indices_meta_from_season_flow_fragment( - season_flow_fragment: &str, - mikan_base_url: Url, -) -> Vec<MikanBangumiIndexMeta> { - let html = Html::parse_fragment(season_flow_fragment); +pub fn extract_mikan_fansub_meta_from_bangumi_homepage_html( + html: &Html, + mikan_fansub_id: String, +) -> Option<MikanFansubMeta> { + html.select( + &Selector::parse(&format!( + "a.subgroup-name[data-anchor='#{mikan_fansub_id}']" + )) + .unwrap(), + ) + .next() + .map(extract_inner_text_from_element_ref) + .map(|fansub_name| MikanFansubMeta { + mikan_fansub_id, + fansub: fansub_name, + }) +} +#[instrument(err, skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))] +pub fn extract_mikan_bangumi_meta_from_bangumi_homepage_html( + html: &Html, + mikan_bangumi_homepage_url: Url, + mikan_base_url: &Url, +) -> RecorderResult<MikanBangumiMeta> { + let mikan_fansub_id = MikanBangumiHomepageUrlMeta::from_url(&mikan_bangumi_homepage_url) + .map(|s| s.mikan_fansub_id) + .ok_or_else(|| { + RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id")) + })?; + + let bangumi_index_meta = extract_mikan_bangumi_index_meta_from_bangumi_homepage_html( + html, + mikan_bangumi_homepage_url, + mikan_base_url, + )?; + + let fansub_meta = extract_mikan_fansub_meta_from_bangumi_homepage_html(html, mikan_fansub_id) + .ok_or_else(|| { + RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")) + })?; + + Ok(MikanBangumiMeta::from_bangumi_index_and_fansub_meta( + bangumi_index_meta, + fansub_meta, + )) +} + +#[instrument(err, skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))] +pub async fn scrape_mikan_bangumi_meta_from_bangumi_homepage_url( + mikan_client: &MikanClient, + mikan_bangumi_homepage_url: Url, +) -> RecorderResult<MikanBangumiMeta> { + let mikan_base_url = mikan_client.base_url(); + let content = fetch_html(mikan_client, mikan_bangumi_homepage_url.as_str()).await?; + let html = Html::parse_document(&content); + + extract_mikan_bangumi_meta_from_bangumi_homepage_html( + &html, + mikan_bangumi_homepage_url, + mikan_base_url, + ) +} + +#[instrument(err, skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))] +pub async fn scrape_mikan_bangumi_index_meta_from_bangumi_homepage_url( + mikan_client: &MikanClient, + mikan_bangumi_homepage_url: Url, +) -> RecorderResult<MikanBangumiIndexMeta> { + let mikan_base_url = mikan_client.base_url(); + let content = fetch_html(mikan_client, mikan_bangumi_homepage_url.as_str()).await?; + let html = Html::parse_document(&content); + + extract_mikan_bangumi_index_meta_from_bangumi_homepage_html( + &html, + mikan_bangumi_homepage_url, + mikan_base_url, + ) +} + +#[instrument(skip_all, fields(origin_poster_src_url = origin_poster_src_url.as_str()))] +pub async fn scrape_mikan_poster_data_from_image_url( +
mikan_client: &MikanClient, + origin_poster_src_url: Url, +) -> RecorderResult<Bytes> { + let poster_data = fetch_image(mikan_client, origin_poster_src_url.clone()).await?; + Ok(poster_data) +} + +#[instrument(skip_all, fields(origin_poster_src_url = origin_poster_src_url.as_str()))] +pub async fn scrape_mikan_poster_meta_from_image_url( + mikan_client: &MikanClient, + storage_service: &dyn StorageServiceTrait, + origin_poster_src_url: Url, + subscriber_id: i32, +) -> RecorderResult<MikanBangumiPosterMeta> { + if let Some(poster_src) = storage_service + .exists_object( + StorageContentCategory::Image, + subscriber_id, + Some(MIKAN_POSTER_BUCKET_KEY), + &origin_poster_src_url.path().replace("/images/Bangumi/", ""), + ) + .await? + { + return Ok(MikanBangumiPosterMeta { + origin_poster_src: origin_poster_src_url, + poster_src: Some(poster_src.to_string()), + }); + } + + let poster_data = + scrape_mikan_poster_data_from_image_url(mikan_client, origin_poster_src_url.clone()) + .await?; + + let poster_str = storage_service + .store_object( + StorageContentCategory::Image, + subscriber_id, + Some(MIKAN_POSTER_BUCKET_KEY), + &origin_poster_src_url.path().replace("/images/Bangumi/", ""), + poster_data, + ) + .await?; + + Ok(MikanBangumiPosterMeta { + origin_poster_src: origin_poster_src_url, + poster_src: Some(poster_str.to_string()), + }) +} + +pub fn extract_mikan_bangumi_index_meta_list_from_season_flow_fragment( + html: &Html, + mikan_base_url: &Url, +) -> Vec<MikanBangumiIndexMeta> { let bangumi_empty_selector = &Selector::parse(".no-subscribe-bangumi").unwrap(); if html.select(bangumi_empty_selector).next().is_some() { @@ -439,7 +534,7 @@ pub fn extract_mikan_bangumi_indices_meta_from_season_flow_fragment( { let origin_poster_src = bangumi_poster_span .attr("data-src") - .and_then(|data_src| extract_image_src_from_str(data_src, &mikan_base_url)); + .and_then(|data_src| extract_image_src_from_str(data_src, mikan_base_url)); let bangumi_title = bangumi_title_a .attr("title") .map(|title| decode_html_entities(&title).trim().to_string()); @@ -473,7 +568,7 @@ pub fn extract_mikan_bangumi_indices_meta_from_season_flow_fragment( origin_poster_src, bangumi_title, mikan_bangumi_id, - }) + }); } } } @@ -482,11 +577,10 @@ pub fn extract_mikan_bangumi_indices_meta_from_season_flow_fragment( #[instrument(skip_all, fields(mikan_bangumi_index = mikan_bangumi_index.mikan_bangumi_id.as_str()))] pub fn extract_mikan_bangumi_meta_from_expand_subscribed_fragment( + html: &Html, mikan_bangumi_index: MikanBangumiIndexMeta, - expand_subscribed_fragment: &str, mikan_base_url: Url, ) -> Option<MikanBangumiMeta> { - let html = Html::parse_fragment(expand_subscribed_fragment); let fansub_container_selector = &Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap(); let fansub_title_selector = &Selector::parse(".tag-res-name[title]").unwrap(); @@ -515,48 +609,111 @@ pub fn extract_mikan_bangumi_meta_from_expand_subscribed_fragment( } }) } { - tracing::trace!(fansub_name, mikan_fansub_id, "subscribed fansub extracted"); + tracing::trace!( + mikan_bangumi_id = mikan_bangumi_index.mikan_bangumi_id, + bangumi_title = mikan_bangumi_index.bangumi_title, + fansub_name, + mikan_fansub_id, + "subscribed fansub extracted" + ); + let mikan_bangumi_id = mikan_bangumi_index.mikan_bangumi_id; let bangumi_title = mikan_bangumi_index.bangumi_title; let origin_poster_src = mikan_bangumi_index.origin_poster_src; Some(MikanBangumiMeta { homepage: build_mikan_bangumi_homepage_url( - mikan_base_url.clone(), + mikan_base_url, &mikan_bangumi_id, Some(&mikan_fansub_id), ), bangumi_title:
bangumi_title.to_string(), mikan_bangumi_id: mikan_bangumi_id.to_string(), - mikan_fansub_id: Some(mikan_fansub_id), - fansub: Some(fansub_name), + mikan_fansub_id: mikan_fansub_id.to_string(), + fansub: fansub_name.to_string(), origin_poster_src: origin_poster_src.clone(), }) } else { - tracing::trace!("subscribed fansub not found"); + tracing::trace!( + mikan_bangumi_id = mikan_bangumi_index.mikan_bangumi_id, + bangumi_title = mikan_bangumi_index.bangumi_title, + "subscribed fansub failed to extract" + ); None } } +#[instrument(err, skip_all, fields(mikan_season_flow_url = mikan_season_flow_url.as_str(), credential_id = credential_id))] +pub async fn scrape_mikan_bangumi_meta_list_from_season_flow_url( + mikan_client: &MikanClient, + ctx: Arc<dyn AppContextTrait>, + mikan_season_flow_url: Url, + credential_id: i32, +) -> RecorderResult<Vec<MikanBangumiMeta>> { + let mikan_client = mikan_client + .fork_with_credential(ctx.clone(), Some(credential_id)) + .await?; + + let mikan_base_url = mikan_client.base_url(); + let content = fetch_html(&mikan_client, mikan_season_flow_url.clone()).await?; + let mut bangumi_indices_meta = { + let html = Html::parse_document(&content); + extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(&html, mikan_base_url) + }; + + if bangumi_indices_meta.is_empty() && !mikan_client.has_login().await? { + mikan_client.login().await?; + let content = fetch_html(&mikan_client, mikan_season_flow_url).await?; + let html = Html::parse_document(&content); + bangumi_indices_meta = + extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(&html, mikan_base_url); + } + + let mut bangumi_metas = vec![]; + + for bangumi_index in bangumi_indices_meta { + let bangumi_title = bangumi_index.bangumi_title.clone(); + let bangumi_expand_subscribed_fragment_url = build_mikan_bangumi_expand_subscribed_url( + mikan_base_url.clone(), + &bangumi_index.mikan_bangumi_id, + ); + let bangumi_expand_subscribed_fragment = + fetch_html(&mikan_client, bangumi_expand_subscribed_fragment_url).await?; + + let bangumi_meta = { + let html = Html::parse_document(&bangumi_expand_subscribed_fragment); + + extract_mikan_bangumi_meta_from_expand_subscribed_fragment( + &html, + bangumi_index, + mikan_base_url.clone(), + ) + .with_whatever_context::<_, String, RecorderError>(|| { + format!("failed to extract mikan bangumi fansub of title = {bangumi_title}") + }) + }?; + + bangumi_metas.push(bangumi_meta); + } + + Ok(bangumi_metas) +} + #[cfg(test)] mod test { #![allow(unused_variables)] - use std::{fs, sync::Arc}; + use std::fs; - use futures::{TryStreamExt, pin_mut}; - use http::header; use rstest::{fixture, rstest}; use tracing::Level; use url::Url; use zune_image::{codecs::ImageFormat, image::Image}; use super::*; - use crate::{ - extract::mikan::MikanCredentialForm, - test_utils::{ - app::UnitTestAppContext, mikan::build_testing_mikan_client, - tracing::try_init_testing_tracing, - }, + use crate::test_utils::{ + app::UnitTestAppContext, database::build_testing_database_service, + mikan::build_testing_mikan_client, storage::build_testing_storage_service, + tracing::try_init_testing_tracing, }; #[fixture] @@ -566,7 +723,7 @@ mod test { #[rstest] #[tokio::test] - async fn test_extract_mikan_poster_from_src(before_each: ()) -> RecorderResult<()> { + async fn test_scrape_mikan_poster_data_from_image_url(before_each: ()) -> RecorderResult<()> { let mut mikan_server = mockito::Server::new_async().await; let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@@ -579,11 +736,11 @@ mod test { .create_async() .await; - let bgm_poster = - extract_mikan_poster_meta_from_src(&mikan_client, bangumi_poster_url).await?; + let bgm_poster_data = + scrape_mikan_poster_data_from_image_url(&mikan_client, bangumi_poster_url).await?; + bangumi_poster_mock.expect(1); - let u8_data = bgm_poster.poster_data.expect("should have poster data"); - let image = Image::read(u8_data.to_vec(), Default::default()); + let image = Image::read(bgm_poster_data.to_vec(), Default::default()); assert!( image.is_ok_and(|img| img .metadata() @@ -597,100 +754,66 @@ mod test { #[rstest] #[tokio::test] - async fn test_extract_mikan_episode(before_each: ()) -> RecorderResult<()> { + async fn test_scrape_mikan_poster_meta_from_image_url(before_each: ()) -> RecorderResult<()> { let mut mikan_server = mockito::Server::new_async().await; let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?; + let storage_service = build_testing_storage_service().await?; + let storage_operator = storage_service.get_operator()?; - let episode_homepage_url = - mikan_base_url.join("/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a")?; + let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?; - let episode_homepage_mock = mikan_server - .mock("GET", episode_homepage_url.path()) - .with_body_from_file( - "tests/resources/mikan/Episode-475184dce83ea2b82902592a5ac3343f6d54b36a.htm", - ) + let bangumi_poster_mock = mikan_server + .mock("GET", bangumi_poster_url.path()) + .with_body_from_file("tests/resources/mikan/Bangumi-202309-5ce9fed1.jpg") .create_async() .await; - let ep_meta = extract_mikan_episode_meta_from_episode_homepage( + let bgm_poster = scrape_mikan_poster_meta_from_image_url( &mikan_client, - episode_homepage_url.clone(), + &storage_service, + bangumi_poster_url, + 1, ) .await?; - assert_eq!(ep_meta.homepage, episode_homepage_url); - assert_eq!(ep_meta.bangumi_title, "葬送的芙莉莲"); - assert_eq!( - ep_meta - .origin_poster_src - .as_ref() - .map(|s| s.path().to_string()), - Some(String::from("/images/Bangumi/202309/5ce9fed1.jpg")) + bangumi_poster_mock.expect(1); + + let storage_fullname = storage_service.get_fullname( + StorageContentCategory::Image, + 1, + Some(MIKAN_POSTER_BUCKET_KEY), + "202309/5ce9fed1.jpg", ); - assert_eq!(ep_meta.fansub, "LoliHouse"); - assert_eq!(ep_meta.mikan_fansub_id, "370"); - assert_eq!(ep_meta.mikan_bangumi_id, "3141"); + let storage_fullename_str = storage_fullname.as_str(); - Ok(()) - } + assert!(storage_operator.exists(storage_fullename_str).await?); - #[rstest] - #[tokio::test] - async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage( - before_each: (), - ) -> RecorderResult<()> { - let mut mikan_server = mockito::Server::new_async().await; - let mikan_base_url = Url::parse(&mikan_server.url())?; - let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?; - - let bangumi_homepage_url = mikan_base_url.join("/Home/Bangumi/3416#370")?; - - let bangumi_homepage_mock = mikan_server - .mock("GET", bangumi_homepage_url.path()) - .with_body_from_file("tests/resources/mikan/Bangumi-3416-370.htm") - .create_async() - .await; - - let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage( - &mikan_client, - bangumi_homepage_url.clone(), - ) - .await?; - - assert_eq!(bgm_meta.homepage, bangumi_homepage_url); - assert_eq!(bgm_meta.bangumi_title, "叹气的亡灵想隐退"); - assert_eq!( - bgm_meta - .origin_poster_src - .as_ref() - .map(|s| 
s.path().to_string()), - Some(String::from("/images/Bangumi/202410/480ef127.jpg")) - ); - assert_eq!(bgm_meta.fansub, Some(String::from("LoliHouse"))); - assert_eq!(bgm_meta.mikan_fansub_id, Some(String::from("370"))); - assert_eq!(bgm_meta.mikan_bangumi_id, "3416"); + let expected_data = fs::read("tests/resources/mikan/Bangumi-202309-5ce9fed1.jpg")?; + let found_data = storage_operator.read(storage_fullename_str).await?.to_vec(); + assert_eq!(expected_data, found_data); Ok(()) } #[rstest] #[test] - fn test_extract_mikan_bangumi_indices_meta_from_season_flow_fragment( + fn test_extract_mikan_bangumi_index_meta_list_from_season_flow_fragment( before_each: (), ) -> RecorderResult<()> { - let fragment = + let fragment_str = fs::read_to_string("tests/resources/mikan/BangumiCoverFlow-2025-spring.html")?; - let indices = extract_mikan_bangumi_indices_meta_from_season_flow_fragment( - &fragment, - Url::parse("https://mikanani.me/")?, - ); + let mikan_base_url = Url::parse("https://mikanani.me/")?; - tracing::info!("indices: {:#?}", &indices[0]); + let bangumi_index_meta_list = + extract_mikan_bangumi_index_meta_list_from_season_flow_fragment( + &Html::parse_document(&fragment_str), + &mikan_base_url, + ); - assert_eq!(indices.len(), 49); - let first = &indices[0]; + assert_eq!(bangumi_index_meta_list.len(), 49); + let first = &bangumi_index_meta_list[0]; assert_eq!(first.bangumi_title, "吉伊卡哇"); assert_eq!(first.mikan_bangumi_id, "3288"); assert_eq!( @@ -711,41 +834,42 @@ mod test { #[rstest] #[test] - fn test_extract_mikan_bangumi_indices_meta_from_season_flow_fragment_noauth( + fn test_extract_mikan_bangumi_index_meta_list_from_season_flow_fragment_noauth( before_each: (), ) -> RecorderResult<()> { - let fragment = + let fragment_str = fs::read_to_string("tests/resources/mikan/BangumiCoverFlow-2025-spring-noauth.html")?; - let indices = extract_mikan_bangumi_indices_meta_from_season_flow_fragment( - &fragment, - Url::parse("https://mikanani.me/")?, - ); + let bangumi_index_meta_list = + extract_mikan_bangumi_index_meta_list_from_season_flow_fragment( + &Html::parse_document(&fragment_str), + &Url::parse("https://mikanani.me/")?, + ); - assert!(indices.is_empty()); + assert!(bangumi_index_meta_list.is_empty()); Ok(()) } #[rstest] #[test] fn test_extract_mikan_bangumi_meta_from_expand_subscribed_fragment( before_each: (), ) -> RecorderResult<()> { let origin_poster_src = Url::parse("https://mikanani.me/images/Bangumi/202504/076c1094.jpg")?; - let bangumi_index = MikanBangumiIndexMeta { + let bangumi_index_meta = MikanBangumiIndexMeta { homepage: Url::parse("https://mikanani.me/Home/Bangumi/3599")?, origin_poster_src: Some(origin_poster_src.clone()), bangumi_title: "夏日口袋".to_string(), mikan_bangumi_id: "3599".to_string(), }; - let fragment = fs::read_to_string("tests/resources/mikan/ExpandBangumi-3599.html")?; + let fragment_str = fs::read_to_string("tests/resources/mikan/ExpandBangumi-3599.html")?; let bangumi = extract_mikan_bangumi_meta_from_expand_subscribed_fragment( - bangumi_index.clone(), - &fragment, + &Html::parse_document(&fragment_str), + bangumi_index_meta.clone(), Url::parse("https://mikanani.me/")?, ) .unwrap_or_else(|| { @@ -756,11 +880,17 @@ mod test { bangumi.homepage, Url::parse("https://mikanani.me/Home/Bangumi/3599#370")?
); - assert_eq!(bangumi.bangumi_title, bangumi_index.bangumi_title); - assert_eq!(bangumi.mikan_bangumi_id, bangumi_index.mikan_bangumi_id); - assert_eq!(bangumi.origin_poster_src, bangumi_index.origin_poster_src); - assert_eq!(bangumi.mikan_fansub_id, Some(String::from("370"))); - assert_eq!(bangumi.fansub, Some(String::from("LoliHouse"))); + assert_eq!(bangumi.bangumi_title, bangumi_index_meta.bangumi_title); + assert_eq!( + bangumi.mikan_bangumi_id, + bangumi_index_meta.mikan_bangumi_id + ); + assert_eq!( + bangumi.origin_poster_src, + bangumi_index_meta.origin_poster_src + ); + assert_eq!(bangumi.mikan_fansub_id, String::from("370")); + assert_eq!(bangumi.fansub, String::from("LoliHouse")); Ok(()) } @@ -772,18 +902,19 @@ mod test { ) -> RecorderResult<()> { let origin_poster_src = Url::parse("https://mikanani.me/images/Bangumi/202504/076c1094.jpg")?; - let bangumi_index = MikanBangumiIndexMeta { + let bangumi_index_meta = MikanBangumiIndexMeta { homepage: Url::parse("https://mikanani.me/Home/Bangumi/3599")?, origin_poster_src: Some(origin_poster_src.clone()), bangumi_title: "夏日口袋".to_string(), mikan_bangumi_id: "3599".to_string(), }; - let fragment = fs::read_to_string("tests/resources/mikan/ExpandBangumi-3599-noauth.html")?; + let fragment_str = + fs::read_to_string("tests/resources/mikan/ExpandBangumi-3599-noauth.html")?; let bangumi = extract_mikan_bangumi_meta_from_expand_subscribed_fragment( - bangumi_index.clone(), - &fragment, + &Html::parse_document(&fragment_str), + bangumi_index_meta.clone(), Url::parse("https://mikanani.me/")?, ); @@ -792,6 +923,124 @@ mod test { Ok(()) } + #[rstest] + #[tokio::test] + async fn test_scrape_mikan_bangumi_meta_list_from_season_flow_url( + before_each: (), + ) -> RecorderResult<()> { + let mut mikan_server = mockito::Server::new_async().await; + let mikan_base_url = Url::parse(&mikan_server.url())?; + + let app_ctx = { + let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?; + let db_service = build_testing_database_service().await?; + let app_ctx = UnitTestAppContext::builder() + .mikan(mikan_client) + .db(db_service) + .build(); + + Arc::new(app_ctx) + }; + + let mikan_client = app_ctx.mikan(); + + let mikan_season_flow_url = + build_mikan_season_flow_url(mikan_base_url, 2025, MikanSeasonStr::Spring); + + let bangumi_meta_list = scrape_mikan_bangumi_meta_list_from_season_flow_url( + mikan_client, + app_ctx.clone(), + mikan_season_flow_url, + 1, + ) + .await?; + + assert!(!bangumi_meta_list.is_empty()); + + Ok(()) + } + + #[rstest] + #[tokio::test] + async fn test_scrape_mikan_episode_meta_from_episode_homepage_url( + before_each: (), + ) -> RecorderResult<()> { + let mut mikan_server = mockito::Server::new_async().await; + let mikan_base_url = Url::parse(&mikan_server.url())?; + let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?; + + let episode_homepage_url = + mikan_base_url.join("/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a")?; + + let episode_homepage_mock = mikan_server + .mock("GET", episode_homepage_url.path()) + .with_body_from_file( + "tests/resources/mikan/Episode-475184dce83ea2b82902592a5ac3343f6d54b36a.htm", + ) + .create_async() + .await; + + let episode_meta = scrape_mikan_episode_meta_from_episode_homepage_url( + &mikan_client, + episode_homepage_url.clone(), + ) + .await?; + + assert_eq!(episode_meta.homepage, episode_homepage_url); + assert_eq!(episode_meta.bangumi_title, "葬送的芙莉莲"); + assert_eq!( + episode_meta + .origin_poster_src + .as_ref() + .map(|s| 
+            Some(String::from("/images/Bangumi/202309/5ce9fed1.jpg"))
+        );
+        assert_eq!(episode_meta.fansub, "LoliHouse");
+        assert_eq!(episode_meta.mikan_fansub_id, "370");
+        assert_eq!(episode_meta.mikan_bangumi_id, "3141");
+
+        Ok(())
+    }
+
+    #[rstest]
+    #[tokio::test]
+    async fn test_scrape_mikan_bangumi_meta_from_bangumi_homepage_url(
+        before_each: (),
+    ) -> RecorderResult<()> {
+        let mut mikan_server = mockito::Server::new_async().await;
+        let mikan_base_url = Url::parse(&mikan_server.url())?;
+        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
+
+        let bangumi_homepage_url = mikan_base_url.join("/Home/Bangumi/3416#370")?;
+
+        let bangumi_homepage_mock = mikan_server
+            .mock("GET", bangumi_homepage_url.path())
+            .with_body_from_file("tests/resources/mikan/Bangumi-3416-370.htm")
+            .create_async()
+            .await;
+
+        let bangumi_meta = scrape_mikan_bangumi_meta_from_bangumi_homepage_url(
+            &mikan_client,
+            bangumi_homepage_url.clone(),
+        )
+        .await?;
+
+        assert_eq!(bangumi_meta.homepage, bangumi_homepage_url);
+        assert_eq!(bangumi_meta.bangumi_title, "叹气的亡灵想隐退");
+        assert_eq!(
+            bangumi_meta
+                .origin_poster_src
+                .as_ref()
+                .map(|s| s.path().to_string()),
+            Some(String::from("/images/Bangumi/202410/480ef127.jpg"))
+        );
+        assert_eq!(bangumi_meta.fansub, String::from("LoliHouse"));
+        assert_eq!(bangumi_meta.mikan_fansub_id, String::from("370"));
+        assert_eq!(bangumi_meta.mikan_bangumi_id, "3416");
+
+        Ok(())
+    }
+
     // #[rstest]
     // #[tokio::test]
     // async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(
diff --git a/apps/recorder/src/extract/rawname/parser.rs b/apps/recorder/src/extract/rawname/parser.rs
index 7f5b5f9..d27ecf1 100644
--- a/apps/recorder/src/extract/rawname/parser.rs
+++ b/apps/recorder/src/extract/rawname/parser.rs
@@ -101,19 +101,19 @@ fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RecorderRes
             raw = sub.replace_all(&raw, "").to_string();
         }
     }
-    if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw) {
-        if m.len() as f32 > (raw.len() as f32) * 0.5 {
-            let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
-                .replace(&raw, "")
-                .chars()
-                .collect_vec();
-            while let Some(ch) = raw1.pop() {
-                if ch == ']' {
-                    break;
-                }
+    if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw)
+        && m.len() as f32 > (raw.len() as f32) * 0.5
+    {
+        let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
+            .replace(&raw, "")
+            .chars()
+            .collect_vec();
+        while let Some(ch) = raw1.pop() {
+            if ch == ']' {
+                break;
             }
-            raw = raw1.into_iter().collect();
         }
+        raw = raw1.into_iter().collect();
     }
     Ok(raw.to_string())
 }
@@ -136,23 +136,21 @@ pub fn extract_season_from_title_body(title_body: &str) -> (String, Option<i32>
-            {
-                season = s;
-                break;
-            }
+        {
+            season = s;
+            break;
         }
-        if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s) {
-            if let Some(s) = DIGIT_1PLUS_REG
-                .find(m.as_str())
-                .and_then(|s| s.as_str().parse::<i32>().ok())
-            {
-                season = s;
-                break;
-            }
+        if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s)
+            && let Some(s) = DIGIT_1PLUS_REG
+                .find(m.as_str())
+                .and_then(|s| s.as_str().parse::<i32>().ok())
+        {
+            season = s;
+            break;
         }
         if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
             if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
diff --git a/apps/recorder/src/lib.rs b/apps/recorder/src/lib.rs
index b29da28..57dde78 100644
--- a/apps/recorder/src/lib.rs
+++ b/apps/recorder/src/lib.rs
@@ -1,5 +1,5 @@
 #![feature(
-    duration_constructors,
+    duration_constructors_lite,
     assert_matches,
     unboxed_closures,
     impl_trait_in_bindings,
diff --git a/apps/recorder/src/logger/service.rs b/apps/recorder/src/logger/service.rs
index 1424fe4..245008c 100644
--- a/apps/recorder/src/logger/service.rs
+++ b/apps/recorder/src/logger/service.rs
@@ -77,62 +77,62 @@ impl LoggerService {
     pub async fn from_config(config: LoggerConfig) -> RecorderResult<Self> {
         let mut layers: Vec<Box<dyn Layer<Registry> + Sync + Send>> = Vec::new();

-        if let Some(file_appender_config) = config.file_appender.as_ref() {
-            if file_appender_config.enable {
-                let dir = file_appender_config
-                    .dir
-                    .as_ref()
-                    .map_or_else(|| "./logs".to_string(), ToString::to_string);
+        if let Some(file_appender_config) = config.file_appender.as_ref()
+            && file_appender_config.enable
+        {
+            let dir = file_appender_config
+                .dir
+                .as_ref()
+                .map_or_else(|| "./logs".to_string(), ToString::to_string);

-                let mut rolling_builder = tracing_appender::rolling::Builder::default()
-                    .max_log_files(file_appender_config.max_log_files);
+            let mut rolling_builder = tracing_appender::rolling::Builder::default()
+                .max_log_files(file_appender_config.max_log_files);

-                rolling_builder = match file_appender_config.rotation {
-                    LogRotation::Minutely => {
-                        rolling_builder.rotation(tracing_appender::rolling::Rotation::MINUTELY)
-                    }
-                    LogRotation::Hourly => {
-                        rolling_builder.rotation(tracing_appender::rolling::Rotation::HOURLY)
-                    }
-                    LogRotation::Daily => {
-                        rolling_builder.rotation(tracing_appender::rolling::Rotation::DAILY)
-                    }
-                    LogRotation::Never => {
-                        rolling_builder.rotation(tracing_appender::rolling::Rotation::NEVER)
-                    }
+            rolling_builder = match file_appender_config.rotation {
+                LogRotation::Minutely => {
+                    rolling_builder.rotation(tracing_appender::rolling::Rotation::MINUTELY)
+                }
+                LogRotation::Hourly => {
+                    rolling_builder.rotation(tracing_appender::rolling::Rotation::HOURLY)
+                }
+                LogRotation::Daily => {
+                    rolling_builder.rotation(tracing_appender::rolling::Rotation::DAILY)
+                }
+                LogRotation::Never => {
+                    rolling_builder.rotation(tracing_appender::rolling::Rotation::NEVER)
+                }
+            };
+
+            let file_appender = rolling_builder
+                .filename_prefix(
+                    file_appender_config
+                        .filename_prefix
+                        .as_ref()
+                        .map_or_else(String::new, ToString::to_string),
+                )
+                .filename_suffix(
+                    file_appender_config
+                        .filename_suffix
+                        .as_ref()
+                        .map_or_else(String::new, ToString::to_string),
+                )
+                .build(dir)?;
+
+            let file_appender_layer = if file_appender_config.non_blocking {
+                let (non_blocking_file_appender, work_guard) =
+                    tracing_appender::non_blocking(file_appender);
+                if NONBLOCKING_WORK_GUARD_KEEP.set(work_guard).is_err() {
+                    whatever!("cannot lock for appender");
                 };
-
-                let file_appender = rolling_builder
-                    .filename_prefix(
-                        file_appender_config
-                            .filename_prefix
-                            .as_ref()
-                            .map_or_else(String::new, ToString::to_string),
-                    )
-                    .filename_suffix(
-                        file_appender_config
-                            .filename_suffix
-                            .as_ref()
-                            .map_or_else(String::new, ToString::to_string),
-                    )
-                    .build(dir)?;
-
-                let file_appender_layer = if file_appender_config.non_blocking {
-                    let (non_blocking_file_appender, work_guard) =
-                        tracing_appender::non_blocking(file_appender);
-                    if NONBLOCKING_WORK_GUARD_KEEP.set(work_guard).is_err() {
-                        whatever!("cannot lock for appender");
-                    };
-                    Self::init_layer(
-                        non_blocking_file_appender,
-                        &file_appender_config.format,
-                        false,
-                    )
-                } else {
-                    Self::init_layer(file_appender, &file_appender_config.format, false)
-                };
-                layers.push(file_appender_layer);
-            }
+                Self::init_layer(
+                    non_blocking_file_appender,
+                    &file_appender_config.format,
+                    false,
+                )
+            } else {
+                Self::init_layer(file_appender, &file_appender_config.format, false)
+            };
+            layers.push(file_appender_layer);
         }

         if config.enable {
diff --git a/apps/recorder/src/models/subscriptions.rs b/apps/recorder/src/models/subscriptions.rs
index 23b0f38..1921758 100644
--- a/apps/recorder/src/models/subscriptions.rs
+++ b/apps/recorder/src/models/subscriptions.rs
@@ -11,13 +11,11 @@ use crate::{
     errors::RecorderResult,
     extract::{
         mikan::{
-            build_mikan_bangumi_homepage_url, build_mikan_bangumi_rss_url,
-            extract_mikan_bangumi_meta_from_bangumi_homepage,
-            extract_mikan_episode_meta_from_episode_homepage,
+            MikanBangumiPosterMeta, build_mikan_bangumi_homepage_url, build_mikan_bangumi_rss_url,
             extract_mikan_rss_channel_from_rss_link,
-            web_extract::{
-                MikanBangumiPosterMeta, extract_mikan_bangumi_poster_meta_from_src_with_cache,
-            },
+            scrape_mikan_bangumi_meta_from_bangumi_homepage_url,
+            scrape_mikan_episode_meta_from_episode_homepage_url,
+            scrape_mikan_poster_meta_from_image_url,
         },
         rawname::extract_season_from_title_body,
     },
@@ -272,7 +270,7 @@ impl Model {
         let mut new_metas = vec![];
         for new_rss_item in new_rss_items.iter() {
             new_metas.push(
-                extract_mikan_episode_meta_from_episode_homepage(
+                scrape_mikan_episode_meta_from_episode_homepage_url(
                     mikan_client,
                     new_rss_item.homepage.clone(),
                 )
@@ -305,7 +303,7 @@ impl Model {
                 mikan_bangumi_id.to_string(),
                 mikan_fansub_id.to_string(),
                 async |am| -> RecorderResult<()> {
-                    let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
+                    let bgm_meta = scrape_mikan_bangumi_meta_from_bangumi_homepage_url(
                         mikan_client,
                         bgm_homepage.clone(),
                     )
@@ -319,20 +317,20 @@ impl Model {
                     am.season_raw = ActiveValue::Set(bgm_season_raw);
                     am.rss_link = ActiveValue::Set(Some(bgm_rss_link.to_string()));
                     am.homepage = ActiveValue::Set(Some(bgm_homepage.to_string()));
-                    am.fansub = ActiveValue::Set(bgm_meta.fansub);
-                    if let Some(origin_poster_src) = bgm_meta.origin_poster_src {
-                        if let MikanBangumiPosterMeta {
+                    am.fansub = ActiveValue::Set(Some(bgm_meta.fansub));
+                    if let Some(origin_poster_src) = bgm_meta.origin_poster_src
+                        && let MikanBangumiPosterMeta {
                             poster_src: Some(poster_src),
                             ..
-                        } = extract_mikan_bangumi_poster_meta_from_src_with_cache(
-                            ctx,
+                        } = scrape_mikan_poster_meta_from_image_url(
+                            mikan_client,
+                            ctx.storage(),
                             origin_poster_src,
                             self.subscriber_id,
                         )
                         .await?
-                        {
-                            am.poster_link = ActiveValue::Set(Some(poster_src))
-                        }
+                    {
+                        am.poster_link = ActiveValue::Set(Some(poster_src))
                     }
                     Ok(())
                 },
diff --git a/apps/recorder/src/storage/client.rs b/apps/recorder/src/storage/client.rs
index ce45d21..f14ac90 100644
--- a/apps/recorder/src/storage/client.rs
+++ b/apps/recorder/src/storage/client.rs
@@ -1,14 +1,13 @@
 use std::fmt;

 use bytes::Bytes;
-use opendal::{Buffer, Operator, layers::LoggingLayer, services::Fs};
+use opendal::{Buffer, Operator, layers::LoggingLayer};
 use quirks_path::{Path, PathBuf};
 use serde::{Deserialize, Serialize};
 use url::Url;
-use uuid::Uuid;

 use super::StorageConfig;
-use crate::errors::app_error::{RecorderError, RecorderResult};
+use crate::errors::app_error::RecorderResult;

 #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(rename_all = "snake_case")]
@@ -44,6 +43,88 @@ impl fmt::Display for StorageStoredUrl {
     }
 }

+#[async_trait::async_trait]
+pub trait StorageServiceTrait: Sync {
+    fn get_operator(&self) -> RecorderResult<Operator>;
+
+    fn get_fullname(
+        &self,
+        content_category: StorageContentCategory,
+        subscriber_id: i32,
+        bucket: Option<&str>,
+        filename: &str,
+    ) -> PathBuf {
+        [
+            &subscriber_id.to_string(),
+            content_category.as_ref(),
+            bucket.unwrap_or_default(),
+            filename,
+        ]
+        .into_iter()
+        .map(Path::new)
+        .collect::<PathBuf>()
+    }
+
+    async fn store_object(
+        &self,
+        content_category: StorageContentCategory,
+        subscriber_id: i32,
+        bucket: Option<&str>,
+        filename: &str,
+        data: Bytes,
+    ) -> RecorderResult<StorageStoredUrl> {
+        let fullname = self.get_fullname(content_category, subscriber_id, bucket, filename);
+
+        let operator = self.get_operator()?;
+
+        if let Some(dirname) = fullname.parent() {
+            let dirname = dirname.join("/");
+            operator.create_dir(dirname.as_str()).await?;
+        }
+
+        operator.write(fullname.as_str(), data).await?;
+
+        Ok(StorageStoredUrl::RelativePath {
+            path: fullname.to_string(),
+        })
+    }
+
+    async fn exists_object(
+        &self,
+        content_category: StorageContentCategory,
+        subscriber_id: i32,
+        bucket: Option<&str>,
+        filename: &str,
+    ) -> RecorderResult<Option<StorageStoredUrl>> {
+        let fullname = self.get_fullname(content_category, subscriber_id, bucket, filename);
+
+        let operator = self.get_operator()?;
+
+        if operator.exists(fullname.as_str()).await? {
+            Ok(Some(StorageStoredUrl::RelativePath {
+                path: fullname.to_string(),
+            }))
+        } else {
+            Ok(None)
+        }
+    }
+
+    async fn load_object(
+        &self,
+        content_category: StorageContentCategory,
+        subscriber_id: i32,
+        bucket: Option<&str>,
+        filename: &str,
+    ) -> RecorderResult<Buffer> {
+        let fullname = self.get_fullname(content_category, subscriber_id, bucket, filename);
+
+        let operator = self.get_operator()?;
+
+        let data = operator.read(fullname.as_str()).await?;
+
+        Ok(data)
+    }
+}
+
 #[derive(Debug, Clone)]
 pub struct StorageService {
     pub data_dir: String,
@@ -55,114 +136,15 @@ impl StorageService {
             data_dir: config.data_dir.to_string(),
         })
     }
+}

-    pub fn get_fs(&self) -> Fs {
-        Fs::default().root(&self.data_dir)
-    }
+#[async_trait::async_trait]
+impl StorageServiceTrait for StorageService {
+    fn get_operator(&self) -> RecorderResult<Operator> {
+        let fs_op = Operator::new(opendal::services::Fs::default().root(&self.data_dir))?
+            .layer(LoggingLayer::default())
+            .finish();

-    pub fn create_filename(extname: &str) -> String {
-        format!("{}{}", Uuid::new_v4(), extname)
-    }
-
-    pub async fn store_object(
-        &self,
-        content_category: StorageContentCategory,
-        subscriber_id: i32,
-        bucket: Option<&str>,
-        filename: &str,
-        data: Bytes,
-    ) -> Result<StorageStoredUrl, RecorderError> {
-        match content_category {
-            StorageContentCategory::Image => {
-                let fullname = [
-                    &subscriber_id.to_string(),
-                    content_category.as_ref(),
-                    bucket.unwrap_or_default(),
-                    filename,
-                ]
-                .into_iter()
-                .map(Path::new)
-                .collect::<PathBuf>();
-
-                let fs_op = Operator::new(self.get_fs())?
-                    .layer(LoggingLayer::default())
-                    .finish();
-
-                if let Some(dirname) = fullname.parent() {
-                    let dirname = dirname.join("/");
-                    fs_op.create_dir(dirname.as_str()).await?;
-                }
-
-                fs_op.write(fullname.as_str(), data).await?;
-
-                Ok(StorageStoredUrl::RelativePath {
-                    path: fullname.to_string(),
-                })
-            }
-        }
-    }
-
-    pub async fn exists_object(
-        &self,
-        content_category: StorageContentCategory,
-        subscriber_id: i32,
-        bucket: Option<&str>,
-        filename: &str,
-    ) -> Result<Option<StorageStoredUrl>, RecorderError> {
-        match content_category {
-            StorageContentCategory::Image => {
-                let fullname = [
-                    &subscriber_id.to_string(),
-                    content_category.as_ref(),
-                    bucket.unwrap_or_default(),
-                    filename,
-                ]
-                .into_iter()
-                .map(Path::new)
-                .collect::<PathBuf>();
-
-                let fs_op = Operator::new(self.get_fs())?
-                    .layer(LoggingLayer::default())
-                    .finish();
-
-                if fs_op.exists(fullname.as_str()).await? {
-                    Ok(Some(StorageStoredUrl::RelativePath {
-                        path: fullname.to_string(),
-                    }))
-                } else {
-                    Ok(None)
-                }
-            }
-        }
-    }
-
-    pub async fn load_object(
-        &self,
-        content_category: StorageContentCategory,
-        subscriber_pid: &str,
-        bucket: Option<&str>,
-        filename: &str,
-    ) -> RecorderResult<Buffer> {
-        match content_category {
-            StorageContentCategory::Image => {
-                let fullname = [
-                    subscriber_pid,
-                    content_category.as_ref(),
-                    bucket.unwrap_or_default(),
-                    filename,
-                ]
-                .into_iter()
-                .map(Path::new)
-                .collect::<PathBuf>();
-
-                let fs_op = Operator::new(self.get_fs())?
-                    .layer(LoggingLayer::default())
-                    .finish();
-
-                let data = fs_op.read(fullname.as_str()).await?;
-
-                Ok(data)
-            }
-        }
+        Ok(fs_op)
     }
 }
diff --git a/apps/recorder/src/storage/mod.rs b/apps/recorder/src/storage/mod.rs
index f8e448a..8edde09 100644
--- a/apps/recorder/src/storage/mod.rs
+++ b/apps/recorder/src/storage/mod.rs
@@ -1,4 +1,4 @@
-pub mod client;
-pub mod config;
-pub use client::{StorageContentCategory, StorageService};
+mod client;
+mod config;
+pub use client::{StorageContentCategory, StorageService, StorageServiceTrait, StorageStoredUrl};
 pub use config::StorageConfig;
diff --git a/apps/recorder/src/tasks/mikan/extract_season_subscription.rs b/apps/recorder/src/tasks/mikan/extract_season_subscription.rs
index 2eca1d2..9db481b 100644
--- a/apps/recorder/src/tasks/mikan/extract_season_subscription.rs
+++ b/apps/recorder/src/tasks/mikan/extract_season_subscription.rs
@@ -2,20 +2,14 @@ use std::{ops::Deref, sync::Arc};

 use apalis::prelude::*;
 use apalis_sql::postgres::PostgresStorage;
-use fetch::fetch_html;
 use serde::{Deserialize, Serialize};
-use snafu::OptionExt;

 use crate::{
     app::AppContextTrait,
-    errors::{RecorderError, RecorderResult},
+    errors::RecorderResult,
     extract::mikan::{
         MikanBangumiMeta, MikanSeasonStr, build_mikan_season_flow_url,
-        extract_mikan_bangumi_indices_meta_from_season_flow_fragment,
-        web_extract::{
-            MikanBangumiIndexMeta, build_mikan_bangumi_expand_subscribed_fragment_url,
-            extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
-        },
+        scrape_mikan_bangumi_meta_list_from_season_flow_url,
     },
 };

@@ -31,17 +25,6 @@ pub struct ExtractMikanSeasonSubscriptionTask {
     pub subscriber_id: i32,
 }

-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct ExtractMikanSeasonSubscriptionFansubsTask {
-    pub task_id: i32,
-    pub year: i32,
-    pub season_str: MikanSeasonStr,
-    pub credential_id: i32,
-    pub subscription_id: i32,
-    pub subscriber_id: i32,
-    pub bangumi_indices: Vec<MikanBangumiIndexMeta>,
-}
-
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct ExtractMikanSeasonSubscriptionTaskResult {
     pub task_id: i32,
@@ -50,97 +33,31 @@ pub struct ExtractMikanSeasonSubscriptionTaskResult {
     pub credential_id: i32,
     pub subscription_id: i32,
     pub subscriber_id: i32,
-    pub bangumi_metas: Vec<MikanBangumiMeta>,
+    pub bangumi_meta_list: Vec<MikanBangumiMeta>,
 }

 pub async fn extract_mikan_season_subscription(
     job: ExtractMikanSeasonSubscriptionTask,
     data: Data<Arc<dyn AppContextTrait>>,
-) -> RecorderResult<GoTo<ExtractMikanSeasonSubscriptionFansubsTask>> {
-    let ctx = data.deref();
-
-    let mikan_client = ctx
-        .mikan()
-        .fork_with_credential(ctx.clone(), Some(job.credential_id))
-        .await?;
-
-    let mikan_base_url = mikan_client.base_url().clone();
-
-    let season_flow_fragment_url =
-        build_mikan_season_flow_url(mikan_base_url.clone(), job.year, job.season_str);
-
-    let season_flow_fragment = fetch_html(&mikan_client, season_flow_fragment_url.clone()).await?;
-
-    let mut bangumi_indices = extract_mikan_bangumi_indices_meta_from_season_flow_fragment(
-        &season_flow_fragment,
-        mikan_base_url.clone(),
-    );
-
-    if bangumi_indices.is_empty() && !mikan_client.has_login().await? {
-        mikan_client.login().await?;
-        let season_flow_fragment =
-            fetch_html(&mikan_client, season_flow_fragment_url.clone()).await?;
-        bangumi_indices = extract_mikan_bangumi_indices_meta_from_season_flow_fragment(
-            &season_flow_fragment,
-            mikan_base_url.clone(),
-        );
-    }
-
-    Ok(GoTo::Next(ExtractMikanSeasonSubscriptionFansubsTask {
-        task_id: job.task_id,
-        year: job.year,
-        season_str: job.season_str,
-        credential_id: job.credential_id,
-        subscription_id: job.subscription_id,
-        subscriber_id: job.subscriber_id,
-        bangumi_indices,
-    }))
-}
-
-pub async fn extract_mikan_season_subscription_fansubs(
-    job: ExtractMikanSeasonSubscriptionFansubsTask,
-    data: Data<Arc<dyn AppContextTrait>>,
 ) -> RecorderResult<GoTo<ExtractMikanSeasonSubscriptionTaskResult>> {
     let ctx = data.deref();

-    let mikan_client = ctx
-        .mikan()
-        .fork_with_credential(ctx.clone(), Some(job.credential_id))
-        .await?;
+    let mikan_client = ctx.mikan();
+    let mikan_base_url = mikan_client.base_url();

-    let bangumi_indices = job.bangumi_indices;
+    let mikan_season_flow_url =
+        build_mikan_season_flow_url(mikan_base_url.clone(), job.year, job.season_str);

-    let mut bangumi_metas = vec![];
-
-    let mikan_base_url = mikan_client.base_url().clone();
-
-    for bangumi_index in bangumi_indices {
-        let bangumi_title = bangumi_index.bangumi_title.clone();
-        let bangumi_expand_subscribed_fragment_url =
-            build_mikan_bangumi_expand_subscribed_fragment_url(
-                mikan_base_url.clone(),
-                &bangumi_index.mikan_bangumi_id,
-            );
-        let bangumi_expand_subscribed_fragment =
-            fetch_html(&mikan_client, bangumi_expand_subscribed_fragment_url).await?;
-
-        let bangumi_meta = extract_mikan_bangumi_meta_from_expand_subscribed_fragment(
-            bangumi_index,
-            &bangumi_expand_subscribed_fragment,
-            mikan_base_url.clone(),
-        )
-        .with_whatever_context::<_, String, RecorderError>(|| {
-            format!(
-                "failed to extract mikan bangumi fansub of title = {}",
-                bangumi_title
-            )
-        })?;
-
-        bangumi_metas.push(bangumi_meta);
-    }
+    let bangumi_meta_list = scrape_mikan_bangumi_meta_list_from_season_flow_url(
+        mikan_client,
+        ctx.clone(),
+        mikan_season_flow_url,
+        job.credential_id,
+    )
+    .await?;

     Ok(GoTo::Done(ExtractMikanSeasonSubscriptionTaskResult {
-        bangumi_metas,
+        bangumi_meta_list,
         credential_id: job.credential_id,
         season_str: job.season_str,
         subscriber_id: job.subscriber_id,
@@ -157,9 +74,7 @@ pub fn register_extract_mikan_season_subscription_task(
     let pool = ctx.db().get_postgres_connection_pool().clone();
     let storage = PostgresStorage::new(pool);

-    let steps = StepBuilder::new()
-        .step_fn(extract_mikan_season_subscription)
-        .step_fn(extract_mikan_season_subscription_fansubs);
+    let steps = StepBuilder::new().step_fn(extract_mikan_season_subscription);

     let worker = WorkerBuilder::new(TASK_NAME)
         .catch_panic()
diff --git a/apps/recorder/src/temp/.gitkeep b/apps/recorder/src/temp/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/apps/recorder/src/test_utils/app.rs b/apps/recorder/src/test_utils/app.rs
index 5a75f6a..22b8937 100644
--- a/apps/recorder/src/test_utils/app.rs
+++ b/apps/recorder/src/test_utils/app.rs
@@ -58,7 +58,7 @@ impl AppContextTrait for UnitTestAppContext {
         self.graphql.as_ref().expect("should set graphql")
     }

-    fn storage(&self) -> &crate::storage::StorageService {
+    fn storage(&self) -> &dyn crate::storage::StorageServiceTrait {
         self.storage.as_ref().expect("should set storage")
     }

diff --git a/apps/recorder/src/test_utils/database.rs b/apps/recorder/src/test_utils/database.rs
new file mode 100644
index 0000000..69b0880
--- /dev/null
+++ b/apps/recorder/src/test_utils/database.rs
@@ -0,0 +1,59 @@
+use crate::{
+    database::{DatabaseConfig, DatabaseService},
+    errors::RecorderResult,
+};
+
+#[cfg(feature = "testcontainers")]
+pub async fn build_testing_database_service() -> RecorderResult<DatabaseService> {
+    use testcontainers::runners::AsyncRunner;
+    use testcontainers_ext::{ImageDefaultLogConsumerExt, ImagePruneExistedLabelExt};
+    use testcontainers_modules::postgres::Postgres;
+
+    let container = Postgres::default()
+        .with_db_name("konobangu")
+        .with_user("konobangu")
+        .with_password("konobangu")
+        .with_default_log_consumer()
+        .with_prune_existed_label(env!("CARGO_PKG_NAME"), "postgres", true, true)
+        .await?;
+
+    let container = container.start().await?;
+
+    let host_ip = container.get_host().await?;
+    let host_port = container.get_host_port_ipv4(5432).await?;
+
+    let connection_string =
+        format!("postgres://konobangu:konobangu@{host_ip}:{host_port}/konobangu");
+
+    let mut db_service = DatabaseService::from_config(DatabaseConfig {
+        uri: connection_string,
+        enable_logging: true,
+        min_connections: 1,
+        max_connections: 1,
+        connect_timeout: 5000,
+        idle_timeout: 10000,
+        acquire_timeout: None,
+        auto_migrate: true,
+    })
+    .await?;
+    db_service.container = Some(container);
+
+    Ok(db_service)
+}
+
+#[cfg(not(feature = "testcontainers"))]
+pub async fn build_testing_database_service() -> RecorderResult<DatabaseService> {
+    let db_service = DatabaseService::from_config(DatabaseConfig {
+        uri: String::from("postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"),
+        enable_logging: true,
+        min_connections: 1,
+        max_connections: 1,
+        connect_timeout: 5000,
+        idle_timeout: 10000,
+        acquire_timeout: None,
+        auto_migrate: true,
+    })
+    .await?;
+
+    Ok(db_service)
+}
diff --git a/apps/recorder/src/test_utils/mod.rs b/apps/recorder/src/test_utils/mod.rs
index 4c8a279..f5c8717 100644
--- a/apps/recorder/src/test_utils/mod.rs
+++ b/apps/recorder/src/test_utils/mod.rs
@@ -1,3 +1,5 @@
 pub mod app;
+pub mod database;
 pub mod mikan;
+pub mod storage;
 pub mod tracing;
diff --git a/apps/recorder/src/test_utils/storage.rs b/apps/recorder/src/test_utils/storage.rs
new file mode 100644
index 0000000..b07968b
--- /dev/null
+++ b/apps/recorder/src/test_utils/storage.rs
@@ -0,0 +1,28 @@
+use opendal::{Operator, layers::LoggingLayer};
+
+use crate::{errors::RecorderResult, storage::StorageServiceTrait};
+
+pub struct TestingStorageService {
+    operator: Operator,
+}
+
+impl TestingStorageService {
+    pub fn new() -> RecorderResult<Self> {
+        let op = Operator::new(opendal::services::Memory::default())?
+            .layer(LoggingLayer::default())
+            .finish();
+
+        Ok(Self { operator: op })
+    }
+}
+
+#[async_trait::async_trait]
+impl StorageServiceTrait for TestingStorageService {
+    fn get_operator(&self) -> RecorderResult<Operator> {
+        Ok(self.operator.clone())
+    }
+}
+
+pub async fn build_testing_storage_service() -> RecorderResult<TestingStorageService> {
+    TestingStorageService::new()
+}
diff --git a/apps/recorder/src/test_utils/tracing.rs b/apps/recorder/src/test_utils/tracing.rs
index 03cf18e..210055b 100644
--- a/apps/recorder/src/test_utils/tracing.rs
+++ b/apps/recorder/src/test_utils/tracing.rs
@@ -4,7 +4,7 @@ use tracing_subscriber::EnvFilter;
 pub fn try_init_testing_tracing(level: Level) {
     let crate_name = env!("CARGO_PKG_NAME");
     let level = level.as_str().to_lowercase();
-    let filter = EnvFilter::new(format!("{}[]={}", crate_name, level))
-        .add_directive(format!("mockito[]={}", level).parse().unwrap());
+    let filter = EnvFilter::new(format!("{crate_name}[]={level}"))
+        .add_directive(format!("mockito[]={level}").parse().unwrap());
     let _ = tracing_subscriber::fmt().with_env_filter(filter).try_init();
 }
diff --git a/apps/recorder/src/web/controller/graphql/mod.rs b/apps/recorder/src/web/controller/graphql/mod.rs
index dc501c6..96fde93 100644
--- a/apps/recorder/src/web/controller/graphql/mod.rs
+++ b/apps/recorder/src/web/controller/graphql/mod.rs
@@ -27,10 +27,10 @@ async fn graphql_handler(
 // Check whether this is an introspection query
 fn is_introspection_query(req: &async_graphql::Request) -> bool {
-    if let Some(operation) = &req.operation_name {
-        if operation.starts_with("__") {
-            return true;
-        }
+    if let Some(operation) = &req.operation_name
+        && operation.starts_with("__")
+    {
+        return true;
     }

     // Check whether the query content contains introspection fields
diff --git a/apps/recorder/src/web/middleware/etag.rs b/apps/recorder/src/web/middleware/etag.rs
index c332a42..66554fc 100644
--- a/apps/recorder/src/web/middleware/etag.rs
+++ b/apps/recorder/src/web/middleware/etag.rs
@@ -97,15 +97,14 @@ where
         let res_fut = async move {
             let response = future.await?;
             let etag_from_response = response.headers().get(ETAG).cloned();
-            if let Some(etag_in_request) = ifnm {
-                if let Some(etag_from_response) = etag_from_response {
-                    if etag_in_request == etag_from_response {
-                        return Ok(Response::builder()
-                            .status(StatusCode::NOT_MODIFIED)
-                            .body(Body::empty())
-                            .unwrap());
-                    }
-                }
+            if let Some(etag_in_request) = ifnm
+                && let Some(etag_from_response) = etag_from_response
+                && etag_in_request == etag_from_response
+            {
+                return Ok(Response::builder()
+                    .status(StatusCode::NOT_MODIFIED)
+                    .body(Body::empty())
+                    .unwrap());
             }
             Ok(response)
         };
diff --git a/justfile b/justfile
index b7588f9..1a39127 100644
--- a/justfile
+++ b/justfile
@@ -2,8 +2,7 @@ set windows-shell := ["pwsh.exe", "-c"]
 set dotenv-load := true

 prepare-dev-recorder:
-    cargo install sea-orm-cli
-    cargo install cargo-watch
+    cargo install sea-orm-cli watchexec cargo-llvm-cov cargo-nextest

 dev-webui:
     pnpm run --filter=webui dev
@@ -30,3 +29,6 @@ dev-codegen-wait:
     @until nc -z localhost 5001; do echo "Waiting for Recorder..."; sleep 1; done
     pnpm run --filter=webui codegen-watch

+dev-coverage:
+    cargo llvm-cov test --html
+
diff --git a/packages/downloader/Cargo.toml b/packages/downloader/Cargo.toml
index a11d0dc..5fb8017 100644
--- a/packages/downloader/Cargo.toml
+++ b/packages/downloader/Cargo.toml
@@ -18,7 +18,6 @@ testcontainers = { workspace = true, optional = true }
 testcontainers-modules = { workspace = true, optional = true }
 testcontainers-ext = { workspace = true, optional = true }
 tokio = { workspace = true }
-serde_json = { workspace = true }
 async-trait = { workspace = true }
 tracing = { workspace = true }
 snafu = { workspace = true }
@@ -42,7 +41,6 @@ librqbit = { version = "8", features = ["async-bt", "watch"] }
 util = { workspace = true }
 testing-torrents = { workspace = true, optional = true }
 fetch = { workspace = true }
-dashmap = "6.1.0"

 [dev-dependencies]
diff --git a/rust-toolchain.toml b/rust-toolchain.toml
index b8901de..f644ff5 100644
--- a/rust-toolchain.toml
+++ b/rust-toolchain.toml
@@ -1,4 +1,4 @@
 [toolchain]
-channel = "nightly-2025-02-20"
+channel = "nightly-2025-05-14"
 components = ["rustfmt", "clippy"]
 profile = "default"