feature: add new mikan scrapers
This commit is contained in: commit 3fe0538468 (parent dbded94324)
@@ -2,9 +2,4 @@
 recorder-playground = "run -p recorder --example playground -- --environment development"
 
 [build]
-rustflags = [
-  "-Zthreads=8",
-  "--cfg",
-  "feature=\"testcontainers\"",
-  "-Zshare-generics=y",
-]
+rustflags = ["-Zthreads=8", "-Zshare-generics=y"]
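The hunk above stops injecting `--cfg feature="testcontainers"` through global rustflags; later hunks route the same switch through cargo features instead. A minimal sketch of the cfg-gating this relies on (the function name is hypothetical, not from this repo):

```rust
// Hypothetical helper: compiled only when the crate is built with
// `--features testcontainers`, replacing the old global `--cfg` flag.
#[cfg(feature = "testcontainers")]
pub fn testing_backend() -> &'static str {
    "testcontainers"
}

#[cfg(not(feature = "testcontainers"))]
pub fn testing_backend() -> &'static str {
    "local services"
}
```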
Cargo.lock (generated, 23 changes)
@@ -1709,7 +1709,6 @@ dependencies = [
  "async-trait",
  "bytes",
  "chrono",
- "dashmap 6.1.0",
  "fetch",
  "futures",
  "itertools 0.14.0",
@@ -1721,7 +1720,6 @@ dependencies = [
  "reqwest",
  "serde",
  "serde-value",
- "serde_json",
  "snafu",
  "testcontainers",
  "testcontainers-ext",
@@ -5166,7 +5164,6 @@ dependencies = [
  "sea-orm",
  "sea-orm-migration",
  "seaography",
- "secrecy",
  "serde",
  "serde_json",
  "serde_variant",
@@ -5174,7 +5171,6 @@ dependencies = [
  "serde_yaml",
  "serial_test",
  "snafu",
- "string-interner",
  "tera",
  "testcontainers",
  "testcontainers-ext",
@@ -5934,15 +5930,6 @@ dependencies = [
  "zeroize",
 ]
-
-[[package]]
-name = "secrecy"
-version = "0.10.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a"
-dependencies = [
- "zeroize",
-]
 
 [[package]]
 name = "security-framework"
 version = "2.11.1"
@@ -6651,16 +6638,6 @@ version = "1.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d7beae5182595e9a8b683fa98c4317f956c9a2dec3b9716990d20023cc60c766"
-
-[[package]]
-name = "string-interner"
-version = "0.19.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23de088478b31c349c9ba67816fa55d9355232d63c3afea8bf513e31f0f1d2c0"
-dependencies = [
- "hashbrown 0.15.2",
- "serde",
-]
 
 [[package]]
 name = "string_cache"
 version = "0.8.9"
apps/recorder/.gitignore (vendored, 3 changes)
@@ -25,4 +25,5 @@ Cargo.lock
 # Dist
 node_modules
 dist/
-temp/
+temp/*
+!temp/.gitkeep
@@ -19,6 +19,8 @@ testcontainers = [
   "dep:testcontainers",
   "dep:testcontainers-modules",
   "dep:testcontainers-ext",
+  "downloader/testcontainers",
+  "testcontainers-modules/postgres",
 ]
 
 [dependencies]
@@ -108,12 +110,11 @@ apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
 apalis-sql = { version = "0.7", features = ["postgres"] }
 cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
 rand = "0.9.1"
+reqwest_cookie_store = "0.8.0"
+
 downloader = { workspace = true }
 util = { workspace = true }
 fetch = { workspace = true }
-string-interner = "0.19.0"
-secrecy = "0.10.3"
-reqwest_cookie_store = "0.8.0"
 
 [dev-dependencies]
 serial_test = "3"
@@ -142,7 +142,7 @@ impl AppConfig {
             .flat_map(|ps| {
                 allowed_extensions
                     .iter()
-                    .map(move |ext| (format!("{}{}{}", convention_prefix, ps, ext), ext))
+                    .map(move |ext| (format!("{convention_prefix}{ps}{ext}"), ext))
             })
             .collect_vec();
 
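This hunk, like several below, switches to inline format arguments (captured identifiers in format strings, stable since Rust 1.58). A self-contained illustration:

```rust
fn main() {
    let convention_prefix = "settings";
    let ps = ".development";
    let ext = ".toml";

    // Positional placeholders and captured identifiers produce the same string.
    let old_style = format!("{}{}{}", convention_prefix, ps, ext);
    let new_style = format!("{convention_prefix}{ps}{ext}");
    assert_eq!(old_style, new_style);
}
```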
@@ -4,9 +4,16 @@ use tokio::sync::OnceCell;
 
 use super::{Environment, config::AppConfig};
 use crate::{
-    auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
-    errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
-    logger::LoggerService, storage::StorageService, tasks::TaskService,
+    auth::AuthService,
+    cache::CacheService,
+    crypto::CryptoService,
+    database::DatabaseService,
+    errors::RecorderResult,
+    extract::mikan::MikanClient,
+    graphql::GraphQLService,
+    logger::LoggerService,
+    storage::{StorageService, StorageServiceTrait},
+    tasks::TaskService,
 };
 
 pub trait AppContextTrait: Send + Sync + Debug {
@@ -17,7 +24,7 @@ pub trait AppContextTrait: Send + Sync + Debug {
     fn mikan(&self) -> &MikanClient;
     fn auth(&self) -> &AuthService;
     fn graphql(&self) -> &GraphQLService;
-    fn storage(&self) -> &StorageService;
+    fn storage(&self) -> &dyn StorageServiceTrait;
     fn working_dir(&self) -> &String;
     fn environment(&self) -> &Environment;
     fn crypto(&self) -> &CryptoService;
@@ -109,7 +116,7 @@ impl AppContextTrait for AppContext {
     fn graphql(&self) -> &GraphQLService {
         &self.graphql
     }
-    fn storage(&self) -> &StorageService {
+    fn storage(&self) -> &dyn StorageServiceTrait {
         &self.storage
     }
     fn working_dir(&self) -> &String {
@@ -71,18 +71,16 @@ impl AuthServiceTrait for BasicAuthService {
             user: found_user,
             password: found_password,
         }) = AuthBasic::decode_request_parts(request)
+            && self.config.user == found_user
+            && self.config.password == found_password.unwrap_or_default()
         {
-            if self.config.user == found_user
-                && self.config.password == found_password.unwrap_or_default()
-            {
-                let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
-                    .await
-                    .map_err(|_| AuthError::FindAuthRecordError)?;
-                return Ok(AuthUserInfo {
-                    subscriber_auth,
-                    auth_type: AuthType::Basic,
-                });
-            }
+            let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
+                .await
+                .map_err(|_| AuthError::FindAuthRecordError)?;
+            return Ok(AuthUserInfo {
+                subscriber_auth,
+                auth_type: AuthType::Basic,
+            });
         }
         Err(AuthError::BasicInvalidCredentials)
     }
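This hunk, and several like it below, flattens nested `if`/`if let` blocks into let-chains. A minimal sketch of the shape, with hypothetical names; the hedged assumption is a toolchain where let-chains are available (stabilized for edition 2024, older nightlies needed `#![feature(let_chains)]`):

```rust
// Sketch of the let-chain shape adopted throughout this commit: one `if`
// combines the pattern match and the boolean guards, so the success path
// no longer sits inside a second nested block.
fn authorize(found_user: Option<&str>, expected_user: &str) -> Result<(), &'static str> {
    if let Some(user) = found_user
        && user == expected_user
    {
        return Ok(());
    }
    Err("invalid credentials")
}
```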
@@ -297,10 +297,10 @@ impl OidcAuthService {
             id_token.signing_key(id_token_verifier)?,
         )?;
 
-        if let Some(expected_access_token_hash) = claims.access_token_hash() {
-            if actual_access_token_hash != *expected_access_token_hash {
-                return Err(AuthError::OidcInvalidAccessTokenError);
-            }
+        if let Some(expected_access_token_hash) = claims.access_token_hash()
+            && actual_access_token_hash != *expected_access_token_hash
+        {
+            return Err(AuthError::OidcInvalidAccessTokenError);
         }
 
         Ok(OidcAuthCallbackPayload {
@@ -350,14 +350,14 @@ impl AuthServiceTrait for OidcAuthService {
             if !claims.has_claim(key) {
                 return Err(AuthError::OidcExtraClaimMissingError { claim: key.clone() });
             }
-            if let Some(value) = config.extra_claim_value.as_ref() {
-                if claims.get_claim(key).is_none_or(|v| &v != value) {
-                    return Err(AuthError::OidcExtraClaimMatchError {
-                        expected: value.clone(),
-                        found: claims.get_claim(key).unwrap_or_default().to_string(),
-                        key: key.clone(),
-                    });
-                }
+            if let Some(value) = config.extra_claim_value.as_ref()
+                && claims.get_claim(key).is_none_or(|v| &v != value)
+            {
+                return Err(AuthError::OidcExtraClaimMatchError {
+                    expected: value.clone(),
+                    found: claims.get_claim(key).unwrap_or_default().to_string(),
+                    key: key.clone(),
+                });
             }
         }
         let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
@@ -9,8 +9,15 @@ use sea_orm_migration::MigratorTrait;
 use super::DatabaseConfig;
 use crate::{errors::RecorderResult, migrations::Migrator};
 
+pub trait DatabaseServiceConnectionTrait {
+    fn get_database_connection(&self) -> &DatabaseConnection;
+}
+
 pub struct DatabaseService {
     connection: DatabaseConnection,
+    #[cfg(all(test, feature = "testcontainers"))]
+    pub container:
+        Option<testcontainers::ContainerAsync<testcontainers_modules::postgres::Postgres>>,
 }
 
 impl DatabaseService {
@@ -48,7 +55,11 @@ impl DatabaseService {
             Migrator::up(&db, None).await?;
         }
 
-        Ok(Self { connection: db })
+        Ok(Self {
+            connection: db,
+            #[cfg(all(test, feature = "testcontainers"))]
+            container: None,
+        })
     }
 }
 
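The new cfg-gated `container` field means every constructor must initialize it under the same cfg, as `from_config` now does. A reduced sketch of the pattern (the struct here is illustrative, not the real service):

```rust
struct Service {
    connection: String,
    // Present only in test builds with the `testcontainers` feature; it keeps
    // the container handle alive so the database is not torn down early.
    #[cfg(all(test, feature = "testcontainers"))]
    container: Option<u32>, // stand-in for ContainerAsync<Postgres>
}

impl Service {
    fn new(connection: String) -> Self {
        Self {
            connection,
            // The initializer must carry the same cfg as the field declaration.
            #[cfg(all(test, feature = "testcontainers"))]
            container: None,
        }
    }
}
```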
@@ -108,7 +108,7 @@ pub fn parse_episode_media_meta_from_torrent(
     let media_name = torrent_path
         .file_name()
         .with_whatever_context::<_, _, RecorderError>(|| {
-            format!("failed to get file name of {}", torrent_path)
+            format!("failed to get file name of {torrent_path}")
         })?;
     let mut match_obj = None;
     for rule in TORRENT_EP_PARSE_RULES.iter() {
@@ -141,7 +141,7 @@ pub fn parse_episode_media_meta_from_torrent(
         .unwrap_or(1);
     let extname = torrent_path
         .extension()
-        .map(|e| format!(".{}", e))
+        .map(|e| format!(".{e}"))
         .unwrap_or_default();
     Ok(TorrentEpisodeMediaMeta {
         fansub: fansub.map(|s| s.to_string()),
@@ -168,7 +168,7 @@ pub fn parse_episode_subtitle_meta_from_torrent(
     let media_name = torrent_path
         .file_name()
         .with_whatever_context::<_, _, RecorderError>(|| {
-            format!("failed to get file name of {}", torrent_path)
+            format!("failed to get file name of {torrent_path}")
         })?;
 
     let lang = get_subtitle_lang(media_name);
@@ -271,7 +271,7 @@ mod tests {
     pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) {
         let extname = Path::new(raw_name)
             .extension()
-            .map(|e| format!(".{}", e))
+            .map(|e| format!(".{e}"))
             .unwrap_or_default()
             .to_lowercase();
 
@@ -19,21 +19,19 @@ pub fn extract_background_image_src_from_style_attr(
         match prop {
             Property::BackgroundImage(images) => {
                 for img in images {
-                    if let CSSImage::Url(path) = img {
-                        if let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
+                    if let CSSImage::Url(path) = img
+                        && let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
                     {
                         return Some(url);
-                        }
                     }
                 }
             }
             Property::Background(backgrounds) => {
                 for bg in backgrounds {
-                    if let CSSImage::Url(path) = &bg.image {
-                        if let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
+                    if let CSSImage::Url(path) = &bg.image
+                        && let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
                     {
                         return Some(url);
-                        }
                     }
                 }
             }
@@ -1,4 +1,4 @@
-use axum::http::{header, request::Parts, HeaderName, HeaderValue, Uri};
+use axum::http::{HeaderName, HeaderValue, Uri, header, request::Parts};
 use itertools::Itertools;
 use url::Url;
 
@@ -121,11 +121,7 @@ impl ForwardedRelatedInfo {
             .and_then(|s| s.to_str().ok())
             .and_then(|s| {
                 let l = s.split(",").map(|s| s.trim().to_string()).collect_vec();
-                if l.is_empty() {
-                    None
-                } else {
-                    Some(l)
-                }
+                if l.is_empty() { None } else { Some(l) }
             });
 
         let host = headers
@@ -165,7 +161,7 @@ impl ForwardedRelatedInfo {
 
     pub fn resolved_origin(&self) -> Option<Url> {
         if let (Some(protocol), Some(host)) = (self.resolved_protocol(), self.resolved_host()) {
-            let origin = format!("{}://{}", protocol, host);
+            let origin = format!("{protocol}://{host}");
             Url::parse(&origin).ok()
         } else {
             None
@@ -3,7 +3,7 @@ use url::Url;
 pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
     let mut image_url = base_url.join(image_src).ok()?;
     if let Some((_, value)) = image_url.query_pairs().find(|(key, _)| key == "webp") {
-        image_url.set_query(Some(&format!("webp={}", value)));
+        image_url.set_query(Some(&format!("webp={value}")));
     } else {
         image_url.set_query(None);
     }
@@ -3,7 +3,6 @@ use std::{fmt::Debug, ops::Deref, sync::Arc};
 use fetch::{HttpClient, HttpClientTrait};
 use maplit::hashmap;
 use sea_orm::DbErr;
-use secrecy::SecretBox;
 use serde::{Deserialize, Serialize};
 use url::Url;
 use util::OptDynErr;
@@ -23,8 +22,6 @@ pub struct MikanCredentialForm {
     pub user_agent: String,
 }
 
-pub type MikanAuthSecrecy = SecretBox<MikanCredentialForm>;
-
 impl Debug for MikanCredentialForm {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("MikanCredentialForm")
@@ -72,7 +69,7 @@ impl MikanClient {
             Ok(false)
         } else {
             Err(RecorderError::Credential3rdError {
-                message: format!("mikan account check has login failed, status = {}", status),
+                message: format!("mikan account check has login failed, status = {status}"),
                 source: None.into(),
             })
         }
@@ -189,7 +186,7 @@ impl MikanClient {
             userpass_credential_opt = Some(userpass_credential);
         } else {
             return Err(RecorderError::from_db_record_not_found(
-                DbErr::RecordNotFound(format!("credential={} not found", credential_id)),
+                DbErr::RecordNotFound(format!("credential={credential_id} not found")),
             ));
         }
     }
@@ -1,4 +1,4 @@
-pub const MIKAN_BUCKET_KEY: &str = "mikan";
+pub const MIKAN_POSTER_BUCKET_KEY: &str = "mikan_poster";
 pub const MIKAN_UNKNOWN_FANSUB_NAME: &str = "生肉/不明字幕";
 pub const MIKAN_UNKNOWN_FANSUB_ID: &str = "202";
 pub const MIKAN_LOGIN_PAGE_PATH: &str = "/Account/Login";
@@ -1,23 +1,31 @@
-pub mod client;
-pub mod config;
-pub mod constants;
-pub mod rss_extract;
-pub mod web_extract;
+mod client;
+mod config;
+mod constants;
+mod rss;
+mod web;
 
 pub use client::{MikanClient, MikanCredentialForm};
 pub use config::MikanConfig;
-pub use constants::MIKAN_BUCKET_KEY;
-pub use rss_extract::{
-    MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanBangumiRssUrlMeta,
-    MikanRssChannel, MikanRssItem, MikanSubscriberAggregationRssChannel,
-    MikanSubscriberAggregationRssUrlMeta, build_mikan_bangumi_rss_url,
-    build_mikan_subscriber_aggregation_rss_url, extract_mikan_bangumi_id_from_rss_url,
-    extract_mikan_rss_channel_from_rss_link, extract_mikan_subscriber_aggregation_id_from_rss_link,
-};
-pub use web_extract::{
-    MikanBangumiMeta, MikanEpisodeMeta, MikanSeasonStr, build_mikan_bangumi_homepage_url,
-    build_mikan_episode_homepage_url, build_mikan_season_flow_url,
-    extract_mikan_bangumi_indices_meta_from_season_flow_fragment,
-    extract_mikan_bangumi_meta_from_bangumi_homepage,
-    extract_mikan_episode_meta_from_episode_homepage,
+pub use constants::{
+    MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH,
+    MIKAN_POSTER_BUCKET_KEY, MIKAN_UNKNOWN_FANSUB_ID, MIKAN_UNKNOWN_FANSUB_NAME,
+};
+pub use rss::{
+    MikanBangumiIndexRssChannel, MikanBangumiRssChannel, MikanBangumiRssUrlMeta, MikanRssChannel,
+    MikanRssItem, MikanSubscriberAggregationRssUrlMeta, MikanSubscriberStreamRssChannel,
+    build_mikan_bangumi_rss_url, build_mikan_subscriber_aggregation_rss_url,
+    extract_mikan_bangumi_id_from_rss_url, extract_mikan_rss_channel_from_rss_link,
+    extract_mikan_subscriber_aggregation_id_from_rss_link,
+};
+pub use web::{
+    MikanBangumiHomepageUrlMeta, MikanBangumiIndexHomepageUrlMeta, MikanBangumiIndexMeta,
+    MikanBangumiMeta, MikanBangumiPosterMeta, MikanEpisodeHomepageUrlMeta, MikanEpisodeMeta,
+    MikanSeasonFlowUrlMeta, MikanSeasonStr, build_mikan_bangumi_expand_subscribed_url,
+    build_mikan_bangumi_homepage_url, build_mikan_episode_homepage_url,
+    build_mikan_season_flow_url, extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
+    extract_mikan_episode_meta_from_episode_homepage_html,
+    scrape_mikan_bangumi_meta_from_bangumi_homepage_url,
+    scrape_mikan_bangumi_meta_list_from_season_flow_url,
+    scrape_mikan_episode_meta_from_episode_homepage_url, scrape_mikan_poster_data_from_image_url,
+    scrape_mikan_poster_meta_from_image_url,
 };
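With the submodules made private, the `mikan` module becomes the single import surface; paths such as `extract::mikan::web_extract::...` stop resolving. A sketch of what a call site's imports look like after the change (illustrative selection of items):

```rust
// All items now come through the module facade rather than its submodules.
use crate::extract::mikan::{
    MikanClient, MikanRssChannel, scrape_mikan_episode_meta_from_episode_homepage_url,
};
```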
@@ -10,10 +10,7 @@ use url::Url;
 
 use crate::{
     errors::app_error::{RecorderError, RecorderResult},
-    extract::mikan::{
-        MikanClient,
-        web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage_url},
-    },
+    extract::mikan::{MikanClient, MikanEpisodeHomepageUrlMeta},
 };
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@@ -37,7 +34,7 @@ pub struct MikanBangumiRssChannel {
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
-pub struct MikanBangumiAggregationRssChannel {
+pub struct MikanBangumiIndexRssChannel {
     pub name: String,
     pub url: Url,
     pub mikan_bangumi_id: String,
@@ -45,7 +42,7 @@ pub struct MikanBangumiAggregationRssChannel {
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
-pub struct MikanSubscriberAggregationRssChannel {
+pub struct MikanSubscriberStreamRssChannel {
     pub mikan_aggregation_id: String,
     pub url: Url,
     pub items: Vec<MikanRssItem>,
@@ -54,46 +51,40 @@ pub struct MikanSubscriberAggregationRssChannel {
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub enum MikanRssChannel {
     Bangumi(MikanBangumiRssChannel),
-    BangumiAggregation(MikanBangumiAggregationRssChannel),
-    SubscriberAggregation(MikanSubscriberAggregationRssChannel),
+    BangumiIndex(MikanBangumiIndexRssChannel),
+    SubscriberStream(MikanSubscriberStreamRssChannel),
 }
 
 impl MikanRssChannel {
     pub fn items(&self) -> &[MikanRssItem] {
         match &self {
             Self::Bangumi(MikanBangumiRssChannel { items, .. })
-            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { items, .. })
-            | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { items, .. }) => {
-                items
-            }
+            | Self::BangumiIndex(MikanBangumiIndexRssChannel { items, .. })
+            | Self::SubscriberStream(MikanSubscriberStreamRssChannel { items, .. }) => items,
         }
     }
 
     pub fn into_items(self) -> Vec<MikanRssItem> {
         match self {
             Self::Bangumi(MikanBangumiRssChannel { items, .. })
-            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { items, .. })
-            | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { items, .. }) => {
-                items
-            }
+            | Self::BangumiIndex(MikanBangumiIndexRssChannel { items, .. })
+            | Self::SubscriberStream(MikanSubscriberStreamRssChannel { items, .. }) => items,
         }
     }
 
     pub fn name(&self) -> Option<&str> {
         match &self {
             Self::Bangumi(MikanBangumiRssChannel { name, .. })
-            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { name, .. }) => {
-                Some(name.as_str())
-            }
-            Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { .. }) => None,
+            | Self::BangumiIndex(MikanBangumiIndexRssChannel { name, .. }) => Some(name.as_str()),
+            Self::SubscriberStream(MikanSubscriberStreamRssChannel { .. }) => None,
         }
     }
 
     pub fn url(&self) -> &Url {
         match &self {
             Self::Bangumi(MikanBangumiRssChannel { url, .. })
-            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { url, .. })
-            | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { url, .. }) => url,
+            | Self::BangumiIndex(MikanBangumiIndexRssChannel { url, .. })
+            | Self::SubscriberStream(MikanSubscriberStreamRssChannel { url, .. }) => url,
         }
     }
 }
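The renamed variants keep their accessor methods, so most call sites only need the new names when they match on the enum directly. A hypothetical consumer built on the accessors above:

```rust
// Hypothetical helper; `name()` and `items()` come from the impl above.
fn summarize(channel: &MikanRssChannel) -> String {
    match channel.name() {
        Some(name) => format!("{name}: {} items", channel.items().len()),
        // Subscriber streams carry no channel name.
        None => format!("subscriber stream: {} items", channel.items().len()),
    }
}
```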
@@ -133,9 +124,9 @@ impl TryFrom<rss::Item> for MikanRssItem {
             RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link"))
         })?;
 
-        let MikanEpisodeHomepage {
+        let MikanEpisodeHomepageUrlMeta {
             mikan_episode_id, ..
-        } = extract_mikan_episode_id_from_homepage_url(&homepage).ok_or_else(|| {
+        } = MikanEpisodeHomepageUrlMeta::parse_url(&homepage).ok_or_else(|| {
             RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
         })?;
 
@@ -278,17 +269,15 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
             channel_name,
             channel_link = channel_link.as_str(),
             mikan_bangumi_id,
-            "MikanBangumiAggregationRssChannel extracted"
+            "MikanBangumiIndexRssChannel extracted"
         );
 
-        Ok(MikanRssChannel::BangumiAggregation(
-            MikanBangumiAggregationRssChannel {
-                name: channel_name,
-                mikan_bangumi_id,
-                url: channel_link,
-                items,
-            },
-        ))
+        Ok(MikanRssChannel::BangumiIndex(MikanBangumiIndexRssChannel {
+            name: channel_name,
+            mikan_bangumi_id,
+            url: channel_link,
+            items,
+        }))
     }
 } else if let Some(MikanSubscriberAggregationRssUrlMeta {
     mikan_aggregation_id,
@@ -317,8 +306,8 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
             "MikanSubscriberAggregationRssChannel extracted"
         );
 
-        Ok(MikanRssChannel::SubscriberAggregation(
-            MikanSubscriberAggregationRssChannel {
+        Ok(MikanRssChannel::SubscriberStream(
+            MikanSubscriberStreamRssChannel {
                 mikan_aggregation_id,
                 items,
                 url: channel_link,
@@ -342,7 +331,7 @@ mod tests {
     use crate::{
         errors::RecorderResult,
         extract::mikan::{
-            MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
+            MikanBangumiIndexRssChannel, MikanBangumiRssChannel, MikanRssChannel,
             extract_mikan_rss_channel_from_rss_link,
         },
         test_utils::mikan::build_testing_mikan_client,
@@ -413,7 +402,7 @@ mod tests {
 
         assert_matches!(
             &channel,
-            MikanRssChannel::BangumiAggregation(MikanBangumiAggregationRssChannel { .. })
+            MikanRssChannel::BangumiIndex(MikanBangumiIndexRssChannel { .. })
         );
 
         assert_matches!(&channel.name(), Some("叹气的亡灵想隐退"));

(The next file's diff was suppressed because it is too large.)
@@ -101,19 +101,19 @@ fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RecorderRes
             raw = sub.replace_all(&raw, "").to_string();
         }
     }
-    if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw) {
-        if m.len() as f32 > (raw.len() as f32) * 0.5 {
-            let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
-                .replace(&raw, "")
-                .chars()
-                .collect_vec();
-            while let Some(ch) = raw1.pop() {
-                if ch == ']' {
-                    break;
-                }
-            }
-            raw = raw1.into_iter().collect();
-        }
+    if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw)
+        && m.len() as f32 > (raw.len() as f32) * 0.5
+    {
+        let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
+            .replace(&raw, "")
+            .chars()
+            .collect_vec();
+        while let Some(ch) = raw1.pop() {
+            if ch == ']' {
+                break;
+            }
+        }
+        raw = raw1.into_iter().collect();
     }
     Ok(raw.to_string())
 }
@@ -136,23 +136,21 @@ pub fn extract_season_from_title_body(title_body: &str) -> (String, Option<Strin
 
     for s in seasons {
         season_raw = Some(s);
-        if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s) {
-            if let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
+        if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s)
+            && let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
                 .replace_all(m.as_str(), "")
                 .parse::<i32>()
         {
             season = s;
             break;
-            }
         }
-        if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s) {
-            if let Some(s) = DIGIT_1PLUS_REG
+        if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s)
+            && let Some(s) = DIGIT_1PLUS_REG
                 .find(m.as_str())
                 .and_then(|s| s.as_str().parse::<i32>().ok())
         {
             season = s;
             break;
-            }
         }
         if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
             if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
@@ -1,5 +1,5 @@
 #![feature(
-    duration_constructors,
+    duration_constructors_lite,
     assert_matches,
     unboxed_closures,
     impl_trait_in_bindings,
@@ -77,62 +77,62 @@ impl LoggerService {
     pub async fn from_config(config: LoggerConfig) -> RecorderResult<Self> {
         let mut layers: Vec<Box<dyn Layer<Registry> + Sync + Send>> = Vec::new();
 
-        if let Some(file_appender_config) = config.file_appender.as_ref() {
-            if file_appender_config.enable {
-                let dir = file_appender_config
-                    .dir
-                    .as_ref()
-                    .map_or_else(|| "./logs".to_string(), ToString::to_string);
+        if let Some(file_appender_config) = config.file_appender.as_ref()
+            && file_appender_config.enable
+        {
+            let dir = file_appender_config
+                .dir
+                .as_ref()
+                .map_or_else(|| "./logs".to_string(), ToString::to_string);
 
             let mut rolling_builder = tracing_appender::rolling::Builder::default()
                 .max_log_files(file_appender_config.max_log_files);
 
             rolling_builder = match file_appender_config.rotation {
                 LogRotation::Minutely => {
                     rolling_builder.rotation(tracing_appender::rolling::Rotation::MINUTELY)
                 }
                 LogRotation::Hourly => {
                     rolling_builder.rotation(tracing_appender::rolling::Rotation::HOURLY)
                 }
                 LogRotation::Daily => {
                     rolling_builder.rotation(tracing_appender::rolling::Rotation::DAILY)
                 }
                 LogRotation::Never => {
                     rolling_builder.rotation(tracing_appender::rolling::Rotation::NEVER)
                 }
             };
 
             let file_appender = rolling_builder
                 .filename_prefix(
                     file_appender_config
                         .filename_prefix
                         .as_ref()
                         .map_or_else(String::new, ToString::to_string),
                 )
                 .filename_suffix(
                     file_appender_config
                         .filename_suffix
                         .as_ref()
                         .map_or_else(String::new, ToString::to_string),
                 )
                 .build(dir)?;
 
             let file_appender_layer = if file_appender_config.non_blocking {
                 let (non_blocking_file_appender, work_guard) =
                     tracing_appender::non_blocking(file_appender);
                 if NONBLOCKING_WORK_GUARD_KEEP.set(work_guard).is_err() {
                     whatever!("cannot lock for appender");
                 };
                 Self::init_layer(
                     non_blocking_file_appender,
                     &file_appender_config.format,
                     false,
                 )
             } else {
                 Self::init_layer(file_appender, &file_appender_config.format, false)
             };
             layers.push(file_appender_layer);
-            }
         }
 
         if config.enable {
@@ -11,13 +11,11 @@ use crate::{
     errors::RecorderResult,
     extract::{
         mikan::{
-            build_mikan_bangumi_homepage_url, build_mikan_bangumi_rss_url,
-            extract_mikan_bangumi_meta_from_bangumi_homepage,
-            extract_mikan_episode_meta_from_episode_homepage,
+            MikanBangumiPosterMeta, build_mikan_bangumi_homepage_url, build_mikan_bangumi_rss_url,
             extract_mikan_rss_channel_from_rss_link,
-            web_extract::{
-                MikanBangumiPosterMeta, extract_mikan_bangumi_poster_meta_from_src_with_cache,
-            },
+            scrape_mikan_bangumi_meta_from_bangumi_homepage_url,
+            scrape_mikan_episode_meta_from_episode_homepage_url,
+            scrape_mikan_poster_meta_from_image_url,
         },
         rawname::extract_season_from_title_body,
     },
@@ -272,7 +270,7 @@ impl Model {
         let mut new_metas = vec![];
         for new_rss_item in new_rss_items.iter() {
             new_metas.push(
-                extract_mikan_episode_meta_from_episode_homepage(
+                scrape_mikan_episode_meta_from_episode_homepage_url(
                     mikan_client,
                     new_rss_item.homepage.clone(),
                 )
@@ -305,7 +303,7 @@ impl Model {
             mikan_bangumi_id.to_string(),
             mikan_fansub_id.to_string(),
             async |am| -> RecorderResult<()> {
-                let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
+                let bgm_meta = scrape_mikan_bangumi_meta_from_bangumi_homepage_url(
                     mikan_client,
                     bgm_homepage.clone(),
                 )
@@ -319,20 +317,20 @@ impl Model {
                 am.season_raw = ActiveValue::Set(bgm_season_raw);
                 am.rss_link = ActiveValue::Set(Some(bgm_rss_link.to_string()));
                 am.homepage = ActiveValue::Set(Some(bgm_homepage.to_string()));
-                am.fansub = ActiveValue::Set(bgm_meta.fansub);
-                if let Some(origin_poster_src) = bgm_meta.origin_poster_src {
-                    if let MikanBangumiPosterMeta {
+                am.fansub = ActiveValue::Set(Some(bgm_meta.fansub));
+                if let Some(origin_poster_src) = bgm_meta.origin_poster_src
+                    && let MikanBangumiPosterMeta {
                         poster_src: Some(poster_src),
                         ..
-                    } = extract_mikan_bangumi_poster_meta_from_src_with_cache(
-                        ctx,
+                    } = scrape_mikan_poster_meta_from_image_url(
+                        mikan_client,
+                        ctx.storage(),
                         origin_poster_src,
                         self.subscriber_id,
                     )
                     .await?
                 {
                     am.poster_link = ActiveValue::Set(Some(poster_src))
-                    }
                 }
                 Ok(())
             },
@@ -1,14 +1,13 @@
 use std::fmt;
 
 use bytes::Bytes;
-use opendal::{Buffer, Operator, layers::LoggingLayer, services::Fs};
+use opendal::{Buffer, Operator, layers::LoggingLayer};
 use quirks_path::{Path, PathBuf};
 use serde::{Deserialize, Serialize};
 use url::Url;
-use uuid::Uuid;
 
 use super::StorageConfig;
-use crate::errors::app_error::{RecorderError, RecorderResult};
+use crate::errors::app_error::RecorderResult;
 
 #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(rename_all = "snake_case")]
@@ -44,6 +43,88 @@ impl fmt::Display for StorageStoredUrl {
     }
 }
 
+#[async_trait::async_trait]
+pub trait StorageServiceTrait: Sync {
+    fn get_operator(&self) -> RecorderResult<Operator>;
+
+    fn get_fullname(
+        &self,
+        content_category: StorageContentCategory,
+        subscriber_id: i32,
+        bucket: Option<&str>,
+        filename: &str,
+    ) -> PathBuf {
+        [
+            &subscriber_id.to_string(),
+            content_category.as_ref(),
+            bucket.unwrap_or_default(),
+            filename,
+        ]
+        .into_iter()
+        .map(Path::new)
+        .collect::<PathBuf>()
+    }
+
+    async fn store_object(
+        &self,
+        content_category: StorageContentCategory,
+        subscriber_id: i32,
+        bucket: Option<&str>,
+        filename: &str,
+        data: Bytes,
+    ) -> RecorderResult<StorageStoredUrl> {
+        let fullname = self.get_fullname(content_category, subscriber_id, bucket, filename);
+
+        let operator = self.get_operator()?;
+
+        if let Some(dirname) = fullname.parent() {
+            let dirname = dirname.join("/");
+            operator.create_dir(dirname.as_str()).await?;
+        }
+
+        operator.write(fullname.as_str(), data).await?;
+
+        Ok(StorageStoredUrl::RelativePath {
+            path: fullname.to_string(),
+        })
+    }
+
+    async fn exists_object(
+        &self,
+        content_category: StorageContentCategory,
+        subscriber_id: i32,
+        bucket: Option<&str>,
+        filename: &str,
+    ) -> RecorderResult<Option<StorageStoredUrl>> {
+        let fullname = self.get_fullname(content_category, subscriber_id, bucket, filename);
+
+        let operator = self.get_operator()?;
+
+        if operator.exists(fullname.as_str()).await? {
+            Ok(Some(StorageStoredUrl::RelativePath {
+                path: fullname.to_string(),
+            }))
+        } else {
+            Ok(None)
+        }
+    }
+
+    async fn load_object(
+        &self,
+        content_category: StorageContentCategory,
+        subscriber_id: i32,
+        bucket: Option<&str>,
+        filename: &str,
+    ) -> RecorderResult<Buffer> {
+        let fullname = self.get_fullname(content_category, subscriber_id, bucket, filename);
+
+        let operator = self.get_operator()?;
+
+        let data = operator.read(fullname.as_str()).await?;
+
+        Ok(data)
+    }
+}
+
 #[derive(Debug, Clone)]
 pub struct StorageService {
     pub data_dir: String,
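Because `store_object`, `exists_object`, and `load_object` are provided methods on the trait, an implementor only supplies `get_operator`, and callers can stay generic over `&dyn StorageServiceTrait`. A hypothetical call site (the item names come from this diff; the wiring around them is assumed):

```rust
// Hypothetical helper; works against the filesystem service and the
// in-memory test service alike, since both implement the trait.
async fn save_poster(
    storage: &dyn StorageServiceTrait,
    subscriber_id: i32,
    data: bytes::Bytes,
) -> RecorderResult<StorageStoredUrl> {
    storage
        .store_object(
            StorageContentCategory::Image,
            subscriber_id,
            Some("mikan_poster"),
            "poster.webp",
            data,
        )
        .await
}
```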
@@ -55,114 +136,15 @@ impl StorageService {
             data_dir: config.data_dir.to_string(),
         })
     }
+}
 
-    pub fn get_fs(&self) -> Fs {
-        Fs::default().root(&self.data_dir)
-    }
-
-    pub fn create_filename(extname: &str) -> String {
-        format!("{}{}", Uuid::new_v4(), extname)
-    }
-
-    pub async fn store_object(
-        &self,
-        content_category: StorageContentCategory,
-        subscriber_id: i32,
-        bucket: Option<&str>,
-        filename: &str,
-        data: Bytes,
-    ) -> Result<StorageStoredUrl, RecorderError> {
-        match content_category {
-            StorageContentCategory::Image => {
-                let fullname = [
-                    &subscriber_id.to_string(),
-                    content_category.as_ref(),
-                    bucket.unwrap_or_default(),
-                    filename,
-                ]
-                .into_iter()
-                .map(Path::new)
-                .collect::<PathBuf>();
-
-                let fs_op = Operator::new(self.get_fs())?
-                    .layer(LoggingLayer::default())
-                    .finish();
-
-                if let Some(dirname) = fullname.parent() {
-                    let dirname = dirname.join("/");
-                    fs_op.create_dir(dirname.as_str()).await?;
-                }
-
-                fs_op.write(fullname.as_str(), data).await?;
-
-                Ok(StorageStoredUrl::RelativePath {
-                    path: fullname.to_string(),
-                })
-            }
-        }
-    }
-
-    pub async fn exists_object(
-        &self,
-        content_category: StorageContentCategory,
-        subscriber_id: i32,
-        bucket: Option<&str>,
-        filename: &str,
-    ) -> Result<Option<StorageStoredUrl>, RecorderError> {
-        match content_category {
-            StorageContentCategory::Image => {
-                let fullname = [
-                    &subscriber_id.to_string(),
-                    content_category.as_ref(),
-                    bucket.unwrap_or_default(),
-                    filename,
-                ]
-                .into_iter()
-                .map(Path::new)
-                .collect::<PathBuf>();
-
-                let fs_op = Operator::new(self.get_fs())?
-                    .layer(LoggingLayer::default())
-                    .finish();
-
-                if fs_op.exists(fullname.as_str()).await? {
-                    Ok(Some(StorageStoredUrl::RelativePath {
-                        path: fullname.to_string(),
-                    }))
-                } else {
-                    Ok(None)
-                }
-            }
-        }
-    }
-
-    pub async fn load_object(
-        &self,
-        content_category: StorageContentCategory,
-        subscriber_pid: &str,
-        bucket: Option<&str>,
-        filename: &str,
-    ) -> RecorderResult<Buffer> {
-        match content_category {
-            StorageContentCategory::Image => {
-                let fullname = [
-                    subscriber_pid,
-                    content_category.as_ref(),
-                    bucket.unwrap_or_default(),
-                    filename,
-                ]
-                .into_iter()
-                .map(Path::new)
-                .collect::<PathBuf>();
-
-                let fs_op = Operator::new(self.get_fs())?
-                    .layer(LoggingLayer::default())
-                    .finish();
-
-                let data = fs_op.read(fullname.as_str()).await?;
-
-                Ok(data)
-            }
-        }
-    }
-}
+#[async_trait::async_trait]
+impl StorageServiceTrait for StorageService {
+    fn get_operator(&self) -> RecorderResult<Operator> {
+        let fs_op = Operator::new(opendal::services::Fs::default().root(&self.data_dir))?
+            .layer(LoggingLayer::default())
+            .finish();
+
+        Ok(fs_op)
+    }
+}
@@ -1,4 +1,4 @@
-pub mod client;
-pub mod config;
-pub use client::{StorageContentCategory, StorageService};
+mod client;
+mod config;
+pub use client::{StorageContentCategory, StorageService, StorageServiceTrait, StorageStoredUrl};
 pub use config::StorageConfig;
@@ -2,20 +2,14 @@ use std::{ops::Deref, sync::Arc};
 
 use apalis::prelude::*;
 use apalis_sql::postgres::PostgresStorage;
-use fetch::fetch_html;
 use serde::{Deserialize, Serialize};
-use snafu::OptionExt;
 
 use crate::{
     app::AppContextTrait,
-    errors::{RecorderError, RecorderResult},
+    errors::RecorderResult,
     extract::mikan::{
         MikanBangumiMeta, MikanSeasonStr, build_mikan_season_flow_url,
-        extract_mikan_bangumi_indices_meta_from_season_flow_fragment,
-        web_extract::{
-            MikanBangumiIndexMeta, build_mikan_bangumi_expand_subscribed_fragment_url,
-            extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
-        },
+        scrape_mikan_bangumi_meta_list_from_season_flow_url,
     },
 };
 
@@ -31,17 +25,6 @@ pub struct ExtractMikanSeasonSubscriptionTask {
     pub subscriber_id: i32,
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct ExtractMikanSeasonSubscriptionFansubsTask {
-    pub task_id: i32,
-    pub year: i32,
-    pub season_str: MikanSeasonStr,
-    pub credential_id: i32,
-    pub subscription_id: i32,
-    pub subscriber_id: i32,
-    pub bangumi_indices: Vec<MikanBangumiIndexMeta>,
-}
-
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct ExtractMikanSeasonSubscriptionTaskResult {
     pub task_id: i32,
@@ -50,97 +33,31 @@ pub struct ExtractMikanSeasonSubscriptionTaskResult {
     pub credential_id: i32,
     pub subscription_id: i32,
     pub subscriber_id: i32,
-    pub bangumi_metas: Vec<MikanBangumiMeta>,
+    pub bangumi_meta_list: Vec<MikanBangumiMeta>,
 }
 
 pub async fn extract_mikan_season_subscription(
     job: ExtractMikanSeasonSubscriptionTask,
     data: Data<Arc<dyn AppContextTrait>>,
-) -> RecorderResult<GoTo<ExtractMikanSeasonSubscriptionFansubsTask>> {
-    let ctx = data.deref();
-
-    let mikan_client = ctx
-        .mikan()
-        .fork_with_credential(ctx.clone(), Some(job.credential_id))
-        .await?;
-
-    let mikan_base_url = mikan_client.base_url().clone();
-
-    let season_flow_fragment_url =
-        build_mikan_season_flow_url(mikan_base_url.clone(), job.year, job.season_str);
-
-    let season_flow_fragment = fetch_html(&mikan_client, season_flow_fragment_url.clone()).await?;
-
-    let mut bangumi_indices = extract_mikan_bangumi_indices_meta_from_season_flow_fragment(
-        &season_flow_fragment,
-        mikan_base_url.clone(),
-    );
-
-    if bangumi_indices.is_empty() && !mikan_client.has_login().await? {
-        mikan_client.login().await?;
-        let season_flow_fragment =
-            fetch_html(&mikan_client, season_flow_fragment_url.clone()).await?;
-        bangumi_indices = extract_mikan_bangumi_indices_meta_from_season_flow_fragment(
-            &season_flow_fragment,
-            mikan_base_url.clone(),
-        );
-    }
-
-    Ok(GoTo::Next(ExtractMikanSeasonSubscriptionFansubsTask {
-        task_id: job.task_id,
-        year: job.year,
-        season_str: job.season_str,
-        credential_id: job.credential_id,
-        subscription_id: job.subscription_id,
-        subscriber_id: job.subscriber_id,
-        bangumi_indices,
-    }))
-}
-
-pub async fn extract_mikan_season_subscription_fansubs(
-    job: ExtractMikanSeasonSubscriptionFansubsTask,
-    data: Data<Arc<dyn AppContextTrait>>,
 ) -> RecorderResult<GoTo<ExtractMikanSeasonSubscriptionTaskResult>> {
     let ctx = data.deref();
 
-    let mikan_client = ctx
-        .mikan()
-        .fork_with_credential(ctx.clone(), Some(job.credential_id))
-        .await?;
-
-    let bangumi_indices = job.bangumi_indices;
-
-    let mut bangumi_metas = vec![];
-
-    let mikan_base_url = mikan_client.base_url().clone();
-
-    for bangumi_index in bangumi_indices {
-        let bangumi_title = bangumi_index.bangumi_title.clone();
-        let bangumi_expand_subscribed_fragment_url =
-            build_mikan_bangumi_expand_subscribed_fragment_url(
-                mikan_base_url.clone(),
-                &bangumi_index.mikan_bangumi_id,
-            );
-        let bangumi_expand_subscribed_fragment =
-            fetch_html(&mikan_client, bangumi_expand_subscribed_fragment_url).await?;
-
-        let bangumi_meta = extract_mikan_bangumi_meta_from_expand_subscribed_fragment(
-            bangumi_index,
-            &bangumi_expand_subscribed_fragment,
-            mikan_base_url.clone(),
-        )
-        .with_whatever_context::<_, String, RecorderError>(|| {
-            format!(
-                "failed to extract mikan bangumi fansub of title = {}",
-                bangumi_title
-            )
-        })?;
-
-        bangumi_metas.push(bangumi_meta);
-    }
+    let mikan_client = ctx.mikan();
+    let mikan_base_url = mikan_client.base_url();
+
+    let mikan_season_flow_url =
+        build_mikan_season_flow_url(mikan_base_url.clone(), job.year, job.season_str);
+
+    let bangumi_meta_list = scrape_mikan_bangumi_meta_list_from_season_flow_url(
+        mikan_client,
+        ctx.clone(),
+        mikan_season_flow_url,
+        job.credential_id,
+    )
+    .await?;
 
     Ok(GoTo::Done(ExtractMikanSeasonSubscriptionTaskResult {
-        bangumi_metas,
+        bangumi_meta_list,
         credential_id: job.credential_id,
         season_str: job.season_str,
         subscriber_id: job.subscriber_id,
@@ -157,9 +74,7 @@ pub fn register_extract_mikan_season_subscription_task(
     let pool = ctx.db().get_postgres_connection_pool().clone();
     let storage = PostgresStorage::new(pool);
 
-    let steps = StepBuilder::new()
-        .step_fn(extract_mikan_season_subscription)
-        .step_fn(extract_mikan_season_subscription_fansubs);
+    let steps = StepBuilder::new().step_fn(extract_mikan_season_subscription);
 
     let worker = WorkerBuilder::new(TASK_NAME)
         .catch_panic()
apps/recorder/src/temp/.gitkeep (new empty file)
@@ -58,7 +58,7 @@ impl AppContextTrait for UnitTestAppContext {
         self.graphql.as_ref().expect("should set graphql")
     }
 
-    fn storage(&self) -> &crate::storage::StorageService {
+    fn storage(&self) -> &dyn crate::storage::StorageServiceTrait {
         self.storage.as_ref().expect("should set storage")
     }
 
apps/recorder/src/test_utils/database.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
+use crate::{
+    database::{DatabaseConfig, DatabaseService},
+    errors::RecorderResult,
+};
+
+#[cfg(feature = "testcontainers")]
+pub async fn build_testing_database_service() -> RecorderResult<DatabaseService> {
+    use testcontainers::runners::AsyncRunner;
+    use testcontainers_ext::{ImageDefaultLogConsumerExt, ImagePruneExistedLabelExt};
+    use testcontainers_modules::postgres::Postgres;
+
+    let container = Postgres::default()
+        .with_db_name("konobangu")
+        .with_user("konobangu")
+        .with_password("konobangu")
+        .with_default_log_consumer()
+        .with_prune_existed_label(env!("CARGO_PKG_NAME"), "postgres", true, true)
+        .await?;
+
+    let container = container.start().await?;
+
+    let host_ip = container.get_host().await?;
+    let host_port = container.get_host_port_ipv4(5432).await?;
+
+    let connection_string =
+        format!("postgres://konobangu:konobangu@{host_ip}:{host_port}/konobangu");
+
+    let mut db_service = DatabaseService::from_config(DatabaseConfig {
+        uri: connection_string,
+        enable_logging: true,
+        min_connections: 1,
+        max_connections: 1,
+        connect_timeout: 5000,
+        idle_timeout: 10000,
+        acquire_timeout: None,
+        auto_migrate: true,
+    })
+    .await?;
+    db_service.container = Some(container);
+
+    Ok(db_service)
+}
+
+#[cfg(not(feature = "testcontainers"))]
+pub async fn build_testing_database_service() -> RecorderResult<DatabaseService> {
+    let db_service = DatabaseService::from_config(DatabaseConfig {
+        uri: String::from("postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"),
+        enable_logging: true,
+        min_connections: 1,
+        max_connections: 1,
+        connect_timeout: 5000,
+        idle_timeout: 10000,
+        acquire_timeout: None,
+        auto_migrate: true,
+    })
+    .await?;
+
+    Ok(db_service)
+}
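A consumer might look like this (hypothetical test, not part of this commit; names come from the new file above). With `testcontainers` enabled it spins up a disposable Postgres; otherwise it expects a local instance on port 5432:

```rust
// Hypothetical smoke test for the new helper.
#[tokio::test]
async fn database_service_smoke_test() -> crate::errors::RecorderResult<()> {
    let _db_service = crate::test_utils::database::build_testing_database_service().await?;
    // `auto_migrate: true` means the schema has already been migrated here,
    // so the service is ready for queries.
    Ok(())
}
```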
@@ -1,3 +1,5 @@
 pub mod app;
+pub mod database;
 pub mod mikan;
+pub mod storage;
 pub mod tracing;
28 apps/recorder/src/test_utils/storage.rs Normal file
@@ -0,0 +1,28 @@
+use opendal::{Operator, layers::LoggingLayer};
+
+use crate::{errors::RecorderResult, storage::StorageServiceTrait};
+
+pub struct TestingStorageService {
+    operator: Operator,
+}
+
+impl TestingStorageService {
+    pub fn new() -> RecorderResult<Self> {
+        let op = Operator::new(opendal::services::Memory::default())?
+            .layer(LoggingLayer::default())
+            .finish();
+
+        Ok(Self { operator: op })
+    }
+}
+
+#[async_trait::async_trait]
+impl StorageServiceTrait for TestingStorageService {
+    fn get_operator(&self) -> RecorderResult<Operator> {
+        Ok(self.operator.clone())
+    }
+}
+
+pub async fn build_testing_storage_service() -> RecorderResult<TestingStorageService> {
+    TestingStorageService::new()
+}
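And a round trip through the in-memory operator (hypothetical test; the object path is arbitrary):

```rust
// Hypothetical consumer of the new in-memory storage service.
#[tokio::test]
async fn storage_service_round_trip() -> crate::errors::RecorderResult<()> {
    let storage = crate::test_utils::storage::build_testing_storage_service().await?;
    let op = storage.get_operator()?;

    // Write and read back through opendal's Operator.
    op.write("posters/example.webp", vec![1u8, 2, 3]).await?;
    let content = op.read("posters/example.webp").await?;
    assert_eq!(content.to_vec(), vec![1u8, 2, 3]);
    Ok(())
}
```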
@@ -4,7 +4,7 @@ use tracing_subscriber::EnvFilter;
 pub fn try_init_testing_tracing(level: Level) {
     let crate_name = env!("CARGO_PKG_NAME");
     let level = level.as_str().to_lowercase();
-    let filter = EnvFilter::new(format!("{}[]={}", crate_name, level))
-        .add_directive(format!("mockito[]={}", level).parse().unwrap());
+    let filter = EnvFilter::new(format!("{crate_name}[]={level}"))
+        .add_directive(format!("mockito[]={level}").parse().unwrap());
     let _ = tracing_subscriber::fmt().with_env_filter(filter).try_init();
 }
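The tracing change is purely cosmetic: inline format arguments capture the same variables by name and produce identical strings:

```rust
// Equivalence check for the inline-format-args rewrite above.
fn main() {
    let crate_name = "recorder";
    let level = "debug";
    assert_eq!(
        format!("{}[]={}", crate_name, level),
        format!("{crate_name}[]={level}"),
    );
}
```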
@@ -27,10 +27,10 @@ async fn graphql_handler(
 
 // Check whether the request is an introspection query
 fn is_introspection_query(req: &async_graphql::Request) -> bool {
-    if let Some(operation) = &req.operation_name {
-        if operation.starts_with("__") {
-            return true;
-        }
+    if let Some(operation) = &req.operation_name
+        && operation.starts_with("__")
+    {
+        return true;
     }
 
     // Check whether the query body contains introspection fields
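This hunk (and the ETag one below) relies on let-chains, presumably one motivation for the nightly toolchain bump at the end of this diff. The nested and chained forms are equivalent:

```rust
// Nested form vs. let-chain form; both report whether an optional operation
// name starts with "__". The let-chain needs a recent toolchain.
fn nested(name: Option<&str>) -> bool {
    if let Some(n) = name {
        if n.starts_with("__") {
            return true;
        }
    }
    false
}

fn chained(name: Option<&str>) -> bool {
    if let Some(n) = name
        && n.starts_with("__")
    {
        return true;
    }
    false
}

fn main() {
    for case in [None, Some("query"), Some("__schema")] {
        assert_eq!(nested(case), chained(case));
    }
}
```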
@@ -97,15 +97,14 @@ where
         let res_fut = async move {
             let response = future.await?;
             let etag_from_response = response.headers().get(ETAG).cloned();
-            if let Some(etag_in_request) = ifnm {
-                if let Some(etag_from_response) = etag_from_response {
-                    if etag_in_request == etag_from_response {
-                        return Ok(Response::builder()
-                            .status(StatusCode::NOT_MODIFIED)
-                            .body(Body::empty())
-                            .unwrap());
-                    }
-                }
+            if let Some(etag_in_request) = ifnm
+                && let Some(etag_from_response) = etag_from_response
+                && etag_in_request == etag_from_response
+            {
+                return Ok(Response::builder()
+                    .status(StatusCode::NOT_MODIFIED)
+                    .body(Body::empty())
+                    .unwrap());
             }
             Ok(response)
         };
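The decision this middleware encodes, isolated as a pure function (a sketch, not the middleware's actual signature): respond 304 Not Modified only when the request's If-None-Match value matches the response's ETag.

```rust
// Sketch of the 304 decision in isolation; header plumbing omitted.
fn should_return_not_modified(
    if_none_match: Option<&str>,
    etag_from_response: Option<&str>,
) -> bool {
    if let Some(etag_in_request) = if_none_match
        && let Some(etag_from_response) = etag_from_response
        && etag_in_request == etag_from_response
    {
        return true;
    }
    false
}

fn main() {
    assert!(should_return_not_modified(Some("\"abc\""), Some("\"abc\"")));
    assert!(!should_return_not_modified(Some("\"abc\""), None));
}
```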
6 justfile
@@ -2,8 +2,7 @@ set windows-shell := ["pwsh.exe", "-c"]
 set dotenv-load := true
 
 prepare-dev-recorder:
-  cargo install sea-orm-cli
-  cargo install cargo-watch
+  cargo install sea-orm-cli watchexec cargo-llvm-cov cargo-nextest
 
 dev-webui:
   pnpm run --filter=webui dev
@@ -30,3 +29,6 @@ dev-codegen-wait:
   @until nc -z localhost 5001; do echo "Waiting for Recorder..."; sleep 1; done
   pnpm run --filter=webui codegen-watch
 
+dev-coverage:
+  cargo llvm-cov test --html
+
@@ -18,7 +18,6 @@ testcontainers = { workspace = true, optional = true }
 testcontainers-modules = { workspace = true, optional = true }
 testcontainers-ext = { workspace = true, optional = true }
 tokio = { workspace = true }
-serde_json = { workspace = true }
 async-trait = { workspace = true }
 tracing = { workspace = true }
 snafu = { workspace = true }
@@ -42,7 +41,6 @@ librqbit = { version = "8", features = ["async-bt", "watch"] }
 util = { workspace = true }
 testing-torrents = { workspace = true, optional = true }
 fetch = { workspace = true }
-dashmap = "6.1.0"
 
 
 [dev-dependencies]
@@ -1,4 +1,4 @@
 [toolchain]
-channel = "nightly-2025-02-20"
+channel = "nightly-2025-05-14"
 components = ["rustfmt", "clippy"]
 profile = "default"