refactor: switch error handle to snafu

This commit is contained in:
master 2025-04-02 00:22:52 +08:00
parent 011f62829a
commit 234441e6a3
32 changed files with 549 additions and 436 deletions

111
Cargo.lock generated
View File

@ -913,33 +913,6 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "color-eyre"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5"
dependencies = [
"backtrace",
"color-spantrace",
"eyre",
"indenter",
"once_cell",
"owo-colors",
"tracing-error",
]
[[package]]
name = "color-spantrace"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2"
dependencies = [
"once_cell",
"owo-colors",
"tracing-core",
"tracing-error",
]
[[package]] [[package]]
name = "colorchoice" name = "colorchoice"
version = "1.0.3" version = "1.0.3"
@ -1720,16 +1693,6 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
] ]
[[package]]
name = "eyre"
version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec"
dependencies = [
"indenter",
"once_cell",
]
[[package]] [[package]]
name = "fancy-regex" name = "fancy-regex"
version = "0.14.0" version = "0.14.0"
@ -2776,12 +2739,6 @@ dependencies = [
"winapi-util", "winapi-util",
] ]
[[package]]
name = "indenter"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]] [[package]]
name = "indexmap" name = "indexmap"
version = "1.9.3" version = "1.9.3"
@ -4094,12 +4051,6 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "owo-colors"
version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f"
[[package]] [[package]]
name = "p256" name = "p256"
version = "0.13.2" version = "0.13.2"
@ -4608,7 +4559,7 @@ dependencies = [
"tap", "tap",
"thiserror 2.0.12", "thiserror 2.0.12",
"tracing", "tracing",
"typed-builder", "typed-builder 0.20.1",
"url", "url",
] ]
@ -4829,7 +4780,6 @@ dependencies = [
name = "recorder" name = "recorder"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"async-graphql", "async-graphql",
"async-graphql-axum", "async-graphql-axum",
"async-stream", "async-stream",
@ -4841,7 +4791,6 @@ dependencies = [
"bytes", "bytes",
"chrono", "chrono",
"clap", "clap",
"color-eyre",
"cookie", "cookie",
"ctor", "ctor",
"dotenv", "dotenv",
@ -4891,16 +4840,17 @@ dependencies = [
"serde_with", "serde_with",
"serde_yaml", "serde_yaml",
"serial_test", "serial_test",
"snafu",
"tera", "tera",
"testcontainers", "testcontainers",
"testcontainers-modules", "testcontainers-modules",
"thiserror 2.0.12",
"tokio", "tokio",
"tower", "tower",
"tower-http", "tower-http",
"tracing", "tracing",
"tracing-appender", "tracing-appender",
"tracing-subscriber", "tracing-subscriber",
"typed-builder 0.21.0",
"url", "url",
"uuid", "uuid",
"zune-image", "zune-image",
@ -6051,6 +6001,29 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "snafu"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "223891c85e2a29c3fe8fb900c1fae5e69c2e42415e3177752e8718475efa5019"
dependencies = [
"futures-core",
"pin-project",
"snafu-derive",
]
[[package]]
name = "snafu-derive"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03c3c6b7927ffe7ecaa769ee0e3994da3b8cafc8f444578982c83ecb161af917"
dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
"syn 2.0.100",
]
[[package]] [[package]]
name = "socket2" name = "socket2"
version = "0.5.9" version = "0.5.9"
@ -6945,16 +6918,6 @@ dependencies = [
"valuable", "valuable",
] ]
[[package]]
name = "tracing-error"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b1581020d7a273442f5b45074a6a57d5757ad0a47dac0e9f0bd57b81936f3db"
dependencies = [
"tracing",
"tracing-subscriber",
]
[[package]] [[package]]
name = "tracing-log" name = "tracing-log"
version = "0.2.0" version = "0.2.0"
@ -7026,7 +6989,16 @@ version = "0.20.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd9d30e3a08026c78f246b173243cf07b3696d274debd26680773b6773c2afc7" checksum = "cd9d30e3a08026c78f246b173243cf07b3696d274debd26680773b6773c2afc7"
dependencies = [ dependencies = [
"typed-builder-macro", "typed-builder-macro 0.20.1",
]
[[package]]
name = "typed-builder"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce63bcaf7e9806c206f7d7b9c1f38e0dce8bb165a80af0898161058b19248534"
dependencies = [
"typed-builder-macro 0.21.0",
] ]
[[package]] [[package]]
@ -7040,6 +7012,17 @@ dependencies = [
"syn 2.0.100", "syn 2.0.100",
] ]
[[package]]
name = "typed-builder-macro"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60d8d828da2a3d759d3519cdf29a5bac49c77d039ad36d0782edadbf9cd5415b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.100",
]
[[package]] [[package]]
name = "typenum" name = "typenum"
version = "1.18.0" version = "1.18.0"

View File

@ -22,6 +22,7 @@ testcontainers = [
] ]
[dependencies] [dependencies]
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
serde_json = "1" serde_json = "1"
tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] } tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
@ -48,7 +49,6 @@ reqwest = { version = "0.12", default-features = false, features = [
"rustls-tls", "rustls-tls",
"cookies", "cookies",
] } ] }
thiserror = "2"
rss = "2" rss = "2"
bytes = "1.9" bytes = "1.9"
itertools = "0.14" itertools = "0.14"
@ -83,9 +83,7 @@ testcontainers = { version = "0.23.3", features = [
"reusable-containers", "reusable-containers",
], optional = true } ], optional = true }
testcontainers-modules = { version = "0.11.4", optional = true } testcontainers-modules = { version = "0.11.4", optional = true }
color-eyre = "0.6"
log = "0.4.22" log = "0.4.22"
anyhow = "1.0.95"
bollard = { version = "0.18", optional = true } bollard = { version = "0.18", optional = true }
async-graphql = { version = "7", features = [] } async-graphql = { version = "7", features = [] }
async-graphql-axum = "7" async-graphql-axum = "7"
@ -131,7 +129,8 @@ futures-util = "0.3.31"
ipnetwork = "0.21.1" ipnetwork = "0.21.1"
ctor = "0.4.0" ctor = "0.4.0"
librqbit = "8.0.0" librqbit = "8.0.0"
typed-builder = "0.21.0"
snafu = { version = "0.8.5", features = ["futures"] }
[dev-dependencies] [dev-dependencies]
serial_test = "3" serial_test = "3"
insta = { version = "1", features = ["redactions", "yaml", "filters"] } insta = { version = "1", features = ["redactions", "yaml", "filters"] }

View File

@ -1,14 +1,7 @@
use recorder::errors::RResult;
// #![allow(unused_imports)] // #![allow(unused_imports)]
// use color_eyre::eyre::Context;
// use itertools::Itertools;
// use loco_rs::{
// app::Hooks,
// boot::{BootResult, StartMode},
// environment::Environment,
// prelude::AppContext as LocoContext,
// };
// use recorder::{ // use recorder::{
// app::{App1, AppContext}, // app::{AppContext, AppContextTrait},
// errors::RResult, // errors::RResult,
// migrations::Migrator, // migrations::Migrator,
// models::{ // models::{
@ -16,7 +9,7 @@
// subscriptions::{self, SubscriptionCreateFromRssDto}, // subscriptions::{self, SubscriptionCreateFromRssDto},
// }, // },
// }; // };
// use sea_orm::ColumnTrait; // use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
// use sea_orm_migration::MigratorTrait; // use sea_orm_migration::MigratorTrait;
// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RResult<()> { // async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RResult<()> {
@ -50,19 +43,14 @@
// Ok(()) // Ok(())
// } // }
// async fn init() -> RResult<LocoContext> {
// let ctx = loco_rs::cli::playground::<App1>().await?;
// let BootResult {
// app_context: ctx, ..
// } = loco_rs::boot::run_app::<App1>(&StartMode::ServerOnly, ctx).await?;
// Migrator::up(ctx.db(), None).await?;
// Ok(ctx)
// }
// #[tokio::main] // #[tokio::main]
// async fn main() -> color_eyre::eyre::Result<()> { // async fn main() -> RResult<()> {
// pull_mikan_bangumi_rss(&ctx).await?; // pull_mikan_bangumi_rss(&ctx).await?;
// Ok(()) // Ok(())
// } // }
fn main() {}
#[tokio::main]
async fn main() -> RResult<()> {
Ok(())
}

View File

@ -1,5 +1,3 @@
use std::fmt;
use async_graphql::dynamic::ResolverContext; use async_graphql::dynamic::ResolverContext;
use axum::{ use axum::{
Json, Json,
@ -11,72 +9,86 @@ use openidconnect::{
StandardErrorResponse, core::CoreErrorResponseType, StandardErrorResponse, core::CoreErrorResponseType,
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use thiserror::Error; use snafu::prelude::*;
use crate::{fetch::HttpClientError, models::auth::AuthType}; use crate::{fetch::HttpClientError, models::auth::AuthType};
#[derive(Debug, Error)] #[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum AuthError { pub enum AuthError {
#[error("Not support auth method")] #[snafu(display("Not support auth method"))]
NotSupportAuthMethod { NotSupportAuthMethod {
supported: Vec<AuthType>, supported: Vec<AuthType>,
current: AuthType, current: AuthType,
}, },
#[error("Failed to find auth record")] #[snafu(display("Failed to find auth record"))]
FindAuthRecordError, FindAuthRecordError,
#[error("Invalid credentials")] #[snafu(display("Invalid credentials"))]
BasicInvalidCredentials, BasicInvalidCredentials,
#[error(transparent)] #[snafu(transparent)]
OidcInitError(#[from] jwt_authorizer::error::InitError), OidcInitError {
#[error("Invalid oidc provider meta client error: {0}")] source: jwt_authorizer::error::InitError,
OidcProviderHttpClientError(HttpClientError), },
#[error(transparent)] #[snafu(display("Invalid oidc provider meta client error: {source}"))]
OidcProviderMetaError(#[from] openidconnect::DiscoveryError<HttpClientError>), OidcProviderHttpClientError { source: HttpClientError },
#[error("Invalid oidc provider URL: {0}")] #[snafu(transparent)]
OidcProviderUrlError(url::ParseError), OidcProviderMetaError {
#[error("Invalid oidc redirect URI: {0}")] source: openidconnect::DiscoveryError<HttpClientError>,
OidcRequestRedirectUriError(url::ParseError), },
#[error("Oidc request session not found or expired")] #[snafu(display("Invalid oidc provider URL: {source}"))]
OidcProviderUrlError { source: url::ParseError },
#[snafu(display("Invalid oidc redirect URI: {source}"))]
OidcRequestRedirectUriError {
#[snafu(source)]
source: url::ParseError,
},
#[snafu(display("Oidc request session not found or expired"))]
OidcCallbackRecordNotFoundOrExpiredError, OidcCallbackRecordNotFoundOrExpiredError,
#[error("Invalid oidc request callback nonce")] #[snafu(display("Invalid oidc request callback nonce"))]
OidcInvalidNonceError, OidcInvalidNonceError,
#[error("Invalid oidc request callback state")] #[snafu(display("Invalid oidc request callback state"))]
OidcInvalidStateError, OidcInvalidStateError,
#[error("Invalid oidc request callback code")] #[snafu(display("Invalid oidc request callback code"))]
OidcInvalidCodeError, OidcInvalidCodeError,
#[error(transparent)] #[snafu(transparent)]
OidcCallbackTokenConfigurationError(#[from] ConfigurationError), OidcCallbackTokenConfigurationError { source: ConfigurationError },
#[error(transparent)] #[snafu(transparent)]
OidcRequestTokenError( OidcRequestTokenError {
#[from] RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>, source: RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
), },
#[error("Invalid oidc id token")] #[snafu(display("Invalid oidc id token"))]
OidcInvalidIdTokenError, OidcInvalidIdTokenError,
#[error("Invalid oidc access token")] #[snafu(display("Invalid oidc access token"))]
OidcInvalidAccessTokenError, OidcInvalidAccessTokenError,
#[error(transparent)] #[snafu(transparent)]
OidcSignatureVerificationError(#[from] SignatureVerificationError), OidcSignatureVerificationError { source: SignatureVerificationError },
#[error(transparent)] #[snafu(transparent)]
OidcSigningError(#[from] SigningError), OidcSigningError { source: SigningError },
#[error(transparent)] #[snafu(transparent)]
OidcJwtAuthError(#[from] jwt_authorizer::AuthError), OidcJwtAuthError { source: jwt_authorizer::AuthError },
#[error("Extra scopes {expected} do not match found scopes {found}")] #[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
OidcExtraScopesMatchError { expected: String, found: String }, OidcExtraScopesMatchError { expected: String, found: String },
#[error("Extra claim {key} does not match expected value {expected}, found {found}")] #[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
OidcExtraClaimMatchError { OidcExtraClaimMatchError {
key: String, key: String,
expected: String, expected: String,
found: String, found: String,
}, },
#[error("Extra claim {0} missing")] #[snafu(display("Extra claim {claim} missing"))]
OidcExtraClaimMissingError(String), OidcExtraClaimMissingError { claim: String },
#[error("Audience {0} missing")] #[snafu(display("Audience {aud} missing"))]
OidcAudMissingError(String), OidcAudMissingError { aud: String },
#[error("Subject missing")] #[snafu(display("Subject missing"))]
OidcSubMissingError, OidcSubMissingError,
#[error(fmt = display_graphql_permission_error)] #[snafu(display(
"GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
(if field.is_empty() { "" } else { "." }),
(if column.is_empty() { "" } else { "." }),
source.message
))]
GraphQLPermissionError { GraphQLPermissionError {
inner_error: async_graphql::Error, #[snafu(source(false))]
source: Box<async_graphql::Error>,
field: String, field: String,
column: String, column: String,
context_path: String, context_path: String,
@ -85,13 +97,13 @@ pub enum AuthError {
impl AuthError { impl AuthError {
pub fn from_graphql_subscribe_id_guard( pub fn from_graphql_subscribe_id_guard(
inner_error: async_graphql::Error, source: async_graphql::Error,
context: &ResolverContext, context: &ResolverContext,
field_name: &str, field_name: &str,
column_name: &str, column_name: &str,
) -> AuthError { ) -> AuthError {
AuthError::GraphQLPermissionError { AuthError::GraphQLPermissionError {
inner_error, source: Box::new(source),
field: field_name.to_string(), field: field_name.to_string(),
column: column_name.to_string(), column: column_name.to_string(),
context_path: context context_path: context
@ -103,22 +115,6 @@ impl AuthError {
} }
} }
fn display_graphql_permission_error(
inner_error: &async_graphql::Error,
field: &String,
column: &String,
context_path: &String,
formatter: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(
formatter,
"GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
(if field.is_empty() { "" } else { "." }),
(if column.is_empty() { "" } else { "." }),
inner_error.message
)
}
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AuthErrorResponse { pub struct AuthErrorResponse {
pub success: bool, pub success: bool,

View File

@ -16,11 +16,12 @@ use openidconnect::{
use sea_orm::DbErr; use sea_orm::DbErr;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use snafu::ResultExt;
use url::Url; use url::Url;
use super::{ use super::{
config::OidcAuthConfig, config::OidcAuthConfig,
errors::AuthError, errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
service::{AuthServiceTrait, AuthUserInfo}, service::{AuthServiceTrait, AuthUserInfo},
}; };
use crate::{app::AppContextTrait, errors::RError, fetch::HttpClient, models::auth::AuthType}; use crate::{app::AppContextTrait, errors::RError, fetch::HttpClient, models::auth::AuthType};
@ -125,13 +126,13 @@ impl OidcAuthService {
redirect_uri: &str, redirect_uri: &str,
) -> Result<OidcAuthRequest, AuthError> { ) -> Result<OidcAuthRequest, AuthError> {
let provider_metadata = CoreProviderMetadata::discover_async( let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).map_err(AuthError::OidcProviderUrlError)?, IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
&self.oidc_provider_client, &self.oidc_provider_client,
) )
.await?; .await?;
let redirect_uri = RedirectUrl::new(redirect_uri.to_string()) let redirect_uri =
.map_err(AuthError::OidcRequestRedirectUriError)?; RedirectUrl::new(redirect_uri.to_string()).context(OidcRequestRedirectUriSnafu)?;
let oidc_client = CoreClient::from_provider_metadata( let oidc_client = CoreClient::from_provider_metadata(
provider_metadata, provider_metadata,
@ -207,7 +208,7 @@ impl OidcAuthService {
let request_cache = self.load_authorization_request(&csrf_token).await?; let request_cache = self.load_authorization_request(&csrf_token).await?;
let provider_metadata = CoreProviderMetadata::discover_async( let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).map_err(AuthError::OidcProviderUrlError)?, IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
&self.oidc_provider_client, &self.oidc_provider_client,
) )
.await?; .await?;
@ -265,9 +266,10 @@ impl AuthServiceTrait for OidcAuthService {
request: &mut Parts, request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> { ) -> Result<AuthUserInfo, AuthError> {
let config = &self.config; let config = &self.config;
let token = self.api_authorizer.extract_token(&request.headers).ok_or( let token = self
AuthError::OidcJwtAuthError(jwt_authorizer::AuthError::MissingToken()), .api_authorizer
)?; .extract_token(&request.headers)
.ok_or(jwt_authorizer::AuthError::MissingToken())?;
let token_data = self.api_authorizer.check_auth(&token).await?; let token_data = self.api_authorizer.check_auth(&token).await?;
let claims = token_data.claims; let claims = token_data.claims;
@ -277,7 +279,9 @@ impl AuthServiceTrait for OidcAuthService {
return Err(AuthError::OidcSubMissingError); return Err(AuthError::OidcSubMissingError);
}; };
if !claims.contains_audience(&config.audience) { if !claims.contains_audience(&config.audience) {
return Err(AuthError::OidcAudMissingError(config.audience.clone())); return Err(AuthError::OidcAudMissingError {
aud: config.audience.clone(),
});
} }
if let Some(expected_scopes) = config.extra_scopes.as_ref() { if let Some(expected_scopes) = config.extra_scopes.as_ref() {
let found_scopes = claims.scopes().collect::<HashSet<_>>(); let found_scopes = claims.scopes().collect::<HashSet<_>>();
@ -293,7 +297,7 @@ impl AuthServiceTrait for OidcAuthService {
} }
if let Some(key) = config.extra_claim_key.as_ref() { if let Some(key) = config.extra_claim_key.as_ref() {
if !claims.has_claim(key) { if !claims.has_claim(key) {
return Err(AuthError::OidcExtraClaimMissingError(key.clone())); return Err(AuthError::OidcExtraClaimMissingError { claim: key.clone() });
} }
if let Some(value) = config.extra_claim_value.as_ref() { if let Some(value) = config.extra_claim_value.as_ref() {
if claims.get_claim(key).is_none_or(|v| &v != value) { if claims.get_claim(key).is_none_or(|v| &v != value) {
@ -306,9 +310,9 @@ impl AuthServiceTrait for OidcAuthService {
} }
} }
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await { let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(RError::DbError(DbErr::RecordNotFound(..))) => { Err(RError::DbError {
crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await source: DbErr::RecordNotFound(..),
} }) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
r => r, r => r,
} }
.map_err(|_| AuthError::FindAuthRecordError)?; .map_err(|_| AuthError::FindAuthRecordError)?;

View File

@ -9,11 +9,12 @@ use axum::{
use jwt_authorizer::{JwtAuthorizer, Validation}; use jwt_authorizer::{JwtAuthorizer, Validation};
use moka::future::Cache; use moka::future::Cache;
use reqwest::header::HeaderValue; use reqwest::header::HeaderValue;
use snafu::prelude::*;
use super::{ use super::{
AuthConfig, AuthConfig,
basic::BasicAuthService, basic::BasicAuthService,
errors::AuthError, errors::{AuthError, OidcProviderHttpClientSnafu},
oidc::{OidcAuthClaims, OidcAuthService}, oidc::{OidcAuthClaims, OidcAuthService},
}; };
use crate::{ use crate::{
@ -59,14 +60,14 @@ pub trait AuthServiceTrait {
} }
pub enum AuthService { pub enum AuthService {
Basic(BasicAuthService), Basic(Box<BasicAuthService>),
Oidc(OidcAuthService), Oidc(Box<OidcAuthService>),
} }
impl AuthService { impl AuthService {
pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> { pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> {
let result = match config { let result = match config {
AuthConfig::Basic(config) => AuthService::Basic(BasicAuthService { config }), AuthConfig::Basic(config) => AuthService::Basic(Box::new(BasicAuthService { config })),
AuthConfig::Oidc(config) => { AuthConfig::Oidc(config) => {
let validation = Validation::new() let validation = Validation::new()
.iss(&[&config.issuer]) .iss(&[&config.issuer])
@ -78,14 +79,14 @@ impl AuthService {
cache_preset: Some(HttpClientCachePresetConfig::RFC7234), cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
..Default::default() ..Default::default()
}) })
.map_err(AuthError::OidcProviderHttpClientError)?; .context(OidcProviderHttpClientSnafu)?;
let api_authorizer = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer) let api_authorizer = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer)
.validation(validation) .validation(validation)
.build() .build()
.await?; .await?;
AuthService::Oidc(OidcAuthService { AuthService::Oidc(Box::new(OidcAuthService {
config, config,
api_authorizer, api_authorizer,
oidc_provider_client, oidc_provider_client,
@ -93,7 +94,7 @@ impl AuthService {
.time_to_live(Duration::from_mins(5)) .time_to_live(Duration::from_mins(5))
.name("oidc_request_cache") .name("oidc_request_cache")
.build(), .build(),
}) }))
} }
}; };
Ok(result) Ok(result)

View File

@ -1,10 +1,7 @@
use color_eyre::{self, eyre}; use recorder::{app::AppBuilder, errors::RResult};
use recorder::app::AppBuilder;
#[tokio::main] #[tokio::main]
async fn main() -> eyre::Result<()> { async fn main() -> RResult<()> {
color_eyre::install()?;
let builder = AppBuilder::from_main_cli(None).await?; let builder = AppBuilder::from_main_cli(None).await?;
let app = builder.build().await?; let app = builder.build().await?;

View File

@ -1,26 +0,0 @@
use std::{borrow::Cow, time::Duration};
use thiserror::Error;
#[derive(Error, Debug)]
pub enum TorrentDownloadError {
#[error("Invalid mime (expected {expected:?}, got {found:?})")]
InvalidMime { expected: String, found: String },
#[error("Invalid url schema (expected {expected:?}, got {found:?})")]
InvalidUrlSchema { expected: String, found: String },
#[error("Invalid url parse: {0:?}")]
InvalidUrlParse(#[from] url::ParseError),
#[error("Invalid url format: {reason}")]
InvalidUrlFormat { reason: Cow<'static, str> },
#[error("QBit api error: {0:?}")]
QBitAPIError(#[from] qbit_rs::Error),
#[error("Timeout error ({action} timeouts out of {timeout:?})")]
TimeoutError {
action: Cow<'static, str>,
timeout: Duration,
},
#[error("Invalid torrent file format")]
InvalidTorrentFileFormat,
#[error("Invalid magnet file format (url = {url})")]
InvalidMagnetFormat { url: String },
}

View File

@ -1 +0,0 @@
use librqbit::TorrentMetadata;

View File

@ -10,9 +10,10 @@ use librqbit_core::{
use quirks_path::{Path, PathBuf}; use quirks_path::{Path, PathBuf};
use regex::Regex; use regex::Regex;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use snafu::prelude::*;
use url::Url; use url::Url;
use super::{QbitTorrent, QbitTorrentContent, TorrentDownloadError}; use super::{DownloaderError, QbitTorrent, QbitTorrentContent, errors::DownloadFetchSnafu};
use crate::fetch::{HttpClientTrait, fetch_bytes}; use crate::fetch::{HttpClientTrait, fetch_bytes};
pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent"; pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
@ -57,10 +58,7 @@ pub enum TorrentSource {
} }
impl TorrentSource { impl TorrentSource {
pub async fn parse<H: HttpClientTrait>( pub async fn parse<H: HttpClientTrait>(client: &H, url: &str) -> Result<Self, DownloaderError> {
client: &H,
url: &str,
) -> color_eyre::eyre::Result<Self> {
let url = Url::parse(url)?; let url = Url::parse(url)?;
let source = if url.scheme() == MAGNET_SCHEMA { let source = if url.scheme() == MAGNET_SCHEMA {
TorrentSource::from_magnet_url(url)? TorrentSource::from_magnet_url(url)?
@ -75,22 +73,25 @@ impl TorrentSource {
) { ) {
TorrentSource::from_torrent_url(url, match_hash.as_str().to_string())? TorrentSource::from_torrent_url(url, match_hash.as_str().to_string())?
} else { } else {
let contents = fetch_bytes(client, url).await?; let contents = fetch_bytes(client, url)
.await
.boxed()
.context(DownloadFetchSnafu)?;
TorrentSource::from_torrent_file(contents.to_vec(), Some(basename.to_string()))? TorrentSource::from_torrent_file(contents.to_vec(), Some(basename.to_string()))?
} }
} else { } else {
let contents = fetch_bytes(client, url).await?; let contents = fetch_bytes(client, url)
.await
.boxed()
.context(DownloadFetchSnafu)?;
TorrentSource::from_torrent_file(contents.to_vec(), None)? TorrentSource::from_torrent_file(contents.to_vec(), None)?
}; };
Ok(source) Ok(source)
} }
pub fn from_torrent_file( pub fn from_torrent_file(file: Vec<u8>, name: Option<String>) -> Result<Self, DownloaderError> {
file: Vec<u8>, let torrent: TorrentMetaV1Owned =
name: Option<String>, torrent_from_bytes(&file).map_err(|_| DownloaderError::TorrentFileFormatError)?;
) -> color_eyre::eyre::Result<Self> {
let torrent: TorrentMetaV1Owned = torrent_from_bytes(&file)
.map_err(|_| TorrentDownloadError::InvalidTorrentFileFormat)?;
let hash = torrent.info_hash.as_string(); let hash = torrent.info_hash.as_string();
Ok(TorrentSource::TorrentFile { Ok(TorrentSource::TorrentFile {
torrent: file, torrent: file,
@ -99,23 +100,21 @@ impl TorrentSource {
}) })
} }
pub fn from_magnet_url(url: Url) -> color_eyre::eyre::Result<Self> { pub fn from_magnet_url(url: Url) -> Result<Self, DownloaderError> {
if url.scheme() != MAGNET_SCHEMA { if url.scheme() != MAGNET_SCHEMA {
Err(TorrentDownloadError::InvalidUrlSchema { Err(DownloaderError::DownloadSchemaError {
found: url.scheme().to_string(), found: url.scheme().to_string(),
expected: MAGNET_SCHEMA.to_string(), expected: MAGNET_SCHEMA.to_string(),
} })
.into())
} else { } else {
let magnet = Magnet::parse(url.as_str()).map_err(|_| { let magnet =
TorrentDownloadError::InvalidMagnetFormat { Magnet::parse(url.as_str()).map_err(|_| DownloaderError::MagnetFormatError {
url: url.as_str().to_string(), url: url.as_str().to_string(),
} })?;
})?;
let hash = magnet let hash = magnet
.as_id20() .as_id20()
.ok_or_else(|| TorrentDownloadError::InvalidMagnetFormat { .ok_or_else(|| DownloaderError::MagnetFormatError {
url: url.as_str().to_string(), url: url.as_str().to_string(),
})? })?
.as_string(); .as_string();
@ -123,7 +122,7 @@ impl TorrentSource {
} }
} }
pub fn from_torrent_url(url: Url, hash: String) -> color_eyre::eyre::Result<Self> { pub fn from_torrent_url(url: Url, hash: String) -> Result<Self, DownloaderError> {
Ok(TorrentSource::TorrentUrl { url, hash }) Ok(TorrentSource::TorrentUrl { url, hash })
} }
@ -252,47 +251,47 @@ pub trait TorrentDownloader {
status_filter: TorrentFilter, status_filter: TorrentFilter,
category: Option<String>, category: Option<String>,
tag: Option<String>, tag: Option<String>,
) -> color_eyre::eyre::Result<Vec<Torrent>>; ) -> Result<Vec<Torrent>, DownloaderError>;
async fn add_torrents( async fn add_torrents(
&self, &self,
source: TorrentSource, source: TorrentSource,
save_path: String, save_path: String,
category: Option<&str>, category: Option<&str>,
) -> color_eyre::eyre::Result<()>; ) -> Result<(), DownloaderError>;
async fn delete_torrents(&self, hashes: Vec<String>) -> color_eyre::eyre::Result<()>; async fn delete_torrents(&self, hashes: Vec<String>) -> Result<(), DownloaderError>;
async fn rename_torrent_file( async fn rename_torrent_file(
&self, &self,
hash: &str, hash: &str,
old_path: &str, old_path: &str,
new_path: &str, new_path: &str,
) -> color_eyre::eyre::Result<()>; ) -> Result<(), DownloaderError>;
async fn move_torrents( async fn move_torrents(
&self, &self,
hashes: Vec<String>, hashes: Vec<String>,
new_path: &str, new_path: &str,
) -> color_eyre::eyre::Result<()>; ) -> Result<(), DownloaderError>;
async fn get_torrent_path(&self, hashes: String) -> color_eyre::eyre::Result<Option<String>>; async fn get_torrent_path(&self, hashes: String) -> Result<Option<String>, DownloaderError>;
async fn check_connection(&self) -> color_eyre::eyre::Result<()>; async fn check_connection(&self) -> Result<(), DownloaderError>;
async fn set_torrents_category( async fn set_torrents_category(
&self, &self,
hashes: Vec<String>, hashes: Vec<String>,
category: &str, category: &str,
) -> color_eyre::eyre::Result<()>; ) -> Result<(), DownloaderError>;
async fn add_torrent_tags( async fn add_torrent_tags(
&self, &self,
hashes: Vec<String>, hashes: Vec<String>,
tags: Vec<String>, tags: Vec<String>,
) -> color_eyre::eyre::Result<()>; ) -> Result<(), DownloaderError>;
async fn add_category(&self, category: &str) -> color_eyre::eyre::Result<()>; async fn add_category(&self, category: &str) -> Result<(), DownloaderError>;
fn get_save_path(&self, sub_path: &Path) -> PathBuf; fn get_save_path(&self, sub_path: &Path) -> PathBuf;
} }

View File

@ -0,0 +1,58 @@
use std::{borrow::Cow, time::Duration};
use snafu::prelude::*;
use crate::errors::OptionWhateverAsync;
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum DownloaderError {
#[snafu(display("Invalid mime (expected {expected:?}, got {found:?})"))]
DownloadMimeError { expected: String, found: String },
#[snafu(display("Invalid url schema (expected {expected:?}, got {found:?})"))]
DownloadSchemaError { expected: String, found: String },
#[snafu(transparent)]
DownloadUrlParseError { source: url::ParseError },
#[snafu(display("Invalid url format: {reason}"))]
DownloadUrlFormatError { reason: Cow<'static, str> },
#[snafu(transparent)]
QBitAPIError { source: qbit_rs::Error },
#[snafu(display("Timeout error (action = {action}, timeout = {timeout:?})"))]
DownloadTimeoutError {
action: Cow<'static, str>,
timeout: Duration,
},
#[snafu(display("Invalid torrent file format"))]
TorrentFileFormatError,
#[snafu(display("Invalid magnet format (url = {url})"))]
MagnetFormatError { url: String },
#[snafu(display("Failed to fetch: {source}"))]
DownloadFetchError {
#[snafu(source)]
source: Box<dyn snafu::Error + Send + Sync>,
},
#[snafu(display("{message}"))]
Whatever {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: OptionWhateverAsync,
},
}
impl snafu::FromString for DownloaderError {
type Source = Box<dyn std::error::Error + Send + Sync>;
fn without_source(message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::none(),
}
}
fn with_source(source: Self::Source, message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::some(source),
}
}
}

View File

@ -1,15 +1,15 @@
pub mod core; pub mod core;
pub mod error; pub mod errors;
pub mod qbit; pub mod qbit;
pub mod rqbit; pub mod rqbit;
pub mod utils; pub mod utils;
pub use core::{ pub use core::{
BITTORRENT_MIME_TYPE, MAGNET_SCHEMA, Torrent, TorrentContent, TorrentDownloader, TorrentFilter, Torrent, TorrentContent, TorrentDownloader, TorrentFilter, TorrentSource, BITTORRENT_MIME_TYPE,
TorrentSource, MAGNET_SCHEMA,
}; };
pub use error::TorrentDownloadError; pub use errors::DownloaderError;
pub use qbit::{ pub use qbit::{
QBittorrentDownloader, QBittorrentDownloaderCreation, QbitTorrent, QbitTorrentContent, QBittorrentDownloader, QBittorrentDownloaderCreation, QbitTorrent, QbitTorrentContent,
QbitTorrentFile, QbitTorrentFilter, QbitTorrentSource, QbitTorrentFile, QbitTorrentFilter, QbitTorrentSource,

View File

@ -3,7 +3,6 @@ use std::{
}; };
use async_trait::async_trait; use async_trait::async_trait;
use color_eyre::eyre::OptionExt;
use futures::future::try_join_all; use futures::future::try_join_all;
pub use qbit_rs::model::{ pub use qbit_rs::model::{
Torrent as QbitTorrent, TorrentContent as QbitTorrentContent, TorrentFile as QbitTorrentFile, Torrent as QbitTorrent, TorrentContent as QbitTorrentContent, TorrentFile as QbitTorrentFile,
@ -14,12 +13,13 @@ use qbit_rs::{
model::{AddTorrentArg, Credential, GetTorrentListArg, NonEmptyStr, SyncData}, model::{AddTorrentArg, Credential, GetTorrentListArg, NonEmptyStr, SyncData},
}; };
use quirks_path::{Path, PathBuf}; use quirks_path::{Path, PathBuf};
use snafu::prelude::*;
use tokio::time::sleep; use tokio::time::sleep;
use tracing::instrument; use tracing::instrument;
use url::Url; use url::Url;
use super::{ use super::{
Torrent, TorrentDownloadError, TorrentDownloader, TorrentFilter, TorrentSource, DownloaderError, Torrent, TorrentDownloader, TorrentFilter, TorrentSource,
utils::path_equals_as_file_url, utils::path_equals_as_file_url,
}; };
@ -83,18 +83,14 @@ pub struct QBittorrentDownloader {
impl QBittorrentDownloader { impl QBittorrentDownloader {
pub async fn from_creation( pub async fn from_creation(
creation: QBittorrentDownloaderCreation, creation: QBittorrentDownloaderCreation,
) -> Result<Self, TorrentDownloadError> { ) -> Result<Self, DownloaderError> {
let endpoint_url = let endpoint_url = Url::parse(&creation.endpoint)?;
Url::parse(&creation.endpoint).map_err(TorrentDownloadError::InvalidUrlParse)?;
let credential = Credential::new(creation.username, creation.password); let credential = Credential::new(creation.username, creation.password);
let client = Qbit::new(endpoint_url.clone(), credential); let client = Qbit::new(endpoint_url.clone(), credential);
client client.login(false).await?;
.login(false)
.await
.map_err(TorrentDownloadError::QBitAPIError)?;
client.sync(None).await?; client.sync(None).await?;
@ -108,7 +104,7 @@ impl QBittorrentDownloader {
} }
#[instrument(level = "debug")] #[instrument(level = "debug")]
pub async fn api_version(&self) -> color_eyre::eyre::Result<String> { pub async fn api_version(&self) -> Result<String, DownloaderError> {
let result = self.client.get_webapi_version().await?; let result = self.client.get_webapi_version().await?;
Ok(result) Ok(result)
} }
@ -119,11 +115,11 @@ impl QBittorrentDownloader {
fetch_data_fn: G, fetch_data_fn: G,
mut stop_wait_fn: F, mut stop_wait_fn: F,
timeout: Option<Duration>, timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()> ) -> Result<(), DownloaderError>
where where
H: FnOnce() -> E, H: FnOnce() -> E,
G: Fn(Arc<Qbit>, E) -> Fut, G: Fn(Arc<Qbit>, E) -> Fut,
Fut: Future<Output = color_eyre::eyre::Result<D>>, Fut: Future<Output = Result<D, DownloaderError>>,
F: FnMut(&D) -> bool, F: FnMut(&D) -> bool,
E: Clone, E: Clone,
D: Debug + serde::Serialize, D: Debug + serde::Serialize,
@ -142,11 +138,10 @@ impl QBittorrentDownloader {
break; break;
} else { } else {
tracing::warn!(name = "wait_until timeout", sync_data = serde_json::to_string(&sync_data).unwrap(), timeout = ?timeout); tracing::warn!(name = "wait_until timeout", sync_data = serde_json::to_string(&sync_data).unwrap(), timeout = ?timeout);
return Err(TorrentDownloadError::TimeoutError { return Err(DownloaderError::DownloadTimeoutError {
action: Cow::Borrowed("QBittorrentDownloader::wait_unit"), action: Cow::Borrowed("QBittorrentDownloader::wait_unit"),
timeout, timeout,
} });
.into());
} }
} }
let sync_data = fetch_data_fn(self.client.clone(), env.clone()).await?; let sync_data = fetch_data_fn(self.client.clone(), env.clone()).await?;
@ -164,7 +159,7 @@ impl QBittorrentDownloader {
arg: GetTorrentListArg, arg: GetTorrentListArg,
stop_wait_fn: F, stop_wait_fn: F,
timeout: Option<Duration>, timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()> ) -> Result<(), DownloaderError>
where where
F: FnMut(&Vec<QbitTorrent>) -> bool, F: FnMut(&Vec<QbitTorrent>) -> bool,
{ {
@ -172,7 +167,7 @@ impl QBittorrentDownloader {
|| arg, || arg,
async move |client: Arc<Qbit>, async move |client: Arc<Qbit>,
arg: GetTorrentListArg| arg: GetTorrentListArg|
-> color_eyre::eyre::Result<Vec<QbitTorrent>> { -> Result<Vec<QbitTorrent>, DownloaderError> {
let data = client.get_torrent_list(arg).await?; let data = client.get_torrent_list(arg).await?;
Ok(data) Ok(data)
}, },
@ -187,10 +182,10 @@ impl QBittorrentDownloader {
&self, &self,
stop_wait_fn: F, stop_wait_fn: F,
timeout: Option<Duration>, timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()> { ) -> Result<(), DownloaderError> {
self.wait_until( self.wait_until(
|| (), || (),
async move |client: Arc<Qbit>, _| -> color_eyre::eyre::Result<SyncData> { async move |client: Arc<Qbit>, _| -> Result<SyncData, DownloaderError> {
let data = client.sync(None).await?; let data = client.sync(None).await?;
Ok(data) Ok(data)
}, },
@ -206,12 +201,12 @@ impl QBittorrentDownloader {
hash: &str, hash: &str,
stop_wait_fn: F, stop_wait_fn: F,
timeout: Option<Duration>, timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()> { ) -> Result<(), DownloaderError> {
self.wait_until( self.wait_until(
|| Arc::new(hash.to_string()), || Arc::new(hash.to_string()),
async move |client: Arc<Qbit>, async move |client: Arc<Qbit>,
hash_arc: Arc<String>| hash_arc: Arc<String>|
-> color_eyre::eyre::Result<Vec<QbitTorrentContent>> { -> Result<Vec<QbitTorrentContent>, DownloaderError> {
let data = client.get_torrent_contents(hash_arc.as_str(), None).await?; let data = client.get_torrent_contents(hash_arc.as_str(), None).await?;
Ok(data) Ok(data)
}, },
@ -230,7 +225,7 @@ impl TorrentDownloader for QBittorrentDownloader {
status_filter: TorrentFilter, status_filter: TorrentFilter,
category: Option<String>, category: Option<String>,
tag: Option<String>, tag: Option<String>,
) -> color_eyre::eyre::Result<Vec<Torrent>> { ) -> Result<Vec<Torrent>, DownloaderError> {
let arg = GetTorrentListArg { let arg = GetTorrentListArg {
filter: Some(status_filter.into()), filter: Some(status_filter.into()),
category, category,
@ -259,7 +254,7 @@ impl TorrentDownloader for QBittorrentDownloader {
source: TorrentSource, source: TorrentSource,
save_path: String, save_path: String,
category: Option<&str>, category: Option<&str>,
) -> color_eyre::eyre::Result<()> { ) -> Result<(), DownloaderError> {
let arg = AddTorrentArg { let arg = AddTorrentArg {
source: source.clone().into(), source: source.clone().into(),
savepath: Some(save_path), savepath: Some(save_path),
@ -293,7 +288,7 @@ impl TorrentDownloader for QBittorrentDownloader {
} }
#[instrument(level = "debug", skip(self))] #[instrument(level = "debug", skip(self))]
async fn delete_torrents(&self, hashes: Vec<String>) -> color_eyre::eyre::Result<()> { async fn delete_torrents(&self, hashes: Vec<String>) -> Result<(), DownloaderError> {
self.client self.client
.delete_torrents(hashes.clone(), Some(true)) .delete_torrents(hashes.clone(), Some(true))
.await?; .await?;
@ -314,7 +309,7 @@ impl TorrentDownloader for QBittorrentDownloader {
hash: &str, hash: &str,
old_path: &str, old_path: &str,
new_path: &str, new_path: &str,
) -> color_eyre::eyre::Result<()> { ) -> Result<(), DownloaderError> {
self.client.rename_file(hash, old_path, new_path).await?; self.client.rename_file(hash, old_path, new_path).await?;
let new_path = self.save_path.join(new_path); let new_path = self.save_path.join(new_path);
let save_path = self.save_path.as_path(); let save_path = self.save_path.as_path();
@ -340,7 +335,7 @@ impl TorrentDownloader for QBittorrentDownloader {
&self, &self,
hashes: Vec<String>, hashes: Vec<String>,
new_path: &str, new_path: &str,
) -> color_eyre::eyre::Result<()> { ) -> Result<(), DownloaderError> {
self.client self.client
.set_torrent_location(hashes.clone(), new_path) .set_torrent_location(hashes.clone(), new_path)
.await?; .await?;
@ -364,7 +359,7 @@ impl TorrentDownloader for QBittorrentDownloader {
Ok(()) Ok(())
} }
async fn get_torrent_path(&self, hashes: String) -> color_eyre::eyre::Result<Option<String>> { async fn get_torrent_path(&self, hashes: String) -> Result<Option<String>, DownloaderError> {
let mut torrent_list = self let mut torrent_list = self
.client .client
.get_torrent_list(GetTorrentListArg { .get_torrent_list(GetTorrentListArg {
@ -372,12 +367,14 @@ impl TorrentDownloader for QBittorrentDownloader {
..Default::default() ..Default::default()
}) })
.await?; .await?;
let torrent = torrent_list.first_mut().ok_or_eyre("No torrent found")?; let torrent = torrent_list
.first_mut()
.whatever_context::<_, DownloaderError>("No torrent found")?;
Ok(torrent.save_path.take()) Ok(torrent.save_path.take())
} }
#[instrument(level = "debug", skip(self))] #[instrument(level = "debug", skip(self))]
async fn check_connection(&self) -> color_eyre::eyre::Result<()> { async fn check_connection(&self) -> Result<(), DownloaderError> {
self.api_version().await?; self.api_version().await?;
Ok(()) Ok(())
} }
@ -387,7 +384,7 @@ impl TorrentDownloader for QBittorrentDownloader {
&self, &self,
hashes: Vec<String>, hashes: Vec<String>,
category: &str, category: &str,
) -> color_eyre::eyre::Result<()> { ) -> Result<(), DownloaderError> {
let result = self let result = self
.client .client
.set_torrent_category(hashes.clone(), category) .set_torrent_category(hashes.clone(), category)
@ -420,9 +417,9 @@ impl TorrentDownloader for QBittorrentDownloader {
&self, &self,
hashes: Vec<String>, hashes: Vec<String>,
tags: Vec<String>, tags: Vec<String>,
) -> color_eyre::eyre::Result<()> { ) -> Result<(), DownloaderError> {
if tags.is_empty() { if tags.is_empty() {
return Err(color_eyre::eyre::eyre!("add torrent tags can not be empty")); whatever!("add torrent tags can not be empty");
} }
self.client self.client
.add_torrent_tags(hashes.clone(), tags.clone()) .add_torrent_tags(hashes.clone(), tags.clone())
@ -450,10 +447,11 @@ impl TorrentDownloader for QBittorrentDownloader {
} }
#[instrument(level = "debug", skip(self))] #[instrument(level = "debug", skip(self))]
async fn add_category(&self, category: &str) -> color_eyre::eyre::Result<()> { async fn add_category(&self, category: &str) -> Result<(), DownloaderError> {
self.client self.client
.add_category( .add_category(
NonEmptyStr::new(category).ok_or_eyre("category can not be empty")?, NonEmptyStr::new(category)
.whatever_context::<_, DownloaderError>("category can not be empty")?,
self.save_path.as_str(), self.save_path.as_str(),
) )
.await?; .await?;
@ -490,7 +488,7 @@ pub mod tests {
use itertools::Itertools; use itertools::Itertools;
use super::*; use super::*;
use crate::test_utils::fetch::build_testing_http_client; use crate::{errors::RResult, test_utils::fetch::build_testing_http_client};
fn get_tmp_qbit_test_folder() -> &'static str { fn get_tmp_qbit_test_folder() -> &'static str {
if cfg!(all(windows, not(feature = "testcontainers"))) { if cfg!(all(windows, not(feature = "testcontainers"))) {
@ -502,8 +500,7 @@ pub mod tests {
#[cfg(feature = "testcontainers")] #[cfg(feature = "testcontainers")]
pub async fn create_qbit_testcontainer() pub async fn create_qbit_testcontainer()
-> color_eyre::eyre::Result<testcontainers::ContainerRequest<testcontainers::GenericImage>> -> RResult<testcontainers::ContainerRequest<testcontainers::GenericImage>> {
{
use testcontainers::{ use testcontainers::{
GenericImage, GenericImage,
core::{ core::{
@ -539,7 +536,7 @@ pub mod tests {
#[cfg(feature = "testcontainers")] #[cfg(feature = "testcontainers")]
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
async fn test_qbittorrent_downloader() -> color_eyre::eyre::Result<()> { async fn test_qbittorrent_downloader() -> RResult<()> {
use testcontainers::runners::AsyncRunner; use testcontainers::runners::AsyncRunner;
use tokio::io::AsyncReadExt; use tokio::io::AsyncReadExt;
@ -590,7 +587,7 @@ pub mod tests {
async fn test_qbittorrent_downloader_impl( async fn test_qbittorrent_downloader_impl(
username: Option<&str>, username: Option<&str>,
password: Option<&str>, password: Option<&str>,
) -> color_eyre::eyre::Result<()> { ) -> RResult<()> {
let http_client = build_testing_http_client()?; let http_client = build_testing_http_client()?;
let base_save_path = Path::new(get_tmp_qbit_test_folder()); let base_save_path = Path::new(get_tmp_qbit_test_folder());
@ -625,7 +622,7 @@ pub mod tests {
.add_torrents(torrent_source, save_path.to_string(), Some("bangumi")) .add_torrents(torrent_source, save_path.to_string(), Some("bangumi"))
.await?; .await?;
let get_torrent = async || -> color_eyre::eyre::Result<Torrent> { let get_torrent = async || -> Result<Torrent, DownloaderError> {
let torrent_infos = downloader let torrent_infos = downloader
.get_torrents_info(TorrentFilter::All, None, None) .get_torrents_info(TorrentFilter::All, None, None)
.await?; .await?;
@ -633,7 +630,7 @@ pub mod tests {
let result = torrent_infos let result = torrent_infos
.into_iter() .into_iter()
.find(|t| (t.get_hash() == Some("47ee2d69e7f19af783ad896541a07b012676f858"))) .find(|t| (t.get_hash() == Some("47ee2d69e7f19af783ad896541a07b012676f858")))
.ok_or_eyre("no torrent")?; .whatever_context::<_, DownloaderError>("no torrent")?;
Ok(result) Ok(result)
}; };

View File

@ -0,0 +1 @@

View File

@ -1,4 +1,5 @@
use std::{borrow::Cow, error::Error as StdError}; pub mod whatever;
use std::borrow::Cow;
use axum::{ use axum::{
Json, Json,
@ -6,105 +7,157 @@ use axum::{
}; };
use http::StatusCode; use http::StatusCode;
use serde::{Deserialize, Deserializer, Serialize}; use serde::{Deserialize, Deserializer, Serialize};
use thiserror::Error as ThisError; use snafu::prelude::*;
pub use whatever::OptionWhateverAsync;
use crate::{auth::AuthError, fetch::HttpClientError}; use crate::{auth::AuthError, downloader::DownloaderError, fetch::HttpClientError};
#[derive(ThisError, Debug)] #[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RError { pub enum RError {
#[error(transparent)] #[snafu(transparent, context(false))]
InvalidMethodError(#[from] http::method::InvalidMethod), FancyRegexError {
#[error(transparent)] #[snafu(source(from(fancy_regex::Error, Box::new)))]
InvalidHeaderNameError(#[from] http::header::InvalidHeaderName), source: Box<fancy_regex::Error>,
#[error(transparent)] },
TracingAppenderInitError(#[from] tracing_appender::rolling::InitError), #[snafu(transparent)]
#[error(transparent)] RegexError { source: regex::Error },
GraphQLSchemaError(#[from] async_graphql::dynamic::SchemaError), #[snafu(transparent)]
#[error(transparent)] InvalidMethodError { source: http::method::InvalidMethod },
AuthError(#[from] AuthError), #[snafu(transparent)]
#[error(transparent)] InvalidHeaderNameError {
RSSError(#[from] rss::Error), source: http::header::InvalidHeaderName,
#[error(transparent)] },
DotEnvError(#[from] dotenv::Error), #[snafu(transparent)]
#[error(transparent)] TracingAppenderInitError {
TeraError(#[from] tera::Error), source: tracing_appender::rolling::InitError,
#[error(transparent)] },
IOError(#[from] std::io::Error), #[snafu(transparent)]
#[error(transparent)] GraphQLSchemaError {
DbError(#[from] sea_orm::DbErr), source: async_graphql::dynamic::SchemaError,
#[error(transparent)] },
CookieParseError(#[from] cookie::ParseError), #[snafu(transparent)]
#[error(transparent)] AuthError { source: AuthError },
FigmentError(#[from] figment::Error), #[snafu(transparent)]
#[error(transparent)] DownloadError { source: DownloaderError },
SerdeJsonError(#[from] serde_json::Error), #[snafu(transparent)]
#[error(transparent)] RSSError { source: rss::Error },
ReqwestMiddlewareError(#[from] reqwest_middleware::Error), #[snafu(transparent)]
#[error(transparent)] DotEnvError { source: dotenv::Error },
ReqwestError(#[from] reqwest::Error), #[snafu(transparent)]
#[error(transparent)] TeraError { source: tera::Error },
ParseUrlError(#[from] url::ParseError), #[snafu(transparent)]
#[error(transparent)] IOError { source: std::io::Error },
OpenDALError(#[from] opendal::Error), #[snafu(transparent)]
#[error(transparent)] DbError { source: sea_orm::DbErr },
InvalidHeaderValueError(#[from] http::header::InvalidHeaderValue), #[snafu(transparent)]
#[error(transparent)] CookieParseError { source: cookie::ParseError },
HttpClientError(#[from] HttpClientError), #[snafu(transparent, context(false))]
#[error("Extract {desc} with mime error, expected {expected}, but got {found}")] FigmentError {
#[snafu(source(from(figment::Error, Box::new)))]
source: Box<figment::Error>,
},
#[snafu(transparent)]
SerdeJsonError { source: serde_json::Error },
#[snafu(transparent)]
ReqwestMiddlewareError { source: reqwest_middleware::Error },
#[snafu(transparent)]
ReqwestError { source: reqwest::Error },
#[snafu(transparent)]
ParseUrlError { source: url::ParseError },
#[snafu(display("{source}"), context(false))]
OpenDALError {
#[snafu(source(from(opendal::Error, Box::new)))]
source: Box<opendal::Error>,
},
#[snafu(transparent)]
InvalidHeaderValueError {
source: http::header::InvalidHeaderValue,
},
#[snafu(transparent)]
HttpClientError { source: HttpClientError },
#[cfg(all(feature = "testcontainers", test))]
#[snafu(transparent)]
TestcontainersError {
source: testcontainers::TestcontainersError,
},
#[snafu(display("Extract {desc} with mime error, expected {expected}, but got {found}"))]
MimeError { MimeError {
desc: String, desc: String,
expected: String, expected: String,
found: String, found: String,
}, },
#[error("Invalid or unknown format in extracting mikan rss")] #[snafu(display("Invalid or unknown format in extracting mikan rss"))]
MikanRssInvalidFormatError, MikanRssInvalidFormatError,
#[error("Invalid field {field} in extracting mikan rss")] #[snafu(display("Invalid field {field} in extracting mikan rss"))]
MikanRssInvalidFieldError { MikanRssInvalidFieldError {
field: Cow<'static, str>, field: Cow<'static, str>,
#[source] #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: Option<Box<dyn StdError + Send + Sync>>, source: OptionWhateverAsync,
}, },
#[error("Missing field {field} in extracting mikan meta")] #[snafu(display("Missing field {field} in extracting mikan meta"))]
MikanMetaMissingFieldError { MikanMetaMissingFieldError {
field: Cow<'static, str>, field: Cow<'static, str>,
#[source] #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: Option<Box<dyn StdError + Send + Sync>>, source: OptionWhateverAsync,
}, },
#[error("Model Entity {entity} not found")] #[snafu(display("Model Entity {entity} not found"))]
ModelEntityNotFound { entity: Cow<'static, str> }, ModelEntityNotFound { entity: Cow<'static, str> },
#[error("{0}")] #[snafu(display("{message}"))]
CustomMessageStr(&'static str), Whatever {
#[error("{0}")] message: String,
CustomMessageString(String), #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: OptionWhateverAsync,
},
} }
impl RError { impl RError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self { pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError { Self::MikanMetaMissingFieldError {
field, field,
source: None, source: None.into(),
} }
} }
pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self { pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
Self::MikanRssInvalidFieldError { Self::MikanRssInvalidFieldError {
field, field,
source: None, source: None.into(),
} }
} }
pub fn from_mikan_rss_invalid_field_and_source( pub fn from_mikan_rss_invalid_field_and_source(
field: Cow<'static, str>, field: Cow<'static, str>,
source: Box<dyn StdError + Send + Sync>, source: impl std::error::Error + Send + Sync + 'static,
) -> Self { ) -> Self {
Self::MikanRssInvalidFieldError { Self::MikanRssInvalidFieldError {
field, field,
source: Some(source), source: OptionWhateverAsync::some_boxed(source),
} }
} }
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self { pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
Self::DbError(sea_orm::DbErr::RecordNotFound(detail.to_string())) Self::DbError {
source: sea_orm::DbErr::RecordNotFound(detail.to_string()),
}
}
}
impl snafu::FromString for RError {
type Source = Box<dyn std::error::Error + Send + Sync>;
fn without_source(message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::none(),
}
}
fn with_source(source: Self::Source, message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::some(source),
}
} }
} }
@ -129,7 +182,7 @@ impl<T> From<String> for StandardErrorResponse<T> {
impl IntoResponse for RError { impl IntoResponse for RError {
fn into_response(self) -> Response { fn into_response(self) -> Response {
match self { match self {
Self::AuthError(auth_error) => auth_error.into_response(), Self::AuthError { source: auth_error } => auth_error.into_response(),
err => ( err => (
StatusCode::INTERNAL_SERVER_ERROR, StatusCode::INTERNAL_SERVER_ERROR,
Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())), Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
@ -154,7 +207,10 @@ impl<'de> Deserialize<'de> for RError {
D: Deserializer<'de>, D: Deserializer<'de>,
{ {
let s = String::deserialize(deserializer)?; let s = String::deserialize(deserializer)?;
Ok(Self::CustomMessageString(s)) Ok(Self::Whatever {
message: s,
source: None.into(),
})
} }
} }

View File

@ -0,0 +1,55 @@
use std::fmt::Display;
/// A nullable, boxed, `Send + Sync` error cause. Wraps
/// `Option<Box<dyn Error + Send + Sync>>` in a newtype so the wrapper itself
/// can implement `Display` and `snafu::Error`, letting it serve as the
/// `source` field of "whatever"-style error variants where a cause may or
/// may not be present.
#[derive(Debug)]
pub struct OptionWhateverAsync(Option<Box<dyn std::error::Error + Send + Sync>>);
/// Lets the wrapper be borrowed as an erased `&dyn Error` (as snafu's source
/// machinery requires) without unwrapping the inner `Option`.
impl AsRef<dyn snafu::Error> for OptionWhateverAsync {
    fn as_ref(&self) -> &(dyn snafu::Error + 'static) {
        // The wrapper itself implements `snafu::Error`, so it is its own
        // erased representation.
        self
    }
}
impl OptionWhateverAsync {
pub fn some_boxed<E: std::error::Error + Send + Sync + 'static>(e: E) -> Self {
Self(Some(Box::new(e)))
}
pub fn some(e: Box<dyn std::error::Error + Send + Sync>) -> Self {
Self(Some(e))
}
pub fn none() -> Self {
Self(None)
}
}
impl Display for OptionWhateverAsync {
    /// Delegate to the wrapped error's `Display`; an empty wrapper renders
    /// as the literal text "None".
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if let Some(inner) = self.0.as_ref() {
            Display::fmt(inner, f)
        } else {
            f.write_str("None")
        }
    }
}
impl snafu::Error for OptionWhateverAsync {
    /// Continue the error-cause chain through the wrapper.
    ///
    /// This type's `Display` already stands in for the wrapped error, so its
    /// reported source is the wrapped error's *own* source. The previous
    /// implementation returned `None` unconditionally, which truncated the
    /// chain and silently dropped any deeper causes from snafu reports.
    /// Forwarding (rather than returning the wrapped error itself) also
    /// avoids printing the wrapped error twice in a report walk.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        self.0.as_ref().and_then(|e| e.source())
    }

    fn cause(&self) -> Option<&dyn std::error::Error> {
        self.source()
    }
}
impl From<Option<Box<dyn std::error::Error + Send + Sync>>> for OptionWhateverAsync {
fn from(value: Option<Box<dyn std::error::Error + Send + Sync>>) -> Self {
Self(value)
}
}
impl From<Box<dyn std::error::Error + Send + Sync>> for OptionWhateverAsync {
fn from(value: Box<dyn std::error::Error + Send + Sync>) -> Self {
Self::some(value)
}
}

View File

@ -8,7 +8,7 @@ use tracing::instrument;
use url::Url; use url::Url;
use crate::{ use crate::{
download::core::BITTORRENT_MIME_TYPE, downloader::core::BITTORRENT_MIME_TYPE,
errors::{RError, RResult}, errors::{RError, RResult},
extract::mikan::{ extract::mikan::{
MikanClient, MikanClient,
@ -120,10 +120,10 @@ impl TryFrom<rss::Item> for MikanRssItem {
.title .title
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?; .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?;
let enclosure_url = Url::parse(&enclosure.url).map_err(|inner| { let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
RError::from_mikan_rss_invalid_field_and_source( RError::from_mikan_rss_invalid_field_and_source(
Cow::Borrowed("enclosure_url:enclosure.link"), "enclosure_url:enclosure.link".into(),
Box::new(inner), err,
) )
})?; })?;
@ -334,12 +334,12 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
mod tests { mod tests {
use std::assert_matches::assert_matches; use std::assert_matches::assert_matches;
use color_eyre::eyre;
use rstest::rstest; use rstest::rstest;
use url::Url; use url::Url;
use crate::{ use crate::{
download::core::BITTORRENT_MIME_TYPE, downloader::core::BITTORRENT_MIME_TYPE,
errors::RResult,
extract::mikan::{ extract::mikan::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel, MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
extract_mikan_rss_channel_from_rss_link, extract_mikan_rss_channel_from_rss_link,
@ -349,7 +349,7 @@ mod tests {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_parse_mikan_rss_channel_from_rss_link() -> eyre::Result<()> { async fn test_parse_mikan_rss_channel_from_rss_link() -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await; let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_base_url = Url::parse(&mikan_server.url())?;

View File

@ -491,7 +491,6 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
#[cfg(test)] #[cfg(test)]
mod test { mod test {
#![allow(unused_variables)] #![allow(unused_variables)]
use color_eyre::eyre;
use futures::{TryStreamExt, pin_mut}; use futures::{TryStreamExt, pin_mut};
use http::header; use http::header;
use rstest::{fixture, rstest}; use rstest::{fixture, rstest};
@ -512,7 +511,7 @@ mod test {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_extract_mikan_poster_from_src(before_each: ()) -> eyre::Result<()> { async fn test_extract_mikan_poster_from_src(before_each: ()) -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await; let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?; let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -543,7 +542,7 @@ mod test {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_extract_mikan_episode(before_each: ()) -> eyre::Result<()> { async fn test_extract_mikan_episode(before_each: ()) -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await; let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?; let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -583,9 +582,7 @@ mod test {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage( async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RResult<()> {
before_each: (),
) -> eyre::Result<()> {
let mut mikan_server = mockito::Server::new_async().await; let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?; let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -622,9 +619,7 @@ mod test {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page( async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RResult<()> {
before_each: (),
) -> eyre::Result<()> {
let mut mikan_server = mockito::Server::new_async().await; let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?; let mikan_base_url = Url::parse(&mikan_server.url())?;

View File

@ -7,8 +7,12 @@ use itertools::Itertools;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use regex::Regex; use regex::Regex;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use snafu::whatever;
use crate::extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE}; use crate::{
errors::RResult,
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
};
const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2"; const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";
@ -71,10 +75,7 @@ fn replace_ch_bracket_to_en(raw_name: &str) -> String {
raw_name.replace('【', "[").replace('】', "]") raw_name.replace('【', "[").replace('】', "]")
} }
fn title_body_pre_process( fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RResult<String> {
title_body: &str,
fansub: Option<&str>,
) -> color_eyre::eyre::Result<String> {
let raw_without_fansub = if let Some(fansub) = fansub { let raw_without_fansub = if let Some(fansub) = fansub {
let fan_sub_re = Regex::new(&format!(".{fansub}."))?; let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
fan_sub_re.replace_all(title_body, "") fan_sub_re.replace_all(title_body, "")
@ -262,7 +263,7 @@ pub fn check_is_movie(title: &str) -> bool {
MOVIE_TITLE_RE.is_match(title) MOVIE_TITLE_RE.is_match(title)
} }
pub fn parse_episode_meta_from_raw_name(s: &str) -> color_eyre::eyre::Result<RawEpisodeMeta> { pub fn parse_episode_meta_from_raw_name(s: &str) -> RResult<RawEpisodeMeta> {
let raw_title = s.trim(); let raw_title = s.trim();
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title); let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
let fansub = extract_fansub(&raw_title_without_ch_brackets); let fansub = extract_fansub(&raw_title_without_ch_brackets);
@ -315,10 +316,7 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> color_eyre::eyre::Result<Raw
resolution, resolution,
}) })
} else { } else {
Err(color_eyre::eyre::eyre!( whatever!("Can not parse episode meta from raw filename {}", raw_title)
"Can not parse episode meta from raw filename {}",
raw_title
))
} }
} }

View File

@ -1,11 +1,14 @@
use color_eyre::eyre::OptionExt;
use fancy_regex::Regex as FancyRegex; use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use quirks_path::Path; use quirks_path::Path;
use regex::Regex; use regex::Regex;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};
use crate::extract::defs::SUBTITLE_LANG; use crate::{
errors::{RError, RResult},
extract::defs::SUBTITLE_LANG,
};
lazy_static! { lazy_static! {
static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = { static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
@ -101,10 +104,12 @@ pub fn parse_episode_media_meta_from_torrent(
torrent_path: &Path, torrent_path: &Path,
torrent_name: Option<&str>, torrent_name: Option<&str>,
season: Option<i32>, season: Option<i32>,
) -> color_eyre::eyre::Result<TorrentEpisodeMediaMeta> { ) -> RResult<TorrentEpisodeMediaMeta> {
let media_name = torrent_path let media_name = torrent_path
.file_name() .file_name()
.ok_or_else(|| color_eyre::eyre::eyre!("failed to get file name of {}", torrent_path))?; .with_whatever_context::<_, _, RError>(|| {
format!("failed to get file name of {}", torrent_path)
})?;
let mut match_obj = None; let mut match_obj = None;
for rule in TORRENT_EP_PARSE_RULES.iter() { for rule in TORRENT_EP_PARSE_RULES.iter() {
match_obj = if let Some(torrent_name) = torrent_name.as_ref() { match_obj = if let Some(torrent_name) = torrent_name.as_ref() {
@ -119,7 +124,7 @@ pub fn parse_episode_media_meta_from_torrent(
if let Some(match_obj) = match_obj { if let Some(match_obj) = match_obj {
let group_season_and_title = match_obj let group_season_and_title = match_obj
.get(1) .get(1)
.ok_or_else(|| color_eyre::eyre::eyre!("should have 1 group"))? .whatever_context::<_, RError>("should have 1 group")?
.as_str(); .as_str();
let (fansub, season_and_title) = get_fansub(group_season_and_title); let (fansub, season_and_title) = get_fansub(group_season_and_title);
let (title, season) = if let Some(season) = season { let (title, season) = if let Some(season) = season {
@ -130,7 +135,7 @@ pub fn parse_episode_media_meta_from_torrent(
}; };
let episode_index = match_obj let episode_index = match_obj
.get(2) .get(2)
.ok_or_eyre("should have 2 group")? .whatever_context::<_, RError>("should have 2 group")?
.as_str() .as_str()
.parse::<i32>() .parse::<i32>()
.unwrap_or(1); .unwrap_or(1);
@ -146,11 +151,11 @@ pub fn parse_episode_media_meta_from_torrent(
extname, extname,
}) })
} else { } else {
Err(color_eyre::eyre::eyre!( whatever!(
"failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'", "failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'",
torrent_path, torrent_path,
torrent_name torrent_name
)) )
} }
} }
@ -158,11 +163,13 @@ pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &Path, torrent_path: &Path,
torrent_name: Option<&str>, torrent_name: Option<&str>,
season: Option<i32>, season: Option<i32>,
) -> color_eyre::eyre::Result<TorrentEpisodeSubtitleMeta> { ) -> RResult<TorrentEpisodeSubtitleMeta> {
let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?; let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
let media_name = torrent_path let media_name = torrent_path
.file_name() .file_name()
.ok_or_else(|| color_eyre::eyre::eyre!("failed to get file name of {}", torrent_path))?; .with_whatever_context::<_, _, RError>(|| {
format!("failed to get file name of {}", torrent_path)
})?;
let lang = get_subtitle_lang(media_name); let lang = get_subtitle_lang(media_name);
@ -177,8 +184,8 @@ mod tests {
use quirks_path::Path; use quirks_path::Path;
use super::{ use super::{
parse_episode_media_meta_from_torrent, parse_episode_subtitle_meta_from_torrent, TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta, parse_episode_media_meta_from_torrent,
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta, parse_episode_subtitle_meta_from_torrent,
}; };
#[test] #[test]

View File

@ -14,7 +14,7 @@ use reqwest_retry::{RetryTransientMiddleware, policies::ExponentialBackoff};
use reqwest_tracing::TracingMiddleware; use reqwest_tracing::TracingMiddleware;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_with::serde_as; use serde_with::serde_as;
use thiserror::Error; use snafu::Snafu;
use super::HttpClientSecrecyDataTrait; use super::HttpClientSecrecyDataTrait;
use crate::fetch::get_random_mobile_ua; use crate::fetch::get_random_mobile_ua;
@ -101,14 +101,14 @@ impl CacheManager for CacheBackend {
} }
} }
#[derive(Debug, Error)] #[derive(Debug, Snafu)]
pub enum HttpClientError { pub enum HttpClientError {
#[error(transparent)] #[snafu(transparent)]
ReqwestError(#[from] reqwest::Error), ReqwestError { source: reqwest::Error },
#[error(transparent)] #[snafu(transparent)]
ReqwestMiddlewareError(#[from] reqwest_middleware::Error), ReqwestMiddlewareError { source: reqwest_middleware::Error },
#[error(transparent)] #[snafu(transparent)]
HttpError(#[from] http::Error), HttpError { source: http::Error },
} }
pub trait HttpClientTrait: Deref<Target = ClientWithMiddleware> + Debug {} pub trait HttpClientTrait: Deref<Target = ClientWithMiddleware> + Debug {}

View File

@ -2,7 +2,7 @@ use std::{future::Future, pin::Pin};
use axum::http; use axum::http;
use super::{client::HttpClientError, HttpClient}; use super::{HttpClient, client::HttpClientError};
impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient { impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
type Error = HttpClientError; type Error = HttpClientError;
@ -30,7 +30,7 @@ impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
builder builder
.body(response.bytes().await?.to_vec()) .body(response.bytes().await?.to_vec())
.map_err(HttpClientError::HttpError) .map_err(HttpClientError::from)
}) })
} }
} }

View File

@ -5,14 +5,15 @@
impl_trait_in_bindings, impl_trait_in_bindings,
iterator_try_collect, iterator_try_collect,
async_fn_traits, async_fn_traits,
let_chains let_chains,
error_generic_member_access
)] )]
pub mod app; pub mod app;
pub mod auth; pub mod auth;
pub mod cache; pub mod cache;
pub mod database; pub mod database;
pub mod download; pub mod downloader;
pub mod errors; pub mod errors;
pub mod extract; pub mod extract;
pub mod fetch; pub mod fetch;

View File

@ -1,5 +1,6 @@
use std::sync::OnceLock; use std::sync::OnceLock;
use snafu::prelude::*;
use tracing_appender::non_blocking::WorkerGuard; use tracing_appender::non_blocking::WorkerGuard;
use tracing_subscriber::{ use tracing_subscriber::{
EnvFilter, Layer, Registry, EnvFilter, Layer, Registry,
@ -9,7 +10,7 @@ use tracing_subscriber::{
}; };
use super::{LogFormat, LogLevel, LogRotation, LoggerConfig}; use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
use crate::errors::{RError, RResult}; use crate::errors::RResult;
// Function to initialize the logger based on the provided configuration // Function to initialize the logger based on the provided configuration
const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"]; const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];
@ -119,9 +120,9 @@ impl LoggerService {
let file_appender_layer = if file_appender_config.non_blocking { let file_appender_layer = if file_appender_config.non_blocking {
let (non_blocking_file_appender, work_guard) = let (non_blocking_file_appender, work_guard) =
tracing_appender::non_blocking(file_appender); tracing_appender::non_blocking(file_appender);
NONBLOCKING_WORK_GUARD_KEEP if NONBLOCKING_WORK_GUARD_KEEP.set(work_guard).is_err() {
.set(work_guard) whatever!("cannot lock for appender");
.map_err(|_| RError::CustomMessageStr("cannot lock for appender"))?; };
Self::init_layer( Self::init_layer(
non_blocking_file_appender, non_blocking_file_appender,
&file_appender_config.format, &file_appender_config.format,

View File

@ -191,7 +191,7 @@ impl ActiveModel {
pub fn from_mikan_episode_meta( pub fn from_mikan_episode_meta(
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
creation: MikanEpsiodeCreation, creation: MikanEpsiodeCreation,
) -> color_eyre::eyre::Result<Self> { ) -> RResult<Self> {
let item = creation.episode; let item = creation.episode;
let bgm = creation.bangumi; let bgm = creation.bangumi;
let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title) let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title)

View File

@ -142,7 +142,7 @@ impl StorageService {
subscriber_pid: &str, subscriber_pid: &str,
bucket: Option<&str>, bucket: Option<&str>,
filename: &str, filename: &str,
) -> color_eyre::eyre::Result<Buffer> { ) -> RResult<Buffer> {
match content_category { match content_category {
StorageContentCategory::Image => { StorageContentCategory::Image => {
let fullname = [ let fullname = [

View File

@ -1,8 +1,6 @@
use color_eyre::eyre; use crate::{errors::RResult, fetch::HttpClient};
use crate::fetch::HttpClient; pub fn build_testing_http_client() -> RResult<HttpClient> {
pub fn build_testing_http_client() -> eyre::Result<HttpClient> {
let mikan_client = HttpClient::default(); let mikan_client = HttpClient::default();
Ok(mikan_client) Ok(mikan_client)
} }

View File

@ -2,7 +2,8 @@ use async_trait::async_trait;
use bollard::container::ListContainersOptions; use bollard::container::ListContainersOptions;
use itertools::Itertools; use itertools::Itertools;
use testcontainers::{ use testcontainers::{
core::logs::consumer::logging_consumer::LoggingConsumer, ContainerRequest, Image, ImageExt, ContainerRequest, Image, ImageExt, TestcontainersError,
core::logs::consumer::logging_consumer::LoggingConsumer,
}; };
pub const TESTCONTAINERS_PROJECT_KEY: &str = "tech.enfw.testcontainers.project"; pub const TESTCONTAINERS_PROJECT_KEY: &str = "tech.enfw.testcontainers.project";
@ -19,7 +20,7 @@ where
container_label: &str, container_label: &str,
prune: bool, prune: bool,
force: bool, force: bool,
) -> color_eyre::eyre::Result<Self>; ) -> Result<Self, TestcontainersError>;
fn with_default_log_consumer(self) -> Self; fn with_default_log_consumer(self) -> Self;
} }
@ -34,7 +35,7 @@ where
container_label: &str, container_label: &str,
prune: bool, prune: bool,
force: bool, force: bool,
) -> color_eyre::eyre::Result<Self> { ) -> Result<Self, TestcontainersError> {
use std::collections::HashMap; use std::collections::HashMap;
use bollard::container::PruneContainersOptions; use bollard::container::PruneContainersOptions;
@ -61,7 +62,8 @@ where
filters: filters.clone(), filters: filters.clone(),
..Default::default() ..Default::default()
})) }))
.await?; .await
.map_err(|err| TestcontainersError::Other(Box::new(err)))?;
let remove_containers = result let remove_containers = result
.iter() .iter()
@ -74,14 +76,16 @@ where
.iter() .iter()
.map(|c| client.stop_container(c, None)), .map(|c| client.stop_container(c, None)),
) )
.await?; .await
.map_err(|error| TestcontainersError::Other(Box::new(error)))?;
tracing::warn!(name = "stop running containers", result = ?remove_containers); tracing::warn!(name = "stop running containers", result = ?remove_containers);
} }
let result = client let result = client
.prune_containers(Some(PruneContainersOptions { filters })) .prune_containers(Some(PruneContainersOptions { filters }))
.await?; .await
.map_err(|err| TestcontainersError::Other(Box::new(err)))?;
tracing::warn!(name = "prune existed containers", result = ?result); tracing::warn!(name = "prune existed containers", result = ?result);
} }

View File

@ -6,12 +6,14 @@ use axum::{
http::request::Parts, http::request::Parts,
routing::get, routing::get,
}; };
use snafu::prelude::*;
use super::core::Controller; use super::core::Controller;
use crate::{ use crate::{
app::AppContextTrait, app::AppContextTrait,
auth::{ auth::{
AuthError, AuthService, AuthServiceTrait, AuthError, AuthService, AuthServiceTrait,
errors::OidcRequestRedirectUriSnafu,
oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest}, oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest},
}, },
errors::RResult, errors::RResult,
@ -47,7 +49,8 @@ async fn oidc_auth(
if let AuthService::Oidc(oidc_auth_service) = auth_service { if let AuthService::Oidc(oidc_auth_service) = auth_service {
let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts) let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts)
.resolved_origin() .resolved_origin()
.ok_or_else(|| AuthError::OidcRequestRedirectUriError(url::ParseError::EmptyHost))?; .ok_or(url::ParseError::EmptyHost)
.context(OidcRequestRedirectUriSnafu)?;
redirect_uri.set_path(&format!("{CONTROLLER_PREFIX}/callback")); redirect_uri.set_path(&format!("{CONTROLLER_PREFIX}/callback"));

View File

@ -27,6 +27,7 @@ use axum::{
use futures_util::future::BoxFuture; use futures_util::future::BoxFuture;
use ipnetwork::IpNetwork; use ipnetwork::IpNetwork;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use snafu::ResultExt;
use tower::{Layer, Service}; use tower::{Layer, Service};
use tracing::error; use tracing::error;
@ -233,12 +234,14 @@ impl RemoteIPLayer {
proxies proxies
.iter() .iter()
.map(|proxy| { .map(|proxy| {
IpNetwork::from_str(proxy).map_err(|err| { IpNetwork::from_str(proxy)
RError::CustomMessageString(format!( .boxed()
"remote ip middleare cannot parse trusted proxy \ .with_whatever_context::<_, _, RError>(|_| {
configuration: `{proxy}`, reason: `{err}`", format!(
)) "remote ip middleare cannot parse trusted proxy \
}) configuration: `{proxy}`"
)
})
}) })
.collect::<RResult<Vec<_>>>() .collect::<RResult<Vec<_>>>()
}) })
@ -284,8 +287,7 @@ where
let xff_ip = maybe_get_forwarded(req.headers(), layer.trusted_proxies.as_ref()); let xff_ip = maybe_get_forwarded(req.headers(), layer.trusted_proxies.as_ref());
let remote_ip = xff_ip.map_or_else( let remote_ip = xff_ip.map_or_else(
|| { || {
let ip = req req.extensions()
.extensions()
.get::<ConnectInfo<SocketAddr>>() .get::<ConnectInfo<SocketAddr>>()
.map_or_else( .map_or_else(
|| { || {
@ -296,8 +298,7 @@ where
RemoteIP::None RemoteIP::None
}, },
|info| RemoteIP::Socket(info.ip()), |info| RemoteIP::Socket(info.ip()),
); )
ip
}, },
RemoteIP::Forwarded, RemoteIP::Forwarded,
); );

View File

@ -18,13 +18,10 @@ use axum::{
use futures_util::future::BoxFuture; use futures_util::future::BoxFuture;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::{self, json}; use serde_json::{self, json};
use snafu::whatever;
use tower::{Layer, Service}; use tower::{Layer, Service};
use crate::{ use crate::{app::AppContextTrait, errors::RResult, web::middleware::MiddlewareLayer};
app::AppContextTrait,
web::middleware::MiddlewareLayer,
errors::{RError, RResult},
};
static PRESETS: OnceLock<HashMap<String, BTreeMap<String, String>>> = OnceLock::new(); static PRESETS: OnceLock<HashMap<String, BTreeMap<String, String>>> = OnceLock::new();
fn get_presets() -> &'static HashMap<String, BTreeMap<String, String>> { fn get_presets() -> &'static HashMap<String, BTreeMap<String, String>> {
@ -115,7 +112,10 @@ impl MiddlewareLayer for SecureHeader {
} }
/// Applies the secure headers layer to the application router /// Applies the secure headers layer to the application router
fn apply(&self, app: Router<Arc<dyn AppContextTrait>>) -> RResult<Router<Arc<dyn AppContextTrait>>> { fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(SecureHeaders::new(self)?)) Ok(app.layer(SecureHeaders::new(self)?))
} }
} }
@ -128,17 +128,15 @@ impl SecureHeader {
let mut headers = vec![]; let mut headers = vec![];
let preset = &self.preset; let preset = &self.preset;
let p = get_presets().get(preset).ok_or_else(|| { if let Some(p) = get_presets().get(preset) {
RError::CustomMessageString(format!( Self::push_headers(&mut headers, p)?;
"secure_headers: a preset named `{preset}` does not exist" if let Some(overrides) = &self.overrides {
)) Self::push_headers(&mut headers, overrides)?;
})?; }
Ok(headers)
Self::push_headers(&mut headers, p)?; } else {
if let Some(overrides) = &self.overrides { whatever!("secure_headers: a preset named `{preset}` does not exist")
Self::push_headers(&mut headers, overrides)?;
} }
Ok(headers)
} }
/// Helper function to push headers into a mutable vector. /// Helper function to push headers into a mutable vector.