refactor: switch error handling to snafu

This commit is contained in:
master 2025-04-02 00:22:52 +08:00
parent 011f62829a
commit 234441e6a3
32 changed files with 549 additions and 436 deletions
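The commit drops the color-eyre/eyre/anyhow dependencies and rewrites the error enums with snafu. A minimal sketch of the pattern the new enums follow (illustrative names, not code from this repository): deriving Snafu generates one context selector per variant, #[snafu(transparent)] replaces the old #[error(transparent)] / #[from] pairs, and ResultExt::context attaches extra fields at the call site.

use snafu::prelude::*;

#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
enum ExampleError {
    // Delegates Display and source to the wrapped error; `?` works via the
    // generated From impl, mirroring the removed `#[error(transparent)] #[from]` variants.
    #[snafu(transparent)]
    Parse { source: url::ParseError },

    // Struct-style variant with a display message built from its fields.
    #[snafu(display("Invalid url schema (expected {expected}, got {found})"))]
    Schema { expected: String, found: String },

    // Extra context: the source is supplied by `.context(...)`, the other
    // fields by the generated `ReadSnafu` selector.
    #[snafu(display("failed to read {path}"))]
    Read { path: String, source: std::io::Error },
}

fn read(path: &str) -> Result<String, ExampleError> {
    std::fs::read_to_string(path).context(ReadSnafu { path })
}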

Cargo.lock generated
View File

@ -913,33 +913,6 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "color-eyre"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5"
dependencies = [
"backtrace",
"color-spantrace",
"eyre",
"indenter",
"once_cell",
"owo-colors",
"tracing-error",
]
[[package]]
name = "color-spantrace"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2"
dependencies = [
"once_cell",
"owo-colors",
"tracing-core",
"tracing-error",
]
[[package]]
name = "colorchoice"
version = "1.0.3"
@ -1720,16 +1693,6 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "eyre"
version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec"
dependencies = [
"indenter",
"once_cell",
]
[[package]]
name = "fancy-regex"
version = "0.14.0"
@ -2776,12 +2739,6 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "indenter"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "indexmap"
version = "1.9.3"
@ -4094,12 +4051,6 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "owo-colors"
version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f"
[[package]]
name = "p256"
version = "0.13.2"
@ -4608,7 +4559,7 @@ dependencies = [
"tap",
"thiserror 2.0.12",
"tracing",
"typed-builder",
"typed-builder 0.20.1",
"url",
]
@ -4829,7 +4780,6 @@ dependencies = [
name = "recorder"
version = "0.1.0"
dependencies = [
"anyhow",
"async-graphql",
"async-graphql-axum",
"async-stream",
@ -4841,7 +4791,6 @@ dependencies = [
"bytes",
"chrono",
"clap",
"color-eyre",
"cookie",
"ctor",
"dotenv",
@ -4891,16 +4840,17 @@ dependencies = [
"serde_with",
"serde_yaml",
"serial_test",
"snafu",
"tera",
"testcontainers",
"testcontainers-modules",
"thiserror 2.0.12",
"tokio",
"tower",
"tower-http",
"tracing",
"tracing-appender",
"tracing-subscriber",
"typed-builder 0.21.0",
"url",
"uuid",
"zune-image",
@ -6051,6 +6001,29 @@ dependencies = [
"serde",
]
[[package]]
name = "snafu"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "223891c85e2a29c3fe8fb900c1fae5e69c2e42415e3177752e8718475efa5019"
dependencies = [
"futures-core",
"pin-project",
"snafu-derive",
]
[[package]]
name = "snafu-derive"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03c3c6b7927ffe7ecaa769ee0e3994da3b8cafc8f444578982c83ecb161af917"
dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
"syn 2.0.100",
]
[[package]]
name = "socket2"
version = "0.5.9"
@ -6945,16 +6918,6 @@ dependencies = [
"valuable",
]
[[package]]
name = "tracing-error"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b1581020d7a273442f5b45074a6a57d5757ad0a47dac0e9f0bd57b81936f3db"
dependencies = [
"tracing",
"tracing-subscriber",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
@ -7026,7 +6989,16 @@ version = "0.20.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd9d30e3a08026c78f246b173243cf07b3696d274debd26680773b6773c2afc7"
dependencies = [
"typed-builder-macro",
"typed-builder-macro 0.20.1",
]
[[package]]
name = "typed-builder"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce63bcaf7e9806c206f7d7b9c1f38e0dce8bb165a80af0898161058b19248534"
dependencies = [
"typed-builder-macro 0.21.0",
]
[[package]]
@ -7040,6 +7012,17 @@ dependencies = [
"syn 2.0.100",
]
[[package]]
name = "typed-builder-macro"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60d8d828da2a3d759d3519cdf29a5bac49c77d039ad36d0782edadbf9cd5415b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.100",
]
[[package]]
name = "typenum"
version = "1.18.0"

View File

@ -22,6 +22,7 @@ testcontainers = [
]
[dependencies]
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
@ -48,7 +49,6 @@ reqwest = { version = "0.12", default-features = false, features = [
"rustls-tls",
"cookies",
] }
thiserror = "2"
rss = "2"
bytes = "1.9"
itertools = "0.14"
@ -83,9 +83,7 @@ testcontainers = { version = "0.23.3", features = [
"reusable-containers",
], optional = true }
testcontainers-modules = { version = "0.11.4", optional = true }
color-eyre = "0.6"
log = "0.4.22"
anyhow = "1.0.95"
bollard = { version = "0.18", optional = true }
async-graphql = { version = "7", features = [] }
async-graphql-axum = "7"
@ -131,7 +129,8 @@ futures-util = "0.3.31"
ipnetwork = "0.21.1"
ctor = "0.4.0"
librqbit = "8.0.0"
typed-builder = "0.21.0"
snafu = { version = "0.8.5", features = ["futures"] }
[dev-dependencies]
serial_test = "3"
insta = { version = "1", features = ["redactions", "yaml", "filters"] }

View File

@ -1,14 +1,7 @@
use recorder::errors::RResult;
// #![allow(unused_imports)]
// use color_eyre::eyre::Context;
// use itertools::Itertools;
// use loco_rs::{
// app::Hooks,
// boot::{BootResult, StartMode},
// environment::Environment,
// prelude::AppContext as LocoContext,
// };
// use recorder::{
// app::{App1, AppContext},
// app::{AppContext, AppContextTrait},
// errors::RResult,
// migrations::Migrator,
// models::{
@ -16,7 +9,7 @@
// subscriptions::{self, SubscriptionCreateFromRssDto},
// },
// };
// use sea_orm::ColumnTrait;
// use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
// use sea_orm_migration::MigratorTrait;
// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RResult<()> {
@ -50,19 +43,14 @@
// Ok(())
// }
// async fn init() -> RResult<LocoContext> {
// let ctx = loco_rs::cli::playground::<App1>().await?;
// let BootResult {
// app_context: ctx, ..
// } = loco_rs::boot::run_app::<App1>(&StartMode::ServerOnly, ctx).await?;
// Migrator::up(ctx.db(), None).await?;
// Ok(ctx)
// }
// #[tokio::main]
// async fn main() -> color_eyre::eyre::Result<()> {
// async fn main() -> RResult<()> {
// pull_mikan_bangumi_rss(&ctx).await?;
// Ok(())
// }
fn main() {}
#[tokio::main]
async fn main() -> RResult<()> {
Ok(())
}

View File

@ -1,5 +1,3 @@
use std::fmt;
use async_graphql::dynamic::ResolverContext;
use axum::{
Json,
@ -11,72 +9,86 @@ use openidconnect::{
StandardErrorResponse, core::CoreErrorResponseType,
};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use snafu::prelude::*;
use crate::{fetch::HttpClientError, models::auth::AuthType};
#[derive(Debug, Error)]
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum AuthError {
#[error("Not support auth method")]
#[snafu(display("Not support auth method"))]
NotSupportAuthMethod {
supported: Vec<AuthType>,
current: AuthType,
},
#[error("Failed to find auth record")]
#[snafu(display("Failed to find auth record"))]
FindAuthRecordError,
#[error("Invalid credentials")]
#[snafu(display("Invalid credentials"))]
BasicInvalidCredentials,
#[error(transparent)]
OidcInitError(#[from] jwt_authorizer::error::InitError),
#[error("Invalid oidc provider meta client error: {0}")]
OidcProviderHttpClientError(HttpClientError),
#[error(transparent)]
OidcProviderMetaError(#[from] openidconnect::DiscoveryError<HttpClientError>),
#[error("Invalid oidc provider URL: {0}")]
OidcProviderUrlError(url::ParseError),
#[error("Invalid oidc redirect URI: {0}")]
OidcRequestRedirectUriError(url::ParseError),
#[error("Oidc request session not found or expired")]
#[snafu(transparent)]
OidcInitError {
source: jwt_authorizer::error::InitError,
},
#[snafu(display("Invalid oidc provider meta client error: {source}"))]
OidcProviderHttpClientError { source: HttpClientError },
#[snafu(transparent)]
OidcProviderMetaError {
source: openidconnect::DiscoveryError<HttpClientError>,
},
#[snafu(display("Invalid oidc provider URL: {source}"))]
OidcProviderUrlError { source: url::ParseError },
#[snafu(display("Invalid oidc redirect URI: {source}"))]
OidcRequestRedirectUriError {
#[snafu(source)]
source: url::ParseError,
},
#[snafu(display("Oidc request session not found or expired"))]
OidcCallbackRecordNotFoundOrExpiredError,
#[error("Invalid oidc request callback nonce")]
#[snafu(display("Invalid oidc request callback nonce"))]
OidcInvalidNonceError,
#[error("Invalid oidc request callback state")]
#[snafu(display("Invalid oidc request callback state"))]
OidcInvalidStateError,
#[error("Invalid oidc request callback code")]
#[snafu(display("Invalid oidc request callback code"))]
OidcInvalidCodeError,
#[error(transparent)]
OidcCallbackTokenConfigurationError(#[from] ConfigurationError),
#[error(transparent)]
OidcRequestTokenError(
#[from] RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
),
#[error("Invalid oidc id token")]
#[snafu(transparent)]
OidcCallbackTokenConfigurationError { source: ConfigurationError },
#[snafu(transparent)]
OidcRequestTokenError {
source: RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
},
#[snafu(display("Invalid oidc id token"))]
OidcInvalidIdTokenError,
#[error("Invalid oidc access token")]
#[snafu(display("Invalid oidc access token"))]
OidcInvalidAccessTokenError,
#[error(transparent)]
OidcSignatureVerificationError(#[from] SignatureVerificationError),
#[error(transparent)]
OidcSigningError(#[from] SigningError),
#[error(transparent)]
OidcJwtAuthError(#[from] jwt_authorizer::AuthError),
#[error("Extra scopes {expected} do not match found scopes {found}")]
#[snafu(transparent)]
OidcSignatureVerificationError { source: SignatureVerificationError },
#[snafu(transparent)]
OidcSigningError { source: SigningError },
#[snafu(transparent)]
OidcJwtAuthError { source: jwt_authorizer::AuthError },
#[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
OidcExtraScopesMatchError { expected: String, found: String },
#[error("Extra claim {key} does not match expected value {expected}, found {found}")]
#[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
OidcExtraClaimMatchError {
key: String,
expected: String,
found: String,
},
#[error("Extra claim {0} missing")]
OidcExtraClaimMissingError(String),
#[error("Audience {0} missing")]
OidcAudMissingError(String),
#[error("Subject missing")]
#[snafu(display("Extra claim {claim} missing"))]
OidcExtraClaimMissingError { claim: String },
#[snafu(display("Audience {aud} missing"))]
OidcAudMissingError { aud: String },
#[snafu(display("Subject missing"))]
OidcSubMissingError,
#[error(fmt = display_graphql_permission_error)]
#[snafu(display(
"GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
(if field.is_empty() { "" } else { "." }),
(if column.is_empty() { "" } else { "." }),
source.message
))]
GraphQLPermissionError {
inner_error: async_graphql::Error,
#[snafu(source(false))]
source: Box<async_graphql::Error>,
field: String,
column: String,
context_path: String,
@ -85,13 +97,13 @@ pub enum AuthError {
impl AuthError {
pub fn from_graphql_subscribe_id_guard(
inner_error: async_graphql::Error,
source: async_graphql::Error,
context: &ResolverContext,
field_name: &str,
column_name: &str,
) -> AuthError {
AuthError::GraphQLPermissionError {
inner_error,
source: Box::new(source),
field: field_name.to_string(),
column: column_name.to_string(),
context_path: context
@ -103,22 +115,6 @@ impl AuthError {
}
}
fn display_graphql_permission_error(
inner_error: &async_graphql::Error,
field: &String,
column: &String,
context_path: &String,
formatter: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(
formatter,
"GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
(if field.is_empty() { "" } else { "." }),
(if column.is_empty() { "" } else { "." }),
inner_error.message
)
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AuthErrorResponse {
pub success: bool,

View File

@ -16,11 +16,12 @@ use openidconnect::{
use sea_orm::DbErr;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use snafu::ResultExt;
use url::Url;
use super::{
config::OidcAuthConfig,
errors::AuthError,
errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{app::AppContextTrait, errors::RError, fetch::HttpClient, models::auth::AuthType};
@ -125,13 +126,13 @@ impl OidcAuthService {
redirect_uri: &str,
) -> Result<OidcAuthRequest, AuthError> {
let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).map_err(AuthError::OidcProviderUrlError)?,
IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
&self.oidc_provider_client,
)
.await?;
let redirect_uri = RedirectUrl::new(redirect_uri.to_string())
.map_err(AuthError::OidcRequestRedirectUriError)?;
let redirect_uri =
RedirectUrl::new(redirect_uri.to_string()).context(OidcRequestRedirectUriSnafu)?;
let oidc_client = CoreClient::from_provider_metadata(
provider_metadata,
@ -207,7 +208,7 @@ impl OidcAuthService {
let request_cache = self.load_authorization_request(&csrf_token).await?;
let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).map_err(AuthError::OidcProviderUrlError)?,
IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
&self.oidc_provider_client,
)
.await?;
@ -265,9 +266,10 @@ impl AuthServiceTrait for OidcAuthService {
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> {
let config = &self.config;
let token = self.api_authorizer.extract_token(&request.headers).ok_or(
AuthError::OidcJwtAuthError(jwt_authorizer::AuthError::MissingToken()),
)?;
let token = self
.api_authorizer
.extract_token(&request.headers)
.ok_or(jwt_authorizer::AuthError::MissingToken())?;
let token_data = self.api_authorizer.check_auth(&token).await?;
let claims = token_data.claims;
@ -277,7 +279,9 @@ impl AuthServiceTrait for OidcAuthService {
return Err(AuthError::OidcSubMissingError);
};
if !claims.contains_audience(&config.audience) {
return Err(AuthError::OidcAudMissingError(config.audience.clone()));
return Err(AuthError::OidcAudMissingError {
aud: config.audience.clone(),
});
}
if let Some(expected_scopes) = config.extra_scopes.as_ref() {
let found_scopes = claims.scopes().collect::<HashSet<_>>();
@ -293,7 +297,7 @@ impl AuthServiceTrait for OidcAuthService {
}
if let Some(key) = config.extra_claim_key.as_ref() {
if !claims.has_claim(key) {
return Err(AuthError::OidcExtraClaimMissingError(key.clone()));
return Err(AuthError::OidcExtraClaimMissingError { claim: key.clone() });
}
if let Some(value) = config.extra_claim_value.as_ref() {
if claims.get_claim(key).is_none_or(|v| &v != value) {
@ -306,9 +310,9 @@ impl AuthServiceTrait for OidcAuthService {
}
}
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(RError::DbError(DbErr::RecordNotFound(..))) => {
crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
}
Err(RError::DbError {
source: DbErr::RecordNotFound(..),
}) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
r => r,
}
.map_err(|_| AuthError::FindAuthRecordError)?;

View File

@ -9,11 +9,12 @@ use axum::{
use jwt_authorizer::{JwtAuthorizer, Validation};
use moka::future::Cache;
use reqwest::header::HeaderValue;
use snafu::prelude::*;
use super::{
AuthConfig,
basic::BasicAuthService,
errors::AuthError,
errors::{AuthError, OidcProviderHttpClientSnafu},
oidc::{OidcAuthClaims, OidcAuthService},
};
use crate::{
@ -59,14 +60,14 @@ pub trait AuthServiceTrait {
}
pub enum AuthService {
Basic(BasicAuthService),
Oidc(OidcAuthService),
Basic(Box<BasicAuthService>),
Oidc(Box<OidcAuthService>),
}
impl AuthService {
pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> {
let result = match config {
AuthConfig::Basic(config) => AuthService::Basic(BasicAuthService { config }),
AuthConfig::Basic(config) => AuthService::Basic(Box::new(BasicAuthService { config })),
AuthConfig::Oidc(config) => {
let validation = Validation::new()
.iss(&[&config.issuer])
@ -78,14 +79,14 @@ impl AuthService {
cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
..Default::default()
})
.map_err(AuthError::OidcProviderHttpClientError)?;
.context(OidcProviderHttpClientSnafu)?;
let api_authorizer = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer)
.validation(validation)
.build()
.await?;
AuthService::Oidc(OidcAuthService {
AuthService::Oidc(Box::new(OidcAuthService {
config,
api_authorizer,
oidc_provider_client,
@ -93,7 +94,7 @@ impl AuthService {
.time_to_live(Duration::from_mins(5))
.name("oidc_request_cache")
.build(),
})
}))
}
};
Ok(result)

View File

@ -1,10 +1,7 @@
use color_eyre::{self, eyre};
use recorder::app::AppBuilder;
use recorder::{app::AppBuilder, errors::RResult};
#[tokio::main]
async fn main() -> eyre::Result<()> {
color_eyre::install()?;
async fn main() -> RResult<()> {
let builder = AppBuilder::from_main_cli(None).await?;
let app = builder.build().await?;

View File

@ -1,26 +0,0 @@
use std::{borrow::Cow, time::Duration};
use thiserror::Error;
#[derive(Error, Debug)]
pub enum TorrentDownloadError {
#[error("Invalid mime (expected {expected:?}, got {found:?})")]
InvalidMime { expected: String, found: String },
#[error("Invalid url schema (expected {expected:?}, got {found:?})")]
InvalidUrlSchema { expected: String, found: String },
#[error("Invalid url parse: {0:?}")]
InvalidUrlParse(#[from] url::ParseError),
#[error("Invalid url format: {reason}")]
InvalidUrlFormat { reason: Cow<'static, str> },
#[error("QBit api error: {0:?}")]
QBitAPIError(#[from] qbit_rs::Error),
#[error("Timeout error ({action} timeouts out of {timeout:?})")]
TimeoutError {
action: Cow<'static, str>,
timeout: Duration,
},
#[error("Invalid torrent file format")]
InvalidTorrentFileFormat,
#[error("Invalid magnet file format (url = {url})")]
InvalidMagnetFormat { url: String },
}

View File

@ -1 +0,0 @@
use librqbit::TorrentMetadata;

View File

@ -10,9 +10,10 @@ use librqbit_core::{
use quirks_path::{Path, PathBuf};
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::prelude::*;
use url::Url;
use super::{QbitTorrent, QbitTorrentContent, TorrentDownloadError};
use super::{DownloaderError, QbitTorrent, QbitTorrentContent, errors::DownloadFetchSnafu};
use crate::fetch::{HttpClientTrait, fetch_bytes};
pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
@ -57,10 +58,7 @@ pub enum TorrentSource {
}
impl TorrentSource {
pub async fn parse<H: HttpClientTrait>(
client: &H,
url: &str,
) -> color_eyre::eyre::Result<Self> {
pub async fn parse<H: HttpClientTrait>(client: &H, url: &str) -> Result<Self, DownloaderError> {
let url = Url::parse(url)?;
let source = if url.scheme() == MAGNET_SCHEMA {
TorrentSource::from_magnet_url(url)?
@ -75,22 +73,25 @@ impl TorrentSource {
) {
TorrentSource::from_torrent_url(url, match_hash.as_str().to_string())?
} else {
let contents = fetch_bytes(client, url).await?;
let contents = fetch_bytes(client, url)
.await
.boxed()
.context(DownloadFetchSnafu)?;
TorrentSource::from_torrent_file(contents.to_vec(), Some(basename.to_string()))?
}
} else {
let contents = fetch_bytes(client, url).await?;
let contents = fetch_bytes(client, url)
.await
.boxed()
.context(DownloadFetchSnafu)?;
TorrentSource::from_torrent_file(contents.to_vec(), None)?
};
Ok(source)
}
pub fn from_torrent_file(
file: Vec<u8>,
name: Option<String>,
) -> color_eyre::eyre::Result<Self> {
let torrent: TorrentMetaV1Owned = torrent_from_bytes(&file)
.map_err(|_| TorrentDownloadError::InvalidTorrentFileFormat)?;
pub fn from_torrent_file(file: Vec<u8>, name: Option<String>) -> Result<Self, DownloaderError> {
let torrent: TorrentMetaV1Owned =
torrent_from_bytes(&file).map_err(|_| DownloaderError::TorrentFileFormatError)?;
let hash = torrent.info_hash.as_string();
Ok(TorrentSource::TorrentFile {
torrent: file,
@ -99,23 +100,21 @@ impl TorrentSource {
})
}
pub fn from_magnet_url(url: Url) -> color_eyre::eyre::Result<Self> {
pub fn from_magnet_url(url: Url) -> Result<Self, DownloaderError> {
if url.scheme() != MAGNET_SCHEMA {
Err(TorrentDownloadError::InvalidUrlSchema {
Err(DownloaderError::DownloadSchemaError {
found: url.scheme().to_string(),
expected: MAGNET_SCHEMA.to_string(),
}
.into())
})
} else {
let magnet = Magnet::parse(url.as_str()).map_err(|_| {
TorrentDownloadError::InvalidMagnetFormat {
let magnet =
Magnet::parse(url.as_str()).map_err(|_| DownloaderError::MagnetFormatError {
url: url.as_str().to_string(),
}
})?;
let hash = magnet
.as_id20()
.ok_or_else(|| TorrentDownloadError::InvalidMagnetFormat {
.ok_or_else(|| DownloaderError::MagnetFormatError {
url: url.as_str().to_string(),
})?
.as_string();
@ -123,7 +122,7 @@ impl TorrentSource {
}
}
pub fn from_torrent_url(url: Url, hash: String) -> color_eyre::eyre::Result<Self> {
pub fn from_torrent_url(url: Url, hash: String) -> Result<Self, DownloaderError> {
Ok(TorrentSource::TorrentUrl { url, hash })
}
@ -252,47 +251,47 @@ pub trait TorrentDownloader {
status_filter: TorrentFilter,
category: Option<String>,
tag: Option<String>,
) -> color_eyre::eyre::Result<Vec<Torrent>>;
) -> Result<Vec<Torrent>, DownloaderError>;
async fn add_torrents(
&self,
source: TorrentSource,
save_path: String,
category: Option<&str>,
) -> color_eyre::eyre::Result<()>;
) -> Result<(), DownloaderError>;
async fn delete_torrents(&self, hashes: Vec<String>) -> color_eyre::eyre::Result<()>;
async fn delete_torrents(&self, hashes: Vec<String>) -> Result<(), DownloaderError>;
async fn rename_torrent_file(
&self,
hash: &str,
old_path: &str,
new_path: &str,
) -> color_eyre::eyre::Result<()>;
) -> Result<(), DownloaderError>;
async fn move_torrents(
&self,
hashes: Vec<String>,
new_path: &str,
) -> color_eyre::eyre::Result<()>;
) -> Result<(), DownloaderError>;
async fn get_torrent_path(&self, hashes: String) -> color_eyre::eyre::Result<Option<String>>;
async fn get_torrent_path(&self, hashes: String) -> Result<Option<String>, DownloaderError>;
async fn check_connection(&self) -> color_eyre::eyre::Result<()>;
async fn check_connection(&self) -> Result<(), DownloaderError>;
async fn set_torrents_category(
&self,
hashes: Vec<String>,
category: &str,
) -> color_eyre::eyre::Result<()>;
) -> Result<(), DownloaderError>;
async fn add_torrent_tags(
&self,
hashes: Vec<String>,
tags: Vec<String>,
) -> color_eyre::eyre::Result<()>;
) -> Result<(), DownloaderError>;
async fn add_category(&self, category: &str) -> color_eyre::eyre::Result<()>;
async fn add_category(&self, category: &str) -> Result<(), DownloaderError>;
fn get_save_path(&self, sub_path: &Path) -> PathBuf;
}

View File

@ -0,0 +1,58 @@
use std::{borrow::Cow, time::Duration};
use snafu::prelude::*;
use crate::errors::OptionWhateverAsync;
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum DownloaderError {
#[snafu(display("Invalid mime (expected {expected:?}, got {found:?})"))]
DownloadMimeError { expected: String, found: String },
#[snafu(display("Invalid url schema (expected {expected:?}, got {found:?})"))]
DownloadSchemaError { expected: String, found: String },
#[snafu(transparent)]
DownloadUrlParseError { source: url::ParseError },
#[snafu(display("Invalid url format: {reason}"))]
DownloadUrlFormatError { reason: Cow<'static, str> },
#[snafu(transparent)]
QBitAPIError { source: qbit_rs::Error },
#[snafu(display("Timeout error (action = {action}, timeout = {timeout:?})"))]
DownloadTimeoutError {
action: Cow<'static, str>,
timeout: Duration,
},
#[snafu(display("Invalid torrent file format"))]
TorrentFileFormatError,
#[snafu(display("Invalid magnet format (url = {url})"))]
MagnetFormatError { url: String },
#[snafu(display("Failed to fetch: {source}"))]
DownloadFetchError {
#[snafu(source)]
source: Box<dyn snafu::Error + Send + Sync>,
},
#[snafu(display("{message}"))]
Whatever {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: OptionWhateverAsync,
},
}
impl snafu::FromString for DownloaderError {
type Source = Box<dyn std::error::Error + Send + Sync>;
fn without_source(message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::none(),
}
}
fn with_source(source: Self::Source, message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::some(source),
}
}
}
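The hand-written snafu::FromString impl above is what lets the whatever! and whatever_context call sites later in this commit (for example in the qBittorrent downloader) produce a DownloaderError directly. A minimal usage sketch, with a hypothetical helper function:

use snafu::{OptionExt, whatever};

// Hypothetical helper showing both forms: `whatever_context` turns an Option
// into the `Whatever` variant, and `whatever!` bails out with a formatted message.
fn first_hash(hashes: &[String]) -> Result<String, DownloaderError> {
    let first = hashes
        .first()
        .whatever_context::<_, DownloaderError>("no torrent hash provided")?;
    if first.is_empty() {
        whatever!("empty torrent hash: {:?}", first);
    }
    Ok(first.clone())
}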

View File

@ -1,15 +1,15 @@
pub mod core;
pub mod error;
pub mod errors;
pub mod qbit;
pub mod rqbit;
pub mod utils;
pub use core::{
BITTORRENT_MIME_TYPE, MAGNET_SCHEMA, Torrent, TorrentContent, TorrentDownloader, TorrentFilter,
TorrentSource,
Torrent, TorrentContent, TorrentDownloader, TorrentFilter, TorrentSource, BITTORRENT_MIME_TYPE,
MAGNET_SCHEMA,
};
pub use error::TorrentDownloadError;
pub use errors::DownloaderError;
pub use qbit::{
QBittorrentDownloader, QBittorrentDownloaderCreation, QbitTorrent, QbitTorrentContent,
QbitTorrentFile, QbitTorrentFilter, QbitTorrentSource,

View File

@ -3,7 +3,6 @@ use std::{
};
use async_trait::async_trait;
use color_eyre::eyre::OptionExt;
use futures::future::try_join_all;
pub use qbit_rs::model::{
Torrent as QbitTorrent, TorrentContent as QbitTorrentContent, TorrentFile as QbitTorrentFile,
@ -14,12 +13,13 @@ use qbit_rs::{
model::{AddTorrentArg, Credential, GetTorrentListArg, NonEmptyStr, SyncData},
};
use quirks_path::{Path, PathBuf};
use snafu::prelude::*;
use tokio::time::sleep;
use tracing::instrument;
use url::Url;
use super::{
Torrent, TorrentDownloadError, TorrentDownloader, TorrentFilter, TorrentSource,
DownloaderError, Torrent, TorrentDownloader, TorrentFilter, TorrentSource,
utils::path_equals_as_file_url,
};
@ -83,18 +83,14 @@ pub struct QBittorrentDownloader {
impl QBittorrentDownloader {
pub async fn from_creation(
creation: QBittorrentDownloaderCreation,
) -> Result<Self, TorrentDownloadError> {
let endpoint_url =
Url::parse(&creation.endpoint).map_err(TorrentDownloadError::InvalidUrlParse)?;
) -> Result<Self, DownloaderError> {
let endpoint_url = Url::parse(&creation.endpoint)?;
let credential = Credential::new(creation.username, creation.password);
let client = Qbit::new(endpoint_url.clone(), credential);
client
.login(false)
.await
.map_err(TorrentDownloadError::QBitAPIError)?;
client.login(false).await?;
client.sync(None).await?;
@ -108,7 +104,7 @@ impl QBittorrentDownloader {
}
#[instrument(level = "debug")]
pub async fn api_version(&self) -> color_eyre::eyre::Result<String> {
pub async fn api_version(&self) -> Result<String, DownloaderError> {
let result = self.client.get_webapi_version().await?;
Ok(result)
}
@ -119,11 +115,11 @@ impl QBittorrentDownloader {
fetch_data_fn: G,
mut stop_wait_fn: F,
timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()>
) -> Result<(), DownloaderError>
where
H: FnOnce() -> E,
G: Fn(Arc<Qbit>, E) -> Fut,
Fut: Future<Output = color_eyre::eyre::Result<D>>,
Fut: Future<Output = Result<D, DownloaderError>>,
F: FnMut(&D) -> bool,
E: Clone,
D: Debug + serde::Serialize,
@ -142,11 +138,10 @@ impl QBittorrentDownloader {
break;
} else {
tracing::warn!(name = "wait_until timeout", sync_data = serde_json::to_string(&sync_data).unwrap(), timeout = ?timeout);
return Err(TorrentDownloadError::TimeoutError {
return Err(DownloaderError::DownloadTimeoutError {
action: Cow::Borrowed("QBittorrentDownloader::wait_unit"),
timeout,
}
.into());
});
}
}
let sync_data = fetch_data_fn(self.client.clone(), env.clone()).await?;
@ -164,7 +159,7 @@ impl QBittorrentDownloader {
arg: GetTorrentListArg,
stop_wait_fn: F,
timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()>
) -> Result<(), DownloaderError>
where
F: FnMut(&Vec<QbitTorrent>) -> bool,
{
@ -172,7 +167,7 @@ impl QBittorrentDownloader {
|| arg,
async move |client: Arc<Qbit>,
arg: GetTorrentListArg|
-> color_eyre::eyre::Result<Vec<QbitTorrent>> {
-> Result<Vec<QbitTorrent>, DownloaderError> {
let data = client.get_torrent_list(arg).await?;
Ok(data)
},
@ -187,10 +182,10 @@ impl QBittorrentDownloader {
&self,
stop_wait_fn: F,
timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()> {
) -> Result<(), DownloaderError> {
self.wait_until(
|| (),
async move |client: Arc<Qbit>, _| -> color_eyre::eyre::Result<SyncData> {
async move |client: Arc<Qbit>, _| -> Result<SyncData, DownloaderError> {
let data = client.sync(None).await?;
Ok(data)
},
@ -206,12 +201,12 @@ impl QBittorrentDownloader {
hash: &str,
stop_wait_fn: F,
timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()> {
) -> Result<(), DownloaderError> {
self.wait_until(
|| Arc::new(hash.to_string()),
async move |client: Arc<Qbit>,
hash_arc: Arc<String>|
-> color_eyre::eyre::Result<Vec<QbitTorrentContent>> {
-> Result<Vec<QbitTorrentContent>, DownloaderError> {
let data = client.get_torrent_contents(hash_arc.as_str(), None).await?;
Ok(data)
},
@ -230,7 +225,7 @@ impl TorrentDownloader for QBittorrentDownloader {
status_filter: TorrentFilter,
category: Option<String>,
tag: Option<String>,
) -> color_eyre::eyre::Result<Vec<Torrent>> {
) -> Result<Vec<Torrent>, DownloaderError> {
let arg = GetTorrentListArg {
filter: Some(status_filter.into()),
category,
@ -259,7 +254,7 @@ impl TorrentDownloader for QBittorrentDownloader {
source: TorrentSource,
save_path: String,
category: Option<&str>,
) -> color_eyre::eyre::Result<()> {
) -> Result<(), DownloaderError> {
let arg = AddTorrentArg {
source: source.clone().into(),
savepath: Some(save_path),
@ -293,7 +288,7 @@ impl TorrentDownloader for QBittorrentDownloader {
}
#[instrument(level = "debug", skip(self))]
async fn delete_torrents(&self, hashes: Vec<String>) -> color_eyre::eyre::Result<()> {
async fn delete_torrents(&self, hashes: Vec<String>) -> Result<(), DownloaderError> {
self.client
.delete_torrents(hashes.clone(), Some(true))
.await?;
@ -314,7 +309,7 @@ impl TorrentDownloader for QBittorrentDownloader {
hash: &str,
old_path: &str,
new_path: &str,
) -> color_eyre::eyre::Result<()> {
) -> Result<(), DownloaderError> {
self.client.rename_file(hash, old_path, new_path).await?;
let new_path = self.save_path.join(new_path);
let save_path = self.save_path.as_path();
@ -340,7 +335,7 @@ impl TorrentDownloader for QBittorrentDownloader {
&self,
hashes: Vec<String>,
new_path: &str,
) -> color_eyre::eyre::Result<()> {
) -> Result<(), DownloaderError> {
self.client
.set_torrent_location(hashes.clone(), new_path)
.await?;
@ -364,7 +359,7 @@ impl TorrentDownloader for QBittorrentDownloader {
Ok(())
}
async fn get_torrent_path(&self, hashes: String) -> color_eyre::eyre::Result<Option<String>> {
async fn get_torrent_path(&self, hashes: String) -> Result<Option<String>, DownloaderError> {
let mut torrent_list = self
.client
.get_torrent_list(GetTorrentListArg {
@ -372,12 +367,14 @@ impl TorrentDownloader for QBittorrentDownloader {
..Default::default()
})
.await?;
let torrent = torrent_list.first_mut().ok_or_eyre("No torrent found")?;
let torrent = torrent_list
.first_mut()
.whatever_context::<_, DownloaderError>("No torrent found")?;
Ok(torrent.save_path.take())
}
#[instrument(level = "debug", skip(self))]
async fn check_connection(&self) -> color_eyre::eyre::Result<()> {
async fn check_connection(&self) -> Result<(), DownloaderError> {
self.api_version().await?;
Ok(())
}
@ -387,7 +384,7 @@ impl TorrentDownloader for QBittorrentDownloader {
&self,
hashes: Vec<String>,
category: &str,
) -> color_eyre::eyre::Result<()> {
) -> Result<(), DownloaderError> {
let result = self
.client
.set_torrent_category(hashes.clone(), category)
@ -420,9 +417,9 @@ impl TorrentDownloader for QBittorrentDownloader {
&self,
hashes: Vec<String>,
tags: Vec<String>,
) -> color_eyre::eyre::Result<()> {
) -> Result<(), DownloaderError> {
if tags.is_empty() {
return Err(color_eyre::eyre::eyre!("add torrent tags can not be empty"));
whatever!("add torrent tags can not be empty");
}
self.client
.add_torrent_tags(hashes.clone(), tags.clone())
@ -450,10 +447,11 @@ impl TorrentDownloader for QBittorrentDownloader {
}
#[instrument(level = "debug", skip(self))]
async fn add_category(&self, category: &str) -> color_eyre::eyre::Result<()> {
async fn add_category(&self, category: &str) -> Result<(), DownloaderError> {
self.client
.add_category(
NonEmptyStr::new(category).ok_or_eyre("category can not be empty")?,
NonEmptyStr::new(category)
.whatever_context::<_, DownloaderError>("category can not be empty")?,
self.save_path.as_str(),
)
.await?;
@ -490,7 +488,7 @@ pub mod tests {
use itertools::Itertools;
use super::*;
use crate::test_utils::fetch::build_testing_http_client;
use crate::{errors::RResult, test_utils::fetch::build_testing_http_client};
fn get_tmp_qbit_test_folder() -> &'static str {
if cfg!(all(windows, not(feature = "testcontainers"))) {
@ -502,8 +500,7 @@ pub mod tests {
#[cfg(feature = "testcontainers")]
pub async fn create_qbit_testcontainer()
-> color_eyre::eyre::Result<testcontainers::ContainerRequest<testcontainers::GenericImage>>
{
-> RResult<testcontainers::ContainerRequest<testcontainers::GenericImage>> {
use testcontainers::{
GenericImage,
core::{
@ -539,7 +536,7 @@ pub mod tests {
#[cfg(feature = "testcontainers")]
#[tokio::test(flavor = "multi_thread")]
async fn test_qbittorrent_downloader() -> color_eyre::eyre::Result<()> {
async fn test_qbittorrent_downloader() -> RResult<()> {
use testcontainers::runners::AsyncRunner;
use tokio::io::AsyncReadExt;
@ -590,7 +587,7 @@ pub mod tests {
async fn test_qbittorrent_downloader_impl(
username: Option<&str>,
password: Option<&str>,
) -> color_eyre::eyre::Result<()> {
) -> RResult<()> {
let http_client = build_testing_http_client()?;
let base_save_path = Path::new(get_tmp_qbit_test_folder());
@ -625,7 +622,7 @@ pub mod tests {
.add_torrents(torrent_source, save_path.to_string(), Some("bangumi"))
.await?;
let get_torrent = async || -> color_eyre::eyre::Result<Torrent> {
let get_torrent = async || -> Result<Torrent, DownloaderError> {
let torrent_infos = downloader
.get_torrents_info(TorrentFilter::All, None, None)
.await?;
@ -633,7 +630,7 @@ pub mod tests {
let result = torrent_infos
.into_iter()
.find(|t| (t.get_hash() == Some("47ee2d69e7f19af783ad896541a07b012676f858")))
.ok_or_eyre("no torrent")?;
.whatever_context::<_, DownloaderError>("no torrent")?;
Ok(result)
};

View File

@ -0,0 +1 @@

View File

@ -1,4 +1,5 @@
use std::{borrow::Cow, error::Error as StdError};
pub mod whatever;
use std::borrow::Cow;
use axum::{
Json,
@ -6,105 +7,157 @@ use axum::{
};
use http::StatusCode;
use serde::{Deserialize, Deserializer, Serialize};
use thiserror::Error as ThisError;
use snafu::prelude::*;
pub use whatever::OptionWhateverAsync;
use crate::{auth::AuthError, fetch::HttpClientError};
use crate::{auth::AuthError, downloader::DownloaderError, fetch::HttpClientError};
#[derive(ThisError, Debug)]
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RError {
#[error(transparent)]
InvalidMethodError(#[from] http::method::InvalidMethod),
#[error(transparent)]
InvalidHeaderNameError(#[from] http::header::InvalidHeaderName),
#[error(transparent)]
TracingAppenderInitError(#[from] tracing_appender::rolling::InitError),
#[error(transparent)]
GraphQLSchemaError(#[from] async_graphql::dynamic::SchemaError),
#[error(transparent)]
AuthError(#[from] AuthError),
#[error(transparent)]
RSSError(#[from] rss::Error),
#[error(transparent)]
DotEnvError(#[from] dotenv::Error),
#[error(transparent)]
TeraError(#[from] tera::Error),
#[error(transparent)]
IOError(#[from] std::io::Error),
#[error(transparent)]
DbError(#[from] sea_orm::DbErr),
#[error(transparent)]
CookieParseError(#[from] cookie::ParseError),
#[error(transparent)]
FigmentError(#[from] figment::Error),
#[error(transparent)]
SerdeJsonError(#[from] serde_json::Error),
#[error(transparent)]
ReqwestMiddlewareError(#[from] reqwest_middleware::Error),
#[error(transparent)]
ReqwestError(#[from] reqwest::Error),
#[error(transparent)]
ParseUrlError(#[from] url::ParseError),
#[error(transparent)]
OpenDALError(#[from] opendal::Error),
#[error(transparent)]
InvalidHeaderValueError(#[from] http::header::InvalidHeaderValue),
#[error(transparent)]
HttpClientError(#[from] HttpClientError),
#[error("Extract {desc} with mime error, expected {expected}, but got {found}")]
#[snafu(transparent, context(false))]
FancyRegexError {
#[snafu(source(from(fancy_regex::Error, Box::new)))]
source: Box<fancy_regex::Error>,
},
#[snafu(transparent)]
RegexError { source: regex::Error },
#[snafu(transparent)]
InvalidMethodError { source: http::method::InvalidMethod },
#[snafu(transparent)]
InvalidHeaderNameError {
source: http::header::InvalidHeaderName,
},
#[snafu(transparent)]
TracingAppenderInitError {
source: tracing_appender::rolling::InitError,
},
#[snafu(transparent)]
GraphQLSchemaError {
source: async_graphql::dynamic::SchemaError,
},
#[snafu(transparent)]
AuthError { source: AuthError },
#[snafu(transparent)]
DownloadError { source: DownloaderError },
#[snafu(transparent)]
RSSError { source: rss::Error },
#[snafu(transparent)]
DotEnvError { source: dotenv::Error },
#[snafu(transparent)]
TeraError { source: tera::Error },
#[snafu(transparent)]
IOError { source: std::io::Error },
#[snafu(transparent)]
DbError { source: sea_orm::DbErr },
#[snafu(transparent)]
CookieParseError { source: cookie::ParseError },
#[snafu(transparent, context(false))]
FigmentError {
#[snafu(source(from(figment::Error, Box::new)))]
source: Box<figment::Error>,
},
#[snafu(transparent)]
SerdeJsonError { source: serde_json::Error },
#[snafu(transparent)]
ReqwestMiddlewareError { source: reqwest_middleware::Error },
#[snafu(transparent)]
ReqwestError { source: reqwest::Error },
#[snafu(transparent)]
ParseUrlError { source: url::ParseError },
#[snafu(display("{source}"), context(false))]
OpenDALError {
#[snafu(source(from(opendal::Error, Box::new)))]
source: Box<opendal::Error>,
},
#[snafu(transparent)]
InvalidHeaderValueError {
source: http::header::InvalidHeaderValue,
},
#[snafu(transparent)]
HttpClientError { source: HttpClientError },
#[cfg(all(feature = "testcontainers", test))]
#[snafu(transparent)]
TestcontainersError {
source: testcontainers::TestcontainersError,
},
#[snafu(display("Extract {desc} with mime error, expected {expected}, but got {found}"))]
MimeError {
desc: String,
expected: String,
found: String,
},
#[error("Invalid or unknown format in extracting mikan rss")]
#[snafu(display("Invalid or unknown format in extracting mikan rss"))]
MikanRssInvalidFormatError,
#[error("Invalid field {field} in extracting mikan rss")]
#[snafu(display("Invalid field {field} in extracting mikan rss"))]
MikanRssInvalidFieldError {
field: Cow<'static, str>,
#[source]
source: Option<Box<dyn StdError + Send + Sync>>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: OptionWhateverAsync,
},
#[error("Missing field {field} in extracting mikan meta")]
#[snafu(display("Missing field {field} in extracting mikan meta"))]
MikanMetaMissingFieldError {
field: Cow<'static, str>,
#[source]
source: Option<Box<dyn StdError + Send + Sync>>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: OptionWhateverAsync,
},
#[error("Model Entity {entity} not found")]
#[snafu(display("Model Entity {entity} not found"))]
ModelEntityNotFound { entity: Cow<'static, str> },
#[error("{0}")]
CustomMessageStr(&'static str),
#[error("{0}")]
CustomMessageString(String),
#[snafu(display("{message}"))]
Whatever {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: OptionWhateverAsync,
},
}
impl RError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
source: None,
source: None.into(),
}
}
pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
Self::MikanRssInvalidFieldError {
field,
source: None,
source: None.into(),
}
}
pub fn from_mikan_rss_invalid_field_and_source(
field: Cow<'static, str>,
source: Box<dyn StdError + Send + Sync>,
source: impl std::error::Error + Send + Sync + 'static,
) -> Self {
Self::MikanRssInvalidFieldError {
field,
source: Some(source),
source: OptionWhateverAsync::some_boxed(source),
}
}
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
Self::DbError(sea_orm::DbErr::RecordNotFound(detail.to_string()))
Self::DbError {
source: sea_orm::DbErr::RecordNotFound(detail.to_string()),
}
}
}
impl snafu::FromString for RError {
type Source = Box<dyn std::error::Error + Send + Sync>;
fn without_source(message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::none(),
}
}
fn with_source(source: Self::Source, message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::some(source),
}
}
}
@ -129,7 +182,7 @@ impl<T> From<String> for StandardErrorResponse<T> {
impl IntoResponse for RError {
fn into_response(self) -> Response {
match self {
Self::AuthError(auth_error) => auth_error.into_response(),
Self::AuthError { source: auth_error } => auth_error.into_response(),
err => (
StatusCode::INTERNAL_SERVER_ERROR,
Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
@ -154,7 +207,10 @@ impl<'de> Deserialize<'de> for RError {
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(Self::CustomMessageString(s))
Ok(Self::Whatever {
message: s,
source: None.into(),
})
}
}

View File

@ -0,0 +1,55 @@
use std::fmt::Display;
#[derive(Debug)]
pub struct OptionWhateverAsync(Option<Box<dyn std::error::Error + Send + Sync>>);
impl AsRef<dyn snafu::Error> for OptionWhateverAsync {
fn as_ref(&self) -> &(dyn snafu::Error + 'static) {
self
}
}
impl OptionWhateverAsync {
pub fn some_boxed<E: std::error::Error + Send + Sync + 'static>(e: E) -> Self {
Self(Some(Box::new(e)))
}
pub fn some(e: Box<dyn std::error::Error + Send + Sync>) -> Self {
Self(Some(e))
}
pub fn none() -> Self {
Self(None)
}
}
impl Display for OptionWhateverAsync {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.0 {
Some(e) => e.fmt(f),
None => write!(f, "None"),
}
}
}
impl snafu::Error for OptionWhateverAsync {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
fn cause(&self) -> Option<&dyn std::error::Error> {
self.source()
}
}
impl From<Option<Box<dyn std::error::Error + Send + Sync>>> for OptionWhateverAsync {
fn from(value: Option<Box<dyn std::error::Error + Send + Sync>>) -> Self {
Self(value)
}
}
impl From<Box<dyn std::error::Error + Send + Sync>> for OptionWhateverAsync {
fn from(value: Box<dyn std::error::Error + Send + Sync>) -> Self {
Self::some(value)
}
}
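OptionWhateverAsync lets a snafu variant carry an optional, boxed, Send + Sync cause while still presenting an Error-implementing source to snafu, which is how the MikanRssInvalidFieldError and Whatever variants above can hold no source at all. A minimal sketch of such a variant (names are illustrative only):

use std::borrow::Cow;
use snafu::prelude::*;

#[derive(Debug, Snafu)]
enum SketchError {
    #[snafu(display("Invalid field {field}"))]
    InvalidField {
        field: Cow<'static, str>,
        // A boxed error handed to the generated selector is converted via
        // `OptionWhateverAsync::some`; direct construction can pass
        // `OptionWhateverAsync::none()` when there is no underlying cause.
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
        source: OptionWhateverAsync,
    },
}

fn missing_title() -> SketchError {
    SketchError::InvalidField {
        field: Cow::Borrowed("title"),
        source: OptionWhateverAsync::none(),
    }
}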

View File

@ -8,7 +8,7 @@ use tracing::instrument;
use url::Url;
use crate::{
download::core::BITTORRENT_MIME_TYPE,
downloader::core::BITTORRENT_MIME_TYPE,
errors::{RError, RResult},
extract::mikan::{
MikanClient,
@ -120,10 +120,10 @@ impl TryFrom<rss::Item> for MikanRssItem {
.title
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?;
let enclosure_url = Url::parse(&enclosure.url).map_err(|inner| {
let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
RError::from_mikan_rss_invalid_field_and_source(
Cow::Borrowed("enclosure_url:enclosure.link"),
Box::new(inner),
"enclosure_url:enclosure.link".into(),
err,
)
})?;
@ -334,12 +334,12 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
mod tests {
use std::assert_matches::assert_matches;
use color_eyre::eyre;
use rstest::rstest;
use url::Url;
use crate::{
download::core::BITTORRENT_MIME_TYPE,
downloader::core::BITTORRENT_MIME_TYPE,
errors::RResult,
extract::mikan::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
extract_mikan_rss_channel_from_rss_link,
@ -349,7 +349,7 @@ mod tests {
#[rstest]
#[tokio::test]
async fn test_parse_mikan_rss_channel_from_rss_link() -> eyre::Result<()> {
async fn test_parse_mikan_rss_channel_from_rss_link() -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;

View File

@ -491,7 +491,6 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
#[cfg(test)]
mod test {
#![allow(unused_variables)]
use color_eyre::eyre;
use futures::{TryStreamExt, pin_mut};
use http::header;
use rstest::{fixture, rstest};
@ -512,7 +511,7 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_poster_from_src(before_each: ()) -> eyre::Result<()> {
async fn test_extract_mikan_poster_from_src(before_each: ()) -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -543,7 +542,7 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_episode(before_each: ()) -> eyre::Result<()> {
async fn test_extract_mikan_episode(before_each: ()) -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -583,9 +582,7 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(
before_each: (),
) -> eyre::Result<()> {
async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -622,9 +619,7 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(
before_each: (),
) -> eyre::Result<()> {
async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;

View File

@ -7,8 +7,12 @@ use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::whatever;
use crate::extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE};
use crate::{
errors::RResult,
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
};
const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";
@ -71,10 +75,7 @@ fn replace_ch_bracket_to_en(raw_name: &str) -> String {
raw_name.replace('【', "[").replace('】', "]")
}
fn title_body_pre_process(
title_body: &str,
fansub: Option<&str>,
) -> color_eyre::eyre::Result<String> {
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RResult<String> {
let raw_without_fansub = if let Some(fansub) = fansub {
let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
fan_sub_re.replace_all(title_body, "")
@ -262,7 +263,7 @@ pub fn check_is_movie(title: &str) -> bool {
MOVIE_TITLE_RE.is_match(title)
}
pub fn parse_episode_meta_from_raw_name(s: &str) -> color_eyre::eyre::Result<RawEpisodeMeta> {
pub fn parse_episode_meta_from_raw_name(s: &str) -> RResult<RawEpisodeMeta> {
let raw_title = s.trim();
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
let fansub = extract_fansub(&raw_title_without_ch_brackets);
@ -315,10 +316,7 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> color_eyre::eyre::Result<Raw
resolution,
})
} else {
Err(color_eyre::eyre::eyre!(
"Can not parse episode meta from raw filename {}",
raw_title
))
whatever!("Can not parse episode meta from raw filename {}", raw_title)
}
}

View File

@ -1,11 +1,14 @@
use color_eyre::eyre::OptionExt;
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use quirks_path::Path;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};
use crate::extract::defs::SUBTITLE_LANG;
use crate::{
errors::{RError, RResult},
extract::defs::SUBTITLE_LANG,
};
lazy_static! {
static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
@ -101,10 +104,12 @@ pub fn parse_episode_media_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> color_eyre::eyre::Result<TorrentEpisodeMediaMeta> {
) -> RResult<TorrentEpisodeMediaMeta> {
let media_name = torrent_path
.file_name()
.ok_or_else(|| color_eyre::eyre::eyre!("failed to get file name of {}", torrent_path))?;
.with_whatever_context::<_, _, RError>(|| {
format!("failed to get file name of {}", torrent_path)
})?;
let mut match_obj = None;
for rule in TORRENT_EP_PARSE_RULES.iter() {
match_obj = if let Some(torrent_name) = torrent_name.as_ref() {
@ -119,7 +124,7 @@ pub fn parse_episode_media_meta_from_torrent(
if let Some(match_obj) = match_obj {
let group_season_and_title = match_obj
.get(1)
.ok_or_else(|| color_eyre::eyre::eyre!("should have 1 group"))?
.whatever_context::<_, RError>("should have 1 group")?
.as_str();
let (fansub, season_and_title) = get_fansub(group_season_and_title);
let (title, season) = if let Some(season) = season {
@ -130,7 +135,7 @@ pub fn parse_episode_media_meta_from_torrent(
};
let episode_index = match_obj
.get(2)
.ok_or_eyre("should have 2 group")?
.whatever_context::<_, RError>("should have 2 group")?
.as_str()
.parse::<i32>()
.unwrap_or(1);
@ -146,11 +151,11 @@ pub fn parse_episode_media_meta_from_torrent(
extname,
})
} else {
Err(color_eyre::eyre::eyre!(
whatever!(
"failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'",
torrent_path,
torrent_name
))
)
}
}
@ -158,11 +163,13 @@ pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> color_eyre::eyre::Result<TorrentEpisodeSubtitleMeta> {
) -> RResult<TorrentEpisodeSubtitleMeta> {
let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
let media_name = torrent_path
.file_name()
.ok_or_else(|| color_eyre::eyre::eyre!("failed to get file name of {}", torrent_path))?;
.with_whatever_context::<_, _, RError>(|| {
format!("failed to get file name of {}", torrent_path)
})?;
let lang = get_subtitle_lang(media_name);
@ -177,8 +184,8 @@ mod tests {
use quirks_path::Path;
use super::{
parse_episode_media_meta_from_torrent, parse_episode_subtitle_meta_from_torrent,
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta,
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta, parse_episode_media_meta_from_torrent,
parse_episode_subtitle_meta_from_torrent,
};
#[test]

View File

@ -14,7 +14,7 @@ use reqwest_retry::{RetryTransientMiddleware, policies::ExponentialBackoff};
use reqwest_tracing::TracingMiddleware;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use thiserror::Error;
use snafu::Snafu;
use super::HttpClientSecrecyDataTrait;
use crate::fetch::get_random_mobile_ua;
@ -101,14 +101,14 @@ impl CacheManager for CacheBackend {
}
}
#[derive(Debug, Error)]
#[derive(Debug, Snafu)]
pub enum HttpClientError {
#[error(transparent)]
ReqwestError(#[from] reqwest::Error),
#[error(transparent)]
ReqwestMiddlewareError(#[from] reqwest_middleware::Error),
#[error(transparent)]
HttpError(#[from] http::Error),
#[snafu(transparent)]
ReqwestError { source: reqwest::Error },
#[snafu(transparent)]
ReqwestMiddlewareError { source: reqwest_middleware::Error },
#[snafu(transparent)]
HttpError { source: http::Error },
}
pub trait HttpClientTrait: Deref<Target = ClientWithMiddleware> + Debug {}

View File

@ -2,7 +2,7 @@ use std::{future::Future, pin::Pin};
use axum::http;
use super::{client::HttpClientError, HttpClient};
use super::{HttpClient, client::HttpClientError};
impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
type Error = HttpClientError;
@ -30,7 +30,7 @@ impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
builder
.body(response.bytes().await?.to_vec())
.map_err(HttpClientError::HttpError)
.map_err(HttpClientError::from)
})
}
}

View File

@ -5,14 +5,15 @@
impl_trait_in_bindings,
iterator_try_collect,
async_fn_traits,
let_chains
let_chains,
error_generic_member_access
)]
pub mod app;
pub mod auth;
pub mod cache;
pub mod database;
pub mod download;
pub mod downloader;
pub mod errors;
pub mod extract;
pub mod fetch;

View File

@ -1,5 +1,6 @@
use std::sync::OnceLock;
use snafu::prelude::*;
use tracing_appender::non_blocking::WorkerGuard;
use tracing_subscriber::{
EnvFilter, Layer, Registry,
@ -9,7 +10,7 @@ use tracing_subscriber::{
};
use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
use crate::errors::{RError, RResult};
use crate::errors::RResult;
// Function to initialize the logger based on the provided configuration
const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];
@ -119,9 +120,9 @@ impl LoggerService {
let file_appender_layer = if file_appender_config.non_blocking {
let (non_blocking_file_appender, work_guard) =
tracing_appender::non_blocking(file_appender);
NONBLOCKING_WORK_GUARD_KEEP
.set(work_guard)
.map_err(|_| RError::CustomMessageStr("cannot lock for appender"))?;
if NONBLOCKING_WORK_GUARD_KEEP.set(work_guard).is_err() {
whatever!("cannot lock for appender");
};
Self::init_layer(
non_blocking_file_appender,
&file_appender_config.format,

View File

@ -191,7 +191,7 @@ impl ActiveModel {
pub fn from_mikan_episode_meta(
ctx: &dyn AppContextTrait,
creation: MikanEpsiodeCreation,
) -> color_eyre::eyre::Result<Self> {
) -> RResult<Self> {
let item = creation.episode;
let bgm = creation.bangumi;
let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title)

View File

@ -142,7 +142,7 @@ impl StorageService {
subscriber_pid: &str,
bucket: Option<&str>,
filename: &str,
) -> color_eyre::eyre::Result<Buffer> {
) -> RResult<Buffer> {
match content_category {
StorageContentCategory::Image => {
let fullname = [

View File

@ -1,8 +1,6 @@
use color_eyre::eyre;
use crate::{errors::RResult, fetch::HttpClient};
use crate::fetch::HttpClient;
pub fn build_testing_http_client() -> eyre::Result<HttpClient> {
pub fn build_testing_http_client() -> RResult<HttpClient> {
let mikan_client = HttpClient::default();
Ok(mikan_client)
}

View File

@ -2,7 +2,8 @@ use async_trait::async_trait;
use bollard::container::ListContainersOptions;
use itertools::Itertools;
use testcontainers::{
core::logs::consumer::logging_consumer::LoggingConsumer, ContainerRequest, Image, ImageExt,
ContainerRequest, Image, ImageExt, TestcontainersError,
core::logs::consumer::logging_consumer::LoggingConsumer,
};
pub const TESTCONTAINERS_PROJECT_KEY: &str = "tech.enfw.testcontainers.project";
@ -19,7 +20,7 @@ where
container_label: &str,
prune: bool,
force: bool,
) -> color_eyre::eyre::Result<Self>;
) -> Result<Self, TestcontainersError>;
fn with_default_log_consumer(self) -> Self;
}
@ -34,7 +35,7 @@ where
container_label: &str,
prune: bool,
force: bool,
) -> color_eyre::eyre::Result<Self> {
) -> Result<Self, TestcontainersError> {
use std::collections::HashMap;
use bollard::container::PruneContainersOptions;
@ -61,7 +62,8 @@ where
filters: filters.clone(),
..Default::default()
}))
.await?;
.await
.map_err(|err| TestcontainersError::Other(Box::new(err)))?;
let remove_containers = result
.iter()
@ -74,14 +76,16 @@ where
.iter()
.map(|c| client.stop_container(c, None)),
)
.await?;
.await
.map_err(|error| TestcontainersError::Other(Box::new(error)))?;
tracing::warn!(name = "stop running containers", result = ?remove_containers);
}
let result = client
.prune_containers(Some(PruneContainersOptions { filters }))
.await?;
.await
.map_err(|err| TestcontainersError::Other(Box::new(err)))?;
tracing::warn!(name = "prune existed containers", result = ?result);
}

View File

@ -6,12 +6,14 @@ use axum::{
http::request::Parts,
routing::get,
};
use snafu::prelude::*;
use super::core::Controller;
use crate::{
app::AppContextTrait,
auth::{
AuthError, AuthService, AuthServiceTrait,
errors::OidcRequestRedirectUriSnafu,
oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest},
},
errors::RResult,
@ -47,7 +49,8 @@ async fn oidc_auth(
if let AuthService::Oidc(oidc_auth_service) = auth_service {
let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts)
.resolved_origin()
.ok_or_else(|| AuthError::OidcRequestRedirectUriError(url::ParseError::EmptyHost))?;
.ok_or(url::ParseError::EmptyHost)
.context(OidcRequestRedirectUriSnafu)?;
redirect_uri.set_path(&format!("{CONTROLLER_PREFIX}/callback"));

View File

@ -27,6 +27,7 @@ use axum::{
use futures_util::future::BoxFuture;
use ipnetwork::IpNetwork;
use serde::{Deserialize, Serialize};
use snafu::ResultExt;
use tower::{Layer, Service};
use tracing::error;
@ -233,11 +234,13 @@ impl RemoteIPLayer {
proxies
.iter()
.map(|proxy| {
IpNetwork::from_str(proxy).map_err(|err| {
RError::CustomMessageString(format!(
IpNetwork::from_str(proxy)
.boxed()
.with_whatever_context::<_, _, RError>(|_| {
format!(
"remote ip middleare cannot parse trusted proxy \
configuration: `{proxy}`, reason: `{err}`",
))
configuration: `{proxy}`"
)
})
})
.collect::<RResult<Vec<_>>>()
@ -284,8 +287,7 @@ where
let xff_ip = maybe_get_forwarded(req.headers(), layer.trusted_proxies.as_ref());
let remote_ip = xff_ip.map_or_else(
|| {
let ip = req
.extensions()
req.extensions()
.get::<ConnectInfo<SocketAddr>>()
.map_or_else(
|| {
@ -296,8 +298,7 @@ where
RemoteIP::None
},
|info| RemoteIP::Socket(info.ip()),
);
ip
)
},
RemoteIP::Forwarded,
);

View File

@ -18,13 +18,10 @@ use axum::{
use futures_util::future::BoxFuture;
use serde::{Deserialize, Serialize};
use serde_json::{self, json};
use snafu::whatever;
use tower::{Layer, Service};
use crate::{
app::AppContextTrait,
web::middleware::MiddlewareLayer,
errors::{RError, RResult},
};
use crate::{app::AppContextTrait, errors::RResult, web::middleware::MiddlewareLayer};
static PRESETS: OnceLock<HashMap<String, BTreeMap<String, String>>> = OnceLock::new();
fn get_presets() -> &'static HashMap<String, BTreeMap<String, String>> {
@ -115,7 +112,10 @@ impl MiddlewareLayer for SecureHeader {
}
/// Applies the secure headers layer to the application router
fn apply(&self, app: Router<Arc<dyn AppContextTrait>>) -> RResult<Router<Arc<dyn AppContextTrait>>> {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(SecureHeaders::new(self)?))
}
}
@ -128,17 +128,15 @@ impl SecureHeader {
let mut headers = vec![];
let preset = &self.preset;
let p = get_presets().get(preset).ok_or_else(|| {
RError::CustomMessageString(format!(
"secure_headers: a preset named `{preset}` does not exist"
))
})?;
if let Some(p) = get_presets().get(preset) {
Self::push_headers(&mut headers, p)?;
if let Some(overrides) = &self.overrides {
Self::push_headers(&mut headers, overrides)?;
}
Ok(headers)
} else {
whatever!("secure_headers: a preset named `{preset}` does not exist")
}
}
/// Helper function to push headers into a mutable vector.