Compare commits


4 Commits

104 changed files with 2069 additions and 1466 deletions

View File

@@ -2,7 +2,7 @@
recorder-playground = "run -p recorder --example playground -- --environment development"
[build]
rustflags = ["-Zthreads=8"]
rustflags = ["-Zthreads=8", "--cfg", "feature=\"testcontainers\""]
[target.x86_64-unknown-linux-gnu]
linker = "clang"

View File

@@ -5,6 +5,7 @@
"unifiedjs.vscode-mdx",
"mikestead.dotenv",
"christian-kohler.npm-intellisense",
"skellock.just"
"skellock.just",
"zerotaskx.rust-extension-pack"
]
}

Cargo.lock (generated, 650 changes)

File diff suppressed because it is too large

View File

@@ -1,9 +1,52 @@
[workspace]
members = ["apps/recorder"]
members = [
"packages/testing-torrents",
"packages/util",
"packages/fetch",
"packages/downloader",
"apps/recorder",
]
resolver = "2"
[workspace.dependencies]
moka = "0.12"
futures = "0.3"
futures-util = "0.3"
quirks_path = "0.1"
snafu = { version = "0.8", features = ["futures"] }
testcontainers = { version = "0.23.3" }
testcontainers-modules = { version = "0.11.4" }
testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
serde = { version = "1", features = ["derive"] }
tokio = { version = "1", features = ["macros", "fs", "rt-multi-thread"] }
serde_json = "1"
async-trait = "0.1"
tracing = "0.1"
url = "2.5.2"
anyhow = "1"
itertools = "0.14"
chrono = "0.4"
bytes = "1"
serde_with = "3"
regex = "1.11"
lazy_static = "1.5"
axum = { version = "0.8.3", features = ["macros"] }
reqwest = { version = "0.12", default-features = false, features = [
"charset",
"http2",
"json",
"macos-system-configuration",
"rustls-tls",
"cookies",
] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
axum-extra = "0.10"
testing-torrents = { path = "./packages/testing-torrents" }
util = { path = "./packages/util" }
fetch = { path = "./packages/fetch" }
downloader = { path = "./packages/downloader" }
recorder = { path = "./apps/recorder" }
[patch.crates-io]
jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
# [patch."https://github.com/lonelyhentxi/qbit.git"]
# qbit-rs = { path = "./patches/qbit-rs" }

View File

@@ -18,17 +18,36 @@ default = []
testcontainers = [
"dep:testcontainers",
"dep:testcontainers-modules",
"dep:bollard",
"dep:testcontainers-ext",
]
[dependencies]
serde = { workspace = true }
tokio = { workspace = true }
serde_json = { workspace = true }
async-trait = { workspace = true }
testcontainers = { workspace = true, optional = true }
testcontainers-modules = { workspace = true, optional = true }
testcontainers-ext = { workspace = true, optional = true, features = [
"tracing",
] }
tracing = { workspace = true }
axum = { workspace = true }
axum-extra = { workspace = true }
snafu = { workspace = true }
itertools = { workspace = true }
url = { workspace = true }
regex = { workspace = true }
lazy_static = { workspace = true }
quirks_path = { workspace = true }
futures = { workspace = true }
bytes = { workspace = true }
serde_with = { workspace = true }
moka = { workspace = true }
chrono = { workspace = true }
tracing-subscriber = { workspace = true }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
async-trait = "0.1.83"
tracing = "0.1"
chrono = "0.4"
sea-orm = { version = "1.1", features = [
"sqlx-sqlite",
"sqlx-postgres",
@@ -37,62 +56,25 @@ sea-orm = { version = "1.1", features = [
"debug-print",
] }
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
axum = "0.8"
uuid = { version = "1.6.0", features = ["v4"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio-rustls"] }
reqwest = { version = "0.12", default-features = false, features = [
"charset",
"http2",
"json",
"macos-system-configuration",
"rustls-tls",
"cookies",
] }
rss = "2"
bytes = "1.9"
itertools = "0.14"
url = "2.5"
fancy-regex = "0.14"
regex = "1.11"
lazy_static = "1.5"
maplit = "1.0.2"
lightningcss = "1.0.0-alpha.61"
lightningcss = "1.0.0-alpha.65"
html-escape = "0.2.13"
opendal = { version = "0.51.0", features = ["default", "services-fs"] }
opendal = { version = "0.53", features = ["default", "services-fs"] }
zune-image = "0.4.15"
once_cell = "1.20.2"
reqwest-middleware = "0.4.0"
reqwest-retry = "0.7.0"
reqwest-tracing = "0.5.5"
scraper = "0.23"
leaky-bucket = "1.1.2"
serde_with = "3"
jwt-authorizer = "0.15.0"
futures = "0.3.31"
librqbit-core = "4"
qbit-rs = { git = "https://github.com/lonelyhentxi/qbit.git", rev = "72d53138ebe", features = [
"default",
"builder",
] }
testcontainers = { version = "0.23.3", features = [
"default",
"properties-config",
"watchdog",
"http_wait",
"reusable-containers",
], optional = true }
testcontainers-modules = { version = "0.11.4", optional = true }
log = "0.4.22"
bollard = { version = "0.18", optional = true }
log = "0.4"
async-graphql = { version = "7", features = [] }
async-graphql-axum = "7"
fastrand = "2.3.0"
seaography = { version = "1.1" }
quirks_path = "0.1.1"
base64 = "0.22.1"
tower = "0.5.2"
axum-extra = "0.10"
tower-http = { version = "0.6", features = [
"trace",
"catch-panic",
@@ -105,33 +87,20 @@ tower-http = { version = "0.6", features = [
] }
tera = "1.20.0"
openidconnect = { version = "4", features = ["rustls-tls"] }
http-cache-reqwest = { version = "0.15", features = [
"manager-cacache",
"manager-moka",
] }
moka = "0.12.10"
http-cache = { version = "0.20.0", features = [
"cacache-tokio",
"manager-cacache",
"manager-moka",
], default-features = false }
http-cache-semantics = "2.1.0"
dotenv = "0.15.0"
http = "1.2.0"
cookie = "0.18.1"
async-stream = "0.3.6"
serde_variant = "0.1.3"
tracing-appender = "0.2.3"
clap = "4.5.31"
futures-util = "0.3.31"
ipnetwork = "0.21.1"
librqbit = "8.0.0"
typed-builder = "0.21.0"
snafu = { version = "0.8.5", features = ["futures"] }
anyhow = "1.0.97"
serde_yaml = "0.9.34"
merge-struct = "0.1.0"
serde-value = "0.7.0"
downloader = { workspace = true }
util = { workspace = true }
fetch = { workspace = true }
string-interner = "0.19.0"
[dev-dependencies]
serial_test = "3"

View File

@@ -1,8 +1,8 @@
use recorder::errors::app_error::RResult;
use recorder::errors::RecorderResult;
// #![allow(unused_imports)]
// use recorder::{
// app::{AppContext, AppContextTrait},
// errors::RResult,
// errors::RecorderResult,
// migrations::Migrator,
// models::{
// subscribers::SEED_SUBSCRIBER,
@@ -12,7 +12,7 @@ use recorder::errors::app_error::RResult;
// use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
// use sea_orm_migration::MigratorTrait;
// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RResult<()> {
// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RecorderResult<()> {
// let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
// // let rss_link =
@@ -44,13 +44,13 @@ use recorder::errors::app_error::RResult;
// }
// #[tokio::main]
// async fn main() -> RResult<()> {
// async fn main() -> RecorderResult<()> {
// pull_mikan_bangumi_rss(&ctx).await?;
// Ok(())
// }
#[tokio::main]
async fn main() -> RResult<()> {
async fn main() -> RecorderResult<()> {
Ok(())
}

View File

@@ -3,7 +3,7 @@ use std::sync::Arc;
use clap::{Parser, command};
use super::{AppContext, core::App, env::Environment};
use crate::{app::config::AppConfig, errors::app_error::RResult};
use crate::{app::config::AppConfig, errors::RecorderResult};
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
@@ -33,7 +33,7 @@ pub struct AppBuilder {
}
impl AppBuilder {
pub async fn from_main_cli(environment: Option<Environment>) -> RResult<Self> {
pub async fn from_main_cli(environment: Option<Environment>) -> RecorderResult<Self> {
let args = MainCliArgs::parse();
let environment = environment.unwrap_or_else(|| {
@@ -68,7 +68,7 @@ impl AppBuilder {
Ok(builder)
}
pub async fn build(self) -> RResult<App> {
pub async fn build(self) -> RecorderResult<App> {
AppConfig::load_dotenv(
&self.environment,
&self.working_dir,

View File

@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
use super::env::Environment;
use crate::{
auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::app_error::RResult,
auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::RecorderResult,
extract::mikan::MikanConfig, graphql::GraphQLConfig, logger::LoggerConfig,
storage::StorageConfig, web::WebServerConfig,
};
@@ -64,7 +64,7 @@ impl AppConfig {
fig: Figment,
filepath: impl AsRef<Path>,
ext: &str,
) -> RResult<Figment> {
) -> RecorderResult<Figment> {
let content = fs::read_to_string(filepath)?;
let rendered = tera::Tera::one_off(
@@ -85,7 +85,7 @@ impl AppConfig {
environment: &Environment,
working_dir: &str,
dotenv_file: Option<&str>,
) -> RResult<()> {
) -> RecorderResult<()> {
let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
vec![dotenv_file]
} else {
@@ -124,7 +124,7 @@ impl AppConfig {
environment: &Environment,
working_dir: &str,
config_file: Option<&str>,
) -> RResult<AppConfig> {
) -> RecorderResult<AppConfig> {
let try_config_file_or_dirs = if config_file.is_some() {
vec![config_file]
} else {

View File

@@ -1,6 +1,6 @@
use super::{Environment, config::AppConfig};
use crate::{
auth::AuthService, cache::CacheService, database::DatabaseService, errors::app_error::RResult,
auth::AuthService, cache::CacheService, database::DatabaseService, errors::RecorderResult,
extract::mikan::MikanClient, graphql::GraphQLService, logger::LoggerService,
storage::StorageService,
};
@@ -36,7 +36,7 @@ impl AppContext {
environment: Environment,
config: AppConfig,
working_dir: impl ToString,
) -> RResult<Self> {
) -> RecorderResult<Self> {
let config_cloned = config.clone();
let logger = LoggerService::from_config(config.logger).await?;

View File

@@ -1,12 +1,11 @@
use std::{net::SocketAddr, sync::Arc};
use axum::Router;
use futures::try_join;
use tokio::signal;
use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{
errors::app_error::RResult,
errors::RecorderResult,
web::{
controller::{self, core::ControllerTrait},
middleware::default_middleware_stack,
@@ -23,7 +22,7 @@ impl App {
AppBuilder::default()
}
pub async fn serve(&self) -> RResult<()> {
pub async fn serve(&self) -> RecorderResult<()> {
let context = &self.context;
let config = context.config();
let listener = tokio::net::TcpListener::bind(&format!(
@@ -34,7 +33,7 @@ impl App {
let mut router = Router::<Arc<dyn AppContextTrait>>::new();
let (graphql_c, oidc_c, metadata_c) = try_join!(
let (graphql_c, oidc_c, metadata_c) = futures::try_join!(
controller::graphql::create(context.clone()),
controller::oidc::create(context.clone()),
controller::metadata::create(context.clone())

View File

@@ -1,7 +1,7 @@
use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts};
use base64::{self, Engine};
use reqwest::header::AUTHORIZATION;
use http::header::AUTHORIZATION;
use super::{
config::BasicAuthConfig,

View File

@@ -4,6 +4,7 @@ use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
use fetch::HttpClientError;
use openidconnect::{
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
StandardErrorResponse, core::CoreErrorResponseType,
@@ -11,7 +12,7 @@ use openidconnect::{
use serde::{Deserialize, Serialize};
use snafu::prelude::*;
use crate::{fetch::HttpClientError, models::auth::AuthType};
use crate::models::auth::AuthType;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]

View File

@@ -1,10 +1,17 @@
use std::{
collections::{HashMap, HashSet},
future::Future,
ops::Deref,
pin::Pin,
sync::Arc,
};
use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts};
use axum::{
http,
http::{HeaderValue, request::Parts},
};
use fetch::{HttpClient, client::HttpClientError};
use itertools::Itertools;
use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer};
use moka::future::Cache;
@@ -24,9 +31,49 @@ use super::{
errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{
app::AppContextTrait, errors::app_error::RError, fetch::HttpClient, models::auth::AuthType,
};
use crate::{app::AppContextTrait, errors::RecorderError, models::auth::AuthType};
pub struct OidcHttpClient(pub Arc<HttpClient>);
impl Deref for OidcHttpClient {
type Target = HttpClient;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<'c> openidconnect::AsyncHttpClient<'c> for OidcHttpClient {
type Error = HttpClientError;
#[cfg(target_arch = "wasm32")]
type Future =
Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + 'c>>;
#[cfg(not(target_arch = "wasm32"))]
type Future =
Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;
fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
Box::pin(async move {
let response = self.execute(request.try_into()?).await?;
let mut builder = http::Response::builder().status(response.status());
#[cfg(not(target_arch = "wasm32"))]
{
builder = builder.version(response.version());
}
for (name, value) in response.headers().iter() {
builder = builder.header(name, value);
}
builder
.body(response.bytes().await?.to_vec())
.map_err(HttpClientError::from)
})
}
}
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OidcAuthClaims {
@@ -118,7 +165,7 @@ pub struct OidcAuthCallbackPayload {
pub struct OidcAuthService {
pub config: OidcAuthConfig,
pub api_authorizer: Authorizer<OidcAuthClaims>,
pub oidc_provider_client: HttpClient,
pub oidc_provider_client: Arc<HttpClient>,
pub oidc_request_cache: Cache<String, OidcAuthRequest>,
}
@@ -127,9 +174,10 @@ impl OidcAuthService {
&self,
redirect_uri: &str,
) -> Result<OidcAuthRequest, AuthError> {
let oidc_provider_client = OidcHttpClient(self.oidc_provider_client.clone());
let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
&self.oidc_provider_client,
&oidc_provider_client,
)
.await?;
@@ -203,6 +251,7 @@ impl OidcAuthService {
&self,
query: OidcAuthCallbackQuery,
) -> Result<OidcAuthCallbackPayload, AuthError> {
let oidc_http_client = OidcHttpClient(self.oidc_provider_client.clone());
let csrf_token = query.state.ok_or(AuthError::OidcInvalidStateError)?;
let code = query.code.ok_or(AuthError::OidcInvalidCodeError)?;
@@ -211,7 +260,7 @@ impl OidcAuthService {
let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
&self.oidc_provider_client,
&oidc_http_client,
)
.await?;
@@ -227,7 +276,7 @@ impl OidcAuthService {
let token_response = oidc_client
.exchange_code(AuthorizationCode::new(code))?
.set_pkce_verifier(pkce_verifier)
.request_async(&HttpClient::default())
.request_async(&oidc_http_client)
.await?;
let id_token = token_response
@@ -312,7 +361,7 @@ impl AuthServiceTrait for OidcAuthService {
}
}
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(RError::DbError {
Err(RecorderError::DbError {
source: DbErr::RecordNotFound(..),
}) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
r => r,

View File

@@ -6,9 +6,13 @@ use axum::{
http::request::Parts,
response::{IntoResponse as _, Response},
};
use fetch::{
HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
};
use http::header::HeaderValue;
use jwt_authorizer::{JwtAuthorizer, Validation};
use moka::future::Cache;
use reqwest::header::HeaderValue;
use snafu::prelude::*;
use super::{
@@ -17,14 +21,7 @@ use super::{
errors::{AuthError, OidcProviderHttpClientSnafu},
oidc::{OidcAuthClaims, OidcAuthService},
};
use crate::{
app::AppContextTrait,
fetch::{
HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
},
models::auth::AuthType,
};
use crate::{app::AppContextTrait, models::auth::AuthType};
#[derive(Clone, Debug)]
pub struct AuthUserInfo {
@@ -89,7 +86,7 @@ impl AuthService {
AuthService::Oidc(Box::new(OidcAuthService {
config,
api_authorizer,
oidc_provider_client,
oidc_provider_client: Arc::new(oidc_provider_client),
oidc_request_cache: Cache::builder()
.time_to_live(Duration::from_mins(5))
.name("oidc_request_cache")

View File

@@ -1,7 +1,7 @@
use recorder::{app::AppBuilder, errors::app_error::RResult};
use recorder::{app::AppBuilder, errors::RecorderResult};
#[tokio::main]
async fn main() -> RResult<()> {
async fn main() -> RecorderResult<()> {
let builder = AppBuilder::from_main_cli(None).await?;
let app = builder.build().await?;

View File

@@ -1,10 +1,10 @@
use super::CacheConfig;
use crate::errors::app_error::RResult;
use crate::errors::RecorderResult;
pub struct CacheService {}
impl CacheService {
pub async fn from_config(_config: CacheConfig) -> RResult<Self> {
pub async fn from_config(_config: CacheConfig) -> RecorderResult<Self> {
Ok(Self {})
}
}

View File

@@ -7,14 +7,14 @@ use sea_orm::{
use sea_orm_migration::MigratorTrait;
use super::DatabaseConfig;
use crate::{errors::app_error::RResult, migrations::Migrator};
use crate::{errors::RecorderResult, migrations::Migrator};
pub struct DatabaseService {
connection: DatabaseConnection,
}
impl DatabaseService {
pub async fn from_config(config: DatabaseConfig) -> RResult<Self> {
pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> {
let mut opt = ConnectOptions::new(&config.uri);
opt.max_connections(config.max_connections)
.min_connections(config.min_connections)

View File

@@ -1,12 +0,0 @@
pub mod bittorrent;
pub mod core;
pub mod errors;
pub mod qbit;
pub mod rqbit;
pub mod utils;
pub use errors::DownloaderError;
pub use qbit::{
QBittorrentDownloader, QBittorrentDownloaderCreation, QbitTorrent, QbitTorrentContent,
QbitTorrentFile, QbitTorrentFilter, QbitTorrentSource,
};

View File

@@ -1 +0,0 @@

View File

@@ -4,6 +4,7 @@ use axum::{
Json,
response::{IntoResponse, Response},
};
use fetch::{FetchError, HttpClientError};
use http::StatusCode;
use serde::{Deserialize, Deserializer, Serialize};
use snafu::Snafu;
@@ -12,12 +13,11 @@ use crate::{
auth::AuthError,
downloader::DownloaderError,
errors::{OptDynErr, response::StandardErrorResponse},
fetch::HttpClientError,
};
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RError {
pub enum RecorderError {
#[snafu(transparent, context(false))]
FancyRegexError {
#[snafu(source(from(fancy_regex::Error, Box::new)))]
@@ -53,8 +53,6 @@ pub enum RError {
IOError { source: std::io::Error },
#[snafu(transparent)]
DbError { source: sea_orm::DbErr },
#[snafu(transparent)]
CookieParseError { source: cookie::ParseError },
#[snafu(transparent, context(false))]
FigmentError {
#[snafu(source(from(figment::Error, Box::new)))]
@@ -63,10 +61,6 @@ pub enum RError {
#[snafu(transparent)]
SerdeJsonError { source: serde_json::Error },
#[snafu(transparent)]
ReqwestMiddlewareError { source: reqwest_middleware::Error },
#[snafu(transparent)]
ReqwestError { source: reqwest::Error },
#[snafu(transparent)]
ParseUrlError { source: url::ParseError },
#[snafu(display("{source}"), context(false))]
OpenDALError {
@@ -106,6 +100,8 @@ pub enum RError {
},
#[snafu(display("Model Entity {entity} not found"))]
ModelEntityNotFound { entity: Cow<'static, str> },
#[snafu(transparent)]
FetchError { source: FetchError },
#[snafu(display("{message}"))]
Whatever {
message: String,
@@ -114,7 +110,7 @@ pub enum RError {
},
}
impl RError {
impl RecorderError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
@@ -146,7 +142,7 @@ impl RError {
}
}
impl snafu::FromString for RError {
impl snafu::FromString for RecorderError {
type Source = Box<dyn std::error::Error + Send + Sync>;
fn without_source(message: String) -> Self {
@@ -164,7 +160,7 @@ impl snafu::FromString for RError {
}
}
impl IntoResponse for RError {
impl IntoResponse for RecorderError {
fn into_response(self) -> Response {
match self {
Self::AuthError { source: auth_error } => auth_error.into_response(),
@@ -177,7 +173,7 @@ impl IntoResponse for RError {
}
}
impl Serialize for RError {
impl Serialize for RecorderError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
@@ -186,7 +182,7 @@ impl Serialize for RError {
}
}
impl<'de> Deserialize<'de> for RError {
impl<'de> Deserialize<'de> for RecorderError {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
@@ -199,4 +195,4 @@ impl<'de> Deserialize<'de> for RError {
}
}
pub type RResult<T> = Result<T, RError>;
pub type RecorderResult<T> = Result<T, RecorderError>;

View File

@@ -1,9 +0,0 @@
pub trait RAnyhowResultExt<T>: snafu::ResultExt<T, anyhow::Error> {
fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>>;
}
impl<T> RAnyhowResultExt<T> for Result<T, anyhow::Error> {
fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>> {
self.map_err(|e| e.into())
}
}

View File

@@ -1,9 +1,6 @@
pub mod alias;
pub mod app_error;
pub mod ext;
pub mod response;
pub use alias::OptDynErr;
pub use app_error::*;
pub use ext::RAnyhowResultExt;
pub use app_error::{RecorderError, RecorderResult};
pub use response::StandardErrorResponse;
pub use util::errors::OptDynErr;

View File

@@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};
use crate::{
errors::app_error::{RError, RResult},
errors::app_error::{RecorderError, RecorderResult},
extract::defs::SUBTITLE_LANG,
};
@@ -104,10 +104,10 @@ pub fn parse_episode_media_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> RResult<TorrentEpisodeMediaMeta> {
) -> RecorderResult<TorrentEpisodeMediaMeta> {
let media_name = torrent_path
.file_name()
.with_whatever_context::<_, _, RError>(|| {
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {}", torrent_path)
})?;
let mut match_obj = None;
@@ -124,7 +124,7 @@ pub fn parse_episode_media_meta_from_torrent(
if let Some(match_obj) = match_obj {
let group_season_and_title = match_obj
.get(1)
.whatever_context::<_, RError>("should have 1 group")?
.whatever_context::<_, RecorderError>("should have 1 group")?
.as_str();
let (fansub, season_and_title) = get_fansub(group_season_and_title);
let (title, season) = if let Some(season) = season {
@@ -135,7 +135,7 @@ pub fn parse_episode_media_meta_from_torrent(
};
let episode_index = match_obj
.get(2)
.whatever_context::<_, RError>("should have 2 group")?
.whatever_context::<_, RecorderError>("should have 2 group")?
.as_str()
.parse::<i32>()
.unwrap_or(1);
@@ -163,11 +163,11 @@ pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> RResult<TorrentEpisodeSubtitleMeta> {
) -> RecorderResult<TorrentEpisodeSubtitleMeta> {
let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
let media_name = torrent_path
.file_name()
.with_whatever_context::<_, _, RError>(|| {
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {}", torrent_path)
})?;

View File

@@ -1,6 +1,3 @@
pub mod core;
pub mod extract;
pub use core::{BITTORRENT_MIME_TYPE, MAGNET_SCHEMA};
pub use extract::*;

View File

@@ -1,15 +1,11 @@
use std::{fmt::Debug, ops::Deref};
use reqwest_middleware::ClientWithMiddleware;
use fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth};
use serde::{Deserialize, Serialize};
use url::Url;
use super::MikanConfig;
use crate::{
errors::app_error::RError,
fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth},
};
use crate::errors::RecorderError;
#[derive(Default, Clone, Deserialize, Serialize)]
pub struct MikanAuthSecrecy {
pub cookie: String,
@@ -26,8 +22,9 @@ impl Debug for MikanAuthSecrecy {
}
impl MikanAuthSecrecy {
pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RError> {
pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RecorderError> {
HttpClientCookiesAuth::from_cookies(&self.cookie, url, self.user_agent)
.map_err(RecorderError::from)
}
}
@@ -38,7 +35,7 @@ pub struct MikanClient {
}
impl MikanClient {
pub async fn from_config(config: MikanConfig) -> Result<Self, RError> {
pub async fn from_config(config: MikanConfig) -> Result<Self, RecorderError> {
let http_client = HttpClient::from_config(config.http_client)?;
let base_url = config.base_url;
Ok(Self {
@@ -47,7 +44,7 @@ impl MikanClient {
})
}
pub fn fork_with_auth(&self, secrecy: Option<MikanAuthSecrecy>) -> Result<Self, RError> {
pub fn fork_with_auth(&self, secrecy: Option<MikanAuthSecrecy>) -> Result<Self, RecorderError> {
let mut fork = self.http_client.fork();
if let Some(secrecy) = secrecy {
@@ -71,10 +68,10 @@ impl MikanClient {
}
impl Deref for MikanClient {
type Target = ClientWithMiddleware;
type Target = fetch::reqwest_middleware::ClientWithMiddleware;
fn deref(&self) -> &Self::Target {
self.http_client.deref()
&self.http_client
}
}

View File

@@ -1,8 +1,7 @@
use fetch::HttpClientConfig;
use serde::{Deserialize, Serialize};
use url::Url;
use crate::fetch::HttpClientConfig;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct MikanConfig {
pub http_client: HttpClientConfig,

View File

@@ -1,22 +1,19 @@
use std::borrow::Cow;
use chrono::DateTime;
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
use fetch::{FetchError, IntoUrl, bytes::fetch_bytes};
use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
use tracing::instrument;
use url::Url;
use crate::{
errors::app_error::{RError, RResult},
extract::{
bittorrent::BITTORRENT_MIME_TYPE,
mikan::{
MikanClient,
web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
},
errors::app_error::{RecorderError, RecorderResult},
extract::mikan::{
MikanClient,
web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
},
fetch::bytes::fetch_bytes,
};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@@ -102,28 +99,28 @@ impl MikanRssChannel {
}
impl TryFrom<rss::Item> for MikanRssItem {
type Error = RError;
type Error = RecorderError;
fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
let enclosure = item
.enclosure
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure")))?;
let enclosure = item.enclosure.ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure"))
})?;
let mime_type = enclosure.mime_type;
if mime_type != BITTORRENT_MIME_TYPE {
return Err(RError::MimeError {
return Err(RecorderError::MimeError {
expected: String::from(BITTORRENT_MIME_TYPE),
found: mime_type.to_string(),
desc: String::from("MikanRssItem"),
});
}
let title = item
.title
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?;
let title = item.title.ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title"))
})?;
let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
RError::from_mikan_rss_invalid_field_and_source(
RecorderError::from_mikan_rss_invalid_field_and_source(
"enclosure_url:enclosure.link".into(),
err,
)
@@ -132,12 +129,14 @@ impl TryFrom<rss::Item> for MikanRssItem {
let homepage = item
.link
.and_then(|link| Url::parse(&link).ok())
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link")))?;
.ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link"))
})?;
let MikanEpisodeHomepage {
mikan_episode_id, ..
} = extract_mikan_episode_id_from_homepage(&homepage).ok_or_else(|| {
RError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
})?;
Ok(MikanRssItem {
@@ -170,8 +169,8 @@ pub fn build_mikan_bangumi_rss_link(
mikan_base_url: impl IntoUrl,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> RResult<Url> {
let mut url = mikan_base_url.into_url()?;
) -> RecorderResult<Url> {
let mut url = mikan_base_url.into_url().map_err(FetchError::from)?;
url.set_path("/RSS/Bangumi");
url.query_pairs_mut()
.append_pair("bangumiId", mikan_bangumi_id);
@@ -185,7 +184,7 @@ pub fn build_mikan_bangumi_rss_link(
pub fn build_mikan_subscriber_aggregation_rss_link(
mikan_base_url: &str,
mikan_aggregation_id: &str,
) -> RResult<Url> {
) -> RecorderResult<Url> {
let mut url = Url::parse(mikan_base_url)?;
url.set_path("/RSS/MyBangumi");
url.query_pairs_mut()
@@ -227,7 +226,7 @@ pub fn extract_mikan_subscriber_aggregation_id_from_rss_link(
pub async fn extract_mikan_rss_channel_from_rss_link(
http_client: &MikanClient,
channel_rss_link: impl IntoUrl,
) -> RResult<MikanRssChannel> {
) -> RecorderResult<MikanRssChannel> {
let bytes = fetch_bytes(http_client, channel_rss_link.as_str()).await?;
let channel = rss::Channel::read_from(&bytes[..])?;
@@ -326,7 +325,7 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
},
))
} else {
Err(RError::MikanRssInvalidFormatError).inspect_err(|error| {
Err(RecorderError::MikanRssInvalidFormatError).inspect_err(|error| {
tracing::warn!(error = %error);
})
}
@@ -336,24 +335,22 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
mod tests {
use std::assert_matches::assert_matches;
use downloader::bittorrent::BITTORRENT_MIME_TYPE;
use rstest::rstest;
use url::Url;
use crate::{
errors::app_error::RResult,
extract::{
bittorrent::BITTORRENT_MIME_TYPE,
mikan::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
extract_mikan_rss_channel_from_rss_link,
},
errors::RecorderResult,
extract::mikan::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
extract_mikan_rss_channel_from_rss_link,
},
test_utils::mikan::build_testing_mikan_client,
};
#[rstest]
#[tokio::test]
async fn test_parse_mikan_rss_channel_from_rss_link() -> RResult<()> {
async fn test_parse_mikan_rss_channel_from_rss_link() -> RecorderResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;

View File

@@ -2,6 +2,7 @@ use std::{borrow::Cow, sync::Arc};
use async_stream::try_stream;
use bytes::Bytes;
use fetch::{html::fetch_html, image::fetch_image};
use futures::Stream;
use itertools::Itertools;
use scraper::{Html, Selector};
@@ -15,12 +16,11 @@ use super::{
};
use crate::{
app::AppContextTrait,
errors::app_error::{RError, RResult},
errors::app_error::{RecorderResult, RecorderError},
extract::{
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str,
},
fetch::{html::fetch_html, image::fetch_image},
storage::StorageContentCategory,
};
@@ -115,7 +115,7 @@ pub fn extract_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeH
pub async fn extract_mikan_poster_meta_from_src(
http_client: &MikanClient,
origin_poster_src_url: Url,
) -> Result<MikanBangumiPosterMeta, RError> {
) -> Result<MikanBangumiPosterMeta, RecorderError> {
let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?;
Ok(MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
@@ -128,7 +128,7 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
ctx: &dyn AppContextTrait,
origin_poster_src_url: Url,
subscriber_id: i32,
) -> RResult<MikanBangumiPosterMeta> {
) -> RecorderResult<MikanBangumiPosterMeta> {
let dal_client = ctx.storage();
let mikan_client = ctx.mikan();
if let Some(poster_src) = dal_client
@@ -170,7 +170,7 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
pub async fn extract_mikan_episode_meta_from_episode_homepage(
http_client: &MikanClient,
mikan_episode_homepage_url: Url,
) -> Result<MikanEpisodeMeta, RError> {
) -> Result<MikanEpisodeMeta, RecorderError> {
let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?;
@@ -186,7 +186,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
.select(bangumi_title_selector)
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@@ -201,18 +201,22 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
.and_then(|el| el.value().attr("href"))
.and_then(|s| mikan_episode_homepage_url.join(s).ok())
.and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
})
.inspect_err(|error| tracing::error!(error = %error))?;
let mikan_fansub_id = mikan_fansub_id
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id")))
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id"))
})
.inspect_err(|error| tracing::error!(error = %error))?;
let episode_title = html
.select(&Selector::parse("title").unwrap())
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@@ -220,7 +224,9 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
let MikanEpisodeHomepage {
mikan_episode_id, ..
} = extract_mikan_episode_id_from_homepage(&mikan_episode_homepage_url)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id")))
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
})
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@@ -232,7 +238,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
)
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@@ -275,7 +281,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
http_client: &MikanClient,
mikan_bangumi_homepage_url: Url,
) -> Result<MikanBangumiMeta, RError> {
) -> Result<MikanBangumiMeta, RecorderError> {
let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?;
let html = Html::parse_document(&content);
@@ -289,7 +295,7 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
.select(bangumi_title_selector)
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.inspect_err(|error| tracing::warn!(error = %error))?;
let mikan_bangumi_id = html
@@ -303,7 +309,9 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
mikan_bangumi_id, ..
}| mikan_bangumi_id,
)
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
})
.inspect_err(|error| tracing::error!(error = %error))?;
let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
@@ -353,8 +361,8 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
context: Arc<dyn AppContextTrait>,
my_bangumi_page_url: Url,
auth_secrecy: Option<MikanAuthSecrecy>,
history: &[Arc<RResult<MikanBangumiMeta>>],
) -> impl Stream<Item = RResult<MikanBangumiMeta>> {
history: &[Arc<RecorderResult<MikanBangumiMeta>>],
) -> impl Stream<Item = RecorderResult<MikanBangumiMeta>> {
try_stream! {
let http_client = &context.mikan().fork_with_auth(auth_secrecy.clone())?;
@@ -511,7 +519,7 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_poster_from_src(before_each: ()) -> RResult<()> {
async fn test_extract_mikan_poster_from_src(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@@ -542,7 +550,7 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_episode(before_each: ()) -> RResult<()> {
async fn test_extract_mikan_episode(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@@ -582,7 +590,7 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RResult<()> {
async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@@ -619,7 +627,7 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RResult<()> {
async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RecorderResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;

View File

@@ -10,7 +10,7 @@ use serde::{Deserialize, Serialize};
use snafu::whatever;
use crate::{
errors::app_error::RResult,
errors::RecorderResult,
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
};
@@ -75,7 +75,7 @@ fn replace_ch_bracket_to_en(raw_name: &str) -> String {
raw_name.replace('【', "[").replace('】', "]")
}
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RResult<String> {
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RecorderResult<String> {
let raw_without_fansub = if let Some(fansub) = fansub {
let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
fan_sub_re.replace_all(title_body, "")
@@ -263,7 +263,7 @@ pub fn check_is_movie(title: &str) -> bool {
MOVIE_TITLE_RE.is_match(title)
}
pub fn parse_episode_meta_from_raw_name(s: &str) -> RResult<RawEpisodeMeta> {
pub fn parse_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
let raw_title = s.trim();
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
let fansub = extract_fansub(&raw_title_without_ch_brackets);

View File

@@ -1,36 +0,0 @@
use std::{future::Future, pin::Pin};
use axum::http;
use super::{HttpClient, client::HttpClientError};
impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
type Error = HttpClientError;
#[cfg(target_arch = "wasm32")]
type Future = Pin<Box<dyn Future<Output = Result<HttpResponse, Self::Error>> + 'c>>;
#[cfg(not(target_arch = "wasm32"))]
type Future =
Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;
fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
Box::pin(async move {
let response = self.execute(request.try_into()?).await?;
let mut builder = http::Response::builder().status(response.status());
#[cfg(not(target_arch = "wasm32"))]
{
builder = builder.version(response.version());
}
for (name, value) in response.headers().iter() {
builder = builder.header(name, value);
}
builder
.body(response.bytes().await?.to_vec())
.map_err(HttpClientError::from)
})
}
}

View File

@@ -2,7 +2,7 @@ use async_graphql::dynamic::Schema;
use sea_orm::DatabaseConnection;
use super::{config::GraphQLConfig, schema_root};
use crate::errors::app_error::RResult;
use crate::errors::RecorderResult;
#[derive(Debug)]
pub struct GraphQLService {
@@ -13,7 +13,7 @@ impl GraphQLService {
pub async fn from_config_and_database(
config: GraphQLConfig,
db: DatabaseConnection,
) -> RResult<Self> {
) -> RecorderResult<Self> {
let schema = schema_root::schema(
db,
config.depth_limit.and_then(|l| l.into()),

View File

@@ -5,19 +5,18 @@
impl_trait_in_bindings,
iterator_try_collect,
async_fn_traits,
let_chains,
error_generic_member_access
error_generic_member_access,
associated_type_defaults,
let_chains
)]
#![feature(associated_type_defaults)]
pub use downloader;
pub mod app;
pub mod auth;
pub mod cache;
pub mod database;
pub mod downloader;
pub mod errors;
pub mod extract;
pub mod fetch;
pub mod graphql;
pub mod logger;
pub mod migrations;
@@ -26,5 +25,4 @@ pub mod storage;
pub mod tasks;
#[cfg(test)]
pub mod test_utils;
pub mod utils;
pub mod web;

View File

@@ -10,7 +10,7 @@ use tracing_subscriber::{
};
use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
use crate::errors::app_error::RResult;
use crate::errors::RecorderResult;
// Function to initialize the logger based on the provided configuration
const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];
@@ -74,7 +74,7 @@ impl LoggerService {
.expect("logger initialization failed")
}
pub async fn from_config(config: LoggerConfig) -> RResult<Self> {
pub async fn from_config(config: LoggerConfig) -> RecorderResult<Self> {
let mut layers: Vec<Box<dyn Layer<Registry> + Sync + Send>> = Vec::new();
if let Some(file_appender_config) = config.file_appender.as_ref() {

View File

@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use super::subscribers::{self, SEED_SUBSCRIBER};
use crate::{
app::AppContextTrait,
errors::app_error::{RError, RResult},
errors::app_error::{RecorderError, RecorderResult},
};
#[derive(
@@ -57,17 +57,17 @@ impl Related<super::subscribers::Entity> for Entity {
impl ActiveModelBehavior for ActiveModel {}
impl Model {
pub async fn find_by_pid(ctx: &dyn AppContextTrait, pid: &str) -> RResult<Self> {
pub async fn find_by_pid(ctx: &dyn AppContextTrait, pid: &str) -> RecorderResult<Self> {
let db = ctx.db();
let subscriber_auth = Entity::find()
.filter(Column::Pid.eq(pid))
.one(db)
.await?
.ok_or_else(|| RError::from_db_record_not_found("auth::find_by_pid"))?;
.ok_or_else(|| RecorderError::from_db_record_not_found("auth::find_by_pid"))?;
Ok(subscriber_auth)
}
pub async fn create_from_oidc(ctx: &dyn AppContextTrait, sub: String) -> RResult<Self> {
pub async fn create_from_oidc(ctx: &dyn AppContextTrait, sub: String) -> RecorderResult<Self> {
let db = ctx.db();
let txn = db.begin().await?;

View File

@@ -4,7 +4,7 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::O
use serde::{Deserialize, Serialize};
use super::subscription_bangumi;
use crate::{app::AppContextTrait, errors::app_error::RResult};
use crate::{app::AppContextTrait, errors::RecorderResult};
#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
@@ -119,9 +119,9 @@ impl Model {
mikan_bangumi_id: String,
mikan_fansub_id: String,
f: F,
) -> RResult<Model>
) -> RecorderResult<Model>
where
F: AsyncFnOnce(&mut ActiveModel) -> RResult<()>,
F: AsyncFnOnce(&mut ActiveModel) -> RecorderResult<()>,
{
let db = ctx.db();
if let Some(existed) = Entity::find()

View File

@@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use super::{bangumi, query::InsertManyReturningExt, subscription_episode};
use crate::{
app::AppContextTrait,
errors::app_error::RResult,
errors::RecorderResult,
extract::{
mikan::{MikanEpisodeMeta, build_mikan_episode_homepage},
rawname::parse_episode_meta_from_raw_name,
@@ -140,7 +140,7 @@ impl Model {
subscriber_id: i32,
subscription_id: i32,
creations: impl IntoIterator<Item = MikanEpsiodeCreation>,
) -> RResult<()> {
) -> RecorderResult<()> {
let db = ctx.db();
let new_episode_active_modes = creations
.into_iter()
@@ -191,7 +191,7 @@ impl ActiveModel {
pub fn from_mikan_episode_meta(
ctx: &dyn AppContextTrait,
creation: MikanEpsiodeCreation,
) -> RResult<Self> {
) -> RecorderResult<Self> {
let item = creation.episode;
let bgm = creation.bangumi;
let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title)

View File

@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::app_error::{RError, RResult},
errors::app_error::{RecorderResult, RecorderError},
};
pub const SEED_SUBSCRIBER: &str = "konobangu";
@@ -95,22 +95,22 @@ pub struct SubscriberIdParams {
impl ActiveModelBehavior for ActiveModel {}
impl Model {
pub async fn find_seed_subscriber_id(ctx: &dyn AppContextTrait) -> RResult<i32> {
pub async fn find_seed_subscriber_id(ctx: &dyn AppContextTrait) -> RecorderResult<i32> {
let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER).await?;
Ok(subscriber_auth.subscriber_id)
}
pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RResult<Self> {
pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RecorderResult<Self> {
let db = ctx.db();
let subscriber = Entity::find_by_id(id)
.one(db)
.await?
.ok_or_else(|| RError::from_db_record_not_found("subscriptions::find_by_id"))?;
.ok_or_else(|| RecorderError::from_db_record_not_found("subscriptions::find_by_id"))?;
Ok(subscriber)
}
pub async fn create_root(ctx: &dyn AppContextTrait) -> RResult<Self> {
pub async fn create_root(ctx: &dyn AppContextTrait) -> RecorderResult<Self> {
let db = ctx.db();
let txn = db.begin().await?;

View File

@@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize};
use super::{bangumi, episodes, query::filter_values_in};
use crate::{
app::AppContextTrait,
errors::app_error::RResult,
errors::RecorderResult,
extract::{
mikan::{
build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link,
@@ -182,7 +182,7 @@ impl Model {
ctx: &dyn AppContextTrait,
create_dto: SubscriptionCreateDto,
subscriber_id: i32,
) -> RResult<Self> {
) -> RecorderResult<Self> {
let db = ctx.db();
let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id);
@@ -193,7 +193,7 @@ impl Model {
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
enabled: bool,
) -> RResult<()> {
) -> RecorderResult<()> {
let db = ctx.db();
Entity::update_many()
.col_expr(Column::Enabled, Expr::value(enabled))
@@ -206,7 +206,7 @@ impl Model {
pub async fn delete_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
) -> RResult<()> {
) -> RecorderResult<()> {
let db = ctx.db();
Entity::delete_many()
.filter(Column::Id.is_in(ids))
@@ -215,7 +215,7 @@ impl Model {
Ok(())
}
pub async fn pull_subscription(&self, ctx: &dyn AppContextTrait) -> RResult<()> {
pub async fn pull_subscription(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
match &self.category {
SubscriptionCategory::Mikan => {
let mikan_client = ctx.mikan();
@@ -287,7 +287,7 @@ impl Model {
self.id,
mikan_bangumi_id.to_string(),
mikan_fansub_id.to_string(),
async |am| -> RResult<()> {
async |am| -> RecorderResult<()> {
let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
mikan_client,
bgm_homepage.clone(),

View File

@@ -2,7 +2,7 @@ use async_trait::async_trait;
use sea_orm::{QuerySelect, entity::prelude::*};
use serde::{Deserialize, Serialize};
use crate::{app::AppContextTrait, errors::app_error::RResult};
use crate::{app::AppContextTrait, errors::RecorderResult};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
@@ -76,7 +76,7 @@ impl Model {
pub async fn find_stream_task_by_id(
ctx: &dyn AppContextTrait,
task_id: i32,
) -> RResult<Option<(Model, Vec<super::task_stream_item::Model>)>> {
) -> RecorderResult<Option<(Model, Vec<super::task_stream_item::Model>)>> {
let db = ctx.db();
let res = Entity::find()
.filter(Column::Id.eq(task_id))

View File

@ -8,7 +8,7 @@ use url::Url;
use uuid::Uuid;
use super::StorageConfig;
use crate::errors::app_error::{RError, RResult};
use crate::errors::app_error::{RecorderError, RecorderResult};
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
@ -50,7 +50,7 @@ pub struct StorageService {
}
impl StorageService {
pub async fn from_config(config: StorageConfig) -> RResult<Self> {
pub async fn from_config(config: StorageConfig) -> RecorderResult<Self> {
Ok(Self {
data_dir: config.data_dir.to_string(),
})
@@ -71,7 +71,7 @@ impl StorageService {
bucket: Option<&str>,
filename: &str,
data: Bytes,
) -> Result<StorageStoredUrl, RError> {
) -> Result<StorageStoredUrl, RecorderError> {
match content_category {
StorageContentCategory::Image => {
let fullname = [
@@ -108,7 +108,7 @@ impl StorageService {
subscriber_id: i32,
bucket: Option<&str>,
filename: &str,
) -> Result<Option<StorageStoredUrl>, RError> {
) -> Result<Option<StorageStoredUrl>, RecorderError> {
match content_category {
StorageContentCategory::Image => {
let fullname = [
@@ -142,7 +142,7 @@ impl StorageService {
subscriber_pid: &str,
bucket: Option<&str>,
filename: &str,
) -> RResult<Buffer> {
) -> RecorderResult<Buffer> {
match content_category {
StorageContentCategory::Image => {
let fullname = [

View File

@@ -7,7 +7,7 @@ use tokio::sync::{RwLock, mpsc};
use crate::{
app::AppContextTrait,
errors::app_error::{RError, RResult},
errors::app_error::{RecorderError, RecorderResult},
models,
};
@@ -103,41 +103,41 @@ pub trait StreamTaskCoreTrait: Sized {
}
pub trait StreamTaskReplayLayoutTrait: StreamTaskCoreTrait {
fn history(&self) -> &[Arc<RResult<Self::Item>>];
fn history(&self) -> &[Arc<RecorderResult<Self::Item>>];
fn resume_from_model(
task: models::tasks::Model,
stream_items: Vec<models::task_stream_item::Model>,
) -> RResult<Self>;
) -> RecorderResult<Self>;
fn running_receiver(
&self,
) -> impl Future<Output = Option<mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>>>;
) -> impl Future<Output = Option<mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>>>;
#[allow(clippy::type_complexity)]
fn init_receiver(
&self,
) -> impl Future<
Output = (
mpsc::UnboundedSender<Arc<RResult<Self::Item>>>,
mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>,
mpsc::UnboundedSender<Arc<RecorderResult<Self::Item>>>,
mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>,
),
>;
fn serialize_request(request: Self::Request) -> RResult<serde_json::Value> {
serde_json::to_value(request).map_err(RError::from)
fn serialize_request(request: Self::Request) -> RecorderResult<serde_json::Value> {
serde_json::to_value(request).map_err(RecorderError::from)
}
fn serialize_item(item: RResult<Self::Item>) -> RResult<serde_json::Value> {
serde_json::to_value(item).map_err(RError::from)
fn serialize_item(item: RecorderResult<Self::Item>) -> RecorderResult<serde_json::Value> {
serde_json::to_value(item).map_err(RecorderError::from)
}
fn deserialize_request(request: serde_json::Value) -> RResult<Self::Request> {
serde_json::from_value(request).map_err(RError::from)
fn deserialize_request(request: serde_json::Value) -> RecorderResult<Self::Request> {
serde_json::from_value(request).map_err(RecorderError::from)
}
fn deserialize_item(item: serde_json::Value) -> RResult<RResult<Self::Item>> {
serde_json::from_value(item).map_err(RError::from)
fn deserialize_item(item: serde_json::Value) -> RecorderResult<RecorderResult<Self::Item>> {
serde_json::from_value(item).map_err(RecorderError::from)
}
}
@@ -145,15 +145,15 @@ pub trait StreamTaskRunnerTrait: StreamTaskCoreTrait {
fn run(
context: Arc<dyn AppContextTrait>,
request: &Self::Request,
history: &[Arc<RResult<Self::Item>>],
) -> impl Stream<Item = RResult<Self::Item>>;
history: &[Arc<RecorderResult<Self::Item>>],
) -> impl Stream<Item = RecorderResult<Self::Item>>;
}
pub trait StreamTaskReplayRunnerTrait: StreamTaskRunnerTrait + StreamTaskReplayLayoutTrait {
fn run_shared(
&self,
context: Arc<dyn AppContextTrait>,
) -> impl Stream<Item = Arc<RResult<Self::Item>>> {
) -> impl Stream<Item = Arc<RecorderResult<Self::Item>>> {
stream! {
if let Some(mut receiver) = self.running_receiver().await {
while let Some(item) = receiver.recv().await {
@@ -185,9 +185,9 @@ where
{
pub meta: TaskMeta,
pub request: Request,
pub history: Vec<Arc<RResult<Item>>>,
pub history: Vec<Arc<RecorderResult<Item>>>,
#[allow(clippy::type_complexity)]
pub channel: Arc<RwLock<Option<ReplayChannel<Arc<RResult<Item>>>>>>,
pub channel: Arc<RwLock<Option<ReplayChannel<Arc<RecorderResult<Item>>>>>>,
}
impl<Request, Item> StreamTaskCoreTrait for StandardStreamTaskReplayLayout<Request, Item>
@@ -225,14 +225,14 @@ where
Request: Serialize + DeserializeOwned,
Item: Serialize + DeserializeOwned + Sync + Send + 'static,
{
fn history(&self) -> &[Arc<RResult<Self::Item>>] {
fn history(&self) -> &[Arc<RecorderResult<Self::Item>>] {
&self.history
}
fn resume_from_model(
task: models::tasks::Model,
stream_items: Vec<models::task_stream_item::Model>,
) -> RResult<Self> {
) -> RecorderResult<Self> {
Ok(Self {
meta: TaskMeta {
task_id: task.id,
@@ -243,12 +243,14 @@ where
history: stream_items
.into_iter()
.map(|m| Self::deserialize_item(m.item).map(Arc::new))
.collect::<RResult<Vec<_>>>()?,
.collect::<RecorderResult<Vec<_>>>()?,
channel: Arc::new(RwLock::new(None)),
})
}
async fn running_receiver(&self) -> Option<mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>> {
async fn running_receiver(
&self,
) -> Option<mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>> {
if let Some(channel) = self.channel.read().await.as_ref() {
Some(channel.receiver().await)
} else {
@@ -259,8 +261,8 @@ where
async fn init_receiver(
&self,
) -> (
mpsc::UnboundedSender<Arc<RResult<Self::Item>>>,
mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>,
mpsc::UnboundedSender<Arc<RecorderResult<Self::Item>>>,
mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>,
) {
let channel = ReplayChannel::new(self.history.clone());
let rx = channel.receiver().await;

View File

@@ -6,7 +6,7 @@ use url::Url;
use crate::{
app::AppContextTrait,
errors::app_error::RResult,
errors::RecorderResult,
extract::mikan::{MikanAuthSecrecy, MikanBangumiMeta, web_extract},
tasks::core::{StandardStreamTaskReplayLayout, StreamTaskRunnerTrait},
};
@@ -24,8 +24,8 @@ impl StreamTaskRunnerTrait for ExtractMikanBangumisMetaFromMyBangumiTask {
fn run(
context: Arc<dyn AppContextTrait>,
request: &Self::Request,
history: &[Arc<RResult<Self::Item>>],
) -> impl Stream<Item = RResult<Self::Item>> {
history: &[Arc<RecorderResult<Self::Item>>],
) -> impl Stream<Item = RecorderResult<Self::Item>> {
let context = context.clone();
web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page(
context,

View File

@@ -1,6 +0,0 @@
use crate::{errors::app_error::RResult, fetch::HttpClient};
pub fn build_testing_http_client() -> RResult<HttpClient> {
let mikan_client = HttpClient::default();
Ok(mikan_client)
}

View File

@@ -1,17 +1,18 @@
use reqwest::IntoUrl;
use fetch::{FetchError, HttpClientConfig, IntoUrl};
use crate::{
errors::app_error::RResult,
errors::RecorderResult,
extract::mikan::{MikanClient, MikanConfig},
fetch::HttpClientConfig,
};
pub async fn build_testing_mikan_client(base_mikan_url: impl IntoUrl) -> RResult<MikanClient> {
pub async fn build_testing_mikan_client(
base_mikan_url: impl IntoUrl,
) -> RecorderResult<MikanClient> {
let mikan_client = MikanClient::from_config(MikanConfig {
http_client: HttpClientConfig {
..Default::default()
},
base_url: base_mikan_url.into_url()?,
base_url: base_mikan_url.into_url().map_err(FetchError::from)?,
})
.await?;
Ok(mikan_client)

View File

@@ -1,6 +1,3 @@
pub mod app;
pub mod fetch;
pub mod mikan;
#[cfg(feature = "testcontainers")]
pub mod testcontainers;
pub mod tracing;

View File

@@ -1,117 +0,0 @@
use async_trait::async_trait;
use bollard::container::ListContainersOptions;
use itertools::Itertools;
use testcontainers::{
ContainerRequest, Image, ImageExt, TestcontainersError,
core::logs::consumer::logging_consumer::LoggingConsumer,
};
pub const TESTCONTAINERS_PROJECT_KEY: &str = "tech.enfw.testcontainers.project";
pub const TESTCONTAINERS_CONTAINER_KEY: &str = "tech.enfw.testcontainers.container";
pub const TESTCONTAINERS_PRUNE_KEY: &str = "tech.enfw.testcontainers.prune";
#[async_trait]
pub trait ContainerRequestEnhancedExt<I>: Sized + ImageExt<I>
where
I: Image,
{
async fn with_prune_existed_label(
self,
container_label: &str,
prune: bool,
force: bool,
) -> Result<Self, TestcontainersError>;
fn with_default_log_consumer(self) -> Self;
}
#[async_trait]
impl<I> ContainerRequestEnhancedExt<I> for ContainerRequest<I>
where
I: Image,
{
async fn with_prune_existed_label(
self,
container_label: &str,
prune: bool,
force: bool,
) -> Result<Self, TestcontainersError> {
use std::collections::HashMap;
use bollard::container::PruneContainersOptions;
use testcontainers::core::client::docker_client_instance;
if prune {
let client = docker_client_instance().await?;
let mut filters = HashMap::<String, Vec<String>>::new();
filters.insert(
String::from("label"),
vec![
format!("{TESTCONTAINERS_PRUNE_KEY}=true"),
format!("{}={}", TESTCONTAINERS_PROJECT_KEY, "konobangu"),
format!("{}={}", TESTCONTAINERS_CONTAINER_KEY, container_label),
],
);
if force {
let result = client
.list_containers(Some(ListContainersOptions {
all: false,
filters: filters.clone(),
..Default::default()
}))
.await
.map_err(|err| TestcontainersError::Other(Box::new(err)))?;
let remove_containers = result
.iter()
.filter(|c| matches!(c.state.as_deref(), Some("running")))
.flat_map(|c| c.id.as_deref())
.collect_vec();
futures::future::try_join_all(
remove_containers
.iter()
.map(|c| client.stop_container(c, None)),
)
.await
.map_err(|error| TestcontainersError::Other(Box::new(error)))?;
if !remove_containers.is_empty() {
tracing::warn!(name = "stop running containers", result = ?remove_containers);
}
}
let result = client
.prune_containers(Some(PruneContainersOptions { filters }))
.await
.map_err(|err| TestcontainersError::Other(Box::new(err)))?;
if result
.containers_deleted
.as_ref()
.is_some_and(|c| !c.is_empty())
{
tracing::warn!(name = "prune existed containers", result = ?result);
}
}
let result = self.with_labels([
(TESTCONTAINERS_PRUNE_KEY, "true"),
(TESTCONTAINERS_PROJECT_KEY, "konobangu"),
(TESTCONTAINERS_CONTAINER_KEY, container_label),
]);
Ok(result)
}
fn with_default_log_consumer(self) -> Self {
self.with_log_consumer(
LoggingConsumer::new()
.with_stdout_level(log::Level::Info)
.with_stderr_level(log::Level::Error),
)
}
}
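This helper is deleted here and, per the new workspace dependencies, re-homed in the `testcontainers-ext` package (the qbit tests later in this diff import `ImagePruneExistedLabelExt` from there). A usage sketch with `ContainerRequestEnhancedExt` in scope, mirroring the chain those tests use; image, port, and label are placeholders:

use testcontainers::{
    GenericImage, ImageExt, TestcontainersError,
    core::{ContainerPort, WaitFor},
    runners::AsyncRunner,
};

async fn start_labeled_container() -> Result<(), TestcontainersError> {
    let request = GenericImage::new("linuxserver/qbittorrent", "latest")
        .with_wait_for(WaitFor::message_on_stderr("Connection to localhost"))
        .with_mapped_port(8080, ContainerPort::Tcp(8080))
        // forward container stdout/stderr into `tracing`
        .with_default_log_consumer()
        // stop and prune leftovers carrying this label, then label the new
        // request so the next run can prune it in turn
        .with_prune_existed_label("qbit-downloader", true, true)
        .await?;
    let _container = request.start().await?;
    Ok(())
}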

View File

@ -1 +0,0 @@

View File

@ -7,7 +7,7 @@ use super::core::Controller;
use crate::{
app::AppContextTrait,
auth::{AuthUserInfo, header_www_authenticate_middleware},
errors::app_error::RResult,
errors::RecorderResult,
};
pub const CONTROLLER_PREFIX: &str = "/api/graphql";
@ -25,7 +25,7 @@ async fn graphql_handler(
graphql_service.schema.execute(req).await.into()
}
pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RResult<Controller> {
pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
let router = Router::<Arc<dyn AppContextTrait>>::new()
.route("/", post(graphql_handler))
.layer(from_fn_with_state(ctx, header_www_authenticate_middleware));

View File

@ -3,7 +3,7 @@ use std::sync::Arc;
use axum::{Json, Router, extract::State, routing::get};
use serde::Serialize;
use crate::{app::AppContextTrait, errors::app_error::RResult, web::controller::Controller};
use crate::{app::AppContextTrait, errors::RecorderResult, web::controller::Controller};
pub const CONTROLLER_PREFIX: &str = "/api/metadata";
@ -13,7 +13,9 @@ pub struct StandardResponse {
pub message: String,
}
async fn health(State(ctx): State<Arc<dyn AppContextTrait>>) -> RResult<Json<StandardResponse>> {
async fn health(
State(ctx): State<Arc<dyn AppContextTrait>>,
) -> RecorderResult<Json<StandardResponse>> {
ctx.db().ping().await.inspect_err(
|err| tracing::error!(err.msg = %err, err.detail = ?err, "health check database ping error"),
)?;
@ -31,7 +33,7 @@ async fn ping() -> Json<StandardResponse> {
})
}
pub async fn create(_context: Arc<dyn AppContextTrait>) -> RResult<Controller> {
pub async fn create(_context: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
let router = Router::<Arc<dyn AppContextTrait>>::new()
.route("/health", get(health))
.route("/ping", get(ping));

View File

@ -2,13 +2,11 @@ use std::sync::Arc;
use axum::{
Json, Router,
extract::{Query, State},
http::request::Parts,
extract::{Query, Request, State},
routing::get,
};
use snafu::prelude::*;
use snafu::ResultExt;
use super::core::Controller;
use crate::{
app::AppContextTrait,
auth::{
@ -16,9 +14,10 @@ use crate::{
errors::OidcRequestRedirectUriSnafu,
oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest},
},
errors::app_error::RResult,
errors::RecorderResult,
extract::http::ForwardedRelatedInfo,
models::auth::AuthType,
web::controller::core::Controller,
};
pub const CONTROLLER_PREFIX: &str = "/api/oidc";
@ -43,10 +42,11 @@ async fn oidc_callback(
async fn oidc_auth(
State(ctx): State<Arc<dyn AppContextTrait>>,
parts: Parts,
request: Request,
) -> Result<Json<OidcAuthRequest>, AuthError> {
let auth_service = ctx.auth();
if let AuthService::Oidc(oidc_auth_service) = auth_service {
let (parts, _) = request.into_parts();
let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts)
.resolved_origin()
.ok_or(url::ParseError::EmptyHost)
@ -73,7 +73,7 @@ async fn oidc_auth(
}
}
pub async fn create(_context: Arc<dyn AppContextTrait>) -> RResult<Controller> {
pub async fn create(_context: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
let router = Router::<Arc<dyn AppContextTrait>>::new()
.route("/auth", get(oidc_auth))
.route("/callback", get(oidc_callback));

View File

@ -12,7 +12,7 @@ use http::StatusCode;
use serde::{Deserialize, Serialize};
use tower_http::catch_panic::CatchPanicLayer;
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CatchPanic {
@ -55,7 +55,7 @@ impl MiddlewareLayer for CatchPanic {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(CatchPanicLayer::custom(handle_panic)))
}
}

View File

@ -11,7 +11,7 @@ use axum::Router;
use serde::{Deserialize, Serialize};
use tower_http::compression::CompressionLayer;
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Compression {
@ -38,7 +38,7 @@ impl MiddlewareLayer for Compression {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(CompressionLayer::new()))
}
}

View File

@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use tower_http::cors::{self, Any};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
/// CORS middleware configuration
#[derive(Debug, Clone, Deserialize, Serialize)]
@ -80,7 +80,7 @@ impl Cors {
///
/// In all of these cases, the error returned will be the result of the
/// `parse` method of the corresponding type.
pub fn cors(&self) -> RResult<cors::CorsLayer> {
pub fn cors(&self) -> RecorderResult<cors::CorsLayer> {
let mut cors: cors::CorsLayer = cors::CorsLayer::new();
// testing CORS, assuming https://example.com in the allow list:
@ -160,7 +160,7 @@ impl MiddlewareLayer for Cors {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(self.cors()?))
}
}

View File

@ -21,11 +21,11 @@ use axum::{
},
response::Response,
};
use futures_util::future::BoxFuture;
use futures::future::BoxFuture;
use serde::{Deserialize, Serialize};
use tower::{Layer, Service};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Etag {
@ -52,7 +52,7 @@ impl MiddlewareLayer for Etag {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(EtagLayer))
}
}

View File

@ -1,4 +1,3 @@
//! Detect a content type and format and responds accordingly
use axum::{
extract::FromRequestParts,
http::{
@ -8,7 +7,7 @@ use axum::{
};
use serde::{Deserialize, Serialize};
use crate::errors::app_error::RError as Error;
use crate::errors::RecorderError as Error;
#[derive(Debug, Deserialize, Serialize)]
pub struct Format(pub RespondTo);

View File

@ -15,7 +15,7 @@ use tower_http::{add_extension::AddExtensionLayer, trace::TraceLayer};
use crate::{
app::{AppContextTrait, Environment},
errors::app_error::RResult,
errors::RecorderResult,
web::middleware::{MiddlewareLayer, request_id::LocoRequestId},
};
@ -70,7 +70,7 @@ impl MiddlewareLayer for Middleware {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app
.layer(
TraceLayer::new_for_http().make_span_with(|request: &http::Request<_>| {

View File

@ -14,7 +14,7 @@ use std::sync::Arc;
use axum::Router;
use serde::{Deserialize, Serialize};
use crate::{app::AppContextTrait, errors::app_error::RResult};
use crate::{app::AppContextTrait, errors::RecorderResult};
/// Trait representing the behavior of middleware components in the application.
/// When implementing a new middleware, make sure to go over this checklist:
@ -55,7 +55,7 @@ pub trait MiddlewareLayer {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>>;
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>>;
}
#[allow(clippy::unnecessary_lazy_evaluations)]

View File

@ -24,7 +24,7 @@ use axum::{
http::{header::HeaderMap, request::Parts},
response::Response,
};
use futures_util::future::BoxFuture;
use futures::future::BoxFuture;
use ipnetwork::IpNetwork;
use serde::{Deserialize, Serialize};
use snafu::ResultExt;
@ -33,7 +33,7 @@ use tracing::error;
use crate::{
app::AppContextTrait,
errors::app_error::{RError, RResult},
errors::app_error::{RecorderError, RecorderResult},
web::middleware::MiddlewareLayer,
};
@ -127,7 +127,7 @@ impl MiddlewareLayer for RemoteIpMiddleware {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(RemoteIPLayer::new(self)?))
}
}
@ -225,7 +225,7 @@ impl RemoteIPLayer {
///
/// # Errors
/// Fails if invalid header values found
pub fn new(config: &RemoteIpMiddleware) -> RResult<Self> {
pub fn new(config: &RemoteIpMiddleware) -> RecorderResult<Self> {
Ok(Self {
trusted_proxies: config
.trusted_proxies
@ -236,14 +236,14 @@ impl RemoteIPLayer {
.map(|proxy| {
IpNetwork::from_str(proxy)
.boxed()
.with_whatever_context::<_, _, RError>(|_| {
.with_whatever_context::<_, _, RecorderError>(|_| {
format!(
"remote ip middleare cannot parse trusted proxy \
configuration: `{proxy}`"
)
})
})
.collect::<RResult<Vec<_>>>()
.collect::<RecorderResult<Vec<_>>>()
})
.transpose()?,
})

View File

@ -18,7 +18,7 @@ const MAX_LEN: usize = 255;
use std::sync::{Arc, OnceLock};
use crate::errors::app_error::RResult;
use crate::errors::RecorderResult;
static ID_CLEANUP: OnceLock<Regex> = OnceLock::new();
@ -57,7 +57,7 @@ impl MiddlewareLayer for RequestId {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(axum::middleware::from_fn(request_id_middleware)))
}
}

View File

@ -15,13 +15,13 @@ use axum::{
http::{HeaderName, HeaderValue, Request},
response::Response,
};
use futures_util::future::BoxFuture;
use futures::future::BoxFuture;
use serde::{Deserialize, Serialize};
use serde_json::{self, json};
use snafu::whatever;
use tower::{Layer, Service};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
static PRESETS: OnceLock<HashMap<String, BTreeMap<String, String>>> = OnceLock::new();
fn get_presets() -> &'static HashMap<String, BTreeMap<String, String>> {
@ -115,7 +115,7 @@ impl MiddlewareLayer for SecureHeader {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(SecureHeaders::new(self)?))
}
}
@ -124,7 +124,7 @@ impl SecureHeader {
/// Converts the configuration into a list of headers.
///
/// Applies the preset headers and any custom overrides.
fn as_headers(&self) -> RResult<Vec<(HeaderName, HeaderValue)>> {
fn as_headers(&self) -> RecorderResult<Vec<(HeaderName, HeaderValue)>> {
let mut headers = vec![];
let preset = &self.preset;
@ -147,7 +147,7 @@ impl SecureHeader {
fn push_headers(
headers: &mut Vec<(HeaderName, HeaderValue)>,
hm: &BTreeMap<String, String>,
) -> RResult<()> {
) -> RecorderResult<()> {
for (k, v) in hm {
headers.push((
HeaderName::from_bytes(k.clone().as_bytes())?,
@ -171,7 +171,7 @@ impl SecureHeaders {
///
/// # Errors
/// Returns an error if any header values are invalid.
pub fn new(config: &SecureHeader) -> RResult<Self> {
pub fn new(config: &SecureHeader) -> RecorderResult<Self> {
Ok(Self {
headers: config.as_headers()?,
})

View File

@ -16,7 +16,7 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use tower_http::timeout::TimeoutLayer;
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};
/// Timeout middleware configuration
#[derive(Debug, Clone, Deserialize, Serialize)]
@ -61,7 +61,7 @@ impl MiddlewareLayer for TimeOut {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(TimeoutLayer::new(Duration::from_millis(self.timeout))))
}
}

View File

@ -1,8 +0,0 @@
FROM mcr.microsoft.com/vscode/devcontainers/rust:0-1
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends postgresql-client \
&& cargo install sea-orm-cli cargo-insta \
&& chown -R vscode /usr/local/cargo
COPY .env /.env

View File

@ -1,7 +0,0 @@
{
"name": "Konobangu Recorder",
"dockerComposeFile": "docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
"forwardPorts": [5001]
}

View File

@ -1,40 +0,0 @@
version: "3"
services:
app:
build:
context: .
dockerfile: Dockerfile
command: sleep infinity
networks:
- db
- redis
volumes:
- ../..:/workspaces:cached
env_file:
- .env
db:
image: postgres:15.3-alpine
restart: unless-stopped
ports:
- 5432:5432
networks:
- db
volumes:
- postgres-data:/var/lib/postgresql/data
env_file:
- .env
redis:
image: redis:latest
restart: unless-stopped
ports:
- 6379:6379
networks:
- redis
volumes:
postgres-data:
networks:
db:
redis:

View File

@ -1,10 +0,0 @@
env.CARGO_TERM_COLOR = "always"
[jobs.recorder]
command = ["cargo", "run", "-p", "recorder", "--bin", "recorder_cli", "--", "--environment", "development"]
watch = ["apps/recorder"]
need_stdout = true
allow_warnings = true
background = false
on_change_strategy = "kill_then_restart"
kill = ["kill", "-s", "INT"]

30
dev.kdl Normal file
View File

@ -0,0 +1,30 @@
on_force_close "quit"
layout {
default_tab_template {
pane size=1 borderless=true {
plugin location="zellij:tab-bar"
}
children
}
tab name="WebUI" {
pane command="pnpm" {
args "run" "--filter=webui" "dev"
}
}
tab name="Proxy" {
pane command="pnpm" {
args "run" "--filter=proxy" "dev"
}
}
tab name="Recorder" {
pane command="cargo" {
args "watch" "-w" "apps/recorder" "-x" "run -p recorder --bin recorder_cli -- --environment development"
}
}
tab name="Deps" {
pane command="docker" {
args "compose" "-f" "devdeps.compose.yaml" "up"
}
}
}

14
devdeps.compose.yaml Normal file
View File

@ -0,0 +1,14 @@
services:
postgres:
image: postgres:17
environment:
POSTGRES_USER: konobangu
POSTGRES_PASSWORD: konobangu
POSTGRES_DB: konobangu
ports:
- "5432:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
volumes:
postgres_data:

View File

@ -14,3 +14,12 @@ dev-proxy:
# bacon recorder # crash on windows
dev-recorder:
cargo watch -w "apps/recorder" -x "run -p recorder --bin recorder_cli -- --environment development"
dev-deps:
docker compose -f devdeps.compose.yaml up
dev-deps-clean:
docker compose -f devdeps.compose.yaml down -v
dev-all:
zellij --layout dev.kdl

View File

@ -0,0 +1,50 @@
[package]
name = "downloader"
version = "0.1.0"
edition = "2024"
[features]
default = []
testcontainers = [
"dep:testcontainers",
"dep:testcontainers-modules",
"dep:testcontainers-ext",
"dep:testing-torrents",
]
[dependencies]
futures = { workspace = true }
testcontainers = { workspace = true, optional = true }
testcontainers-modules = { workspace = true, optional = true }
testcontainers-ext = { workspace = true, optional = true }
tokio = { workspace = true }
serde_json = { workspace = true }
async-trait = { workspace = true }
tracing = { workspace = true }
snafu = { workspace = true }
url = { workspace = true }
serde = { workspace = true }
anyhow = { workspace = true }
quirks_path = { workspace = true }
itertools = { workspace = true }
chrono = { workspace = true }
bytes = { workspace = true }
serde-value = "0.7"
qbit-rs = { git = "https://github.com/lonelyhentxi/qbit.git", rev = "72d53138ebe", features = [
"default",
"builder",
] }
merge-struct = "0.1"
librqbit-core = "4"
librqbit = { version = "8", features = ["async-bt", "watch"] }
util = { workspace = true }
testing-torrents = { workspace = true, optional = true }
fetch = { workspace = true }
dashmap = "6.1.0"
[dev-dependencies]
reqwest = { workspace = true }
tracing-subscriber = { workspace = true }

View File

@ -1,2 +1,2 @@
pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
pub const MAGNET_SCHEMA: &str = "magnet";
pub const MAGNET_SCHEMA: &str = "magnet";
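A small sketch (not part of this diff) of how the two constants are consumed when classifying a download source; `url` is already a workspace dependency:

use url::Url;

fn is_magnet_url(raw: &str) -> bool {
    Url::parse(raw)
        .map(|u| u.scheme() == MAGNET_SCHEMA)
        .unwrap_or(false)
}

fn is_torrent_content_type(content_type: &str) -> bool {
    content_type.eq_ignore_ascii_case(BITTORRENT_MIME_TYPE)
}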

View File

@ -1,6 +1,6 @@
use async_trait::async_trait;
use crate::downloader::{
use crate::{
DownloaderError,
bittorrent::task::{
TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait, TorrentTaskTrait,
@ -21,7 +21,7 @@ where
async fn pause_downloads(
&self,
selector: Self::Selector,
selector: <Self as DownloaderTrait>::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.pause_torrents(hashes).await
@ -29,14 +29,14 @@ where
async fn resume_downloads(
&self,
selector: Self::Selector,
selector: <Self as DownloaderTrait>::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.resume_torrents(hashes).await
}
async fn remove_downloads(
&self,
selector: Self::Selector,
selector: <Self as DownloaderTrait>::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.remove_torrents(hashes).await
@ -44,7 +44,7 @@ where
async fn query_torrent_hashes(
&self,
selector: Self::Selector,
selector: <Self as DownloaderTrait>::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = match selector.try_into_ids_only() {
Ok(hashes) => Self::IdSelector::from_iter(hashes),
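The change from `Self::Selector` to `<Self as DownloaderTrait>::Selector` looks cosmetic, but the qualified form is what Rust requires whenever two traits in a bound set declare an associated type with the same name; the bare form then fails with E0221. A contrived, self-contained illustration of the rule (not the real traits from this diff):

trait Base {
    type Selector;
}

trait Sub: Base {
    type Selector;

    // `Self::Selector` alone would be ambiguous here (error E0221); the
    // fully-qualified form names the intended trait explicitly.
    fn pick(&self, selector: <Self as Base>::Selector);
}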

View File

@ -0,0 +1,6 @@
pub mod defs;
pub mod downloader;
pub mod source;
pub mod task;
pub use defs::{BITTORRENT_MIME_TYPE, MAGNET_SCHEMA};

View File

@ -4,18 +4,14 @@ use std::{
};
use bytes::Bytes;
use fetch::{bytes::fetch_bytes, client::core::HttpClientTrait};
use librqbit_core::{magnet::Magnet, torrent_metainfo, torrent_metainfo::TorrentMetaV1Owned};
use snafu::ResultExt;
use url::Url;
use util::errors::AnyhowResultExt;
use crate::{
downloader::errors::{
DownloadFetchSnafu, DownloaderError, MagnetFormatSnafu, TorrentMetaSnafu,
},
errors::RAnyhowResultExt,
extract::bittorrent::core::MAGNET_SCHEMA,
fetch::{bytes::fetch_bytes, client::core::HttpClientTrait},
};
use super::defs::MAGNET_SCHEMA;
use crate::errors::{DownloadFetchSnafu, DownloaderError, MagnetFormatSnafu, TorrentMetaSnafu};
pub trait HashTorrentSourceTrait: Sized {
fn hash_info(&self) -> Cow<'_, str>;
@ -90,7 +86,7 @@ impl TorrentUrlSource {
pub struct TorrentFileSource {
pub url: Option<String>,
pub payload: Bytes,
pub meta: TorrentMetaV1Owned,
pub meta: Box<TorrentMetaV1Owned>,
pub filename: String,
}
@ -114,7 +110,7 @@ impl TorrentFileSource {
Ok(TorrentFileSource {
url,
payload: bytes,
meta,
meta: Box::new(meta),
filename,
})
}
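Boxing the parsed metainfo is the usual fix for `clippy::large_enum_variant`: `TorrentFileSource` sits in an enum next to a small magnet variant, and an inline `TorrentMetaV1Owned` would bloat every variant. An illustrative miniature with stand-in sizes rather than the real librqbit types:

// Illustrative only: the array stands in for a large parsed metainfo value.
enum SourceDemo {
    Magnet(String),        // ~24 bytes on 64-bit targets
    File(Box<[u8; 4096]>), // boxed: the variant stores a single pointer
}

fn main() {
    // The enum stays near the size of its largest *inline* field.
    assert!(std::mem::size_of::<SourceDemo>() <= 32);
}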

View File

@ -2,7 +2,7 @@ use std::{borrow::Cow, hash::Hash};
use quirks_path::{Path, PathBuf};
use crate::downloader::{
use crate::{
bittorrent::source::HashTorrentSource,
core::{DownloadCreationTrait, DownloadIdTrait, DownloadStateTrait, DownloadTaskTrait},
};
@ -11,6 +11,12 @@ pub const TORRENT_TAG_NAME: &str = "konobangu";
pub trait TorrentHashTrait: DownloadIdTrait + Send + Hash {}
pub type SimpleTorrentHash = String;
impl DownloadIdTrait for SimpleTorrentHash {}
impl TorrentHashTrait for SimpleTorrentHash {}
pub trait TorrentStateTrait: DownloadStateTrait {}
pub trait TorrentTaskTrait: DownloadTaskTrait
@ -18,9 +24,10 @@ where
Self::State: TorrentStateTrait,
Self::Id: TorrentHashTrait,
{
fn hash_info(&self) -> &str;
fn hash_info(&self) -> Cow<'_, str>;
fn name(&self) -> Cow<'_, str> {
Cow::Borrowed(self.hash_info())
self.hash_info()
}
fn tags(&self) -> impl Iterator<Item = Cow<'_, str>>;
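Returning `Cow<'_, str>` instead of `&str` lets an implementor either borrow a stored hash or build one on demand (the rqbit backend added below stores an `Id20` rather than a `String`), and the default `name()` can forward it unchanged. A hedged sketch of both shapes:

use std::borrow::Cow;

struct StoredHash { hash: String }
struct DerivedHash { raw: [u8; 20] }

impl StoredHash {
    fn hash_info(&self) -> Cow<'_, str> {
        Cow::Borrowed(&self.hash) // zero-cost borrow
    }
}

impl DerivedHash {
    fn hash_info(&self) -> Cow<'_, str> {
        // must allocate: the hex form does not yet exist in memory
        Cow::Owned(self.raw.iter().map(|b| format!("{b:02x}")).collect())
    }
}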

View File

@ -7,7 +7,18 @@ use async_trait::async_trait;
use super::DownloaderError;
pub trait DownloadStateTrait: Sized + Debug {}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DownloadSimpleState {
Paused,
Active,
Completed,
Error,
Unknown,
}
pub trait DownloadStateTrait: Sized + Debug {
fn to_download_state(&self) -> DownloadSimpleState;
}
pub trait DownloadIdTrait: Hash + Sized + Clone + Send + Debug {}
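`DownloadStateTrait` is no longer a pure marker: every backend state must now project onto the shared `DownloadSimpleState`. A minimal sketch for a hypothetical backend (the real qBittorrent and rqbit mappings appear later in this diff):

#[derive(Debug)]
enum DemoState { Queued, Fetching, Seeding, Failed }

impl DownloadStateTrait for DemoState {
    fn to_download_state(&self) -> DownloadSimpleState {
        match self {
            DemoState::Queued => DownloadSimpleState::Paused,
            DemoState::Fetching => DownloadSimpleState::Active,
            DemoState::Seeding => DownloadSimpleState::Completed,
            DemoState::Failed => DownloadSimpleState::Error,
        }
    }
}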

View File

@ -1,8 +1,7 @@
use std::{borrow::Cow, time::Duration};
use snafu::prelude::*;
use crate::errors::OptDynErr;
use util::errors::OptDynErr;
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
@ -36,6 +35,11 @@ pub enum DownloaderError {
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("{source}"))]
RqbitError {
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("{message}"))]
Whatever {
message: String,
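The new `RqbitError` variant mirrors its siblings' `OptDynErr` shape; the rqbit downloader later in this diff builds it via `.to_dyn_boxed().context(RqbitSnafu {})`. For illustration, a direct construction relying on `anyhow::Error`'s standard conversion into a boxed error:

fn wrap_rqbit_error(err: anyhow::Error) -> DownloaderError {
    DownloaderError::RqbitError {
        // Box<dyn Error + Send + Sync> -> OptDynErr, per the snafu
        // `source(from(...))` attribute above
        source: OptDynErr::some(err.into()),
    }
}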

View File

@ -0,0 +1,8 @@
pub mod bittorrent;
pub mod core;
pub mod errors;
pub mod qbit;
pub mod rqbit;
pub mod utils;
pub use errors::DownloaderError;

View File

@ -8,22 +8,17 @@ use std::{
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use futures::future::try_join_all;
use itertools::Itertools;
use merge_struct::merge;
pub use qbit_rs::model::{
Torrent as QbitTorrent, TorrentContent as QbitTorrentContent, TorrentFile as QbitTorrentFile,
TorrentFilter as QbitTorrentFilter, TorrentSource as QbitTorrentSource,
};
use qbit_rs::{
Qbit,
model::{
AddTorrentArg, Category, Credential, GetTorrentListArg, NonEmptyStr, Sep, State, SyncData,
TorrentFile, TorrentSource,
AddTorrentArg, Category, Credential, GetTorrentListArg, NonEmptyStr, Sep, SyncData,
Torrent as QbitTorrent, TorrentFile, TorrentSource,
},
};
use quirks_path::{Path, PathBuf};
use snafu::prelude::*;
use quirks_path::PathBuf;
use snafu::{OptionExt, whatever};
use tokio::{
sync::{RwLock, watch},
time::sleep,
@ -31,177 +26,23 @@ use tokio::{
use tracing::instrument;
use url::Url;
use super::{DownloaderError, utils::path_equals_as_file_url};
use crate::downloader::{
use super::QBittorrentHashSelector;
use crate::{
DownloaderError,
bittorrent::{
downloader::TorrentDownloaderTrait,
source::{HashTorrentSource, HashTorrentSourceTrait, MagnetUrlSource, TorrentFileSource},
task::{
TORRENT_TAG_NAME, TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait,
TorrentTaskTrait,
},
task::TORRENT_TAG_NAME,
},
core::{
DownloadCreationTrait, DownloadIdSelector, DownloadIdTrait, DownloadSelectorTrait,
DownloadStateTrait, DownloadTaskTrait, DownloaderTrait,
core::DownloaderTrait,
qbit::task::{
QBittorrentCreation, QBittorrentHash, QBittorrentSelector, QBittorrentState,
QBittorrentTask,
},
utils::path_equals_as_file_url,
};
pub type QBittorrentHash = String;
impl DownloadIdTrait for QBittorrentHash {}
impl TorrentHashTrait for QBittorrentHash {}
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct QBittorrentState(Option<State>);
impl DownloadStateTrait for QBittorrentState {}
impl TorrentStateTrait for QBittorrentState {}
impl From<Option<State>> for QBittorrentState {
fn from(value: Option<State>) -> Self {
Self(value)
}
}
#[derive(Debug)]
pub struct QBittorrentTask {
pub hash_info: QBittorrentHash,
pub torrent: QbitTorrent,
pub contents: Vec<QbitTorrentContent>,
pub state: QBittorrentState,
}
impl QBittorrentTask {
fn from_query(
torrent: QbitTorrent,
contents: Vec<QbitTorrentContent>,
) -> Result<Self, DownloaderError> {
let hash = torrent
.hash
.clone()
.ok_or_else(|| DownloaderError::TorrentMetaError {
message: "missing hash".to_string(),
source: None.into(),
})?;
let state = QBittorrentState(torrent.state.clone());
Ok(Self {
hash_info: hash,
contents,
state,
torrent,
})
}
}
impl DownloadTaskTrait for QBittorrentTask {
type State = QBittorrentState;
type Id = QBittorrentHash;
fn id(&self) -> &Self::Id {
&self.hash_info
}
fn into_id(self) -> Self::Id {
self.hash_info
}
fn name(&self) -> Cow<'_, str> {
self.torrent
.name
.as_deref()
.map(Cow::Borrowed)
.unwrap_or_else(|| DownloadTaskTrait::name(self))
}
fn speed(&self) -> Option<u64> {
self.torrent.dlspeed.and_then(|s| u64::try_from(s).ok())
}
fn state(&self) -> &Self::State {
&self.state
}
fn dl_bytes(&self) -> Option<u64> {
self.torrent.downloaded.and_then(|v| u64::try_from(v).ok())
}
fn total_bytes(&self) -> Option<u64> {
self.torrent.size.and_then(|v| u64::try_from(v).ok())
}
fn left_bytes(&self) -> Option<u64> {
self.torrent.amount_left.and_then(|v| u64::try_from(v).ok())
}
fn et(&self) -> Option<Duration> {
self.torrent
.time_active
.and_then(|v| u64::try_from(v).ok())
.map(Duration::from_secs)
}
fn eta(&self) -> Option<Duration> {
self.torrent
.eta
.and_then(|v| u64::try_from(v).ok())
.map(Duration::from_secs)
}
fn progress(&self) -> Option<f32> {
self.torrent.progress.as_ref().map(|s| *s as f32)
}
}
impl TorrentTaskTrait for QBittorrentTask {
fn hash_info(&self) -> &str {
&self.hash_info
}
fn tags(&self) -> impl Iterator<Item = Cow<'_, str>> {
self.torrent
.tags
.as_deref()
.unwrap_or("")
.split(',')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.map(Cow::Borrowed)
}
fn category(&self) -> Option<Cow<'_, str>> {
self.torrent.category.as_deref().map(Cow::Borrowed)
}
}
#[derive(Debug, Clone, Default)]
pub struct QBittorrentCreation {
pub save_path: PathBuf,
pub tags: Vec<String>,
pub category: Option<String>,
pub sources: Vec<HashTorrentSource>,
}
impl DownloadCreationTrait for QBittorrentCreation {
type Task = QBittorrentTask;
}
impl TorrentCreationTrait for QBittorrentCreation {
fn save_path(&self) -> &Path {
self.save_path.as_ref()
}
fn save_path_mut(&mut self) -> &mut PathBuf {
&mut self.save_path
}
fn sources_mut(&mut self) -> &mut Vec<HashTorrentSource> {
&mut self.sources
}
}
pub struct QBittorrentDownloaderCreation {
pub endpoint: String,
pub username: String,
@ -212,53 +53,6 @@ pub struct QBittorrentDownloaderCreation {
pub wait_sync_timeout: Option<Duration>,
}
pub type QBittorrentHashSelector = DownloadIdSelector<QBittorrentTask>;
pub struct QBittorrentComplexSelector {
pub query: GetTorrentListArg,
}
impl From<QBittorrentHashSelector> for QBittorrentComplexSelector {
fn from(value: QBittorrentHashSelector) -> Self {
Self {
query: GetTorrentListArg {
hashes: Some(value.ids.join("|")),
..Default::default()
},
}
}
}
impl DownloadSelectorTrait for QBittorrentComplexSelector {
type Id = QBittorrentHash;
type Task = QBittorrentTask;
}
pub enum QBittorrentSelector {
Hash(QBittorrentHashSelector),
Complex(QBittorrentComplexSelector),
}
impl DownloadSelectorTrait for QBittorrentSelector {
type Id = QBittorrentHash;
type Task = QBittorrentTask;
fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
match self {
QBittorrentSelector::Complex(c) => {
c.try_into_ids_only().map_err(QBittorrentSelector::Complex)
}
QBittorrentSelector::Hash(h) => {
let result = h
.try_into_ids_only()
.unwrap_or_else(|_| unreachable!("hash selector must contain hashes"))
.into_iter();
Ok(result.collect_vec())
}
}
}
}
#[derive(Default)]
pub struct QBittorrentSyncData {
pub torrents: HashMap<String, QbitTorrent>,
@ -268,6 +62,7 @@ pub struct QBittorrentSyncData {
pub server_state: HashMap<String, serde_value::Value>,
pub rid: i64,
}
impl QBittorrentSyncData {
pub fn patch(&mut self, data: SyncData) {
self.rid = data.rid;
@ -337,6 +132,7 @@ pub struct QBittorrentDownloader {
}
impl QBittorrentDownloader {
#[instrument(level = "debug")]
pub async fn from_creation(
creation: QBittorrentDownloaderCreation,
) -> Result<Arc<Self>, DownloaderError> {
@ -460,10 +256,6 @@ impl QBittorrentDownloader {
Ok(())
}
pub fn get_save_path(&self, sub_path: &Path) -> PathBuf {
self.save_path.join(sub_path)
}
#[instrument(level = "debug", skip(self))]
pub async fn add_torrent_tags(
&self,
@ -531,6 +323,7 @@ impl QBittorrentDownloader {
Ok(())
}
#[instrument(level = "debug", skip(self))]
pub async fn get_torrent_path(
&self,
hashes: String,
@ -613,10 +406,11 @@ impl DownloaderTrait for QBittorrentDownloader {
type Creation = QBittorrentCreation;
type Selector = QBittorrentSelector;
#[instrument(level = "debug", skip(self))]
async fn add_downloads(
&self,
creation: Self::Creation,
) -> Result<HashSet<Self::Id>, DownloaderError> {
creation: <Self as DownloaderTrait>::Creation,
) -> Result<HashSet<<Self as DownloaderTrait>::Id>, DownloaderError> {
let tags = {
let mut tags = vec![TORRENT_TAG_NAME.to_string()];
tags.extend(creation.tags);
@ -712,29 +506,30 @@ impl DownloaderTrait for QBittorrentDownloader {
async fn pause_downloads(
&self,
selector: Self::Selector,
selector: <Self as DownloaderTrait>::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::pause_downloads(self, selector).await
}
async fn resume_downloads(
&self,
selector: Self::Selector,
selector: <Self as DownloaderTrait>::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::resume_downloads(self, selector).await
}
async fn remove_downloads(
&self,
selector: Self::Selector,
selector: <Self as DownloaderTrait>::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::remove_downloads(self, selector).await
}
#[instrument(level = "debug", skip(self))]
async fn query_downloads(
&self,
selector: QBittorrentSelector,
) -> Result<Vec<Self::Task>, DownloaderError> {
) -> Result<Vec<<Self as DownloaderTrait>::Task>, DownloaderError> {
let selector = match selector {
QBittorrentSelector::Hash(h) => h.into(),
QBittorrentSelector::Complex(c) => c,
@ -742,7 +537,7 @@ impl DownloaderTrait for QBittorrentDownloader {
let torrent_list = self.client.get_torrent_list(selector.query).await?;
let torrent_contents = try_join_all(torrent_list.iter().map(|s| async {
let torrent_contents = futures::future::try_join_all(torrent_list.iter().map(|s| async {
if let Some(hash) = &s.hash {
self.client.get_torrent_contents(hash as &str, None).await
} else {
@ -762,11 +557,12 @@ impl DownloaderTrait for QBittorrentDownloader {
#[async_trait]
impl TorrentDownloaderTrait for QBittorrentDownloader {
type IdSelector = DownloadIdSelector<Self::Task>;
type IdSelector = QBittorrentHashSelector;
#[instrument(level = "debug", skip(self))]
async fn pause_torrents(
&self,
hashes: Self::IdSelector,
hashes: <Self as TorrentDownloaderTrait>::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> {
self.client.pause_torrents(hashes.clone()).await?;
Ok(hashes)
@ -775,8 +571,8 @@ impl TorrentDownloaderTrait for QBittorrentDownloader {
#[instrument(level = "debug", skip(self))]
async fn resume_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> {
hashes: <Self as TorrentDownloaderTrait>::IdSelector,
) -> Result<<Self as TorrentDownloaderTrait>::IdSelector, DownloaderError> {
self.client.resume_torrents(hashes.clone()).await?;
Ok(hashes)
}
@ -784,7 +580,7 @@ impl TorrentDownloaderTrait for QBittorrentDownloader {
#[instrument(level = "debug", skip(self))]
async fn remove_torrents(
&self,
hashes: Self::IdSelector,
hashes: <Self as TorrentDownloaderTrait>::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> {
self.client
.delete_torrents(hashes.clone(), Some(true))
@ -809,315 +605,3 @@ impl Debug for QBittorrentDownloader {
.finish()
}
}
#[cfg(test)]
pub mod tests {
use serde::{Deserialize, Serialize};
use super::*;
use crate::{
downloader::core::DownloadIdSelectorTrait,
errors::{RError, app_error::RResult},
test_utils::fetch::build_testing_http_client,
};
fn get_tmp_qbit_test_folder() -> &'static str {
if cfg!(all(windows, not(feature = "testcontainers"))) {
"C:\\Windows\\Temp\\konobangu\\qbit"
} else {
"/tmp/konobangu/qbit"
}
}
#[derive(Serialize)]
struct MockFileItem {
path: String,
size: u64,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct MockRequest {
id: String,
file_list: Vec<MockFileItem>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct MockResponse {
torrent_url: String,
magnet_url: String,
hash: String,
}
#[cfg(feature = "testcontainers")]
pub async fn create_torrents_testcontainers()
-> RResult<testcontainers::ContainerRequest<testcontainers::GenericImage>> {
use testcontainers::{
GenericImage,
core::{ContainerPort, WaitFor},
};
use testcontainers_modules::testcontainers::ImageExt;
use crate::test_utils::testcontainers::ContainerRequestEnhancedExt;
let container = GenericImage::new("ghcr.io/dumtruck/konobangu-testing-torrents", "latest")
.with_wait_for(WaitFor::message_on_stdout("Listening on"))
.with_mapped_port(6080, ContainerPort::Tcp(6080))
.with_mapped_port(6081, ContainerPort::Tcp(6081))
.with_mapped_port(6082, ContainerPort::Tcp(6082))
// .with_reuse(ReuseDirective::Always)
.with_default_log_consumer()
.with_prune_existed_label("konobangu-testing-torrents", true, true)
.await?;
Ok(container)
}
#[cfg(feature = "testcontainers")]
pub async fn create_qbit_testcontainers()
-> RResult<testcontainers::ContainerRequest<testcontainers::GenericImage>> {
use testcontainers::{
GenericImage,
core::{
ContainerPort,
// ReuseDirective,
WaitFor,
},
};
use testcontainers_modules::testcontainers::ImageExt;
use crate::test_utils::testcontainers::ContainerRequestEnhancedExt;
let container = GenericImage::new("linuxserver/qbittorrent", "latest")
.with_wait_for(WaitFor::message_on_stderr("Connection to localhost"))
.with_env_var("WEBUI_PORT", "8080")
.with_env_var("TZ", "Asia/Singapore")
.with_env_var("TORRENTING_PORT", "6881")
.with_mapped_port(6881, ContainerPort::Tcp(6881))
.with_mapped_port(8080, ContainerPort::Tcp(8080))
// .with_reuse(ReuseDirective::Always)
.with_default_log_consumer()
.with_prune_existed_label("qbit-downloader", true, true)
.await?;
Ok(container)
}
#[cfg(not(feature = "testcontainers"))]
#[tokio::test]
async fn test_qbittorrent_downloader() {
let hash = "47ee2d69e7f19af783ad896541a07b012676f858".to_string();
let torrent_url = "https://mikanani.me/Download/20240301/{}.torrent";
let _ = test_qbittorrent_downloader_impl(torrent_url, hash, None, None).await;
}
#[cfg(feature = "testcontainers")]
#[tokio::test(flavor = "multi_thread")]
async fn test_qbittorrent_downloader() -> RResult<()> {
use testcontainers::runners::AsyncRunner;
use tokio::io::AsyncReadExt;
tracing_subscriber::fmt()
.with_max_level(tracing::Level::DEBUG)
.with_test_writer()
.init();
let torrents_image = create_torrents_testcontainers().await?;
let _torrents_container = torrents_image.start().await?;
let torrents_req = MockRequest {
id: "f10ebdda-dd2e-43f8-b80c-bf0884d071c4".into(),
file_list: vec![MockFileItem {
path: "[Nekomoe kissaten&LoliHouse] Boku no Kokoro no Yabai Yatsu - 20 [WebRip \
1080p HEVC-10bit AAC ASSx2].mkv"
.into(),
size: 1024,
}],
};
let torrent_res: MockResponse = reqwest::Client::new()
.post("http://127.0.0.1:6080/api/torrents/mock")
.json(&torrents_req)
.send()
.await?
.json()
.await?;
let qbit_image = create_qbit_testcontainers().await?;
let qbit_container = qbit_image.start().await?;
let mut logs = String::new();
qbit_container
.stdout(false)
.read_to_string(&mut logs)
.await?;
let username = logs
.lines()
.find_map(|line| {
if line.contains("The WebUI administrator username is") {
line.split_whitespace().last()
} else {
None
}
})
.expect("should have username")
.trim();
let password = logs
.lines()
.find_map(|line| {
if line.contains("A temporary password is provided for") {
line.split_whitespace().last()
} else {
None
}
})
.expect("should have password")
.trim();
tracing::info!(username, password);
test_qbittorrent_downloader_impl(
torrent_res.torrent_url,
torrent_res.hash,
Some(username),
Some(password),
)
.await?;
Ok(())
}
async fn test_qbittorrent_downloader_impl(
torrent_url: String,
torrent_hash: String,
username: Option<&str>,
password: Option<&str>,
) -> RResult<()> {
let http_client = build_testing_http_client()?;
let base_save_path = Path::new(get_tmp_qbit_test_folder());
let downloader = QBittorrentDownloader::from_creation(QBittorrentDownloaderCreation {
endpoint: "http://127.0.0.1:8080".to_string(),
password: password.unwrap_or_default().to_string(),
username: username.unwrap_or_default().to_string(),
subscriber_id: 0,
save_path: base_save_path.to_string(),
downloader_id: 0,
wait_sync_timeout: Some(Duration::from_secs(3)),
})
.await?;
downloader.check_connection().await?;
downloader
.remove_torrents(vec![torrent_hash.clone()].into())
.await?;
let torrent_source =
HashTorrentSource::from_url_and_http_client(&http_client, torrent_url).await?;
let folder_name = format!("torrent_test_{}", Utc::now().timestamp());
let save_path = base_save_path.join(&folder_name);
let torrent_creation = QBittorrentCreation {
save_path,
tags: vec![],
sources: vec![torrent_source],
category: None,
};
downloader.add_downloads(torrent_creation).await?;
let get_torrent = async || -> Result<QBittorrentTask, DownloaderError> {
let torrent_infos = downloader
.query_downloads(QBittorrentSelector::Hash(QBittorrentHashSelector::from_id(
torrent_hash.clone(),
)))
.await?;
let result = torrent_infos
.into_iter()
.find(|t| t.hash_info() == torrent_hash)
.whatever_context::<_, DownloaderError>("no torrent found")?;
Ok(result)
};
let target_torrent = get_torrent().await?;
let files = target_torrent.contents;
assert!(!files.is_empty());
let first_file = files.first().expect("should have first file");
assert!(
&first_file.name.ends_with(r#"[Nekomoe kissaten&LoliHouse] Boku no Kokoro no Yabai Yatsu - 20 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#)
);
let test_tag = "test_tag".to_string();
downloader
.add_torrent_tags(vec![torrent_hash.clone()], vec![test_tag.clone()])
.await?;
let target_torrent = get_torrent().await?;
assert!(target_torrent.tags().any(|s| s == test_tag));
let test_category = format!("test_category_{}", Utc::now().timestamp());
downloader
.set_torrents_category(vec![torrent_hash.clone()], &test_category)
.await?;
let target_torrent = get_torrent().await?;
assert_eq!(
Some(test_category.as_str()),
target_torrent.category().as_deref()
);
let moved_torrent_path = base_save_path.join(format!("moved_{}", Utc::now().timestamp()));
downloader
.move_torrents(vec![torrent_hash.clone()], moved_torrent_path.as_str())
.await?;
let target_torrent = get_torrent().await?;
let actual_content_path = &target_torrent
.torrent
.save_path
.expect("failed to get actual save path");
assert!(
path_equals_as_file_url(actual_content_path, moved_torrent_path)
.whatever_context::<_, RError>(
"failed to compare actual torrent path and found expected torrent path"
)?
);
downloader
.remove_torrents(vec![torrent_hash.clone()].into())
.await?;
let torrent_infos1 = downloader
.query_downloads(QBittorrentSelector::Complex(QBittorrentComplexSelector {
query: GetTorrentListArg::builder()
.filter(QbitTorrentFilter::All)
.build(),
}))
.await?;
assert!(torrent_infos1.is_empty());
tracing::info!("test finished");
Ok(())
}
}

View File

@ -0,0 +1,11 @@
pub mod downloader;
pub mod task;
#[cfg(test)]
mod test;
pub use downloader::{QBittorrentDownloader, QBittorrentDownloaderCreation, QBittorrentSyncData};
pub use task::{
QBittorrentComplexSelector, QBittorrentCreation, QBittorrentHash, QBittorrentHashSelector,
QBittorrentSelector, QBittorrentState, QBittorrentTask,
};
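With the qbit module split into `downloader.rs` / `task.rs` / `test.rs`, these re-exports keep the old flat paths working. A hedged end-to-end sketch against the re-exported surface; endpoint, credentials, and paths are placeholders:

use downloader::qbit::{QBittorrentDownloader, QBittorrentDownloaderCreation};

async fn connect_qbit() -> Result<(), downloader::DownloaderError> {
    let downloader = QBittorrentDownloader::from_creation(QBittorrentDownloaderCreation {
        endpoint: "http://127.0.0.1:8080".to_string(),
        username: "admin".to_string(),
        password: "adminadmin".to_string(),
        subscriber_id: 0,
        downloader_id: 0,
        save_path: "/tmp/konobangu/qbit".to_string(),
        wait_sync_timeout: None,
    })
    .await?;
    downloader.check_connection().await?;
    Ok(())
}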

View File

@ -0,0 +1,252 @@
use std::{borrow::Cow, time::Duration};
use itertools::Itertools;
use qbit_rs::model::{
GetTorrentListArg, State, Torrent as QbitTorrent, TorrentContent as QbitTorrentContent,
};
use quirks_path::{Path, PathBuf};
use crate::{
DownloaderError,
bittorrent::{
source::HashTorrentSource,
task::{SimpleTorrentHash, TorrentCreationTrait, TorrentStateTrait, TorrentTaskTrait},
},
core::{
DownloadCreationTrait, DownloadIdSelector, DownloadSelectorTrait, DownloadSimpleState,
DownloadStateTrait, DownloadTaskTrait,
},
};
pub type QBittorrentHash = SimpleTorrentHash;
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct QBittorrentState(Option<State>);
impl From<State> for QBittorrentState {
fn from(value: State) -> Self {
Self(Some(value))
}
}
impl From<Option<State>> for QBittorrentState {
fn from(value: Option<State>) -> Self {
Self(value)
}
}
impl DownloadStateTrait for QBittorrentState {
fn to_download_state(&self) -> DownloadSimpleState {
if let Some(ref state) = self.0 {
match state {
State::ForcedUP
| State::Uploading
| State::PausedUP
| State::QueuedUP
| State::StalledUP
| State::CheckingUP => DownloadSimpleState::Completed,
State::Error | State::MissingFiles => DownloadSimpleState::Error,
State::Unknown => DownloadSimpleState::Unknown,
State::PausedDL => DownloadSimpleState::Paused,
State::Allocating
| State::Moving
| State::MetaDL
| State::ForcedDL
| State::CheckingResumeData
| State::QueuedDL
| State::Downloading
| State::StalledDL
| State::CheckingDL => DownloadSimpleState::Active,
}
} else {
DownloadSimpleState::Unknown
}
}
}
impl TorrentStateTrait for QBittorrentState {}
#[derive(Debug)]
pub struct QBittorrentTask {
pub hash_info: QBittorrentHash,
pub torrent: QbitTorrent,
pub contents: Vec<QbitTorrentContent>,
pub state: QBittorrentState,
}
impl QBittorrentTask {
pub fn from_query(
torrent: QbitTorrent,
contents: Vec<QbitTorrentContent>,
) -> Result<Self, DownloaderError> {
let hash = torrent
.hash
.clone()
.ok_or_else(|| DownloaderError::TorrentMetaError {
message: "missing hash".to_string(),
source: None.into(),
})?;
let state = QBittorrentState(torrent.state.clone());
Ok(Self {
hash_info: hash,
contents,
state,
torrent,
})
}
}
impl DownloadTaskTrait for QBittorrentTask {
type State = QBittorrentState;
type Id = QBittorrentHash;
fn id(&self) -> &Self::Id {
&self.hash_info
}
fn into_id(self) -> Self::Id {
self.hash_info
}
fn name(&self) -> Cow<'_, str> {
self.torrent
.name
.as_deref()
.map(Cow::Borrowed)
.unwrap_or_else(|| DownloadTaskTrait::name(self))
}
fn speed(&self) -> Option<u64> {
self.torrent.dlspeed.and_then(|s| u64::try_from(s).ok())
}
fn state(&self) -> &Self::State {
&self.state
}
fn dl_bytes(&self) -> Option<u64> {
self.torrent.downloaded.and_then(|v| u64::try_from(v).ok())
}
fn total_bytes(&self) -> Option<u64> {
self.torrent.size.and_then(|v| u64::try_from(v).ok())
}
fn left_bytes(&self) -> Option<u64> {
self.torrent.amount_left.and_then(|v| u64::try_from(v).ok())
}
fn et(&self) -> Option<Duration> {
self.torrent
.time_active
.and_then(|v| u64::try_from(v).ok())
.map(Duration::from_secs)
}
fn eta(&self) -> Option<Duration> {
self.torrent
.eta
.and_then(|v| u64::try_from(v).ok())
.map(Duration::from_secs)
}
fn progress(&self) -> Option<f32> {
self.torrent.progress.as_ref().map(|s| *s as f32)
}
}
impl TorrentTaskTrait for QBittorrentTask {
fn hash_info(&self) -> Cow<'_, str> {
Cow::Borrowed(&self.hash_info)
}
fn tags(&self) -> impl Iterator<Item = Cow<'_, str>> {
self.torrent
.tags
.as_deref()
.unwrap_or("")
.split(',')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.map(Cow::Borrowed)
}
fn category(&self) -> Option<Cow<'_, str>> {
self.torrent.category.as_deref().map(Cow::Borrowed)
}
}
#[derive(Debug, Clone, Default)]
pub struct QBittorrentCreation {
pub save_path: PathBuf,
pub tags: Vec<String>,
pub category: Option<String>,
pub sources: Vec<HashTorrentSource>,
}
impl DownloadCreationTrait for QBittorrentCreation {
type Task = QBittorrentTask;
}
impl TorrentCreationTrait for QBittorrentCreation {
fn save_path(&self) -> &Path {
self.save_path.as_ref()
}
fn save_path_mut(&mut self) -> &mut PathBuf {
&mut self.save_path
}
fn sources_mut(&mut self) -> &mut Vec<HashTorrentSource> {
&mut self.sources
}
}
pub type QBittorrentHashSelector = DownloadIdSelector<QBittorrentTask>;
#[derive(Debug)]
pub struct QBittorrentComplexSelector {
pub query: GetTorrentListArg,
}
impl From<QBittorrentHashSelector> for QBittorrentComplexSelector {
fn from(value: QBittorrentHashSelector) -> Self {
Self {
query: GetTorrentListArg {
hashes: Some(value.ids.join("|")),
..Default::default()
},
}
}
}
impl DownloadSelectorTrait for QBittorrentComplexSelector {
type Id = QBittorrentHash;
type Task = QBittorrentTask;
}
#[derive(Debug)]
pub enum QBittorrentSelector {
Hash(QBittorrentHashSelector),
Complex(QBittorrentComplexSelector),
}
impl DownloadSelectorTrait for QBittorrentSelector {
type Id = QBittorrentHash;
type Task = QBittorrentTask;
fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
match self {
QBittorrentSelector::Complex(c) => {
c.try_into_ids_only().map_err(QBittorrentSelector::Complex)
}
QBittorrentSelector::Hash(h) => {
let result = h
.try_into_ids_only()
.unwrap_or_else(|_| unreachable!("hash selector must contain hashes"))
.into_iter();
Ok(result.collect_vec())
}
}
}
}
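The two selector shapes moved here unchanged: the hash form collapses to raw ids, while the complex form forwards a full `qbit-rs` query. A short sketch of constructing each, mirroring the calls in the test that follows:

use qbit_rs::model::{GetTorrentListArg, TorrentFilter};

fn example_selectors(hash: String) -> (QBittorrentSelector, QBittorrentSelector) {
    let by_hash = QBittorrentSelector::Hash(QBittorrentHashSelector::from_id(hash));
    let all_torrents = QBittorrentSelector::Complex(QBittorrentComplexSelector {
        query: GetTorrentListArg::builder()
            .filter(TorrentFilter::All)
            .build(),
    });
    (by_hash, all_torrents)
}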

View File

@ -0,0 +1,274 @@
use std::time::Duration;
use chrono::Utc;
use qbit_rs::model::{GetTorrentListArg, TorrentFilter as QbitTorrentFilter};
use quirks_path::Path;
use snafu::OptionExt;
use crate::{
DownloaderError,
bittorrent::{
downloader::TorrentDownloaderTrait, source::HashTorrentSource, task::TorrentTaskTrait,
},
core::{DownloadIdSelectorTrait, DownloaderTrait},
qbit::{
QBittorrentDownloader, QBittorrentDownloaderCreation,
task::{
QBittorrentComplexSelector, QBittorrentCreation, QBittorrentHashSelector,
QBittorrentSelector, QBittorrentTask,
},
},
utils::path_equals_as_file_url,
};
fn get_tmp_qbit_test_folder() -> &'static str {
if cfg!(all(windows, not(feature = "testcontainers"))) {
"C:\\Windows\\Temp\\konobangu\\qbit"
} else {
"/tmp/konobangu/qbit"
}
}
#[cfg(feature = "testcontainers")]
pub async fn create_qbit_testcontainers()
-> anyhow::Result<testcontainers::ContainerRequest<testcontainers::GenericImage>> {
use testcontainers::{
GenericImage,
core::{
ContainerPort,
// ReuseDirective,
WaitFor,
},
};
use testcontainers_ext::{ImageDefaultLogConsumerExt, ImagePruneExistedLabelExt};
use testcontainers_modules::testcontainers::ImageExt;
let container = GenericImage::new("linuxserver/qbittorrent", "latest")
.with_wait_for(WaitFor::message_on_stderr("Connection to localhost"))
.with_env_var("WEBUI_PORT", "8080")
.with_env_var("TZ", "Asia/Singapore")
.with_env_var("TORRENTING_PORT", "6881")
.with_mapped_port(6881, ContainerPort::Tcp(6881))
.with_mapped_port(8080, ContainerPort::Tcp(8080))
// .with_reuse(ReuseDirective::Always)
.with_default_log_consumer()
.with_prune_existed_label(env!("CARGO_PKG_NAME"), "qbit-downloader", true, true)
.await?;
Ok(container)
}
#[cfg(not(feature = "testcontainers"))]
#[tokio::test]
async fn test_qbittorrent_downloader() {
let hash = "47ee2d69e7f19af783ad896541a07b012676f858".to_string();
let torrent_url = format!("https://mikanani.me/Download/20240301/{}.torrent", hash);
let _ = test_qbittorrent_downloader_impl(torrent_url, hash, None, None).await;
}
#[cfg(feature = "testcontainers")]
#[tokio::test(flavor = "multi_thread")]
async fn test_qbittorrent_downloader() -> anyhow::Result<()> {
use testcontainers::runners::AsyncRunner;
use testing_torrents::{TestTorrentRequest, TestTorrentResponse, TestingTorrentFileItem};
use tokio::io::AsyncReadExt;
tracing_subscriber::fmt()
.with_max_level(tracing::Level::DEBUG)
.with_test_writer()
.init();
let torrents_image = testing_torrents::create_testcontainers().await?;
let _torrents_container = torrents_image.start().await?;
let torrents_req = TestTorrentRequest {
id: "f10ebdda-dd2e-43f8-b80c-bf0884d071c4".into(),
file_list: vec![TestingTorrentFileItem {
path: "[Nekomoe kissaten&LoliHouse] Boku no Kokoro no Yabai Yatsu - 20 [WebRip 1080p \
HEVC-10bit AAC ASSx2].mkv"
.into(),
size: 1024,
}],
};
let torrent_res: TestTorrentResponse = reqwest::Client::new()
.post("http://127.0.0.1:6080/api/torrents/mock")
.json(&torrents_req)
.send()
.await?
.json()
.await?;
let qbit_image = create_qbit_testcontainers().await?;
let qbit_container = qbit_image.start().await?;
let mut logs = String::new();
qbit_container
.stdout(false)
.read_to_string(&mut logs)
.await?;
let username = logs
.lines()
.find_map(|line| {
if line.contains("The WebUI administrator username is") {
line.split_whitespace().last()
} else {
None
}
})
.expect("should have username")
.trim();
let password = logs
.lines()
.find_map(|line| {
if line.contains("A temporary password is provided for") {
line.split_whitespace().last()
} else {
None
}
})
.expect("should have password")
.trim();
tracing::info!(username, password);
test_qbittorrent_downloader_impl(
torrent_res.torrent_url,
torrent_res.hash,
Some(username),
Some(password),
)
.await?;
Ok(())
}
async fn test_qbittorrent_downloader_impl(
torrent_url: String,
torrent_hash: String,
username: Option<&str>,
password: Option<&str>,
) -> anyhow::Result<()> {
let http_client = fetch::test_util::build_testing_http_client()?;
let base_save_path = Path::new(get_tmp_qbit_test_folder());
let downloader = QBittorrentDownloader::from_creation(QBittorrentDownloaderCreation {
endpoint: "http://127.0.0.1:8080".to_string(),
password: password.unwrap_or_default().to_string(),
username: username.unwrap_or_default().to_string(),
subscriber_id: 0,
save_path: base_save_path.to_string(),
downloader_id: 0,
wait_sync_timeout: Some(Duration::from_secs(3)),
})
.await?;
downloader.check_connection().await?;
downloader
.remove_torrents(vec![torrent_hash.clone()].into())
.await?;
let torrent_source =
HashTorrentSource::from_url_and_http_client(&http_client, torrent_url).await?;
let folder_name = format!("torrent_test_{}", Utc::now().timestamp());
let save_path = base_save_path.join(&folder_name);
let torrent_creation = QBittorrentCreation {
save_path,
tags: vec![],
sources: vec![torrent_source],
category: None,
};
downloader.add_downloads(torrent_creation).await?;
let get_torrent = async || -> Result<QBittorrentTask, DownloaderError> {
let torrent_infos = downloader
.query_downloads(QBittorrentSelector::Hash(QBittorrentHashSelector::from_id(
torrent_hash.clone(),
)))
.await?;
let result = torrent_infos
.into_iter()
.find(|t| t.hash_info() == torrent_hash)
.whatever_context::<_, DownloaderError>("no torrent found")?;
Ok(result)
};
let target_torrent = get_torrent().await?;
let files = target_torrent.contents;
assert!(!files.is_empty());
let first_file = files.first().expect("should have first file");
assert!(
&first_file.name.ends_with(r#"[Nekomoe kissaten&LoliHouse] Boku no Kokoro no Yabai Yatsu - 20 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#)
);
let test_tag = "test_tag".to_string();
downloader
.add_torrent_tags(vec![torrent_hash.clone()], vec![test_tag.clone()])
.await?;
let target_torrent = get_torrent().await?;
assert!(target_torrent.tags().any(|s| s == test_tag));
let test_category = format!("test_category_{}", Utc::now().timestamp());
downloader
.set_torrents_category(vec![torrent_hash.clone()], &test_category)
.await?;
let target_torrent = get_torrent().await?;
assert_eq!(
Some(test_category.as_str()),
target_torrent.category().as_deref()
);
let moved_torrent_path = base_save_path.join(format!("moved_{}", Utc::now().timestamp()));
downloader
.move_torrents(vec![torrent_hash.clone()], moved_torrent_path.as_str())
.await?;
let target_torrent = get_torrent().await?;
let actual_content_path = &target_torrent
.torrent
.save_path
.expect("failed to get actual save path");
assert!(
path_equals_as_file_url(actual_content_path, moved_torrent_path)
.expect("failed to compare actual torrent path and found expected torrent path")
);
downloader
.remove_torrents(vec![torrent_hash.clone()].into())
.await?;
let torrent_infos1 = downloader
.query_downloads(QBittorrentSelector::Complex(QBittorrentComplexSelector {
query: GetTorrentListArg::builder()
.filter(QbitTorrentFilter::All)
.build(),
}))
.await?;
assert!(torrent_infos1.is_empty());
tracing::info!("test finished");
Ok(())
}

View File

@ -0,0 +1,278 @@
use std::{str::FromStr, sync::Arc};
use async_trait::async_trait;
use librqbit::{
AddTorrent, AddTorrentOptions, ManagedTorrent, Session, SessionOptions, api::TorrentIdOrHash,
};
use librqbit_core::Id20;
use snafu::ResultExt;
use tracing::instrument;
use util::errors::AnyhowResultExt;
use super::task::{RqbitCreation, RqbitHash, RqbitSelector, RqbitState, RqbitTask};
use crate::{
DownloaderError,
bittorrent::{
downloader::TorrentDownloaderTrait,
source::{HashTorrentSource, HashTorrentSourceTrait},
},
core::{DownloadIdSelector, DownloaderTrait},
errors::RqbitSnafu,
};
#[derive(Debug)]
pub struct RqbitDownloaderCreation {
pub save_path: String,
pub subscriber_id: i32,
pub downloader_id: i32,
}
impl RqbitDownloaderCreation {}
pub struct RqbitDownloader {
pub save_path: String,
pub subscriber_id: i32,
pub downloader_id: i32,
pub session: Arc<Session>,
}
impl RqbitDownloader {
#[instrument(level = "debug")]
pub async fn from_creation(
creation: RqbitDownloaderCreation,
) -> Result<Arc<Self>, DownloaderError> {
let session_opt = SessionOptions {
..Default::default()
};
let session = Session::new_with_opts(creation.save_path.clone().into(), session_opt)
.await
.to_dyn_boxed()
.context(RqbitSnafu {})?;
Ok(Arc::new(Self {
session,
save_path: creation.save_path,
subscriber_id: creation.subscriber_id,
downloader_id: creation.downloader_id,
}))
}
pub async fn add_torrent(
&self,
source: HashTorrentSource,
opt: Option<AddTorrentOptions>,
) -> Result<RqbitHash, DownloaderError> {
let hash = Id20::from_str(&source.hash_info() as &str)
.to_dyn_boxed()
.context(RqbitSnafu {})?;
let source = match source {
HashTorrentSource::TorrentFile(file) => AddTorrent::TorrentFileBytes(file.payload),
HashTorrentSource::MagnetUrl(magnet) => AddTorrent::Url(magnet.url.into()),
};
let response = self
.session
.add_torrent(source, opt)
.await
.to_dyn_boxed()
.context(RqbitSnafu {})?;
let handle = response
.into_handle()
.ok_or_else(|| anyhow::anyhow!("failed to get handle of added torrent task"))
.to_dyn_boxed()
.context(RqbitSnafu {})?;
handle
.wait_until_initialized()
.await
.to_dyn_boxed()
.context(RqbitSnafu {})?;
Ok(hash)
}
fn query_torrent_impl(&self, hash: RqbitHash) -> Result<Arc<ManagedTorrent>, DownloaderError> {
let torrent = self
.session
.get(TorrentIdOrHash::Hash(hash))
.ok_or_else(|| anyhow::anyhow!("could not find torrent by hash {}", hash.as_string()))
.to_dyn_boxed()
.context(RqbitSnafu {})?;
Ok(torrent)
}
pub fn query_torrent(&self, hash: RqbitHash) -> Result<RqbitTask, DownloaderError> {
let torrent = self.query_torrent_impl(hash)?;
let task = RqbitTask::from_query(torrent)?;
Ok(task)
}
pub async fn pause_torrent(&self, hash: RqbitHash) -> Result<(), DownloaderError> {
let t = self.query_torrent_impl(hash)?;
self.session
.pause(&t)
.await
.to_dyn_boxed()
.context(RqbitSnafu {})?;
Ok(())
}
pub async fn resume_torrent(&self, hash: RqbitHash) -> Result<(), DownloaderError> {
let t = self.query_torrent_impl(hash)?;
self.session
.unpause(&t)
.await
.to_dyn_boxed()
.context(RqbitSnafu {})?;
Ok(())
}
pub async fn delete_torrent(&self, hash: RqbitHash) -> Result<(), DownloaderError> {
self.session
.delete(TorrentIdOrHash::Hash(hash), true)
.await
.to_dyn_boxed()
.context(RqbitSnafu {})?;
Ok(())
}
}
#[async_trait]
impl DownloaderTrait for RqbitDownloader {
type State = RqbitState;
type Id = RqbitHash;
type Task = RqbitTask;
type Creation = RqbitCreation;
type Selector = RqbitSelector;
#[instrument(level = "debug", skip(self))]
async fn add_downloads(
&self,
creation: RqbitCreation,
) -> Result<Vec<<Self as DownloaderTrait>::Id>, DownloaderError> {
let mut sources = creation.sources;
if sources.len() == 1 {
let hash = self
.add_torrent(
sources.pop().unwrap(),
Some(AddTorrentOptions {
paused: false,
output_folder: Some(self.save_path.clone()),
..Default::default()
}),
)
.await?;
Ok(vec![hash])
} else {
let tasks = sources
.into_iter()
.map(|s| {
self.add_torrent(
s,
Some(AddTorrentOptions {
paused: false,
output_folder: Some(self.save_path.clone()),
..Default::default()
}),
)
})
.collect::<Vec<_>>();
let results = futures::future::try_join_all(tasks).await?;
Ok(results)
}
}
async fn pause_downloads(
&self,
selector: <Self as DownloaderTrait>::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::pause_downloads(self, selector).await
}
async fn resume_downloads(
&self,
selector: <Self as DownloaderTrait>::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::resume_downloads(self, selector).await
}
async fn remove_downloads(
&self,
selector: <Self as DownloaderTrait>::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::remove_downloads(self, selector).await
}
#[instrument(level = "debug", skip(self))]
async fn query_downloads(
&self,
selector: RqbitSelector,
) -> Result<Vec<<Self as DownloaderTrait>::Task>, DownloaderError> {
let hashes = selector.into_iter();
let tasks = hashes
.map(|h| self.query_torrent(h))
.collect::<Result<Vec<_>, DownloaderError>>()?;
Ok(tasks)
}
}
#[async_trait]
impl TorrentDownloaderTrait for RqbitDownloader {
type IdSelector = DownloadIdSelector<Self::Task>;
#[instrument(level = "debug", skip(self))]
async fn pause_torrents(
&self,
selector: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> {
let mut hashes: Vec<_> = selector.clone();
if hashes.len() == 1 {
self.pause_torrent(hashes.pop().unwrap()).await?;
} else {
futures::future::try_join_all(hashes.into_iter().map(|h| self.pause_torrent(h)))
.await?;
}
Ok(selector)
}
#[instrument(level = "debug", skip(self))]
async fn resume_torrents(
&self,
selector: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> {
let mut hashes: Vec<_> = selector.clone();
if hashes.len() == 1 {
self.resume_torrent(hashes.pop().unwrap()).await?;
} else {
futures::future::try_join_all(hashes.into_iter().map(|h| self.resume_torrent(h)))
.await?;
}
Ok(selector)
}
#[instrument(level = "debug", skip(self))]
async fn remove_torrents(
&self,
selector: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> {
let mut hashes: Vec<_> = selector.clone();
if hashes.len() == 1 {
self.delete_torrent(hashes.pop().unwrap()).await?;
} else {
futures::future::try_join_all(hashes.into_iter().map(|h| self.delete_torrent(h)))
.await?;
}
Ok(selector)
}
}
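
Taken together, a minimal caller sketch (hypothetical, not part of this diff): `MagnetUrl` names the payload type matched out of `HashTorrentSource::MagnetUrl` above, and `DownloaderTrait`/`DownloadTaskTrait` are assumed to be in scope.

// Hedged usage sketch of the downloader defined above.
async fn demo(magnet: MagnetUrl) -> Result<(), DownloaderError> {
    let downloader = RqbitDownloader::from_creation(RqbitDownloaderCreation {
        save_path: "/tmp/downloads".to_string(),
        subscriber_id: 1,
        downloader_id: 1,
    })
    .await?;

    let hashes = downloader
        .add_downloads(RqbitCreation {
            save_path: PathBuf::from("/tmp/downloads"),
            sources: vec![HashTorrentSource::MagnetUrl(magnet)],
            ..Default::default()
        })
        .await?;

    for hash in hashes {
        let task = downloader.query_torrent(hash)?;
        println!("{}: {:?}/{:?} bytes", task.name(), task.dl_bytes(), task.total_bytes());
    }
    Ok(())
}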

View File

@ -1,3 +1,4 @@
pub mod downloader;
pub mod source;
pub mod task;
#[cfg(test)]
mod test;

View File

@ -0,0 +1,189 @@
use std::{borrow::Cow, fmt::Debug, sync::Arc, time::Duration};
use librqbit::{ManagedTorrent, ManagedTorrentState, TorrentStats, TorrentStatsState};
use librqbit_core::Id20;
use quirks_path::{Path, PathBuf};
use crate::{
DownloaderError,
bittorrent::{
source::HashTorrentSource,
task::{TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait, TorrentTaskTrait},
},
core::{
DownloadCreationTrait, DownloadIdSelector, DownloadIdTrait, DownloadSimpleState,
DownloadStateTrait, DownloadTaskTrait,
},
};
pub type RqbitHash = Id20;
impl DownloadIdTrait for RqbitHash {}
impl TorrentHashTrait for RqbitHash {}
#[derive(Debug, Clone)]
pub struct RqbitState(Arc<TorrentStats>);
impl DownloadStateTrait for RqbitState {
fn to_download_state(&self) -> DownloadSimpleState {
match self.0.state {
TorrentStatsState::Error => DownloadSimpleState::Error,
TorrentStatsState::Paused => DownloadSimpleState::Paused,
TorrentStatsState::Live => {
if self.0.finished {
DownloadSimpleState::Completed
} else {
DownloadSimpleState::Active
}
}
TorrentStatsState::Initializing => DownloadSimpleState::Active,
}
}
}
impl TorrentStateTrait for RqbitState {}
impl From<Arc<TorrentStats>> for RqbitState {
fn from(value: Arc<TorrentStats>) -> Self {
Self(value)
}
}
pub struct RqbitTask {
pub hash_info: RqbitHash,
pub torrent: Arc<ManagedTorrent>,
pub state: RqbitState,
pub stats: Arc<TorrentStats>,
}
impl RqbitTask {
pub fn from_query(torrent: Arc<ManagedTorrent>) -> Result<Self, DownloaderError> {
let hash = torrent.info_hash();
let stats = Arc::new(torrent.stats());
Ok(Self {
hash_info: hash,
state: stats.clone().into(),
stats,
torrent,
})
}
}
impl Debug for RqbitTask {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("RqbitTask")
.field("hash_info", &self.hash_info)
.field("state", &self.id())
.finish()
}
}
impl DownloadTaskTrait for RqbitTask {
type State = RqbitState;
type Id = RqbitHash;
fn id(&self) -> &Self::Id {
&self.hash_info
}
fn into_id(self) -> Self::Id {
self.hash_info
}
fn name(&self) -> Cow<'_, str> {
self.torrent
.metadata
.load_full()
.and_then(|m| m.name.to_owned())
.map(Cow::Owned)
// Fall back to the info-hash string; `DownloadTaskTrait::name(self)`
// would dispatch back into this same override and recurse.
.unwrap_or_else(|| TorrentTaskTrait::hash_info(self))
}
fn speed(&self) -> Option<u64> {
self.stats
.live
.as_ref()
.map(|s| s.download_speed.mbps)
.and_then(|u| {
let v = u * 1024f64 * 1024f64;
if v.is_finite() && v > 0.0 && v < u64::MAX as f64 {
Some(v as u64)
} else {
None
}
})
}
fn state(&self) -> &Self::State {
&self.state
}
fn dl_bytes(&self) -> Option<u64> {
Some(self.stats.progress_bytes)
}
fn total_bytes(&self) -> Option<u64> {
Some(self.stats.total_bytes)
}
fn et(&self) -> Option<Duration> {
self.torrent.with_state(|l| match l {
ManagedTorrentState::Live(l) => Some(Duration::from_millis(
l.stats_snapshot().total_piece_download_ms,
)),
_ => None,
})
}
fn eta(&self) -> Option<Duration> {
self.torrent.with_state(|l| match l {
ManagedTorrentState::Live(l) => l.down_speed_estimator().time_remaining(),
_ => None,
})
}
}
impl TorrentTaskTrait for RqbitTask {
fn hash_info(&self) -> Cow<'_, str> {
Cow::Owned(self.hash_info.as_string())
}
fn tags(&self) -> impl Iterator<Item = Cow<'_, str>> {
std::iter::empty()
}
fn category(&self) -> Option<Cow<'_, str>> {
None
}
}
#[derive(Debug, Clone, Default)]
pub struct RqbitCreation {
pub save_path: PathBuf,
pub tags: Vec<String>,
pub category: Option<String>,
pub sources: Vec<HashTorrentSource>,
}
impl DownloadCreationTrait for RqbitCreation {
type Task = RqbitTask;
}
impl TorrentCreationTrait for RqbitCreation {
fn save_path(&self) -> &Path {
self.save_path.as_ref()
}
fn save_path_mut(&mut self) -> &mut PathBuf {
&mut self.save_path
}
fn sources_mut(&mut self) -> &mut Vec<HashTorrentSource> {
&mut self.sources
}
}
pub type RqbitHashSelector = DownloadIdSelector<RqbitTask>;
pub type RqbitSelector = RqbitHashSelector;
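
A small consumption sketch for the accessors above (assumes `DownloadTaskTrait` is in scope; `speed()` is in bytes/s per the conversion above):

// Renders a one-line progress summary from the trait accessors above.
fn progress_line(task: &RqbitTask) -> String {
    let pct = match (task.dl_bytes(), task.total_bytes()) {
        (Some(done), Some(total)) if total > 0 => done as f64 * 100.0 / total as f64,
        _ => 0.0,
    };
    let kibps = task.speed().unwrap_or(0) / 1024;
    format!("{}: {:.1}% at {} KiB/s", task.name(), pct, kibps)
}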

View File

packages/fetch/Cargo.toml Normal file
View File

@ -0,0 +1,35 @@
[package]
name = "fetch"
version = "0.1.0"
edition = "2024"
[dependencies]
snafu = { workspace = true }
bytes = { workspace = true }
url = { workspace = true }
serde = { workspace = true }
serde_with = { workspace = true }
lazy_static = { workspace = true }
serde_json = { workspace = true }
axum = { workspace = true }
axum-extra = { workspace = true }
async-trait = { workspace = true }
moka = { workspace = true }
reqwest = { workspace = true }
leaky-bucket = "1.1"
cookie = "0.18"
http-cache-reqwest = { version = "0.15", features = [
"manager-cacache",
"manager-moka",
] }
http-cache-semantics = "2.1"
fastrand = "2.3"
reqwest-middleware = "0.4"
reqwest-retry = "0.7"
reqwest-tracing = "0.5"
http-cache = { version = "0.20", features = [
"cacache-tokio",
"manager-cacache",
"manager-moka",
], default-features = false }

View File

@ -2,12 +2,12 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
use crate::errors::app_error::RError;
use crate::FetchError;
pub async fn fetch_bytes<T: IntoUrl, H: HttpClientTrait>(
client: &H,
url: T,
) -> Result<Bytes, RError> {
) -> Result<Bytes, FetchError> {
let bytes = client
.get(url)
.send()

View File

@ -17,7 +17,7 @@ use serde_with::serde_as;
use snafu::Snafu;
use super::HttpClientSecrecyDataTrait;
use crate::fetch::get_random_mobile_ua;
use crate::get_random_mobile_ua;
pub struct RateLimiterMiddleware {
rate_limiter: RateLimiter,

View File

@ -4,7 +4,7 @@ use cookie::Cookie;
use reqwest::{ClientBuilder, cookie::Jar};
use url::Url;
use crate::errors::app_error::RError;
use crate::FetchError;
pub trait HttpClientSecrecyDataTrait {
fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
@ -23,9 +23,9 @@ impl HttpClientCookiesAuth {
cookies: &str,
url: &Url,
user_agent: Option<String>,
) -> Result<Self, RError> {
) -> Result<Self, FetchError> {
let cookie_jar = Arc::new(Jar::default());
for cookie in Cookie::split_parse(cookies).try_collect::<Vec<_>>()? {
for cookie in Cookie::split_parse(cookies).collect::<Result<Vec<Cookie<'_>>, _>>()? {
cookie_jar.add_cookie_str(&cookie.to_string(), url);
}
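
The hunk above swaps the nightly-only `Iterator::try_collect` for the stable collect-into-`Result` idiom, which short-circuits on the first parse error; a standalone illustration:

// Collecting an iterator of Results into Result<Vec<_>, _> stops at
// the first cookie that fails to parse.
let parsed: Result<Vec<cookie::Cookie<'_>>, cookie::ParseError> =
    cookie::Cookie::split_parse("a=1; b=2").collect();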

View File

@ -0,0 +1,12 @@
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub enum FetchError {
#[snafu(transparent)]
CookieParseError { source: cookie::ParseError },
#[snafu(transparent)]
ReqwestError { source: reqwest::Error },
#[snafu(transparent)]
RequestMiddlewareError { source: reqwest_middleware::Error },
}
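
Because every variant is `#[snafu(transparent)]`, snafu also generates the corresponding `From` impls, so `?` lifts the underlying errors into `FetchError` without a context selector; a hypothetical sketch:

// reqwest::Error converts into FetchError implicitly via `?`.
async fn fetch_status(url: &str) -> Result<u16, FetchError> {
    let resp = reqwest::get(url).await?;
    Ok(resp.status().as_u16())
}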

View File

@ -1,12 +1,12 @@
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
use crate::errors::app_error::RError;
use crate::FetchError;
pub async fn fetch_html<T: IntoUrl, H: HttpClientTrait>(
client: &H,
url: T,
) -> Result<String, RError> {
) -> Result<String, FetchError> {
let content = client
.get(url)
.send()

View File

@ -2,11 +2,11 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::{bytes::fetch_bytes, client::HttpClientTrait};
use crate::errors::app_error::RError;
use crate::FetchError;
pub async fn fetch_image<T: IntoUrl, H: HttpClientTrait>(
client: &H,
url: T,
) -> Result<Bytes, RError> {
) -> Result<Bytes, FetchError> {
fetch_bytes(client, url).await
}

View File

@ -1,9 +1,10 @@
pub mod bytes;
pub mod client;
pub mod core;
pub mod errors;
pub mod html;
pub mod image;
pub mod oidc;
pub mod test_util;
pub use core::get_random_mobile_ua;
@ -12,5 +13,8 @@ pub use client::{
HttpClient, HttpClientConfig, HttpClientCookiesAuth, HttpClientError,
HttpClientSecrecyDataTrait, HttpClientTrait,
};
pub use errors::FetchError;
pub use html::fetch_html;
pub use image::fetch_image;
pub use reqwest::{self, IntoUrl};
pub use reqwest_middleware;
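
With these re-exports, downstream crates consume the whole surface through the crate root; a hedged sketch (`HttpClient::default()` mirrors the test helper later in this diff):

use fetch::{FetchError, HttpClient, fetch_html};

// Fetches a page body through the crate's public API.
async fn grab(url: &str) -> Result<String, FetchError> {
    let client = HttpClient::default();
    fetch_html(&client, url).await
}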

View File

@ -0,0 +1,6 @@
use crate::{FetchError, HttpClient};
pub fn build_testing_http_client() -> Result<HttpClient, FetchError> {
let client = HttpClient::default();
Ok(client)
}

View File

@ -0,0 +1,10 @@
[package]
name = "testing-torrents"
version = "0.1.0"
edition = "2024"
[dependencies]
testcontainers = { workspace = true }
testcontainers-modules = { workspace = true }
testcontainers-ext = { workspace = true }
serde = { workspace = true }

View File

@ -0,0 +1,44 @@
use serde::{Deserialize, Serialize};
use testcontainers::{
GenericImage,
core::{ContainerPort, WaitFor},
};
use testcontainers_ext::{ImageDefaultLogConsumerExt, ImagePruneExistedLabelExt};
use testcontainers_modules::testcontainers::ImageExt;
#[derive(Serialize)]
pub struct TestingTorrentFileItem {
pub path: String,
pub size: u64,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TestTorrentRequest {
pub id: String,
pub file_list: Vec<TestingTorrentFileItem>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TestTorrentResponse {
pub torrent_url: String,
pub magnet_url: String,
pub hash: String,
}
pub async fn create_testcontainers() -> Result<
testcontainers::ContainerRequest<testcontainers::GenericImage>,
testcontainers::TestcontainersError,
> {
let container = GenericImage::new("ghcr.io/dumtruck/konobangu-testing-torrents", "latest")
.with_wait_for(WaitFor::message_on_stdout("Listening on"))
.with_mapped_port(6080, ContainerPort::Tcp(6080))
.with_mapped_port(6081, ContainerPort::Tcp(6081))
.with_mapped_port(6082, ContainerPort::Tcp(6082))
.with_default_log_consumer()
.with_prune_existed_label("konobangu", "testing-torrents", true, true)
.await?;
Ok(container)
}
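
The returned `ContainerRequest` still has to be started; with testcontainers' async runner that is a single call (sketch; `AsyncRunner` lives in `testcontainers::runners`):

use testcontainers::runners::AsyncRunner;

// Starts the request built above; ports 6080-6082 are then reachable
// on the host at the same numbers thanks to the 1:1 mappings.
async fn start_testing_torrents() -> Result<(), testcontainers::TestcontainersError> {
    let _container = create_testcontainers().await?.start().await?;
    Ok(())
}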

Some files were not shown because too many files have changed in this diff.