refactor: split modules
@@ -19,17 +19,35 @@ testcontainers = [
"dep:testcontainers",
"dep:testcontainers-modules",
"dep:testcontainers-ext",
"dep:testing-torrents",
]

[dependencies]
serde = { workspace = true }
tokio = { workspace = true }
serde_json = { workspace = true }
async-trait = { workspace = true }
testcontainers = { workspace = true, optional = true }
testcontainers-modules = { workspace = true, optional = true }
testcontainers-ext = { workspace = true, optional = true, features = [
"tracing",
] }
tracing = { workspace = true }
axum = { workspace = true }
axum-extra = { workspace = true }
snafu = { workspace = true }
itertools = { workspace = true }
url = { workspace = true }
regex = { workspace = true }
lazy_static = { workspace = true }
quirks_path = { workspace = true }
futures = { workspace = true }
bytes = { workspace = true }
serde_with = { workspace = true }
moka = { workspace = true }
chrono = { workspace = true }
tracing-subscriber = { workspace = true }


serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
async-trait = "0.1.83"
tracing = "0.1"
chrono = "0.4"
sea-orm = { version = "1.1", features = [
"sqlx-sqlite",
"sqlx-postgres",
@@ -38,58 +56,25 @@ sea-orm = { version = "1.1", features = [
"debug-print",
] }
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
axum = "0.8"
uuid = { version = "1.6.0", features = ["v4"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio-rustls"] }
reqwest = { version = "0.12", default-features = false, features = [
"charset",
"http2",
"json",
"macos-system-configuration",
"rustls-tls",
"cookies",
] }
rss = "2"
bytes = "1.9"
itertools = "0.14"
url = "2.5"
fancy-regex = "0.14"
regex = "1.11"
lazy_static = "1.5"
maplit = "1.0.2"
lightningcss = "1.0.0-alpha.61"
html-escape = "0.2.13"
opendal = { version = "0.51.0", features = ["default", "services-fs"] }
zune-image = "0.4.15"
once_cell = "1.20.2"
reqwest-middleware = "0.4.0"
reqwest-retry = "0.7.0"
reqwest-tracing = "0.5.5"
scraper = "0.23"
leaky-bucket = "1.1.2"
serde_with = "3"

jwt-authorizer = "0.15.0"
futures = "0.3.31"
librqbit-core = "4"
qbit-rs = { git = "https://github.com/lonelyhentxi/qbit.git", rev = "72d53138ebe", features = [
"default",
"builder",
] }
testcontainers = { version = "0.23.3", optional = true }
testcontainers-modules = { version = "0.11.4", optional = true }
testcontainers-ext = { version = "0.1.0", optional = true, features = [
"tracing",
] }
log = "0.4.22"
async-graphql = { version = "7", features = [] }
async-graphql-axum = "7"
fastrand = "2.3.0"
seaography = { version = "1.1" }
quirks_path = "0.1.1"
base64 = "0.22.1"
tower = "0.5.2"
axum-extra = "0.10"
tower-http = { version = "0.6", features = [
"trace",
"catch-panic",
@@ -102,34 +87,19 @@ tower-http = { version = "0.6", features = [
] }
tera = "1.20.0"
openidconnect = { version = "4", features = ["rustls-tls"] }
http-cache-reqwest = { version = "0.15", features = [
"manager-cacache",
"manager-moka",
] }
moka = "0.12.10"
http-cache = { version = "0.20.0", features = [
"cacache-tokio",
"manager-cacache",
"manager-moka",
], default-features = false }
http-cache-semantics = "2.1.0"
dotenv = "0.15.0"
http = "1.2.0"
cookie = "0.18.1"
async-stream = "0.3.6"
serde_variant = "0.1.3"
tracing-appender = "0.2.3"
clap = "4.5.31"
futures-util = "0.3.31"
ipnetwork = "0.21.1"
librqbit = "8.0.0"
typed-builder = "0.21.0"
snafu = { version = "0.8.5", features = ["futures"] }
anyhow = "1.0.97"
serde_yaml = "0.9.34"
merge-struct = "0.1.0"
serde-value = "0.7.0"
testing-torrents = { path = "../../packages/testing-torrents", optional = true }

downloader = { workspace = true }
util = { workspace = true }
fetch = { workspace = true }

[dev-dependencies]
serial_test = "3"

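Aside: the `testcontainers` feature above only pulls the `dep:`-prefixed optional dependencies into builds that enable it. A minimal sketch of how such feature-gated code is typically consumed — module and function names here are illustrative, not from this repository:

```rust
// Compiled only with `cargo build --features testcontainers`, so the
// optional dependencies stay out of default builds.
#[cfg(feature = "testcontainers")]
mod container_support {
    pub fn spin_up() {
        println!("would start a disposable test container here");
    }
}

fn main() {
    #[cfg(feature = "testcontainers")]
    container_support::spin_up();
}
```
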
@@ -1,8 +1,8 @@
-use recorder::errors::app_error::RResult;
+use recorder::errors::RecorderResult;
// #![allow(unused_imports)]
// use recorder::{
//     app::{AppContext, AppContextTrait},
-//     errors::RResult,
+//     errors::RecorderResult,
//     migrations::Migrator,
//     models::{
//         subscribers::SEED_SUBSCRIBER,
@@ -12,7 +12,7 @@ use recorder::errors::app_error::RResult;
// use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
// use sea_orm_migration::MigratorTrait;

-// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RResult<()> {
+// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RecorderResult<()> {
//     let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";

//     // let rss_link =
@@ -44,13 +44,13 @@ use recorder::errors::app_error::RResult;
// }

// #[tokio::main]
-// async fn main() -> RResult<()> {
+// async fn main() -> RecorderResult<()> {
//     pull_mikan_bangumi_rss(&ctx).await?;

//     Ok(())
// }

#[tokio::main]
-async fn main() -> RResult<()> {
+async fn main() -> RecorderResult<()> {
Ok(())
}

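A recurring change in this commit is the rename of `errors::app_error::RResult` to `errors::RecorderResult`. The alias below is a hedged sketch of the shape those call sites imply; the actual definition lives in `recorder::errors` and is not part of this diff:

```rust
// Hypothetical sketch only: the real RecorderError enum is not shown in this
// diff. The DbError variant is inferred from a later hunk that matches on
// `RecorderError::DbError { .. }`.
#[derive(Debug)]
pub enum RecorderError {
    DbError,
}

pub type RecorderResult<T> = Result<T, RecorderError>;

fn demo() -> RecorderResult<()> {
    Ok(())
}

fn main() {
    assert!(demo().is_ok());
}
```
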
@@ -3,7 +3,7 @@ use std::sync::Arc;
use clap::{Parser, command};

use super::{AppContext, core::App, env::Environment};
-use crate::{app::config::AppConfig, errors::app_error::RResult};
+use crate::{app::config::AppConfig, errors::RecorderResult};

#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
@@ -33,7 +33,7 @@ pub struct AppBuilder {
}

impl AppBuilder {
-pub async fn from_main_cli(environment: Option<Environment>) -> RResult<Self> {
+pub async fn from_main_cli(environment: Option<Environment>) -> RecorderResult<Self> {
let args = MainCliArgs::parse();

let environment = environment.unwrap_or_else(|| {
@@ -68,7 +68,7 @@ impl AppBuilder {
Ok(builder)
}

-pub async fn build(self) -> RResult<App> {
+pub async fn build(self) -> RecorderResult<App> {
AppConfig::load_dotenv(
&self.environment,
&self.working_dir,

@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};

use super::env::Environment;
use crate::{
-auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::app_error::RResult,
+auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::RecorderResult,
extract::mikan::MikanConfig, graphql::GraphQLConfig, logger::LoggerConfig,
storage::StorageConfig, web::WebServerConfig,
};
@@ -64,7 +64,7 @@ impl AppConfig {
fig: Figment,
filepath: impl AsRef<Path>,
ext: &str,
-) -> RResult<Figment> {
+) -> RecorderResult<Figment> {
let content = fs::read_to_string(filepath)?;

let rendered = tera::Tera::one_off(
@@ -85,7 +85,7 @@ impl AppConfig {
environment: &Environment,
working_dir: &str,
dotenv_file: Option<&str>,
-) -> RResult<()> {
+) -> RecorderResult<()> {
let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
vec![dotenv_file]
} else {
@@ -124,7 +124,7 @@ impl AppConfig {
environment: &Environment,
working_dir: &str,
config_file: Option<&str>,
-) -> RResult<AppConfig> {
+) -> RecorderResult<AppConfig> {
let try_config_file_or_dirs = if config_file.is_some() {
vec![config_file]
} else {

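The config loader above renders the raw config file through Tera before handing it to Figment, so values can reference template variables. A hedged sketch of that rendering step, assuming the tera crate's `one_off` helper; the context keys are illustrative:

```rust
use std::collections::HashMap;

// Render a single template string in one call; `false` disables autoescaping,
// which suits non-HTML config text.
fn render(raw: &str) -> Result<String, tera::Error> {
    let mut ctx = tera::Context::new();
    let env: HashMap<String, String> =
        HashMap::from([("HOST".to_string(), "127.0.0.1".to_string())]);
    ctx.insert("env", &env);
    tera::Tera::one_off(raw, &ctx, false)
}

fn main() {
    let rendered = render("host = \"{{ env.HOST }}\"").unwrap();
    assert_eq!(rendered, "host = \"127.0.0.1\"");
}
```
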
@@ -1,6 +1,6 @@
use super::{Environment, config::AppConfig};
use crate::{
-auth::AuthService, cache::CacheService, database::DatabaseService, errors::app_error::RResult,
+auth::AuthService, cache::CacheService, database::DatabaseService, errors::RecorderResult,
extract::mikan::MikanClient, graphql::GraphQLService, logger::LoggerService,
storage::StorageService,
};
@@ -36,7 +36,7 @@ impl AppContext {
environment: Environment,
config: AppConfig,
working_dir: impl ToString,
-) -> RResult<Self> {
+) -> RecorderResult<Self> {
let config_cloned = config.clone();

let logger = LoggerService::from_config(config.logger).await?;

@@ -1,12 +1,11 @@
use std::{net::SocketAddr, sync::Arc};

use axum::Router;
-use futures::try_join;
use tokio::signal;

use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{
-errors::app_error::RResult,
+errors::RecorderResult,
web::{
controller::{self, core::ControllerTrait},
middleware::default_middleware_stack,
@@ -23,7 +22,7 @@ impl App {
AppBuilder::default()
}

-pub async fn serve(&self) -> RResult<()> {
+pub async fn serve(&self) -> RecorderResult<()> {
let context = &self.context;
let config = context.config();
let listener = tokio::net::TcpListener::bind(&format!(
@@ -34,7 +33,7 @@ impl App {

let mut router = Router::<Arc<dyn AppContextTrait>>::new();

-let (graphql_c, oidc_c, metadata_c) = try_join!(
+let (graphql_c, oidc_c, metadata_c) = futures::try_join!(
controller::graphql::create(context.clone()),
controller::oidc::create(context.clone()),
controller::metadata::create(context.clone())

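The hunk above drops the `use futures::try_join;` import in favor of the fully qualified `futures::try_join!` call. A small sketch of the macro's fail-fast behavior, assuming the futures and tokio crates:

```rust
async fn step(n: u32) -> Result<u32, String> {
    Ok(n * 2)
}

#[tokio::main]
async fn main() -> Result<(), String> {
    // All three futures are polled concurrently; the first Err aborts the rest.
    let (a, b, c) = futures::try_join!(step(1), step(2), step(3))?;
    assert_eq!((a, b, c), (2, 4, 6));
    Ok(())
}
```
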
@@ -1,7 +1,7 @@
use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts};
use base64::{self, Engine};
-use reqwest::header::AUTHORIZATION;
+use http::header::AUTHORIZATION;

use super::{
config::BasicAuthConfig,

@@ -4,6 +4,7 @@ use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
use fetch::HttpClientError;
use openidconnect::{
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
StandardErrorResponse, core::CoreErrorResponseType,
@@ -11,7 +12,7 @@ use openidconnect::{
use serde::{Deserialize, Serialize};
use snafu::prelude::*;

-use crate::{fetch::HttpClientError, models::auth::AuthType};
+use crate::models::auth::AuthType;

#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]

@@ -1,10 +1,17 @@
use std::{
collections::{HashMap, HashSet},
future::Future,
ops::Deref,
pin::Pin,
sync::Arc,
};

use async_trait::async_trait;
-use axum::http::{HeaderValue, request::Parts};
+use axum::{
+http,
+http::{HeaderValue, request::Parts},
+};
use fetch::{HttpClient, client::HttpClientError};
use itertools::Itertools;
use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer};
use moka::future::Cache;
@@ -24,9 +31,49 @@ use super::{
errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
service::{AuthServiceTrait, AuthUserInfo},
};
-use crate::{
-app::AppContextTrait, errors::app_error::RError, fetch::HttpClient, models::auth::AuthType,
-};
+use crate::{app::AppContextTrait, errors::RecorderError, models::auth::AuthType};

pub struct OidcHttpClient(pub Arc<HttpClient>);

impl<'a> Deref for OidcHttpClient {
type Target = HttpClient;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl<'c> openidconnect::AsyncHttpClient<'c> for OidcHttpClient {
type Error = HttpClientError;

#[cfg(target_arch = "wasm32")]
type Future =
Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + 'c>>;
#[cfg(not(target_arch = "wasm32"))]
type Future =
Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;

fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
Box::pin(async move {
let response = self.execute(request.try_into()?).await?;

let mut builder = http::Response::builder().status(response.status());

#[cfg(not(target_arch = "wasm32"))]
{
builder = builder.version(response.version());
}

for (name, value) in response.headers().iter() {
builder = builder.header(name, value);
}

builder
.body(response.bytes().await?.to_vec())
.map_err(HttpClientError::from)
})
}
}

#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OidcAuthClaims {
@@ -118,18 +165,19 @@ pub struct OidcAuthCallbackPayload {
pub struct OidcAuthService {
pub config: OidcAuthConfig,
pub api_authorizer: Authorizer<OidcAuthClaims>,
-pub oidc_provider_client: HttpClient,
+pub oidc_provider_client: Arc<HttpClient>,
pub oidc_request_cache: Cache<String, OidcAuthRequest>,
}

impl OidcAuthService {
-pub async fn build_authorization_request(
-&self,
+pub async fn build_authorization_request<'a>(
+&'a self,
redirect_uri: &str,
) -> Result<OidcAuthRequest, AuthError> {
let oidc_provider_client = OidcHttpClient(self.oidc_provider_client.clone());
let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
-&self.oidc_provider_client,
+&oidc_provider_client,
)
.await?;

@@ -199,10 +247,11 @@ impl OidcAuthService {
Ok(result)
}

-pub async fn extract_authorization_request_callback(
-&self,
+pub async fn extract_authorization_request_callback<'a>(
+&'a self,
query: OidcAuthCallbackQuery,
) -> Result<OidcAuthCallbackPayload, AuthError> {
let oidc_http_client = OidcHttpClient(self.oidc_provider_client.clone());
let csrf_token = query.state.ok_or(AuthError::OidcInvalidStateError)?;

let code = query.code.ok_or(AuthError::OidcInvalidCodeError)?;
@@ -211,7 +260,7 @@ impl OidcAuthService {

let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
-&self.oidc_provider_client,
+&oidc_http_client,
)
.await?;

@@ -227,7 +276,7 @@ impl OidcAuthService {
let token_response = oidc_client
.exchange_code(AuthorizationCode::new(code))?
.set_pkce_verifier(pkce_verifier)
-.request_async(&HttpClient::default())
+.request_async(&oidc_http_client)
.await?;

let id_token = token_response
@@ -312,7 +361,7 @@ impl AuthServiceTrait for OidcAuthService {
}
}
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
-Err(RError::DbError {
+Err(RecorderError::DbError {
source: DbErr::RecordNotFound(..),
}) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
r => r,

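The `OidcHttpClient` newtype added above wraps an `Arc<HttpClient>` so each OIDC request can cheaply construct a fresh client handle, while `Deref` keeps the inner client's methods directly callable. A minimal dependency-free sketch of the same pattern, with stand-in types:

```rust
use std::{ops::Deref, sync::Arc};

struct Client; // stand-in for the shared HTTP client

impl Client {
    fn get(&self, url: &str) -> String {
        format!("GET {url}")
    }
}

struct ClientHandle(Arc<Client>);

impl Deref for ClientHandle {
    type Target = Client;
    fn deref(&self) -> &Client {
        &self.0
    }
}

fn main() {
    let shared = Arc::new(Client);
    // Each caller builds a fresh handle from the shared Arc (cheap clone),
    // and can call the inner client's methods directly through Deref.
    let handle = ClientHandle(shared.clone());
    println!("{}", handle.get("https://example.com"));
}
```
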
@@ -6,9 +6,13 @@ use axum::{
http::request::Parts,
response::{IntoResponse as _, Response},
};
use fetch::{
HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
};
use http::header::HeaderValue;
use jwt_authorizer::{JwtAuthorizer, Validation};
use moka::future::Cache;
use reqwest::header::HeaderValue;
use snafu::prelude::*;

use super::{
@@ -17,14 +21,7 @@ use super::{
errors::{AuthError, OidcProviderHttpClientSnafu},
oidc::{OidcAuthClaims, OidcAuthService},
};
-use crate::{
-app::AppContextTrait,
-fetch::{
-HttpClient, HttpClientConfig,
-client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
-},
-models::auth::AuthType,
-};
+use crate::{app::AppContextTrait, models::auth::AuthType};

#[derive(Clone, Debug)]
pub struct AuthUserInfo {
@@ -89,7 +86,7 @@ impl AuthService {
AuthService::Oidc(Box::new(OidcAuthService {
config,
api_authorizer,
-oidc_provider_client,
+oidc_provider_client: Arc::new(oidc_provider_client),
oidc_request_cache: Cache::builder()
.time_to_live(Duration::from_mins(5))
.name("oidc_request_cache")

@@ -1,7 +1,7 @@
-use recorder::{app::AppBuilder, errors::app_error::RResult};
+use recorder::{app::AppBuilder, errors::RecorderResult};

#[tokio::main]
-async fn main() -> RResult<()> {
+async fn main() -> RecorderResult<()> {
let builder = AppBuilder::from_main_cli(None).await?;

let app = builder.build().await?;

apps/recorder/src/cache/service.rs
@@ -1,10 +1,10 @@
use super::CacheConfig;
-use crate::errors::app_error::RResult;
+use crate::errors::RecorderResult;

pub struct CacheService {}

impl CacheService {
-pub async fn from_config(_config: CacheConfig) -> RResult<Self> {
+pub async fn from_config(_config: CacheConfig) -> RecorderResult<Self> {
Ok(Self {})
}
}

@@ -7,14 +7,14 @@ use sea_orm::{
use sea_orm_migration::MigratorTrait;

use super::DatabaseConfig;
-use crate::{errors::app_error::RResult, migrations::Migrator};
+use crate::{errors::RecorderResult, migrations::Migrator};

pub struct DatabaseService {
connection: DatabaseConnection,
}

impl DatabaseService {
-pub async fn from_config(config: DatabaseConfig) -> RResult<Self> {
+pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> {
let mut opt = ConnectOptions::new(&config.uri);
opt.max_connections(config.max_connections)
.min_connections(config.min_connections)

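For context, a hedged sketch of what a `from_config` body like the one above typically continues into, assuming sea-orm's connection API; the pool sizes and timeout here are illustrative, not the project's values:

```rust
use std::time::Duration;

use sea_orm::{ConnectOptions, Database, DatabaseConnection, DbErr};

// Build connection options from a URI, then hand them to Database::connect.
async fn connect(uri: &str) -> Result<DatabaseConnection, DbErr> {
    let mut opt = ConnectOptions::new(uri.to_string());
    opt.max_connections(20)
        .min_connections(2)
        .connect_timeout(Duration::from_secs(8));
    Database::connect(opt).await
}
```
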
@@ -1,74 +0,0 @@
use async_trait::async_trait;

use crate::downloader::{
DownloaderError,
bittorrent::task::{
TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait, TorrentTaskTrait,
},
core::{DownloadIdSelectorTrait, DownloadSelectorTrait, DownloadTaskTrait, DownloaderTrait},
};

#[async_trait]
pub trait TorrentDownloaderTrait: DownloaderTrait
where
Self::State: TorrentStateTrait,
Self::Id: TorrentHashTrait,
Self::Task: TorrentTaskTrait<State = Self::State, Id = Self::Id>,
Self::Creation: TorrentCreationTrait<Task = Self::Task>,
Self::Selector: DownloadSelectorTrait<Task = Self::Task, Id = Self::Id>,
{
type IdSelector: DownloadIdSelectorTrait<Task = Self::Task, Id = Self::Id>;

async fn pause_downloads(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.pause_torrents(hashes).await
}

async fn resume_downloads(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.resume_torrents(hashes).await
}
async fn remove_downloads(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.remove_torrents(hashes).await
}

async fn query_torrent_hashes(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = match selector.try_into_ids_only() {
Ok(hashes) => Self::IdSelector::from_iter(hashes),
Err(selector) => {
let tasks = self.query_downloads(selector).await?;

Self::IdSelector::from_iter(tasks.into_iter().map(|s| s.into_id()))
}
};
Ok(hashes)
}

async fn pause_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError>;

async fn resume_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError>;

async fn remove_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError>;
}
@@ -1,3 +0,0 @@
pub mod downloader;
pub mod source;
pub mod task;
@@ -1,228 +0,0 @@
use std::{
borrow::Cow,
fmt::{Debug, Formatter},
};

use bytes::Bytes;
use librqbit_core::{magnet::Magnet, torrent_metainfo, torrent_metainfo::TorrentMetaV1Owned};
use snafu::ResultExt;
use url::Url;

use crate::{
downloader::errors::{
DownloadFetchSnafu, DownloaderError, MagnetFormatSnafu, TorrentMetaSnafu,
},
errors::RAnyhowResultExt,
extract::bittorrent::core::MAGNET_SCHEMA,
fetch::{bytes::fetch_bytes, client::core::HttpClientTrait},
};

pub trait HashTorrentSourceTrait: Sized {
fn hash_info(&self) -> Cow<'_, str>;
}

pub struct MagnetUrlSource {
pub magnet: Magnet,
pub url: String,
}

impl MagnetUrlSource {
pub fn from_url(url: String) -> Result<Self, DownloaderError> {
let magnet = Magnet::parse(&url)
.to_dyn_boxed()
.context(MagnetFormatSnafu {
message: url.clone(),
})?;

Ok(Self { magnet, url })
}
}

impl HashTorrentSourceTrait for MagnetUrlSource {
fn hash_info(&self) -> Cow<'_, str> {
let hash_info = self
.magnet
.as_id32()
.map(|s| s.as_string())
.or_else(|| self.magnet.as_id20().map(|s| s.as_string()))
.unwrap_or_else(|| unreachable!("hash of magnet must existed"));
hash_info.into()
}
}

impl Debug for MagnetUrlSource {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("MagnetUrlSource")
.field("url", &self.url)
.finish()
}
}

impl Clone for MagnetUrlSource {
fn clone(&self) -> Self {
Self {
magnet: Magnet::parse(&self.url).unwrap(),
url: self.url.clone(),
}
}
}

impl PartialEq for MagnetUrlSource {
fn eq(&self, other: &Self) -> bool {
self.url == other.url
}
}

impl Eq for MagnetUrlSource {}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TorrentUrlSource {
pub url: String,
}

impl TorrentUrlSource {
pub fn from_url(url: String) -> Result<Self, DownloaderError> {
Ok(Self { url })
}
}

#[derive(Clone)]
pub struct TorrentFileSource {
pub url: Option<String>,
pub payload: Bytes,
pub meta: TorrentMetaV1Owned,
pub filename: String,
}

impl TorrentFileSource {
pub fn from_bytes(
filename: String,
bytes: Bytes,
url: Option<String>,
) -> Result<Self, DownloaderError> {
let meta = torrent_metainfo::torrent_from_bytes(bytes.as_ref())
.to_dyn_boxed()
.with_context(|_| TorrentMetaSnafu {
message: format!(
"filename = {}, url = {}",
filename,
url.as_deref().unwrap_or_default()
),
})?
.to_owned();

Ok(TorrentFileSource {
url,
payload: bytes,
meta,
filename,
})
}
pub async fn from_url_and_http_client(
client: &impl HttpClientTrait,
url: String,
) -> Result<TorrentFileSource, DownloaderError> {
let payload = fetch_bytes(client, &url)
.await
.boxed()
.with_context(|_| DownloadFetchSnafu { url: url.clone() })?;

let filename = Url::parse(&url)
.boxed()
.and_then(|s| {
s.path_segments()
.and_then(|mut p| p.next_back())
.map(String::from)
.ok_or_else(|| anyhow::anyhow!("invalid url"))
.to_dyn_boxed()
})
.with_context(|_| DownloadFetchSnafu { url: url.clone() })?;

Self::from_bytes(filename, payload, Some(url))
}
}

impl HashTorrentSourceTrait for TorrentFileSource {
fn hash_info(&self) -> Cow<'_, str> {
self.meta.info_hash.as_string().into()
}
}

impl Debug for TorrentFileSource {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("TorrentFileSource")
.field("hash", &self.meta.info_hash.as_string())
.finish()
}
}

#[derive(Clone, Debug)]
pub enum UrlTorrentSource {
MagnetUrl(MagnetUrlSource),
TorrentUrl(TorrentUrlSource),
}

impl UrlTorrentSource {
pub fn from_url(url: String) -> Result<Self, DownloaderError> {
let url_ = Url::parse(&url)?;
let source = if url_.scheme() == MAGNET_SCHEMA {
Self::from_magnet_url(url)?
} else {
Self::from_torrent_url(url)?
};
Ok(source)
}

pub fn from_magnet_url(url: String) -> Result<Self, DownloaderError> {
let magnet_source = MagnetUrlSource::from_url(url)?;
Ok(Self::MagnetUrl(magnet_source))
}

pub fn from_torrent_url(url: String) -> Result<Self, DownloaderError> {
let torrent_source = TorrentUrlSource::from_url(url)?;
Ok(Self::TorrentUrl(torrent_source))
}
}

#[derive(Debug, Clone)]
pub enum HashTorrentSource {
MagnetUrl(MagnetUrlSource),
TorrentFile(TorrentFileSource),
}

impl HashTorrentSource {
pub async fn from_url_and_http_client(
client: &impl HttpClientTrait,
url: String,
) -> Result<Self, DownloaderError> {
let url_ = Url::parse(&url)?;
let source = if url_.scheme() == MAGNET_SCHEMA {
Self::from_magnet_url(url)?
} else {
Self::from_torrent_url_and_http_client(client, url).await?
};
Ok(source)
}

pub fn from_magnet_url(url: String) -> Result<Self, DownloaderError> {
let magnet_source = MagnetUrlSource::from_url(url)?;
Ok(Self::MagnetUrl(magnet_source))
}

pub async fn from_torrent_url_and_http_client(
client: &impl HttpClientTrait,
url: String,
) -> Result<Self, DownloaderError> {
let torrent_source = TorrentFileSource::from_url_and_http_client(client, url).await?;
Ok(Self::TorrentFile(torrent_source))
}
}

impl HashTorrentSourceTrait for HashTorrentSource {
fn hash_info(&self) -> Cow<'_, str> {
match self {
HashTorrentSource::MagnetUrl(m) => m.hash_info(),
HashTorrentSource::TorrentFile(t) => t.hash_info(),
}
}
}
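The deleted `from_url` helpers above dispatch on the URL scheme: magnet links resolve locally, anything else is fetched as a `.torrent` file. A dependency-free sketch of that dispatch; the real code uses the url and librqbit crates:

```rust
#[derive(Debug)]
enum Source {
    Magnet(String),
    TorrentUrl(String),
}

// Classify a source string by its scheme: magnet links carry the info hash
// inline, while .torrent URLs need an HTTP fetch before the hash is known.
fn classify(url: &str) -> Source {
    if url.starts_with("magnet:") {
        Source::Magnet(url.to_string())
    } else {
        Source::TorrentUrl(url.to_string())
    }
}

fn main() {
    println!("{:?}", classify("magnet:?xt=urn:btih:abcd"));
    println!("{:?}", classify("https://example.com/file.torrent"));
}
```
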
@@ -1,37 +0,0 @@
use std::{borrow::Cow, hash::Hash};

use quirks_path::{Path, PathBuf};

use crate::downloader::{
bittorrent::source::HashTorrentSource,
core::{DownloadCreationTrait, DownloadIdTrait, DownloadStateTrait, DownloadTaskTrait},
};

pub const TORRENT_TAG_NAME: &str = "konobangu";

pub trait TorrentHashTrait: DownloadIdTrait + Send + Hash {}

pub trait TorrentStateTrait: DownloadStateTrait {}

pub trait TorrentTaskTrait: DownloadTaskTrait
where
Self::State: TorrentStateTrait,
Self::Id: TorrentHashTrait,
{
fn hash_info(&self) -> &str;
fn name(&self) -> Cow<'_, str> {
Cow::Borrowed(self.hash_info())
}

fn tags(&self) -> impl Iterator<Item = Cow<'_, str>>;

fn category(&self) -> Option<Cow<'_, str>>;
}

pub trait TorrentCreationTrait: DownloadCreationTrait {
fn save_path(&self) -> &Path;

fn save_path_mut(&mut self) -> &mut PathBuf;

fn sources_mut(&mut self) -> &mut Vec<HashTorrentSource>;
}
@@ -1,218 +0,0 @@
use std::{
any::Any, borrow::Cow, fmt::Debug, hash::Hash, marker::PhantomData, ops::Deref, time::Duration,
vec::IntoIter,
};

use async_trait::async_trait;

use super::DownloaderError;

pub trait DownloadStateTrait: Sized + Debug {}

pub trait DownloadIdTrait: Hash + Sized + Clone + Send + Debug {}

pub trait DownloadTaskTrait: Sized + Send + Debug {
type State: DownloadStateTrait;
type Id: DownloadIdTrait;

fn id(&self) -> &Self::Id;
fn into_id(self) -> Self::Id;
fn name(&self) -> Cow<'_, str>;
fn speed(&self) -> Option<u64>;
fn state(&self) -> &Self::State;
fn dl_bytes(&self) -> Option<u64>;
fn total_bytes(&self) -> Option<u64>;
fn left_bytes(&self) -> Option<u64> {
if let (Some(tt), Some(dl)) = (self.total_bytes(), self.dl_bytes()) {
tt.checked_sub(dl)
} else {
None
}
}
fn et(&self) -> Option<Duration>;
fn eta(&self) -> Option<Duration> {
if let (Some(left_bytes), Some(speed)) = (self.left_bytes(), self.speed()) {
if speed > 0 {
Some(Duration::from_secs_f64(left_bytes as f64 / speed as f64))
} else {
None
}
} else {
None
}
}
fn average_speed(&self) -> Option<f64> {
if let (Some(et), Some(dl_bytes)) = (self.et(), self.dl_bytes()) {
let secs = et.as_secs_f64();

if secs > 0.0 {
Some(dl_bytes as f64 / secs)
} else {
None
}
} else {
None
}
}
fn progress(&self) -> Option<f32> {
if let (Some(dl), Some(tt)) = (self.dl_bytes(), self.total_bytes()) {
if dl > 0 {
if tt > 0 {
Some(dl as f32 / tt as f32)
} else {
None
}
} else {
Some(0.0)
}
} else {
None
}
}
}

pub trait DownloadCreationTrait: Sized {
type Task: DownloadTaskTrait;
}

pub trait DownloadSelectorTrait: Sized + Any + Send {
type Id: DownloadIdTrait;
type Task: DownloadTaskTrait<Id = Self::Id>;

fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
Err(self)
}
}

pub trait DownloadIdSelectorTrait:
DownloadSelectorTrait
+ IntoIterator<Item = Self::Id>
+ FromIterator<Self::Id>
+ Into<Vec<Self::Id>>
+ From<Vec<Self::Id>>
{
fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
Ok(Vec::from_iter(self))
}

fn from_id(id: Self::Id) -> Self;
}

#[derive(Debug)]
pub struct DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
pub ids: Vec<Task::Id>,
pub marker: PhantomData<Task>,
}

impl<Task> Deref for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
type Target = Vec<Task::Id>;

fn deref(&self) -> &Self::Target {
&self.ids
}
}

impl<Task> IntoIterator for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
type Item = Task::Id;
type IntoIter = IntoIter<Task::Id>;

fn into_iter(self) -> Self::IntoIter {
self.ids.into_iter()
}
}

impl<Task> FromIterator<Task::Id> for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
fn from_iter<T: IntoIterator<Item = Task::Id>>(iter: T) -> Self {
Self {
ids: Vec::from_iter(iter),
marker: PhantomData,
}
}
}

impl<Task> DownloadSelectorTrait for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait + 'static,
{
type Id = Task::Id;
type Task = Task;
}

impl<Task> From<Vec<Task::Id>> for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait + 'static,
{
fn from(value: Vec<Task::Id>) -> Self {
Self {
ids: value,
marker: PhantomData,
}
}
}

impl<Task> From<DownloadIdSelector<Task>> for Vec<Task::Id>
where
Task: DownloadTaskTrait + 'static,
{
fn from(value: DownloadIdSelector<Task>) -> Self {
value.ids
}
}

impl<Task> DownloadIdSelectorTrait for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait + 'static,
{
fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
Ok(self.ids)
}

fn from_id(id: Self::Id) -> Self {
Self {
ids: vec![id],
marker: PhantomData,
}
}
}

#[async_trait]
pub trait DownloaderTrait {
type State: DownloadStateTrait;
type Id: DownloadIdTrait;
type Task: DownloadTaskTrait<State = Self::State, Id = Self::Id>;
type Creation: DownloadCreationTrait<Task = Self::Task>;
type Selector: DownloadSelectorTrait<Task = Self::Task>;

async fn add_downloads(
&self,
creation: Self::Creation,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn pause_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn resume_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn remove_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn query_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Task>, DownloaderError>;
}
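The selector traits deleted above encode one idea: a query either already carries plain IDs (`try_into_ids_only` succeeds and no backend round trip is needed) or must first be resolved via `query_downloads`. A standalone sketch of that fast path / slow path split, with illustrative types:

```rust
#[derive(Debug)]
enum Selector {
    Ids(Vec<String>),
    Query(String), // e.g. "category=konobangu"
}

impl Selector {
    // Fast path: hand back the IDs directly. Slow path: return the selector
    // unchanged so the caller can resolve it against the backend.
    fn try_into_ids_only(self) -> Result<Vec<String>, Selector> {
        match self {
            Selector::Ids(ids) => Ok(ids),
            other => Err(other),
        }
    }
}

fn main() {
    let fast = Selector::Ids(vec!["abc123".into()]);
    assert_eq!(fast.try_into_ids_only().unwrap(), vec!["abc123".to_string()]);

    let slow = Selector::Query("category=konobangu".into());
    assert!(slow.try_into_ids_only().is_err()); // must query the backend instead
}
```
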
@@ -1,63 +0,0 @@
use std::{borrow::Cow, time::Duration};

use snafu::prelude::*;

use crate::errors::OptDynErr;

#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum DownloaderError {
#[snafu(transparent)]
DownloadUrlParseError { source: url::ParseError },
#[snafu(transparent)]
QBitAPIError { source: qbit_rs::Error },
#[snafu(transparent)]
DownloaderIOError { source: std::io::Error },
#[snafu(display("Timeout error (action = {action}, timeout = {timeout:?})"))]
DownloadTimeoutError {
action: Cow<'static, str>,
timeout: Duration,
},
#[snafu(display("Invalid magnet format ({message})"))]
MagnetFormatError {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Invalid torrent meta format ({message})"))]
TorrentMetaError {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Failed to fetch: {source}"))]
DownloadFetchError {
url: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("{message}"))]
Whatever {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
}

impl snafu::FromString for DownloaderError {
type Source = Box<dyn std::error::Error + Send + Sync>;

fn without_source(message: String) -> Self {
Self::Whatever {
message,
source: OptDynErr::none(),
}
}

fn with_source(source: Self::Source, message: String) -> Self {
Self::Whatever {
message,
source: OptDynErr::some(source),
}
}
}
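The deleted error type above mixes `#[snafu(transparent)]` variants, which forward their source's Display and convert via `?`, with structured variants that carry extra context. A minimal sketch of the two styles, assuming the snafu crate:

```rust
use snafu::prelude::*;

#[derive(Debug, Snafu)]
enum DemoError {
    // Transparent: Display is forwarded from the source, and a From impl is
    // generated, so `?` converts std::io::Error automatically.
    #[snafu(transparent)]
    Io { source: std::io::Error },
    // Structured: built through its context selector with extra fields.
    #[snafu(display("Timeout error (action = {action})"))]
    Timeout { action: String },
}

fn read() -> Result<String, DemoError> {
    let text = std::fs::read_to_string("missing.txt")?; // io::Error -> DemoError
    Ok(text)
}

fn main() {
    if let Err(e) = read() {
        println!("{e}");
    }
}
```
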
@@ -1,8 +0,0 @@
pub mod bittorrent;
pub mod core;
pub mod errors;
pub mod qbit;
pub mod rqbit;
pub mod utils;

pub use errors::DownloaderError;
@@ -1,605 +0,0 @@
use std::{
borrow::Cow,
collections::{HashMap, HashSet},
fmt::Debug,
sync::{Arc, Weak},
time::Duration,
};

use async_trait::async_trait;
use chrono::{DateTime, Utc};
use futures_util::future::try_join_all;
use itertools::Itertools;
use merge_struct::merge;
use qbit_rs::{
Qbit,
model::{
AddTorrentArg, Category, Credential, GetTorrentListArg, NonEmptyStr, Sep, SyncData,
Torrent as QbitTorrent, TorrentFile, TorrentSource,
},
};
use quirks_path::{Path, PathBuf};
use snafu::{OptionExt, whatever};
use tokio::{
sync::{RwLock, watch},
time::sleep,
};
use tracing::instrument;
use url::Url;

use crate::downloader::{
DownloaderError,
bittorrent::{
downloader::TorrentDownloaderTrait,
source::{HashTorrentSource, HashTorrentSourceTrait, MagnetUrlSource, TorrentFileSource},
task::TORRENT_TAG_NAME,
},
core::{DownloadIdSelector, DownloaderTrait},
qbit::task::{
QBittorrentCreation, QBittorrentHash, QBittorrentSelector, QBittorrentState,
QBittorrentTask,
},
utils::path_equals_as_file_url,
};

pub struct QBittorrentDownloaderCreation {
pub endpoint: String,
pub username: String,
pub password: String,
pub save_path: String,
pub subscriber_id: i32,
pub downloader_id: i32,
pub wait_sync_timeout: Option<Duration>,
}

#[derive(Default)]
pub struct QBittorrentSyncData {
pub torrents: HashMap<String, QbitTorrent>,
pub categories: HashMap<String, Category>,
pub tags: HashSet<String>,
pub trackers: HashMap<String, Vec<String>>,
pub server_state: HashMap<String, serde_value::Value>,
pub rid: i64,
}

impl QBittorrentSyncData {
pub fn patch(&mut self, data: SyncData) {
self.rid = data.rid;
if data.full_update.is_some_and(|s| s) {
self.torrents.clear();
self.categories.clear();
self.tags.clear();
self.trackers.clear();
}
if let Some(remove_categories) = data.categories_removed {
for c in remove_categories {
self.categories.remove(&c);
}
}
if let Some(add_categories) = data.categories {
self.categories.extend(add_categories);
}
if let Some(remove_tags) = data.tags_removed {
for t in remove_tags {
self.tags.remove(&t);
}
}
if let Some(add_tags) = data.tags {
self.tags.extend(add_tags);
}
if let Some(remove_torrents) = data.torrents_removed {
for t in remove_torrents {
self.torrents.remove(&t);
}
}
if let Some(add_torrents) = data.torrents {
for (hash, torrent_patch) in add_torrents {
if let Some(torrent_full) = self.torrents.get_mut(&hash) {
*torrent_full = merge(torrent_full, &torrent_patch).unwrap_or_else(|_| {
unreachable!("failed to merge torrents, but they are same type")
});
} else {
self.torrents.insert(hash, torrent_patch);
}
}
}
if let Some(remove_trackers) = data.trackers_removed {
for t in remove_trackers {
self.trackers.remove(&t);
}
}
if let Some(add_trackers) = data.trackers {
self.trackers.extend(add_trackers);
}
if let Some(server_state) = data.server_state {
self.server_state = merge(&self.server_state, &server_state).unwrap_or_else(|_| {
unreachable!("failed to merge server state, but they are same type")
});
}
}
}

pub struct QBittorrentDownloader {
pub subscriber_id: i32,
pub downloader_id: i32,
pub endpoint_url: Url,
pub client: Arc<Qbit>,
pub save_path: PathBuf,
pub wait_sync_timeout: Duration,
pub sync_watch: watch::Sender<DateTime<Utc>>,
pub sync_data: Arc<RwLock<QBittorrentSyncData>>,
}

impl QBittorrentDownloader {
pub async fn from_creation(
creation: QBittorrentDownloaderCreation,
) -> Result<Arc<Self>, DownloaderError> {
let endpoint_url = Url::parse(&creation.endpoint)?;

let credential = Credential::new(creation.username, creation.password);

let client = Qbit::new(endpoint_url.clone(), credential);

client.login(false).await?;

client.sync(None).await?;

let downloader = Arc::new(Self {
client: Arc::new(client),
endpoint_url,
subscriber_id: creation.subscriber_id,
save_path: creation.save_path.into(),
wait_sync_timeout: creation
.wait_sync_timeout
.unwrap_or(Duration::from_secs(10)),
downloader_id: creation.downloader_id,
sync_watch: watch::channel(Utc::now()).0,
sync_data: Arc::new(RwLock::new(QBittorrentSyncData::default())),
});

let event_loop_me = Arc::downgrade(&downloader);

tokio::spawn(async move { Self::start_event_loop(event_loop_me).await });

Ok(downloader)
}

async fn start_event_loop(me: Weak<Self>) {
let mut tick = 0;

loop {
sleep(Duration::from_millis(100)).await;
if let Some(me) = me.upgrade() {
if tick >= 100 {
let _ = me.sync_data().await.inspect_err(|e| {
tracing::error!(name = "sync_data", error = ?e);
});
tick = 0;
continue;
}
let count = me.sync_watch.receiver_count();
if count > 0 && tick >= 10 {
let _ = me.sync_data().await.inspect_err(|e| {
tracing::error!(name = "sync_data", error = ?e);
});
tick = i32::max(0, tick - 10);
} else {
tick += 1;
}
}
}
}

#[instrument(level = "debug")]
pub async fn api_version(&self) -> Result<String, DownloaderError> {
let result = self.client.get_webapi_version().await?;
Ok(result)
}

#[instrument(level = "debug", skip(self))]
pub async fn add_category(&self, category: &str) -> Result<(), DownloaderError> {
self.client
.add_category(
NonEmptyStr::new(category)
.whatever_context::<_, DownloaderError>("category can not be empty")?,
self.save_path.as_str(),
)
.await?;
self.wait_sync_until(
|sync_data| sync_data.categories.contains_key(category),
None,
)
.await?;

Ok(())
}

#[instrument(level = "debug", skip(self))]
pub async fn check_connection(&self) -> Result<(), DownloaderError> {
self.api_version().await?;
Ok(())
}

#[instrument(level = "debug", skip(self))]
pub async fn set_torrents_category(
&self,
hashes: Vec<String>,
category: &str,
) -> Result<(), DownloaderError> {
{
let category_no_exists = {
let sync_data = self.sync_data.read().await;
!sync_data.categories.contains_key(category)
};

if category_no_exists {
self.add_category(category).await?;
}
}
self.client
.set_torrent_category(hashes.clone(), category)
.await?;
self.wait_sync_until(
|sync_data| {
let torrents = &sync_data.torrents;
hashes.iter().all(|h| {
torrents
.get(h)
.is_some_and(|t| t.category.as_deref().is_some_and(|c| c == category))
})
},
None,
)
.await?;
Ok(())
}

pub fn get_save_path(&self, sub_path: &Path) -> PathBuf {
self.save_path.join(sub_path)
}

#[instrument(level = "debug", skip(self))]
pub async fn add_torrent_tags(
&self,
hashes: Vec<String>,
tags: Vec<String>,
) -> Result<(), DownloaderError> {
if tags.is_empty() {
whatever!("add bittorrent tags can not be empty");
}
self.client
.add_torrent_tags(hashes.clone(), tags.clone())
.await?;
let tag_sets = tags.iter().map(|s| s.as_str()).collect::<HashSet<&str>>();
self.wait_sync_until(
|sync_data| {
let torrents = &sync_data.torrents;

hashes.iter().all(|h| {
torrents.get(h).is_some_and(|t| {
t.tags.as_ref().is_some_and(|t| {
t.split(',')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect::<HashSet<&str>>()
.is_superset(&tag_sets)
})
})
})
},
None,
)
.await?;
Ok(())
}

#[instrument(level = "debug", skip(self))]
pub async fn move_torrents(
&self,
hashes: Vec<String>,
new_path: &str,
) -> Result<(), DownloaderError> {
self.client
.set_torrent_location(hashes.clone(), new_path)
.await?;

self.wait_sync_until(
|sync_data| -> bool {
let torrents = &sync_data.torrents;

hashes.iter().all(|h| {
torrents.get(h).is_some_and(|t| {
t.save_path.as_deref().is_some_and(|p| {
path_equals_as_file_url(p, new_path)
.inspect_err(|error| {
tracing::warn!(name = "path_equals_as_file_url", error = ?error);
})
.unwrap_or(false)
})
})
})
},
None,
)
.await?;
Ok(())
}

pub async fn get_torrent_path(
&self,
hashes: String,
) -> Result<Option<String>, DownloaderError> {
let mut torrent_list = self
.client
.get_torrent_list(GetTorrentListArg {
hashes: Some(hashes),
..Default::default()
})
.await?;
let torrent = torrent_list
.first_mut()
.whatever_context::<_, DownloaderError>("No bittorrent found")?;
Ok(torrent.save_path.take())
}

#[instrument(level = "debug", skip(self))]
async fn sync_data(&self) -> Result<(), DownloaderError> {
let rid = { self.sync_data.read().await.rid };
let sync_data_patch = self.client.sync(Some(rid)).await?;
{
let mut sync_data = self.sync_data.write().await;
sync_data.patch(sync_data_patch);
}
let now = Utc::now();
self.sync_watch.send_replace(now);
Ok(())
}

async fn wait_sync_until<S>(
&self,
stop_wait_fn: S,
timeout: Option<Duration>,
) -> Result<(), DownloaderError>
where
S: Fn(&QBittorrentSyncData) -> bool,
{
{
let sync_data = &self.sync_data.read().await;
if stop_wait_fn(sync_data) {
return Ok(());
}
}

let timeout = timeout.unwrap_or(self.wait_sync_timeout);
let start_time = Utc::now();

let mut receiver = self.sync_watch.subscribe();

while let Ok(()) = receiver.changed().await {
let has_timeout = {
let sync_time = *receiver.borrow();
let diff_time = sync_time - start_time;
diff_time.num_milliseconds() > timeout.as_millis() as i64
};
if has_timeout {
tracing::warn!(name = "wait_until timeout", timeout = ?timeout);
return Err(DownloaderError::DownloadTimeoutError {
action: Cow::Borrowed("QBittorrentDownloader::wait_unit"),
timeout,
});
}
{
let sync_data = &self.sync_data.read().await;
if stop_wait_fn(sync_data) {
break;
}
}
}
Ok(())
}
}

#[async_trait]
impl DownloaderTrait for QBittorrentDownloader {
type State = QBittorrentState;
type Id = QBittorrentHash;
type Task = QBittorrentTask;
type Creation = QBittorrentCreation;
type Selector = QBittorrentSelector;

async fn add_downloads(
&self,
creation: Self::Creation,
) -> Result<HashSet<Self::Id>, DownloaderError> {
let tags = {
let mut tags = vec![TORRENT_TAG_NAME.to_string()];
tags.extend(creation.tags);
Some(tags.into_iter().filter(|s| !s.is_empty()).join(","))
};

let save_path = Some(creation.save_path.into_string());

let sources = creation.sources;
let hashes = HashSet::from_iter(sources.iter().map(|s| s.hash_info().to_string()));
let (urls_source, files_source) = {
let mut urls = vec![];
let mut files = vec![];
for s in sources {
match s {
HashTorrentSource::MagnetUrl(MagnetUrlSource { url, .. }) => {
urls.push(Url::parse(&url)?)
}
HashTorrentSource::TorrentFile(TorrentFileSource {
payload, filename, ..
}) => files.push(TorrentFile {
filename,
data: payload.into(),
}),
}
}
(
if urls.is_empty() {
None
} else {
Some(TorrentSource::Urls {
urls: Sep::from(urls),
})
},
if files.is_empty() {
None
} else {
Some(TorrentSource::TorrentFiles { torrents: files })
},
)
};

let category = creation.category;

if let Some(category) = category.as_deref() {
let has_caetgory = {
self.sync_data
.read()
.await
.categories
.contains_key(category)
};
if !has_caetgory {
self.add_category(category).await?;
}
}

if let Some(source) = urls_source {
self.client
.add_torrent(AddTorrentArg {
source,
savepath: save_path.clone(),
auto_torrent_management: Some(false),
category: category.clone(),
tags: tags.clone(),
..Default::default()
})
.await?;
}

if let Some(source) = files_source {
self.client
.add_torrent(AddTorrentArg {
source,
savepath: save_path,
auto_torrent_management: Some(false),
category,
tags,
..Default::default()
})
.await?;
}
self.wait_sync_until(
|sync_data| {
let torrents = &sync_data.torrents;
hashes.iter().all(|hash| torrents.contains_key(hash))
},
None,
)
.await?;
Ok(hashes)
}

async fn pause_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::pause_downloads(self, selector).await
}

async fn resume_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::resume_downloads(self, selector).await
}

async fn remove_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError> {
<Self as TorrentDownloaderTrait>::remove_downloads(self, selector).await
}

async fn query_downloads(
&self,
selector: QBittorrentSelector,
) -> Result<Vec<Self::Task>, DownloaderError> {
let selector = match selector {
QBittorrentSelector::Hash(h) => h.into(),
QBittorrentSelector::Complex(c) => c,
};

let torrent_list = self.client.get_torrent_list(selector.query).await?;

let torrent_contents = try_join_all(torrent_list.iter().map(|s| async {
if let Some(hash) = &s.hash {
self.client.get_torrent_contents(hash as &str, None).await
} else {
Ok(vec![])
}
}))
.await?;

let tasks = torrent_list
.into_iter()
.zip(torrent_contents)
.map(|(t, c)| Self::Task::from_query(t, c))
.collect::<Result<Vec<Self::Task>, _>>()?;
Ok(tasks)
}
}

#[async_trait]
impl TorrentDownloaderTrait for QBittorrentDownloader {
type IdSelector = DownloadIdSelector<Self::Task>;
#[instrument(level = "debug", skip(self))]
async fn pause_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> {
self.client.pause_torrents(hashes.clone()).await?;
Ok(hashes)
}

#[instrument(level = "debug", skip(self))]
async fn resume_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> {
self.client.resume_torrents(hashes.clone()).await?;
Ok(hashes)
}

#[instrument(level = "debug", skip(self))]
async fn remove_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError> {
self.client
.delete_torrents(hashes.clone(), Some(true))
.await?;
self.wait_sync_until(
|sync_data| -> bool {
let torrents = &sync_data.torrents;
hashes.iter().all(|h| !torrents.contains_key(h))
},
None,
)
.await?;
Ok(hashes)
}
}

impl Debug for QBittorrentDownloader {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("QBittorrentDownloader")
.field("subscriber_id", &self.subscriber_id)
.field("client", &self.endpoint_url.as_str())
.finish()
}
}
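The deleted `wait_sync_until` above is a watch-channel pattern: a background sync loop bumps the channel after each refresh, and waiters re-check a predicate on every notification until it holds or a timeout passes. A minimal sketch, assuming the tokio crate:

```rust
use std::time::Duration;

use tokio::sync::watch;

#[tokio::main]
async fn main() {
    // The channel payload is a "sync generation" counter.
    let (tx, mut rx) = watch::channel(0u64);

    // Background "sync loop": bump the generation a few times.
    tokio::spawn(async move {
        for generation in 1..=5u64 {
            tokio::time::sleep(Duration::from_millis(50)).await;
            let _ = tx.send(generation);
        }
    });

    // Waiter: re-check the predicate on every notification, bounded by a timeout.
    let wait = async move {
        while rx.changed().await.is_ok() {
            if *rx.borrow() >= 3 {
                return true;
            }
        }
        false
    };
    let ok = tokio::time::timeout(Duration::from_secs(1), wait)
        .await
        .unwrap_or(false);
    assert!(ok);
}
```
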
@@ -1,11 +0,0 @@
pub mod downloader;
pub mod task;

#[cfg(test)]
mod test;

pub use downloader::{QBittorrentDownloader, QBittorrentDownloaderCreation, QBittorrentSyncData};
pub use task::{
QBittorrentComplexSelector, QBittorrentCreation, QBittorrentHash, QBittorrentHashSelector,
QBittorrentSelector, QBittorrentState, QBittorrentTask,
};
@@ -1,221 +0,0 @@
|
||||
use std::{borrow::Cow, time::Duration};
|
||||
|
||||
use itertools::Itertools;
|
||||
use qbit_rs::model::{
|
||||
GetTorrentListArg, State, Torrent as QbitTorrent, TorrentContent as QbitTorrentContent,
|
||||
};
|
||||
use quirks_path::{Path, PathBuf};
|
||||
|
||||
use crate::downloader::{
|
||||
DownloaderError,
|
||||
bittorrent::{
|
||||
source::HashTorrentSource,
|
||||
task::{TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait, TorrentTaskTrait},
|
||||
},
|
||||
core::{
|
||||
DownloadCreationTrait, DownloadIdSelector, DownloadIdTrait, DownloadSelectorTrait,
|
||||
DownloadStateTrait, DownloadTaskTrait,
|
||||
},
|
||||
};
|
||||
|
||||
pub type QBittorrentHash = String;

impl DownloadIdTrait for QBittorrentHash {}

impl TorrentHashTrait for QBittorrentHash {}

#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct QBittorrentState(Option<State>);

impl DownloadStateTrait for QBittorrentState {}

impl TorrentStateTrait for QBittorrentState {}

impl From<Option<State>> for QBittorrentState {
    fn from(value: Option<State>) -> Self {
        Self(value)
    }
}

#[derive(Debug)]
pub struct QBittorrentTask {
    pub hash_info: QBittorrentHash,
    pub torrent: QbitTorrent,
    pub contents: Vec<QbitTorrentContent>,
    pub state: QBittorrentState,
}

impl QBittorrentTask {
    pub fn from_query(
        torrent: QbitTorrent,
        contents: Vec<QbitTorrentContent>,
    ) -> Result<Self, DownloaderError> {
        let hash = torrent
            .hash
            .clone()
            .ok_or_else(|| DownloaderError::TorrentMetaError {
                message: "missing hash".to_string(),
                source: None.into(),
            })?;
        let state = QBittorrentState(torrent.state.clone());
        Ok(Self {
            hash_info: hash,
            contents,
            state,
            torrent,
        })
    }
}

impl DownloadTaskTrait for QBittorrentTask {
    type State = QBittorrentState;
    type Id = QBittorrentHash;

    fn id(&self) -> &Self::Id {
        &self.hash_info
    }

    fn into_id(self) -> Self::Id {
        self.hash_info
    }

    fn name(&self) -> Cow<'_, str> {
        self.torrent
            .name
            .as_deref()
            .map(Cow::Borrowed)
            .unwrap_or_else(|| DownloadTaskTrait::name(self))
    }

    fn speed(&self) -> Option<u64> {
        self.torrent.dlspeed.and_then(|s| u64::try_from(s).ok())
    }

    fn state(&self) -> &Self::State {
        &self.state
    }

    fn dl_bytes(&self) -> Option<u64> {
        self.torrent.downloaded.and_then(|v| u64::try_from(v).ok())
    }

    fn total_bytes(&self) -> Option<u64> {
        self.torrent.size.and_then(|v| u64::try_from(v).ok())
    }

    fn left_bytes(&self) -> Option<u64> {
        self.torrent.amount_left.and_then(|v| u64::try_from(v).ok())
    }

    fn et(&self) -> Option<Duration> {
        self.torrent
            .time_active
            .and_then(|v| u64::try_from(v).ok())
            .map(Duration::from_secs)
    }

    fn eta(&self) -> Option<Duration> {
        self.torrent
            .eta
            .and_then(|v| u64::try_from(v).ok())
            .map(Duration::from_secs)
    }

    fn progress(&self) -> Option<f32> {
        self.torrent.progress.as_ref().map(|s| *s as f32)
    }
}

impl TorrentTaskTrait for QBittorrentTask {
    fn hash_info(&self) -> &str {
        &self.hash_info
    }

    fn tags(&self) -> impl Iterator<Item = Cow<'_, str>> {
        self.torrent
            .tags
            .as_deref()
            .unwrap_or("")
            .split(',')
            .map(|s| s.trim())
            .filter(|s| !s.is_empty())
            .map(Cow::Borrowed)
    }

    fn category(&self) -> Option<Cow<'_, str>> {
        self.torrent.category.as_deref().map(Cow::Borrowed)
    }
}

#[derive(Debug, Clone, Default)]
pub struct QBittorrentCreation {
    pub save_path: PathBuf,
    pub tags: Vec<String>,
    pub category: Option<String>,
    pub sources: Vec<HashTorrentSource>,
}

impl DownloadCreationTrait for QBittorrentCreation {
    type Task = QBittorrentTask;
}

impl TorrentCreationTrait for QBittorrentCreation {
    fn save_path(&self) -> &Path {
        self.save_path.as_ref()
    }

    fn save_path_mut(&mut self) -> &mut PathBuf {
        &mut self.save_path
    }

    fn sources_mut(&mut self) -> &mut Vec<HashTorrentSource> {
        &mut self.sources
    }
}

pub type QBittorrentHashSelector = DownloadIdSelector<QBittorrentTask>;

pub struct QBittorrentComplexSelector {
    pub query: GetTorrentListArg,
}

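// The qBittorrent Web API accepts several hashes joined with `|`, so a plain
// id selector can always be lowered into a complex list query.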
impl From<QBittorrentHashSelector> for QBittorrentComplexSelector {
    fn from(value: QBittorrentHashSelector) -> Self {
        Self {
            query: GetTorrentListArg {
                hashes: Some(value.ids.join("|")),
                ..Default::default()
            },
        }
    }
}

impl DownloadSelectorTrait for QBittorrentComplexSelector {
    type Id = QBittorrentHash;
    type Task = QBittorrentTask;
}

pub enum QBittorrentSelector {
    Hash(QBittorrentHashSelector),
    Complex(QBittorrentComplexSelector),
}

impl DownloadSelectorTrait for QBittorrentSelector {
    type Id = QBittorrentHash;
    type Task = QBittorrentTask;

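    // A hash selector is by construction a list of ids; a complex selector may
    // not reduce to one, so its failure is wrapped back into the enum.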
    fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
        match self {
            QBittorrentSelector::Complex(c) => {
                c.try_into_ids_only().map_err(QBittorrentSelector::Complex)
            }
            QBittorrentSelector::Hash(h) => {
                let result = h
                    .try_into_ids_only()
                    .unwrap_or_else(|_| unreachable!("hash selector must contain hashes"))
                    .into_iter();
                Ok(result.collect_vec())
            }
        }
    }
}
@@ -1,280 +0,0 @@
use std::time::Duration;

use chrono::Utc;
use qbit_rs::model::{GetTorrentListArg, TorrentFilter as QbitTorrentFilter};
use quirks_path::Path;
use snafu::{OptionExt, ResultExt};

use crate::{
    downloader::{
        DownloaderError,
        bittorrent::{
            downloader::TorrentDownloaderTrait, source::HashTorrentSource, task::TorrentTaskTrait,
        },
        core::{DownloadIdSelectorTrait, DownloaderTrait},
        qbit::{
            QBittorrentDownloader, QBittorrentDownloaderCreation,
            task::{
                QBittorrentComplexSelector, QBittorrentCreation, QBittorrentHashSelector,
                QBittorrentSelector, QBittorrentTask,
            },
        },
        utils::path_equals_as_file_url,
    },
    errors::{RError, RResult},
    test_utils::fetch::build_testing_http_client,
};

fn get_tmp_qbit_test_folder() -> &'static str {
    if cfg!(all(windows, not(feature = "testcontainers"))) {
        "C:\\Windows\\Temp\\konobangu\\qbit"
    } else {
        "/tmp/konobangu/qbit"
    }
}

#[cfg(feature = "testcontainers")]
pub async fn create_qbit_testcontainers()
-> RResult<testcontainers::ContainerRequest<testcontainers::GenericImage>> {
    use testcontainers::{
        GenericImage,
        core::{
            ContainerPort,
            // ReuseDirective,
            WaitFor,
        },
    };
    use testcontainers_ext::{ImageDefaultLogConsumerExt, ImagePruneExistedLabelExt};
    use testcontainers_modules::testcontainers::ImageExt;

    let container = GenericImage::new("linuxserver/qbittorrent", "latest")
        .with_wait_for(WaitFor::message_on_stderr("Connection to localhost"))
        .with_env_var("WEBUI_PORT", "8080")
        .with_env_var("TZ", "Asia/Singapore")
        .with_env_var("TORRENTING_PORT", "6881")
        .with_mapped_port(6881, ContainerPort::Tcp(6881))
        .with_mapped_port(8080, ContainerPort::Tcp(8080))
        // .with_reuse(ReuseDirective::Always)
        .with_default_log_consumer()
        .with_prune_existed_label(env!("CARGO_PKG_NAME"), "qbit-downloader", true, true)
        .await?;

    Ok(container)
}

#[cfg(not(feature = "testcontainers"))]
#[tokio::test]
async fn test_qbittorrent_downloader() {
let hash = "47ee2d69e7f19af783ad896541a07b012676f858".to_string();
|
||||
let torrent_url = "https://mikanani.me/Download/20240301/{}.torrent";
|
||||
let _ = test_qbittorrent_downloader_impl(torrent_url, hash, None, None).await;
|
||||
}

#[cfg(feature = "testcontainers")]
#[tokio::test(flavor = "multi_thread")]
async fn test_qbittorrent_downloader() -> RResult<()> {
    use testcontainers::runners::AsyncRunner;
    use testing_torrents::{TestTorrentRequest, TestTorrentResponse, TestingTorrentFileItem};
    use tokio::io::AsyncReadExt;

    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::DEBUG)
        .with_test_writer()
        .init();

    let torrents_image = testing_torrents::create_testcontainers().await?;
    let _torrents_container = torrents_image.start().await?;

    let torrents_req = TestTorrentRequest {
        id: "f10ebdda-dd2e-43f8-b80c-bf0884d071c4".into(),
        file_list: vec![TestingTorrentFileItem {
            path: "[Nekomoe kissaten&LoliHouse] Boku no Kokoro no Yabai Yatsu - 20 [WebRip 1080p \
                   HEVC-10bit AAC ASSx2].mkv"
                .into(),
            size: 1024,
        }],
    };

    let torrent_res: TestTorrentResponse = reqwest::Client::new()
        .post("http://127.0.0.1:6080/api/torrents/mock")
        .json(&torrents_req)
        .send()
        .await?
        .json()
        .await?;

    let qbit_image = create_qbit_testcontainers().await?;
    let qbit_container = qbit_image.start().await?;

    let mut logs = String::new();

    qbit_container
        .stdout(false)
        .read_to_string(&mut logs)
        .await?;

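    // The qbittorrent image used here prints a generated WebUI username and a
    // temporary password to its log on startup; scrape both from the captured
    // output.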
    let username = logs
        .lines()
        .find_map(|line| {
            if line.contains("The WebUI administrator username is") {
                line.split_whitespace().last()
            } else {
                None
            }
        })
        .expect("should have username")
        .trim();

    let password = logs
        .lines()
        .find_map(|line| {
            if line.contains("A temporary password is provided for") {
                line.split_whitespace().last()
            } else {
                None
            }
        })
        .expect("should have password")
        .trim();

    tracing::info!(username, password);

    test_qbittorrent_downloader_impl(
        torrent_res.torrent_url,
        torrent_res.hash,
        Some(username),
        Some(password),
    )
    .await?;

    Ok(())
}

async fn test_qbittorrent_downloader_impl(
    torrent_url: String,
    torrent_hash: String,
    username: Option<&str>,
    password: Option<&str>,
) -> RResult<()> {
    let http_client = build_testing_http_client()?;
    let base_save_path = Path::new(get_tmp_qbit_test_folder());

    let downloader = QBittorrentDownloader::from_creation(QBittorrentDownloaderCreation {
        endpoint: "http://127.0.0.1:8080".to_string(),
        password: password.unwrap_or_default().to_string(),
        username: username.unwrap_or_default().to_string(),
        subscriber_id: 0,
        save_path: base_save_path.to_string(),
        downloader_id: 0,
        wait_sync_timeout: Some(Duration::from_secs(3)),
    })
    .await?;

    downloader.check_connection().await?;

    downloader
        .remove_torrents(vec![torrent_hash.clone()].into())
        .await?;

    let torrent_source =
        HashTorrentSource::from_url_and_http_client(&http_client, torrent_url).await?;

    let folder_name = format!("torrent_test_{}", Utc::now().timestamp());
    let save_path = base_save_path.join(&folder_name);

    let torrent_creation = QBittorrentCreation {
        save_path,
        tags: vec![],
        sources: vec![torrent_source],
        category: None,
    };

    downloader.add_downloads(torrent_creation).await?;

    let get_torrent = async || -> Result<QBittorrentTask, DownloaderError> {
        let torrent_infos = downloader
            .query_downloads(QBittorrentSelector::Hash(QBittorrentHashSelector::from_id(
                torrent_hash.clone(),
            )))
            .await?;

        let result = torrent_infos
            .into_iter()
            .find(|t| t.hash_info() == torrent_hash)
            .whatever_context::<_, DownloaderError>("no bittorrent")?;

        Ok(result)
    };

    let target_torrent = get_torrent().await?;

    let files = target_torrent.contents;

    assert!(!files.is_empty());

    let first_file = files.first().expect("should have first file");
    assert!(
        first_file.name.ends_with(r#"[Nekomoe kissaten&LoliHouse] Boku no Kokoro no Yabai Yatsu - 20 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#)
    );

    let test_tag = "test_tag".to_string();

    downloader
        .add_torrent_tags(vec![torrent_hash.clone()], vec![test_tag.clone()])
        .await?;

    let target_torrent = get_torrent().await?;

    assert!(target_torrent.tags().any(|s| s == test_tag));

    let test_category = format!("test_category_{}", Utc::now().timestamp());

    downloader
        .set_torrents_category(vec![torrent_hash.clone()], &test_category)
        .await?;

    let target_torrent = get_torrent().await?;

    assert_eq!(
        Some(test_category.as_str()),
        target_torrent.category().as_deref()
    );

    let moved_torrent_path = base_save_path.join(format!("moved_{}", Utc::now().timestamp()));

    downloader
        .move_torrents(vec![torrent_hash.clone()], moved_torrent_path.as_str())
        .await?;

    let target_torrent = get_torrent().await?;

    let actual_content_path = &target_torrent
        .torrent
        .save_path
        .expect("failed to get actual save path");

    assert!(
        path_equals_as_file_url(actual_content_path, moved_torrent_path)
            .whatever_context::<_, RError>(
                "failed to compare actual torrent path and found expected torrent path"
            )?
    );

    downloader
        .remove_torrents(vec![torrent_hash.clone()].into())
        .await?;

    let torrent_infos1 = downloader
        .query_downloads(QBittorrentSelector::Complex(QBittorrentComplexSelector {
            query: GetTorrentListArg::builder()
                .filter(QbitTorrentFilter::All)
                .build(),
        }))
        .await?;

    assert!(torrent_infos1.is_empty());

    tracing::info!("test finished");

    Ok(())
}
@@ -1 +0,0 @@

@@ -1,11 +0,0 @@
use quirks_path::{Path, PathToUrlError};

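/// Compares two paths by their `file://` URL form, which normalizes
/// platform-specific separators (e.g. `C:\a\b` vs `C:/a/b`).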
pub fn path_equals_as_file_url<A: AsRef<Path>, B: AsRef<Path>>(
    a: A,
    b: B,
) -> Result<bool, PathToUrlError> {
    let u1 = a.as_ref().to_file_url()?;
    let u2 = b.as_ref().to_file_url()?;

    Ok(u1.as_str() == u2.as_str())
}
@@ -1,55 +0,0 @@
use std::fmt::Display;

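/// An optional boxed error source, so snafu error variants can carry
/// `source: None` without needing a separate variant.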
#[derive(Debug)]
pub struct OptDynErr(Option<Box<dyn std::error::Error + Send + Sync>>);

impl AsRef<dyn snafu::Error> for OptDynErr {
    fn as_ref(&self) -> &(dyn snafu::Error + 'static) {
        self
    }
}

impl OptDynErr {
    pub fn some_boxed<E: std::error::Error + Send + Sync + 'static>(e: E) -> Self {
        Self(Some(Box::new(e)))
    }

    pub fn some(e: Box<dyn std::error::Error + Send + Sync>) -> Self {
        Self(Some(e))
    }

    pub fn none() -> Self {
        Self(None)
    }
}

impl Display for OptDynErr {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.0 {
            Some(e) => e.fmt(f),
            None => write!(f, "None"),
        }
    }
}

impl snafu::Error for OptDynErr {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        None
    }

    fn cause(&self) -> Option<&dyn std::error::Error> {
        self.source()
    }
}

impl From<Option<Box<dyn std::error::Error + Send + Sync>>> for OptDynErr {
    fn from(value: Option<Box<dyn std::error::Error + Send + Sync>>) -> Self {
        Self(value)
    }
}

impl From<Box<dyn std::error::Error + Send + Sync>> for OptDynErr {
    fn from(value: Box<dyn std::error::Error + Send + Sync>) -> Self {
        Self::some(value)
    }
}
@@ -4,6 +4,7 @@ use axum::{
    Json,
    response::{IntoResponse, Response},
};
use fetch::{FetchError, HttpClientError};
use http::StatusCode;
use serde::{Deserialize, Deserializer, Serialize};
use snafu::Snafu;
@@ -12,12 +13,11 @@ use crate::{
    auth::AuthError,
    downloader::DownloaderError,
    errors::{OptDynErr, response::StandardErrorResponse},
    fetch::HttpClientError,
};

#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RError {
pub enum RecorderError {
    #[snafu(transparent, context(false))]
    FancyRegexError {
        #[snafu(source(from(fancy_regex::Error, Box::new)))]
@@ -53,8 +53,6 @@ pub enum RError {
    IOError { source: std::io::Error },
    #[snafu(transparent)]
    DbError { source: sea_orm::DbErr },
    #[snafu(transparent)]
    CookieParseError { source: cookie::ParseError },
    #[snafu(transparent, context(false))]
    FigmentError {
        #[snafu(source(from(figment::Error, Box::new)))]
@@ -63,10 +61,6 @@ pub enum RError {
    #[snafu(transparent)]
    SerdeJsonError { source: serde_json::Error },
    #[snafu(transparent)]
    ReqwestMiddlewareError { source: reqwest_middleware::Error },
    #[snafu(transparent)]
    ReqwestError { source: reqwest::Error },
    #[snafu(transparent)]
    ParseUrlError { source: url::ParseError },
    #[snafu(display("{source}"), context(false))]
    OpenDALError {
@@ -106,6 +100,8 @@ pub enum RError {
    },
    #[snafu(display("Model Entity {entity} not found"))]
    ModelEntityNotFound { entity: Cow<'static, str> },
    #[snafu(transparent)]
    FetchError { source: FetchError },
    #[snafu(display("{message}"))]
    Whatever {
        message: String,
@@ -114,7 +110,7 @@ pub enum RError {
    },
}

impl RError {
impl RecorderError {
    pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
        Self::MikanMetaMissingFieldError {
            field,
@@ -146,7 +142,7 @@ impl RError {
    }
}

impl snafu::FromString for RError {
impl snafu::FromString for RecorderError {
    type Source = Box<dyn std::error::Error + Send + Sync>;

    fn without_source(message: String) -> Self {
@@ -164,7 +160,7 @@ impl snafu::FromString for RError {
    }
}

impl IntoResponse for RError {
impl IntoResponse for RecorderError {
    fn into_response(self) -> Response {
        match self {
            Self::AuthError { source: auth_error } => auth_error.into_response(),
@@ -177,7 +173,7 @@ impl IntoResponse for RError {
    }
}

impl Serialize for RError {
impl Serialize for RecorderError {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
@@ -186,7 +182,7 @@ impl Serialize for RError {
    }
}

impl<'de> Deserialize<'de> for RError {
impl<'de> Deserialize<'de> for RecorderError {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
@@ -199,4 +195,4 @@ impl<'de> Deserialize<'de> for RError {
    }
}

pub type RResult<T> = Result<T, RError>;
pub type RecorderResult<T> = Result<T, RecorderError>;

@@ -1,9 +1 @@
pub trait RAnyhowResultExt<T>: snafu::ResultExt<T, anyhow::Error> {
    fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>>;
}

impl<T> RAnyhowResultExt<T> for Result<T, anyhow::Error> {
    fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>> {
        self.map_err(|e| e.into())
    }
}

@@ -1,9 +1,7 @@
pub mod alias;
pub mod app_error;
pub mod ext;
pub mod response;

pub use alias::OptDynErr;
pub use app_error::*;
pub use ext::RAnyhowResultExt;
pub use app_error::{RecorderError, RecorderResult};
pub use response::StandardErrorResponse;
pub use util::errors::OptDynErr;

@@ -1,2 +0,0 @@
pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
pub const MAGNET_SCHEMA: &str = "magnet";
@@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};

use crate::{
    errors::app_error::{RError, RResult},
    errors::app_error::{RecorderError, RecorderResult},
    extract::defs::SUBTITLE_LANG,
};

@@ -104,10 +104,10 @@ pub fn parse_episode_media_meta_from_torrent(
    torrent_path: &Path,
    torrent_name: Option<&str>,
    season: Option<i32>,
) -> RResult<TorrentEpisodeMediaMeta> {
) -> RecorderResult<TorrentEpisodeMediaMeta> {
    let media_name = torrent_path
        .file_name()
        .with_whatever_context::<_, _, RError>(|| {
        .with_whatever_context::<_, _, RecorderError>(|| {
            format!("failed to get file name of {}", torrent_path)
        })?;
    let mut match_obj = None;
@@ -124,7 +124,7 @@ pub fn parse_episode_media_meta_from_torrent(
    if let Some(match_obj) = match_obj {
        let group_season_and_title = match_obj
            .get(1)
            .whatever_context::<_, RError>("should have 1 group")?
            .whatever_context::<_, RecorderError>("should have 1 group")?
            .as_str();
        let (fansub, season_and_title) = get_fansub(group_season_and_title);
        let (title, season) = if let Some(season) = season {
@@ -135,7 +135,7 @@ pub fn parse_episode_media_meta_from_torrent(
        };
        let episode_index = match_obj
            .get(2)
            .whatever_context::<_, RError>("should have 2 group")?
            .whatever_context::<_, RecorderError>("should have 2 group")?
            .as_str()
            .parse::<i32>()
            .unwrap_or(1);
@@ -163,11 +163,11 @@ pub fn parse_episode_subtitle_meta_from_torrent(
    torrent_path: &Path,
    torrent_name: Option<&str>,
    season: Option<i32>,
) -> RResult<TorrentEpisodeSubtitleMeta> {
) -> RecorderResult<TorrentEpisodeSubtitleMeta> {
    let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
    let media_name = torrent_path
        .file_name()
        .with_whatever_context::<_, _, RError>(|| {
        .with_whatever_context::<_, _, RecorderError>(|| {
            format!("failed to get file name of {}", torrent_path)
        })?;


@@ -1,6 +1,3 @@
pub mod core;
pub mod extract;

pub use core::{BITTORRENT_MIME_TYPE, MAGNET_SCHEMA};

pub use extract::*;

@@ -1,15 +1,11 @@
use std::{fmt::Debug, ops::Deref};

use reqwest_middleware::ClientWithMiddleware;
use fetch::{FetchError, HttpClient, HttpClientTrait, client::HttpClientCookiesAuth};
use serde::{Deserialize, Serialize};
use url::Url;

use super::MikanConfig;
use crate::{
    errors::app_error::RError,
    fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth},
};

use crate::errors::RecorderError;
#[derive(Default, Clone, Deserialize, Serialize)]
pub struct MikanAuthSecrecy {
    pub cookie: String,
@@ -26,8 +22,10 @@ impl Debug for MikanAuthSecrecy {
}

impl MikanAuthSecrecy {
    pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RError> {
    pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RecorderError> {
        HttpClientCookiesAuth::from_cookies(&self.cookie, url, self.user_agent)
            .map_err(FetchError::from)
            .map_err(RecorderError::from)
    }
}

@@ -38,7 +36,7 @@ pub struct MikanClient {
}

impl MikanClient {
    pub async fn from_config(config: MikanConfig) -> Result<Self, RError> {
    pub async fn from_config(config: MikanConfig) -> Result<Self, RecorderError> {
        let http_client = HttpClient::from_config(config.http_client)?;
        let base_url = config.base_url;
        Ok(Self {
@@ -47,7 +45,7 @@ impl MikanClient {
        })
    }

    pub fn fork_with_auth(&self, secrecy: Option<MikanAuthSecrecy>) -> Result<Self, RError> {
    pub fn fork_with_auth(&self, secrecy: Option<MikanAuthSecrecy>) -> Result<Self, RecorderError> {
        let mut fork = self.http_client.fork();

        if let Some(secrecy) = secrecy {
@@ -71,10 +69,10 @@ impl MikanClient {
}

impl Deref for MikanClient {
    type Target = ClientWithMiddleware;
    type Target = fetch::reqwest_middleware::ClientWithMiddleware;

    fn deref(&self) -> &Self::Target {
        self.http_client.deref()
        &self.http_client
    }
}


@@ -1,8 +1,7 @@
use fetch::HttpClientConfig;
use serde::{Deserialize, Serialize};
use url::Url;

use crate::fetch::HttpClientConfig;

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct MikanConfig {
    pub http_client: HttpClientConfig,

@@ -1,22 +1,19 @@
use std::borrow::Cow;

use chrono::DateTime;
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
use fetch::{FetchError, IntoUrl, bytes::fetch_bytes};
use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
use tracing::instrument;
use url::Url;

use crate::{
    errors::app_error::{RError, RResult},
    extract::{
        bittorrent::BITTORRENT_MIME_TYPE,
        mikan::{
            MikanClient,
            web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
        },
    errors::app_error::{RecorderError, RecorderResult},
    extract::mikan::{
        MikanClient,
        web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
    },
    fetch::bytes::fetch_bytes,
};

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@@ -102,28 +99,28 @@ impl MikanRssChannel {
}

impl TryFrom<rss::Item> for MikanRssItem {
    type Error = RError;
    type Error = RecorderError;

    fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
        let enclosure = item
            .enclosure
            .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure")))?;
        let enclosure = item.enclosure.ok_or_else(|| {
            RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure"))
        })?;

        let mime_type = enclosure.mime_type;
        if mime_type != BITTORRENT_MIME_TYPE {
            return Err(RError::MimeError {
            return Err(RecorderError::MimeError {
                expected: String::from(BITTORRENT_MIME_TYPE),
                found: mime_type.to_string(),
                desc: String::from("MikanRssItem"),
            });
        }

        let title = item
            .title
            .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?;
        let title = item.title.ok_or_else(|| {
            RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title"))
        })?;

        let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
            RError::from_mikan_rss_invalid_field_and_source(
            RecorderError::from_mikan_rss_invalid_field_and_source(
                "enclosure_url:enclosure.link".into(),
                err,
            )
@@ -132,12 +129,14 @@ impl TryFrom<rss::Item> for MikanRssItem {
        let homepage = item
            .link
            .and_then(|link| Url::parse(&link).ok())
            .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link")))?;
            .ok_or_else(|| {
                RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link"))
            })?;

        let MikanEpisodeHomepage {
            mikan_episode_id, ..
        } = extract_mikan_episode_id_from_homepage(&homepage).ok_or_else(|| {
            RError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
            RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
        })?;

        Ok(MikanRssItem {
@@ -170,8 +169,8 @@ pub fn build_mikan_bangumi_rss_link(
    mikan_base_url: impl IntoUrl,
    mikan_bangumi_id: &str,
    mikan_fansub_id: Option<&str>,
) -> RResult<Url> {
    let mut url = mikan_base_url.into_url()?;
) -> RecorderResult<Url> {
    let mut url = mikan_base_url.into_url().map_err(FetchError::from)?;
    url.set_path("/RSS/Bangumi");
    url.query_pairs_mut()
        .append_pair("bangumiId", mikan_bangumi_id);
@@ -185,7 +184,7 @@ pub fn build_mikan_bangumi_rss_link(
pub fn build_mikan_subscriber_aggregation_rss_link(
    mikan_base_url: &str,
    mikan_aggregation_id: &str,
) -> RResult<Url> {
) -> RecorderResult<Url> {
    let mut url = Url::parse(mikan_base_url)?;
    url.set_path("/RSS/MyBangumi");
    url.query_pairs_mut()
@@ -227,7 +226,7 @@ pub fn extract_mikan_subscriber_aggregation_id_from_rss_link(
pub async fn extract_mikan_rss_channel_from_rss_link(
    http_client: &MikanClient,
    channel_rss_link: impl IntoUrl,
) -> RResult<MikanRssChannel> {
) -> RecorderResult<MikanRssChannel> {
    let bytes = fetch_bytes(http_client, channel_rss_link.as_str()).await?;

    let channel = rss::Channel::read_from(&bytes[..])?;
@@ -326,7 +325,7 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
            },
        ))
    } else {
        Err(RError::MikanRssInvalidFormatError).inspect_err(|error| {
        Err(RecorderError::MikanRssInvalidFormatError).inspect_err(|error| {
            tracing::warn!(error = %error);
        })
    }
@@ -336,24 +335,22 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
mod tests {
    use std::assert_matches::assert_matches;

    use downloader::bittorrent::BITTORRENT_MIME_TYPE;
    use rstest::rstest;
    use url::Url;

    use crate::{
        errors::app_error::RResult,
        extract::{
            bittorrent::BITTORRENT_MIME_TYPE,
            mikan::{
                MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
                extract_mikan_rss_channel_from_rss_link,
            },
        errors::RecorderResult,
        extract::mikan::{
            MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
            extract_mikan_rss_channel_from_rss_link,
        },
        test_utils::mikan::build_testing_mikan_client,
    };

    #[rstest]
    #[tokio::test]
    async fn test_parse_mikan_rss_channel_from_rss_link() -> RResult<()> {
    async fn test_parse_mikan_rss_channel_from_rss_link() -> RecorderResult<()> {
        let mut mikan_server = mockito::Server::new_async().await;

        let mikan_base_url = Url::parse(&mikan_server.url())?;

@@ -2,6 +2,7 @@ use std::{borrow::Cow, sync::Arc};

use async_stream::try_stream;
use bytes::Bytes;
use fetch::{html::fetch_html, image::fetch_image};
use futures::Stream;
use itertools::Itertools;
use scraper::{Html, Selector};
@@ -15,12 +16,11 @@ use super::{
};
use crate::{
    app::AppContextTrait,
    errors::app_error::{RError, RResult},
    errors::app_error::{RecorderResult, RecorderError},
    extract::{
        html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
        media::extract_image_src_from_str,
    },
    fetch::{html::fetch_html, image::fetch_image},
    storage::StorageContentCategory,
};

@@ -115,7 +115,7 @@ pub fn extract_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeH
pub async fn extract_mikan_poster_meta_from_src(
    http_client: &MikanClient,
    origin_poster_src_url: Url,
) -> Result<MikanBangumiPosterMeta, RError> {
) -> Result<MikanBangumiPosterMeta, RecorderError> {
    let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?;
    Ok(MikanBangumiPosterMeta {
        origin_poster_src: origin_poster_src_url,
@@ -128,7 +128,7 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
    ctx: &dyn AppContextTrait,
    origin_poster_src_url: Url,
    subscriber_id: i32,
) -> RResult<MikanBangumiPosterMeta> {
) -> RecorderResult<MikanBangumiPosterMeta> {
    let dal_client = ctx.storage();
    let mikan_client = ctx.mikan();
    if let Some(poster_src) = dal_client
@@ -170,7 +170,7 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
pub async fn extract_mikan_episode_meta_from_episode_homepage(
    http_client: &MikanClient,
    mikan_episode_homepage_url: Url,
) -> Result<MikanEpisodeMeta, RError> {
) -> Result<MikanEpisodeMeta, RecorderError> {
    let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?;
    let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?;

@@ -186,7 +186,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
        .select(bangumi_title_selector)
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
        .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
@@ -201,18 +201,22 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
        .and_then(|el| el.value().attr("href"))
        .and_then(|s| mikan_episode_homepage_url.join(s).ok())
        .and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
        .ok_or_else(|| {
            RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
        })
        .inspect_err(|error| tracing::error!(error = %error))?;

    let mikan_fansub_id = mikan_fansub_id
        .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id")))
        .ok_or_else(|| {
            RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id"))
        })
        .inspect_err(|error| tracing::error!(error = %error))?;

    let episode_title = html
        .select(&Selector::parse("title").unwrap())
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
        .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
@@ -220,7 +224,9 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
    let MikanEpisodeHomepage {
        mikan_episode_id, ..
    } = extract_mikan_episode_id_from_homepage(&mikan_episode_homepage_url)
        .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id")))
        .ok_or_else(|| {
            RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
        })
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
@@ -232,7 +238,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
    )
    .next()
    .map(extract_inner_text_from_element_ref)
    .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
    .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
    .inspect_err(|error| {
        tracing::warn!(error = %error);
    })?;
@@ -275,7 +281,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
    http_client: &MikanClient,
    mikan_bangumi_homepage_url: Url,
) -> Result<MikanBangumiMeta, RError> {
) -> Result<MikanBangumiMeta, RecorderError> {
    let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?;
    let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?;
    let html = Html::parse_document(&content);
@@ -289,7 +295,7 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
        .select(bangumi_title_selector)
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
        .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
        .inspect_err(|error| tracing::warn!(error = %error))?;

    let mikan_bangumi_id = html
@@ -303,7 +309,9 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
            mikan_bangumi_id, ..
        }| mikan_bangumi_id,
    )
    .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
    .ok_or_else(|| {
        RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
    })
    .inspect_err(|error| tracing::error!(error = %error))?;

    let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
@@ -353,8 +361,8 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
    context: Arc<dyn AppContextTrait>,
    my_bangumi_page_url: Url,
    auth_secrecy: Option<MikanAuthSecrecy>,
    history: &[Arc<RResult<MikanBangumiMeta>>],
) -> impl Stream<Item = RResult<MikanBangumiMeta>> {
    history: &[Arc<RecorderResult<MikanBangumiMeta>>],
) -> impl Stream<Item = RecorderResult<MikanBangumiMeta>> {
    try_stream! {
        let http_client = &context.mikan().fork_with_auth(auth_secrecy.clone())?;

@@ -511,7 +519,7 @@ mod test {

    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_poster_from_src(before_each: ()) -> RResult<()> {
    async fn test_extract_mikan_poster_from_src(before_each: ()) -> RecorderResult<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@@ -542,7 +550,7 @@ mod test {

    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_episode(before_each: ()) -> RResult<()> {
    async fn test_extract_mikan_episode(before_each: ()) -> RecorderResult<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@@ -582,7 +590,7 @@ mod test {

    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RResult<()> {
    async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RecorderResult<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@@ -619,7 +627,7 @@ mod test {

    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RResult<()> {
    async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RecorderResult<()> {
        let mut mikan_server = mockito::Server::new_async().await;

        let mikan_base_url = Url::parse(&mikan_server.url())?;

@@ -10,7 +10,7 @@ use serde::{Deserialize, Serialize};
use snafu::whatever;

use crate::{
    errors::app_error::RResult,
    errors::RecorderResult,
    extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
};

@@ -75,7 +75,7 @@ fn replace_ch_bracket_to_en(raw_name: &str) -> String {
    raw_name.replace('【', "[").replace('】', "]")
}

fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RResult<String> {
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RecorderResult<String> {
    let raw_without_fansub = if let Some(fansub) = fansub {
        let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
        fan_sub_re.replace_all(title_body, "")
@@ -263,7 +263,7 @@ pub fn check_is_movie(title: &str) -> bool {
    MOVIE_TITLE_RE.is_match(title)
}

pub fn parse_episode_meta_from_raw_name(s: &str) -> RResult<RawEpisodeMeta> {
pub fn parse_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
    let raw_title = s.trim();
    let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
    let fansub = extract_fansub(&raw_title_without_ch_brackets);

@@ -1,19 +0,0 @@
use bytes::Bytes;
use reqwest::IntoUrl;

use super::client::HttpClientTrait;
use crate::errors::app_error::RError;

pub async fn fetch_bytes<T: IntoUrl, H: HttpClientTrait>(
    client: &H,
    url: T,
) -> Result<Bytes, RError> {
    let bytes = client
        .get(url)
        .send()
        .await?
        .error_for_status()?
        .bytes()
        .await?;
    Ok(bytes)
}
@@ -1,322 +0,0 @@
use std::{fmt::Debug, ops::Deref, sync::Arc, time::Duration};

use async_trait::async_trait;
use axum::http::{self, Extensions};
use http_cache_reqwest::{
    Cache, CacheManager, CacheMode, HttpCache, HttpCacheOptions, MokaManager,
};
use leaky_bucket::RateLimiter;
use reqwest::{ClientBuilder, Request, Response};
use reqwest_middleware::{
    ClientBuilder as ClientWithMiddlewareBuilder, ClientWithMiddleware, Middleware, Next,
};
use reqwest_retry::{RetryTransientMiddleware, policies::ExponentialBackoff};
use reqwest_tracing::TracingMiddleware;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use snafu::Snafu;

use super::HttpClientSecrecyDataTrait;
use crate::fetch::get_random_mobile_ua;

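/// Throttles outgoing requests with a leaky-bucket limiter: each request
/// acquires one token before being forwarded down the middleware chain.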
pub struct RateLimiterMiddleware {
    rate_limiter: RateLimiter,
}

#[async_trait]
impl Middleware for RateLimiterMiddleware {
    async fn handle(
        &self,
        req: Request,
        extensions: &'_ mut Extensions,
        next: Next<'_>,
    ) -> reqwest_middleware::Result<Response> {
        self.rate_limiter.acquire_one().await;
        next.run(req, extensions).await
    }
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum HttpClientCacheBackendConfig {
    Moka { cache_size: u64 },
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum HttpClientCachePresetConfig {
    #[serde(rename = "rfc7234")]
    RFC7234,
}

impl Default for HttpClientCachePresetConfig {
    fn default() -> Self {
        Self::RFC7234
    }
}

#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct HttpClientConfig {
    pub exponential_backoff_max_retries: Option<u32>,
    pub leaky_bucket_max_tokens: Option<u32>,
    pub leaky_bucket_initial_tokens: Option<u32>,
    pub leaky_bucket_refill_tokens: Option<u32>,
    #[serde_as(as = "Option<serde_with::DurationMilliSeconds>")]
    pub leaky_bucket_refill_interval: Option<Duration>,
    pub user_agent: Option<String>,
    pub cache_backend: Option<HttpClientCacheBackendConfig>,
    pub cache_preset: Option<HttpClientCachePresetConfig>,
}

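/// Type-erased cache manager, so the concrete backend can be picked from
/// configuration at runtime.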
pub(crate) struct CacheBackend(Box<dyn CacheManager>);

impl CacheBackend {
    pub(crate) fn new<T: CacheManager>(backend: T) -> Self {
        Self(Box::new(backend))
    }
}

#[async_trait::async_trait]
impl CacheManager for CacheBackend {
    async fn get(
        &self,
        cache_key: &str,
    ) -> http_cache::Result<Option<(http_cache::HttpResponse, http_cache_semantics::CachePolicy)>>
    {
        self.0.get(cache_key).await
    }

    /// Attempts to cache a response and related policy.
    async fn put(
        &self,
        cache_key: String,
        res: http_cache::HttpResponse,
        policy: http_cache_semantics::CachePolicy,
    ) -> http_cache::Result<http_cache::HttpResponse> {
        self.0.put(cache_key, res, policy).await
    }
    /// Attempts to remove a record from cache.
    async fn delete(&self, cache_key: &str) -> http_cache::Result<()> {
        self.0.delete(cache_key).await
    }
}

#[derive(Debug, Snafu)]
pub enum HttpClientError {
    #[snafu(transparent)]
    ReqwestError { source: reqwest::Error },
    #[snafu(transparent)]
    ReqwestMiddlewareError { source: reqwest_middleware::Error },
    #[snafu(transparent)]
    HttpError { source: http::Error },
}

pub trait HttpClientTrait: Deref<Target = ClientWithMiddleware> + Debug {}

pub struct HttpClientFork {
    pub client_builder: ClientBuilder,
    pub middleware_stack: Vec<Arc<dyn Middleware>>,
    pub config: HttpClientConfig,
}

impl HttpClientFork {
    pub fn attach_secrecy<S: HttpClientSecrecyDataTrait>(self, secrecy: S) -> Self {
        let mut fork = self;
        fork.client_builder = secrecy.attach_secrecy_to_client(fork.client_builder);
        fork
    }
}

pub struct HttpClient {
    client: ClientWithMiddleware,
    middleware_stack: Vec<Arc<dyn Middleware>>,
    pub config: HttpClientConfig,
}

impl Debug for HttpClient {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("HttpClient")
            .field("config", &self.config)
            .finish()
    }
}

impl From<HttpClient> for ClientWithMiddleware {
    fn from(val: HttpClient) -> Self {
        val.client
    }
}

impl Deref for HttpClient {
    type Target = ClientWithMiddleware;

    fn deref(&self) -> &Self::Target {
        &self.client
    }
}

impl HttpClient {
    pub fn from_config(config: HttpClientConfig) -> Result<Self, HttpClientError> {
        let mut middleware_stack: Vec<Arc<dyn Middleware>> = vec![];
        let reqwest_client_builder = ClientBuilder::new().user_agent(
            config
                .user_agent
                .as_deref()
                .unwrap_or_else(|| get_random_mobile_ua()),
        );

        #[cfg(not(target_arch = "wasm32"))]
        let reqwest_client_builder =
            reqwest_client_builder.redirect(reqwest::redirect::Policy::none());

        let reqwest_client = reqwest_client_builder.build()?;
        let mut reqwest_with_middleware_builder = ClientWithMiddlewareBuilder::new(reqwest_client);

        {
            let tracing_middleware = Arc::new(TracingMiddleware::default());

            middleware_stack.push(tracing_middleware.clone());

            reqwest_with_middleware_builder =
                reqwest_with_middleware_builder.with_arc(tracing_middleware)
        }

        {
            if let Some(ref x) = config.exponential_backoff_max_retries {
                let retry_policy = ExponentialBackoff::builder().build_with_max_retries(*x);

                let retry_transient_middleware =
                    Arc::new(RetryTransientMiddleware::new_with_policy(retry_policy));

                middleware_stack.push(retry_transient_middleware.clone());

                reqwest_with_middleware_builder =
                    reqwest_with_middleware_builder.with_arc(retry_transient_middleware);
            }
        }

        {
            if let (None, None, None, None) = (
                config.leaky_bucket_initial_tokens.as_ref(),
                config.leaky_bucket_refill_tokens.as_ref(),
                config.leaky_bucket_refill_interval.as_ref(),
                config.leaky_bucket_max_tokens.as_ref(),
            ) {
            } else {
                let mut rate_limiter_builder = RateLimiter::builder();

                if let Some(ref x) = config.leaky_bucket_max_tokens {
                    rate_limiter_builder.max(*x as usize);
                }
                if let Some(ref x) = config.leaky_bucket_initial_tokens {
                    rate_limiter_builder.initial(*x as usize);
                }
                if let Some(ref x) = config.leaky_bucket_refill_tokens {
                    rate_limiter_builder.refill(*x as usize);
                }
                if let Some(ref x) = config.leaky_bucket_refill_interval {
                    rate_limiter_builder.interval(*x);
                }

                let rate_limiter = rate_limiter_builder.build();

                let rate_limiter_middleware = Arc::new(RateLimiterMiddleware { rate_limiter });

                middleware_stack.push(rate_limiter_middleware.clone());

                reqwest_with_middleware_builder =
                    reqwest_with_middleware_builder.with_arc(rate_limiter_middleware);
            }
        }

        {
            if let (None, None) = (config.cache_backend.as_ref(), config.cache_preset.as_ref()) {
            } else {
                let cache_preset = config.cache_preset.as_ref().cloned().unwrap_or_default();
                let cache_backend = config
                    .cache_backend
                    .as_ref()
                    .map(|b| match b {
                        HttpClientCacheBackendConfig::Moka { cache_size } => {
                            CacheBackend::new(MokaManager {
                                cache: Arc::new(moka::future::Cache::new(*cache_size)),
                            })
                        }
                    })
                    .unwrap_or_else(|| CacheBackend::new(MokaManager::default()));

                let http_cache = match cache_preset {
                    HttpClientCachePresetConfig::RFC7234 => HttpCache {
                        mode: CacheMode::Default,
                        manager: cache_backend,
                        options: HttpCacheOptions::default(),
                    },
                };

                let http_cache_middleware = Arc::new(Cache(http_cache));

                middleware_stack.push(http_cache_middleware.clone());

                reqwest_with_middleware_builder =
                    reqwest_with_middleware_builder.with_arc(http_cache_middleware);
            }
        }

        let reqwest_with_middleware = reqwest_with_middleware_builder.build();

        Ok(Self {
            client: reqwest_with_middleware,
            middleware_stack,
            config,
        })
    }

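    /// Creates a fork that reuses this client's middleware stack and config
    /// but rebuilds the underlying reqwest client, so per-fork secrets such as
    /// cookies or a user agent can be attached before calling `from_fork`.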
    pub fn fork(&self) -> HttpClientFork {
        let reqwest_client_builder = ClientBuilder::new().user_agent(
            self.config
                .user_agent
                .as_deref()
                .unwrap_or_else(|| get_random_mobile_ua()),
        );

        #[cfg(not(target_arch = "wasm32"))]
        let reqwest_client_builder =
            reqwest_client_builder.redirect(reqwest::redirect::Policy::none());

        HttpClientFork {
            client_builder: reqwest_client_builder,
            middleware_stack: self.middleware_stack.clone(),
            config: self.config.clone(),
        }
    }

    pub fn from_fork(fork: HttpClientFork) -> Result<Self, HttpClientError> {
        let HttpClientFork {
            client_builder,
            middleware_stack,
            config,
        } = fork;
        let reqwest_client = client_builder.build()?;
        let mut reqwest_with_middleware_builder = ClientWithMiddlewareBuilder::new(reqwest_client);

        for m in &middleware_stack {
            reqwest_with_middleware_builder = reqwest_with_middleware_builder.with_arc(m.clone());
        }

        let reqwest_with_middleware = reqwest_with_middleware_builder.build();

        Ok(Self {
            client: reqwest_with_middleware,
            middleware_stack,
            config,
        })
    }
}

impl Default for HttpClient {
    fn default() -> Self {
        HttpClient::from_config(Default::default()).expect("Failed to create default HttpClient")
    }
}

impl HttpClientTrait for HttpClient {}
@@ -1,9 +0,0 @@
pub mod core;
pub mod secrecy;

pub use core::{
    HttpClient, HttpClientCacheBackendConfig, HttpClientCachePresetConfig, HttpClientConfig,
    HttpClientError, HttpClientTrait,
};

pub use secrecy::{HttpClientCookiesAuth, HttpClientSecrecyDataTrait};
@@ -1,47 +0,0 @@
use std::sync::Arc;

use cookie::Cookie;
use reqwest::{ClientBuilder, cookie::Jar};
use url::Url;

use crate::errors::app_error::RError;

pub trait HttpClientSecrecyDataTrait {
    fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
        client_builder
    }
}

#[derive(Default)]
pub struct HttpClientCookiesAuth {
    pub cookie_jar: Arc<Jar>,
    pub user_agent: Option<String>,
}

impl HttpClientCookiesAuth {
    pub fn from_cookies(
        cookies: &str,
        url: &Url,
        user_agent: Option<String>,
    ) -> Result<Self, RError> {
        let cookie_jar = Arc::new(Jar::default());
        for cookie in Cookie::split_parse(cookies).try_collect::<Vec<_>>()? {
            cookie_jar.add_cookie_str(&cookie.to_string(), url);
        }

        Ok(Self {
            cookie_jar,
            user_agent,
        })
    }
}

impl HttpClientSecrecyDataTrait for HttpClientCookiesAuth {
    fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
        let mut client_builder = client_builder.cookie_provider(self.cookie_jar.clone());
        if let Some(ref user_agent) = self.user_agent {
            client_builder = client_builder.user_agent(user_agent);
        }
        client_builder
    }
}
@@ -1,11 +0,0 @@
use lazy_static::lazy_static;

lazy_static! {
    static ref DEFAULT_HTTP_CLIENT_USER_AGENT: Vec<String> =
        serde_json::from_str::<Vec<String>>(include_str!("./ua.json")).unwrap();
}

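/// Picks a random user agent from the bundled `ua.json` list.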
pub fn get_random_mobile_ua() -> &'static str {
    DEFAULT_HTTP_CLIENT_USER_AGENT[fastrand::usize(0..DEFAULT_HTTP_CLIENT_USER_AGENT.len())]
        .as_str()
}
@@ -1,19 +0,0 @@
use reqwest::IntoUrl;

use super::client::HttpClientTrait;
use crate::errors::app_error::RError;

pub async fn fetch_html<T: IntoUrl, H: HttpClientTrait>(
    client: &H,
    url: T,
) -> Result<String, RError> {
    let content = client
        .get(url)
        .send()
        .await?
        .error_for_status()?
        .text()
        .await?;

    Ok(content)
}
@@ -1,12 +0,0 @@
use bytes::Bytes;
use reqwest::IntoUrl;

use super::{bytes::fetch_bytes, client::HttpClientTrait};
use crate::errors::app_error::RError;

pub async fn fetch_image<T: IntoUrl, H: HttpClientTrait>(
    client: &H,
    url: T,
) -> Result<Bytes, RError> {
    fetch_bytes(client, url).await
}
@@ -1,16 +0,0 @@
pub mod bytes;
pub mod client;
pub mod core;
pub mod html;
pub mod image;
pub mod oidc;

pub use core::get_random_mobile_ua;

pub use bytes::fetch_bytes;
pub use client::{
    HttpClient, HttpClientConfig, HttpClientCookiesAuth, HttpClientError,
    HttpClientSecrecyDataTrait, HttpClientTrait,
};
pub use html::fetch_html;
pub use image::fetch_image;
@@ -1,36 +0,0 @@
use std::{future::Future, pin::Pin};

use axum::http;

use super::{HttpClient, client::HttpClientError};

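// Bridges `HttpClient` into openidconnect's async HTTP client interface by
// converting between `http` request/response types.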
impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
|
||||
type Error = HttpClientError;
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
type Future = Pin<Box<dyn Future<Output = Result<HttpResponse, Self::Error>> + 'c>>;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
type Future =
|
||||
Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;
|
||||
|
||||
fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
|
||||
Box::pin(async move {
|
||||
let response = self.execute(request.try_into()?).await?;
|
||||
|
||||
let mut builder = http::Response::builder().status(response.status());
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
builder = builder.version(response.version());
|
||||
}
|
||||
|
||||
for (name, value) in response.headers().iter() {
|
||||
builder = builder.header(name, value);
|
||||
}
|
||||
|
||||
builder
|
||||
.body(response.bytes().await?.to_vec())
|
||||
.map_err(HttpClientError::from)
|
||||
})
|
||||
}
|
||||
}
|
||||
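
Because `HttpClient` implements `AsyncHttpClient`, it can drive openidconnect flows directly. A sketch, assuming the openidconnect 4.x discovery API; the issuer URL is a placeholder:

use openidconnect::{IssuerUrl, core::CoreProviderMetadata};

// Sketch (assuming openidconnect 4.x): the impl above lets the crate's own
// HttpClient perform OIDC provider discovery. The issuer URL is a placeholder.
async fn discover_provider(
    http_client: &HttpClient,
) -> Result<CoreProviderMetadata, Box<dyn std::error::Error>> {
    let issuer = IssuerUrl::new("https://auth.example.com".to_string())?;
    let metadata = CoreProviderMetadata::discover_async(issuer, http_client).await?;
    Ok(metadata)
}
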
@@ -1,15 +0,0 @@
[
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.6 Safari/605.1.1",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.3",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.3",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:133.0) Gecko/20100101 Firefox/133.",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Herring/97.1.8280.8",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.3",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36 OPR/115.0.0.",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 AtContent/95.5.5462.5",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.3",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 OPR/114.0.0.",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.3"
]
@@ -2,7 +2,7 @@ use async_graphql::dynamic::Schema;
use sea_orm::DatabaseConnection;

use super::{config::GraphQLConfig, schema_root};
use crate::errors::app_error::RResult;
use crate::errors::RecorderResult;

#[derive(Debug)]
pub struct GraphQLService {
@@ -13,7 +13,7 @@ impl GraphQLService {
    pub async fn from_config_and_database(
        config: GraphQLConfig,
        db: DatabaseConnection,
    ) -> RResult<Self> {
    ) -> RecorderResult<Self> {
        let schema = schema_root::schema(
            db,
            config.depth_limit.and_then(|l| l.into()),

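The RResult/RError to RecorderResult/RecorderError rename repeats through the rest of this commit. The definitions themselves are not in these hunks; presumably they now read roughly like this sketch (the real enum in crate::errors carries many more variants):

use snafu::Snafu;

// Sketch of the presumed shape after the rename. The whatever-variant is
// implied by the with_whatever_context::<_, _, RecorderError> calls later
// in this diff; everything else here is a stand-in.
#[derive(Debug, Snafu)]
pub enum RecorderError {
    #[snafu(whatever, display("{message}"))]
    Whatever { message: String },
}

pub type RecorderResult<T> = Result<T, RecorderError>;
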
@@ -5,19 +5,18 @@
    impl_trait_in_bindings,
    iterator_try_collect,
    async_fn_traits,
    let_chains,
    error_generic_member_access
    error_generic_member_access,
    associated_type_defaults,
    let_chains
)]
#![feature(associated_type_defaults)]
pub use downloader;

pub mod app;
pub mod auth;
pub mod cache;
pub mod database;
pub mod downloader;
pub mod errors;
pub mod extract;
pub mod fetch;
pub mod graphql;
pub mod logger;
pub mod migrations;
@@ -26,5 +25,4 @@ pub mod storage;
pub mod tasks;
#[cfg(test)]
pub mod test_utils;
pub mod utils;
pub mod web;

@@ -10,7 +10,7 @@ use tracing_subscriber::{
};

use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
use crate::errors::app_error::RResult;
use crate::errors::RecorderResult;

// Function to initialize the logger based on the provided configuration
const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];
@@ -74,7 +74,7 @@ impl LoggerService {
            .expect("logger initialization failed")
    }

    pub async fn from_config(config: LoggerConfig) -> RResult<Self> {
    pub async fn from_config(config: LoggerConfig) -> RecorderResult<Self> {
        let mut layers: Vec<Box<dyn Layer<Registry> + Sync + Send>> = Vec::new();

        if let Some(file_appender_config) = config.file_appender.as_ref() {

@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use super::subscribers::{self, SEED_SUBSCRIBER};
use crate::{
    app::AppContextTrait,
    errors::app_error::{RError, RResult},
    errors::app_error::{RecorderError, RecorderResult},
};

#[derive(
@@ -57,17 +57,17 @@ impl Related<super::subscribers::Entity> for Entity {
impl ActiveModelBehavior for ActiveModel {}

impl Model {
    pub async fn find_by_pid(ctx: &dyn AppContextTrait, pid: &str) -> RResult<Self> {
    pub async fn find_by_pid(ctx: &dyn AppContextTrait, pid: &str) -> RecorderResult<Self> {
        let db = ctx.db();
        let subscriber_auth = Entity::find()
            .filter(Column::Pid.eq(pid))
            .one(db)
            .await?
            .ok_or_else(|| RError::from_db_record_not_found("auth::find_by_pid"))?;
            .ok_or_else(|| RecorderError::from_db_record_not_found("auth::find_by_pid"))?;
        Ok(subscriber_auth)
    }

    pub async fn create_from_oidc(ctx: &dyn AppContextTrait, sub: String) -> RResult<Self> {
    pub async fn create_from_oidc(ctx: &dyn AppContextTrait, sub: String) -> RecorderResult<Self> {
        let db = ctx.db();

        let txn = db.begin().await?;

@@ -4,7 +4,7 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::O
use serde::{Deserialize, Serialize};

use super::subscription_bangumi;
use crate::{app::AppContextTrait, errors::app_error::RResult};
use crate::{app::AppContextTrait, errors::RecorderResult};

#[derive(
    Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
@@ -119,9 +119,9 @@ impl Model {
        mikan_bangumi_id: String,
        mikan_fansub_id: String,
        f: F,
    ) -> RResult<Model>
    ) -> RecorderResult<Model>
    where
        F: AsyncFnOnce(&mut ActiveModel) -> RResult<()>,
        F: AsyncFnOnce(&mut ActiveModel) -> RecorderResult<()>,
    {
        let db = ctx.db();
        if let Some(existed) = Entity::find()

@@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use super::{bangumi, query::InsertManyReturningExt, subscription_episode};
use crate::{
    app::AppContextTrait,
    errors::app_error::RResult,
    errors::RecorderResult,
    extract::{
        mikan::{MikanEpisodeMeta, build_mikan_episode_homepage},
        rawname::parse_episode_meta_from_raw_name,
@@ -140,7 +140,7 @@ impl Model {
        subscriber_id: i32,
        subscription_id: i32,
        creations: impl IntoIterator<Item = MikanEpsiodeCreation>,
    ) -> RResult<()> {
    ) -> RecorderResult<()> {
        let db = ctx.db();
        let new_episode_active_modes = creations
            .into_iter()
@@ -191,7 +191,7 @@ impl ActiveModel {
    pub fn from_mikan_episode_meta(
        ctx: &dyn AppContextTrait,
        creation: MikanEpsiodeCreation,
    ) -> RResult<Self> {
    ) -> RecorderResult<Self> {
        let item = creation.episode;
        let bgm = creation.bangumi;
        let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title)

@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};

use crate::{
    app::AppContextTrait,
    errors::app_error::{RError, RResult},
    errors::app_error::{RecorderResult, RecorderError},
};

pub const SEED_SUBSCRIBER: &str = "konobangu";
@@ -95,22 +95,22 @@ pub struct SubscriberIdParams {
impl ActiveModelBehavior for ActiveModel {}

impl Model {
    pub async fn find_seed_subscriber_id(ctx: &dyn AppContextTrait) -> RResult<i32> {
    pub async fn find_seed_subscriber_id(ctx: &dyn AppContextTrait) -> RecorderResult<i32> {
        let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER).await?;
        Ok(subscriber_auth.subscriber_id)
    }

    pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RResult<Self> {
    pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RecorderResult<Self> {
        let db = ctx.db();

        let subscriber = Entity::find_by_id(id)
            .one(db)
            .await?
            .ok_or_else(|| RError::from_db_record_not_found("subscriptions::find_by_id"))?;
            .ok_or_else(|| RecorderError::from_db_record_not_found("subscriptions::find_by_id"))?;
        Ok(subscriber)
    }

    pub async fn create_root(ctx: &dyn AppContextTrait) -> RResult<Self> {
    pub async fn create_root(ctx: &dyn AppContextTrait) -> RecorderResult<Self> {
        let db = ctx.db();
        let txn = db.begin().await?;


@@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize};
use super::{bangumi, episodes, query::filter_values_in};
use crate::{
    app::AppContextTrait,
    errors::app_error::RResult,
    errors::RecorderResult,
    extract::{
        mikan::{
            build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link,
@@ -182,7 +182,7 @@ impl Model {
        ctx: &dyn AppContextTrait,
        create_dto: SubscriptionCreateDto,
        subscriber_id: i32,
    ) -> RResult<Self> {
    ) -> RecorderResult<Self> {
        let db = ctx.db();
        let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id);

@@ -193,7 +193,7 @@ impl Model {
        ctx: &dyn AppContextTrait,
        ids: impl Iterator<Item = i32>,
        enabled: bool,
    ) -> RResult<()> {
    ) -> RecorderResult<()> {
        let db = ctx.db();
        Entity::update_many()
            .col_expr(Column::Enabled, Expr::value(enabled))
@@ -206,7 +206,7 @@ impl Model {
    pub async fn delete_with_ids(
        ctx: &dyn AppContextTrait,
        ids: impl Iterator<Item = i32>,
    ) -> RResult<()> {
    ) -> RecorderResult<()> {
        let db = ctx.db();
        Entity::delete_many()
            .filter(Column::Id.is_in(ids))
@@ -215,7 +215,7 @@ impl Model {
        Ok(())
    }

    pub async fn pull_subscription(&self, ctx: &dyn AppContextTrait) -> RResult<()> {
    pub async fn pull_subscription(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
        match &self.category {
            SubscriptionCategory::Mikan => {
                let mikan_client = ctx.mikan();
@@ -287,7 +287,7 @@ impl Model {
                    self.id,
                    mikan_bangumi_id.to_string(),
                    mikan_fansub_id.to_string(),
                    async |am| -> RResult<()> {
                    async |am| -> RecorderResult<()> {
                        let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
                            mikan_client,
                            bgm_homepage.clone(),

@@ -2,7 +2,7 @@ use async_trait::async_trait;
use sea_orm::{QuerySelect, entity::prelude::*};
use serde::{Deserialize, Serialize};

use crate::{app::AppContextTrait, errors::app_error::RResult};
use crate::{app::AppContextTrait, errors::RecorderResult};

#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
@@ -76,7 +76,7 @@ impl Model {
    pub async fn find_stream_task_by_id(
        ctx: &dyn AppContextTrait,
        task_id: i32,
    ) -> RResult<Option<(Model, Vec<super::task_stream_item::Model>)>> {
    ) -> RecorderResult<Option<(Model, Vec<super::task_stream_item::Model>)>> {
        let db = ctx.db();
        let res = Entity::find()
            .filter(Column::Id.eq(task_id))

@@ -8,7 +8,7 @@ use url::Url;
use uuid::Uuid;

use super::StorageConfig;
use crate::errors::app_error::{RError, RResult};
use crate::errors::app_error::{RecorderError, RecorderResult};

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
@@ -50,7 +50,7 @@ pub struct StorageService {
}

impl StorageService {
    pub async fn from_config(config: StorageConfig) -> RResult<Self> {
    pub async fn from_config(config: StorageConfig) -> RecorderResult<Self> {
        Ok(Self {
            data_dir: config.data_dir.to_string(),
        })
@@ -71,7 +71,7 @@ impl StorageService {
        bucket: Option<&str>,
        filename: &str,
        data: Bytes,
    ) -> Result<StorageStoredUrl, RError> {
    ) -> Result<StorageStoredUrl, RecorderError> {
        match content_category {
            StorageContentCategory::Image => {
                let fullname = [
@@ -108,7 +108,7 @@ impl StorageService {
        subscriber_id: i32,
        bucket: Option<&str>,
        filename: &str,
    ) -> Result<Option<StorageStoredUrl>, RError> {
    ) -> Result<Option<StorageStoredUrl>, RecorderError> {
        match content_category {
            StorageContentCategory::Image => {
                let fullname = [
@@ -142,7 +142,7 @@ impl StorageService {
        subscriber_pid: &str,
        bucket: Option<&str>,
        filename: &str,
    ) -> RResult<Buffer> {
    ) -> RecorderResult<Buffer> {
        match content_category {
            StorageContentCategory::Image => {
                let fullname = [

@@ -7,7 +7,7 @@ use tokio::sync::{RwLock, mpsc};

use crate::{
    app::AppContextTrait,
    errors::app_error::{RError, RResult},
    errors::app_error::{RecorderError, RecorderResult},
    models,
};

@@ -103,41 +103,41 @@ pub trait StreamTaskCoreTrait: Sized {
}

pub trait StreamTaskReplayLayoutTrait: StreamTaskCoreTrait {
    fn history(&self) -> &[Arc<RResult<Self::Item>>];
    fn history(&self) -> &[Arc<RecorderResult<Self::Item>>];

    fn resume_from_model(
        task: models::tasks::Model,
        stream_items: Vec<models::task_stream_item::Model>,
    ) -> RResult<Self>;
    ) -> RecorderResult<Self>;

    fn running_receiver(
        &self,
    ) -> impl Future<Output = Option<mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>>>;
    ) -> impl Future<Output = Option<mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>>>;

    #[allow(clippy::type_complexity)]
    fn init_receiver(
        &self,
    ) -> impl Future<
        Output = (
            mpsc::UnboundedSender<Arc<RResult<Self::Item>>>,
            mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>,
            mpsc::UnboundedSender<Arc<RecorderResult<Self::Item>>>,
            mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>,
        ),
    >;

    fn serialize_request(request: Self::Request) -> RResult<serde_json::Value> {
        serde_json::to_value(request).map_err(RError::from)
    fn serialize_request(request: Self::Request) -> RecorderResult<serde_json::Value> {
        serde_json::to_value(request).map_err(RecorderError::from)
    }

    fn serialize_item(item: RResult<Self::Item>) -> RResult<serde_json::Value> {
        serde_json::to_value(item).map_err(RError::from)
    fn serialize_item(item: RecorderResult<Self::Item>) -> RecorderResult<serde_json::Value> {
        serde_json::to_value(item).map_err(RecorderError::from)
    }

    fn deserialize_request(request: serde_json::Value) -> RResult<Self::Request> {
        serde_json::from_value(request).map_err(RError::from)
    fn deserialize_request(request: serde_json::Value) -> RecorderResult<Self::Request> {
        serde_json::from_value(request).map_err(RecorderError::from)
    }

    fn deserialize_item(item: serde_json::Value) -> RResult<RResult<Self::Item>> {
        serde_json::from_value(item).map_err(RError::from)
    fn deserialize_item(item: serde_json::Value) -> RecorderResult<RecorderResult<Self::Item>> {
        serde_json::from_value(item).map_err(RecorderError::from)
    }
}

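The serialize/deserialize defaults above are plain serde_json round-trips with the error mapped into the recorder error type. The same idea standalone, left with serde_json's own error for simplicity:

// Standalone sketch of the default methods above: a serde_json round-trip.
// In the trait, the error is additionally mapped via RecorderError::from.
fn round_trip<T>(value: &T) -> Result<T, serde_json::Error>
where
    T: serde::Serialize + serde::de::DeserializeOwned,
{
    let json = serde_json::to_value(value)?;
    serde_json::from_value(json)
}
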
@@ -145,15 +145,15 @@ pub trait StreamTaskRunnerTrait: StreamTaskCoreTrait {
    fn run(
        context: Arc<dyn AppContextTrait>,
        request: &Self::Request,
        history: &[Arc<RResult<Self::Item>>],
    ) -> impl Stream<Item = RResult<Self::Item>>;
        history: &[Arc<RecorderResult<Self::Item>>],
    ) -> impl Stream<Item = RecorderResult<Self::Item>>;
}

pub trait StreamTaskReplayRunnerTrait: StreamTaskRunnerTrait + StreamTaskReplayLayoutTrait {
    fn run_shared(
        &self,
        context: Arc<dyn AppContextTrait>,
    ) -> impl Stream<Item = Arc<RResult<Self::Item>>> {
    ) -> impl Stream<Item = Arc<RecorderResult<Self::Item>>> {
        stream! {
            if let Some(mut receiver) = self.running_receiver().await {
                while let Some(item) = receiver.recv().await {
@@ -185,9 +185,9 @@ where
{
    pub meta: TaskMeta,
    pub request: Request,
    pub history: Vec<Arc<RResult<Item>>>,
    pub history: Vec<Arc<RecorderResult<Item>>>,
    #[allow(clippy::type_complexity)]
    pub channel: Arc<RwLock<Option<ReplayChannel<Arc<RResult<Item>>>>>>,
    pub channel: Arc<RwLock<Option<ReplayChannel<Arc<RecorderResult<Item>>>>>>,
}

impl<Request, Item> StreamTaskCoreTrait for StandardStreamTaskReplayLayout<Request, Item>
@@ -225,14 +225,14 @@ where
    Request: Serialize + DeserializeOwned,
    Item: Serialize + DeserializeOwned + Sync + Send + 'static,
{
    fn history(&self) -> &[Arc<RResult<Self::Item>>] {
    fn history(&self) -> &[Arc<RecorderResult<Self::Item>>] {
        &self.history
    }

    fn resume_from_model(
        task: models::tasks::Model,
        stream_items: Vec<models::task_stream_item::Model>,
    ) -> RResult<Self> {
    ) -> RecorderResult<Self> {
        Ok(Self {
            meta: TaskMeta {
                task_id: task.id,
@@ -243,12 +243,14 @@ where
            history: stream_items
                .into_iter()
                .map(|m| Self::deserialize_item(m.item).map(Arc::new))
                .collect::<RResult<Vec<_>>>()?,
                .collect::<RecorderResult<Vec<_>>>()?,
            channel: Arc::new(RwLock::new(None)),
        })
    }

    async fn running_receiver(&self) -> Option<mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>> {
    async fn running_receiver(
        &self,
    ) -> Option<mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>> {
        if let Some(channel) = self.channel.read().await.as_ref() {
            Some(channel.receiver().await)
        } else {
@@ -259,8 +261,8 @@ where
    async fn init_receiver(
        &self,
    ) -> (
        mpsc::UnboundedSender<Arc<RResult<Self::Item>>>,
        mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>,
        mpsc::UnboundedSender<Arc<RecorderResult<Self::Item>>>,
        mpsc::UnboundedReceiver<Arc<RecorderResult<Self::Item>>>,
    ) {
        let channel = ReplayChannel::new(self.history.clone());
        let rx = channel.receiver().await;

@@ -6,7 +6,7 @@ use url::Url;

use crate::{
    app::AppContextTrait,
    errors::app_error::RResult,
    errors::RecorderResult,
    extract::mikan::{MikanAuthSecrecy, MikanBangumiMeta, web_extract},
    tasks::core::{StandardStreamTaskReplayLayout, StreamTaskRunnerTrait},
};
@@ -24,8 +24,8 @@ impl StreamTaskRunnerTrait for ExtractMikanBangumisMetaFromMyBangumiTask {
    fn run(
        context: Arc<dyn AppContextTrait>,
        request: &Self::Request,
        history: &[Arc<RResult<Self::Item>>],
    ) -> impl Stream<Item = RResult<Self::Item>> {
        history: &[Arc<RecorderResult<Self::Item>>],
    ) -> impl Stream<Item = RecorderResult<Self::Item>> {
        let context = context.clone();
        web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page(
            context,

@@ -1,6 +0,0 @@
use crate::{errors::app_error::RResult, fetch::HttpClient};

pub fn build_testing_http_client() -> RResult<HttpClient> {
    let mikan_client = HttpClient::default();
    Ok(mikan_client)
}
@@ -1,17 +1,18 @@
use reqwest::IntoUrl;
use fetch::{FetchError, HttpClientConfig, IntoUrl};

use crate::{
    errors::app_error::RResult,
    errors::RecorderResult,
    extract::mikan::{MikanClient, MikanConfig},
    fetch::HttpClientConfig,
};

pub async fn build_testing_mikan_client(base_mikan_url: impl IntoUrl) -> RResult<MikanClient> {
pub async fn build_testing_mikan_client(
    base_mikan_url: impl IntoUrl,
) -> RecorderResult<MikanClient> {
    let mikan_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            ..Default::default()
        },
        base_url: base_mikan_url.into_url()?,
        base_url: base_mikan_url.into_url().map_err(FetchError::from)?,
    })
    .await?;
    Ok(mikan_client)

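A hypothetical test over the helper above; the local address and the tokio test harness are assumptions:

// Hypothetical test: the address stands in for a local mock server (e.g.
// the testcontainers-based setup this workspace gates behind a feature).
#[tokio::test]
async fn testing_mikan_client_builds() -> RecorderResult<()> {
    let client = build_testing_mikan_client("http://127.0.0.1:5080").await?;
    drop(client); // a real test would drive extraction against the mock
    Ok(())
}
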
@@ -1,4 +1,3 @@
pub mod app;
pub mod fetch;
pub mod mikan;
pub mod tracing;

@@ -1 +0,0 @@

@@ -7,7 +7,7 @@ use super::core::Controller;
use crate::{
    app::AppContextTrait,
    auth::{AuthUserInfo, header_www_authenticate_middleware},
    errors::app_error::RResult,
    errors::RecorderResult,
};

pub const CONTROLLER_PREFIX: &str = "/api/graphql";
@@ -25,7 +25,7 @@ async fn graphql_handler(
    graphql_service.schema.execute(req).await.into()
}

pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RResult<Controller> {
pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
    let router = Router::<Arc<dyn AppContextTrait>>::new()
        .route("/", post(graphql_handler))
        .layer(from_fn_with_state(ctx, header_www_authenticate_middleware));

@@ -3,7 +3,7 @@ use std::sync::Arc;
use axum::{Json, Router, extract::State, routing::get};
use serde::Serialize;

use crate::{app::AppContextTrait, errors::app_error::RResult, web::controller::Controller};
use crate::{app::AppContextTrait, errors::RecorderResult, web::controller::Controller};

pub const CONTROLLER_PREFIX: &str = "/api/metadata";

@@ -13,7 +13,9 @@ pub struct StandardResponse {
    pub message: String,
}

async fn health(State(ctx): State<Arc<dyn AppContextTrait>>) -> RResult<Json<StandardResponse>> {
async fn health(
    State(ctx): State<Arc<dyn AppContextTrait>>,
) -> RecorderResult<Json<StandardResponse>> {
    ctx.db().ping().await.inspect_err(
        |err| tracing::error!(err.msg = %err, err.detail = ?err, "health check database ping error"),
    )?;
@@ -31,7 +33,7 @@ async fn ping() -> Json<StandardResponse> {
    })
}

pub async fn create(_context: Arc<dyn AppContextTrait>) -> RResult<Controller> {
pub async fn create(_context: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
    let router = Router::<Arc<dyn AppContextTrait>>::new()
        .route("/health", get(health))
        .route("/ping", get(ping));

@@ -2,13 +2,11 @@ use std::sync::Arc;

use axum::{
    Json, Router,
    extract::{Query, State},
    http::request::Parts,
    extract::{Query, Request, State},
    routing::get,
};
use snafu::prelude::*;
use snafu::ResultExt;

use super::core::Controller;
use crate::{
    app::AppContextTrait,
    auth::{
@@ -16,9 +14,10 @@ use crate::{
        errors::OidcRequestRedirectUriSnafu,
        oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest},
    },
    errors::app_error::RResult,
    errors::RecorderResult,
    extract::http::ForwardedRelatedInfo,
    models::auth::AuthType,
    web::controller::core::Controller,
};

pub const CONTROLLER_PREFIX: &str = "/api/oidc";
@@ -43,10 +42,11 @@ async fn oidc_callback(

async fn oidc_auth(
    State(ctx): State<Arc<dyn AppContextTrait>>,
    parts: Parts,
    request: Request,
) -> Result<Json<OidcAuthRequest>, AuthError> {
    let auth_service = ctx.auth();
    if let AuthService::Oidc(oidc_auth_service) = auth_service {
        let (parts, _) = request.into_parts();
        let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts)
            .resolved_origin()
            .ok_or(url::ParseError::EmptyHost)
@@ -73,7 +73,7 @@ async fn oidc_auth(
    }
}

pub async fn create(_context: Arc<dyn AppContextTrait>) -> RResult<Controller> {
pub async fn create(_context: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
    let router = Router::<Arc<dyn AppContextTrait>>::new()
        .route("/auth", get(oidc_auth))
        .route("/callback", get(oidc_callback));

@@ -12,7 +12,7 @@ use http::StatusCode;
use serde::{Deserialize, Serialize};
use tower_http::catch_panic::CatchPanicLayer;

use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};

#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CatchPanic {
@@ -55,7 +55,7 @@ impl MiddlewareLayer for CatchPanic {
    fn apply(
        &self,
        app: Router<Arc<dyn AppContextTrait>>,
    ) -> RResult<Router<Arc<dyn AppContextTrait>>> {
    ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
        Ok(app.layer(CatchPanicLayer::custom(handle_panic)))
    }
}

@@ -11,7 +11,7 @@ use axum::Router;
use serde::{Deserialize, Serialize};
use tower_http::compression::CompressionLayer;

use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};

#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Compression {
@@ -38,7 +38,7 @@ impl MiddlewareLayer for Compression {
    fn apply(
        &self,
        app: Router<Arc<dyn AppContextTrait>>,
    ) -> RResult<Router<Arc<dyn AppContextTrait>>> {
    ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
        Ok(app.layer(CompressionLayer::new()))
    }
}

@@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use tower_http::cors::{self, Any};

use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};

/// CORS middleware configuration
#[derive(Debug, Clone, Deserialize, Serialize)]
@@ -80,7 +80,7 @@ impl Cors {
    ///
    /// In all of these cases, the error returned will be the result of the
    /// `parse` method of the corresponding type.
    pub fn cors(&self) -> RResult<cors::CorsLayer> {
    pub fn cors(&self) -> RecorderResult<cors::CorsLayer> {
        let mut cors: cors::CorsLayer = cors::CorsLayer::new();

        // testing CORS, assuming https://example.com in the allow list:
@@ -160,7 +160,7 @@ impl MiddlewareLayer for Cors {
    fn apply(
        &self,
        app: Router<Arc<dyn AppContextTrait>>,
    ) -> RResult<Router<Arc<dyn AppContextTrait>>> {
    ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
        Ok(app.layer(self.cors()?))
    }
}

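For reference, what cors() assembles is a tower-http CorsLayer. A sketch of the allow-list case mentioned in the comment above, reusing the same https://example.com example value:

use axum::http::HeaderValue;
use tower_http::cors::{Any, CorsLayer};

// Sketch of the allow-list case: one exact origin, any method and header.
fn example_cors_layer() -> CorsLayer {
    CorsLayer::new()
        .allow_origin(
            "https://example.com"
                .parse::<HeaderValue>()
                .expect("valid origin"),
        )
        .allow_methods(Any)
        .allow_headers(Any)
}
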
@@ -21,11 +21,11 @@ use axum::{
    },
    response::Response,
};
use futures_util::future::BoxFuture;
use futures::future::BoxFuture;
use serde::{Deserialize, Serialize};
use tower::{Layer, Service};

use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};

#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Etag {
@@ -52,7 +52,7 @@ impl MiddlewareLayer for Etag {
    fn apply(
        &self,
        app: Router<Arc<dyn AppContextTrait>>,
    ) -> RResult<Router<Arc<dyn AppContextTrait>>> {
    ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
        Ok(app.layer(EtagLayer))
    }
}

@@ -1,4 +1,3 @@
//! Detect a content type and format and respond accordingly
use axum::{
    extract::FromRequestParts,
    http::{
@@ -8,7 +7,7 @@ use axum::{
};
use serde::{Deserialize, Serialize};

use crate::errors::app_error::RError as Error;
use crate::errors::RecorderError as Error;

#[derive(Debug, Deserialize, Serialize)]
pub struct Format(pub RespondTo);

@@ -15,7 +15,7 @@ use tower_http::{add_extension::AddExtensionLayer, trace::TraceLayer};

use crate::{
    app::{AppContextTrait, Environment},
    errors::app_error::RResult,
    errors::RecorderResult,
    web::middleware::{MiddlewareLayer, request_id::LocoRequestId},
};

@@ -70,7 +70,7 @@ impl MiddlewareLayer for Middleware {
    fn apply(
        &self,
        app: Router<Arc<dyn AppContextTrait>>,
    ) -> RResult<Router<Arc<dyn AppContextTrait>>> {
    ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
        Ok(app
            .layer(
                TraceLayer::new_for_http().make_span_with(|request: &http::Request<_>| {

@@ -14,7 +14,7 @@ use std::sync::Arc;
use axum::Router;
use serde::{Deserialize, Serialize};

use crate::{app::AppContextTrait, errors::app_error::RResult};
use crate::{app::AppContextTrait, errors::RecorderResult};

/// Trait representing the behavior of middleware components in the application.
/// When implementing a new middleware, make sure to go over this checklist:
@@ -55,7 +55,7 @@ pub trait MiddlewareLayer {
    fn apply(
        &self,
        app: Router<Arc<dyn AppContextTrait>>,
    ) -> RResult<Router<Arc<dyn AppContextTrait>>>;
    ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>>;
}

#[allow(clippy::unnecessary_lazy_evaluations)]

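The apply signature above is what lets the web server thread a single Router through every configured middleware. A sketch of that folding step, assuming the trait stays object-safe:

use std::sync::Arc;

use axum::Router;

// Sketch: fold a router through a middleware stack. Each apply() consumes
// and returns the Router, so a bad configuration short-circuits with an Err.
fn apply_all(
    middlewares: &[Box<dyn MiddlewareLayer>],
    mut app: Router<Arc<dyn AppContextTrait>>,
) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
    for middleware in middlewares {
        app = middleware.apply(app)?;
    }
    Ok(app)
}
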
@@ -24,7 +24,7 @@ use axum::{
    http::{header::HeaderMap, request::Parts},
    response::Response,
};
use futures_util::future::BoxFuture;
use futures::future::BoxFuture;
use ipnetwork::IpNetwork;
use serde::{Deserialize, Serialize};
use snafu::ResultExt;
@@ -33,7 +33,7 @@ use tracing::error;

use crate::{
    app::AppContextTrait,
    errors::app_error::{RError, RResult},
    errors::app_error::{RecorderError, RecorderResult},
    web::middleware::MiddlewareLayer,
};

@@ -127,7 +127,7 @@ impl MiddlewareLayer for RemoteIpMiddleware {
    fn apply(
        &self,
        app: Router<Arc<dyn AppContextTrait>>,
    ) -> RResult<Router<Arc<dyn AppContextTrait>>> {
    ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
        Ok(app.layer(RemoteIPLayer::new(self)?))
    }
}
@@ -225,7 +225,7 @@ impl RemoteIPLayer {
    ///
    /// # Errors
    /// Fails if invalid header values are found
    pub fn new(config: &RemoteIpMiddleware) -> RResult<Self> {
    pub fn new(config: &RemoteIpMiddleware) -> RecorderResult<Self> {
        Ok(Self {
            trusted_proxies: config
                .trusted_proxies
@@ -236,14 +236,14 @@ impl RemoteIPLayer {
                .map(|proxy| {
                    IpNetwork::from_str(proxy)
                        .boxed()
                        .with_whatever_context::<_, _, RError>(|_| {
                        .with_whatever_context::<_, _, RecorderError>(|_| {
                            format!(
                                "remote ip middleware cannot parse trusted proxy \
                                 configuration: `{proxy}`"
                            )
                        })
                })
                .collect::<RResult<Vec<_>>>()
                .collect::<RecorderResult<Vec<_>>>()
                })
                .transpose()?,
        })

@@ -18,7 +18,7 @@ const MAX_LEN: usize = 255;

use std::sync::{Arc, OnceLock};

use crate::errors::app_error::RResult;
use crate::errors::RecorderResult;

static ID_CLEANUP: OnceLock<Regex> = OnceLock::new();

@@ -57,7 +57,7 @@ impl MiddlewareLayer for RequestId {
    fn apply(
        &self,
        app: Router<Arc<dyn AppContextTrait>>,
    ) -> RResult<Router<Arc<dyn AppContextTrait>>> {
    ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
        Ok(app.layer(axum::middleware::from_fn(request_id_middleware)))
    }
}

@@ -15,13 +15,13 @@ use axum::{
    http::{HeaderName, HeaderValue, Request},
    response::Response,
};
use futures_util::future::BoxFuture;
use futures::future::BoxFuture;
use serde::{Deserialize, Serialize};
use serde_json::{self, json};
use snafu::whatever;
use tower::{Layer, Service};

use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};

static PRESETS: OnceLock<HashMap<String, BTreeMap<String, String>>> = OnceLock::new();
fn get_presets() -> &'static HashMap<String, BTreeMap<String, String>> {
@@ -115,7 +115,7 @@ impl MiddlewareLayer for SecureHeader {
    fn apply(
        &self,
        app: Router<Arc<dyn AppContextTrait>>,
    ) -> RResult<Router<Arc<dyn AppContextTrait>>> {
    ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
        Ok(app.layer(SecureHeaders::new(self)?))
    }
}
@@ -124,7 +124,7 @@ impl SecureHeader {
    /// Converts the configuration into a list of headers.
    ///
    /// Applies the preset headers and any custom overrides.
    fn as_headers(&self) -> RResult<Vec<(HeaderName, HeaderValue)>> {
    fn as_headers(&self) -> RecorderResult<Vec<(HeaderName, HeaderValue)>> {
        let mut headers = vec![];

        let preset = &self.preset;
@@ -147,7 +147,7 @@ impl SecureHeader {
    fn push_headers(
        headers: &mut Vec<(HeaderName, HeaderValue)>,
        hm: &BTreeMap<String, String>,
    ) -> RResult<()> {
    ) -> RecorderResult<()> {
        for (k, v) in hm {
            headers.push((
                HeaderName::from_bytes(k.clone().as_bytes())?,
@@ -171,7 +171,7 @@ impl SecureHeaders {
    ///
    /// # Errors
    /// Returns an error if any header values are invalid.
    pub fn new(config: &SecureHeader) -> RResult<Self> {
    pub fn new(config: &SecureHeader) -> RecorderResult<Self> {
        Ok(Self {
            headers: config.as_headers()?,
        })

@@ -16,7 +16,7 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use tower_http::timeout::TimeoutLayer;

use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::RecorderResult, web::middleware::MiddlewareLayer};

/// Timeout middleware configuration
#[derive(Debug, Clone, Deserialize, Serialize)]
@@ -61,7 +61,7 @@ impl MiddlewareLayer for TimeOut {
    fn apply(
        &self,
        app: Router<Arc<dyn AppContextTrait>>,
    ) -> RResult<Router<Arc<dyn AppContextTrait>>> {
    ) -> RecorderResult<Router<Arc<dyn AppContextTrait>>> {
        Ok(app.layer(TimeoutLayer::new(Duration::from_millis(self.timeout))))
    }
}
