refactor: rewrite qbit downloader

This commit is contained in:
master 2025-04-03 02:22:26 +08:00
parent 234441e6a3
commit 27b52f7fd1
76 changed files with 1501 additions and 955 deletions

Cargo.lock generated
View File

@ -4780,6 +4780,7 @@ dependencies = [
name = "recorder"
version = "0.1.0"
dependencies = [
"anyhow",
"async-graphql",
"async-graphql-axum",
"async-stream",
@ -4793,6 +4794,7 @@ dependencies = [
"clap",
"cookie",
"ctor",
"dashmap 6.1.0",
"dotenv",
"fancy-regex",
"fastrand",

View File

@ -25,3 +25,4 @@ Cargo.lock
# Dist
node_modules
dist/
temp/

View File

@ -131,6 +131,8 @@ ctor = "0.4.0"
librqbit = "8.0.0"
typed-builder = "0.21.0"
snafu = { version = "0.8.5", features = ["futures"] }
anyhow = "1.0.97"
dashmap = "6.1.0"
[dev-dependencies]
serial_test = "3"
insta = { version = "1", features = ["redactions", "yaml", "filters"] }

View File

@ -1,4 +1,4 @@
use recorder::errors::RResult;
use recorder::errors::app_error::RResult;
// #![allow(unused_imports)]
// use recorder::{
// app::{AppContext, AppContextTrait},

View File

@ -1,7 +0,0 @@
<html>
<body>
not found :-(
</body>
</html>

View File

@ -3,7 +3,7 @@ use std::sync::Arc;
use clap::{Parser, command};
use super::{AppContext, core::App, env::Environment};
use crate::{app::config::AppConfig, errors::RResult};
use crate::{app::config::AppConfig, errors::app_error::RResult};
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]

View File

@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
use super::env::Environment;
use crate::{
auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::RResult,
auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::app_error::RResult,
extract::mikan::MikanConfig, graphql::GraphQLConfig, logger::LoggerConfig,
storage::StorageConfig, web::WebServerConfig,
};

View File

@ -1,6 +1,6 @@
use super::{Environment, config::AppConfig};
use crate::{
auth::AuthService, cache::CacheService, database::DatabaseService, errors::RResult,
auth::AuthService, cache::CacheService, database::DatabaseService, errors::app_error::RResult,
extract::mikan::MikanClient, graphql::GraphQLService, logger::LoggerService,
storage::StorageService,
};

View File

@ -6,7 +6,7 @@ use tokio::signal;
use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{
errors::RResult,
errors::app_error::RResult,
web::{
controller::{self, core::ControllerTrait},
middleware::default_middleware_stack,

View File

@ -24,7 +24,9 @@ use super::{
errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{app::AppContextTrait, errors::RError, fetch::HttpClient, models::auth::AuthType};
use crate::{
app::AppContextTrait, errors::app_error::RError, fetch::HttpClient, models::auth::AuthType,
};
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OidcAuthClaims {

View File

@ -1,4 +1,4 @@
use recorder::{app::AppBuilder, errors::RResult};
use recorder::{app::AppBuilder, errors::app_error::RResult};
#[tokio::main]
async fn main() -> RResult<()> {

View File

@ -1,5 +1,5 @@
use super::CacheConfig;
use crate::errors::RResult;
use crate::errors::app_error::RResult;
pub struct CacheService {}

View File

@ -7,7 +7,7 @@ use sea_orm::{
use sea_orm_migration::MigratorTrait;
use super::DatabaseConfig;
use crate::{errors::RResult, migrations::Migrator};
use crate::{errors::app_error::RResult, migrations::Migrator};
pub struct DatabaseService {
connection: DatabaseConnection,

View File

@ -0,0 +1,77 @@
use async_trait::async_trait;
use crate::downloader::{
DownloaderError,
bittorrent::task::{
TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait, TorrentTaskTrait,
},
core::{DownloadIdSelectorTrait, DownloadSelectorTrait, DownloadTaskTrait, DownloaderTrait},
};
#[async_trait]
pub trait TorrentDownloaderTrait: DownloaderTrait
where
Self::State: TorrentStateTrait,
Self::Id: TorrentHashTrait,
Self::Task: TorrentTaskTrait<State = Self::State, Id = Self::Id>,
Self::Creation: TorrentCreationTrait<Task = Self::Task>,
Self::Selector: DownloadSelectorTrait<Task = Self::Task, Id = Self::Id>,
{
type IdSelector: DownloadIdSelectorTrait<Task = Self::Task, Id = Self::Id>;
async fn pause_downloads(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes =
<Self as TorrentDownloaderTrait>::query_torrent_hashes(&self, selector).await?;
self.pause_torrents(hashes).await
}
async fn resume_downloads(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes =
<Self as TorrentDownloaderTrait>::query_torrent_hashes(&self, selector).await?;
self.resume_torrents(hashes).await
}
async fn remove_downloads(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes =
<Self as TorrentDownloaderTrait>::query_torrent_hashes(&self, selector).await?;
self.remove_torrents(hashes).await
}
async fn query_torrent_hashes(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = match selector.try_into_ids_only() {
Ok(hashes) => Self::IdSelector::from_iter(hashes),
Err(selector) => {
let tasks = self.query_downloads(selector).await?;
Self::IdSelector::from_iter(tasks.into_iter().map(|s| s.into_id()))
}
};
Ok(hashes)
}
async fn pause_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError>;
async fn resume_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError>;
async fn remove_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError>;
}
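
Below is a small standalone sketch (not part of this commit) of the selector-funneling pattern the trait above encodes: every bulk operation first resolves its selector into a flat list of info hashes, then dispatches to a hash-based method. The enum, struct, and method names here are illustrative stand-ins, not the crate's types.

use std::collections::HashMap;

enum Selector {
    Ids(Vec<String>),
    Category(String),
}

struct Downloader {
    // info hash -> category
    tasks: HashMap<String, String>,
}

impl Downloader {
    // Resolve any selector into a flat list of info hashes, mirroring
    // `query_torrent_hashes` above: an id selector short-circuits, anything
    // else goes through a task query first.
    fn resolve_ids(&self, selector: Selector) -> Vec<String> {
        match selector {
            Selector::Ids(ids) => ids,
            Selector::Category(cat) => self
                .tasks
                .iter()
                .filter(|(_, c)| **c == cat)
                .map(|(hash, _)| hash.clone())
                .collect(),
        }
    }

    fn pause_by_hashes(&self, hashes: &[String]) {
        for hash in hashes {
            println!("pause {hash}");
        }
    }

    // `pause_downloads` in the trait above funnels through the same two steps.
    fn pause(&self, selector: Selector) {
        let hashes = self.resolve_ids(selector);
        self.pause_by_hashes(&hashes);
    }
}

fn main() {
    let downloader = Downloader {
        tasks: HashMap::from([("abc123".to_string(), "konobangu".to_string())]),
    };
    downloader.pause(Selector::Category("konobangu".to_string()));
    downloader.pause(Selector::Ids(vec!["def456".to_string()]));
}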

View File

@ -0,0 +1,3 @@
pub mod downloader;
pub mod source;
pub mod task;

View File

@ -0,0 +1,228 @@
use std::{
borrow::Cow,
fmt::{Debug, Formatter},
};
use bytes::Bytes;
use librqbit_core::{magnet::Magnet, torrent_metainfo, torrent_metainfo::TorrentMetaV1Owned};
use snafu::ResultExt;
use url::Url;
use crate::{
downloader::errors::{
DownloadFetchSnafu, DownloaderError, MagnetFormatSnafu, TorrentMetaSnafu,
},
errors::RAnyhowResultExt,
extract::bittorrent::core::MAGNET_SCHEMA,
fetch::{bytes::fetch_bytes, client::core::HttpClientTrait},
};
pub trait HashTorrentSourceTrait: Sized {
fn hash_info(&self) -> Cow<'_, str>;
}
pub struct MagnetUrlSource {
pub magnet: Magnet,
pub url: String,
}
impl MagnetUrlSource {
pub fn from_url(url: String) -> Result<Self, DownloaderError> {
let magnet = Magnet::parse(&url)
.to_dyn_boxed()
.context(MagnetFormatSnafu {
message: url.clone(),
})?;
Ok(Self { magnet, url })
}
}
impl HashTorrentSourceTrait for MagnetUrlSource {
fn hash_info(&self) -> Cow<'_, str> {
let hash_info = self
.magnet
.as_id32()
.map(|s| s.as_string())
.or_else(|| self.magnet.as_id20().map(|s| s.as_string()))
.unwrap_or_else(|| unreachable!("hash of magnet must existed"));
hash_info.into()
}
}
impl Debug for MagnetUrlSource {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("MagnetUrlSource")
.field("url", &self.url)
.finish()
}
}
impl Clone for MagnetUrlSource {
fn clone(&self) -> Self {
Self {
magnet: Magnet::parse(&self.url).unwrap(),
url: self.url.clone(),
}
}
}
impl PartialEq for MagnetUrlSource {
fn eq(&self, other: &Self) -> bool {
self.url == other.url
}
}
impl Eq for MagnetUrlSource {}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TorrentUrlSource {
pub url: String,
}
impl TorrentUrlSource {
pub fn from_url(url: String) -> Result<Self, DownloaderError> {
Ok(Self { url })
}
}
#[derive(Clone)]
pub struct TorrentFileSource {
pub url: Option<String>,
pub payload: Bytes,
pub meta: TorrentMetaV1Owned,
pub filename: String,
}
impl TorrentFileSource {
pub fn from_bytes(
filename: String,
bytes: Bytes,
url: Option<String>,
) -> Result<Self, DownloaderError> {
let meta = torrent_metainfo::torrent_from_bytes(bytes.as_ref())
.to_dyn_boxed()
.with_context(|_| TorrentMetaSnafu {
message: format!(
"filename = {}, url = {}",
filename,
url.as_deref().unwrap_or_default()
),
})?
.to_owned();
Ok(TorrentFileSource {
url,
payload: bytes,
meta,
filename,
})
}
pub async fn from_url_and_http_client(
client: &impl HttpClientTrait,
url: String,
) -> Result<TorrentFileSource, DownloaderError> {
let payload = fetch_bytes(client, &url)
.await
.boxed()
.with_context(|_| DownloadFetchSnafu { url: url.clone() })?;
let filename = Url::parse(&url)
.boxed()
.and_then(|s| {
s.path_segments()
.and_then(|p| p.last())
.map(String::from)
.ok_or_else(|| anyhow::anyhow!("invalid url"))
.to_dyn_boxed()
})
.with_context(|_| DownloadFetchSnafu { url: url.clone() })?;
Self::from_bytes(filename, payload, Some(url))
}
}
impl HashTorrentSourceTrait for TorrentFileSource {
fn hash_info(&self) -> Cow<'_, str> {
self.meta.info_hash.as_string().into()
}
}
impl Debug for TorrentFileSource {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("TorrentFileSource")
.field("hash", &self.meta.info_hash.as_string())
.finish()
}
}
#[derive(Clone, Debug)]
pub enum UrlTorrentSource {
MagnetUrl(MagnetUrlSource),
TorrentUrl(TorrentUrlSource),
}
impl UrlTorrentSource {
pub fn from_url(url: String) -> Result<Self, DownloaderError> {
let url_ = Url::parse(&url)?;
let source = if url_.scheme() == MAGNET_SCHEMA {
Self::from_magnet_url(url)?
} else {
Self::from_torrent_url(url)?
};
Ok(source)
}
pub fn from_magnet_url(url: String) -> Result<Self, DownloaderError> {
let magnet_source = MagnetUrlSource::from_url(url)?;
Ok(Self::MagnetUrl(magnet_source))
}
pub fn from_torrent_url(url: String) -> Result<Self, DownloaderError> {
let torrent_source = TorrentUrlSource::from_url(url)?;
Ok(Self::TorrentUrl(torrent_source))
}
}
#[derive(Debug, Clone)]
pub enum HashTorrentSource {
MagnetUrl(MagnetUrlSource),
TorrentFile(TorrentFileSource),
}
impl HashTorrentSource {
pub async fn from_url_and_http_client(
client: &impl HttpClientTrait,
url: String,
) -> Result<Self, DownloaderError> {
let url_ = Url::parse(&url)?;
let source = if url_.scheme() == MAGNET_SCHEMA {
Self::from_magnet_url(url)?
} else {
Self::from_torrent_url_and_http_client(client, url).await?
};
Ok(source)
}
pub fn from_magnet_url(url: String) -> Result<Self, DownloaderError> {
let magnet_source = MagnetUrlSource::from_url(url)?;
Ok(Self::MagnetUrl(magnet_source))
}
pub async fn from_torrent_url_and_http_client(
client: &impl HttpClientTrait,
url: String,
) -> Result<Self, DownloaderError> {
let torrent_source = TorrentFileSource::from_url_and_http_client(client, url).await?;
Ok(Self::TorrentFile(torrent_source))
}
}
impl HashTorrentSourceTrait for HashTorrentSource {
fn hash_info(&self) -> Cow<'_, str> {
match self {
HashTorrentSource::MagnetUrl(m) => m.hash_info(),
HashTorrentSource::TorrentFile(t) => t.hash_info(),
}
}
}
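
The following standalone sketch (not part of this commit) shows the scheme dispatch performed by from_url_and_http_client above: magnet links are parsed locally, anything else is treated as a .torrent URL to be fetched. The enum, URL, and hash below are placeholders; only the url crate usage matches the diff.

use url::Url;

const MAGNET_SCHEMA: &str = "magnet";

#[derive(Debug)]
enum Source {
    Magnet(String),
    TorrentUrl(String),
}

fn classify(raw: &str) -> Result<Source, url::ParseError> {
    let url = Url::parse(raw)?;
    Ok(if url.scheme() == MAGNET_SCHEMA {
        Source::Magnet(raw.to_string())
    } else {
        Source::TorrentUrl(raw.to_string())
    })
}

fn main() {
    // Example inputs only; the hash and URL are not taken from the commit.
    let magnet = "magnet:?xt=urn:btih:c12fe1c06bba254a9dc9f519b335aa7c1367a88a";
    let torrent = "https://example.com/Download/example.torrent";
    println!("{:?}", classify(magnet));
    println!("{:?}", classify(torrent));
}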

View File

@ -0,0 +1,37 @@
use std::{borrow::Cow, hash::Hash};
use quirks_path::{Path, PathBuf};
use crate::downloader::{
bittorrent::source::HashTorrentSource,
core::{DownloadCreationTrait, DownloadIdTrait, DownloadStateTrait, DownloadTaskTrait},
};
pub const TORRENT_TAG_NAME: &str = "konobangu";
pub trait TorrentHashTrait: DownloadIdTrait + Send + Hash {}
pub trait TorrentStateTrait: DownloadStateTrait {}
pub trait TorrentTaskTrait: DownloadTaskTrait
where
Self::State: TorrentStateTrait,
Self::Id: TorrentHashTrait,
{
fn hash_info(&self) -> &str;
fn name(&self) -> Cow<'_, str> {
Cow::Borrowed(self.hash_info())
}
fn tags(&self) -> impl Iterator<Item = Cow<'_, str>>;
fn category(&self) -> Option<Cow<'_, str>>;
}
pub trait TorrentCreationTrait: DownloadCreationTrait {
fn save_path(&self) -> &Path;
fn save_path_mut(&mut self) -> &mut PathBuf;
fn sources_mut(&mut self) -> &mut Vec<HashTorrentSource>;
}
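
A tiny sketch (not part of this commit) of the name() default declared above: when a task exposes no explicit name, it falls back to its info hash. The trait and struct are simplified stand-ins.

use std::borrow::Cow;

trait TorrentTask {
    fn hash_info(&self) -> &str;
    // Same default as `TorrentTaskTrait::name` above: fall back to the hash.
    fn name(&self) -> Cow<'_, str> {
        Cow::Borrowed(self.hash_info())
    }
}

struct PlainTask {
    hash: String,
}

impl TorrentTask for PlainTask {
    fn hash_info(&self) -> &str {
        &self.hash
    }
}

fn main() {
    let task = PlainTask { hash: "abc123".into() };
    assert_eq!(task.name(), "abc123");
}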

View File

@ -1,297 +1,218 @@
use std::fmt::Debug;
use std::{
any::Any, borrow::Cow, fmt::Debug, hash::Hash, marker::PhantomData, ops::Deref, time::Duration,
vec::IntoIter,
};
use async_trait::async_trait;
use itertools::Itertools;
use lazy_static::lazy_static;
use librqbit_core::{
magnet::Magnet,
torrent_metainfo::{TorrentMetaV1Owned, torrent_from_bytes},
};
use quirks_path::{Path, PathBuf};
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::prelude::*;
use url::Url;
use super::{DownloaderError, QbitTorrent, QbitTorrentContent, errors::DownloadFetchSnafu};
use crate::fetch::{HttpClientTrait, fetch_bytes};
use super::DownloaderError;
pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
pub const MAGNET_SCHEMA: &str = "magnet";
pub trait DownloadStateTrait: Sized + Debug {}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum TorrentFilter {
All,
Downloading,
Completed,
Paused,
Active,
Inactive,
Resumed,
Stalled,
StalledUploading,
StalledDownloading,
Errored,
}
pub trait DownloadIdTrait: Hash + Sized + Clone + Send + Debug {}
lazy_static! {
static ref TORRENT_HASH_RE: Regex = Regex::new(r"[a-fA-F0-9]{40}").unwrap();
static ref TORRENT_EXT_RE: Regex = Regex::new(r"\.torrent$").unwrap();
}
pub trait DownloadTaskTrait: Sized + Send + Debug {
type State: DownloadStateTrait;
type Id: DownloadIdTrait;
#[derive(Clone, PartialEq, Eq)]
pub enum TorrentSource {
MagnetUrl {
url: Url,
hash: String,
},
TorrentUrl {
url: Url,
hash: String,
},
TorrentFile {
torrent: Vec<u8>,
hash: String,
name: Option<String>,
},
}
impl TorrentSource {
pub async fn parse<H: HttpClientTrait>(client: &H, url: &str) -> Result<Self, DownloaderError> {
let url = Url::parse(url)?;
let source = if url.scheme() == MAGNET_SCHEMA {
TorrentSource::from_magnet_url(url)?
} else if let Some(basename) = url
.clone()
.path_segments()
.and_then(|mut segments| segments.next_back())
{
if let (Some(match_hash), true) = (
TORRENT_HASH_RE.find(basename),
TORRENT_EXT_RE.is_match(basename),
) {
TorrentSource::from_torrent_url(url, match_hash.as_str().to_string())?
fn id(&self) -> &Self::Id;
fn into_id(self) -> Self::Id;
fn name(&self) -> Cow<'_, str>;
fn speed(&self) -> Option<u64>;
fn state(&self) -> &Self::State;
fn dl_bytes(&self) -> Option<u64>;
fn total_bytes(&self) -> Option<u64>;
fn left_bytes(&self) -> Option<u64> {
if let (Some(tt), Some(dl)) = (self.total_bytes(), self.dl_bytes()) {
tt.checked_sub(dl)
} else {
None
}
}
fn et(&self) -> Option<Duration>;
fn eta(&self) -> Option<Duration> {
if let (Some(left_bytes), Some(speed)) = (self.left_bytes(), self.speed()) {
if speed > 0 {
Some(Duration::from_secs_f64(left_bytes as f64 / speed as f64))
} else {
let contents = fetch_bytes(client, url)
.await
.boxed()
.context(DownloadFetchSnafu)?;
TorrentSource::from_torrent_file(contents.to_vec(), Some(basename.to_string()))?
None
}
} else {
let contents = fetch_bytes(client, url)
.await
.boxed()
.context(DownloadFetchSnafu)?;
TorrentSource::from_torrent_file(contents.to_vec(), None)?
};
Ok(source)
None
}
}
fn average_speed(&self) -> Option<f64> {
if let (Some(et), Some(dl_bytes)) = (self.et(), self.dl_bytes()) {
let secs = et.as_secs_f64();
pub fn from_torrent_file(file: Vec<u8>, name: Option<String>) -> Result<Self, DownloaderError> {
let torrent: TorrentMetaV1Owned =
torrent_from_bytes(&file).map_err(|_| DownloaderError::TorrentFileFormatError)?;
let hash = torrent.info_hash.as_string();
Ok(TorrentSource::TorrentFile {
torrent: file,
hash,
name,
})
}
pub fn from_magnet_url(url: Url) -> Result<Self, DownloaderError> {
if url.scheme() != MAGNET_SCHEMA {
Err(DownloaderError::DownloadSchemaError {
found: url.scheme().to_string(),
expected: MAGNET_SCHEMA.to_string(),
})
if secs > 0.0 {
Some(dl_bytes as f64 / secs)
} else {
None
}
} else {
let magnet =
Magnet::parse(url.as_str()).map_err(|_| DownloaderError::MagnetFormatError {
url: url.as_str().to_string(),
})?;
let hash = magnet
.as_id20()
.ok_or_else(|| DownloaderError::MagnetFormatError {
url: url.as_str().to_string(),
})?
.as_string();
Ok(TorrentSource::MagnetUrl { url, hash })
None
}
}
pub fn from_torrent_url(url: Url, hash: String) -> Result<Self, DownloaderError> {
Ok(TorrentSource::TorrentUrl { url, hash })
}
pub fn hash(&self) -> &str {
match self {
TorrentSource::MagnetUrl { hash, .. } => hash,
TorrentSource::TorrentUrl { hash, .. } => hash,
TorrentSource::TorrentFile { hash, .. } => hash,
}
}
}
impl Debug for TorrentSource {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
TorrentSource::MagnetUrl { url, .. } => {
write!(f, "MagnetUrl {{ url: {} }}", url.as_str())
fn progress(&self) -> Option<f32> {
if let (Some(dl), Some(tt)) = (self.dl_bytes(), self.total_bytes()) {
if dl > 0 {
if tt > 0 {
Some(dl as f32 / tt as f32)
} else {
None
}
} else {
Some(0.0)
}
TorrentSource::TorrentUrl { url, .. } => {
write!(f, "TorrentUrl {{ url: {} }}", url.as_str())
}
TorrentSource::TorrentFile { name, hash, .. } => write!(
f,
"TorrentFile {{ name: \"{}\", hash: \"{hash}\" }}",
name.as_deref().unwrap_or_default()
),
} else {
None
}
}
}
pub trait TorrentContent {
fn get_name(&self) -> &str;
fn get_all_size(&self) -> u64;
fn get_progress(&self) -> f64;
fn get_curr_size(&self) -> u64;
pub trait DownloadCreationTrait: Sized {
type Task: DownloadTaskTrait;
}
impl TorrentContent for QbitTorrentContent {
fn get_name(&self) -> &str {
self.name.as_str()
}
pub trait DownloadSelectorTrait: Sized + Any + Send {
type Id: DownloadIdTrait;
type Task: DownloadTaskTrait<Id = Self::Id>;
fn get_all_size(&self) -> u64 {
self.size
}
fn get_progress(&self) -> f64 {
self.progress
}
fn get_curr_size(&self) -> u64 {
u64::clamp(
f64::round(self.get_all_size() as f64 * self.get_progress()) as u64,
0,
self.get_all_size(),
)
fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
Err(self)
}
}
#[derive(Debug, Clone)]
pub enum Torrent {
Qbit {
torrent: QbitTorrent,
contents: Vec<QbitTorrentContent>,
},
pub trait DownloadIdSelectorTrait:
DownloadSelectorTrait
+ IntoIterator<Item = Self::Id>
+ FromIterator<Self::Id>
+ Into<Vec<Self::Id>>
+ From<Vec<Self::Id>>
{
fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
Ok(Vec::from_iter(self))
}
fn from_id(id: Self::Id) -> Self;
}
impl Torrent {
pub fn iter_files(&self) -> impl Iterator<Item = &dyn TorrentContent> {
match self {
Torrent::Qbit { contents, .. } => {
contents.iter().map(|item| item as &dyn TorrentContent)
}
#[derive(Debug)]
pub struct DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
pub ids: Vec<Task::Id>,
pub marker: PhantomData<Task>,
}
impl<Task> Deref for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
type Target = Vec<Task::Id>;
fn deref(&self) -> &Self::Target {
&self.ids
}
}
impl<Task> IntoIterator for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
type Item = Task::Id;
type IntoIter = IntoIter<Task::Id>;
fn into_iter(self) -> Self::IntoIter {
self.ids.into_iter()
}
}
impl<Task> FromIterator<Task::Id> for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
fn from_iter<T: IntoIterator<Item = Task::Id>>(iter: T) -> Self {
Self {
ids: Vec::from_iter(iter),
marker: PhantomData,
}
}
}
pub fn get_name(&self) -> Option<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.name.as_deref(),
impl<Task> DownloadSelectorTrait for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait + 'static,
{
type Id = Task::Id;
type Task = Task;
}
impl<Task> From<Vec<Task::Id>> for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait + 'static,
{
fn from(value: Vec<Task::Id>) -> Self {
Self {
ids: value,
marker: PhantomData,
}
}
}
pub fn get_hash(&self) -> Option<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.hash.as_deref(),
}
impl<Task> From<DownloadIdSelector<Task>> for Vec<Task::Id>
where
Task: DownloadTaskTrait + 'static,
{
fn from(value: DownloadIdSelector<Task>) -> Self {
value.ids
}
}
impl<Task> DownloadIdSelectorTrait for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait + 'static,
{
fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
Ok(self.ids)
}
pub fn get_save_path(&self) -> Option<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.save_path.as_deref(),
}
}
pub fn get_content_path(&self) -> Option<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.content_path.as_deref(),
}
}
pub fn get_tags(&self) -> Vec<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.tags.as_deref().map_or_else(Vec::new, |s| {
s.split(',')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect_vec()
}),
}
}
pub fn get_category(&self) -> Option<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.category.as_deref(),
fn from_id(id: Self::Id) -> Self {
Self {
ids: vec![id],
marker: PhantomData,
}
}
}
#[async_trait]
pub trait TorrentDownloader {
async fn get_torrents_info(
pub trait DownloaderTrait {
type State: DownloadStateTrait;
type Id: DownloadIdTrait;
type Task: DownloadTaskTrait<State = Self::State, Id = Self::Id>;
type Creation: DownloadCreationTrait<Task = Self::Task>;
type Selector: DownloadSelectorTrait<Task = Self::Task>;
async fn add_downloads(
&self,
status_filter: TorrentFilter,
category: Option<String>,
tag: Option<String>,
) -> Result<Vec<Torrent>, DownloaderError>;
async fn add_torrents(
creation: Self::Creation,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn pause_downloads(
&self,
source: TorrentSource,
save_path: String,
category: Option<&str>,
) -> Result<(), DownloaderError>;
async fn delete_torrents(&self, hashes: Vec<String>) -> Result<(), DownloaderError>;
async fn rename_torrent_file(
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn resume_downloads(
&self,
hash: &str,
old_path: &str,
new_path: &str,
) -> Result<(), DownloaderError>;
async fn move_torrents(
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn remove_downloads(
&self,
hashes: Vec<String>,
new_path: &str,
) -> Result<(), DownloaderError>;
async fn get_torrent_path(&self, hashes: String) -> Result<Option<String>, DownloaderError>;
async fn check_connection(&self) -> Result<(), DownloaderError>;
async fn set_torrents_category(
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn query_downloads(
&self,
hashes: Vec<String>,
category: &str,
) -> Result<(), DownloaderError>;
async fn add_torrent_tags(
&self,
hashes: Vec<String>,
tags: Vec<String>,
) -> Result<(), DownloaderError>;
async fn add_category(&self, category: &str) -> Result<(), DownloaderError>;
fn get_save_path(&self, sub_path: &Path) -> PathBuf;
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Task>, DownloaderError>;
}
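
As a quick check of the default metric helpers that DownloadTaskTrait gains in this rewrite (left_bytes, eta, progress), here is a standalone sketch (not part of this commit) with the same arithmetic; the struct and fields are illustrative only.

use std::time::Duration;

struct Task {
    dl_bytes: Option<u64>,
    total_bytes: Option<u64>,
    speed: Option<u64>, // bytes per second
}

impl Task {
    fn left_bytes(&self) -> Option<u64> {
        match (self.total_bytes, self.dl_bytes) {
            (Some(tt), Some(dl)) => tt.checked_sub(dl),
            _ => None,
        }
    }

    fn eta(&self) -> Option<Duration> {
        match (self.left_bytes(), self.speed) {
            (Some(left), Some(speed)) if speed > 0 => {
                Some(Duration::from_secs_f64(left as f64 / speed as f64))
            }
            _ => None,
        }
    }

    fn progress(&self) -> Option<f32> {
        match (self.dl_bytes, self.total_bytes) {
            (Some(dl), Some(tt)) if tt > 0 => Some(dl as f32 / tt as f32),
            (Some(0), _) => Some(0.0),
            _ => None,
        }
    }
}

fn main() {
    let task = Task {
        dl_bytes: Some(512),
        total_bytes: Some(2048),
        speed: Some(256),
    };
    assert_eq!(task.left_bytes(), Some(1536));
    assert_eq!(task.eta(), Some(Duration::from_secs(6)));
    assert_eq!(task.progress(), Some(0.25));
}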

View File

@ -2,40 +2,45 @@ use std::{borrow::Cow, time::Duration};
use snafu::prelude::*;
use crate::errors::OptionWhateverAsync;
use crate::errors::OptDynErr;
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum DownloaderError {
#[snafu(display("Invalid mime (expected {expected:?}, got {found:?})"))]
DownloadMimeError { expected: String, found: String },
#[snafu(display("Invalid url schema (expected {expected:?}, got {found:?})"))]
DownloadSchemaError { expected: String, found: String },
#[snafu(transparent)]
DownloadUrlParseError { source: url::ParseError },
#[snafu(display("Invalid url format: {reason}"))]
DownloadUrlFormatError { reason: Cow<'static, str> },
#[snafu(transparent)]
QBitAPIError { source: qbit_rs::Error },
#[snafu(transparent)]
DownloaderIOError { source: std::io::Error },
#[snafu(display("Timeout error (action = {action}, timeout = {timeout:?})"))]
DownloadTimeoutError {
action: Cow<'static, str>,
timeout: Duration,
},
#[snafu(display("Invalid torrent file format"))]
TorrentFileFormatError,
#[snafu(display("Invalid magnet format (url = {url})"))]
MagnetFormatError { url: String },
#[snafu(display("Invalid magnet format ({message})"))]
MagnetFormatError {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Invalid torrent meta format ({message})"))]
TorrentMetaError {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Failed to fetch: {source}"))]
DownloadFetchError {
#[snafu(source)]
source: Box<dyn snafu::Error + Send + Sync>,
url: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("{message}"))]
Whatever {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: OptionWhateverAsync,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
}
@ -45,14 +50,14 @@ impl snafu::FromString for DownloaderError {
fn without_source(message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::none(),
source: OptDynErr::none(),
}
}
fn with_source(source: Self::Source, message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::some(source),
source: OptDynErr::some(source),
}
}
}

View File

@ -1,14 +1,10 @@
pub mod bittorrent;
pub mod core;
pub mod errors;
pub mod qbit;
pub mod rqbit;
pub mod utils;
pub use core::{
Torrent, TorrentContent, TorrentDownloader, TorrentFilter, TorrentSource, BITTORRENT_MIME_TYPE,
MAGNET_SCHEMA,
};
pub use errors::DownloaderError;
pub use qbit::{
QBittorrentDownloader, QBittorrentDownloaderCreation, QbitTorrent, QbitTorrentContent,

File diff suppressed because it is too large

View File

@ -1,15 +1,15 @@
use std::fmt::Display;
#[derive(Debug)]
pub struct OptionWhateverAsync(Option<Box<dyn std::error::Error + Send + Sync>>);
pub struct OptDynErr(Option<Box<dyn std::error::Error + Send + Sync>>);
impl AsRef<dyn snafu::Error> for OptionWhateverAsync {
impl AsRef<dyn snafu::Error> for OptDynErr {
fn as_ref(&self) -> &(dyn snafu::Error + 'static) {
self
}
}
impl OptionWhateverAsync {
impl OptDynErr {
pub fn some_boxed<E: std::error::Error + Send + Sync + 'static>(e: E) -> Self {
Self(Some(Box::new(e)))
}
@ -23,7 +23,7 @@ impl OptionWhateverAsync {
}
}
impl Display for OptionWhateverAsync {
impl Display for OptDynErr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.0 {
Some(e) => e.fmt(f),
@ -32,7 +32,7 @@ impl Display for OptionWhateverAsync {
}
}
impl snafu::Error for OptionWhateverAsync {
impl snafu::Error for OptDynErr {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
@ -42,13 +42,13 @@ impl snafu::Error for OptionWhateverAsync {
}
}
impl From<Option<Box<dyn std::error::Error + Send + Sync>>> for OptionWhateverAsync {
impl From<Option<Box<dyn std::error::Error + Send + Sync>>> for OptDynErr {
fn from(value: Option<Box<dyn std::error::Error + Send + Sync>>) -> Self {
Self(value)
}
}
impl From<Box<dyn std::error::Error + Send + Sync>> for OptionWhateverAsync {
impl From<Box<dyn std::error::Error + Send + Sync>> for OptDynErr {
fn from(value: Box<dyn std::error::Error + Send + Sync>) -> Self {
Self::some(value)
}
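
For context, a std-only sketch (not part of this commit) of the idea behind OptDynErr: wrap an optional boxed error in a newtype so it can serve as an error source and still display something sensible when empty. The type name here is illustrative.

use std::{error::Error, fmt, fmt::Display};

#[derive(Debug)]
struct OptErr(Option<Box<dyn Error + Send + Sync>>);

impl Display for OptErr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.0 {
            Some(e) => Display::fmt(e, f),
            None => write!(f, "None"),
        }
    }
}

impl Error for OptErr {}

fn main() {
    let io_err = std::io::Error::new(std::io::ErrorKind::Other, "disk full");
    println!("{}", OptErr(Some(Box::new(io_err))));
    println!("{}", OptErr(None));
}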

View File

@ -0,0 +1,202 @@
use std::borrow::Cow;
use axum::{
Json,
response::{IntoResponse, Response},
};
use http::StatusCode;
use serde::{Deserialize, Deserializer, Serialize};
use snafu::Snafu;
use crate::{
auth::AuthError,
downloader::DownloaderError,
errors::{OptDynErr, response::StandardErrorResponse},
fetch::HttpClientError,
};
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RError {
#[snafu(transparent, context(false))]
FancyRegexError {
#[snafu(source(from(fancy_regex::Error, Box::new)))]
source: Box<fancy_regex::Error>,
},
#[snafu(transparent)]
RegexError { source: regex::Error },
#[snafu(transparent)]
InvalidMethodError { source: http::method::InvalidMethod },
#[snafu(transparent)]
InvalidHeaderNameError {
source: http::header::InvalidHeaderName,
},
#[snafu(transparent)]
TracingAppenderInitError {
source: tracing_appender::rolling::InitError,
},
#[snafu(transparent)]
GraphQLSchemaError {
source: async_graphql::dynamic::SchemaError,
},
#[snafu(transparent)]
AuthError { source: AuthError },
#[snafu(transparent)]
DownloadError { source: DownloaderError },
#[snafu(transparent)]
RSSError { source: rss::Error },
#[snafu(transparent)]
DotEnvError { source: dotenv::Error },
#[snafu(transparent)]
TeraError { source: tera::Error },
#[snafu(transparent)]
IOError { source: std::io::Error },
#[snafu(transparent)]
DbError { source: sea_orm::DbErr },
#[snafu(transparent)]
CookieParseError { source: cookie::ParseError },
#[snafu(transparent, context(false))]
FigmentError {
#[snafu(source(from(figment::Error, Box::new)))]
source: Box<figment::Error>,
},
#[snafu(transparent)]
SerdeJsonError { source: serde_json::Error },
#[snafu(transparent)]
ReqwestMiddlewareError { source: reqwest_middleware::Error },
#[snafu(transparent)]
ReqwestError { source: reqwest::Error },
#[snafu(transparent)]
ParseUrlError { source: url::ParseError },
#[snafu(display("{source}"), context(false))]
OpenDALError {
#[snafu(source(from(opendal::Error, Box::new)))]
source: Box<opendal::Error>,
},
#[snafu(transparent)]
InvalidHeaderValueError {
source: http::header::InvalidHeaderValue,
},
#[snafu(transparent)]
HttpClientError { source: HttpClientError },
#[cfg(all(feature = "testcontainers", test))]
#[snafu(transparent)]
TestcontainersError {
source: testcontainers::TestcontainersError,
},
#[snafu(display("Extract {desc} with mime error, expected {expected}, but got {found}"))]
MimeError {
desc: String,
expected: String,
found: String,
},
#[snafu(display("Invalid or unknown format in extracting mikan rss"))]
MikanRssInvalidFormatError,
#[snafu(display("Invalid field {field} in extracting mikan rss"))]
MikanRssInvalidFieldError {
field: Cow<'static, str>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Missing field {field} in extracting mikan meta"))]
MikanMetaMissingFieldError {
field: Cow<'static, str>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Model Entity {entity} not found"))]
ModelEntityNotFound { entity: Cow<'static, str> },
#[snafu(display("{message}"))]
Whatever {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
}
impl RError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
source: None.into(),
}
}
pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
Self::MikanRssInvalidFieldError {
field,
source: None.into(),
}
}
pub fn from_mikan_rss_invalid_field_and_source(
field: Cow<'static, str>,
source: impl std::error::Error + Send + Sync + 'static,
) -> Self {
Self::MikanRssInvalidFieldError {
field,
source: OptDynErr::some_boxed(source),
}
}
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
Self::DbError {
source: sea_orm::DbErr::RecordNotFound(detail.to_string()),
}
}
}
impl snafu::FromString for RError {
type Source = Box<dyn std::error::Error + Send + Sync>;
fn without_source(message: String) -> Self {
Self::Whatever {
message,
source: OptDynErr::none(),
}
}
fn with_source(source: Self::Source, message: String) -> Self {
Self::Whatever {
message,
source: OptDynErr::some(source),
}
}
}
impl IntoResponse for RError {
fn into_response(self) -> Response {
match self {
Self::AuthError { source: auth_error } => auth_error.into_response(),
err => (
StatusCode::INTERNAL_SERVER_ERROR,
Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
)
.into_response(),
}
}
}
impl Serialize for RError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
impl<'de> Deserialize<'de> for RError {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(Self::Whatever {
message: s,
source: None.into(),
})
}
}
pub type RResult<T> = Result<T, RError>;

View File

@ -0,0 +1,9 @@
pub trait RAnyhowResultExt<T>: snafu::ResultExt<T, anyhow::Error> {
fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>>;
}
impl<T> RAnyhowResultExt<T> for Result<T, anyhow::Error> {
fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>> {
self.map_err(|e| e.into())
}
}
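
A minimal usage sketch (not part of this commit) of what to_dyn_boxed does: convert an anyhow::Error into the boxed dyn Error that the snafu sources above expect. The free function below is illustrative; the crate exposes this as a method on Result via RAnyhowResultExt.

use anyhow::anyhow;

fn to_dyn_boxed<T>(
    res: Result<T, anyhow::Error>,
) -> Result<T, Box<dyn std::error::Error + Send + Sync>> {
    res.map_err(|e| e.into())
}

fn main() {
    let parsed: Result<u16, anyhow::Error> = "not-a-port"
        .parse::<u16>()
        .map_err(|e| anyhow!("invalid port: {e}"));
    match to_dyn_boxed(parsed) {
        Ok(port) => println!("port = {port}"),
        Err(err) => println!("boxed error: {err}"),
    }
}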

View File

@ -1,217 +1,9 @@
pub mod whatever;
use std::borrow::Cow;
pub mod alias;
pub mod app_error;
pub mod ext;
pub mod response;
use axum::{
Json,
response::{IntoResponse, Response},
};
use http::StatusCode;
use serde::{Deserialize, Deserializer, Serialize};
use snafu::prelude::*;
pub use whatever::OptionWhateverAsync;
use crate::{auth::AuthError, downloader::DownloaderError, fetch::HttpClientError};
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RError {
#[snafu(transparent, context(false))]
FancyRegexError {
#[snafu(source(from(fancy_regex::Error, Box::new)))]
source: Box<fancy_regex::Error>,
},
#[snafu(transparent)]
RegexError { source: regex::Error },
#[snafu(transparent)]
InvalidMethodError { source: http::method::InvalidMethod },
#[snafu(transparent)]
InvalidHeaderNameError {
source: http::header::InvalidHeaderName,
},
#[snafu(transparent)]
TracingAppenderInitError {
source: tracing_appender::rolling::InitError,
},
#[snafu(transparent)]
GraphQLSchemaError {
source: async_graphql::dynamic::SchemaError,
},
#[snafu(transparent)]
AuthError { source: AuthError },
#[snafu(transparent)]
DownloadError { source: DownloaderError },
#[snafu(transparent)]
RSSError { source: rss::Error },
#[snafu(transparent)]
DotEnvError { source: dotenv::Error },
#[snafu(transparent)]
TeraError { source: tera::Error },
#[snafu(transparent)]
IOError { source: std::io::Error },
#[snafu(transparent)]
DbError { source: sea_orm::DbErr },
#[snafu(transparent)]
CookieParseError { source: cookie::ParseError },
#[snafu(transparent, context(false))]
FigmentError {
#[snafu(source(from(figment::Error, Box::new)))]
source: Box<figment::Error>,
},
#[snafu(transparent)]
SerdeJsonError { source: serde_json::Error },
#[snafu(transparent)]
ReqwestMiddlewareError { source: reqwest_middleware::Error },
#[snafu(transparent)]
ReqwestError { source: reqwest::Error },
#[snafu(transparent)]
ParseUrlError { source: url::ParseError },
#[snafu(display("{source}"), context(false))]
OpenDALError {
#[snafu(source(from(opendal::Error, Box::new)))]
source: Box<opendal::Error>,
},
#[snafu(transparent)]
InvalidHeaderValueError {
source: http::header::InvalidHeaderValue,
},
#[snafu(transparent)]
HttpClientError { source: HttpClientError },
#[cfg(all(feature = "testcontainers", test))]
#[snafu(transparent)]
TestcontainersError {
source: testcontainers::TestcontainersError,
},
#[snafu(display("Extract {desc} with mime error, expected {expected}, but got {found}"))]
MimeError {
desc: String,
expected: String,
found: String,
},
#[snafu(display("Invalid or unknown format in extracting mikan rss"))]
MikanRssInvalidFormatError,
#[snafu(display("Invalid field {field} in extracting mikan rss"))]
MikanRssInvalidFieldError {
field: Cow<'static, str>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: OptionWhateverAsync,
},
#[snafu(display("Missing field {field} in extracting mikan meta"))]
MikanMetaMissingFieldError {
field: Cow<'static, str>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: OptionWhateverAsync,
},
#[snafu(display("Model Entity {entity} not found"))]
ModelEntityNotFound { entity: Cow<'static, str> },
#[snafu(display("{message}"))]
Whatever {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptionWhateverAsync::some)))]
source: OptionWhateverAsync,
},
}
impl RError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
source: None.into(),
}
}
pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
Self::MikanRssInvalidFieldError {
field,
source: None.into(),
}
}
pub fn from_mikan_rss_invalid_field_and_source(
field: Cow<'static, str>,
source: impl std::error::Error + Send + Sync + 'static,
) -> Self {
Self::MikanRssInvalidFieldError {
field,
source: OptionWhateverAsync::some_boxed(source),
}
}
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
Self::DbError {
source: sea_orm::DbErr::RecordNotFound(detail.to_string()),
}
}
}
impl snafu::FromString for RError {
type Source = Box<dyn std::error::Error + Send + Sync>;
fn without_source(message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::none(),
}
}
fn with_source(source: Self::Source, message: String) -> Self {
Self::Whatever {
message,
source: OptionWhateverAsync::some(source),
}
}
}
#[derive(Serialize, Debug, Clone)]
pub struct StandardErrorResponse<T = ()> {
pub success: bool,
pub message: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub result: Option<T>,
}
impl<T> From<String> for StandardErrorResponse<T> {
fn from(value: String) -> Self {
StandardErrorResponse {
success: false,
message: value,
result: None,
}
}
}
impl IntoResponse for RError {
fn into_response(self) -> Response {
match self {
Self::AuthError { source: auth_error } => auth_error.into_response(),
err => (
StatusCode::INTERNAL_SERVER_ERROR,
Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
)
.into_response(),
}
}
}
impl Serialize for RError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
impl<'de> Deserialize<'de> for RError {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(Self::Whatever {
message: s,
source: None.into(),
})
}
}
pub type RResult<T> = Result<T, RError>;
pub use alias::OptDynErr;
pub use app_error::*;
pub use ext::RAnyhowResultExt;
pub use response::StandardErrorResponse;

View File

@ -0,0 +1,19 @@
use serde::Serialize;
#[derive(Serialize, Debug, Clone)]
pub struct StandardErrorResponse<T = ()> {
pub success: bool,
pub message: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub result: Option<T>,
}
impl<T> From<String> for StandardErrorResponse<T> {
fn from(value: String) -> Self {
StandardErrorResponse {
success: false,
message: value,
result: None,
}
}
}
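
For reference, a sketch (not part of this commit) of the JSON body that RError::into_response produces by wrapping the error message in StandardErrorResponse. The struct and From impl are re-declared so the snippet stands alone, and the example message is hypothetical.

use serde::Serialize;

#[derive(Serialize, Debug, Clone)]
pub struct StandardErrorResponse<T = ()> {
    pub success: bool,
    pub message: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result: Option<T>,
}

impl<T> From<String> for StandardErrorResponse<T> {
    fn from(value: String) -> Self {
        StandardErrorResponse {
            success: false,
            message: value,
            result: None,
        }
    }
}

fn main() {
    // Hypothetical message, shaped like the ModelEntityNotFound display above.
    let body =
        StandardErrorResponse::<()>::from("Model Entity subscribers not found".to_string());
    // Prints: {"success":false,"message":"Model Entity subscribers not found"}
    println!("{}", serde_json::to_string(&body).unwrap());
}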

View File

@ -0,0 +1,2 @@
pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
pub const MAGNET_SCHEMA: &str = "magnet";

View File

@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};
use crate::{
errors::{RError, RResult},
errors::app_error::{RError, RResult},
extract::defs::SUBTITLE_LANG,
};

View File

@ -0,0 +1,6 @@
pub mod core;
pub mod extract;
pub use core::{BITTORRENT_MIME_TYPE, MAGNET_SCHEMA};
pub use extract::*;

View File

@ -6,7 +6,7 @@ use url::Url;
use super::MikanConfig;
use crate::{
errors::RError,
errors::app_error::RError,
fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth},
};

View File

@ -8,11 +8,13 @@ use tracing::instrument;
use url::Url;
use crate::{
downloader::core::BITTORRENT_MIME_TYPE,
errors::{RError, RResult},
extract::mikan::{
MikanClient,
web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
errors::app_error::{RError, RResult},
extract::{
bittorrent::BITTORRENT_MIME_TYPE,
mikan::{
MikanClient,
web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
},
},
fetch::bytes::fetch_bytes,
};
@ -338,11 +340,13 @@ mod tests {
use url::Url;
use crate::{
downloader::core::BITTORRENT_MIME_TYPE,
errors::RResult,
extract::mikan::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
extract_mikan_rss_channel_from_rss_link,
errors::app_error::RResult,
extract::{
bittorrent::BITTORRENT_MIME_TYPE,
mikan::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
extract_mikan_rss_channel_from_rss_link,
},
},
test_utils::mikan::build_testing_mikan_client,
};

View File

@ -15,7 +15,7 @@ use super::{
};
use crate::{
app::AppContextTrait,
errors::{RError, RResult},
errors::app_error::{RError, RResult},
extract::{
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str,

View File

@ -4,4 +4,4 @@ pub mod http;
pub mod media;
pub mod mikan;
pub mod rawname;
pub mod torrent;
pub mod bittorrent;

View File

@ -10,7 +10,7 @@ use serde::{Deserialize, Serialize};
use snafu::whatever;
use crate::{
errors::RResult,
errors::app_error::RResult,
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
};

View File

@ -1,3 +0,0 @@
mod parser;
pub use parser::*;

View File

@ -2,7 +2,7 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
use crate::errors::RError;
use crate::errors::app_error::RError;
pub async fn fetch_bytes<T: IntoUrl, H: HttpClientTrait>(
client: &H,

View File

@ -4,7 +4,7 @@ use cookie::Cookie;
use reqwest::{ClientBuilder, cookie::Jar};
use url::Url;
use crate::errors::RError;
use crate::errors::app_error::RError;
pub trait HttpClientSecrecyDataTrait {
fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {

View File

@ -1,7 +1,7 @@
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
use crate::errors::RError;
use crate::errors::app_error::RError;
pub async fn fetch_html<T: IntoUrl, H: HttpClientTrait>(
client: &H,

View File

@ -2,7 +2,7 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::{bytes::fetch_bytes, client::HttpClientTrait};
use crate::errors::RError;
use crate::errors::app_error::RError;
pub async fn fetch_image<T: IntoUrl, H: HttpClientTrait>(
client: &H,

View File

@ -2,7 +2,7 @@ use async_graphql::dynamic::Schema;
use sea_orm::DatabaseConnection;
use super::{config::GraphQLConfig, schema_root};
use crate::errors::RResult;
use crate::errors::app_error::RResult;
#[derive(Debug)]
pub struct GraphQLService {

View File

@ -8,6 +8,7 @@
let_chains,
error_generic_member_access
)]
#![feature(associated_type_defaults)]
pub mod app;
pub mod auth;

View File

@ -10,7 +10,7 @@ use tracing_subscriber::{
};
use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
use crate::errors::RResult;
use crate::errors::app_error::RResult;
// Function to initialize the logger based on the provided configuration
const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];

View File

@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use super::subscribers::{self, SEED_SUBSCRIBER};
use crate::{
app::AppContextTrait,
errors::{RError, RResult},
errors::app_error::{RError, RResult},
};
#[derive(

View File

@ -4,7 +4,7 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::O
use serde::{Deserialize, Serialize};
use super::subscription_bangumi;
use crate::{app::AppContextTrait, errors::RResult};
use crate::{app::AppContextTrait, errors::app_error::RResult};
#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,

View File

@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use super::{bangumi, query::InsertManyReturningExt, subscription_episode};
use crate::{
app::AppContextTrait,
errors::RResult,
errors::app_error::RResult,
extract::{
mikan::{MikanEpisodeMeta, build_mikan_episode_homepage},
rawname::parse_episode_meta_from_raw_name,

View File

@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::{RError, RResult},
errors::app_error::{RError, RResult},
};
pub const SEED_SUBSCRIBER: &str = "konobangu";

View File

@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize};
use super::{bangumi, episodes, query::filter_values_in};
use crate::{
app::AppContextTrait,
errors::RResult,
errors::app_error::RResult,
extract::{
mikan::{
build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link,

View File

@ -2,7 +2,7 @@ use async_trait::async_trait;
use sea_orm::{QuerySelect, entity::prelude::*};
use serde::{Deserialize, Serialize};
use crate::{app::AppContextTrait, errors::RResult};
use crate::{app::AppContextTrait, errors::app_error::RResult};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,

View File

@ -8,7 +8,7 @@ use url::Url;
use uuid::Uuid;
use super::StorageConfig;
use crate::errors::{RError, RResult};
use crate::errors::app_error::{RError, RResult};
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]

View File

@ -7,7 +7,7 @@ use tokio::sync::{RwLock, mpsc};
use crate::{
app::AppContextTrait,
errors::{RError, RResult},
errors::app_error::{RError, RResult},
models,
};

View File

@ -6,7 +6,7 @@ use url::Url;
use crate::{
app::AppContextTrait,
errors::RResult,
errors::app_error::RResult,
extract::mikan::{MikanAuthSecrecy, MikanBangumiMeta, web_extract},
tasks::core::{StandardStreamTaskReplayLayout, StreamTaskRunnerTrait},
};

View File

@ -1,4 +1,4 @@
use crate::{errors::RResult, fetch::HttpClient};
use crate::{errors::app_error::RResult, fetch::HttpClient};
pub fn build_testing_http_client() -> RResult<HttpClient> {
let mikan_client = HttpClient::default();

View File

@ -1,7 +1,7 @@
use reqwest::IntoUrl;
use crate::{
errors::RResult,
errors::app_error::RResult,
extract::mikan::{MikanClient, MikanConfig},
fetch::HttpClientConfig,
};

View File

@ -7,7 +7,7 @@ use super::core::Controller;
use crate::{
app::AppContextTrait,
auth::{AuthUserInfo, header_www_authenticate_middleware},
errors::RResult,
errors::app_error::RResult,
};
pub const CONTROLLER_PREFIX: &str = "/api/graphql";

View File

@ -3,7 +3,7 @@ use std::sync::Arc;
use axum::{Json, Router, extract::State, routing::get};
use serde::Serialize;
use crate::{app::AppContextTrait, errors::RResult, web::controller::Controller};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::controller::Controller};
pub const CONTROLLER_PREFIX: &str = "/api/metadata";

View File

@ -16,7 +16,7 @@ use crate::{
errors::OidcRequestRedirectUriSnafu,
oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest},
},
errors::RResult,
errors::app_error::RResult,
extract::http::ForwardedRelatedInfo,
models::auth::AuthType,
};

View File

@ -12,7 +12,7 @@ use http::StatusCode;
use serde::{Deserialize, Serialize};
use tower_http::catch_panic::CatchPanicLayer;
use crate::{app::AppContextTrait, errors::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CatchPanic {

View File

@ -11,7 +11,7 @@ use axum::Router;
use serde::{Deserialize, Serialize};
use tower_http::compression::CompressionLayer;
use crate::{app::AppContextTrait, errors::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Compression {

View File

@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use tower_http::cors::{self, Any};
use crate::{app::AppContextTrait, web::middleware::MiddlewareLayer, errors::RResult};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
/// CORS middleware configuration
#[derive(Debug, Clone, Deserialize, Serialize)]
@ -157,7 +157,10 @@ impl MiddlewareLayer for Cors {
}
/// Applies the CORS middleware layer to the Axum router.
fn apply(&self, app: Router<Arc<dyn AppContextTrait>>) -> RResult<Router<Arc<dyn AppContextTrait>>> {
fn apply(
&self,
app: Router<Arc<dyn AppContextTrait>>,
) -> RResult<Router<Arc<dyn AppContextTrait>>> {
Ok(app.layer(self.cors()?))
}
}

View File

@ -25,7 +25,7 @@ use futures_util::future::BoxFuture;
use serde::{Deserialize, Serialize};
use tower::{Layer, Service};
use crate::{app::AppContextTrait, errors::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Etag {

View File

@ -8,7 +8,7 @@ use axum::{
};
use serde::{Deserialize, Serialize};
use crate::errors::RError as Error;
use crate::errors::app_error::RError as Error;
#[derive(Debug, Deserialize, Serialize)]
pub struct Format(pub RespondTo);

View File

@ -15,7 +15,7 @@ use tower_http::{add_extension::AddExtensionLayer, trace::TraceLayer};
use crate::{
app::{AppContextTrait, Environment},
errors::RResult,
errors::app_error::RResult,
web::middleware::{MiddlewareLayer, request_id::LocoRequestId},
};

View File

@ -14,7 +14,7 @@ use std::sync::Arc;
use axum::Router;
use serde::{Deserialize, Serialize};
use crate::{app::AppContextTrait, errors::RResult};
use crate::{app::AppContextTrait, errors::app_error::RResult};
/// Trait representing the behavior of middleware components in the application.
/// When implementing a new middleware, make sure to go over this checklist:

View File

@ -33,7 +33,7 @@ use tracing::error;
use crate::{
app::AppContextTrait,
errors::{RError, RResult},
errors::app_error::{RError, RResult},
web::middleware::MiddlewareLayer,
};

View File

@ -11,13 +11,15 @@ use regex::Regex;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{app::AppContextTrait, errors::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, web::middleware::MiddlewareLayer};
const X_REQUEST_ID: &str = "x-request-id";
const MAX_LEN: usize = 255;
use std::sync::{Arc, OnceLock};
use crate::errors::app_error::RResult;
static ID_CLEANUP: OnceLock<Regex> = OnceLock::new();
fn get_id_cleanup() -> &'static Regex {

View File

@ -21,7 +21,7 @@ use serde_json::{self, json};
use snafu::whatever;
use tower::{Layer, Service};
use crate::{app::AppContextTrait, errors::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
static PRESETS: OnceLock<HashMap<String, BTreeMap<String, String>>> = OnceLock::new();
fn get_presets() -> &'static HashMap<String, BTreeMap<String, String>> {

View File

@ -16,7 +16,7 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use tower_http::timeout::TimeoutLayer;
use crate::{app::AppContextTrait, errors::RResult, web::middleware::MiddlewareLayer};
use crate::{app::AppContextTrait, errors::app_error::RResult, web::middleware::MiddlewareLayer};
/// Timeout middleware configuration
#[derive(Debug, Clone, Deserialize, Serialize)]

View File

@ -1 +0,0 @@
{"rustc_fingerprint":12631718921104437280,"outputs":{"9566862992471862046":{"success":true,"status":"","code":0,"stdout":"___.exe\nlib___.rlib\n___.dll\n___.dll\n___.lib\n___.dll\nC:\\code\\scoop\\persist\\rustup\\.rustup\\toolchains\\nightly-x86_64-pc-windows-msvc\npacked\n___\ndebug_assertions\nfmt_debug=\"full\"\noverflow_checks\npanic=\"unwind\"\nproc_macro\nrelocation_model=\"pic\"\ntarget_abi=\"\"\ntarget_arch=\"x86_64\"\ntarget_endian=\"little\"\ntarget_env=\"msvc\"\ntarget_family=\"windows\"\ntarget_feature=\"cmpxchg16b\"\ntarget_feature=\"fxsr\"\ntarget_feature=\"lahfsahf\"\ntarget_feature=\"sse\"\ntarget_feature=\"sse2\"\ntarget_feature=\"sse3\"\ntarget_feature=\"x87\"\ntarget_has_atomic\ntarget_has_atomic=\"128\"\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_has_atomic_equal_alignment=\"128\"\ntarget_has_atomic_equal_alignment=\"16\"\ntarget_has_atomic_equal_alignment=\"32\"\ntarget_has_atomic_equal_alignment=\"64\"\ntarget_has_atomic_equal_alignment=\"8\"\ntarget_has_atomic_equal_alignment=\"ptr\"\ntarget_has_atomic_load_store\ntarget_has_atomic_load_store=\"128\"\ntarget_has_atomic_load_store=\"16\"\ntarget_has_atomic_load_store=\"32\"\ntarget_has_atomic_load_store=\"64\"\ntarget_has_atomic_load_store=\"8\"\ntarget_has_atomic_load_store=\"ptr\"\ntarget_os=\"windows\"\ntarget_pointer_width=\"64\"\ntarget_thread_local\ntarget_vendor=\"pc\"\nub_checks\nwindows\n","stderr":""},"5537925964935398022":{"success":true,"status":"","code":0,"stdout":"rustc 1.86.0-nightly (43ca9d18e 2025-02-08)\nbinary: rustc\ncommit-hash: 43ca9d18e333797f0aa3b525501a7cec8d61a96b\ncommit-date: 2025-02-08\nhost: x86_64-pc-windows-msvc\nrelease: 1.86.0-nightly\nLLVM version: 19.1.7\n","stderr":""}},"successes":{}}

View File

@ -1,3 +0,0 @@
Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by cargo.
# For information about cache directory tags see https://bford.info/cachedir/

View File

@ -84,7 +84,7 @@ async function generateMockFile(filePath: string, size: number) {
await fsp.truncate(filePath, size);
}
// Generate torrent file
// Generate bittorrent file
function generateTorrent(folderPath: string, torrentPath: string) {
return new Promise<void>((resolve, reject) => {
createTorrent(
@ -113,7 +113,7 @@ function generateTorrent(folderPath: string, torrentPath: string) {
});
}
// Add torrent and seed
// Add bittorrent and seed
async function seedTorrent(torrentPath: string): Promise<Torrent> {
return new Promise((resolve) => {
const torrent = webTorrent.seed(

pnpm-lock.yaml generated
View File

@ -5515,7 +5515,7 @@ packages:
engines: {node: '>=6'}
terminal-link@2.1.1:
resolution: {integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==}
resolution: {integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bittorrent+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==}
engines: {node: '>=8'}
terser-webpack-plugin@5.3.13:

View File

@ -6,3 +6,4 @@ use_small_heuristics = "Default"
group_imports = "StdExternalCrate"
format_strings = true
tab_spaces = 4
reorder_imports = true