Compare commits

...

16 Commits

245 changed files with 13297 additions and 6992 deletions

@@ -1,5 +1,4 @@
[alias]
recorder = "run -p recorder --bin recorder_cli -- --environment development"
recorder-playground = "run -p recorder --example playground -- --environment development"
[build]

@@ -1,107 +0,0 @@
name: CI
on:
push:
branches:
- master
- main
pull_request:
env:
RUST_TOOLCHAIN: stable
TOOLCHAIN_PROFILE: minimal
jobs:
rustfmt:
name: Check Style
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout the code
uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1
with:
profile: ${{ env.TOOLCHAIN_PROFILE }}
toolchain: ${{ env.RUST_TOOLCHAIN }}
override: true
components: rustfmt
- name: Run cargo fmt
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
clippy:
name: Run Clippy
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout the code
uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1
with:
profile: ${{ env.TOOLCHAIN_PROFILE }}
toolchain: ${{ env.RUST_TOOLCHAIN }}
override: true
- name: Setup Rust cache
uses: Swatinem/rust-cache@v2
- name: Run cargo clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: --all-features -- -D warnings -W clippy::pedantic -W clippy::nursery -W rust-2018-idioms
test:
name: Run Tests
runs-on: ubuntu-latest
permissions:
contents: read
services:
redis:
image: redis
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- "6379:6379"
postgres:
image: postgres
env:
POSTGRES_DB: postgress_test
POSTGRES_USER: postgress
POSTGRES_PASSWORD: postgress
ports:
- "5432:5432"
# Set health checks to wait until postgres has started
options: --health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Checkout the code
uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1
with:
profile: ${{ env.TOOLCHAIN_PROFILE }}
toolchain: ${{ env.RUST_TOOLCHAIN }}
override: true
- name: Setup Rust cache
uses: Swatinem/rust-cache@v2
- name: Run cargo test
uses: actions-rs/cargo@v1
with:
command: test
args: --all-features --all
env:
REDIS_URL: redis://localhost:${{job.services.redis.ports[6379]}}
DATABASE_URL: postgres://postgress:postgress@localhost:5432/postgress_test

@@ -0,0 +1,36 @@
name: Testing Torrents Container
on:
workflow_dispatch:
env:
REGISTRY: ghcr.io
ORG: dumtruck
PROJECT: konobangu
jobs:
build-container:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to GHCR
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: 'packages/testing-torrents'
file: 'packages/testing-torrents/Dockerfile'
push: true
tags: '${{ env.REGISTRY }}/${{ env.ORG }}/${{ env.PROJECT }}-testing-torrents:latest'
cache-from: type=gha
cache-to: type=gha,mode=max

.gitignore
@@ -158,11 +158,8 @@ web_modules/
.yarn-integrity
# Local env files
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
.env.*.local
# parcel-bundler cache (https://parceljs.org/)
.cache
@@ -219,6 +216,7 @@ index.d.ts.map
# Added by cargo
/target
/ide-target
!/examples/.gitkeep
/.env
/.env.bk

@@ -27,7 +27,6 @@
},
"emmet.showExpandedAbbreviation": "never",
"prettier.enable": false,
"tailwindCSS.experimental.configFile": "./packages/tailwind-config/config.ts",
"typescript.tsdk": "node_modules/typescript/lib",
"rust-analyzer.cargo.features": ["testcontainers"]
}

Cargo.lock
File diff suppressed because it is too large

@@ -5,8 +5,6 @@
}
```
^https://konobangu.com/api/playground*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5002/api/playground$1
^wss://konobangu.com/api/playground*** reqHeaders://{x-forwarded.json} ws://127.0.0.1:5002/api/playground$1
^https://konobangu.com/api*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/api$1 excludeFilter://^^https://konobangu.com/api/playground***
^https://konobangu.com*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000$1 excludeFilter://^https://konobangu.com/api***
^https://konobangu.com/api*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/api$1
^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000/$1 excludeFilter://^https://konobangu.com/api***
^wss://konobangu.com/*** reqHeaders://{x-forwarded.json} ws://127.0.0.1:5000/$1 excludeFilter://^wss://konobangu.com/api

apps/recorder/.env
@@ -0,0 +1,8 @@
AUTH_TYPE = "basic" # or oidc
BASIC_USER = "konobangu"
BASIC_PASSWORD = "konobangu"
# OIDC_ISSUER="https://auth.logto.io/oidc"
# OIDC_API_AUDIENCE = "https://konobangu.com/api"
# OIDC_CLIENT_ID = "client_id"
# OIDC_CLIENT_SECRET = "client_secret" # optional
# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"

@@ -25,3 +25,4 @@ Cargo.lock
# Dist
node_modules
dist/
temp/

@@ -22,6 +22,7 @@ testcontainers = [
]
[dependencies]
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
@@ -48,7 +49,6 @@ reqwest = { version = "0.12", default-features = false, features = [
"rustls-tls",
"cookies",
] }
thiserror = "2"
rss = "2"
bytes = "1.9"
itertools = "0.14"
@@ -65,7 +65,7 @@ once_cell = "1.20.2"
reqwest-middleware = "0.4.0"
reqwest-retry = "0.7.0"
reqwest-tracing = "0.5.5"
scraper = "0.22.0"
scraper = "0.23"
leaky-bucket = "1.1.2"
serde_with = "3"
jwt-authorizer = "0.15.0"
@@ -83,12 +83,10 @@ testcontainers = { version = "0.23.3", features = [
"reusable-containers",
], optional = true }
testcontainers-modules = { version = "0.11.4", optional = true }
color-eyre = "0.6"
log = "0.4.22"
anyhow = "1.0.95"
bollard = { version = "0.18", optional = true }
async-graphql = { version = "7.0.15", features = [] }
async-graphql-axum = "7.0.15"
async-graphql = { version = "7", features = [] }
async-graphql-axum = "7"
fastrand = "2.3.0"
seaography = { version = "1.1" }
quirks_path = "0.1.1"
@@ -105,7 +103,6 @@ tower-http = { version = "0.6", features = [
"set-header",
"compression-full",
] }
serde_yaml = "0.9.34"
tera = "1.20.0"
openidconnect = { version = "4", features = ["rustls-tls"] }
http-cache-reqwest = { version = "0.15", features = [
@@ -120,8 +117,6 @@ http-cache = { version = "0.20.0", features = [
], default-features = false }
http-cache-semantics = "2.1.0"
dotenv = "0.15.0"
nom = "8.0.0"
secrecy = { version = "0.10.3", features = ["serde"] }
http = "1.2.0"
cookie = "0.18.1"
async-stream = "0.3.6"
@@ -130,9 +125,17 @@ tracing-appender = "0.2.3"
clap = "4.5.31"
futures-util = "0.3.31"
ipnetwork = "0.21.1"
librqbit = "8.0.0"
typed-builder = "0.21.0"
snafu = { version = "0.8.5", features = ["futures"] }
anyhow = "1.0.97"
serde_yaml = "0.9.34"
merge-struct = "0.1.0"
serde-value = "0.7.0"
[dev-dependencies]
serial_test = "3"
insta = { version = "1", features = ["redactions", "yaml", "filters"] }
mockito = "1.6.1"
rstest = "0.24.0"
rstest = "0.25"
ctor = "0.4.0"

@@ -1,14 +1,7 @@
use recorder::errors::app_error::RResult;
// #![allow(unused_imports)]
// use color_eyre::eyre::Context;
// use itertools::Itertools;
// use loco_rs::{
// app::Hooks,
// boot::{BootResult, StartMode},
// environment::Environment,
// prelude::AppContext as LocoContext,
// };
// use recorder::{
// app::{App1, AppContext},
// app::{AppContext, AppContextTrait},
// errors::RResult,
// migrations::Migrator,
// models::{
@@ -16,10 +9,10 @@
// subscriptions::{self, SubscriptionCreateFromRssDto},
// },
// };
// use sea_orm::ColumnTrait;
// use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
// use sea_orm_migration::MigratorTrait;
// async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> RResult<()> {
// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RResult<()> {
// let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
// // let rss_link =
@@ -27,7 +20,7 @@
// let subscription = if let Some(subscription) =
// subscriptions::Entity::find()
// .filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
// .one(&ctx.db)
// .one(ctx.db())
// .await?
// {
// subscription
@@ -50,19 +43,14 @@
// Ok(())
// }
// async fn init() -> RResult<LocoContext> {
// let ctx = loco_rs::cli::playground::<App1>().await?;
// let BootResult {
// app_context: ctx, ..
// } = loco_rs::boot::run_app::<App1>(&StartMode::ServerOnly, ctx).await?;
// Migrator::up(&ctx.db, None).await?;
// Ok(ctx)
// }
// #[tokio::main]
// async fn main() -> color_eyre::eyre::Result<()> {
// async fn main() -> RResult<()> {
// pull_mikan_bangumi_rss(&ctx).await?;
// Ok(())
// }
fn main() {}
#[tokio::main]
async fn main() -> RResult<()> {
Ok(())
}

@@ -1,24 +0,0 @@
{
"name": "recorder",
"version": "1.0.0",
"type": "module",
"scripts": {
"dev": "rsbuild dev",
"build": "rsbuild build",
"preview": "rsbuild preview"
},
"dependencies": {
"@graphiql/react": "^0.28.2",
"@graphiql/toolkit": "^0.11.1",
"graphiql": "^3.8.3",
"graphql-ws": "^6.0.4",
"observable-hooks": "^4.2.4",
"react": "^19.0.0",
"react-dom": "^19.0.0"
},
"devDependencies": {
"@rsbuild/plugin-react": "^1.1.1",
"@types/react": "^19.0.7",
"@types/react-dom": "^19.0.3"
}
}

@@ -1,5 +0,0 @@
export default {
plugins: {
'@tailwindcss/postcss': {},
},
};

@@ -1,7 +0,0 @@
<html>
<body>
not found :-(
</body>
</html>

Binary file not shown.


@@ -1,75 +0,0 @@
import { defineConfig } from '@rsbuild/core';
import { pluginReact } from '@rsbuild/plugin-react';
import { TanStackRouterRspack } from '@tanstack/router-plugin/rspack';
export default defineConfig({
plugins: [pluginReact()],
html: {
favicon: './public/assets/favicon.ico',
// tags: [
// {
// tag: 'script',
// attrs: { src: 'https://cdn.tailwindcss.com' },
// },
// ],
},
tools: {
rspack: {
plugins: [TanStackRouterRspack()],
},
},
source: {
entry: {
index: './src/main.tsx',
},
define: {
'process.env.AUTH_TYPE': JSON.stringify(process.env.AUTH_TYPE),
'process.env.OIDC_CLIENT_ID': JSON.stringify(process.env.OIDC_CLIENT_ID),
'process.env.OIDC_CLIENT_SECRET': JSON.stringify(
process.env.OIDC_CLIENT_SECRET
),
'process.env.OIDC_ISSUER': JSON.stringify(process.env.OIDC_ISSUER),
'process.env.OIDC_AUDIENCE': JSON.stringify(process.env.OIDC_AUDIENCE),
'process.env.OIDC_EXTRA_SCOPES': JSON.stringify(
process.env.OIDC_EXTRA_SCOPES
),
},
},
dev: {
client: {
path: '/api/playground/rsbuild-hmr',
},
setupMiddlewares: [
(middlewares) => {
middlewares.unshift((req, res, next) => {
if (process.env.AUTH_TYPE === 'basic') {
res.setHeader('WWW-Authenticate', 'Basic realm="konobangu"');
const authorization =
(req.headers.authorization || '').split(' ')[1] || '';
const [user, password] = Buffer.from(authorization, 'base64')
.toString()
.split(':');
if (
user !== process.env.BASIC_USER ||
password !== process.env.BASIC_PASSWORD
) {
res.statusCode = 401;
res.write('Unauthorized');
res.end();
return;
}
}
next();
});
return middlewares;
},
],
},
server: {
base: '/api/playground/',
host: '0.0.0.0',
port: 5002,
},
});

@@ -3,7 +3,7 @@ use std::sync::Arc;
use clap::{Parser, command};
use super::{AppContext, core::App, env::Environment};
use crate::{app::config::AppConfig, errors::RResult};
use crate::{app::config::AppConfig, errors::app_error::RResult};
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
@@ -29,7 +29,7 @@ pub struct AppBuilder {
dotenv_file: Option<String>,
config_file: Option<String>,
working_dir: String,
enviornment: Environment,
environment: Environment,
}
impl AppBuilder {
@@ -70,21 +70,21 @@ impl AppBuilder {
pub async fn build(self) -> RResult<App> {
AppConfig::load_dotenv(
&self.enviornment,
&self.environment,
&self.working_dir,
self.dotenv_file.as_deref(),
)
.await?;
let config = AppConfig::load_config(
&self.enviornment,
&self.environment,
&self.working_dir,
self.config_file.as_deref(),
)
.await?;
let app_context = Arc::new(
AppContext::new(self.enviornment.clone(), config, self.working_dir.clone()).await?,
AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?,
);
Ok(App {
@@ -101,7 +101,7 @@ impl AppBuilder {
pub fn environment(self, environment: Environment) -> Self {
let mut ret = self;
ret.enviornment = environment;
ret.environment = environment;
ret
}
@@ -130,7 +130,7 @@ impl AppBuilder {
impl Default for AppBuilder {
fn default() -> Self {
Self {
enviornment: Environment::Production,
environment: Environment::Production,
dotenv_file: None,
config_file: None,
working_dir: String::from("."),

@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
use super::env::Environment;
use crate::{
auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::RResult,
auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::app_error::RResult,
extract::mikan::MikanConfig, graphql::GraphQLConfig, logger::LoggerConfig,
storage::StorageConfig, web::WebServerConfig,
};

@@ -1,21 +1,34 @@
use super::{Environment, config::AppConfig};
use crate::{
auth::AuthService, cache::CacheService, database::DatabaseService, errors::RResult,
auth::AuthService, cache::CacheService, database::DatabaseService, errors::app_error::RResult,
extract::mikan::MikanClient, graphql::GraphQLService, logger::LoggerService,
storage::StorageService,
};
pub trait AppContextTrait: Send + Sync {
fn logger(&self) -> &LoggerService;
fn db(&self) -> &DatabaseService;
fn config(&self) -> &AppConfig;
fn cache(&self) -> &CacheService;
fn mikan(&self) -> &MikanClient;
fn auth(&self) -> &AuthService;
fn graphql(&self) -> &GraphQLService;
fn storage(&self) -> &StorageService;
fn working_dir(&self) -> &String;
fn environment(&self) -> &Environment;
}
pub struct AppContext {
pub logger: LoggerService,
pub db: DatabaseService,
pub config: AppConfig,
pub cache: CacheService,
pub mikan: MikanClient,
pub auth: AuthService,
pub graphql: GraphQLService,
pub storage: StorageService,
pub working_dir: String,
pub environment: Environment,
logger: LoggerService,
db: DatabaseService,
config: AppConfig,
cache: CacheService,
mikan: MikanClient,
auth: AuthService,
graphql: GraphQLService,
storage: StorageService,
working_dir: String,
environment: Environment,
}
impl AppContext {
@@ -48,3 +61,35 @@ impl AppContext {
})
}
}
impl AppContextTrait for AppContext {
fn logger(&self) -> &LoggerService {
&self.logger
}
fn db(&self) -> &DatabaseService {
&self.db
}
fn config(&self) -> &AppConfig {
&self.config
}
fn cache(&self) -> &CacheService {
&self.cache
}
fn mikan(&self) -> &MikanClient {
&self.mikan
}
fn auth(&self) -> &AuthService {
&self.auth
}
fn graphql(&self) -> &GraphQLService {
&self.graphql
}
fn storage(&self) -> &StorageService {
&self.storage
}
fn working_dir(&self) -> &String {
&self.working_dir
}
fn environment(&self) -> &Environment {
&self.environment
}
}

@@ -4,9 +4,9 @@ use axum::Router;
use futures::try_join;
use tokio::signal;
use super::{builder::AppBuilder, context::AppContext};
use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{
errors::RResult,
errors::app_error::RResult,
web::{
controller::{self, core::ControllerTrait},
middleware::default_middleware_stack,
@@ -14,7 +14,7 @@ use crate::{
};
pub struct App {
pub context: Arc<AppContext>,
pub context: Arc<dyn AppContextTrait>,
pub builder: AppBuilder,
}
@@ -25,21 +25,22 @@ impl App {
pub async fn serve(&self) -> RResult<()> {
let context = &self.context;
let config = &context.config;
let config = context.config();
let listener = tokio::net::TcpListener::bind(&format!(
"{}:{}",
config.server.binding, config.server.port
))
.await?;
let mut router = Router::<Arc<AppContext>>::new();
let mut router = Router::<Arc<dyn AppContextTrait>>::new();
let (graphqlc, oidcc) = try_join!(
let (graphql_c, oidc_c, metadata_c) = try_join!(
controller::graphql::create(context.clone()),
controller::oidc::create(context.clone()),
controller::metadata::create(context.clone())
)?;
for c in [graphqlc, oidcc] {
for c in [graphql_c, oidc_c, metadata_c] {
router = c.apply_to(router);
}

@@ -8,5 +8,5 @@ pub use core::App;
pub use builder::AppBuilder;
pub use config::AppConfig;
pub use context::AppContext;
pub use context::{AppContext, AppContextTrait};
pub use env::Environment;

@@ -9,7 +9,7 @@ use super::{
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{
app::AppContext,
app::AppContextTrait,
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
};
@@ -64,7 +64,7 @@ pub struct BasicAuthService {
impl AuthServiceTrait for BasicAuthService {
async fn extract_user_info(
&self,
ctx: &AppContext,
ctx: &dyn AppContextTrait,
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> {
if let Ok(AuthBasic {

@@ -1,5 +1,3 @@
use std::fmt;
use async_graphql::dynamic::ResolverContext;
use axum::{
Json,
@@ -11,72 +9,86 @@ use openidconnect::{
StandardErrorResponse, core::CoreErrorResponseType,
};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use snafu::prelude::*;
use crate::{fetch::HttpClientError, models::auth::AuthType};
#[derive(Debug, Error)]
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum AuthError {
#[error("Not support auth method")]
#[snafu(display("Not support auth method"))]
NotSupportAuthMethod {
supported: Vec<AuthType>,
current: AuthType,
},
#[error("Failed to find auth record")]
#[snafu(display("Failed to find auth record"))]
FindAuthRecordError,
#[error("Invalid credentials")]
#[snafu(display("Invalid credentials"))]
BasicInvalidCredentials,
#[error(transparent)]
OidcInitError(#[from] jwt_authorizer::error::InitError),
#[error("Invalid oidc provider meta client error: {0}")]
OidcProviderHttpClientError(HttpClientError),
#[error(transparent)]
OidcProviderMetaError(#[from] openidconnect::DiscoveryError<HttpClientError>),
#[error("Invalid oidc provider URL: {0}")]
OidcProviderUrlError(url::ParseError),
#[error("Invalid oidc redirect URI: {0}")]
OidcRequestRedirectUriError(url::ParseError),
#[error("Oidc request session not found or expired")]
#[snafu(transparent)]
OidcInitError {
source: jwt_authorizer::error::InitError,
},
#[snafu(display("Invalid oidc provider meta client error: {source}"))]
OidcProviderHttpClientError { source: HttpClientError },
#[snafu(transparent)]
OidcProviderMetaError {
source: openidconnect::DiscoveryError<HttpClientError>,
},
#[snafu(display("Invalid oidc provider URL: {source}"))]
OidcProviderUrlError { source: url::ParseError },
#[snafu(display("Invalid oidc redirect URI: {source}"))]
OidcRequestRedirectUriError {
#[snafu(source)]
source: url::ParseError,
},
#[snafu(display("Oidc request session not found or expired"))]
OidcCallbackRecordNotFoundOrExpiredError,
#[error("Invalid oidc request callback nonce")]
#[snafu(display("Invalid oidc request callback nonce"))]
OidcInvalidNonceError,
#[error("Invalid oidc request callback state")]
#[snafu(display("Invalid oidc request callback state"))]
OidcInvalidStateError,
#[error("Invalid oidc request callback code")]
#[snafu(display("Invalid oidc request callback code"))]
OidcInvalidCodeError,
#[error(transparent)]
OidcCallbackTokenConfigrationError(#[from] ConfigurationError),
#[error(transparent)]
OidcRequestTokenError(
#[from] RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
),
#[error("Invalid oidc id token")]
#[snafu(transparent)]
OidcCallbackTokenConfigurationError { source: ConfigurationError },
#[snafu(transparent)]
OidcRequestTokenError {
source: RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
},
#[snafu(display("Invalid oidc id token"))]
OidcInvalidIdTokenError,
#[error("Invalid oidc access token")]
#[snafu(display("Invalid oidc access token"))]
OidcInvalidAccessTokenError,
#[error(transparent)]
OidcSignatureVerificationError(#[from] SignatureVerificationError),
#[error(transparent)]
OidcSigningError(#[from] SigningError),
#[error(transparent)]
OidcJwtAuthError(#[from] jwt_authorizer::AuthError),
#[error("Extra scopes {expected} do not match found scopes {found}")]
#[snafu(transparent)]
OidcSignatureVerificationError { source: SignatureVerificationError },
#[snafu(transparent)]
OidcSigningError { source: SigningError },
#[snafu(transparent)]
OidcJwtAuthError { source: jwt_authorizer::AuthError },
#[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
OidcExtraScopesMatchError { expected: String, found: String },
#[error("Extra claim {key} does not match expected value {expected}, found {found}")]
#[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
OidcExtraClaimMatchError {
key: String,
expected: String,
found: String,
},
#[error("Extra claim {0} missing")]
OidcExtraClaimMissingError(String),
#[error("Audience {0} missing")]
OidcAudMissingError(String),
#[error("Subject missing")]
#[snafu(display("Extra claim {claim} missing"))]
OidcExtraClaimMissingError { claim: String },
#[snafu(display("Audience {aud} missing"))]
OidcAudMissingError { aud: String },
#[snafu(display("Subject missing"))]
OidcSubMissingError,
#[error(fmt = display_graphql_permission_error)]
#[snafu(display(
"GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
(if field.is_empty() { "" } else { "." }),
(if column.is_empty() { "" } else { "." }),
source.message
))]
GraphQLPermissionError {
inner_error: async_graphql::Error,
#[snafu(source(false))]
source: Box<async_graphql::Error>,
field: String,
column: String,
context_path: String,
@@ -85,13 +97,13 @@ pub enum AuthError {
impl AuthError {
pub fn from_graphql_subscribe_id_guard(
inner_error: async_graphql::Error,
source: async_graphql::Error,
context: &ResolverContext,
field_name: &str,
column_name: &str,
) -> AuthError {
AuthError::GraphQLPermissionError {
inner_error,
source: Box::new(source),
field: field_name.to_string(),
column: column_name.to_string(),
context_path: context
@@ -103,39 +115,27 @@ impl AuthError {
}
}
fn display_graphql_permission_error(
inner_error: &async_graphql::Error,
field: &String,
column: &String,
context_path: &String,
formatter: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(
formatter,
"GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
(if field.is_empty() { "" } else { "." }),
(if column.is_empty() { "" } else { "." }),
inner_error.message
)
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AuthErrorBody {
pub error_code: i32,
pub error_msg: String,
pub struct AuthErrorResponse {
pub success: bool,
pub message: String,
}
impl From<AuthError> for AuthErrorBody {
impl From<AuthError> for AuthErrorResponse {
fn from(value: AuthError) -> Self {
AuthErrorBody {
error_code: StatusCode::UNAUTHORIZED.as_u16() as i32,
error_msg: value.to_string(),
AuthErrorResponse {
success: false,
message: value.to_string(),
}
}
}
impl IntoResponse for AuthError {
fn into_response(self) -> Response {
(StatusCode::UNAUTHORIZED, Json(AuthErrorBody::from(self))).into_response()
(
StatusCode::UNAUTHORIZED,
Json(AuthErrorResponse::from(self)),
)
.into_response()
}
}

@@ -1,41 +0,0 @@
import type { Observable } from '@graphiql/toolkit';
import { InjectionToken, inject } from '@outposts/injection-js';
import {
type AuthFeature,
EventTypes,
PublicEventsService,
} from 'oidc-client-rx';
import { filter, shareReplay } from 'rxjs';
export type CheckAuthResultEventType =
| { type: EventTypes.CheckingAuthFinished }
| {
type: EventTypes.CheckingAuthFinishedWithError;
value: string;
};
export const CHECK_AUTH_RESULT_EVENT = new InjectionToken<
Observable<CheckAuthResultEventType>
>('CHECK_AUTH_RESULT_EVENT');
export function withCheckAuthResultEvent(): AuthFeature {
return {
ɵproviders: [
{
provide: CHECK_AUTH_RESULT_EVENT,
useFactory: () => {
const publishEventService = inject(PublicEventsService);
return publishEventService.registerForEvents().pipe(
filter(
(e) =>
e.type === EventTypes.CheckingAuthFinishedWithError ||
e.type === EventTypes.CheckingAuthFinished
),
shareReplay(1)
);
},
deps: [PublicEventsService],
},
],
};
}

@@ -1,52 +0,0 @@
import { useObservableEagerState, useObservableState } from 'observable-hooks';
import {
InjectorContextVoidInjector,
useOidcClient,
} from 'oidc-client-rx/adapters/react';
import { useMemo } from 'react';
import { NEVER, type Observable, of } from 'rxjs';
import { isBasicAuth } from './config';
import {
CHECK_AUTH_RESULT_EVENT,
type CheckAuthResultEventType,
} from './event';
const BASIC_AUTH_IS_AUTHENTICATED$ = of({
isAuthenticated: true,
allConfigsAuthenticated: [],
});
const BASIC_AUTH_USER_DATA$ = of({
userData: {},
allUserData: [],
});
export function useAuth() {
const { oidcSecurityService, injector } = isBasicAuth
? { oidcSecurityService: undefined, injector: InjectorContextVoidInjector }
: // biome-ignore lint/correctness/useHookAtTopLevel: <explanation>
useOidcClient();
const { isAuthenticated } = useObservableEagerState(
oidcSecurityService?.isAuthenticated$ ?? BASIC_AUTH_IS_AUTHENTICATED$
);
const { userData } = useObservableEagerState(
oidcSecurityService?.userData$ ?? BASIC_AUTH_USER_DATA$
);
const checkAuthResultEvent = useObservableState(
useMemo(
() => (isBasicAuth ? NEVER : injector.get(CHECK_AUTH_RESULT_EVENT)),
[injector]
) as Observable<CheckAuthResultEventType>
);
return {
oidcSecurityService,
isAuthenticated,
userData,
injector,
checkAuthResultEvent,
};
}

@@ -7,18 +7,21 @@ use axum::{
response::{IntoResponse, Response},
};
use crate::{app::AppContext, auth::AuthServiceTrait};
use crate::{app::AppContextTrait, auth::AuthServiceTrait};
pub async fn header_www_authenticate_middleware(
State(ctx): State<Arc<AppContext>>,
State(ctx): State<Arc<dyn AppContextTrait>>,
request: Request,
next: Next,
) -> Response {
let auth_service = &ctx.auth;
let auth_service = ctx.auth();
let (mut parts, body) = request.into_parts();
let mut response = match auth_service.extract_user_info(&ctx, &mut parts).await {
let mut response = match auth_service
.extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
.await
{
Ok(auth_user_info) => {
let mut request = Request::from_parts(parts, body);
request.extensions_mut().insert(auth_user_info);

@@ -16,14 +16,17 @@ use openidconnect::{
use sea_orm::DbErr;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use snafu::ResultExt;
use url::Url;
use super::{
config::OidcAuthConfig,
errors::AuthError,
errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{app::AppContext, errors::RError, fetch::HttpClient, models::auth::AuthType};
use crate::{
app::AppContextTrait, errors::app_error::RError, fetch::HttpClient, models::auth::AuthType,
};
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OidcAuthClaims {
@@ -125,13 +128,13 @@ impl OidcAuthService {
redirect_uri: &str,
) -> Result<OidcAuthRequest, AuthError> {
let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).map_err(AuthError::OidcProviderUrlError)?,
IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
&self.oidc_provider_client,
)
.await?;
let redirect_uri = RedirectUrl::new(redirect_uri.to_string())
.map_err(AuthError::OidcRequestRedirectUriError)?;
let redirect_uri =
RedirectUrl::new(redirect_uri.to_string()).context(OidcRequestRedirectUriSnafu)?;
let oidc_client = CoreClient::from_provider_metadata(
provider_metadata,
@@ -207,7 +210,7 @@ impl OidcAuthService {
let request_cache = self.load_authorization_request(&csrf_token).await?;
let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(self.config.issuer.clone()).map_err(AuthError::OidcProviderUrlError)?,
IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
&self.oidc_provider_client,
)
.await?;
@@ -261,13 +264,14 @@
impl AuthServiceTrait for OidcAuthService {
async fn extract_user_info(
&self,
ctx: &AppContext,
ctx: &dyn AppContextTrait,
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> {
let config = &self.config;
let token = self.api_authorizer.extract_token(&request.headers).ok_or(
AuthError::OidcJwtAuthError(jwt_authorizer::AuthError::MissingToken()),
)?;
let token = self
.api_authorizer
.extract_token(&request.headers)
.ok_or(jwt_authorizer::AuthError::MissingToken())?;
let token_data = self.api_authorizer.check_auth(&token).await?;
let claims = token_data.claims;
@@ -277,7 +281,9 @@ impl AuthServiceTrait for OidcAuthService {
return Err(AuthError::OidcSubMissingError);
};
if !claims.contains_audience(&config.audience) {
return Err(AuthError::OidcAudMissingError(config.audience.clone()));
return Err(AuthError::OidcAudMissingError {
aud: config.audience.clone(),
});
}
if let Some(expected_scopes) = config.extra_scopes.as_ref() {
let found_scopes = claims.scopes().collect::<HashSet<_>>();
@@ -293,7 +299,7 @@ impl AuthServiceTrait for OidcAuthService {
}
if let Some(key) = config.extra_claim_key.as_ref() {
if !claims.has_claim(key) {
return Err(AuthError::OidcExtraClaimMissingError(key.clone()));
return Err(AuthError::OidcExtraClaimMissingError { claim: key.clone() });
}
if let Some(value) = config.extra_claim_value.as_ref() {
if claims.get_claim(key).is_none_or(|v| &v != value) {
@@ -306,9 +312,9 @@ impl AuthServiceTrait for OidcAuthService {
}
}
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(RError::DbError(DbErr::RecordNotFound(..))) => {
crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
}
Err(RError::DbError {
source: DbErr::RecordNotFound(..),
}) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
r => r,
}
.map_err(|_| AuthError::FindAuthRecordError)?;

@@ -1,4 +1,4 @@
use std::time::Duration;
use std::{sync::Arc, time::Duration};
use async_trait::async_trait;
use axum::{
@@ -9,15 +9,16 @@ use axum::{
use jwt_authorizer::{JwtAuthorizer, Validation};
use moka::future::Cache;
use reqwest::header::HeaderValue;
use snafu::prelude::*;
use super::{
AuthConfig,
basic::BasicAuthService,
errors::AuthError,
errors::{AuthError, OidcProviderHttpClientSnafu},
oidc::{OidcAuthClaims, OidcAuthService},
};
use crate::{
app::AppContext,
app::AppContextTrait,
fetch::{
HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
@@ -31,17 +32,17 @@ pub struct AuthUserInfo {
pub auth_type: AuthType,
}
impl FromRequestParts<AppContext> for AuthUserInfo {
impl FromRequestParts<Arc<dyn AppContextTrait>> for AuthUserInfo {
type Rejection = Response;
async fn from_request_parts(
parts: &mut Parts,
state: &AppContext,
state: &Arc<dyn AppContextTrait>,
) -> Result<Self, Self::Rejection> {
let auth_service = &state.auth;
let auth_service = state.auth();
auth_service
.extract_user_info(state, parts)
.extract_user_info(state.as_ref(), parts)
.await
.map_err(|err| err.into_response())
}
@@ -51,7 +52,7 @@ impl FromRequestParts<AppContext> for AuthUserInfo {
pub trait AuthServiceTrait {
async fn extract_user_info(
&self,
ctx: &AppContext,
ctx: &dyn AppContextTrait,
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError>;
fn www_authenticate_header_value(&self) -> Option<HeaderValue>;
@@ -59,14 +60,14 @@ pub trait AuthServiceTrait {
}
pub enum AuthService {
Basic(BasicAuthService),
Oidc(OidcAuthService),
Basic(Box<BasicAuthService>),
Oidc(Box<OidcAuthService>),
}
impl AuthService {
pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> {
let result = match config {
AuthConfig::Basic(config) => AuthService::Basic(BasicAuthService { config }),
AuthConfig::Basic(config) => AuthService::Basic(Box::new(BasicAuthService { config })),
AuthConfig::Oidc(config) => {
let validation = Validation::new()
.iss(&[&config.issuer])
@@ -78,14 +79,14 @@ impl AuthService {
cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
..Default::default()
})
.map_err(AuthError::OidcProviderHttpClientError)?;
.context(OidcProviderHttpClientSnafu)?;
let api_authorizer = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer)
.validation(validation)
.build()
.await?;
AuthService::Oidc(OidcAuthService {
AuthService::Oidc(Box::new(OidcAuthService {
config,
api_authorizer,
oidc_provider_client,
@@ -93,7 +94,7 @@ impl AuthService {
.time_to_live(Duration::from_mins(5))
.name("oidc_request_cache")
.build(),
})
}))
}
};
Ok(result)
@@ -104,7 +105,7 @@ impl AuthService {
impl AuthServiceTrait for AuthService {
async fn extract_user_info(
&self,
ctx: &AppContext,
ctx: &dyn AppContextTrait,
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> {
match self {

@@ -1,10 +1,7 @@
use color_eyre::{self, eyre};
use recorder::app::AppBuilder;
use recorder::{app::AppBuilder, errors::app_error::RResult};
#[tokio::main]
async fn main() -> eyre::Result<()> {
color_eyre::install()?;
async fn main() -> RResult<()> {
let builder = AppBuilder::from_main_cli(None).await?;
let app = builder.build().await?;

@@ -1,5 +1,5 @@
use super::CacheConfig;
use crate::errors::RResult;
use crate::errors::app_error::RResult;
pub struct CacheService {}

@@ -7,7 +7,7 @@ use sea_orm::{
use sea_orm_migration::MigratorTrait;
use super::DatabaseConfig;
use crate::{errors::RResult, migrations::Migrator};
use crate::{errors::app_error::RResult, migrations::Migrator};
pub struct DatabaseService {
connection: DatabaseConnection,

@@ -0,0 +1,74 @@
use async_trait::async_trait;
use crate::downloader::{
DownloaderError,
bittorrent::task::{
TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait, TorrentTaskTrait,
},
core::{DownloadIdSelectorTrait, DownloadSelectorTrait, DownloadTaskTrait, DownloaderTrait},
};
#[async_trait]
pub trait TorrentDownloaderTrait: DownloaderTrait
where
Self::State: TorrentStateTrait,
Self::Id: TorrentHashTrait,
Self::Task: TorrentTaskTrait<State = Self::State, Id = Self::Id>,
Self::Creation: TorrentCreationTrait<Task = Self::Task>,
Self::Selector: DownloadSelectorTrait<Task = Self::Task, Id = Self::Id>,
{
type IdSelector: DownloadIdSelectorTrait<Task = Self::Task, Id = Self::Id>;
async fn pause_downloads(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.pause_torrents(hashes).await
}
async fn resume_downloads(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.resume_torrents(hashes).await
}
async fn remove_downloads(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
self.remove_torrents(hashes).await
}
async fn query_torrent_hashes(
&self,
selector: Self::Selector,
) -> Result<Self::IdSelector, DownloaderError> {
let hashes = match selector.try_into_ids_only() {
Ok(hashes) => Self::IdSelector::from_iter(hashes),
Err(selector) => {
let tasks = self.query_downloads(selector).await?;
Self::IdSelector::from_iter(tasks.into_iter().map(|s| s.into_id()))
}
};
Ok(hashes)
}
async fn pause_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError>;
async fn resume_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError>;
async fn remove_torrents(
&self,
hashes: Self::IdSelector,
) -> Result<Self::IdSelector, DownloaderError>;
}

@@ -0,0 +1,3 @@
pub mod downloader;
pub mod source;
pub mod task;

@@ -0,0 +1,228 @@
use std::{
borrow::Cow,
fmt::{Debug, Formatter},
};
use bytes::Bytes;
use librqbit_core::{magnet::Magnet, torrent_metainfo, torrent_metainfo::TorrentMetaV1Owned};
use snafu::ResultExt;
use url::Url;
use crate::{
downloader::errors::{
DownloadFetchSnafu, DownloaderError, MagnetFormatSnafu, TorrentMetaSnafu,
},
errors::RAnyhowResultExt,
extract::bittorrent::core::MAGNET_SCHEMA,
fetch::{bytes::fetch_bytes, client::core::HttpClientTrait},
};
pub trait HashTorrentSourceTrait: Sized {
fn hash_info(&self) -> Cow<'_, str>;
}
pub struct MagnetUrlSource {
pub magnet: Magnet,
pub url: String,
}
impl MagnetUrlSource {
pub fn from_url(url: String) -> Result<Self, DownloaderError> {
let magnet = Magnet::parse(&url)
.to_dyn_boxed()
.context(MagnetFormatSnafu {
message: url.clone(),
})?;
Ok(Self { magnet, url })
}
}
impl HashTorrentSourceTrait for MagnetUrlSource {
fn hash_info(&self) -> Cow<'_, str> {
let hash_info = self
.magnet
.as_id32()
.map(|s| s.as_string())
.or_else(|| self.magnet.as_id20().map(|s| s.as_string()))
.unwrap_or_else(|| unreachable!("hash of magnet must existed"));
hash_info.into()
}
}
impl Debug for MagnetUrlSource {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("MagnetUrlSource")
.field("url", &self.url)
.finish()
}
}
impl Clone for MagnetUrlSource {
fn clone(&self) -> Self {
Self {
magnet: Magnet::parse(&self.url).unwrap(),
url: self.url.clone(),
}
}
}
impl PartialEq for MagnetUrlSource {
fn eq(&self, other: &Self) -> bool {
self.url == other.url
}
}
impl Eq for MagnetUrlSource {}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TorrentUrlSource {
pub url: String,
}
impl TorrentUrlSource {
pub fn from_url(url: String) -> Result<Self, DownloaderError> {
Ok(Self { url })
}
}
#[derive(Clone)]
pub struct TorrentFileSource {
pub url: Option<String>,
pub payload: Bytes,
pub meta: TorrentMetaV1Owned,
pub filename: String,
}
impl TorrentFileSource {
pub fn from_bytes(
filename: String,
bytes: Bytes,
url: Option<String>,
) -> Result<Self, DownloaderError> {
let meta = torrent_metainfo::torrent_from_bytes(bytes.as_ref())
.to_dyn_boxed()
.with_context(|_| TorrentMetaSnafu {
message: format!(
"filename = {}, url = {}",
filename,
url.as_deref().unwrap_or_default()
),
})?
.to_owned();
Ok(TorrentFileSource {
url,
payload: bytes,
meta,
filename,
})
}
pub async fn from_url_and_http_client(
client: &impl HttpClientTrait,
url: String,
) -> Result<TorrentFileSource, DownloaderError> {
let payload = fetch_bytes(client, &url)
.await
.boxed()
.with_context(|_| DownloadFetchSnafu { url: url.clone() })?;
let filename = Url::parse(&url)
.boxed()
.and_then(|s| {
s.path_segments()
.and_then(|mut p| p.next_back())
.map(String::from)
.ok_or_else(|| anyhow::anyhow!("invalid url"))
.to_dyn_boxed()
})
.with_context(|_| DownloadFetchSnafu { url: url.clone() })?;
Self::from_bytes(filename, payload, Some(url))
}
}
impl HashTorrentSourceTrait for TorrentFileSource {
fn hash_info(&self) -> Cow<'_, str> {
self.meta.info_hash.as_string().into()
}
}
impl Debug for TorrentFileSource {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("TorrentFileSource")
.field("hash", &self.meta.info_hash.as_string())
.finish()
}
}
#[derive(Clone, Debug)]
pub enum UrlTorrentSource {
MagnetUrl(MagnetUrlSource),
TorrentUrl(TorrentUrlSource),
}
impl UrlTorrentSource {
pub fn from_url(url: String) -> Result<Self, DownloaderError> {
let url_ = Url::parse(&url)?;
let source = if url_.scheme() == MAGNET_SCHEMA {
Self::from_magnet_url(url)?
} else {
Self::from_torrent_url(url)?
};
Ok(source)
}
pub fn from_magnet_url(url: String) -> Result<Self, DownloaderError> {
let magnet_source = MagnetUrlSource::from_url(url)?;
Ok(Self::MagnetUrl(magnet_source))
}
pub fn from_torrent_url(url: String) -> Result<Self, DownloaderError> {
let torrent_source = TorrentUrlSource::from_url(url)?;
Ok(Self::TorrentUrl(torrent_source))
}
}
#[derive(Debug, Clone)]
pub enum HashTorrentSource {
MagnetUrl(MagnetUrlSource),
TorrentFile(TorrentFileSource),
}
impl HashTorrentSource {
pub async fn from_url_and_http_client(
client: &impl HttpClientTrait,
url: String,
) -> Result<Self, DownloaderError> {
let url_ = Url::parse(&url)?;
let source = if url_.scheme() == MAGNET_SCHEMA {
Self::from_magnet_url(url)?
} else {
Self::from_torrent_url_and_http_client(client, url).await?
};
Ok(source)
}
pub fn from_magnet_url(url: String) -> Result<Self, DownloaderError> {
let magnet_source = MagnetUrlSource::from_url(url)?;
Ok(Self::MagnetUrl(magnet_source))
}
pub async fn from_torrent_url_and_http_client(
client: &impl HttpClientTrait,
url: String,
) -> Result<Self, DownloaderError> {
let torrent_source = TorrentFileSource::from_url_and_http_client(client, url).await?;
Ok(Self::TorrentFile(torrent_source))
}
}
impl HashTorrentSourceTrait for HashTorrentSource {
fn hash_info(&self) -> Cow<'_, str> {
match self {
HashTorrentSource::MagnetUrl(m) => m.hash_info(),
HashTorrentSource::TorrentFile(t) => t.hash_info(),
}
}
}

@@ -0,0 +1,37 @@
use std::{borrow::Cow, hash::Hash};
use quirks_path::{Path, PathBuf};
use crate::downloader::{
bittorrent::source::HashTorrentSource,
core::{DownloadCreationTrait, DownloadIdTrait, DownloadStateTrait, DownloadTaskTrait},
};
pub const TORRENT_TAG_NAME: &str = "konobangu";
pub trait TorrentHashTrait: DownloadIdTrait + Send + Hash {}
pub trait TorrentStateTrait: DownloadStateTrait {}
pub trait TorrentTaskTrait: DownloadTaskTrait
where
Self::State: TorrentStateTrait,
Self::Id: TorrentHashTrait,
{
fn hash_info(&self) -> &str;
fn name(&self) -> Cow<'_, str> {
Cow::Borrowed(self.hash_info())
}
fn tags(&self) -> impl Iterator<Item = Cow<'_, str>>;
fn category(&self) -> Option<Cow<'_, str>>;
}
pub trait TorrentCreationTrait: DownloadCreationTrait {
fn save_path(&self) -> &Path;
fn save_path_mut(&mut self) -> &mut PathBuf;
fn sources_mut(&mut self) -> &mut Vec<HashTorrentSource>;
}

@@ -0,0 +1,218 @@
use std::{
any::Any, borrow::Cow, fmt::Debug, hash::Hash, marker::PhantomData, ops::Deref, time::Duration,
vec::IntoIter,
};
use async_trait::async_trait;
use super::DownloaderError;
pub trait DownloadStateTrait: Sized + Debug {}
pub trait DownloadIdTrait: Hash + Sized + Clone + Send + Debug {}
pub trait DownloadTaskTrait: Sized + Send + Debug {
type State: DownloadStateTrait;
type Id: DownloadIdTrait;
fn id(&self) -> &Self::Id;
fn into_id(self) -> Self::Id;
fn name(&self) -> Cow<'_, str>;
fn speed(&self) -> Option<u64>;
fn state(&self) -> &Self::State;
fn dl_bytes(&self) -> Option<u64>;
fn total_bytes(&self) -> Option<u64>;
fn left_bytes(&self) -> Option<u64> {
if let (Some(tt), Some(dl)) = (self.total_bytes(), self.dl_bytes()) {
tt.checked_sub(dl)
} else {
None
}
}
fn et(&self) -> Option<Duration>;
fn eta(&self) -> Option<Duration> {
if let (Some(left_bytes), Some(speed)) = (self.left_bytes(), self.speed()) {
if speed > 0 {
Some(Duration::from_secs_f64(left_bytes as f64 / speed as f64))
} else {
None
}
} else {
None
}
}
fn average_speed(&self) -> Option<f64> {
if let (Some(et), Some(dl_bytes)) = (self.et(), self.dl_bytes()) {
let secs = et.as_secs_f64();
if secs > 0.0 {
Some(dl_bytes as f64 / secs)
} else {
None
}
} else {
None
}
}
fn progress(&self) -> Option<f32> {
if let (Some(dl), Some(tt)) = (self.dl_bytes(), self.total_bytes()) {
if dl > 0 {
if tt > 0 {
Some(dl as f32 / tt as f32)
} else {
None
}
} else {
Some(0.0)
}
} else {
None
}
}
}
pub trait DownloadCreationTrait: Sized {
type Task: DownloadTaskTrait;
}
pub trait DownloadSelectorTrait: Sized + Any + Send {
type Id: DownloadIdTrait;
type Task: DownloadTaskTrait<Id = Self::Id>;
fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
Err(self)
}
}
pub trait DownloadIdSelectorTrait:
DownloadSelectorTrait
+ IntoIterator<Item = Self::Id>
+ FromIterator<Self::Id>
+ Into<Vec<Self::Id>>
+ From<Vec<Self::Id>>
{
fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
Ok(Vec::from_iter(self))
}
fn from_id(id: Self::Id) -> Self;
}
#[derive(Debug)]
pub struct DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
pub ids: Vec<Task::Id>,
pub marker: PhantomData<Task>,
}
impl<Task> Deref for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
type Target = Vec<Task::Id>;
fn deref(&self) -> &Self::Target {
&self.ids
}
}
impl<Task> IntoIterator for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
type Item = Task::Id;
type IntoIter = IntoIter<Task::Id>;
fn into_iter(self) -> Self::IntoIter {
self.ids.into_iter()
}
}
impl<Task> FromIterator<Task::Id> for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait,
{
fn from_iter<T: IntoIterator<Item = Task::Id>>(iter: T) -> Self {
Self {
ids: Vec::from_iter(iter),
marker: PhantomData,
}
}
}
impl<Task> DownloadSelectorTrait for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait + 'static,
{
type Id = Task::Id;
type Task = Task;
}
impl<Task> From<Vec<Task::Id>> for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait + 'static,
{
fn from(value: Vec<Task::Id>) -> Self {
Self {
ids: value,
marker: PhantomData,
}
}
}
impl<Task> From<DownloadIdSelector<Task>> for Vec<Task::Id>
where
Task: DownloadTaskTrait + 'static,
{
fn from(value: DownloadIdSelector<Task>) -> Self {
value.ids
}
}
impl<Task> DownloadIdSelectorTrait for DownloadIdSelector<Task>
where
Task: DownloadTaskTrait + 'static,
{
fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
Ok(self.ids)
}
fn from_id(id: Self::Id) -> Self {
Self {
ids: vec![id],
marker: PhantomData,
}
}
}
#[async_trait]
pub trait DownloaderTrait {
type State: DownloadStateTrait;
type Id: DownloadIdTrait;
type Task: DownloadTaskTrait<State = Self::State, Id = Self::Id>;
type Creation: DownloadCreationTrait<Task = Self::Task>;
type Selector: DownloadSelectorTrait<Task = Self::Task>;
async fn add_downloads(
&self,
creation: Self::Creation,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn pause_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn resume_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn remove_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
async fn query_downloads(
&self,
selector: Self::Selector,
) -> Result<impl IntoIterator<Item = Self::Task>, DownloaderError>;
}

@@ -0,0 +1,63 @@
use std::{borrow::Cow, time::Duration};
use snafu::prelude::*;
use crate::errors::OptDynErr;
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum DownloaderError {
#[snafu(transparent)]
DownloadUrlParseError { source: url::ParseError },
#[snafu(transparent)]
QBitAPIError { source: qbit_rs::Error },
#[snafu(transparent)]
DownloaderIOError { source: std::io::Error },
#[snafu(display("Timeout error (action = {action}, timeout = {timeout:?})"))]
DownloadTimeoutError {
action: Cow<'static, str>,
timeout: Duration,
},
#[snafu(display("Invalid magnet format ({message})"))]
MagnetFormatError {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Invalid torrent meta format ({message})"))]
TorrentMetaError {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Failed to fetch: {source}"))]
DownloadFetchError {
url: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("{message}"))]
Whatever {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
}
impl snafu::FromString for DownloaderError {
type Source = Box<dyn std::error::Error + Send + Sync>;
fn without_source(message: String) -> Self {
Self::Whatever {
message,
source: OptDynErr::none(),
}
}
fn with_source(source: Self::Source, message: String) -> Self {
Self::Whatever {
message,
source: OptDynErr::some(source),
}
}
}

@@ -0,0 +1,12 @@
pub mod bittorrent;
pub mod core;
pub mod errors;
pub mod qbit;
pub mod rqbit;
pub mod utils;
pub use errors::DownloaderError;
pub use qbit::{
QBittorrentDownloader, QBittorrentDownloaderCreation, QbitTorrent, QbitTorrentContent,
QbitTorrentFile, QbitTorrentFilter, QbitTorrentSource,
};

File diff suppressed because it is too large

@@ -0,0 +1 @@

@@ -1 +0,0 @@
/// <reference types="@rsbuild/core/types" />

@@ -0,0 +1,55 @@
use std::fmt::Display;
#[derive(Debug)]
pub struct OptDynErr(Option<Box<dyn std::error::Error + Send + Sync>>);
impl AsRef<dyn snafu::Error> for OptDynErr {
fn as_ref(&self) -> &(dyn snafu::Error + 'static) {
self
}
}
impl OptDynErr {
pub fn some_boxed<E: std::error::Error + Send + Sync + 'static>(e: E) -> Self {
Self(Some(Box::new(e)))
}
pub fn some(e: Box<dyn std::error::Error + Send + Sync>) -> Self {
Self(Some(e))
}
pub fn none() -> Self {
Self(None)
}
}
impl Display for OptDynErr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.0 {
Some(e) => e.fmt(f),
None => write!(f, "None"),
}
}
}
impl snafu::Error for OptDynErr {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
fn cause(&self) -> Option<&dyn std::error::Error> {
self.source()
}
}
impl From<Option<Box<dyn std::error::Error + Send + Sync>>> for OptDynErr {
fn from(value: Option<Box<dyn std::error::Error + Send + Sync>>) -> Self {
Self(value)
}
}
impl From<Box<dyn std::error::Error + Send + Sync>> for OptDynErr {
fn from(value: Box<dyn std::error::Error + Send + Sync>) -> Self {
Self::some(value)
}
}

@@ -0,0 +1,202 @@
use std::borrow::Cow;
use axum::{
Json,
response::{IntoResponse, Response},
};
use http::StatusCode;
use serde::{Deserialize, Deserializer, Serialize};
use snafu::Snafu;
use crate::{
auth::AuthError,
downloader::DownloaderError,
errors::{OptDynErr, response::StandardErrorResponse},
fetch::HttpClientError,
};
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RError {
#[snafu(transparent, context(false))]
FancyRegexError {
#[snafu(source(from(fancy_regex::Error, Box::new)))]
source: Box<fancy_regex::Error>,
},
#[snafu(transparent)]
RegexError { source: regex::Error },
#[snafu(transparent)]
InvalidMethodError { source: http::method::InvalidMethod },
#[snafu(transparent)]
InvalidHeaderNameError {
source: http::header::InvalidHeaderName,
},
#[snafu(transparent)]
TracingAppenderInitError {
source: tracing_appender::rolling::InitError,
},
#[snafu(transparent)]
GraphQLSchemaError {
source: async_graphql::dynamic::SchemaError,
},
#[snafu(transparent)]
AuthError { source: AuthError },
#[snafu(transparent)]
DownloadError { source: DownloaderError },
#[snafu(transparent)]
RSSError { source: rss::Error },
#[snafu(transparent)]
DotEnvError { source: dotenv::Error },
#[snafu(transparent)]
TeraError { source: tera::Error },
#[snafu(transparent)]
IOError { source: std::io::Error },
#[snafu(transparent)]
DbError { source: sea_orm::DbErr },
#[snafu(transparent)]
CookieParseError { source: cookie::ParseError },
#[snafu(transparent, context(false))]
FigmentError {
#[snafu(source(from(figment::Error, Box::new)))]
source: Box<figment::Error>,
},
#[snafu(transparent)]
SerdeJsonError { source: serde_json::Error },
#[snafu(transparent)]
ReqwestMiddlewareError { source: reqwest_middleware::Error },
#[snafu(transparent)]
ReqwestError { source: reqwest::Error },
#[snafu(transparent)]
ParseUrlError { source: url::ParseError },
#[snafu(display("{source}"), context(false))]
OpenDALError {
#[snafu(source(from(opendal::Error, Box::new)))]
source: Box<opendal::Error>,
},
#[snafu(transparent)]
InvalidHeaderValueError {
source: http::header::InvalidHeaderValue,
},
#[snafu(transparent)]
HttpClientError { source: HttpClientError },
#[cfg(all(feature = "testcontainers", test))]
#[snafu(transparent)]
TestcontainersError {
source: testcontainers::TestcontainersError,
},
#[snafu(display("Extract {desc} with mime error, expected {expected}, but got {found}"))]
MimeError {
desc: String,
expected: String,
found: String,
},
#[snafu(display("Invalid or unknown format in extracting mikan rss"))]
MikanRssInvalidFormatError,
#[snafu(display("Invalid field {field} in extracting mikan rss"))]
MikanRssInvalidFieldError {
field: Cow<'static, str>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Missing field {field} in extracting mikan meta"))]
MikanMetaMissingFieldError {
field: Cow<'static, str>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Model Entity {entity} not found"))]
ModelEntityNotFound { entity: Cow<'static, str> },
#[snafu(display("{message}"))]
Whatever {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
}
impl RError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
source: None.into(),
}
}
pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
Self::MikanRssInvalidFieldError {
field,
source: None.into(),
}
}
pub fn from_mikan_rss_invalid_field_and_source(
field: Cow<'static, str>,
source: impl std::error::Error + Send + Sync + 'static,
) -> Self {
Self::MikanRssInvalidFieldError {
field,
source: OptDynErr::some_boxed(source),
}
}
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
Self::DbError {
source: sea_orm::DbErr::RecordNotFound(detail.to_string()),
}
}
}
impl snafu::FromString for RError {
type Source = Box<dyn std::error::Error + Send + Sync>;
fn without_source(message: String) -> Self {
Self::Whatever {
message,
source: OptDynErr::none(),
}
}
fn with_source(source: Self::Source, message: String) -> Self {
Self::Whatever {
message,
source: OptDynErr::some(source),
}
}
}
impl IntoResponse for RError {
fn into_response(self) -> Response {
match self {
Self::AuthError { source: auth_error } => auth_error.into_response(),
err => (
StatusCode::INTERNAL_SERVER_ERROR,
Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
)
.into_response(),
}
}
}
impl Serialize for RError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
impl<'de> Deserialize<'de> for RError {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(Self::Whatever {
message: s,
source: None.into(),
})
}
}
pub type RResult<T> = Result<T, RError>;

@@ -0,0 +1,9 @@
pub trait RAnyhowResultExt<T>: snafu::ResultExt<T, anyhow::Error> {
fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>>;
}
impl<T> RAnyhowResultExt<T> for Result<T, anyhow::Error> {
fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>> {
self.map_err(|e| e.into())
}
}

@@ -1,116 +1,9 @@
use std::{borrow::Cow, error::Error as StdError};
pub mod alias;
pub mod app_error;
pub mod ext;
pub mod response;
use axum::response::{IntoResponse, Response};
use http::StatusCode;
use thiserror::Error as ThisError;
use crate::{auth::AuthError, fetch::HttpClientError};
#[derive(ThisError, Debug)]
pub enum RError {
#[error(transparent)]
InvalidMethodError(#[from] http::method::InvalidMethod),
#[error(transparent)]
InvalidHeaderNameError(#[from] http::header::InvalidHeaderName),
#[error(transparent)]
TracingAppenderInitError(#[from] tracing_appender::rolling::InitError),
#[error(transparent)]
GraphQLSchemaError(#[from] async_graphql::dynamic::SchemaError),
#[error(transparent)]
AuthError(#[from] AuthError),
#[error(transparent)]
RSSError(#[from] rss::Error),
#[error(transparent)]
DotEnvError(#[from] dotenv::Error),
#[error(transparent)]
TeraError(#[from] tera::Error),
#[error(transparent)]
IOError(#[from] std::io::Error),
#[error(transparent)]
DbError(#[from] sea_orm::DbErr),
#[error(transparent)]
CookieParseError(#[from] cookie::ParseError),
#[error(transparent)]
FigmentError(#[from] figment::Error),
#[error(transparent)]
SerdeJsonError(#[from] serde_json::Error),
#[error(transparent)]
ReqwestMiddlewareError(#[from] reqwest_middleware::Error),
#[error(transparent)]
ReqwestError(#[from] reqwest::Error),
#[error(transparent)]
ParseUrlError(#[from] url::ParseError),
#[error(transparent)]
OpenDALError(#[from] opendal::Error),
#[error(transparent)]
InvalidHeaderValueError(#[from] http::header::InvalidHeaderValue),
#[error(transparent)]
HttpClientError(#[from] HttpClientError),
#[error("Extract {desc} with mime error, expected {expected}, but got {found}")]
MimeError {
desc: String,
expected: String,
found: String,
},
#[error("Invalid or unknown format in extracting mikan rss")]
MikanRssInvalidFormatError,
#[error("Invalid field {field} in extracting mikan rss")]
MikanRssInvalidFieldError {
field: Cow<'static, str>,
#[source]
source: Option<Box<dyn StdError + Send + Sync>>,
},
#[error("Missing field {field} in extracting mikan meta")]
MikanMetaMissingFieldError {
field: Cow<'static, str>,
#[source]
source: Option<Box<dyn StdError + Send + Sync>>,
},
#[error("Model Entity {entity} not found")]
ModelEntityNotFound { entity: Cow<'static, str> },
#[error("{0}")]
CustomMessageStr(&'static str),
#[error("{0}")]
CustomMessageString(String),
}
impl RError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
source: None,
}
}
pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
Self::MikanRssInvalidFieldError {
field,
source: None,
}
}
pub fn from_mikan_rss_invalid_field_and_source(
field: Cow<'static, str>,
source: Box<dyn StdError + Send + Sync>,
) -> Self {
Self::MikanRssInvalidFieldError {
field,
source: Some(source),
}
}
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
Self::DbError(sea_orm::DbErr::RecordNotFound(detail.to_string()))
}
}
impl IntoResponse for RError {
fn into_response(self) -> Response {
match self {
Self::AuthError(auth_error) => auth_error.into_response(),
err => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
}
}
}
pub type RResult<T> = Result<T, RError>;
pub use alias::OptDynErr;
pub use app_error::*;
pub use ext::RAnyhowResultExt;
pub use response::StandardErrorResponse;

View File

@ -0,0 +1,19 @@
use serde::Serialize;
#[derive(Serialize, Debug, Clone)]
pub struct StandardErrorResponse<T = ()> {
pub success: bool,
pub message: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub result: Option<T>,
}
impl<T> From<String> for StandardErrorResponse<T> {
fn from(value: String) -> Self {
StandardErrorResponse {
success: false,
message: value,
result: None,
}
}
}
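Since result stays None on the error path and is skipped during serialization, the wire shape is just the two set fields; a small illustrative check (not part of this change):
#[cfg(test)]
mod shape_tests {
    use super::StandardErrorResponse;
    #[test]
    fn error_body_omits_result() {
        let body = StandardErrorResponse::<()>::from("db timeout".to_string());
        // `result: None` is dropped by skip_serializing_if, so clients
        // only ever see `success` and `message` on errors.
        assert_eq!(
            serde_json::to_string(&body).unwrap(),
            r#"{"success":false,"message":"db timeout"}"#
        );
    }
}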

View File

@ -0,0 +1,2 @@
pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
pub const MAGNET_SCHEMA: &str = "magnet";
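These constants centralize checks that previously lived in the downloader module; a typical (illustrative) guard over an RSS enclosure might look like:
use url::Url;
// Illustrative helper, not part of this change.
fn is_torrent_enclosure(mime: &str, url: &Url) -> bool {
    mime == BITTORRENT_MIME_TYPE || url.scheme() == MAGNET_SCHEMA
}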

View File

@ -1,11 +1,14 @@
use color_eyre::eyre::OptionExt;
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use quirks_path::Path;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};
use crate::extract::defs::SUBTITLE_LANG;
use crate::{
errors::app_error::{RError, RResult},
extract::defs::SUBTITLE_LANG,
};
lazy_static! {
static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
@ -101,10 +104,12 @@ pub fn parse_episode_media_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> color_eyre::eyre::Result<TorrentEpisodeMediaMeta> {
) -> RResult<TorrentEpisodeMediaMeta> {
let media_name = torrent_path
.file_name()
.ok_or_else(|| color_eyre::eyre::eyre!("failed to get file name of {}", torrent_path))?;
.with_whatever_context::<_, _, RError>(|| {
format!("failed to get file name of {}", torrent_path)
})?;
let mut match_obj = None;
for rule in TORRENT_EP_PARSE_RULES.iter() {
match_obj = if let Some(torrent_name) = torrent_name.as_ref() {
@ -119,7 +124,7 @@ pub fn parse_episode_media_meta_from_torrent(
if let Some(match_obj) = match_obj {
let group_season_and_title = match_obj
.get(1)
.ok_or_else(|| color_eyre::eyre::eyre!("should have 1 group"))?
.whatever_context::<_, RError>("should have 1 group")?
.as_str();
let (fansub, season_and_title) = get_fansub(group_season_and_title);
let (title, season) = if let Some(season) = season {
@ -130,7 +135,7 @@ pub fn parse_episode_media_meta_from_torrent(
};
let episode_index = match_obj
.get(2)
.ok_or_eyre("should have 2 group")?
.whatever_context::<_, RError>("should have 2 group")?
.as_str()
.parse::<i32>()
.unwrap_or(1);
@ -146,11 +151,11 @@ pub fn parse_episode_media_meta_from_torrent(
extname,
})
} else {
Err(color_eyre::eyre::eyre!(
whatever!(
"failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'",
torrent_path,
torrent_name
))
)
}
}
@ -158,11 +163,13 @@ pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> color_eyre::eyre::Result<TorrentEpisodeSubtitleMeta> {
) -> RResult<TorrentEpisodeSubtitleMeta> {
let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
let media_name = torrent_path
.file_name()
.ok_or_else(|| color_eyre::eyre::eyre!("failed to get file name of {}", torrent_path))?;
.with_whatever_context::<_, _, RError>(|| {
format!("failed to get file name of {}", torrent_path)
})?;
let lang = get_subtitle_lang(media_name);
@ -177,8 +184,8 @@ mod tests {
use quirks_path::Path;
use super::{
parse_episode_media_meta_from_torrent, parse_episode_subtitle_meta_from_torrent,
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta,
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta, parse_episode_media_meta_from_torrent,
parse_episode_subtitle_meta_from_torrent,
};
#[test]

View File

@ -0,0 +1,6 @@
pub mod core;
pub mod extract;
pub use core::{BITTORRENT_MIME_TYPE, MAGNET_SCHEMA};
pub use extract::*;

View File

@ -1,24 +1,33 @@
use std::ops::Deref;
use std::{fmt::Debug, ops::Deref};
use reqwest_middleware::ClientWithMiddleware;
use secrecy::{ExposeSecret, SecretString};
use serde::{Deserialize, Serialize};
use url::Url;
use super::MikanConfig;
use crate::{
errors::RError,
errors::app_error::RError,
fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth},
};
#[derive(Debug, Default, Clone)]
#[derive(Default, Clone, Deserialize, Serialize)]
pub struct MikanAuthSecrecy {
pub cookie: SecretString,
pub cookie: String,
pub user_agent: Option<String>,
}
impl Debug for MikanAuthSecrecy {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("MikanAuthSecrecy")
.field("cookie", &String::from("[secrecy]"))
.field("user_agent", &String::from("[secrecy]"))
.finish()
}
}
impl MikanAuthSecrecy {
pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RError> {
HttpClientCookiesAuth::from_cookies(self.cookie.expose_secret(), url, self.user_agent)
HttpClientCookiesAuth::from_cookies(&self.cookie, url, self.user_agent)
}
}
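With the cookie now a plain String (so the struct can derive Serialize/Deserialize for persistence), the hand-written Debug impl is the only thing keeping it out of logs; an illustrative check of the redaction:
// Illustrative only: both fields print as "[secrecy]" regardless of content.
fn log_auth_redacted() {
    let auth = MikanAuthSecrecy {
        cookie: String::from(".AspNetCore.Identity.Application=abc"),
        user_agent: Some(String::from("Mozilla/5.0")),
    };
    // Prints: MikanAuthSecrecy { cookie: "[secrecy]", user_agent: "[secrecy]" }
    println!("{auth:?}");
}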
@ -38,9 +47,13 @@ impl MikanClient {
})
}
pub fn fork_with_auth(&self, secrecy: MikanAuthSecrecy) -> Result<Self, RError> {
let cookie_auth = secrecy.into_cookie_auth(&self.base_url)?;
let fork = self.http_client.fork().attach_secrecy(cookie_auth);
pub fn fork_with_auth(&self, secrecy: Option<MikanAuthSecrecy>) -> Result<Self, RError> {
let mut fork = self.http_client.fork();
if let Some(secrecy) = secrecy {
let cookie_auth = secrecy.into_cookie_auth(&self.base_url)?;
fork = fork.attach_secrecy(cookie_auth);
}
Ok(Self {
http_client: HttpClient::from_fork(fork)?,

View File

@ -8,13 +8,15 @@ use tracing::instrument;
use url::Url;
use crate::{
errors::{RError, RResult},
extract::mikan::{
MikanClient,
web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
errors::app_error::{RError, RResult},
extract::{
bittorrent::BITTORRENT_MIME_TYPE,
mikan::{
MikanClient,
web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
},
},
fetch::bytes::fetch_bytes,
sync::core::BITTORRENT_MIME_TYPE,
};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@ -120,10 +122,10 @@ impl TryFrom<rss::Item> for MikanRssItem {
.title
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?;
let enclosure_url = Url::parse(&enclosure.url).map_err(|inner| {
let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
RError::from_mikan_rss_invalid_field_and_source(
Cow::Borrowed("enclosure_url:enclosure.link"),
Box::new(inner),
"enclosure_url:enclosure.link".into(),
err,
)
})?;
@ -334,22 +336,24 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
mod tests {
use std::assert_matches::assert_matches;
use color_eyre::eyre;
use rstest::rstest;
use url::Url;
use crate::{
extract::mikan::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
extract_mikan_rss_channel_from_rss_link,
errors::app_error::RResult,
extract::{
bittorrent::BITTORRENT_MIME_TYPE,
mikan::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
extract_mikan_rss_channel_from_rss_link,
},
},
sync::core::BITTORRENT_MIME_TYPE,
test_utils::mikan::build_testing_mikan_client,
};
#[rstest]
#[tokio::test]
async fn test_parse_mikan_rss_channel_from_rss_link() -> eyre::Result<()> {
async fn test_parse_mikan_rss_channel_from_rss_link() -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;

View File

@ -1,19 +1,21 @@
use std::borrow::Cow;
use std::{borrow::Cow, sync::Arc};
use async_stream::try_stream;
use bytes::Bytes;
use futures::Stream;
use itertools::Itertools;
use scraper::{Html, Selector};
use serde::{Deserialize, Serialize};
use tracing::instrument;
use url::Url;
use super::{
MIKAN_BUCKET_KEY, MikanBangumiRssLink, MikanClient, extract_mikan_bangumi_id_from_rss_link,
MIKAN_BUCKET_KEY, MikanAuthSecrecy, MikanBangumiRssLink, MikanClient,
extract_mikan_bangumi_id_from_rss_link,
};
use crate::{
app::AppContext,
errors::{RError, RResult},
app::AppContextTrait,
errors::app_error::{RError, RResult},
extract::{
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str,
@ -34,7 +36,7 @@ pub struct MikanEpisodeMeta {
pub mikan_episode_id: String,
}
#[derive(Clone, Debug, PartialEq)]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MikanBangumiMeta {
pub homepage: Url,
pub origin_poster_src: Option<Url>,
@ -123,12 +125,12 @@ pub async fn extract_mikan_poster_meta_from_src(
}
pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
ctx: &AppContext,
ctx: &dyn AppContextTrait,
origin_poster_src_url: Url,
subscriber_id: i32,
) -> RResult<MikanBangumiPosterMeta> {
let dal_client = &ctx.storage;
let mikan_client = &ctx.mikan;
let dal_client = ctx.storage();
let mikan_client = ctx.mikan();
if let Some(poster_src) = dal_client
.exists_object(
StorageContentCategory::Image,
@ -346,126 +348,132 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
})
}
/**
* @login-required
*/
#[instrument(skip_all, fields(my_bangumi_page_url = my_bangumi_page_url.as_str()))]
#[instrument(skip_all, fields(my_bangumi_page_url, auth_secrecy = ?auth_secrecy, history = history.len()))]
pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
http_client: &MikanClient,
context: Arc<dyn AppContextTrait>,
my_bangumi_page_url: Url,
) -> impl Stream<Item = Result<MikanBangumiMeta, RError>> {
auth_secrecy: Option<MikanAuthSecrecy>,
history: &[Arc<RResult<MikanBangumiMeta>>],
) -> impl Stream<Item = RResult<MikanBangumiMeta>> {
try_stream! {
let http_client = &context.mikan().fork_with_auth(auth_secrecy.clone())?;
let mikan_base_url = Url::parse(&my_bangumi_page_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, my_bangumi_page_url.clone()).await?;
let bangumi_container_selector = &Selector::parse(".sk-bangumi .an-ul>li").unwrap();
let bangumi_info_selector = &Selector::parse(".an-info a.an-text").unwrap();
let bangumi_poster_selector =
&Selector::parse("span[data-src][data-bangumiid], span[data-bangumiid][style]")
.unwrap();
let fansub_container_selector =
&Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap();
&Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap();
let fansub_title_selector = &Selector::parse(".tag-res-name[title]").unwrap();
let fansub_id_selector =
&Selector::parse(".active[data-subtitlegroupid][data-bangumiid]").unwrap();
let bangumi_iters = {
let bangumi_items = {
let html = Html::parse_document(&content);
let bangumi_container_selector = &Selector::parse(".sk-bangumi .an-ul>li").unwrap();
let bangumi_info_selector = &Selector::parse(".an-info a.an-text").unwrap();
let bangumi_poster_selector =
&Selector::parse("span[data-src][data-bangumiid], span[data-bangumiid][style]")
.unwrap();
html.select(bangumi_container_selector)
.filter_map(|bangumi_elem| {
let title_and_href_elem = bangumi_elem.select(bangumi_info_selector).next();
let poster_elem = bangumi_elem.select(bangumi_poster_selector).next();
if let (Some(bangumi_home_page_url), Some(bangumi_title)) = (
title_and_href_elem.and_then(|elem| elem.attr("href")),
title_and_href_elem.and_then(|elem| elem.attr("title")),
) {
let origin_poster_src = poster_elem.and_then(|ele| {
ele.attr("data-src")
.and_then(|data_src| {
extract_image_src_from_str(data_src, &mikan_base_url)
.filter_map(|bangumi_elem| {
let title_and_href_elem =
bangumi_elem.select(bangumi_info_selector).next();
let poster_elem = bangumi_elem.select(bangumi_poster_selector).next();
if let (Some(bangumi_home_page_url), Some(bangumi_title)) = (
title_and_href_elem.and_then(|elem| elem.attr("href")),
title_and_href_elem.and_then(|elem| elem.attr("title")),
) {
let origin_poster_src = poster_elem.and_then(|ele| {
ele.attr("data-src")
.and_then(|data_src| {
extract_image_src_from_str(data_src, &mikan_base_url)
})
.or_else(|| {
ele.attr("style").and_then(|style| {
extract_background_image_src_from_style_attr(
style,
&mikan_base_url,
)
})
.or_else(|| {
ele.attr("style").and_then(|style| {
extract_background_image_src_from_style_attr(
style,
&mikan_base_url,
)
})
})
});
let bangumi_title = bangumi_title.to_string();
let bangumi_home_page_url =
my_bangumi_page_url.join(bangumi_home_page_url).ok()?;
let MikanBangumiHomepage {
mikan_bangumi_id, ..
} = extract_mikan_bangumi_id_from_homepage(&bangumi_home_page_url)?;
if let Some(origin_poster_src) = origin_poster_src.as_ref() {
tracing::trace!(
origin_poster_src = origin_poster_src.as_str(),
bangumi_title,
mikan_bangumi_id,
"bangumi info extracted"
);
} else {
tracing::warn!(
bangumi_title,
mikan_bangumi_id,
"bangumi info extracted, but failed to extract poster_src"
);
}
let bangumi_expand_info_url = build_mikan_bangumi_expand_info_url(
mikan_base_url.clone(),
&mikan_bangumi_id,
);
Some((
})
});
let bangumi_title = bangumi_title.to_string();
let bangumi_home_page_url =
my_bangumi_page_url.join(bangumi_home_page_url).ok()?;
let MikanBangumiHomepage {
mikan_bangumi_id, ..
} = extract_mikan_bangumi_id_from_homepage(&bangumi_home_page_url)?;
if let Some(origin_poster_src) = origin_poster_src.as_ref() {
tracing::trace!(
origin_poster_src = origin_poster_src.as_str(),
bangumi_title,
mikan_bangumi_id,
bangumi_expand_info_url,
origin_poster_src,
))
"bangumi info extracted"
);
} else {
None
tracing::warn!(
bangumi_title,
mikan_bangumi_id,
"bangumi info extracted, but failed to extract poster_src"
);
}
})
.collect_vec()
};
for (bangumi_title, mikan_bangumi_id, bangumi_expand_info_url, origin_poster_src) in
bangumi_iters
{
if let Some((fansub_name, mikan_fansub_id)) = {
let bangumi_expand_info_content = fetch_html(http_client, bangumi_expand_info_url).await?;
let bangumi_expand_info_fragment = Html::parse_fragment(&bangumi_expand_info_content);
bangumi_expand_info_fragment.select(fansub_container_selector).next().and_then(|fansub_info| {
if let (Some(fansub_name), Some(mikan_fansub_id)) = (
fansub_info
.select(fansub_title_selector)
.next()
.and_then(|ele| ele.attr("title"))
.map(String::from),
fansub_info
.select(fansub_id_selector)
.next()
.and_then(|ele| ele.attr("data-subtitlegroupid"))
.map(String::from)
) {
Some((fansub_name, mikan_fansub_id))
} else {
None
}
})
} {
tracing::trace!(
fansub_name,
mikan_fansub_id,
"subscribed fansub extracted"
);
yield MikanBangumiMeta {
homepage: build_mikan_bangumi_homepage(
let bangumi_expand_info_url = build_mikan_bangumi_expand_info_url(
mikan_base_url.clone(),
&mikan_bangumi_id,
);
Some((
bangumi_title,
mikan_bangumi_id,
bangumi_expand_info_url,
origin_poster_src,
))
} else {
None
}
})
.collect_vec()
};
for (idx, (bangumi_title, mikan_bangumi_id, bangumi_expand_info_url, origin_poster_src)) in
bangumi_items.iter().enumerate()
{
if history.get(idx).is_some() {
continue;
} else if let Some((fansub_name, mikan_fansub_id)) = {
let bangumi_expand_info_content =
fetch_html(http_client, bangumi_expand_info_url.clone()).await?;
let bangumi_expand_info_fragment =
Html::parse_fragment(&bangumi_expand_info_content);
bangumi_expand_info_fragment
.select(fansub_container_selector)
.next()
.and_then(|fansub_info| {
if let (Some(fansub_name), Some(mikan_fansub_id)) = (
fansub_info
.select(fansub_title_selector)
.next()
.and_then(|ele| ele.attr("title"))
.map(String::from),
fansub_info
.select(fansub_id_selector)
.next()
.and_then(|ele| ele.attr("data-subtitlegroupid"))
.map(String::from),
) {
Some((fansub_name, mikan_fansub_id))
} else {
None
}
})
} {
tracing::trace!(fansub_name, mikan_fansub_id, "subscribed fansub extracted");
let item = MikanBangumiMeta {
homepage: build_mikan_bangumi_homepage(
mikan_base_url.clone(),
mikan_bangumi_id,
Some(&mikan_fansub_id),
),
bangumi_title: bangumi_title.to_string(),
@ -474,6 +482,7 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
fansub: Some(fansub_name),
origin_poster_src: origin_poster_src.clone(),
};
yield item;
}
}
}
@ -482,31 +491,27 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
#[cfg(test)]
mod test {
#![allow(unused_variables)]
use color_eyre::eyre;
use futures::{TryStreamExt, pin_mut};
use http::header;
use rstest::{fixture, rstest};
use secrecy::SecretString;
use tracing::Level;
use url::Url;
use zune_image::{codecs::ImageFormat, image::Image};
use super::*;
use crate::{
extract::mikan::{
MikanAuthSecrecy, web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page,
},
test_utils::{mikan::build_testing_mikan_client, tracing::init_testing_tracing},
use crate::test_utils::{
app::UnitTestAppContext, mikan::build_testing_mikan_client,
tracing::try_init_testing_tracing,
};
#[fixture]
fn before_each() {
init_testing_tracing(Level::INFO);
try_init_testing_tracing(Level::INFO);
}
#[rstest]
#[tokio::test]
async fn test_extract_mikan_poster_from_src(before_each: ()) -> eyre::Result<()> {
async fn test_extract_mikan_poster_from_src(before_each: ()) -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -537,7 +542,7 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_episode(before_each: ()) -> eyre::Result<()> {
async fn test_extract_mikan_episode(before_each: ()) -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -577,9 +582,7 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(
before_each: (),
) -> eyre::Result<()> {
async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -616,16 +619,18 @@ mod test {
#[rstest]
#[tokio::test]
async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(
before_each: (),
) -> eyre::Result<()> {
async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RResult<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let my_bangumi_page_url = mikan_base_url.join("/Home/MyBangumi")?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
let context = Arc::new(
UnitTestAppContext::builder()
.mikan(build_testing_mikan_client(mikan_base_url.clone()).await?)
.build(),
);
{
let my_bangumi_without_cookie_mock = mikan_server
@ -636,8 +641,10 @@ mod test {
.await;
let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
&mikan_client,
context.clone(),
my_bangumi_page_url.clone(),
None,
&[],
);
pin_mut!(bangumi_metas);
@ -671,8 +678,8 @@ mod test {
.create_async()
.await;
let mikan_client_with_cookie = mikan_client.fork_with_auth(MikanAuthSecrecy {
cookie: SecretString::from(
let auth_secrecy = Some(MikanAuthSecrecy {
cookie: String::from(
"mikan-announcement=1; .AspNetCore.Antiforgery.abc=abc; \
.AspNetCore.Identity.Application=abc; ",
),
@ -680,11 +687,13 @@ mod test {
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like \
Gecko) Chrome/133.0.0.0 Safari/537.36 Edg/133.0.0.0",
)),
})?;
});
let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
&mikan_client_with_cookie,
context.clone(),
my_bangumi_page_url,
auth_secrecy,
&[],
);
pin_mut!(bangumi_metas);
let bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;

View File

@ -4,4 +4,4 @@ pub mod http;
pub mod media;
pub mod mikan;
pub mod rawname;
pub mod torrent;
pub mod bittorrent;

View File

@ -7,8 +7,12 @@ use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::whatever;
use crate::extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE};
use crate::{
errors::app_error::RResult,
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
};
const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";
@ -71,10 +75,7 @@ fn replace_ch_bracket_to_en(raw_name: &str) -> String {
raw_name.replace('【', "[").replace('】', "]")
}
fn title_body_pre_process(
title_body: &str,
fansub: Option<&str>,
) -> color_eyre::eyre::Result<String> {
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RResult<String> {
let raw_without_fansub = if let Some(fansub) = fansub {
let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
fan_sub_re.replace_all(title_body, "")
@ -262,7 +263,7 @@ pub fn check_is_movie(title: &str) -> bool {
MOVIE_TITLE_RE.is_match(title)
}
pub fn parse_episode_meta_from_raw_name(s: &str) -> color_eyre::eyre::Result<RawEpisodeMeta> {
pub fn parse_episode_meta_from_raw_name(s: &str) -> RResult<RawEpisodeMeta> {
let raw_title = s.trim();
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
let fansub = extract_fansub(&raw_title_without_ch_brackets);
@ -315,10 +316,7 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> color_eyre::eyre::Result<Raw
resolution,
})
} else {
Err(color_eyre::eyre::eyre!(
"Can not parse episode meta from raw filename {}",
raw_title
))
whatever!("Can not parse episode meta from raw filename {}", raw_title)
}
}

View File

@ -1,3 +0,0 @@
mod parser;
pub use parser::*;

View File

@ -2,7 +2,7 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
use crate::errors::RError;
use crate::errors::app_error::RError;
pub async fn fetch_bytes<T: IntoUrl, H: HttpClientTrait>(
client: &H,

View File

@ -14,7 +14,7 @@ use reqwest_retry::{RetryTransientMiddleware, policies::ExponentialBackoff};
use reqwest_tracing::TracingMiddleware;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use thiserror::Error;
use snafu::Snafu;
use super::HttpClientSecrecyDataTrait;
use crate::fetch::get_random_mobile_ua;
@ -101,14 +101,14 @@ impl CacheManager for CacheBackend {
}
}
#[derive(Debug, Error)]
#[derive(Debug, Snafu)]
pub enum HttpClientError {
#[error(transparent)]
ReqwestError(#[from] reqwest::Error),
#[error(transparent)]
ReqwestMiddlewareError(#[from] reqwest_middleware::Error),
#[error(transparent)]
HttpError(#[from] http::Error),
#[snafu(transparent)]
ReqwestError { source: reqwest::Error },
#[snafu(transparent)]
ReqwestMiddlewareError { source: reqwest_middleware::Error },
#[snafu(transparent)]
HttpError { source: http::Error },
}
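#[snafu(transparent)] mirrors the removed thiserror #[error(transparent)] + #[from] pair: Display forwards to the source and a From impl is generated, so ? conversions at call sites keep compiling; an illustrative example:
// Illustrative only: `?` still auto-converts reqwest::Error.
async fn head_content_length(
    client: &reqwest::Client,
    url: &str,
) -> Result<Option<u64>, HttpClientError> {
    let resp = client.head(url).send().await?; // reqwest::Error -> HttpClientError
    Ok(resp.content_length())
}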
pub trait HttpClientTrait: Deref<Target = ClientWithMiddleware> + Debug {}

View File

@ -2,12 +2,11 @@ use std::sync::Arc;
use cookie::Cookie;
use reqwest::{ClientBuilder, cookie::Jar};
use secrecy::zeroize::Zeroize;
use url::Url;
use crate::errors::RError;
use crate::errors::app_error::RError;
pub trait HttpClientSecrecyDataTrait: Zeroize {
pub trait HttpClientSecrecyDataTrait {
fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
client_builder
}
@ -37,13 +36,6 @@ impl HttpClientCookiesAuth {
}
}
impl Zeroize for HttpClientCookiesAuth {
fn zeroize(&mut self) {
self.cookie_jar = Arc::new(Jar::default());
self.user_agent = None;
}
}
impl HttpClientSecrecyDataTrait for HttpClientCookiesAuth {
fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
let mut client_builder = client_builder.cookie_provider(self.cookie_jar.clone());

View File

@ -1,7 +1,7 @@
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
use crate::errors::RError;
use crate::errors::app_error::RError;
pub async fn fetch_html<T: IntoUrl, H: HttpClientTrait>(
client: &H,

View File

@ -2,7 +2,7 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::{bytes::fetch_bytes, client::HttpClientTrait};
use crate::errors::RError;
use crate::errors::app_error::RError;
pub async fn fetch_image<T: IntoUrl, H: HttpClientTrait>(
client: &H,

View File

@ -2,7 +2,7 @@ use std::{future::Future, pin::Pin};
use axum::http;
use super::{client::HttpClientError, HttpClient};
use super::{HttpClient, client::HttpClientError};
impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
type Error = HttpClientError;
@ -30,7 +30,7 @@ impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
builder
.body(response.bytes().await?.to_vec())
.map_err(HttpClientError::HttpError)
.map_err(HttpClientError::from)
})
}
}

View File

@ -3,6 +3,7 @@ pub mod filter;
pub mod guard;
pub mod schema_root;
pub mod service;
pub mod subscriptions;
pub mod util;
pub use config::GraphQLConfig;

View File

@ -2,7 +2,7 @@ use async_graphql::dynamic::Schema;
use sea_orm::DatabaseConnection;
use super::{config::GraphQLConfig, schema_root};
use crate::errors::RResult;
use crate::errors::app_error::RResult;
#[derive(Debug)]
pub struct GraphQLService {

View File

@ -5,13 +5,16 @@
impl_trait_in_bindings,
iterator_try_collect,
async_fn_traits,
let_chains
let_chains,
error_generic_member_access
)]
#![feature(associated_type_defaults)]
pub mod app;
pub mod auth;
pub mod cache;
pub mod database;
pub mod downloader;
pub mod errors;
pub mod extract;
pub mod fetch;
@ -20,7 +23,6 @@ pub mod logger;
pub mod migrations;
pub mod models;
pub mod storage;
pub mod sync;
pub mod tasks;
#[cfg(test)]
pub mod test_utils;

View File

@ -1,5 +1,6 @@
use std::sync::OnceLock;
use snafu::prelude::*;
use tracing_appender::non_blocking::WorkerGuard;
use tracing_subscriber::{
EnvFilter, Layer, Registry,
@ -9,7 +10,7 @@ use tracing_subscriber::{
};
use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
use crate::errors::{RError, RResult};
use crate::errors::app_error::RResult;
// Function to initialize the logger based on the provided configuration
const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];
@ -119,9 +120,9 @@ impl LoggerService {
let file_appender_layer = if file_appender_config.non_blocking {
let (non_blocking_file_appender, work_guard) =
tracing_appender::non_blocking(file_appender);
NONBLOCKING_WORK_GUARD_KEEP
.set(work_guard)
.map_err(|_| RError::CustomMessageStr("cannot lock for appender"))?;
if NONBLOCKING_WORK_GUARD_KEEP.set(work_guard).is_err() {
whatever!("cannot lock for appender");
};
Self::init_layer(
non_blocking_file_appender,
&file_appender_config.format,

View File

@ -1 +0,0 @@
@import "tailwindcss";

View File

@ -1,96 +0,0 @@
import '@abraham/reflection';
import { type Injector, ReflectiveInjector } from '@outposts/injection-js';
import { RouterProvider, createRouter } from '@tanstack/react-router';
import {
OidcSecurityService,
provideAuth,
withDefaultFeatures,
} from 'oidc-client-rx';
import {
InjectorContextVoidInjector,
InjectorProvider,
} from 'oidc-client-rx/adapters/react';
import { withTanstackRouter } from 'oidc-client-rx/adapters/tanstack-router';
import React from 'react';
import ReactDOM from 'react-dom/client';
import { buildOidcConfig, isBasicAuth } from './auth/config';
import { withCheckAuthResultEvent } from './auth/event';
import { useAuth } from './auth/hooks';
import { routeTree } from './routeTree.gen';
import './main.css';
const router = createRouter({
routeTree,
basepath: '/api/playground',
defaultPreload: 'intent',
context: {
isAuthenticated: isBasicAuth,
injector: InjectorContextVoidInjector,
oidcSecurityService: {} as OidcSecurityService,
},
});
// Register things for typesafety
declare module '@tanstack/react-router' {
interface Register {
router: typeof router;
}
}
const injector: Injector = isBasicAuth
? ReflectiveInjector.resolveAndCreate([])
: ReflectiveInjector.resolveAndCreate(
provideAuth(
{
config: buildOidcConfig(),
},
withDefaultFeatures({
router: { enabled: false },
securityStorage: { type: 'local-storage' },
}),
withTanstackRouter(router),
withCheckAuthResultEvent()
)
);
// if needed, check when init
let oidcSecurityService: OidcSecurityService | undefined;
if (!isBasicAuth) {
oidcSecurityService = injector.get(OidcSecurityService);
oidcSecurityService.checkAuth().subscribe();
}
const AppWithBasicAuth = () => {
return <RouterProvider router={router} />;
};
const AppWithOidcAuth = () => {
const { isAuthenticated, oidcSecurityService, injector } = useAuth();
return (
<RouterProvider
router={router}
context={{
isAuthenticated,
oidcSecurityService,
injector,
}}
/>
);
};
const App = isBasicAuth ? AppWithBasicAuth : AppWithOidcAuth;
const rootEl = document.getElementById('root');
if (rootEl) {
rootEl.classList.add('min-h-svh');
const root = ReactDOM.createRoot(rootEl);
root.render(
<React.StrictMode>
<InjectorProvider injector={injector}>
<App />
</InjectorProvider>
</React.StrictMode>
);
}

View File

@ -4,8 +4,8 @@ use serde::{Deserialize, Serialize};
use super::subscribers::{self, SEED_SUBSCRIBER};
use crate::{
app::AppContext,
errors::{RError, RResult},
app::AppContextTrait,
errors::app_error::{RError, RResult},
};
#[derive(
@ -57,8 +57,8 @@ impl Related<super::subscribers::Entity> for Entity {
impl ActiveModelBehavior for ActiveModel {}
impl Model {
pub async fn find_by_pid(ctx: &AppContext, pid: &str) -> RResult<Self> {
let db = &ctx.db;
pub async fn find_by_pid(ctx: &dyn AppContextTrait, pid: &str) -> RResult<Self> {
let db = ctx.db();
let subscriber_auth = Entity::find()
.filter(Column::Pid.eq(pid))
.one(db)
@ -67,8 +67,8 @@ impl Model {
Ok(subscriber_auth)
}
pub async fn create_from_oidc(ctx: &AppContext, sub: String) -> RResult<Self> {
let db = &ctx.db;
pub async fn create_from_oidc(ctx: &dyn AppContextTrait, sub: String) -> RResult<Self> {
let db = ctx.db();
let txn = db.begin().await?;

View File

@ -4,7 +4,7 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::O
use serde::{Deserialize, Serialize};
use super::subscription_bangumi;
use crate::{app::AppContext, errors::RResult};
use crate::{app::AppContextTrait, errors::app_error::RResult};
#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
@ -113,7 +113,7 @@ pub enum RelatedEntity {
impl Model {
pub async fn get_or_insert_from_mikan<F>(
ctx: &AppContext,
ctx: &dyn AppContextTrait,
subscriber_id: i32,
subscription_id: i32,
mikan_bangumi_id: String,
@ -123,7 +123,7 @@ impl Model {
where
F: AsyncFnOnce(&mut ActiveModel) -> RResult<()>,
{
let db = &ctx.db;
let db = ctx.db();
if let Some(existed) = Entity::find()
.filter(
Column::MikanBangumiId

View File

@ -6,8 +6,8 @@ use serde::{Deserialize, Serialize};
use super::{bangumi, query::InsertManyReturningExt, subscription_episode};
use crate::{
app::AppContext,
errors::RResult,
app::AppContextTrait,
errors::app_error::RResult,
extract::{
mikan::{MikanEpisodeMeta, build_mikan_episode_homepage},
rawname::parse_episode_meta_from_raw_name,
@ -136,12 +136,12 @@ pub struct MikanEpsiodeCreation {
impl Model {
pub async fn add_episodes(
ctx: &AppContext,
ctx: &dyn AppContextTrait,
subscriber_id: i32,
subscription_id: i32,
creations: impl IntoIterator<Item = MikanEpsiodeCreation>,
) -> RResult<()> {
let db = &ctx.db;
let db = ctx.db();
let new_episode_active_modes = creations
.into_iter()
.map(|cr| ActiveModel::from_mikan_episode_meta(ctx, cr))
@ -189,9 +189,9 @@ impl Model {
impl ActiveModel {
pub fn from_mikan_episode_meta(
ctx: &AppContext,
ctx: &dyn AppContextTrait,
creation: MikanEpsiodeCreation,
) -> color_eyre::eyre::Result<Self> {
) -> RResult<Self> {
let item = creation.episode;
let bgm = creation.bangumi;
let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title)
@ -201,7 +201,7 @@ impl ActiveModel {
.ok()
.unwrap_or_default();
let homepage =
build_mikan_episode_homepage(ctx.mikan.base_url().clone(), &item.mikan_episode_id);
build_mikan_episode_homepage(ctx.mikan().base_url().clone(), &item.mikan_episode_id);
Ok(Self {
mikan_episode_id: ActiveValue::Set(Some(item.mikan_episode_id)),

View File

@ -8,3 +8,5 @@ pub mod subscribers;
pub mod subscription_bangumi;
pub mod subscription_episode;
pub mod subscriptions;
pub mod task_stream_item;
pub mod tasks;

View File

@ -4,8 +4,8 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, TransactionTrait, entity::prelud
use serde::{Deserialize, Serialize};
use crate::{
app::AppContext,
errors::{RError, RResult},
app::AppContextTrait,
errors::app_error::{RError, RResult},
};
pub const SEED_SUBSCRIBER: &str = "konobangu";
@ -95,13 +95,13 @@ pub struct SubscriberIdParams {
impl ActiveModelBehavior for ActiveModel {}
impl Model {
pub async fn find_seed_subscriber_id(ctx: &AppContext) -> RResult<i32> {
pub async fn find_seed_subscriber_id(ctx: &dyn AppContextTrait) -> RResult<i32> {
let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER).await?;
Ok(subscriber_auth.subscriber_id)
}
pub async fn find_by_id(ctx: &AppContext, id: i32) -> RResult<Self> {
let db = &ctx.db;
pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RResult<Self> {
let db = ctx.db();
let subscriber = Entity::find_by_id(id)
.one(db)
@ -110,8 +110,8 @@ impl Model {
Ok(subscriber)
}
pub async fn create_root(ctx: &AppContext) -> RResult<Self> {
let db = &ctx.db;
pub async fn create_root(ctx: &dyn AppContextTrait) -> RResult<Self> {
let db = ctx.db();
let txn = db.begin().await?;
let user = ActiveModel {

View File

@ -7,8 +7,8 @@ use serde::{Deserialize, Serialize};
use super::{bangumi, episodes, query::filter_values_in};
use crate::{
app::AppContext,
errors::RResult,
app::AppContextTrait,
errors::app_error::RResult,
extract::{
mikan::{
build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link,
@ -179,22 +179,22 @@ impl ActiveModel {
impl Model {
pub async fn add_subscription(
ctx: &AppContext,
ctx: &dyn AppContextTrait,
create_dto: SubscriptionCreateDto,
subscriber_id: i32,
) -> RResult<Self> {
let db = &ctx.db;
let db = ctx.db();
let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id);
Ok(subscription.insert(db).await?)
}
pub async fn toggle_with_ids(
ctx: &AppContext,
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
enabled: bool,
) -> RResult<()> {
let db = &ctx.db;
let db = ctx.db();
Entity::update_many()
.col_expr(Column::Enabled, Expr::value(enabled))
.filter(Column::Id.is_in(ids))
@ -203,8 +203,11 @@ impl Model {
Ok(())
}
pub async fn delete_with_ids(ctx: &AppContext, ids: impl Iterator<Item = i32>) -> RResult<()> {
let db = &ctx.db;
pub async fn delete_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
) -> RResult<()> {
let db = ctx.db();
Entity::delete_many()
.filter(Column::Id.is_in(ids))
.exec(db)
@ -212,16 +215,16 @@ impl Model {
Ok(())
}
pub async fn pull_subscription(&self, ctx: &AppContext) -> RResult<()> {
pub async fn pull_subscription(&self, ctx: &dyn AppContextTrait) -> RResult<()> {
match &self.category {
SubscriptionCategory::Mikan => {
let mikan_client = &ctx.mikan;
let mikan_client = ctx.mikan();
let channel =
extract_mikan_rss_channel_from_rss_link(mikan_client, &self.source_url).await?;
let items = channel.into_items();
let db = &ctx.db;
let db = ctx.db();
let items = items.into_iter().collect_vec();
let mut stmt = filter_values_in(
@ -266,7 +269,7 @@ impl Model {
for ((mikan_bangumi_id, mikan_fansub_id), new_ep_metas) in new_mikan_bangumi_groups
{
let mikan_base_url = ctx.mikan.base_url();
let mikan_base_url = ctx.mikan().base_url();
let bgm_homepage = build_mikan_bangumi_homepage(
mikan_base_url.clone(),
&mikan_bangumi_id,

View File

@ -0,0 +1,62 @@
use async_trait::async_trait;
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_status")]
#[serde(rename_all = "snake_case")]
pub enum TaskStatus {
#[sea_orm(string_value = "r")]
Running,
#[sea_orm(string_value = "s")]
Success,
#[sea_orm(string_value = "f")]
Failed,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "task_stream_item")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i32,
pub task_id: i32,
pub subscriber_id: i32,
pub item: serde_json::Value,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
)]
Subscriber,
#[sea_orm(
belongs_to = "super::tasks::Entity",
from = "Column::TaskId",
to = "super::tasks::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
)]
Task,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::Task.def()
}
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {}

View File

@ -0,0 +1,95 @@
use async_trait::async_trait;
use sea_orm::{QuerySelect, entity::prelude::*};
use serde::{Deserialize, Serialize};
use crate::{app::AppContextTrait, errors::app_error::RResult};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_status")]
#[serde(rename_all = "snake_case")]
pub enum TaskStatus {
#[sea_orm(string_value = "p")]
Pending,
#[sea_orm(string_value = "r")]
Running,
#[sea_orm(string_value = "s")]
Success,
#[sea_orm(string_value = "f")]
Failed,
}
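Each variant therefore has two string encodings, the compact sea_orm string_value in the database and the serde snake_case name on the wire; an illustrative check:
#[cfg(test)]
mod encoding_tests {
    use super::TaskStatus;
    #[test]
    fn two_encodings() {
        // serde (rename_all = "snake_case") uses the long form on the wire...
        assert_eq!(
            serde_json::to_string(&TaskStatus::Running).unwrap(),
            r#""running""#
        );
        // ...while sea_orm persists the short form declared in string_value ("r").
    }
}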
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_mode")]
#[serde(rename_all = "snake_case")]
pub enum TaskMode {
#[sea_orm(string_value = "stream")]
Stream,
#[sea_orm(string_value = "future")]
Future,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "tasks")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i32,
pub subscriber_id: i32,
pub task_mode: TaskMode,
pub task_status: TaskStatus,
pub task_type: String,
pub state_data: serde_json::Value,
pub request_data: serde_json::Value,
pub error_data: serde_json::Value,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::task_stream_item::Entity")]
StreamItem,
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
)]
Subscriber,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::task_stream_item::Entity> for Entity {
fn to() -> RelationDef {
Relation::StreamItem.def()
}
}
impl Model {
pub async fn find_stream_task_by_id(
ctx: &dyn AppContextTrait,
task_id: i32,
) -> RResult<Option<(Model, Vec<super::task_stream_item::Model>)>> {
let db = ctx.db();
let res = Entity::find()
.filter(Column::Id.eq(task_id))
.filter(Column::TaskMode.eq(TaskMode::Stream))
.find_with_related(super::task_stream_item::Entity)
.limit(1)
.all(db)
.await?
.pop();
Ok(res)
}
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
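find_stream_task_by_id loads a stream-mode task together with its already-persisted stream items in a single query, which is what makes resumption possible; a sketch of a call site (resume_stream_task is hypothetical):
// Hypothetical call site, not part of this change.
async fn resume_stream_task(ctx: &dyn AppContextTrait, task_id: i32) -> RResult<()> {
    if let Some((task, yielded)) = Model::find_stream_task_by_id(ctx, task_id).await? {
        tracing::debug!(task.id, replayed = yielded.len(), "resuming stream task");
        // Replay `yielded` to the consumer before polling the stream for new items.
    }
    Ok(())
}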

View File

@ -1,134 +0,0 @@
/* eslint-disable */
// @ts-nocheck
// noinspection JSUnusedGlobalSymbols
// This file was automatically generated by TanStack Router.
// You should NOT make any changes in this file as it will be overwritten.
// Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified.
// Import Routes
import { Route as rootRoute } from './web/controller/__root'
import { Route as IndexImport } from './web/controller/index'
import { Route as GraphqlIndexImport } from './web/controller/graphql/index'
import { Route as OidcCallbackImport } from './web/controller/oidc/callback'
// Create/Update Routes
const IndexRoute = IndexImport.update({
id: '/',
path: '/',
getParentRoute: () => rootRoute,
} as any)
const GraphqlIndexRoute = GraphqlIndexImport.update({
id: '/graphql/',
path: '/graphql/',
getParentRoute: () => rootRoute,
} as any)
const OidcCallbackRoute = OidcCallbackImport.update({
id: '/oidc/callback',
path: '/oidc/callback',
getParentRoute: () => rootRoute,
} as any)
// Populate the FileRoutesByPath interface
declare module '@tanstack/react-router' {
interface FileRoutesByPath {
'/': {
id: '/'
path: '/'
fullPath: '/'
preLoaderRoute: typeof IndexImport
parentRoute: typeof rootRoute
}
'/oidc/callback': {
id: '/oidc/callback'
path: '/oidc/callback'
fullPath: '/oidc/callback'
preLoaderRoute: typeof OidcCallbackImport
parentRoute: typeof rootRoute
}
'/graphql/': {
id: '/graphql/'
path: '/graphql'
fullPath: '/graphql'
preLoaderRoute: typeof GraphqlIndexImport
parentRoute: typeof rootRoute
}
}
}
// Create and export the route tree
export interface FileRoutesByFullPath {
'/': typeof IndexRoute
'/oidc/callback': typeof OidcCallbackRoute
'/graphql': typeof GraphqlIndexRoute
}
export interface FileRoutesByTo {
'/': typeof IndexRoute
'/oidc/callback': typeof OidcCallbackRoute
'/graphql': typeof GraphqlIndexRoute
}
export interface FileRoutesById {
__root__: typeof rootRoute
'/': typeof IndexRoute
'/oidc/callback': typeof OidcCallbackRoute
'/graphql/': typeof GraphqlIndexRoute
}
export interface FileRouteTypes {
fileRoutesByFullPath: FileRoutesByFullPath
fullPaths: '/' | '/oidc/callback' | '/graphql'
fileRoutesByTo: FileRoutesByTo
to: '/' | '/oidc/callback' | '/graphql'
id: '__root__' | '/' | '/oidc/callback' | '/graphql/'
fileRoutesById: FileRoutesById
}
export interface RootRouteChildren {
IndexRoute: typeof IndexRoute
OidcCallbackRoute: typeof OidcCallbackRoute
GraphqlIndexRoute: typeof GraphqlIndexRoute
}
const rootRouteChildren: RootRouteChildren = {
IndexRoute: IndexRoute,
OidcCallbackRoute: OidcCallbackRoute,
GraphqlIndexRoute: GraphqlIndexRoute,
}
export const routeTree = rootRoute
._addFileChildren(rootRouteChildren)
._addFileTypes<FileRouteTypes>()
/* ROUTE_MANIFEST_START
{
"routes": {
"__root__": {
"filePath": "__root.tsx",
"children": [
"/",
"/oidc/callback",
"/graphql/"
]
},
"/": {
"filePath": "index.tsx"
},
"/oidc/callback": {
"filePath": "oidc/callback.tsx"
},
"/graphql/": {
"filePath": "graphql/index.tsx"
}
}
}
ROUTE_MANIFEST_END */

View File

@ -8,7 +8,7 @@ use url::Url;
use uuid::Uuid;
use super::StorageConfig;
use crate::errors::{RError, RResult};
use crate::errors::app_error::{RError, RResult};
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
@ -142,7 +142,7 @@ impl StorageService {
subscriber_pid: &str,
bucket: Option<&str>,
filename: &str,
) -> color_eyre::eyre::Result<Buffer> {
) -> RResult<Buffer> {
match content_category {
StorageContentCategory::Image => {
let fullname = [

View File

@ -1,298 +0,0 @@
use std::fmt::Debug;
use async_trait::async_trait;
use itertools::Itertools;
use lazy_static::lazy_static;
use librqbit_core::{
magnet::Magnet,
torrent_metainfo::{TorrentMetaV1Owned, torrent_from_bytes},
};
use quirks_path::{Path, PathBuf};
use regex::Regex;
use serde::{Deserialize, Serialize};
use url::Url;
use super::{QbitTorrent, QbitTorrentContent, TorrentDownloadError};
use crate::fetch::{HttpClientTrait, fetch_bytes};
pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
pub const MAGNET_SCHEMA: &str = "magnet";
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum TorrentFilter {
All,
Downloading,
Completed,
Paused,
Active,
Inactive,
Resumed,
Stalled,
StalledUploading,
StalledDownloading,
Errored,
}
lazy_static! {
static ref TORRENT_HASH_RE: Regex = Regex::new(r"[a-fA-F0-9]{40}").unwrap();
static ref TORRENT_EXT_RE: Regex = Regex::new(r"\.torrent$").unwrap();
}
#[derive(Clone, PartialEq, Eq)]
pub enum TorrentSource {
MagnetUrl {
url: Url,
hash: String,
},
TorrentUrl {
url: Url,
hash: String,
},
TorrentFile {
torrent: Vec<u8>,
hash: String,
name: Option<String>,
},
}
impl TorrentSource {
pub async fn parse<H: HttpClientTrait>(
client: &H,
url: &str,
) -> color_eyre::eyre::Result<Self> {
let url = Url::parse(url)?;
let source = if url.scheme() == MAGNET_SCHEMA {
TorrentSource::from_magnet_url(url)?
} else if let Some(basename) = url
.clone()
.path_segments()
.and_then(|mut segments| segments.next_back())
{
if let (Some(match_hash), true) = (
TORRENT_HASH_RE.find(basename),
TORRENT_EXT_RE.is_match(basename),
) {
TorrentSource::from_torrent_url(url, match_hash.as_str().to_string())?
} else {
let contents = fetch_bytes(client, url).await?;
TorrentSource::from_torrent_file(contents.to_vec(), Some(basename.to_string()))?
}
} else {
let contents = fetch_bytes(client, url).await?;
TorrentSource::from_torrent_file(contents.to_vec(), None)?
};
Ok(source)
}
pub fn from_torrent_file(
file: Vec<u8>,
name: Option<String>,
) -> color_eyre::eyre::Result<Self> {
let torrent: TorrentMetaV1Owned = torrent_from_bytes(&file)
.map_err(|_| TorrentDownloadError::InvalidTorrentFileFormat)?;
let hash = torrent.info_hash.as_string();
Ok(TorrentSource::TorrentFile {
torrent: file,
hash,
name,
})
}
pub fn from_magnet_url(url: Url) -> color_eyre::eyre::Result<Self> {
if url.scheme() != MAGNET_SCHEMA {
Err(TorrentDownloadError::InvalidUrlSchema {
found: url.scheme().to_string(),
expected: MAGNET_SCHEMA.to_string(),
}
.into())
} else {
let magnet = Magnet::parse(url.as_str()).map_err(|_| {
TorrentDownloadError::InvalidMagnetFormat {
url: url.as_str().to_string(),
}
})?;
let hash = magnet
.as_id20()
.ok_or_else(|| TorrentDownloadError::InvalidMagnetFormat {
url: url.as_str().to_string(),
})?
.as_string();
Ok(TorrentSource::MagnetUrl { url, hash })
}
}
pub fn from_torrent_url(url: Url, hash: String) -> color_eyre::eyre::Result<Self> {
Ok(TorrentSource::TorrentUrl { url, hash })
}
pub fn hash(&self) -> &str {
match self {
TorrentSource::MagnetUrl { hash, .. } => hash,
TorrentSource::TorrentUrl { hash, .. } => hash,
TorrentSource::TorrentFile { hash, .. } => hash,
}
}
}
impl Debug for TorrentSource {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
TorrentSource::MagnetUrl { url, .. } => {
write!(f, "MagnetUrl {{ url: {} }}", url.as_str())
}
TorrentSource::TorrentUrl { url, .. } => {
write!(f, "TorrentUrl {{ url: {} }}", url.as_str())
}
TorrentSource::TorrentFile { name, hash, .. } => write!(
f,
"TorrentFile {{ name: \"{}\", hash: \"{hash}\" }}",
name.as_deref().unwrap_or_default()
),
}
}
}
pub trait TorrentContent {
fn get_name(&self) -> &str;
fn get_all_size(&self) -> u64;
fn get_progress(&self) -> f64;
fn get_curr_size(&self) -> u64;
}
impl TorrentContent for QbitTorrentContent {
fn get_name(&self) -> &str {
self.name.as_str()
}
fn get_all_size(&self) -> u64 {
self.size
}
fn get_progress(&self) -> f64 {
self.progress
}
fn get_curr_size(&self) -> u64 {
u64::clamp(
f64::round(self.get_all_size() as f64 * self.get_progress()) as u64,
0,
self.get_all_size(),
)
}
}
#[derive(Debug, Clone)]
pub enum Torrent {
Qbit {
torrent: QbitTorrent,
contents: Vec<QbitTorrentContent>,
},
}
impl Torrent {
pub fn iter_files(&self) -> impl Iterator<Item = &dyn TorrentContent> {
match self {
Torrent::Qbit { contents, .. } => {
contents.iter().map(|item| item as &dyn TorrentContent)
}
}
}
pub fn get_name(&self) -> Option<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.name.as_deref(),
}
}
pub fn get_hash(&self) -> Option<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.hash.as_deref(),
}
}
pub fn get_save_path(&self) -> Option<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.save_path.as_deref(),
}
}
pub fn get_content_path(&self) -> Option<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.content_path.as_deref(),
}
}
pub fn get_tags(&self) -> Vec<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.tags.as_deref().map_or_else(Vec::new, |s| {
s.split(',')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect_vec()
}),
}
}
pub fn get_category(&self) -> Option<&str> {
match self {
Torrent::Qbit { torrent, .. } => torrent.category.as_deref(),
}
}
}
#[async_trait]
pub trait TorrentDownloader {
async fn get_torrents_info(
&self,
status_filter: TorrentFilter,
category: Option<String>,
tag: Option<String>,
) -> color_eyre::eyre::Result<Vec<Torrent>>;
async fn add_torrents(
&self,
source: TorrentSource,
save_path: String,
category: Option<&str>,
) -> color_eyre::eyre::Result<()>;
async fn delete_torrents(&self, hashes: Vec<String>) -> color_eyre::eyre::Result<()>;
async fn rename_torrent_file(
&self,
hash: &str,
old_path: &str,
new_path: &str,
) -> color_eyre::eyre::Result<()>;
async fn move_torrents(
&self,
hashes: Vec<String>,
new_path: &str,
) -> color_eyre::eyre::Result<()>;
async fn get_torrent_path(&self, hashes: String) -> color_eyre::eyre::Result<Option<String>>;
async fn check_connection(&self) -> color_eyre::eyre::Result<()>;
async fn set_torrents_category(
&self,
hashes: Vec<String>,
category: &str,
) -> color_eyre::eyre::Result<()>;
async fn add_torrent_tags(
&self,
hashes: Vec<String>,
tags: Vec<String>,
) -> color_eyre::eyre::Result<()>;
async fn add_category(&self, category: &str) -> color_eyre::eyre::Result<()>;
fn get_save_path(&self, sub_path: &Path) -> PathBuf;
}

View File

@ -1,26 +0,0 @@
use std::{borrow::Cow, time::Duration};
use thiserror::Error;
#[derive(Error, Debug)]
pub enum TorrentDownloadError {
#[error("Invalid mime (expected {expected:?}, got {found:?})")]
InvalidMime { expected: String, found: String },
#[error("Invalid url schema (expected {expected:?}, got {found:?})")]
InvalidUrlSchema { expected: String, found: String },
#[error("Invalid url parse: {0:?}")]
InvalidUrlParse(#[from] url::ParseError),
#[error("Invalid url format: {reason}")]
InvalidUrlFormat { reason: Cow<'static, str> },
#[error("QBit api error: {0:?}")]
QBitAPIError(#[from] qbit_rs::Error),
#[error("Timeout error ({action} timeouts out of {timeout:?})")]
TimeoutError {
action: Cow<'static, str>,
timeout: Duration,
},
#[error("Invalid torrent file format")]
InvalidTorrentFileFormat,
#[error("Invalid magnet file format (url = {url})")]
InvalidMagnetFormat { url: String },
}

View File

@ -1,15 +0,0 @@
pub mod core;
pub mod error;
pub mod qbit;
mod utils;
pub use core::{
Torrent, TorrentContent, TorrentDownloader, TorrentFilter, TorrentSource, BITTORRENT_MIME_TYPE,
MAGNET_SCHEMA,
};
pub use error::TorrentDownloadError;
pub use qbit::{
QBittorrentDownloader, QBittorrentDownloaderCreation, QbitTorrent, QbitTorrentContent,
QbitTorrentFile, QbitTorrentFilter, QbitTorrentSource,
};

View File

@ -1,722 +0,0 @@
use std::{
borrow::Cow, collections::HashSet, fmt::Debug, future::Future, sync::Arc, time::Duration,
};
use async_trait::async_trait;
use color_eyre::eyre::OptionExt;
use futures::future::try_join_all;
pub use qbit_rs::model::{
Torrent as QbitTorrent, TorrentContent as QbitTorrentContent, TorrentFile as QbitTorrentFile,
TorrentFilter as QbitTorrentFilter, TorrentSource as QbitTorrentSource,
};
use qbit_rs::{
Qbit,
model::{AddTorrentArg, Credential, GetTorrentListArg, NonEmptyStr, SyncData},
};
use quirks_path::{Path, PathBuf};
use tokio::time::sleep;
use tracing::instrument;
use url::Url;
use super::{
Torrent, TorrentDownloadError, TorrentDownloader, TorrentFilter, TorrentSource,
utils::path_equals_as_file_url,
};
impl From<TorrentSource> for QbitTorrentSource {
fn from(value: TorrentSource) -> Self {
match value {
TorrentSource::MagnetUrl { url, .. } => QbitTorrentSource::Urls {
urls: qbit_rs::model::Sep::from([url]),
},
TorrentSource::TorrentUrl { url, .. } => QbitTorrentSource::Urls {
urls: qbit_rs::model::Sep::from([url]),
},
TorrentSource::TorrentFile {
torrent: torrents,
name,
..
} => QbitTorrentSource::TorrentFiles {
torrents: vec![QbitTorrentFile {
filename: name.unwrap_or_default(),
data: torrents,
}],
},
}
}
}
impl From<TorrentFilter> for QbitTorrentFilter {
fn from(val: TorrentFilter) -> Self {
match val {
TorrentFilter::All => QbitTorrentFilter::All,
TorrentFilter::Downloading => QbitTorrentFilter::Downloading,
TorrentFilter::Completed => QbitTorrentFilter::Completed,
TorrentFilter::Paused => QbitTorrentFilter::Paused,
TorrentFilter::Active => QbitTorrentFilter::Active,
TorrentFilter::Inactive => QbitTorrentFilter::Inactive,
TorrentFilter::Resumed => QbitTorrentFilter::Resumed,
TorrentFilter::Stalled => QbitTorrentFilter::Stalled,
TorrentFilter::StalledUploading => QbitTorrentFilter::StalledUploading,
TorrentFilter::StalledDownloading => QbitTorrentFilter::StalledDownloading,
TorrentFilter::Errored => QbitTorrentFilter::Errored,
}
}
}
pub struct QBittorrentDownloaderCreation {
pub endpoint: String,
pub username: String,
pub password: String,
pub save_path: String,
pub subscriber_id: i32,
}
pub struct QBittorrentDownloader {
pub subscriber_id: i32,
pub endpoint_url: Url,
pub client: Arc<Qbit>,
pub save_path: PathBuf,
pub wait_sync_timeout: Duration,
}
impl QBittorrentDownloader {
pub async fn from_creation(
creation: QBittorrentDownloaderCreation,
) -> Result<Self, TorrentDownloadError> {
let endpoint_url =
Url::parse(&creation.endpoint).map_err(TorrentDownloadError::InvalidUrlParse)?;
let credential = Credential::new(creation.username, creation.password);
let client = Qbit::new(endpoint_url.clone(), credential);
client
.login(false)
.await
.map_err(TorrentDownloadError::QBitAPIError)?;
client.sync(None).await?;
Ok(Self {
client: Arc::new(client),
endpoint_url,
subscriber_id: creation.subscriber_id,
save_path: creation.save_path.into(),
wait_sync_timeout: Duration::from_millis(10000),
})
}
#[instrument(level = "debug")]
pub async fn api_version(&self) -> color_eyre::eyre::Result<String> {
let result = self.client.get_webapi_version().await?;
Ok(result)
}
pub async fn wait_until<G, Fut, F, D, H, E>(
&self,
capture_fn: H,
fetch_data_fn: G,
mut stop_wait_fn: F,
timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()>
where
H: FnOnce() -> E,
G: Fn(Arc<Qbit>, E) -> Fut,
Fut: Future<Output = color_eyre::eyre::Result<D>>,
F: FnMut(&D) -> bool,
E: Clone,
D: Debug + serde::Serialize,
{
let mut next_wait_ms = 32u64;
let mut all_wait_ms = 0u64;
let timeout = timeout.unwrap_or(self.wait_sync_timeout);
let env = capture_fn();
loop {
sleep(Duration::from_millis(next_wait_ms)).await;
all_wait_ms += next_wait_ms;
if all_wait_ms >= timeout.as_millis() as u64 {
// full update
let sync_data = fetch_data_fn(self.client.clone(), env.clone()).await?;
if stop_wait_fn(&sync_data) {
break;
} else {
tracing::warn!(name = "wait_until timeout", sync_data = serde_json::to_string(&sync_data).unwrap(), timeout = ?timeout);
return Err(TorrentDownloadError::TimeoutError {
action: Cow::Borrowed("QBittorrentDownloader::wait_until"),
timeout,
}
.into());
}
}
let sync_data = fetch_data_fn(self.client.clone(), env.clone()).await?;
if stop_wait_fn(&sync_data) {
break;
}
next_wait_ms *= 2;
}
Ok(())
}
#[instrument(level = "trace", skip(self, stop_wait_fn))]
pub async fn wait_torrents_until<F>(
&self,
arg: GetTorrentListArg,
stop_wait_fn: F,
timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()>
where
F: FnMut(&Vec<QbitTorrent>) -> bool,
{
self.wait_until(
|| arg,
async move |client: Arc<Qbit>,
arg: GetTorrentListArg|
-> color_eyre::eyre::Result<Vec<QbitTorrent>> {
let data = client.get_torrent_list(arg).await?;
Ok(data)
},
stop_wait_fn,
timeout,
)
.await
}
#[instrument(level = "debug", skip(self, stop_wait_fn))]
pub async fn wait_sync_until<F: FnMut(&SyncData) -> bool>(
&self,
stop_wait_fn: F,
timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()> {
self.wait_until(
|| (),
async move |client: Arc<Qbit>, _| -> color_eyre::eyre::Result<SyncData> {
let data = client.sync(None).await?;
Ok(data)
},
stop_wait_fn,
timeout,
)
.await
}
#[instrument(level = "debug", skip(self, stop_wait_fn))]
async fn wait_torrent_contents_until<F: FnMut(&Vec<QbitTorrentContent>) -> bool>(
&self,
hash: &str,
stop_wait_fn: F,
timeout: Option<Duration>,
) -> color_eyre::eyre::Result<()> {
self.wait_until(
|| Arc::new(hash.to_string()),
async move |client: Arc<Qbit>,
hash_arc: Arc<String>|
-> color_eyre::eyre::Result<Vec<QbitTorrentContent>> {
let data = client.get_torrent_contents(hash_arc.as_str(), None).await?;
Ok(data)
},
stop_wait_fn,
timeout,
)
.await
}
}
#[async_trait]
impl TorrentDownloader for QBittorrentDownloader {
#[instrument(level = "debug", skip(self))]
async fn get_torrents_info(
&self,
status_filter: TorrentFilter,
category: Option<String>,
tag: Option<String>,
) -> color_eyre::eyre::Result<Vec<Torrent>> {
let arg = GetTorrentListArg {
filter: Some(status_filter.into()),
category,
tag,
..Default::default()
};
let torrent_list = self.client.get_torrent_list(arg).await?;
let torrent_contents = try_join_all(torrent_list.iter().map(|s| async {
if let Some(hash) = &s.hash {
self.client.get_torrent_contents(hash.as_str(), None).await
} else {
Ok(vec![])
}
}))
.await?;
Ok(torrent_list
.into_iter()
.zip(torrent_contents)
.map(|(torrent, contents)| Torrent::Qbit { torrent, contents })
.collect::<Vec<_>>())
}
#[instrument(level = "debug", skip(self))]
async fn add_torrents(
&self,
source: TorrentSource,
save_path: String,
category: Option<&str>,
) -> color_eyre::eyre::Result<()> {
let arg = AddTorrentArg {
source: source.clone().into(),
savepath: Some(save_path),
category: category.map(String::from),
auto_torrent_management: Some(false),
..Default::default()
};
let add_result = self.client.add_torrent(arg.clone()).await;
if let (
Err(qbit_rs::Error::ApiError(qbit_rs::ApiError::CategoryNotFound)),
Some(category),
) = (&add_result, category)
{
self.add_category(category).await?;
self.client.add_torrent(arg).await?;
} else {
add_result?;
}
let source_hash = source.hash();
self.wait_sync_until(
|sync_data| {
sync_data
.torrents
.as_ref()
.is_some_and(|t| t.contains_key(source_hash))
},
None,
)
.await?;
Ok(())
}
#[instrument(level = "debug", skip(self))]
async fn delete_torrents(&self, hashes: Vec<String>) -> color_eyre::eyre::Result<()> {
self.client
.delete_torrents(hashes.clone(), Some(true))
.await?;
self.wait_torrents_until(
GetTorrentListArg::builder()
.hashes(hashes.join("|"))
.build(),
|torrents| -> bool { torrents.is_empty() },
None,
)
.await?;
Ok(())
}
#[instrument(level = "debug", skip(self))]
async fn rename_torrent_file(
&self,
hash: &str,
old_path: &str,
new_path: &str,
) -> color_eyre::eyre::Result<()> {
self.client.rename_file(hash, old_path, new_path).await?;
let new_path = self.save_path.join(new_path);
let save_path = self.save_path.as_path();
self.wait_torrent_contents_until(
hash,
|contents| -> bool {
contents.iter().any(|c| {
path_equals_as_file_url(save_path.join(&c.name), &new_path)
.inspect_err(|error| {
tracing::warn!(name = "path_equals_as_file_url", error = ?error);
})
.unwrap_or(false)
})
},
None,
)
.await?;
Ok(())
}
#[instrument(level = "debug", skip(self))]
async fn move_torrents(
&self,
hashes: Vec<String>,
new_path: &str,
) -> color_eyre::eyre::Result<()> {
self.client
.set_torrent_location(hashes.clone(), new_path)
.await?;
self.wait_torrents_until(
GetTorrentListArg::builder()
.hashes(hashes.join("|"))
.build(),
|torrents| -> bool {
torrents.iter().flat_map(|t| t.save_path.as_ref()).any(|p| {
path_equals_as_file_url(p, new_path)
.inspect_err(|error| {
tracing::warn!(name = "path_equals_as_file_url", error = ?error);
})
.unwrap_or(false)
})
},
None,
)
.await?;
Ok(())
}
async fn get_torrent_path(&self, hashes: String) -> color_eyre::eyre::Result<Option<String>> {
let mut torrent_list = self
.client
.get_torrent_list(GetTorrentListArg {
hashes: Some(hashes),
..Default::default()
})
.await?;
let torrent = torrent_list.first_mut().ok_or_eyre("No torrent found")?;
Ok(torrent.save_path.take())
}
#[instrument(level = "debug", skip(self))]
async fn check_connection(&self) -> color_eyre::eyre::Result<()> {
self.api_version().await?;
Ok(())
}
#[instrument(level = "debug", skip(self))]
async fn set_torrents_category(
&self,
hashes: Vec<String>,
category: &str,
) -> color_eyre::eyre::Result<()> {
let result = self
.client
.set_torrent_category(hashes.clone(), category)
.await;
if let Err(qbit_rs::Error::ApiError(qbit_rs::ApiError::CategoryNotFound)) = &result {
self.add_category(category).await?;
self.client
.set_torrent_category(hashes.clone(), category)
.await?;
} else {
result?;
}
self.wait_torrents_until(
GetTorrentListArg::builder()
.hashes(hashes.join("|"))
.build(),
|torrents| {
torrents
.iter()
.all(|t| t.category.as_ref().is_some_and(|c| c == category))
},
None,
)
.await?;
Ok(())
}
#[instrument(level = "debug", skip(self))]
async fn add_torrent_tags(
&self,
hashes: Vec<String>,
tags: Vec<String>,
) -> color_eyre::eyre::Result<()> {
if tags.is_empty() {
return Err(color_eyre::eyre::eyre!("tags to add cannot be empty"));
}
self.client
.add_torrent_tags(hashes.clone(), tags.clone())
.await?;
let tag_sets = tags.iter().map(|s| s.as_str()).collect::<HashSet<&str>>();
self.wait_torrents_until(
GetTorrentListArg::builder()
.hashes(hashes.join("|"))
.build(),
|torrents| {
torrents.iter().all(|t| {
t.tags.as_ref().is_some_and(|t| {
t.split(',')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect::<HashSet<&str>>()
.is_superset(&tag_sets)
})
})
},
None,
)
.await?;
Ok(())
}
#[instrument(level = "debug", skip(self))]
async fn add_category(&self, category: &str) -> color_eyre::eyre::Result<()> {
self.client
.add_category(
NonEmptyStr::new(category).ok_or_eyre("category cannot be empty")?,
self.save_path.as_str(),
)
.await?;
self.wait_sync_until(
|sync_data| {
sync_data
.categories
.as_ref()
.is_some_and(|s| s.contains_key(category))
},
None,
)
.await?;
Ok(())
}
fn get_save_path(&self, sub_path: &Path) -> PathBuf {
self.save_path.join(sub_path)
}
}
impl Debug for QBittorrentDownloader {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("QBittorrentDownloader")
.field("subscriber_id", &self.subscriber_id)
.field("client", &self.endpoint_url.as_str())
.finish()
}
}
#[cfg(test)]
pub mod tests {
use itertools::Itertools;
use super::*;
use crate::test_utils::fetch::build_testing_http_client;
fn get_tmp_qbit_test_folder() -> &'static str {
if cfg!(all(windows, not(feature = "testcontainers"))) {
"C:\\Windows\\Temp\\konobangu\\qbit"
} else {
"/tmp/konobangu/qbit"
}
}
#[cfg(feature = "testcontainers")]
pub async fn create_qbit_testcontainer()
-> color_eyre::eyre::Result<testcontainers::ContainerRequest<testcontainers::GenericImage>>
{
use testcontainers::{
GenericImage,
core::{
ContainerPort,
// ReuseDirective,
WaitFor,
},
};
use testcontainers_modules::testcontainers::ImageExt;
use crate::test_utils::testcontainers::ContainerRequestEnhancedExt;
let container = GenericImage::new("linuxserver/qbittorrent", "latest")
.with_wait_for(WaitFor::message_on_stderr("Connection to localhost"))
.with_env_var("WEBUI_PORT", "8080")
.with_env_var("TZ", "Asia/Singapore")
.with_env_var("TORRENTING_PORT", "6881")
.with_mapped_port(6881, ContainerPort::Tcp(6881))
.with_mapped_port(8080, ContainerPort::Tcp(8080))
// .with_reuse(ReuseDirective::Always)
.with_default_log_consumer()
.with_prune_existed_label("qbit-downloader", true, true)
.await?;
Ok(container)
}
#[cfg(not(feature = "testcontainers"))]
#[tokio::test]
async fn test_qbittorrent_downloader() {
test_qbittorrent_downloader_impl(None, None)
.await
.expect("qbittorrent downloader test should succeed");
}
#[cfg(feature = "testcontainers")]
#[tokio::test(flavor = "multi_thread")]
async fn test_qbittorrent_downloader() -> color_eyre::eyre::Result<()> {
use testcontainers::runners::AsyncRunner;
use tokio::io::AsyncReadExt;
tracing_subscriber::fmt()
.with_max_level(tracing::Level::DEBUG)
.with_test_writer()
.init();
let image = create_qbit_testcontainer().await?;
let container = image.start().await?;
let mut logs = String::new();
container.stdout(false).read_to_string(&mut logs).await?;
let username = logs
.lines()
.find_map(|line| {
if line.contains("The WebUI administrator username is") {
line.split_whitespace().last()
} else {
None
}
})
.expect("should have username")
.trim();
let password = logs
.lines()
.find_map(|line| {
if line.contains("A temporary password is provided for this session") {
line.split_whitespace().last()
} else {
None
}
})
.expect("should have password")
.trim();
tracing::info!(username, password);
test_qbittorrent_downloader_impl(Some(username), Some(password)).await?;
Ok(())
}
async fn test_qbittorrent_downloader_impl(
username: Option<&str>,
password: Option<&str>,
) -> color_eyre::eyre::Result<()> {
let http_client = build_testing_http_client()?;
let base_save_path = Path::new(get_tmp_qbit_test_folder());
let mut downloader = QBittorrentDownloader::from_creation(QBittorrentDownloaderCreation {
endpoint: "http://127.0.0.1:8080".to_string(),
password: password.unwrap_or_default().to_string(),
username: username.unwrap_or_default().to_string(),
subscriber_id: 0,
save_path: base_save_path.to_string(),
})
.await?;
downloader.wait_sync_timeout = Duration::from_secs(3);
downloader.check_connection().await?;
downloader
.delete_torrents(vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()])
.await?;
let torrent_source = TorrentSource::parse(
&http_client,
"https://mikanani.me/Download/20240301/47ee2d69e7f19af783ad896541a07b012676f858.torrent"
).await?;
let save_path = base_save_path.join(format!(
"test_add_torrents_{}",
chrono::Utc::now().timestamp()
));
downloader
.add_torrents(torrent_source, save_path.to_string(), Some("bangumi"))
.await?;
let get_torrent = async || -> color_eyre::eyre::Result<Torrent> {
let torrent_infos = downloader
.get_torrents_info(TorrentFilter::All, None, None)
.await?;
let result = torrent_infos
.into_iter()
.find(|t| t.get_hash() == Some("47ee2d69e7f19af783ad896541a07b012676f858"))
.ok_or_eyre("no torrent")?;
Ok(result)
};
let target_torrent = get_torrent().await?;
let files = target_torrent.iter_files().collect_vec();
assert!(!files.is_empty());
let first_file = files[0];
assert_eq!(
first_file.get_name(),
r#"[Nekomoe kissaten&LoliHouse] Boku no Kokoro no Yabai Yatsu - 20 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#
);
let test_tag = format!("test_tag_{}", chrono::Utc::now().timestamp());
downloader
.add_torrent_tags(
vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()],
vec![test_tag.clone()],
)
.await?;
let target_torrent = get_torrent().await?;
assert!(target_torrent.get_tags().iter().any(|s| s == &test_tag));
let test_category = format!("test_category_{}", chrono::Utc::now().timestamp());
downloader
.set_torrents_category(
vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()],
&test_category,
)
.await?;
let target_torrent = get_torrent().await?;
assert_eq!(Some(test_category.as_str()), target_torrent.get_category());
let moved_save_path = base_save_path.join(format!(
"moved_test_add_torrents_{}",
chrono::Utc::now().timestamp()
));
downloader
.move_torrents(
vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()],
moved_save_path.as_str(),
)
.await?;
let target_torrent = get_torrent().await?;
let content_path = target_torrent.iter_files().next().unwrap().get_name();
let new_content_path = &format!("new_{}", content_path);
downloader
.rename_torrent_file(
"47ee2d69e7f19af783ad896541a07b012676f858",
content_path,
new_content_path,
)
.await?;
let target_torrent = get_torrent().await?;
let content_path = target_torrent.iter_files().next().unwrap().get_name();
assert_eq!(content_path, new_content_path);
downloader
.delete_torrents(vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()])
.await?;
let torrent_infos1 = downloader
.get_torrents_info(TorrentFilter::All, None, None)
.await?;
assert!(torrent_infos1.is_empty());
Ok(())
}
}
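
For orientation, a minimal usage sketch of how the polling helpers above compose; the `downloader` value and info hash are illustrative, not part of this diff:

// A usage sketch, assuming an already-constructed `QBittorrentDownloader`
// and an illustrative info hash.
async fn wait_for_torrent_to_appear(
    downloader: &QBittorrentDownloader,
    hash: &str,
) -> color_eyre::eyre::Result<()> {
    downloader
        .wait_torrents_until(
            GetTorrentListArg::builder().hashes(hash.to_string()).build(),
            // Stop polling once the hash shows up in the list; this is the
            // inverse of the emptiness check used by `delete_torrents`.
            |torrents| !torrents.is_empty(),
            None,
        )
        .await
}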

View File

@ -1,16 +1,277 @@
use std::borrow::Cow;
use std::{borrow::Cow, sync::Arc};
use async_trait::async_trait;
use async_stream::stream;
use futures::{Stream, StreamExt, pin_mut};
use serde::{Serialize, de::DeserializeOwned};
use tokio::sync::{RwLock, mpsc};
use crate::{app::AppContext, errors::RResult};
use crate::{
app::AppContextTrait,
errors::app_error::{RError, RResult},
models,
};
pub struct TaskVars {}
#[async_trait]
pub trait Task: Send + Sync {
fn task_name() -> Cow<'static, str>;
fn task_id(&self) -> &str;
async fn run(&self, app_context: &AppContext, vars: &TaskVars) -> RResult<()>;
pub struct TaskMeta {
pub subscriber_id: i32,
pub task_id: i32,
pub task_kind: Cow<'static, str>,
}
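/// An unbounded broadcast-style channel that buffers every value it has seen
/// and replays the full history to each new subscriber before forwarding live
/// values; `StandardStreamTaskReplayLayout` below uses it to let late
/// consumers resume a running stream task.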
pub struct ReplayChannel<T: Send + Sync + Clone + 'static> {
sender: mpsc::UnboundedSender<T>,
channels: Arc<RwLock<Vec<mpsc::UnboundedSender<T>>>>,
buffer: Arc<RwLock<Vec<T>>>,
}
impl<T: Send + Sync + Clone + 'static> ReplayChannel<T> {
pub fn new(history: Vec<T>) -> Self {
let (tx, mut rx) = mpsc::unbounded_channel::<T>();
let channels = Arc::new(RwLock::new(Vec::<mpsc::UnboundedSender<T>>::new()));
let buffer = Arc::new(RwLock::new(history));
{
let channels = channels.clone();
let buffer = buffer.clone();
tokio::spawn(async move {
loop {
match rx.recv().await {
Some(value) => {
let mut w = buffer.write().await;
let senders = channels.read().await;
for s in senders.iter() {
if !s.is_closed() {
if let Err(err) = s.send(value.clone()) {
tracing::error!(err = %err, "replay-channel failed to broadcast value to a subscriber");
}
}
}
w.push(value);
}
None => {
drop(rx);
let mut cs = channels.write().await;
cs.clear();
break;
}
}
}
});
}
Self {
sender: tx,
channels,
buffer,
}
}
pub fn sender(&self) -> &mpsc::UnboundedSender<T> {
&self.sender
}
pub async fn receiver(&self) -> mpsc::UnboundedReceiver<T> {
let (tx, rx) = mpsc::unbounded_channel();
let items = self.buffer.read().await;
for item in items.iter() {
if let Err(err) = tx.send(item.clone()) {
tracing::error!(err = %err, "replay-channel failed to replay a buffered value to a new subscriber");
}
}
if !self.sender.is_closed() {
let mut sw = self.channels.write().await;
sw.push(tx);
}
rx
}
pub async fn close(&self) {
let mut senders = self.channels.write().await;
senders.clear();
}
}
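/// Core identity of a stream task: a stable id/kind plus (de)serializable
/// request and item types; the replay and runner traits below build on this.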
pub trait StreamTaskCoreTrait: Sized {
type Request: Serialize + DeserializeOwned;
type Item: Serialize + DeserializeOwned;
fn task_id(&self) -> i32;
fn task_kind(&self) -> &str;
fn new(meta: TaskMeta, request: Self::Request) -> Self;
fn request(&self) -> &Self::Request;
}
pub trait StreamTaskReplayLayoutTrait: StreamTaskCoreTrait {
fn history(&self) -> &[Arc<RResult<Self::Item>>];
fn resume_from_model(
task: models::tasks::Model,
stream_items: Vec<models::task_stream_item::Model>,
) -> RResult<Self>;
fn running_receiver(
&self,
) -> impl Future<Output = Option<mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>>>;
#[allow(clippy::type_complexity)]
fn init_receiver(
&self,
) -> impl Future<
Output = (
mpsc::UnboundedSender<Arc<RResult<Self::Item>>>,
mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>,
),
>;
fn serialize_request(request: Self::Request) -> RResult<serde_json::Value> {
serde_json::to_value(request).map_err(RError::from)
}
fn serialize_item(item: RResult<Self::Item>) -> RResult<serde_json::Value> {
serde_json::to_value(item).map_err(RError::from)
}
fn deserialize_request(request: serde_json::Value) -> RResult<Self::Request> {
serde_json::from_value(request).map_err(RError::from)
}
fn deserialize_item(item: serde_json::Value) -> RResult<RResult<Self::Item>> {
serde_json::from_value(item).map_err(RError::from)
}
}
pub trait StreamTaskRunnerTrait: StreamTaskCoreTrait {
fn run(
context: Arc<dyn AppContextTrait>,
request: &Self::Request,
history: &[Arc<RResult<Self::Item>>],
) -> impl Stream<Item = RResult<Self::Item>>;
}
pub trait StreamTaskReplayRunnerTrait: StreamTaskRunnerTrait + StreamTaskReplayLayoutTrait {
fn run_shared(
&self,
context: Arc<dyn AppContextTrait>,
) -> impl Stream<Item = Arc<RResult<Self::Item>>> {
stream! {
if let Some(mut receiver) = self.running_receiver().await {
while let Some(item) = receiver.recv().await {
yield item
}
} else {
let (tx, _) = self.init_receiver().await;
let stream = Self::run(context, self.request(), self.history());
pin_mut!(stream);
while let Some(item) = stream.next().await {
let item = Arc::new(item);
if let Err(err) = tx.send(item.clone()) {
tracing::error!(task_id = self.task_id(), task_kind = self.task_kind(), err = %err, "run_shared failed to forward an item to the replay channel");
}
yield item
}
};
}
}
}
pub struct StandardStreamTaskReplayLayout<Request, Item>
where
Request: Serialize + DeserializeOwned,
Item: Serialize + DeserializeOwned + Sync + Send + 'static,
{
pub meta: TaskMeta,
pub request: Request,
pub history: Vec<Arc<RResult<Item>>>,
#[allow(clippy::type_complexity)]
pub channel: Arc<RwLock<Option<ReplayChannel<Arc<RResult<Item>>>>>>,
}
impl<Request, Item> StreamTaskCoreTrait for StandardStreamTaskReplayLayout<Request, Item>
where
Request: Serialize + DeserializeOwned,
Item: Serialize + DeserializeOwned + Sync + Send + 'static,
{
type Request = Request;
type Item = Item;
fn task_id(&self) -> i32 {
self.meta.task_id
}
fn request(&self) -> &Self::Request {
&self.request
}
fn task_kind(&self) -> &str {
&self.meta.task_kind
}
fn new(meta: TaskMeta, request: Self::Request) -> Self {
Self {
meta,
request,
history: vec![],
channel: Arc::new(RwLock::new(None)),
}
}
}
impl<Request, Item> StreamTaskReplayLayoutTrait for StandardStreamTaskReplayLayout<Request, Item>
where
Request: Serialize + DeserializeOwned,
Item: Serialize + DeserializeOwned + Sync + Send + 'static,
{
fn history(&self) -> &[Arc<RResult<Self::Item>>] {
&self.history
}
fn resume_from_model(
task: models::tasks::Model,
stream_items: Vec<models::task_stream_item::Model>,
) -> RResult<Self> {
Ok(Self {
meta: TaskMeta {
task_id: task.id,
subscriber_id: task.subscriber_id,
task_kind: Cow::Owned(task.task_type),
},
request: Self::deserialize_request(task.request_data)?,
history: stream_items
.into_iter()
.map(|m| Self::deserialize_item(m.item).map(Arc::new))
.collect::<RResult<Vec<_>>>()?,
channel: Arc::new(RwLock::new(None)),
})
}
async fn running_receiver(&self) -> Option<mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>> {
if let Some(channel) = self.channel.read().await.as_ref() {
Some(channel.receiver().await)
} else {
None
}
}
async fn init_receiver(
&self,
) -> (
mpsc::UnboundedSender<Arc<RResult<Self::Item>>>,
mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>,
) {
let channel = ReplayChannel::new(self.history.clone());
let rx = channel.receiver().await;
let sender = channel.sender().clone();
{
let mut w = self.channel.write().await;
*w = Some(channel);
}
(sender, rx)
}
}
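
A minimal sketch of the replay semantics under a tokio test runtime; the test itself is illustrative and not part of this diff:

#[tokio::test]
async fn replay_channel_replays_history_then_broadcasts() {
    // Seed the channel with history, subscribe, then send a live value.
    let channel = ReplayChannel::<i32>::new(vec![1, 2]);
    let mut rx = channel.receiver().await;
    channel.sender().send(3).expect("sender should be open");
    assert_eq!(rx.recv().await, Some(1)); // replayed from history
    assert_eq!(rx.recv().await, Some(2)); // replayed from history
    assert_eq!(rx.recv().await, Some(3)); // forwarded live broadcast
}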

View File

@ -1,49 +0,0 @@
use std::borrow::Cow;
use futures::{TryStreamExt, pin_mut};
use super::core::{Task, TaskVars};
use crate::{
app::AppContext,
errors::RResult,
extract::mikan::{
MikanAuthSecrecy, web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page,
},
};
#[derive(Debug)]
pub struct CreateMikanRSSFromMyBangumiTask {
pub subscriber_id: i32,
pub task_id: String,
pub auth_secrecy: MikanAuthSecrecy,
}
#[async_trait::async_trait]
impl Task for CreateMikanRSSFromMyBangumiTask {
fn task_name() -> Cow<'static, str> {
Cow::Borrowed("create-mikan-rss-from-my-bangumi")
}
fn task_id(&self) -> &str {
&self.task_id
}
async fn run(&self, app_context: &AppContext, _vars: &TaskVars) -> RResult<()> {
let mikan_client = app_context
.mikan
.fork_with_auth(self.auth_secrecy.clone())?;
{
let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
&mikan_client,
mikan_client.base_url().join("/Home/MyBangumi")?,
);
pin_mut!(bangumi_metas);
let _bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
}
Ok(())
}
}

View File

@ -0,0 +1,37 @@
use std::sync::Arc;
use futures::Stream;
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
app::AppContextTrait,
errors::app_error::RResult,
extract::mikan::{MikanAuthSecrecy, MikanBangumiMeta, web_extract},
tasks::core::{StandardStreamTaskReplayLayout, StreamTaskRunnerTrait},
};
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExtractMikanBangumisMetaFromMyBangumiRequest {
pub my_bangumi_page_url: Url,
pub auth_secrecy: Option<MikanAuthSecrecy>,
}
pub type ExtractMikanBangumisMetaFromMyBangumiTask =
StandardStreamTaskReplayLayout<ExtractMikanBangumisMetaFromMyBangumiRequest, MikanBangumiMeta>;
impl StreamTaskRunnerTrait for ExtractMikanBangumisMetaFromMyBangumiTask {
fn run(
context: Arc<dyn AppContextTrait>,
request: &Self::Request,
history: &[Arc<RResult<Self::Item>>],
) -> impl Stream<Item = RResult<Self::Item>> {
web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page(
context,
request.my_bangumi_page_url.clone(),
request.auth_secrecy.clone(),
history,
)
}
}
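
A hedged consumption sketch; it assumes `StreamTaskReplayRunnerTrait` (with its default `run_shared`) is implemented for this task type, and the `ctx`/`task` values are illustrative:

// Drain a shared task stream, logging each replayed or live item.
async fn drain_task(
    ctx: Arc<dyn AppContextTrait>,
    task: &ExtractMikanBangumisMetaFromMyBangumiTask,
) {
    use futures::StreamExt;
    // Assumption: `StreamTaskReplayRunnerTrait` is implemented for this task.
    use crate::tasks::core::StreamTaskReplayRunnerTrait;
    let stream = task.run_shared(ctx);
    futures::pin_mut!(stream);
    while let Some(item) = stream.next().await {
        match item.as_ref() {
            Ok(_meta) => tracing::debug!("extracted one bangumi meta"),
            Err(error) => tracing::warn!(error = ?error, "extraction item failed"),
        }
    }
}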

View File

@ -0,0 +1 @@
pub mod extract_mikan_bangumis_meta_from_my_bangumi;

View File

@ -1,2 +1,4 @@
pub mod core;
pub mod create_mikan_bangumi_subscriptions_from_my_bangumi_page;
pub mod mikan;
pub mod service;
pub mod registry;

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,4 @@
#[derive(Debug)]
pub struct TaskService {}
impl TaskService {}

View File

@ -0,0 +1,62 @@
use typed_builder::TypedBuilder;
use crate::app::AppContextTrait;
#[derive(TypedBuilder)]
#[builder(field_defaults(default, setter(strip_option)))]
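// Every service field defaults to `None` and gets a `strip_option` setter, so
// tests construct only the services they actually touch; accessing an unset
// service panics via the `expect`s below.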
pub struct UnitTestAppContext {
logger: Option<crate::logger::LoggerService>,
db: Option<crate::database::DatabaseService>,
config: Option<crate::app::AppConfig>,
cache: Option<crate::cache::CacheService>,
mikan: Option<crate::extract::mikan::MikanClient>,
auth: Option<crate::auth::AuthService>,
graphql: Option<crate::graphql::GraphQLService>,
storage: Option<crate::storage::StorageService>,
#[builder(default = Some(String::from(env!("CARGO_MANIFEST_DIR"))))]
working_dir: Option<String>,
#[builder(default = crate::app::Environment::Testing, setter(!strip_option))]
environment: crate::app::Environment,
}
impl AppContextTrait for UnitTestAppContext {
fn logger(&self) -> &crate::logger::LoggerService {
self.logger.as_ref().expect("should set logger")
}
fn db(&self) -> &crate::database::DatabaseService {
self.db.as_ref().expect("should set db")
}
fn config(&self) -> &crate::app::AppConfig {
self.config.as_ref().expect("should set config")
}
fn cache(&self) -> &crate::cache::CacheService {
self.cache.as_ref().expect("should set cache")
}
fn mikan(&self) -> &crate::extract::mikan::MikanClient {
self.mikan.as_ref().expect("should set mikan")
}
fn auth(&self) -> &crate::auth::AuthService {
self.auth.as_ref().expect("should set auth")
}
fn graphql(&self) -> &crate::graphql::GraphQLService {
self.graphql.as_ref().expect("should set graphql")
}
fn storage(&self) -> &crate::storage::StorageService {
self.storage.as_ref().expect("should set storage")
}
fn environment(&self) -> &crate::app::Environment {
&self.environment
}
fn working_dir(&self) -> &String {
self.working_dir.as_ref().expect("should set working_dir")
}
}
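
A minimal sketch of the intended test usage; only the defaulted fields (`environment`, `working_dir`) are exercised here:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn builder_defaults_cover_environment_and_working_dir() {
        // No services configured: touching any accessor other than
        // `environment` or `working_dir` would panic via `expect`.
        let ctx = UnitTestAppContext::builder().build();
        assert!(matches!(
            ctx.environment(),
            crate::app::Environment::Testing
        ));
        assert!(!ctx.working_dir().is_empty());
    }
}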

View File

@ -1,8 +1,6 @@
use color_eyre::eyre;
use crate::{errors::app_error::RResult, fetch::HttpClient};
use crate::fetch::HttpClient;
pub fn build_testing_http_client() -> eyre::Result<HttpClient> {
pub fn build_testing_http_client() -> RResult<HttpClient> {
let mikan_client = HttpClient::default();
Ok(mikan_client)
}

View File

@ -1,7 +1,7 @@
use reqwest::IntoUrl;
use crate::{
errors::RResult,
errors::app_error::RResult,
extract::mikan::{MikanClient, MikanConfig},
fetch::HttpClientConfig,
};

View File

@ -1,3 +1,4 @@
pub mod app;
pub mod fetch;
pub mod mikan;
#[cfg(feature = "testcontainers")]

View File

@ -2,7 +2,8 @@ use async_trait::async_trait;
use bollard::container::ListContainersOptions;
use itertools::Itertools;
use testcontainers::{
core::logs::consumer::logging_consumer::LoggingConsumer, ContainerRequest, Image, ImageExt,
ContainerRequest, Image, ImageExt, TestcontainersError,
core::logs::consumer::logging_consumer::LoggingConsumer,
};
pub const TESTCONTAINERS_PROJECT_KEY: &str = "tech.enfw.testcontainers.project";
@ -19,7 +20,7 @@ where
container_label: &str,
prune: bool,
force: bool,
) -> color_eyre::eyre::Result<Self>;
) -> Result<Self, TestcontainersError>;
fn with_default_log_consumer(self) -> Self;
}
@ -34,7 +35,7 @@ where
container_label: &str,
prune: bool,
force: bool,
) -> color_eyre::eyre::Result<Self> {
) -> Result<Self, TestcontainersError> {
use std::collections::HashMap;
use bollard::container::PruneContainersOptions;
@ -61,7 +62,8 @@ where
filters: filters.clone(),
..Default::default()
}))
.await?;
.await
.map_err(|err| TestcontainersError::Other(Box::new(err)))?;
let remove_containers = result
.iter()
@ -74,16 +76,26 @@ where
.iter()
.map(|c| client.stop_container(c, None)),
)
.await?;
.await
.map_err(|error| TestcontainersError::Other(Box::new(error)))?;
tracing::warn!(name = "stop running containers", result = ?remove_containers);
if !remove_containers.is_empty() {
tracing::warn!(name = "stop running containers", result = ?remove_containers);
}
}
let result = client
.prune_containers(Some(PruneContainersOptions { filters }))
.await?;
.await
.map_err(|err| TestcontainersError::Other(Box::new(err)))?;
tracing::warn!(name = "prune existed containers", result = ?result);
if result
.containers_deleted
.as_ref()
.is_some_and(|c| !c.is_empty())
{
tracing::warn!(name = "prune existed containers", result = ?result);
}
}
let result = self.with_labels([

View File

@ -1,10 +1,10 @@
use tracing::Level;
use tracing_subscriber::EnvFilter;
pub fn init_testing_tracing(level: Level) {
pub fn try_init_testing_tracing(level: Level) {
let crate_name = env!("CARGO_PKG_NAME");
let level = level.as_str().to_lowercase();
let filter = EnvFilter::new(format!("{}[]={}", crate_name, level))
.add_directive(format!("mockito[]={}", level).parse().unwrap());
tracing_subscriber::fmt().with_env_filter(filter).init();
let _ = tracing_subscriber::fmt().with_env_filter(filter).try_init();
}
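
A minimal sketch of why the rename to `try_init` matters: repeated calls across tests no longer panic on the already-set global subscriber:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn double_initialization_does_not_panic() {
        // The second call's `try_init` returns Err and is discarded.
        try_init_testing_tracing(Level::DEBUG);
        try_init_testing_tracing(Level::DEBUG);
    }
}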

View File

@ -1,52 +0,0 @@
import type { Injector } from '@outposts/injection-js';
import {
// Link,
Outlet,
createRootRouteWithContext,
} from '@tanstack/react-router';
import { TanStackRouterDevtools } from '@tanstack/router-devtools';
import type { OidcSecurityService } from 'oidc-client-rx';
export type RouterContext =
| {
isAuthenticated: false;
injector: Injector;
oidcSecurityService: OidcSecurityService;
}
| {
isAuthenticated: true;
injector?: Injector;
oidcSecurityService?: OidcSecurityService;
};
export const Route = createRootRouteWithContext<RouterContext>()({
component: RootComponent,
});
function RootComponent() {
return (
<>
{/* <div className="flex gap-2 p-2 text-lg ">
<Link
to="/"
activeProps={{
className: 'font-bold',
}}
>
Home
</Link>{' '}
<Link
to="/graphql"
activeProps={{
className: 'font-bold',
}}
>
GraphQL
</Link>
</div> */}
{/* <hr /> */}
<Outlet />
<TanStackRouterDevtools position="bottom-right" />
</>
);
}

View File

@ -2,19 +2,23 @@ use std::{borrow::Cow, sync::Arc};
use axum::Router;
use crate::app::AppContext;
use crate::app::AppContextTrait;
pub trait ControllerTrait: Sized {
fn apply_to(self, router: Router<Arc<AppContext>>) -> Router<Arc<AppContext>>;
fn apply_to(self, router: Router<Arc<dyn AppContextTrait>>)
-> Router<Arc<dyn AppContextTrait>>;
}
pub struct PrefixController {
prefix: Cow<'static, str>,
router: Router<Arc<AppContext>>,
router: Router<Arc<dyn AppContextTrait>>,
}
impl PrefixController {
pub fn new(prefix: impl Into<Cow<'static, str>>, router: Router<Arc<AppContext>>) -> Self {
pub fn new(
prefix: impl Into<Cow<'static, str>>,
router: Router<Arc<dyn AppContextTrait>>,
) -> Self {
Self {
prefix: prefix.into(),
router,
@ -23,7 +27,10 @@ impl PrefixController {
}
impl ControllerTrait for PrefixController {
fn apply_to(self, router: Router<Arc<AppContext>>) -> Router<Arc<AppContext>> {
fn apply_to(
self,
router: Router<Arc<dyn AppContextTrait>>,
) -> Router<Arc<dyn AppContextTrait>> {
router.nest(&self.prefix, self.router)
}
}
@ -35,14 +42,17 @@ pub enum Controller {
impl Controller {
pub fn from_prefix(
prefix: impl Into<Cow<'static, str>>,
router: Router<Arc<AppContext>>,
router: Router<Arc<dyn AppContextTrait>>,
) -> Self {
Self::Prefix(PrefixController::new(prefix, router))
}
}
impl ControllerTrait for Controller {
fn apply_to(self, router: Router<Arc<AppContext>>) -> Router<Arc<AppContext>> {
fn apply_to(
self,
router: Router<Arc<dyn AppContextTrait>>,
) -> Router<Arc<dyn AppContextTrait>> {
match self {
Self::Prefix(p) => p.apply_to(router),
}

View File

@ -1,36 +0,0 @@
import { type Fetcher, createGraphiQLFetcher } from '@graphiql/toolkit';
import { createFileRoute } from '@tanstack/react-router';
import GraphiQL from 'graphiql';
import { useMemo } from 'react';
import { firstValueFrom } from 'rxjs';
import { beforeLoadGuard } from '../../../auth/guard';
import { useAuth } from '../../../auth/hooks';
import 'graphiql/graphiql.css';
export const Route = createFileRoute('/graphql/')({
component: RouteComponent,
beforeLoad: beforeLoadGuard,
});
function RouteComponent() {
const { oidcSecurityService } = useAuth();
const fetcher = useMemo(
(): Fetcher => async (props) => {
const accessToken = oidcSecurityService
? await firstValueFrom(oidcSecurityService.getAccessToken())
: undefined;
return createGraphiQLFetcher({
url: '/api/graphql',
headers: accessToken
? {
Authorization: `Bearer ${accessToken}`,
}
: undefined,
})(props);
},
[oidcSecurityService]
);
return <GraphiQL fetcher={fetcher} className="!h-svh" />;
}

View File

@ -5,19 +5,19 @@ use axum::{Extension, Router, extract::State, middleware::from_fn_with_state, ro
use super::core::Controller;
use crate::{
app::AppContext,
app::AppContextTrait,
auth::{AuthUserInfo, header_www_authenticate_middleware},
errors::RResult,
errors::app_error::RResult,
};
pub const CONTROLLER_PREFIX: &str = "/api/graphql";
async fn graphql_handler(
State(ctx): State<Arc<AppContext>>,
State(ctx): State<Arc<dyn AppContextTrait>>,
Extension(auth_user_info): Extension<AuthUserInfo>,
req: GraphQLRequest,
) -> GraphQLResponse {
let graphql_service = &ctx.graphql;
let graphql_service = ctx.graphql();
let mut req = req.into_inner();
req = req.data(auth_user_info);
@ -25,8 +25,8 @@ async fn graphql_handler(
graphql_service.schema.execute(req).await.into()
}
pub async fn create(ctx: Arc<AppContext>) -> RResult<Controller> {
let router = Router::<Arc<AppContext>>::new()
pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RResult<Controller> {
let router = Router::<Arc<dyn AppContextTrait>>::new()
.route("/", post(graphql_handler))
.layer(from_fn_with_state(ctx, header_www_authenticate_middleware));
Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))

Some files were not shown because too many files have changed in this diff.