Compare commits

No commits in common. "a3609696c7f930c754d6d6e99aea2aeace061a6e" and "6e4c13661465561c947e0d2438ecab3b8dc685e4" have entirely different histories.

a3609696c7...6e4c136614
@@ -1,4 +1,5 @@
 [alias]
+recorder = "run -p recorder --bin recorder_cli -- --environment development"
 recorder-playground = "run -p recorder --example playground -- --environment development"
 
 [build]
107 .github.bk/workflows/ci.yaml Normal file
@@ -0,0 +1,107 @@
+name: CI
+on:
+  push:
+    branches:
+      - master
+      - main
+  pull_request:
+
+env:
+  RUST_TOOLCHAIN: stable
+  TOOLCHAIN_PROFILE: minimal
+
+jobs:
+  rustfmt:
+    name: Check Style
+    runs-on: ubuntu-latest
+
+    permissions:
+      contents: read
+
+    steps:
+      - name: Checkout the code
+        uses: actions/checkout@v4
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: ${{ env.TOOLCHAIN_PROFILE }}
+          toolchain: ${{ env.RUST_TOOLCHAIN }}
+          override: true
+          components: rustfmt
+      - name: Run cargo fmt
+        uses: actions-rs/cargo@v1
+        with:
+          command: fmt
+          args: --all -- --check
+
+  clippy:
+    name: Run Clippy
+    runs-on: ubuntu-latest
+
+    permissions:
+      contents: read
+
+    steps:
+      - name: Checkout the code
+        uses: actions/checkout@v4
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: ${{ env.TOOLCHAIN_PROFILE }}
+          toolchain: ${{ env.RUST_TOOLCHAIN }}
+          override: true
+      - name: Setup Rust cache
+        uses: Swatinem/rust-cache@v2
+      - name: Run cargo clippy
+        uses: actions-rs/cargo@v1
+        with:
+          command: clippy
+          args: --all-features -- -D warnings -W clippy::pedantic -W clippy::nursery -W rust-2018-idioms
+
+  test:
+    name: Run Tests
+    runs-on: ubuntu-latest
+
+    permissions:
+      contents: read
+
+    services:
+      redis:
+        image: redis
+        options: >-
+          --health-cmd "redis-cli ping"
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - "6379:6379"
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_DB: postgress_test
+          POSTGRES_USER: postgress
+          POSTGRES_PASSWORD: postgress
+        ports:
+          - "5432:5432"
+        # Set health checks to wait until postgres has started
+        options: --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+
+    steps:
+      - name: Checkout the code
+        uses: actions/checkout@v4
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: ${{ env.TOOLCHAIN_PROFILE }}
+          toolchain: ${{ env.RUST_TOOLCHAIN }}
+          override: true
+      - name: Setup Rust cache
+        uses: Swatinem/rust-cache@v2
+      - name: Run cargo test
+        uses: actions-rs/cargo@v1
+        with:
+          command: test
+          args: --all-features --all
+        env:
+          REDIS_URL: redis://localhost:${{job.services.redis.ports[6379]}}
+          DATABASE_URL: postgres://postgress:postgress@localhost:5432/postgress_test
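The test job exports `REDIS_URL` and `DATABASE_URL` to the `cargo test` step. A minimal, hypothetical sketch of how a Rust test binary might read them, falling back to the workflow's service values when run locally (only `std::env` is assumed; this is not code from the repo):

```rust
use std::env;

/// Reads a service URL exported by the CI workflow, falling back to a
/// local default so the same test also runs outside of GitHub Actions.
fn service_url(key: &str, default: &str) -> String {
    env::var(key).unwrap_or_else(|_| default.to_string())
}

fn main() {
    let redis = service_url("REDIS_URL", "redis://localhost:6379");
    let postgres = service_url(
        "DATABASE_URL",
        "postgres://postgress:postgress@localhost:5432/postgress_test",
    );
    println!("redis: {redis}, postgres: {postgres}");
}
```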
@@ -1,36 +0,0 @@
-name: Testing Torrents Container
-
-on:
-  workflow_dispatch:
-
-env:
-  REGISTRY: ghcr.io
-  ORG: dumtruck
-  PROJECT: konobangu
-
-jobs:
-  build-container:
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      - name: Log in to GHCR
-        uses: docker/login-action@v3
-        with:
-          registry: ${{ env.REGISTRY }}
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build and push Docker image
-        uses: docker/build-push-action@v5
-        with:
-          context: 'packages/testing-torrents'
-          file: 'packages/testing-torrents/Dockerfile'
-          push: true
-          tags: '${{ env.REGISTRY }}/${{ env.ORG }}/${{ env.PROJECT }}-testing-torrents:latest'
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
6 .gitignore vendored
@@ -158,8 +158,11 @@ web_modules/
 .yarn-integrity
 
 # Local env files
+.env
 .env.local
-.env.*.local
+.env.development.local
+.env.test.local
+.env.production.local
 
 # parcel-bundler cache (https://parceljs.org/)
 .cache
@@ -216,7 +219,6 @@ index.d.ts.map
 # Added by cargo
 
 /target
-/ide-target
 !/examples/.gitkeep
 /.env
 /.env.bk
1 .vscode/settings.json vendored
@@ -27,6 +27,7 @@
   },
   "emmet.showExpandedAbbreviation": "never",
   "prettier.enable": false,
+  "tailwindCSS.experimental.configFile": "./packages/tailwind-config/config.ts",
   "typescript.tsdk": "node_modules/typescript/lib",
   "rust-analyzer.cargo.features": ["testcontainers"]
 }
1498 Cargo.lock generated
File diff suppressed because it is too large.
@@ -5,6 +5,8 @@
 }
 ```
 
-^https://konobangu.com/api*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/api$1
-^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000/$1 excludeFilter://^https://konobangu.com/api***
-^wss://konobangu.com/*** reqHeaders://{x-forwarded.json} ws://127.0.0.1:5000/$1 excludeFilter://^wss://konobangu.com/api
+^https://konobangu.com/api/playground*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5002/api/playground$1
+^wss://konobangu.com/api/playground*** reqHeaders://{x-forwarded.json} ws://127.0.0.1:5002/api/playground$1
+^https://konobangu.com/api*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/api$1 excludeFilter://^^https://konobangu.com/api/playground***
+^https://konobangu.com*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000$1 excludeFilter://^https://konobangu.com/api***
+
@@ -1,8 +0,0 @@
-AUTH_TYPE = "basic" # or oidc
-BASIC_USER = "konobangu"
-BASIC_PASSWORD = "konobangu"
-# OIDC_ISSUER="https://auth.logto.io/oidc"
-# OIDC_API_AUDIENCE = "https://konobangu.com/api"
-# OIDC_CLIENT_ID = "client_id"
-# OIDC_CLIENT_SECRET = "client_secret" # optional
-# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
1 apps/recorder/.gitignore vendored
@@ -25,4 +25,3 @@ Cargo.lock
 # Dist
 node_modules
 dist/
-temp/
@@ -22,7 +22,6 @@ testcontainers = [
 ]
 
 [dependencies]
-
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"
 tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
@@ -49,6 +48,7 @@ reqwest = { version = "0.12", default-features = false, features = [
   "rustls-tls",
   "cookies",
 ] }
+thiserror = "2"
 rss = "2"
 bytes = "1.9"
 itertools = "0.14"
@@ -65,7 +65,7 @@ once_cell = "1.20.2"
 reqwest-middleware = "0.4.0"
 reqwest-retry = "0.7.0"
 reqwest-tracing = "0.5.5"
-scraper = "0.23"
+scraper = "0.22.0"
 leaky-bucket = "1.1.2"
 serde_with = "3"
 jwt-authorizer = "0.15.0"
@@ -83,10 +83,12 @@ testcontainers = { version = "0.23.3", features = [
   "reusable-containers",
 ], optional = true }
 testcontainers-modules = { version = "0.11.4", optional = true }
+color-eyre = "0.6"
 log = "0.4.22"
+anyhow = "1.0.95"
 bollard = { version = "0.18", optional = true }
-async-graphql = { version = "7", features = [] }
-async-graphql-axum = "7"
+async-graphql = { version = "7.0.15", features = [] }
+async-graphql-axum = "7.0.15"
 fastrand = "2.3.0"
 seaography = { version = "1.1" }
 quirks_path = "0.1.1"
@@ -103,6 +105,7 @@ tower-http = { version = "0.6", features = [
   "set-header",
   "compression-full",
 ] }
+serde_yaml = "0.9.34"
 tera = "1.20.0"
 openidconnect = { version = "4", features = ["rustls-tls"] }
 http-cache-reqwest = { version = "0.15", features = [
@@ -117,6 +120,8 @@ http-cache = { version = "0.20.0", features = [
 ], default-features = false }
 http-cache-semantics = "2.1.0"
 dotenv = "0.15.0"
+nom = "8.0.0"
+secrecy = { version = "0.10.3", features = ["serde"] }
 http = "1.2.0"
 cookie = "0.18.1"
 async-stream = "0.3.6"
@@ -125,17 +130,9 @@ tracing-appender = "0.2.3"
 clap = "4.5.31"
 futures-util = "0.3.31"
 ipnetwork = "0.21.1"
-librqbit = "8.0.0"
-typed-builder = "0.21.0"
-snafu = { version = "0.8.5", features = ["futures"] }
-anyhow = "1.0.97"
-serde_yaml = "0.9.34"
-merge-struct = "0.1.0"
-serde-value = "0.7.0"
 
 [dev-dependencies]
 serial_test = "3"
 insta = { version = "1", features = ["redactions", "yaml", "filters"] }
 mockito = "1.6.1"
-rstest = "0.25"
+rstest = "0.24.0"
-ctor = "0.4.0"
@@ -1,7 +1,14 @@
-use recorder::errors::app_error::RResult;
 // #![allow(unused_imports)]
+// use color_eyre::eyre::Context;
+// use itertools::Itertools;
+// use loco_rs::{
+//     app::Hooks,
+//     boot::{BootResult, StartMode},
+//     environment::Environment,
+//     prelude::AppContext as LocoContext,
+// };
 // use recorder::{
-//     app::{AppContext, AppContextTrait},
+//     app::{App1, AppContext},
 //     errors::RResult,
 //     migrations::Migrator,
 //     models::{
@@ -9,10 +16,10 @@ use recorder::errors::app_error::RResult;
 //         subscriptions::{self, SubscriptionCreateFromRssDto},
 //     },
 // };
-// use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
+// use sea_orm::ColumnTrait;
 // use sea_orm_migration::MigratorTrait;
 
-// async fn pull_mikan_bangumi_rss(ctx: &dyn AppContextTrait) -> RResult<()> {
+// async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> RResult<()> {
 //     let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
 
 //     // let rss_link =
@@ -20,7 +27,7 @@ use recorder::errors::app_error::RResult;
 //     let subscription = if let Some(subscription) =
 //         subscriptions::Entity::find()
 //             .filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
-//             .one(ctx.db())
+//             .one(&ctx.db)
 //             .await?
 //     {
 //         subscription
@@ -43,14 +50,19 @@ use recorder::errors::app_error::RResult;
 //     Ok(())
 // }
 
+// async fn init() -> RResult<LocoContext> {
+//     let ctx = loco_rs::cli::playground::<App1>().await?;
+//     let BootResult {
+//         app_context: ctx, ..
+//     } = loco_rs::boot::run_app::<App1>(&StartMode::ServerOnly, ctx).await?;
+//     Migrator::up(&ctx.db, None).await?;
+//     Ok(ctx)
+// }
 
 // #[tokio::main]
-// async fn main() -> RResult<()> {
+// async fn main() -> color_eyre::eyre::Result<()> {
 //     pull_mikan_bangumi_rss(&ctx).await?;
 
 //     Ok(())
 // }
-#[tokio::main]
-async fn main() -> RResult<()> {
-    Ok(())
-}
+fn main() {}
24 apps/recorder/package.json Normal file
@@ -0,0 +1,24 @@
+{
+  "name": "recorder",
+  "version": "1.0.0",
+  "type": "module",
+  "scripts": {
+    "dev": "rsbuild dev",
+    "build": "rsbuild build",
+    "preview": "rsbuild preview"
+  },
+  "dependencies": {
+    "@graphiql/react": "^0.28.2",
+    "@graphiql/toolkit": "^0.11.1",
+    "graphiql": "^3.8.3",
+    "graphql-ws": "^6.0.4",
+    "observable-hooks": "^4.2.4",
+    "react": "^19.0.0",
+    "react-dom": "^19.0.0"
+  },
+  "devDependencies": {
+    "@rsbuild/plugin-react": "^1.1.1",
+    "@types/react": "^19.0.7",
+    "@types/react-dom": "^19.0.3"
+  }
+}
5 apps/recorder/postcss.config.mjs Normal file
@@ -0,0 +1,5 @@
+export default {
+  plugins: {
+    '@tailwindcss/postcss': {},
+  },
+};
7 apps/recorder/public/assets/404.html Normal file
@@ -0,0 +1,7 @@
+<html>
+
+<body>
+  not found :-(
+</body>
+
+</html>
BIN apps/recorder/public/assets/favicon.ico Normal file
Binary file not shown. Size: 15 KiB.
75 apps/recorder/rsbuild.config.ts Normal file
@@ -0,0 +1,75 @@
+import { defineConfig } from '@rsbuild/core';
+import { pluginReact } from '@rsbuild/plugin-react';
+import { TanStackRouterRspack } from '@tanstack/router-plugin/rspack';
+
+export default defineConfig({
+  plugins: [pluginReact()],
+  html: {
+    favicon: './public/assets/favicon.ico',
+    // tags: [
+    //   {
+    //     tag: 'script',
+    //     attrs: { src: 'https://cdn.tailwindcss.com' },
+    //   },
+    // ],
+  },
+  tools: {
+    rspack: {
+      plugins: [TanStackRouterRspack()],
+    },
+  },
+  source: {
+    entry: {
+      index: './src/main.tsx',
+    },
+    define: {
+      'process.env.AUTH_TYPE': JSON.stringify(process.env.AUTH_TYPE),
+      'process.env.OIDC_CLIENT_ID': JSON.stringify(process.env.OIDC_CLIENT_ID),
+      'process.env.OIDC_CLIENT_SECRET': JSON.stringify(
+        process.env.OIDC_CLIENT_SECRET
+      ),
+      'process.env.OIDC_ISSUER': JSON.stringify(process.env.OIDC_ISSUER),
+      'process.env.OIDC_AUDIENCE': JSON.stringify(process.env.OIDC_AUDIENCE),
+      'process.env.OIDC_EXTRA_SCOPES': JSON.stringify(
+        process.env.OIDC_EXTRA_SCOPES
+      ),
+    },
+  },
+  dev: {
+    client: {
+      path: '/api/playground/rsbuild-hmr',
+    },
+    setupMiddlewares: [
+      (middlewares) => {
+        middlewares.unshift((req, res, next) => {
+          if (process.env.AUTH_TYPE === 'basic') {
+            res.setHeader('WWW-Authenticate', 'Basic realm="konobangu"');
+
+            const authorization =
+              (req.headers.authorization || '').split(' ')[1] || '';
+            const [user, password] = Buffer.from(authorization, 'base64')
+              .toString()
+              .split(':');
+
+            if (
+              user !== process.env.BASIC_USER ||
+              password !== process.env.BASIC_PASSWORD
+            ) {
+              res.statusCode = 401;
+              res.write('Unauthorized');
+              res.end();
+              return;
+            }
+          }
+          next();
+        });
+        return middlewares;
+      },
+    ],
+  },
+  server: {
+    base: '/api/playground/',
+    host: '0.0.0.0',
+    port: 5002,
+  },
+});
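The `setupMiddlewares` hook above implements HTTP Basic auth by hand: it base64-decodes the `Authorization` header and compares the `user:password` pair against `BASIC_USER`/`BASIC_PASSWORD`. The Rust side of the repo has its own `BasicAuthService`; as a rough sketch of the same decoding step in Rust (assuming the `base64` crate; not the service's actual code):

```rust
use base64::{Engine as _, engine::general_purpose::STANDARD};

/// Splits a "Basic <base64(user:password)>" header value into its parts.
fn decode_basic(header: &str) -> Option<(String, String)> {
    let encoded = header.strip_prefix("Basic ")?;
    let decoded = STANDARD.decode(encoded).ok()?;
    let text = String::from_utf8(decoded).ok()?;
    let (user, password) = text.split_once(':')?;
    Some((user.to_string(), password.to_string()))
}

fn main() {
    // "konobangu:konobangu" base64-encoded.
    let parts = decode_basic("Basic a29ub2Jhbmd1Omtvbm9iYW5ndQ==");
    assert_eq!(
        parts,
        Some(("konobangu".to_string(), "konobangu".to_string()))
    );
}
```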
@@ -3,7 +3,7 @@ use std::sync::Arc;
 use clap::{Parser, command};
 
 use super::{AppContext, core::App, env::Environment};
-use crate::{app::config::AppConfig, errors::app_error::RResult};
+use crate::{app::config::AppConfig, errors::RResult};
 
 #[derive(Parser, Debug)]
 #[command(version, about, long_about = None)]
@@ -29,7 +29,7 @@ pub struct AppBuilder {
     dotenv_file: Option<String>,
     config_file: Option<String>,
     working_dir: String,
-    environment: Environment,
+    enviornment: Environment,
 }
 
 impl AppBuilder {
@@ -70,21 +70,21 @@ impl AppBuilder {
 
     pub async fn build(self) -> RResult<App> {
         AppConfig::load_dotenv(
-            &self.environment,
+            &self.enviornment,
             &self.working_dir,
             self.dotenv_file.as_deref(),
         )
         .await?;
 
         let config = AppConfig::load_config(
-            &self.environment,
+            &self.enviornment,
             &self.working_dir,
             self.config_file.as_deref(),
         )
         .await?;
 
         let app_context = Arc::new(
-            AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?,
+            AppContext::new(self.enviornment.clone(), config, self.working_dir.clone()).await?,
         );
 
         Ok(App {
@@ -101,7 +101,7 @@ impl AppBuilder {
 
     pub fn environment(self, environment: Environment) -> Self {
         let mut ret = self;
-        ret.environment = environment;
+        ret.enviornment = environment;
         ret
     }
 
@@ -130,7 +130,7 @@ impl AppBuilder {
 impl Default for AppBuilder {
     fn default() -> Self {
         Self {
-            environment: Environment::Production,
+            enviornment: Environment::Production,
             dotenv_file: None,
             config_file: None,
             working_dir: String::from("."),
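Beyond the `environment`/`enviornment` spelling this compare flips, the hunks show `AppBuilder`'s consuming-setter idiom: `environment(self, ...)` takes the builder by value and returns it, so calls chain without `&mut`. A stripped-down sketch of that idiom (the two-field struct is illustrative, not the real `AppBuilder`):

```rust
/// Minimal stand-in for a builder with chainable, consuming setters.
#[derive(Debug, Default)]
struct Builder {
    environment: String,
    working_dir: String,
}

impl Builder {
    // Takes `self` by value and returns it, so calls can be chained.
    fn environment(self, environment: impl Into<String>) -> Self {
        let mut ret = self;
        ret.environment = environment.into();
        ret
    }

    fn working_dir(self, dir: impl Into<String>) -> Self {
        let mut ret = self;
        ret.working_dir = dir.into();
        ret
    }
}

fn main() {
    let b = Builder::default().environment("development").working_dir(".");
    println!("{b:?}");
}
```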
@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
 
 use super::env::Environment;
 use crate::{
-    auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::app_error::RResult,
+    auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::RResult,
     extract::mikan::MikanConfig, graphql::GraphQLConfig, logger::LoggerConfig,
     storage::StorageConfig, web::WebServerConfig,
 };
@@ -1,34 +1,21 @@
 use super::{Environment, config::AppConfig};
 use crate::{
-    auth::AuthService, cache::CacheService, database::DatabaseService, errors::app_error::RResult,
+    auth::AuthService, cache::CacheService, database::DatabaseService, errors::RResult,
     extract::mikan::MikanClient, graphql::GraphQLService, logger::LoggerService,
     storage::StorageService,
 };
 
-pub trait AppContextTrait: Send + Sync {
-    fn logger(&self) -> &LoggerService;
-    fn db(&self) -> &DatabaseService;
-    fn config(&self) -> &AppConfig;
-    fn cache(&self) -> &CacheService;
-    fn mikan(&self) -> &MikanClient;
-    fn auth(&self) -> &AuthService;
-    fn graphql(&self) -> &GraphQLService;
-    fn storage(&self) -> &StorageService;
-    fn working_dir(&self) -> &String;
-    fn environment(&self) -> &Environment;
-}
-
 pub struct AppContext {
-    logger: LoggerService,
-    db: DatabaseService,
-    config: AppConfig,
-    cache: CacheService,
-    mikan: MikanClient,
-    auth: AuthService,
-    graphql: GraphQLService,
-    storage: StorageService,
-    working_dir: String,
-    environment: Environment,
+    pub logger: LoggerService,
+    pub db: DatabaseService,
+    pub config: AppConfig,
+    pub cache: CacheService,
+    pub mikan: MikanClient,
+    pub auth: AuthService,
+    pub graphql: GraphQLService,
+    pub storage: StorageService,
+    pub working_dir: String,
+    pub environment: Environment,
 }
 
 impl AppContext {
@@ -61,35 +48,3 @@ impl AppContext {
         })
     }
 }
-impl AppContextTrait for AppContext {
-    fn logger(&self) -> &LoggerService {
-        &self.logger
-    }
-    fn db(&self) -> &DatabaseService {
-        &self.db
-    }
-    fn config(&self) -> &AppConfig {
-        &self.config
-    }
-    fn cache(&self) -> &CacheService {
-        &self.cache
-    }
-    fn mikan(&self) -> &MikanClient {
-        &self.mikan
-    }
-    fn auth(&self) -> &AuthService {
-        &self.auth
-    }
-    fn graphql(&self) -> &GraphQLService {
-        &self.graphql
-    }
-    fn storage(&self) -> &StorageService {
-        &self.storage
-    }
-    fn working_dir(&self) -> &String {
-        &self.working_dir
-    }
-    fn environment(&self) -> &Environment {
-        &self.environment
-    }
-}
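The left side of this hunk exposes every service through an object-safe `AppContextTrait` and hands out `Arc<dyn AppContextTrait>`; the right side drops the trait in favor of a plain `AppContext` with `pub` fields. A stripped-down sketch of the trait-object pattern being removed (two services instead of ten; the bodies are illustrative only):

```rust
use std::sync::Arc;

struct LoggerService;
struct CacheService;

// Object-safe accessor trait: callers depend on `dyn AppContextTrait`
// rather than the concrete struct, which keeps them swappable in tests.
trait AppContextTrait: Send + Sync {
    fn logger(&self) -> &LoggerService;
    fn cache(&self) -> &CacheService;
}

struct AppContext {
    logger: LoggerService,
    cache: CacheService,
}

impl AppContextTrait for AppContext {
    fn logger(&self) -> &LoggerService {
        &self.logger
    }
    fn cache(&self) -> &CacheService {
        &self.cache
    }
}

fn main() {
    let ctx: Arc<dyn AppContextTrait> = Arc::new(AppContext {
        logger: LoggerService,
        cache: CacheService,
    });
    let _ = ctx.logger();
    let _ = ctx.cache();
}
```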
@@ -4,9 +4,9 @@ use axum::Router;
 use futures::try_join;
 use tokio::signal;
 
-use super::{builder::AppBuilder, context::AppContextTrait};
+use super::{builder::AppBuilder, context::AppContext};
 use crate::{
-    errors::app_error::RResult,
+    errors::RResult,
     web::{
         controller::{self, core::ControllerTrait},
         middleware::default_middleware_stack,
@@ -14,7 +14,7 @@ use crate::{
 };
 
 pub struct App {
-    pub context: Arc<dyn AppContextTrait>,
+    pub context: Arc<AppContext>,
     pub builder: AppBuilder,
 }
 
@@ -25,22 +25,21 @@ impl App {
 
     pub async fn serve(&self) -> RResult<()> {
         let context = &self.context;
-        let config = context.config();
+        let config = &context.config;
         let listener = tokio::net::TcpListener::bind(&format!(
             "{}:{}",
             config.server.binding, config.server.port
         ))
         .await?;
 
-        let mut router = Router::<Arc<dyn AppContextTrait>>::new();
+        let mut router = Router::<Arc<AppContext>>::new();
 
-        let (graphql_c, oidc_c, metadata_c) = try_join!(
+        let (graphqlc, oidcc) = try_join!(
             controller::graphql::create(context.clone()),
             controller::oidc::create(context.clone()),
-            controller::metadata::create(context.clone())
         )?;
 
-        for c in [graphql_c, oidc_c, metadata_c] {
+        for c in [graphqlc, oidcc] {
             router = c.apply_to(router);
         }
 
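`serve` above parameterizes the `Router` by the shared context type (`Arc<dyn AppContextTrait>` on one side, `Arc<AppContext>` on the other) and threads it to controllers; either way the state reaches handlers through axum's `State` extractor. A minimal sketch of that pattern (axum 0.7-style `serve` and a hypothetical `/health` route assumed):

```rust
use std::sync::Arc;

use axum::{Router, extract::State, routing::get};

struct AppContext;

// The handler receives the shared context through axum's `State` extractor.
async fn health(State(_ctx): State<Arc<AppContext>>) -> &'static str {
    "ok"
}

#[tokio::main]
async fn main() {
    let ctx = Arc::new(AppContext);
    // `with_state` fixes the router's state type, yielding a `Router<()>`
    // that can be served directly.
    let router: Router = Router::new()
        .route("/health", get(health))
        .with_state(ctx);

    let listener = tokio::net::TcpListener::bind("127.0.0.1:5001")
        .await
        .unwrap();
    axum::serve(listener, router).await.unwrap();
}
```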
@@ -8,5 +8,5 @@ pub use core::App;
 
 pub use builder::AppBuilder;
 pub use config::AppConfig;
-pub use context::{AppContext, AppContextTrait};
+pub use context::AppContext;
 pub use env::Environment;
@ -9,7 +9,7 @@ use super::{
|
|||||||
service::{AuthServiceTrait, AuthUserInfo},
|
service::{AuthServiceTrait, AuthUserInfo},
|
||||||
};
|
};
|
||||||
use crate::{
|
use crate::{
|
||||||
app::AppContextTrait,
|
app::AppContext,
|
||||||
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
|
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -64,7 +64,7 @@ pub struct BasicAuthService {
|
|||||||
impl AuthServiceTrait for BasicAuthService {
|
impl AuthServiceTrait for BasicAuthService {
|
||||||
async fn extract_user_info(
|
async fn extract_user_info(
|
||||||
&self,
|
&self,
|
||||||
ctx: &dyn AppContextTrait,
|
ctx: &AppContext,
|
||||||
request: &mut Parts,
|
request: &mut Parts,
|
||||||
) -> Result<AuthUserInfo, AuthError> {
|
) -> Result<AuthUserInfo, AuthError> {
|
||||||
if let Ok(AuthBasic {
|
if let Ok(AuthBasic {
|
||||||
|
@@ -1,7 +1,6 @@
 import { LogLevel, type OpenIdConfiguration } from 'oidc-client-rx';
 
 export const isBasicAuth = process.env.AUTH_TYPE === 'basic';
-export const isOidcAuth = process.env.AUTH_TYPE === 'oidc';
 
 export function buildOidcConfig(): OpenIdConfiguration {
   const origin = window.location.origin;
@@ -10,8 +9,8 @@ export function buildOidcConfig(): OpenIdConfiguration {
 
   return {
     authority: process.env.OIDC_ISSUER!,
-    redirectUrl: `${origin}/auth/oidc/callback`,
-    postLogoutRedirectUri: `${origin}/`,
+    redirectUrl: `${origin}/api/playground/oidc/callback`,
+    postLogoutRedirectUri: `${origin}/api/playground`,
     clientId: process.env.OIDC_CLIENT_ID!,
     clientSecret: process.env.OIDC_CLIENT_SECRET,
     scope: process.env.OIDC_EXTRA_SCOPES
@@ -1,3 +1,5 @@
+use std::fmt;
+
 use async_graphql::dynamic::ResolverContext;
 use axum::{
     Json,
@@ -9,86 +11,72 @@ use openidconnect::{
     StandardErrorResponse, core::CoreErrorResponseType,
 };
 use serde::{Deserialize, Serialize};
-use snafu::prelude::*;
+use thiserror::Error;
 
 use crate::{fetch::HttpClientError, models::auth::AuthType};
 
-#[derive(Debug, Snafu)]
-#[snafu(visibility(pub(crate)))]
+#[derive(Debug, Error)]
 pub enum AuthError {
-    #[snafu(display("Not support auth method"))]
+    #[error("Not support auth method")]
     NotSupportAuthMethod {
         supported: Vec<AuthType>,
         current: AuthType,
     },
-    #[snafu(display("Failed to find auth record"))]
+    #[error("Failed to find auth record")]
     FindAuthRecordError,
-    #[snafu(display("Invalid credentials"))]
+    #[error("Invalid credentials")]
    BasicInvalidCredentials,
-    #[snafu(transparent)]
-    OidcInitError {
-        source: jwt_authorizer::error::InitError,
-    },
-    #[snafu(display("Invalid oidc provider meta client error: {source}"))]
-    OidcProviderHttpClientError { source: HttpClientError },
-    #[snafu(transparent)]
-    OidcProviderMetaError {
-        source: openidconnect::DiscoveryError<HttpClientError>,
-    },
-    #[snafu(display("Invalid oidc provider URL: {source}"))]
-    OidcProviderUrlError { source: url::ParseError },
-    #[snafu(display("Invalid oidc redirect URI: {source}"))]
-    OidcRequestRedirectUriError {
-        #[snafu(source)]
-        source: url::ParseError,
-    },
-    #[snafu(display("Oidc request session not found or expired"))]
+    #[error(transparent)]
+    OidcInitError(#[from] jwt_authorizer::error::InitError),
+    #[error("Invalid oidc provider meta client error: {0}")]
+    OidcProviderHttpClientError(HttpClientError),
+    #[error(transparent)]
+    OidcProviderMetaError(#[from] openidconnect::DiscoveryError<HttpClientError>),
+    #[error("Invalid oidc provider URL: {0}")]
+    OidcProviderUrlError(url::ParseError),
+    #[error("Invalid oidc redirect URI: {0}")]
+    OidcRequestRedirectUriError(url::ParseError),
+    #[error("Oidc request session not found or expired")]
     OidcCallbackRecordNotFoundOrExpiredError,
-    #[snafu(display("Invalid oidc request callback nonce"))]
+    #[error("Invalid oidc request callback nonce")]
     OidcInvalidNonceError,
-    #[snafu(display("Invalid oidc request callback state"))]
+    #[error("Invalid oidc request callback state")]
     OidcInvalidStateError,
-    #[snafu(display("Invalid oidc request callback code"))]
+    #[error("Invalid oidc request callback code")]
     OidcInvalidCodeError,
-    #[snafu(transparent)]
-    OidcCallbackTokenConfigurationError { source: ConfigurationError },
-    #[snafu(transparent)]
-    OidcRequestTokenError {
-        source: RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
-    },
-    #[snafu(display("Invalid oidc id token"))]
+    #[error(transparent)]
+    OidcCallbackTokenConfigrationError(#[from] ConfigurationError),
+    #[error(transparent)]
+    OidcRequestTokenError(
+        #[from] RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
+    ),
+    #[error("Invalid oidc id token")]
     OidcInvalidIdTokenError,
-    #[snafu(display("Invalid oidc access token"))]
+    #[error("Invalid oidc access token")]
     OidcInvalidAccessTokenError,
-    #[snafu(transparent)]
-    OidcSignatureVerificationError { source: SignatureVerificationError },
-    #[snafu(transparent)]
-    OidcSigningError { source: SigningError },
-    #[snafu(transparent)]
-    OidcJwtAuthError { source: jwt_authorizer::AuthError },
-    #[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
+    #[error(transparent)]
+    OidcSignatureVerificationError(#[from] SignatureVerificationError),
+    #[error(transparent)]
+    OidcSigningError(#[from] SigningError),
+    #[error(transparent)]
+    OidcJwtAuthError(#[from] jwt_authorizer::AuthError),
+    #[error("Extra scopes {expected} do not match found scopes {found}")]
     OidcExtraScopesMatchError { expected: String, found: String },
-    #[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
+    #[error("Extra claim {key} does not match expected value {expected}, found {found}")]
     OidcExtraClaimMatchError {
         key: String,
         expected: String,
         found: String,
     },
-    #[snafu(display("Extra claim {claim} missing"))]
-    OidcExtraClaimMissingError { claim: String },
-    #[snafu(display("Audience {aud} missing"))]
-    OidcAudMissingError { aud: String },
-    #[snafu(display("Subject missing"))]
+    #[error("Extra claim {0} missing")]
+    OidcExtraClaimMissingError(String),
+    #[error("Audience {0} missing")]
+    OidcAudMissingError(String),
+    #[error("Subject missing")]
     OidcSubMissingError,
-    #[snafu(display(
-        "GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
-        (if field.is_empty() { "" } else { "." }),
-        (if column.is_empty() { "" } else { "." }),
-        source.message
-    ))]
+    #[error(fmt = display_graphql_permission_error)]
     GraphQLPermissionError {
-        #[snafu(source(false))]
-        source: Box<async_graphql::Error>,
+        inner_error: async_graphql::Error,
         field: String,
         column: String,
         context_path: String,
@@ -97,13 +85,13 @@ pub enum AuthError {
 
 impl AuthError {
     pub fn from_graphql_subscribe_id_guard(
-        source: async_graphql::Error,
+        inner_error: async_graphql::Error,
         context: &ResolverContext,
         field_name: &str,
         column_name: &str,
     ) -> AuthError {
         AuthError::GraphQLPermissionError {
-            source: Box::new(source),
+            inner_error,
             field: field_name.to_string(),
             column: column_name.to_string(),
             context_path: context
@@ -115,27 +103,39 @@ impl AuthError {
     }
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct AuthErrorResponse {
-    pub success: bool,
-    pub message: String,
+fn display_graphql_permission_error(
+    inner_error: &async_graphql::Error,
+    field: &String,
+    column: &String,
+    context_path: &String,
+    formatter: &mut fmt::Formatter<'_>,
+) -> fmt::Result {
+    write!(
+        formatter,
+        "GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
+        (if field.is_empty() { "" } else { "." }),
+        (if column.is_empty() { "" } else { "." }),
+        inner_error.message
+    )
 }
 
-impl From<AuthError> for AuthErrorResponse {
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct AuthErrorBody {
+    pub error_code: i32,
+    pub error_msg: String,
+}
+
+impl From<AuthError> for AuthErrorBody {
     fn from(value: AuthError) -> Self {
-        AuthErrorResponse {
-            success: false,
-            message: value.to_string(),
+        AuthErrorBody {
+            error_code: StatusCode::UNAUTHORIZED.as_u16() as i32,
+            error_msg: value.to_string(),
         }
     }
 }
 
 impl IntoResponse for AuthError {
     fn into_response(self) -> Response {
-        (
-            StatusCode::UNAUTHORIZED,
-            Json(AuthErrorResponse::from(self)),
-        )
-            .into_response()
+        (StatusCode::UNAUTHORIZED, Json(AuthErrorBody::from(self))).into_response()
     }
 }
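This file is the heart of the error-library swap: the left side derives `snafu::Snafu` with named-field variants and generated context selectors (`OidcProviderUrlSnafu`), the right side derives `thiserror::Error` with `#[from]` tuple variants. A minimal sketch of one variant expressed in both styles (`url::ParseError` stands in for the real source types):

```rust
use snafu::{ResultExt, Snafu};
use thiserror::Error;

// thiserror style (right side of the diff): the display string indexes
// fields, and `#[from]` generates the `From` impl that `?` uses.
#[derive(Debug, Error)]
enum ThisError {
    #[error("Invalid oidc provider URL: {0}")]
    OidcProviderUrlError(#[from] url::ParseError),
}

// snafu style (left side of the diff): fields are named, the display
// string interpolates them, and snafu generates an `OidcProviderUrlSnafu`
// context selector for use with `ResultExt::context`.
#[derive(Debug, Snafu)]
enum SnafuError {
    #[snafu(display("Invalid oidc provider URL: {source}"))]
    OidcProviderUrlError { source: url::ParseError },
}

fn parse_thiserror(s: &str) -> Result<url::Url, ThisError> {
    Ok(url::Url::parse(s)?)
}

fn parse_snafu(s: &str) -> Result<url::Url, SnafuError> {
    url::Url::parse(s).context(OidcProviderUrlSnafu)
}

fn main() {
    assert!(parse_thiserror("not a url").is_err());
    assert!(parse_snafu("not a url").is_err());
}
```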
41 apps/recorder/src/auth/event.ts Normal file
@@ -0,0 +1,41 @@
+import type { Observable } from '@graphiql/toolkit';
+import { InjectionToken, inject } from '@outposts/injection-js';
+import {
+  type AuthFeature,
+  EventTypes,
+  PublicEventsService,
+} from 'oidc-client-rx';
+import { filter, shareReplay } from 'rxjs';
+
+export type CheckAuthResultEventType =
+  | { type: EventTypes.CheckingAuthFinished }
+  | {
+      type: EventTypes.CheckingAuthFinishedWithError;
+      value: string;
+    };
+export const CHECK_AUTH_RESULT_EVENT = new InjectionToken<
+  Observable<CheckAuthResultEventType>
+>('CHECK_AUTH_RESULT_EVENT');
+
+export function withCheckAuthResultEvent(): AuthFeature {
+  return {
+    ɵproviders: [
+      {
+        provide: CHECK_AUTH_RESULT_EVENT,
+        useFactory: () => {
+          const publishEventService = inject(PublicEventsService);
+
+          return publishEventService.registerForEvents().pipe(
+            filter(
+              (e) =>
+                e.type === EventTypes.CheckingAuthFinishedWithError ||
+                e.type === EventTypes.CheckingAuthFinished
+            ),
+            shareReplay(1)
+          );
+        },
+        deps: [PublicEventsService],
+      },
+    ],
+  };
+}
@@ -1,12 +1,12 @@
 import { runInInjectionContext } from '@outposts/injection-js';
 import { autoLoginPartialRoutesGuard } from 'oidc-client-rx';
 import { firstValueFrom } from 'rxjs';
-import type { RouterContext } from '~/traits/router';
+import type { RouterContext } from '../controllers/__root';
 
 export const beforeLoadGuard = async ({
   context,
 }: { context: RouterContext }) => {
-  if (!context.isAuthenticated()) {
+  if (!context.isAuthenticated) {
     const guard$ = runInInjectionContext(context.injector, () =>
       autoLoginPartialRoutesGuard()
     );
52 apps/recorder/src/auth/hooks.ts Normal file
@@ -0,0 +1,52 @@
+import { useObservableEagerState, useObservableState } from 'observable-hooks';
+import {
+  InjectorContextVoidInjector,
+  useOidcClient,
+} from 'oidc-client-rx/adapters/react';
+import { useMemo } from 'react';
+import { NEVER, type Observable, of } from 'rxjs';
+import { isBasicAuth } from './config';
+import {
+  CHECK_AUTH_RESULT_EVENT,
+  type CheckAuthResultEventType,
+} from './event';
+
+const BASIC_AUTH_IS_AUTHENTICATED$ = of({
+  isAuthenticated: true,
+  allConfigsAuthenticated: [],
+});
+
+const BASIC_AUTH_USER_DATA$ = of({
+  userData: {},
+  allUserData: [],
+});
+
+export function useAuth() {
+  const { oidcSecurityService, injector } = isBasicAuth
+    ? { oidcSecurityService: undefined, injector: InjectorContextVoidInjector }
+    : // biome-ignore lint/correctness/useHookAtTopLevel: <explanation>
+      useOidcClient();
+
+  const { isAuthenticated } = useObservableEagerState(
+    oidcSecurityService?.isAuthenticated$ ?? BASIC_AUTH_IS_AUTHENTICATED$
+  );
+
+  const { userData } = useObservableEagerState(
+    oidcSecurityService?.userData$ ?? BASIC_AUTH_USER_DATA$
+  );
+
+  const checkAuthResultEvent = useObservableState(
+    useMemo(
+      () => (isBasicAuth ? NEVER : injector.get(CHECK_AUTH_RESULT_EVENT)),
+      [injector]
+    ) as Observable<CheckAuthResultEventType>
+  );
+
+  return {
+    oidcSecurityService,
+    isAuthenticated,
+    userData,
+    injector,
+    checkAuthResultEvent,
+  };
+}
@@ -7,21 +7,18 @@ use axum::{
     response::{IntoResponse, Response},
 };
 
-use crate::{app::AppContextTrait, auth::AuthServiceTrait};
+use crate::{app::AppContext, auth::AuthServiceTrait};
 
 pub async fn header_www_authenticate_middleware(
-    State(ctx): State<Arc<dyn AppContextTrait>>,
+    State(ctx): State<Arc<AppContext>>,
     request: Request,
     next: Next,
 ) -> Response {
-    let auth_service = ctx.auth();
+    let auth_service = &ctx.auth;
 
     let (mut parts, body) = request.into_parts();
 
-    let mut response = match auth_service
-        .extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
-        .await
-    {
+    let mut response = match auth_service.extract_user_info(&ctx, &mut parts).await {
         Ok(auth_user_info) => {
             let mut request = Request::from_parts(parts, body);
             request.extensions_mut().insert(auth_user_info);
@@ -16,17 +16,14 @@ use openidconnect::{
 use sea_orm::DbErr;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
-use snafu::ResultExt;
 use url::Url;
 
 use super::{
     config::OidcAuthConfig,
-    errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
+    errors::AuthError,
     service::{AuthServiceTrait, AuthUserInfo},
 };
-use crate::{
-    app::AppContextTrait, errors::app_error::RError, fetch::HttpClient, models::auth::AuthType,
-};
+use crate::{app::AppContext, errors::RError, fetch::HttpClient, models::auth::AuthType};
 
 #[derive(Deserialize, Serialize, Clone, Debug)]
 pub struct OidcAuthClaims {
@@ -128,13 +125,13 @@ impl OidcAuthService {
         redirect_uri: &str,
     ) -> Result<OidcAuthRequest, AuthError> {
         let provider_metadata = CoreProviderMetadata::discover_async(
-            IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
+            IssuerUrl::new(self.config.issuer.clone()).map_err(AuthError::OidcProviderUrlError)?,
             &self.oidc_provider_client,
         )
         .await?;
 
-        let redirect_uri =
-            RedirectUrl::new(redirect_uri.to_string()).context(OidcRequestRedirectUriSnafu)?;
+        let redirect_uri = RedirectUrl::new(redirect_uri.to_string())
+            .map_err(AuthError::OidcRequestRedirectUriError)?;
 
         let oidc_client = CoreClient::from_provider_metadata(
             provider_metadata,
@@ -210,7 +207,7 @@ impl OidcAuthService {
         let request_cache = self.load_authorization_request(&csrf_token).await?;
 
         let provider_metadata = CoreProviderMetadata::discover_async(
-            IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
+            IssuerUrl::new(self.config.issuer.clone()).map_err(AuthError::OidcProviderUrlError)?,
             &self.oidc_provider_client,
         )
         .await?;
@@ -264,14 +261,13 @@ impl OidcAuthService {
 impl AuthServiceTrait for OidcAuthService {
     async fn extract_user_info(
         &self,
-        ctx: &dyn AppContextTrait,
+        ctx: &AppContext,
         request: &mut Parts,
     ) -> Result<AuthUserInfo, AuthError> {
         let config = &self.config;
-        let token = self
-            .api_authorizer
-            .extract_token(&request.headers)
-            .ok_or(jwt_authorizer::AuthError::MissingToken())?;
+        let token = self.api_authorizer.extract_token(&request.headers).ok_or(
+            AuthError::OidcJwtAuthError(jwt_authorizer::AuthError::MissingToken()),
+        )?;
 
         let token_data = self.api_authorizer.check_auth(&token).await?;
         let claims = token_data.claims;
@@ -281,9 +277,7 @@ impl AuthServiceTrait for OidcAuthService {
             return Err(AuthError::OidcSubMissingError);
         };
         if !claims.contains_audience(&config.audience) {
-            return Err(AuthError::OidcAudMissingError {
-                aud: config.audience.clone(),
-            });
+            return Err(AuthError::OidcAudMissingError(config.audience.clone()));
         }
         if let Some(expected_scopes) = config.extra_scopes.as_ref() {
             let found_scopes = claims.scopes().collect::<HashSet<_>>();
@@ -299,7 +293,7 @@ impl AuthServiceTrait for OidcAuthService {
         }
         if let Some(key) = config.extra_claim_key.as_ref() {
             if !claims.has_claim(key) {
-                return Err(AuthError::OidcExtraClaimMissingError { claim: key.clone() });
+                return Err(AuthError::OidcExtraClaimMissingError(key.clone()));
             }
             if let Some(value) = config.extra_claim_value.as_ref() {
                 if claims.get_claim(key).is_none_or(|v| &v != value) {
@@ -312,9 +306,9 @@ impl AuthServiceTrait for OidcAuthService {
             }
         }
         let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
-            Err(RError::DbError {
-                source: DbErr::RecordNotFound(..),
-            }) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
+            Err(RError::DbError(DbErr::RecordNotFound(..))) => {
+                crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
+            }
             r => r,
         }
        .map_err(|_| AuthError::FindAuthRecordError)?;
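Both sides of the token-extraction change above convert the `Option` returned by `extract_token` into a `Result` via `ok_or` before applying `?`: the left relies on a `From` conversion for the inner `jwt_authorizer` error, the right names the final `AuthError` variant at the call site. A small sketch of the two shapes (stand-in error types, not the real ones):

```rust
#[derive(Debug)]
struct InnerError;

#[derive(Debug)]
enum AuthError {
    Missing(InnerError),
}

impl From<InnerError> for AuthError {
    fn from(value: InnerError) -> Self {
        AuthError::Missing(value)
    }
}

// Left side of the diff: `ok_or` yields the inner error and `?` converts
// it through the `From` impl.
fn extract_implicit(token: Option<&str>) -> Result<&str, AuthError> {
    Ok(token.ok_or(InnerError)?)
}

// Right side of the diff: the final error variant is spelled out at the
// call site, so no `From` conversion is needed.
fn extract_explicit(token: Option<&str>) -> Result<&str, AuthError> {
    token.ok_or(AuthError::Missing(InnerError))
}

fn main() {
    assert!(extract_implicit(None).is_err());
    assert!(extract_explicit(Some("t")).is_ok());
}
```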
@@ -1,4 +1,4 @@
-use std::{sync::Arc, time::Duration};
+use std::time::Duration;
 
 use async_trait::async_trait;
 use axum::{
@@ -9,16 +9,15 @@ use axum::{
 use jwt_authorizer::{JwtAuthorizer, Validation};
 use moka::future::Cache;
 use reqwest::header::HeaderValue;
-use snafu::prelude::*;
 
 use super::{
     AuthConfig,
     basic::BasicAuthService,
-    errors::{AuthError, OidcProviderHttpClientSnafu},
+    errors::AuthError,
     oidc::{OidcAuthClaims, OidcAuthService},
 };
 use crate::{
-    app::AppContextTrait,
+    app::AppContext,
     fetch::{
         HttpClient, HttpClientConfig,
         client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
@@ -32,17 +31,17 @@ pub struct AuthUserInfo {
     pub auth_type: AuthType,
 }
 
-impl FromRequestParts<Arc<dyn AppContextTrait>> for AuthUserInfo {
+impl FromRequestParts<AppContext> for AuthUserInfo {
     type Rejection = Response;
 
     async fn from_request_parts(
         parts: &mut Parts,
-        state: &Arc<dyn AppContextTrait>,
+        state: &AppContext,
     ) -> Result<Self, Self::Rejection> {
-        let auth_service = state.auth();
+        let auth_service = &state.auth;
 
         auth_service
-            .extract_user_info(state.as_ref(), parts)
+            .extract_user_info(state, parts)
             .await
             .map_err(|err| err.into_response())
     }
@@ -52,7 +51,7 @@ impl FromRequestParts<Arc<dyn AppContextTrait>> for AuthUserInfo {
 pub trait AuthServiceTrait {
     async fn extract_user_info(
         &self,
-        ctx: &dyn AppContextTrait,
+        ctx: &AppContext,
         request: &mut Parts,
     ) -> Result<AuthUserInfo, AuthError>;
     fn www_authenticate_header_value(&self) -> Option<HeaderValue>;
@@ -60,14 +59,14 @@ pub trait AuthServiceTrait {
 }
 
 pub enum AuthService {
-    Basic(Box<BasicAuthService>),
-    Oidc(Box<OidcAuthService>),
+    Basic(BasicAuthService),
+    Oidc(OidcAuthService),
 }
 
 impl AuthService {
     pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> {
         let result = match config {
-            AuthConfig::Basic(config) => AuthService::Basic(Box::new(BasicAuthService { config })),
+            AuthConfig::Basic(config) => AuthService::Basic(BasicAuthService { config }),
             AuthConfig::Oidc(config) => {
                 let validation = Validation::new()
                     .iss(&[&config.issuer])
@@ -79,14 +78,14 @@ impl AuthService {
                     cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
                     ..Default::default()
                 })
-                .context(OidcProviderHttpClientSnafu)?;
+                .map_err(AuthError::OidcProviderHttpClientError)?;
 
                 let api_authorizer = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer)
                     .validation(validation)
                     .build()
                     .await?;
 
-                AuthService::Oidc(Box::new(OidcAuthService {
+                AuthService::Oidc(OidcAuthService {
                     config,
                     api_authorizer,
                     oidc_provider_client,
@@ -94,7 +93,7 @@ impl AuthService {
                         .time_to_live(Duration::from_mins(5))
                         .name("oidc_request_cache")
                         .build(),
-                }))
+                })
             }
         };
         Ok(result)
@@ -105,7 +104,7 @@ impl AuthService {
 impl AuthServiceTrait for AuthService {
     async fn extract_user_info(
         &self,
-        ctx: &dyn AppContextTrait,
+        ctx: &AppContext,
         request: &mut Parts,
     ) -> Result<AuthUserInfo, AuthError> {
         match self {
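The left side wraps both `AuthService` variants in `Box`, the right stores them inline. Boxing the payload keeps the enum itself pointer-sized no matter how large a service struct grows, since an enum is as big as its largest variant; a minimal illustration with made-up sizes:

```rust
// A 1 KiB payload stands in for a large service struct.
enum Unboxed {
    Small(u8),
    Large([u8; 1024]),
}

enum Boxed {
    Small(u8),
    Large(Box<[u8; 1024]>),
}

fn main() {
    // The unboxed enum must reserve space for its largest variant;
    // the boxed one only stores a pointer plus a discriminant.
    assert!(std::mem::size_of::<Unboxed>() >= 1024);
    assert!(std::mem::size_of::<Boxed>() <= 16);
    // Construct the small variants so the example exercises both enums.
    let _ = (Unboxed::Small(0), Boxed::Small(0));
}
```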
|||||||
@ -1,7 +1,10 @@
-use recorder::{app::AppBuilder, errors::app_error::RResult};
+use color_eyre::{self, eyre};
+use recorder::app::AppBuilder;
 
 #[tokio::main]
-async fn main() -> RResult<()> {
+async fn main() -> eyre::Result<()> {
+    color_eyre::install()?;
 
     let builder = AppBuilder::from_main_cli(None).await?;
 
     let app = builder.build().await?;
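On the `eyre::Result` change above: once `color_eyre::install()` runs at startup, any error propagated out of `main` with `?` is rendered as a colorized report. A minimal standalone sketch, assuming only the color-eyre and tokio crates (the port parsing is invented for illustration):

    use color_eyre::eyre::{eyre, Result};

    #[tokio::main]
    async fn main() -> Result<()> {
        // Installs the panic and error report handlers once, at startup.
        color_eyre::install()?;

        // Any std::error::Error converts into eyre::Report via `?`,
        // so fallible setup code needs no explicit error type.
        let port: u16 = "8080".parse()?;
        if port == 0 {
            return Err(eyre!("port must be non-zero"));
        }
        Ok(())
    }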
apps/recorder/src/cache/service.rs
@ -1,5 +1,5 @@
 use super::CacheConfig;
-use crate::errors::app_error::RResult;
+use crate::errors::RResult;
 
 pub struct CacheService {}
 
@ -7,7 +7,7 @@ use sea_orm::{
 use sea_orm_migration::MigratorTrait;
 
 use super::DatabaseConfig;
-use crate::{errors::app_error::RResult, migrations::Migrator};
+use crate::{errors::RResult, migrations::Migrator};
 
 pub struct DatabaseService {
     connection: DatabaseConnection,
@ -1,74 +0,0 @@
-use async_trait::async_trait;
-
-use crate::downloader::{
-    DownloaderError,
-    bittorrent::task::{
-        TorrentCreationTrait, TorrentHashTrait, TorrentStateTrait, TorrentTaskTrait,
-    },
-    core::{DownloadIdSelectorTrait, DownloadSelectorTrait, DownloadTaskTrait, DownloaderTrait},
-};
-
-#[async_trait]
-pub trait TorrentDownloaderTrait: DownloaderTrait
-where
-    Self::State: TorrentStateTrait,
-    Self::Id: TorrentHashTrait,
-    Self::Task: TorrentTaskTrait<State = Self::State, Id = Self::Id>,
-    Self::Creation: TorrentCreationTrait<Task = Self::Task>,
-    Self::Selector: DownloadSelectorTrait<Task = Self::Task, Id = Self::Id>,
-{
-    type IdSelector: DownloadIdSelectorTrait<Task = Self::Task, Id = Self::Id>;
-
-    async fn pause_downloads(
-        &self,
-        selector: Self::Selector,
-    ) -> Result<Self::IdSelector, DownloaderError> {
-        let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
-        self.pause_torrents(hashes).await
-    }
-
-    async fn resume_downloads(
-        &self,
-        selector: Self::Selector,
-    ) -> Result<Self::IdSelector, DownloaderError> {
-        let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
-        self.resume_torrents(hashes).await
-    }
-    async fn remove_downloads(
-        &self,
-        selector: Self::Selector,
-    ) -> Result<Self::IdSelector, DownloaderError> {
-        let hashes = <Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector).await?;
-        self.remove_torrents(hashes).await
-    }
-
-    async fn query_torrent_hashes(
-        &self,
-        selector: Self::Selector,
-    ) -> Result<Self::IdSelector, DownloaderError> {
-        let hashes = match selector.try_into_ids_only() {
-            Ok(hashes) => Self::IdSelector::from_iter(hashes),
-            Err(selector) => {
-                let tasks = self.query_downloads(selector).await?;
-
-                Self::IdSelector::from_iter(tasks.into_iter().map(|s| s.into_id()))
-            }
-        };
-        Ok(hashes)
-    }
-
-    async fn pause_torrents(
-        &self,
-        hashes: Self::IdSelector,
-    ) -> Result<Self::IdSelector, DownloaderError>;
-
-    async fn resume_torrents(
-        &self,
-        hashes: Self::IdSelector,
-    ) -> Result<Self::IdSelector, DownloaderError>;
-
-    async fn remove_torrents(
-        &self,
-        hashes: Self::IdSelector,
-    ) -> Result<Self::IdSelector, DownloaderError>;
-}
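The deleted trait calls its own default method as `<Self as TorrentDownloaderTrait>::query_torrent_hashes(self, selector)`; the supertrait `DownloaderTrait` declares methods with the same names, so the fully qualified form picks the intended one. A tiny standalone sketch of that syntax (trait and method names invented):

    // Two traits declare `greet`; fully-qualified syntax picks the one we mean.
    trait Loud {
        fn greet(&self) -> String {
            "HELLO".to_string()
        }
    }

    trait Quiet {
        fn greet(&self) -> String {
            "hello".to_string()
        }
    }

    struct Greeter;
    impl Loud for Greeter {}
    impl Quiet for Greeter {}

    fn main() {
        let g = Greeter;
        // `g.greet()` would be ambiguous here; qualify the trait explicitly.
        assert_eq!(<Greeter as Loud>::greet(&g), "HELLO");
        assert_eq!(<Greeter as Quiet>::greet(&g), "hello");
    }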
@ -1,3 +0,0 @@
-pub mod downloader;
-pub mod source;
-pub mod task;
@ -1,228 +0,0 @@
-use std::{
-    borrow::Cow,
-    fmt::{Debug, Formatter},
-};
-
-use bytes::Bytes;
-use librqbit_core::{magnet::Magnet, torrent_metainfo, torrent_metainfo::TorrentMetaV1Owned};
-use snafu::ResultExt;
-use url::Url;
-
-use crate::{
-    downloader::errors::{
-        DownloadFetchSnafu, DownloaderError, MagnetFormatSnafu, TorrentMetaSnafu,
-    },
-    errors::RAnyhowResultExt,
-    extract::bittorrent::core::MAGNET_SCHEMA,
-    fetch::{bytes::fetch_bytes, client::core::HttpClientTrait},
-};
-
-pub trait HashTorrentSourceTrait: Sized {
-    fn hash_info(&self) -> Cow<'_, str>;
-}
-
-pub struct MagnetUrlSource {
-    pub magnet: Magnet,
-    pub url: String,
-}
-
-impl MagnetUrlSource {
-    pub fn from_url(url: String) -> Result<Self, DownloaderError> {
-        let magnet = Magnet::parse(&url)
-            .to_dyn_boxed()
-            .context(MagnetFormatSnafu {
-                message: url.clone(),
-            })?;
-
-        Ok(Self { magnet, url })
-    }
-}
-
-impl HashTorrentSourceTrait for MagnetUrlSource {
-    fn hash_info(&self) -> Cow<'_, str> {
-        let hash_info = self
-            .magnet
-            .as_id32()
-            .map(|s| s.as_string())
-            .or_else(|| self.magnet.as_id20().map(|s| s.as_string()))
-            .unwrap_or_else(|| unreachable!("hash of magnet must existed"));
-        hash_info.into()
-    }
-}
-
-impl Debug for MagnetUrlSource {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("MagnetUrlSource")
-            .field("url", &self.url)
-            .finish()
-    }
-}
-
-impl Clone for MagnetUrlSource {
-    fn clone(&self) -> Self {
-        Self {
-            magnet: Magnet::parse(&self.url).unwrap(),
-            url: self.url.clone(),
-        }
-    }
-}
-
-impl PartialEq for MagnetUrlSource {
-    fn eq(&self, other: &Self) -> bool {
-        self.url == other.url
-    }
-}
-
-impl Eq for MagnetUrlSource {}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct TorrentUrlSource {
-    pub url: String,
-}
-
-impl TorrentUrlSource {
-    pub fn from_url(url: String) -> Result<Self, DownloaderError> {
-        Ok(Self { url })
-    }
-}
-
-#[derive(Clone)]
-pub struct TorrentFileSource {
-    pub url: Option<String>,
-    pub payload: Bytes,
-    pub meta: TorrentMetaV1Owned,
-    pub filename: String,
-}
-
-impl TorrentFileSource {
-    pub fn from_bytes(
-        filename: String,
-        bytes: Bytes,
-        url: Option<String>,
-    ) -> Result<Self, DownloaderError> {
-        let meta = torrent_metainfo::torrent_from_bytes(bytes.as_ref())
-            .to_dyn_boxed()
-            .with_context(|_| TorrentMetaSnafu {
-                message: format!(
-                    "filename = {}, url = {}",
-                    filename,
-                    url.as_deref().unwrap_or_default()
-                ),
-            })?
-            .to_owned();
-
-        Ok(TorrentFileSource {
-            url,
-            payload: bytes,
-            meta,
-            filename,
-        })
-    }
-    pub async fn from_url_and_http_client(
-        client: &impl HttpClientTrait,
-        url: String,
-    ) -> Result<TorrentFileSource, DownloaderError> {
-        let payload = fetch_bytes(client, &url)
-            .await
-            .boxed()
-            .with_context(|_| DownloadFetchSnafu { url: url.clone() })?;
-
-        let filename = Url::parse(&url)
-            .boxed()
-            .and_then(|s| {
-                s.path_segments()
-                    .and_then(|mut p| p.next_back())
-                    .map(String::from)
-                    .ok_or_else(|| anyhow::anyhow!("invalid url"))
-                    .to_dyn_boxed()
-            })
-            .with_context(|_| DownloadFetchSnafu { url: url.clone() })?;
-
-        Self::from_bytes(filename, payload, Some(url))
-    }
-}
-
-impl HashTorrentSourceTrait for TorrentFileSource {
-    fn hash_info(&self) -> Cow<'_, str> {
-        self.meta.info_hash.as_string().into()
-    }
-}
-
-impl Debug for TorrentFileSource {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("TorrentFileSource")
-            .field("hash", &self.meta.info_hash.as_string())
-            .finish()
-    }
-}
-
-#[derive(Clone, Debug)]
-pub enum UrlTorrentSource {
-    MagnetUrl(MagnetUrlSource),
-    TorrentUrl(TorrentUrlSource),
-}
-
-impl UrlTorrentSource {
-    pub fn from_url(url: String) -> Result<Self, DownloaderError> {
-        let url_ = Url::parse(&url)?;
-        let source = if url_.scheme() == MAGNET_SCHEMA {
-            Self::from_magnet_url(url)?
-        } else {
-            Self::from_torrent_url(url)?
-        };
-        Ok(source)
-    }
-
-    pub fn from_magnet_url(url: String) -> Result<Self, DownloaderError> {
-        let magnet_source = MagnetUrlSource::from_url(url)?;
-        Ok(Self::MagnetUrl(magnet_source))
-    }
-
-    pub fn from_torrent_url(url: String) -> Result<Self, DownloaderError> {
-        let torrent_source = TorrentUrlSource::from_url(url)?;
-        Ok(Self::TorrentUrl(torrent_source))
-    }
-}
-
-#[derive(Debug, Clone)]
-pub enum HashTorrentSource {
-    MagnetUrl(MagnetUrlSource),
-    TorrentFile(TorrentFileSource),
-}
-
-impl HashTorrentSource {
-    pub async fn from_url_and_http_client(
-        client: &impl HttpClientTrait,
-        url: String,
-    ) -> Result<Self, DownloaderError> {
-        let url_ = Url::parse(&url)?;
-        let source = if url_.scheme() == MAGNET_SCHEMA {
-            Self::from_magnet_url(url)?
-        } else {
-            Self::from_torrent_url_and_http_client(client, url).await?
-        };
-        Ok(source)
-    }
-
-    pub fn from_magnet_url(url: String) -> Result<Self, DownloaderError> {
-        let magnet_source = MagnetUrlSource::from_url(url)?;
-        Ok(Self::MagnetUrl(magnet_source))
-    }
-
-    pub async fn from_torrent_url_and_http_client(
-        client: &impl HttpClientTrait,
-        url: String,
-    ) -> Result<Self, DownloaderError> {
-        let torrent_source = TorrentFileSource::from_url_and_http_client(client, url).await?;
-        Ok(Self::TorrentFile(torrent_source))
-    }
-}
-
-impl HashTorrentSourceTrait for HashTorrentSource {
-    fn hash_info(&self) -> Cow<'_, str> {
-        match self {
-            HashTorrentSource::MagnetUrl(m) => m.hash_info(),
-            HashTorrentSource::TorrentFile(t) => t.hash_info(),
-        }
-    }
-}
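The deleted `MagnetUrlSource` derives the info hash from a parsed magnet link, preferring the BitTorrent v2 (32-byte) id and falling back to the v1 (20-byte) id. A sketch of that lookup; the librqbit_core calls below are assumed from the deleted code above, not checked against a published release:

    // Extract an info hash from a magnet link, mirroring MagnetUrlSource.
    use librqbit_core::magnet::Magnet;

    fn magnet_hash(url: &str) -> Option<String> {
        let magnet = Magnet::parse(url).ok()?;
        // Prefer the v2 (32-byte) hash, fall back to the v1 (20-byte) hash.
        magnet
            .as_id32()
            .map(|id| id.as_string())
            .or_else(|| magnet.as_id20().map(|id| id.as_string()))
    }

    fn main() {
        let url = "magnet:?xt=urn:btih:c12fe1c06bba254a9dc9f519b335aa7c1367a88a";
        println!("{:?}", magnet_hash(url));
    }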
@ -1,37 +0,0 @@
-use std::{borrow::Cow, hash::Hash};
-
-use quirks_path::{Path, PathBuf};
-
-use crate::downloader::{
-    bittorrent::source::HashTorrentSource,
-    core::{DownloadCreationTrait, DownloadIdTrait, DownloadStateTrait, DownloadTaskTrait},
-};
-
-pub const TORRENT_TAG_NAME: &str = "konobangu";
-
-pub trait TorrentHashTrait: DownloadIdTrait + Send + Hash {}
-
-pub trait TorrentStateTrait: DownloadStateTrait {}
-
-pub trait TorrentTaskTrait: DownloadTaskTrait
-where
-    Self::State: TorrentStateTrait,
-    Self::Id: TorrentHashTrait,
-{
-    fn hash_info(&self) -> &str;
-    fn name(&self) -> Cow<'_, str> {
-        Cow::Borrowed(self.hash_info())
-    }
-
-    fn tags(&self) -> impl Iterator<Item = Cow<'_, str>>;
-
-    fn category(&self) -> Option<Cow<'_, str>>;
-}
-
-pub trait TorrentCreationTrait: DownloadCreationTrait {
-    fn save_path(&self) -> &Path;
-
-    fn save_path_mut(&mut self) -> &mut PathBuf;
-
-    fn sources_mut(&mut self) -> &mut Vec<HashTorrentSource>;
-}
@ -1,218 +0,0 @@
-use std::{
-    any::Any, borrow::Cow, fmt::Debug, hash::Hash, marker::PhantomData, ops::Deref, time::Duration,
-    vec::IntoIter,
-};
-
-use async_trait::async_trait;
-
-use super::DownloaderError;
-
-pub trait DownloadStateTrait: Sized + Debug {}
-
-pub trait DownloadIdTrait: Hash + Sized + Clone + Send + Debug {}
-
-pub trait DownloadTaskTrait: Sized + Send + Debug {
-    type State: DownloadStateTrait;
-    type Id: DownloadIdTrait;
-
-    fn id(&self) -> &Self::Id;
-    fn into_id(self) -> Self::Id;
-    fn name(&self) -> Cow<'_, str>;
-    fn speed(&self) -> Option<u64>;
-    fn state(&self) -> &Self::State;
-    fn dl_bytes(&self) -> Option<u64>;
-    fn total_bytes(&self) -> Option<u64>;
-    fn left_bytes(&self) -> Option<u64> {
-        if let (Some(tt), Some(dl)) = (self.total_bytes(), self.dl_bytes()) {
-            tt.checked_sub(dl)
-        } else {
-            None
-        }
-    }
-    fn et(&self) -> Option<Duration>;
-    fn eta(&self) -> Option<Duration> {
-        if let (Some(left_bytes), Some(speed)) = (self.left_bytes(), self.speed()) {
-            if speed > 0 {
-                Some(Duration::from_secs_f64(left_bytes as f64 / speed as f64))
-            } else {
-                None
-            }
-        } else {
-            None
-        }
-    }
-    fn average_speed(&self) -> Option<f64> {
-        if let (Some(et), Some(dl_bytes)) = (self.et(), self.dl_bytes()) {
-            let secs = et.as_secs_f64();
-
-            if secs > 0.0 {
-                Some(dl_bytes as f64 / secs)
-            } else {
-                None
-            }
-        } else {
-            None
-        }
-    }
-    fn progress(&self) -> Option<f32> {
-        if let (Some(dl), Some(tt)) = (self.dl_bytes(), self.total_bytes()) {
-            if dl > 0 {
-                if tt > 0 {
-                    Some(dl as f32 / tt as f32)
-                } else {
-                    None
-                }
-            } else {
-                Some(0.0)
-            }
-        } else {
-            None
-        }
-    }
-}
-
-pub trait DownloadCreationTrait: Sized {
-    type Task: DownloadTaskTrait;
-}
-
-pub trait DownloadSelectorTrait: Sized + Any + Send {
-    type Id: DownloadIdTrait;
-    type Task: DownloadTaskTrait<Id = Self::Id>;
-
-    fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
-        Err(self)
-    }
-}
-
-pub trait DownloadIdSelectorTrait:
-    DownloadSelectorTrait
-    + IntoIterator<Item = Self::Id>
-    + FromIterator<Self::Id>
-    + Into<Vec<Self::Id>>
-    + From<Vec<Self::Id>>
-{
-    fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
-        Ok(Vec::from_iter(self))
-    }
-
-    fn from_id(id: Self::Id) -> Self;
-}
-
-#[derive(Debug)]
-pub struct DownloadIdSelector<Task>
-where
-    Task: DownloadTaskTrait,
-{
-    pub ids: Vec<Task::Id>,
-    pub marker: PhantomData<Task>,
-}
-
-impl<Task> Deref for DownloadIdSelector<Task>
-where
-    Task: DownloadTaskTrait,
-{
-    type Target = Vec<Task::Id>;
-
-    fn deref(&self) -> &Self::Target {
-        &self.ids
-    }
-}
-
-impl<Task> IntoIterator for DownloadIdSelector<Task>
-where
-    Task: DownloadTaskTrait,
-{
-    type Item = Task::Id;
-    type IntoIter = IntoIter<Task::Id>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.ids.into_iter()
-    }
-}
-
-impl<Task> FromIterator<Task::Id> for DownloadIdSelector<Task>
-where
-    Task: DownloadTaskTrait,
-{
-    fn from_iter<T: IntoIterator<Item = Task::Id>>(iter: T) -> Self {
-        Self {
-            ids: Vec::from_iter(iter),
-            marker: PhantomData,
-        }
-    }
-}
-
-impl<Task> DownloadSelectorTrait for DownloadIdSelector<Task>
-where
-    Task: DownloadTaskTrait + 'static,
-{
-    type Id = Task::Id;
-    type Task = Task;
-}
-
-impl<Task> From<Vec<Task::Id>> for DownloadIdSelector<Task>
-where
-    Task: DownloadTaskTrait + 'static,
-{
-    fn from(value: Vec<Task::Id>) -> Self {
-        Self {
-            ids: value,
-            marker: PhantomData,
-        }
-    }
-}
-
-impl<Task> From<DownloadIdSelector<Task>> for Vec<Task::Id>
-where
-    Task: DownloadTaskTrait + 'static,
-{
-    fn from(value: DownloadIdSelector<Task>) -> Self {
-        value.ids
-    }
-}
-
-impl<Task> DownloadIdSelectorTrait for DownloadIdSelector<Task>
-where
-    Task: DownloadTaskTrait + 'static,
-{
-    fn try_into_ids_only(self) -> Result<Vec<Self::Id>, Self> {
-        Ok(self.ids)
-    }
-
-    fn from_id(id: Self::Id) -> Self {
-        Self {
-            ids: vec![id],
-            marker: PhantomData,
-        }
-    }
-}
-
-#[async_trait]
-pub trait DownloaderTrait {
-    type State: DownloadStateTrait;
-    type Id: DownloadIdTrait;
-    type Task: DownloadTaskTrait<State = Self::State, Id = Self::Id>;
-    type Creation: DownloadCreationTrait<Task = Self::Task>;
-    type Selector: DownloadSelectorTrait<Task = Self::Task>;
-
-    async fn add_downloads(
-        &self,
-        creation: Self::Creation,
-    ) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
-    async fn pause_downloads(
-        &self,
-        selector: Self::Selector,
-    ) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
-    async fn resume_downloads(
-        &self,
-        selector: Self::Selector,
-    ) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
-    async fn remove_downloads(
-        &self,
-        selector: Self::Selector,
-    ) -> Result<impl IntoIterator<Item = Self::Id>, DownloaderError>;
-    async fn query_downloads(
-        &self,
-        selector: Self::Selector,
-    ) -> Result<impl IntoIterator<Item = Self::Task>, DownloaderError>;
-}
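To make the deleted `eta`/`progress` defaults concrete, a small worked check with invented numbers (100 MiB total, 40 MiB downloaded, 5 MiB/s): left_bytes is 60 MiB, so eta = 60 / 5 = 12 s and progress = 0.4.

    // Worked check of the eta/progress arithmetic above, numbers invented.
    use std::time::Duration;

    fn main() {
        let total: u64 = 100 * 1024 * 1024;
        let done: u64 = 40 * 1024 * 1024;
        let speed: u64 = 5 * 1024 * 1024;

        let left = total.checked_sub(done).unwrap();
        let eta = Duration::from_secs_f64(left as f64 / speed as f64);
        let progress = done as f32 / total as f32;

        assert_eq!(eta, Duration::from_secs(12)); // (100 - 40) / 5 = 12 s
        assert!((progress - 0.4).abs() < f32::EPSILON);
        println!("eta = {eta:?}, progress = {progress}");
    }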
@ -1,63 +0,0 @@
-use std::{borrow::Cow, time::Duration};
-
-use snafu::prelude::*;
-
-use crate::errors::OptDynErr;
-
-#[derive(Snafu, Debug)]
-#[snafu(visibility(pub(crate)))]
-pub enum DownloaderError {
-    #[snafu(transparent)]
-    DownloadUrlParseError { source: url::ParseError },
-    #[snafu(transparent)]
-    QBitAPIError { source: qbit_rs::Error },
-    #[snafu(transparent)]
-    DownloaderIOError { source: std::io::Error },
-    #[snafu(display("Timeout error (action = {action}, timeout = {timeout:?})"))]
-    DownloadTimeoutError {
-        action: Cow<'static, str>,
-        timeout: Duration,
-    },
-    #[snafu(display("Invalid magnet format ({message})"))]
-    MagnetFormatError {
-        message: String,
-        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
-        source: OptDynErr,
-    },
-    #[snafu(display("Invalid torrent meta format ({message})"))]
-    TorrentMetaError {
-        message: String,
-        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
-        source: OptDynErr,
-    },
-    #[snafu(display("Failed to fetch: {source}"))]
-    DownloadFetchError {
-        url: String,
-        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
-        source: OptDynErr,
-    },
-    #[snafu(display("{message}"))]
-    Whatever {
-        message: String,
-        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
-        source: OptDynErr,
-    },
-}
-
-impl snafu::FromString for DownloaderError {
-    type Source = Box<dyn std::error::Error + Send + Sync>;
-
-    fn without_source(message: String) -> Self {
-        Self::Whatever {
-            message,
-            source: OptDynErr::none(),
-        }
-    }
-
-    fn with_source(source: Self::Source, message: String) -> Self {
-        Self::Whatever {
-            message,
-            source: OptDynErr::some(source),
-        }
-    }
-}
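The `FromString` impl above is what lets callers construct this error from plain format strings via snafu's `whatever!` machinery. A minimal standalone sketch of the same pattern using snafu's built-in `Whatever` type (the function and values are invented):

    use snafu::{whatever, ResultExt, Whatever};

    // `whatever!` early-returns an error built from a format string; it works
    // for any error type implementing snafu::FromString, such as the deleted
    // DownloaderError or snafu's own Whatever.
    fn parse_port(input: &str) -> Result<u16, Whatever> {
        if input.is_empty() {
            whatever!("empty port string");
        }
        input
            .parse::<u16>()
            .whatever_context(format!("invalid port: {input}"))
    }

    fn main() {
        assert!(parse_port("8080").is_ok());
        assert!(parse_port("").is_err());
    }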
@ -1,12 +0,0 @@
-pub mod bittorrent;
-pub mod core;
-pub mod errors;
-pub mod qbit;
-pub mod rqbit;
-pub mod utils;
-
-pub use errors::DownloaderError;
-pub use qbit::{
-    QBittorrentDownloader, QBittorrentDownloaderCreation, QbitTorrent, QbitTorrentContent,
-    QbitTorrentFile, QbitTorrentFilter, QbitTorrentSource,
-};
File diff suppressed because it is too large
@ -1 +0,0 @@
-
apps/recorder/src/env.d.ts
@ -0,0 +1 @@
+/// <reference types="@rsbuild/core/types" />
@ -1,55 +0,0 @@
-use std::fmt::Display;
-
-#[derive(Debug)]
-pub struct OptDynErr(Option<Box<dyn std::error::Error + Send + Sync>>);
-
-impl AsRef<dyn snafu::Error> for OptDynErr {
-    fn as_ref(&self) -> &(dyn snafu::Error + 'static) {
-        self
-    }
-}
-
-impl OptDynErr {
-    pub fn some_boxed<E: std::error::Error + Send + Sync + 'static>(e: E) -> Self {
-        Self(Some(Box::new(e)))
-    }
-
-    pub fn some(e: Box<dyn std::error::Error + Send + Sync>) -> Self {
-        Self(Some(e))
-    }
-
-    pub fn none() -> Self {
-        Self(None)
-    }
-}
-
-impl Display for OptDynErr {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match &self.0 {
-            Some(e) => e.fmt(f),
-            None => write!(f, "None"),
-        }
-    }
-}
-
-impl snafu::Error for OptDynErr {
-    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
-        None
-    }
-
-    fn cause(&self) -> Option<&dyn std::error::Error> {
-        self.source()
-    }
-}
-
-impl From<Option<Box<dyn std::error::Error + Send + Sync>>> for OptDynErr {
-    fn from(value: Option<Box<dyn std::error::Error + Send + Sync>>) -> Self {
-        Self(value)
-    }
-}
-
-impl From<Box<dyn std::error::Error + Send + Sync>> for OptDynErr {
-    fn from(value: Box<dyn std::error::Error + Send + Sync>) -> Self {
-        Self::some(value)
-    }
-}
@ -1,202 +0,0 @@
-use std::borrow::Cow;
-
-use axum::{
-    Json,
-    response::{IntoResponse, Response},
-};
-use http::StatusCode;
-use serde::{Deserialize, Deserializer, Serialize};
-use snafu::Snafu;
-
-use crate::{
-    auth::AuthError,
-    downloader::DownloaderError,
-    errors::{OptDynErr, response::StandardErrorResponse},
-    fetch::HttpClientError,
-};
-
-#[derive(Snafu, Debug)]
-#[snafu(visibility(pub(crate)))]
-pub enum RError {
-    #[snafu(transparent, context(false))]
-    FancyRegexError {
-        #[snafu(source(from(fancy_regex::Error, Box::new)))]
-        source: Box<fancy_regex::Error>,
-    },
-    #[snafu(transparent)]
-    RegexError { source: regex::Error },
-    #[snafu(transparent)]
-    InvalidMethodError { source: http::method::InvalidMethod },
-    #[snafu(transparent)]
-    InvalidHeaderNameError {
-        source: http::header::InvalidHeaderName,
-    },
-    #[snafu(transparent)]
-    TracingAppenderInitError {
-        source: tracing_appender::rolling::InitError,
-    },
-    #[snafu(transparent)]
-    GraphQLSchemaError {
-        source: async_graphql::dynamic::SchemaError,
-    },
-    #[snafu(transparent)]
-    AuthError { source: AuthError },
-    #[snafu(transparent)]
-    DownloadError { source: DownloaderError },
-    #[snafu(transparent)]
-    RSSError { source: rss::Error },
-    #[snafu(transparent)]
-    DotEnvError { source: dotenv::Error },
-    #[snafu(transparent)]
-    TeraError { source: tera::Error },
-    #[snafu(transparent)]
-    IOError { source: std::io::Error },
-    #[snafu(transparent)]
-    DbError { source: sea_orm::DbErr },
-    #[snafu(transparent)]
-    CookieParseError { source: cookie::ParseError },
-    #[snafu(transparent, context(false))]
-    FigmentError {
-        #[snafu(source(from(figment::Error, Box::new)))]
-        source: Box<figment::Error>,
-    },
-    #[snafu(transparent)]
-    SerdeJsonError { source: serde_json::Error },
-    #[snafu(transparent)]
-    ReqwestMiddlewareError { source: reqwest_middleware::Error },
-    #[snafu(transparent)]
-    ReqwestError { source: reqwest::Error },
-    #[snafu(transparent)]
-    ParseUrlError { source: url::ParseError },
-    #[snafu(display("{source}"), context(false))]
-    OpenDALError {
-        #[snafu(source(from(opendal::Error, Box::new)))]
-        source: Box<opendal::Error>,
-    },
-    #[snafu(transparent)]
-    InvalidHeaderValueError {
-        source: http::header::InvalidHeaderValue,
-    },
-    #[snafu(transparent)]
-    HttpClientError { source: HttpClientError },
-    #[cfg(all(feature = "testcontainers", test))]
-    #[snafu(transparent)]
-    TestcontainersError {
-        source: testcontainers::TestcontainersError,
-    },
-    #[snafu(display("Extract {desc} with mime error, expected {expected}, but got {found}"))]
-    MimeError {
-        desc: String,
-        expected: String,
-        found: String,
-    },
-    #[snafu(display("Invalid or unknown format in extracting mikan rss"))]
-    MikanRssInvalidFormatError,
-    #[snafu(display("Invalid field {field} in extracting mikan rss"))]
-    MikanRssInvalidFieldError {
-        field: Cow<'static, str>,
-        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
-        source: OptDynErr,
-    },
-    #[snafu(display("Missing field {field} in extracting mikan meta"))]
-    MikanMetaMissingFieldError {
-        field: Cow<'static, str>,
-        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
-        source: OptDynErr,
-    },
-    #[snafu(display("Model Entity {entity} not found"))]
-    ModelEntityNotFound { entity: Cow<'static, str> },
-    #[snafu(display("{message}"))]
-    Whatever {
-        message: String,
-        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
-        source: OptDynErr,
-    },
-}
-
-impl RError {
-    pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
-        Self::MikanMetaMissingFieldError {
-            field,
-            source: None.into(),
-        }
-    }
-
-    pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
-        Self::MikanRssInvalidFieldError {
-            field,
-            source: None.into(),
-        }
-    }
-
-    pub fn from_mikan_rss_invalid_field_and_source(
-        field: Cow<'static, str>,
-        source: impl std::error::Error + Send + Sync + 'static,
-    ) -> Self {
-        Self::MikanRssInvalidFieldError {
-            field,
-            source: OptDynErr::some_boxed(source),
-        }
-    }
-
-    pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
-        Self::DbError {
-            source: sea_orm::DbErr::RecordNotFound(detail.to_string()),
-        }
-    }
-}
-
-impl snafu::FromString for RError {
-    type Source = Box<dyn std::error::Error + Send + Sync>;
-
-    fn without_source(message: String) -> Self {
-        Self::Whatever {
-            message,
-            source: OptDynErr::none(),
-        }
-    }
-
-    fn with_source(source: Self::Source, message: String) -> Self {
-        Self::Whatever {
-            message,
-            source: OptDynErr::some(source),
-        }
-    }
-}
-
-impl IntoResponse for RError {
-    fn into_response(self) -> Response {
-        match self {
-            Self::AuthError { source: auth_error } => auth_error.into_response(),
-            err => (
-                StatusCode::INTERNAL_SERVER_ERROR,
-                Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
-            )
-                .into_response(),
-        }
-    }
-}
-
-impl Serialize for RError {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
-        serializer.serialize_str(&self.to_string())
-    }
-}
-
-impl<'de> Deserialize<'de> for RError {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let s = String::deserialize(deserializer)?;
-        Ok(Self::Whatever {
-            message: s,
-            source: None.into(),
-        })
-    }
-}
-
-pub type RResult<T> = Result<T, RError>;
@ -1,9 +0,0 @@
-pub trait RAnyhowResultExt<T>: snafu::ResultExt<T, anyhow::Error> {
-    fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>>;
-}
-
-impl<T> RAnyhowResultExt<T> for Result<T, anyhow::Error> {
-    fn to_dyn_boxed(self) -> Result<T, Box<dyn std::error::Error + Send + Sync>> {
-        self.map_err(|e| e.into())
-    }
-}
@ -1,9 +1,116 @@
-pub mod alias;
-pub mod app_error;
-pub mod ext;
-pub mod response;
+use std::{borrow::Cow, error::Error as StdError};
 
-pub use alias::OptDynErr;
-pub use app_error::*;
-pub use ext::RAnyhowResultExt;
-pub use response::StandardErrorResponse;
+use axum::response::{IntoResponse, Response};
+use http::StatusCode;
+use thiserror::Error as ThisError;
+
+use crate::{auth::AuthError, fetch::HttpClientError};
+
+#[derive(ThisError, Debug)]
+pub enum RError {
+    #[error(transparent)]
+    InvalidMethodError(#[from] http::method::InvalidMethod),
+    #[error(transparent)]
+    InvalidHeaderNameError(#[from] http::header::InvalidHeaderName),
+    #[error(transparent)]
+    TracingAppenderInitError(#[from] tracing_appender::rolling::InitError),
+    #[error(transparent)]
+    GraphQLSchemaError(#[from] async_graphql::dynamic::SchemaError),
+    #[error(transparent)]
+    AuthError(#[from] AuthError),
+    #[error(transparent)]
+    RSSError(#[from] rss::Error),
+    #[error(transparent)]
+    DotEnvError(#[from] dotenv::Error),
+    #[error(transparent)]
+    TeraError(#[from] tera::Error),
+    #[error(transparent)]
+    IOError(#[from] std::io::Error),
+    #[error(transparent)]
+    DbError(#[from] sea_orm::DbErr),
+    #[error(transparent)]
+    CookieParseError(#[from] cookie::ParseError),
+    #[error(transparent)]
+    FigmentError(#[from] figment::Error),
+    #[error(transparent)]
+    SerdeJsonError(#[from] serde_json::Error),
+    #[error(transparent)]
+    ReqwestMiddlewareError(#[from] reqwest_middleware::Error),
+    #[error(transparent)]
+    ReqwestError(#[from] reqwest::Error),
+    #[error(transparent)]
+    ParseUrlError(#[from] url::ParseError),
+    #[error(transparent)]
+    OpenDALError(#[from] opendal::Error),
+    #[error(transparent)]
+    InvalidHeaderValueError(#[from] http::header::InvalidHeaderValue),
+    #[error(transparent)]
+    HttpClientError(#[from] HttpClientError),
+    #[error("Extract {desc} with mime error, expected {expected}, but got {found}")]
+    MimeError {
+        desc: String,
+        expected: String,
+        found: String,
+    },
+    #[error("Invalid or unknown format in extracting mikan rss")]
+    MikanRssInvalidFormatError,
+    #[error("Invalid field {field} in extracting mikan rss")]
+    MikanRssInvalidFieldError {
+        field: Cow<'static, str>,
+        #[source]
+        source: Option<Box<dyn StdError + Send + Sync>>,
+    },
+    #[error("Missing field {field} in extracting mikan meta")]
+    MikanMetaMissingFieldError {
+        field: Cow<'static, str>,
+        #[source]
+        source: Option<Box<dyn StdError + Send + Sync>>,
+    },
+    #[error("Model Entity {entity} not found")]
+    ModelEntityNotFound { entity: Cow<'static, str> },
+    #[error("{0}")]
+    CustomMessageStr(&'static str),
+    #[error("{0}")]
+    CustomMessageString(String),
+}
+
+impl RError {
+    pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
+        Self::MikanMetaMissingFieldError {
+            field,
+            source: None,
+        }
+    }
+
+    pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
+        Self::MikanRssInvalidFieldError {
+            field,
+            source: None,
+        }
+    }
+
+    pub fn from_mikan_rss_invalid_field_and_source(
+        field: Cow<'static, str>,
+        source: Box<dyn StdError + Send + Sync>,
+    ) -> Self {
+        Self::MikanRssInvalidFieldError {
+            field,
+            source: Some(source),
+        }
+    }
+
+    pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
+        Self::DbError(sea_orm::DbErr::RecordNotFound(detail.to_string()))
+    }
+}
+
+impl IntoResponse for RError {
+    fn into_response(self) -> Response {
+        match self {
+            Self::AuthError(auth_error) => auth_error.into_response(),
+            err => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
+        }
+    }
+}
+
+pub type RResult<T> = Result<T, RError>;
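For readers comparing the two error stacks: each `#[from]` attribute in the thiserror version generates a `From` impl, which is what lets `?` convert library errors into `RError` with no explicit mapping. A minimal standalone sketch of that mechanism (error and function names invented):

    use thiserror::Error;

    #[derive(Error, Debug)]
    enum AppError {
        #[error(transparent)]
        Io(#[from] std::io::Error),
        #[error("invalid port {0}")]
        InvalidPort(u16),
    }

    // `?` works because #[from] generated `impl From<std::io::Error> for AppError`.
    fn read_config() -> Result<String, AppError> {
        let text = std::fs::read_to_string("config.toml")?;
        Ok(text)
    }

    fn main() {
        match read_config() {
            Ok(text) => println!("{} bytes", text.len()),
            Err(e) => eprintln!("error: {e}"),
        }
    }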
@ -1,19 +0,0 @@
-use serde::Serialize;
-
-#[derive(Serialize, Debug, Clone)]
-pub struct StandardErrorResponse<T = ()> {
-    pub success: bool,
-    pub message: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub result: Option<T>,
-}
-
-impl<T> From<String> for StandardErrorResponse<T> {
-    fn from(value: String) -> Self {
-        StandardErrorResponse {
-            success: false,
-            message: value,
-            result: None,
-        }
-    }
-}
@ -1,2 +0,0 @@
-pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
-pub const MAGNET_SCHEMA: &str = "magnet";
@ -1,6 +0,0 @@
-pub mod core;
-pub mod extract;
-
-pub use core::{BITTORRENT_MIME_TYPE, MAGNET_SCHEMA};
-
-pub use extract::*;
@ -1,33 +1,24 @@
-use std::{fmt::Debug, ops::Deref};
+use std::ops::Deref;
 
 use reqwest_middleware::ClientWithMiddleware;
-use serde::{Deserialize, Serialize};
+use secrecy::{ExposeSecret, SecretString};
 use url::Url;
 
 use super::MikanConfig;
 use crate::{
-    errors::app_error::RError,
+    errors::RError,
     fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth},
 };
 
-#[derive(Default, Clone, Deserialize, Serialize)]
+#[derive(Debug, Default, Clone)]
 pub struct MikanAuthSecrecy {
-    pub cookie: String,
+    pub cookie: SecretString,
     pub user_agent: Option<String>,
 }
 
-impl Debug for MikanAuthSecrecy {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("MikanAuthSecrecy")
-            .field("cookie", &String::from("[secrecy]"))
-            .field("user_agent", &String::from("[secrecy]"))
-            .finish()
-    }
-}
-
 impl MikanAuthSecrecy {
     pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RError> {
-        HttpClientCookiesAuth::from_cookies(&self.cookie, url, self.user_agent)
+        HttpClientCookiesAuth::from_cookies(self.cookie.expose_secret(), url, self.user_agent)
     }
 }
 
@ -47,13 +38,9 @@ impl MikanClient {
         })
     }
 
-    pub fn fork_with_auth(&self, secrecy: Option<MikanAuthSecrecy>) -> Result<Self, RError> {
-        let mut fork = self.http_client.fork();
-
-        if let Some(secrecy) = secrecy {
-            let cookie_auth = secrecy.into_cookie_auth(&self.base_url)?;
-            fork = fork.attach_secrecy(cookie_auth);
-        }
+    pub fn fork_with_auth(&self, secrecy: MikanAuthSecrecy) -> Result<Self, RError> {
+        let cookie_auth = secrecy.into_cookie_auth(&self.base_url)?;
+        let fork = self.http_client.fork().attach_secrecy(cookie_auth);
 
         Ok(Self {
             http_client: HttpClient::from_fork(fork)?,
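The hunks above swap the plain `String` cookie for secrecy's `SecretString`, whose built-in `Debug` prints a redacted placeholder; that is why the hand-written `Debug` impl could be deleted. Reading the value back requires an explicit `expose_secret()` call. A small standalone sketch (field names invented):

    use secrecy::{ExposeSecret, SecretString};

    #[derive(Debug)]
    struct Credentials {
        user: String,
        cookie: SecretString, // Debug prints a redacted placeholder, not the value
    }

    fn main() {
        let creds = Credentials {
            user: "konobangu".into(),
            cookie: SecretString::from("session=abc123".to_string()),
        };

        // Logging the struct cannot leak the cookie...
        println!("{creds:?}");

        // ...reading it requires an explicit, greppable call.
        assert_eq!(creds.cookie.expose_secret(), "session=abc123");
    }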
@ -8,15 +8,13 @@ use tracing::instrument;
 use url::Url;
 
 use crate::{
-    errors::app_error::{RError, RResult},
-    extract::{
-        bittorrent::BITTORRENT_MIME_TYPE,
-        mikan::{
+    errors::{RError, RResult},
+    extract::mikan::{
         MikanClient,
         web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
     },
-    },
     fetch::bytes::fetch_bytes,
+    sync::core::BITTORRENT_MIME_TYPE,
 };
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@ -122,10 +120,10 @@ impl TryFrom<rss::Item> for MikanRssItem {
             .title
             .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?;
 
-        let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
+        let enclosure_url = Url::parse(&enclosure.url).map_err(|inner| {
             RError::from_mikan_rss_invalid_field_and_source(
-                "enclosure_url:enclosure.link".into(),
-                err,
+                Cow::Borrowed("enclosure_url:enclosure.link"),
+                Box::new(inner),
             )
         })?;
 
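Both sides of this hunk pass the field name as a `Cow<'static, str>`: `Cow::Borrowed` keeps the common static-literal case allocation-free while still permitting owned strings for names built at runtime. A tiny illustration (names invented):

    use std::borrow::Cow;

    // Error-style payload that usually holds a static literal but sometimes
    // an owned, runtime-built string.
    fn field_name(index: Option<usize>) -> Cow<'static, str> {
        match index {
            // No allocation: borrows the 'static literal.
            None => Cow::Borrowed("enclosure_url"),
            // Allocates only in the dynamic case.
            Some(i) => Cow::Owned(format!("enclosure_url[{i}]")),
        }
    }

    fn main() {
        assert_eq!(field_name(None), "enclosure_url");
        assert_eq!(field_name(Some(2)), "enclosure_url[2]");
    }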
@ -336,24 +334,22 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
 mod tests {
     use std::assert_matches::assert_matches;
 
+    use color_eyre::eyre;
     use rstest::rstest;
     use url::Url;
 
     use crate::{
-        errors::app_error::RResult,
-        extract::{
-            bittorrent::BITTORRENT_MIME_TYPE,
-            mikan::{
+        extract::mikan::{
             MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
             extract_mikan_rss_channel_from_rss_link,
         },
-        },
+        sync::core::BITTORRENT_MIME_TYPE,
         test_utils::mikan::build_testing_mikan_client,
     };
 
     #[rstest]
     #[tokio::test]
-    async fn test_parse_mikan_rss_channel_from_rss_link() -> RResult<()> {
+    async fn test_parse_mikan_rss_channel_from_rss_link() -> eyre::Result<()> {
         let mut mikan_server = mockito::Server::new_async().await;
 
         let mikan_base_url = Url::parse(&mikan_server.url())?;
@ -1,21 +1,19 @@
-use std::{borrow::Cow, sync::Arc};
+use std::borrow::Cow;
 
 use async_stream::try_stream;
 use bytes::Bytes;
 use futures::Stream;
 use itertools::Itertools;
 use scraper::{Html, Selector};
-use serde::{Deserialize, Serialize};
 use tracing::instrument;
 use url::Url;
 
 use super::{
-    MIKAN_BUCKET_KEY, MikanAuthSecrecy, MikanBangumiRssLink, MikanClient,
-    extract_mikan_bangumi_id_from_rss_link,
+    MIKAN_BUCKET_KEY, MikanBangumiRssLink, MikanClient, extract_mikan_bangumi_id_from_rss_link,
 };
 use crate::{
-    app::AppContextTrait,
-    errors::app_error::{RError, RResult},
+    app::AppContext,
+    errors::{RError, RResult},
     extract::{
         html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
         media::extract_image_src_from_str,
@ -36,7 +34,7 @@ pub struct MikanEpisodeMeta {
     pub mikan_episode_id: String,
 }
 
-#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
+#[derive(Clone, Debug, PartialEq)]
 pub struct MikanBangumiMeta {
     pub homepage: Url,
     pub origin_poster_src: Option<Url>,
@ -125,12 +123,12 @@ pub async fn extract_mikan_poster_meta_from_src(
 }
 
 pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
-    ctx: &dyn AppContextTrait,
+    ctx: &AppContext,
     origin_poster_src_url: Url,
     subscriber_id: i32,
 ) -> RResult<MikanBangumiPosterMeta> {
-    let dal_client = ctx.storage();
-    let mikan_client = ctx.mikan();
+    let dal_client = &ctx.storage;
+    let mikan_client = &ctx.mikan;
     if let Some(poster_src) = dal_client
         .exists_object(
             StorageContentCategory::Image,
@ -348,38 +346,36 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
     })
 }
 
-#[instrument(skip_all, fields(my_bangumi_page_url, auth_secrecy = ?auth_secrecy, history = history.len()))]
+/**
+ * @logined-required
+ */
+#[instrument(skip_all, fields(my_bangumi_page_url = my_bangumi_page_url.as_str()))]
 pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
-    context: Arc<dyn AppContextTrait>,
+    http_client: &MikanClient,
     my_bangumi_page_url: Url,
-    auth_secrecy: Option<MikanAuthSecrecy>,
-    history: &[Arc<RResult<MikanBangumiMeta>>],
-) -> impl Stream<Item = RResult<MikanBangumiMeta>> {
+) -> impl Stream<Item = Result<MikanBangumiMeta, RError>> {
     try_stream! {
-        let http_client = &context.mikan().fork_with_auth(auth_secrecy.clone())?;
-
         let mikan_base_url = Url::parse(&my_bangumi_page_url.origin().unicode_serialization())?;
 
         let content = fetch_html(http_client, my_bangumi_page_url.clone()).await?;
 
-        let fansub_container_selector =
-            &Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap();
-        let fansub_title_selector = &Selector::parse(".tag-res-name[title]").unwrap();
-        let fansub_id_selector =
-            &Selector::parse(".active[data-subtitlegroupid][data-bangumiid]").unwrap();
-
-        let bangumi_items = {
-            let html = Html::parse_document(&content);
-
         let bangumi_container_selector = &Selector::parse(".sk-bangumi .an-ul>li").unwrap();
         let bangumi_info_selector = &Selector::parse(".an-info a.an-text").unwrap();
         let bangumi_poster_selector =
             &Selector::parse("span[data-src][data-bangumiid], span[data-bangumiid][style]")
                 .unwrap();
+        let fansub_container_selector =
+            &Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap();
+        let fansub_title_selector = &Selector::parse(".tag-res-name[title]").unwrap();
+        let fansub_id_selector =
+            &Selector::parse(".active[data-subtitlegroupid][data-bangumiid]").unwrap();
+
+        let bangumi_iters = {
+            let html = Html::parse_document(&content);
+
            html.select(bangumi_container_selector)
                .filter_map(|bangumi_elem| {
-                    let title_and_href_elem =
-                        bangumi_elem.select(bangumi_info_selector).next();
+                    let title_and_href_elem = bangumi_elem.select(bangumi_info_selector).next();
                    let poster_elem = bangumi_elem.select(bangumi_poster_selector).next();
                    if let (Some(bangumi_home_page_url), Some(bangumi_title)) = (
                        title_and_href_elem.and_then(|elem| elem.attr("href")),
@ -436,21 +432,13 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
                .collect_vec()
        };
 
-        for (idx, (bangumi_title, mikan_bangumi_id, bangumi_expand_info_url, origin_poster_src)) in
-            bangumi_items.iter().enumerate()
+        for (bangumi_title, mikan_bangumi_id, bangumi_expand_info_url, origin_poster_src) in
+            bangumi_iters
        {
-            if history.get(idx).is_some() {
-                continue;
-            } else if let Some((fansub_name, mikan_fansub_id)) = {
-                let bangumi_expand_info_content =
-                    fetch_html(http_client, bangumi_expand_info_url.clone()).await?;
-                let bangumi_expand_info_fragment =
-                    Html::parse_fragment(&bangumi_expand_info_content);
-                bangumi_expand_info_fragment
-                    .select(fansub_container_selector)
-                    .next()
-                    .and_then(|fansub_info| {
+            if let Some((fansub_name, mikan_fansub_id)) = {
+                let bangumi_expand_info_content = fetch_html(http_client, bangumi_expand_info_url).await?;
+                let bangumi_expand_info_fragment = Html::parse_fragment(&bangumi_expand_info_content);
+                bangumi_expand_info_fragment.select(fansub_container_selector).next().and_then(|fansub_info| {
                        if let (Some(fansub_name), Some(mikan_fansub_id)) = (
                            fansub_info
                                .select(fansub_title_selector)
@ -461,7 +449,7 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
                                .select(fansub_id_selector)
                                .next()
                                .and_then(|ele| ele.attr("data-subtitlegroupid"))
-                                .map(String::from),
+                                .map(String::from)
                        ) {
                            Some((fansub_name, mikan_fansub_id))
                        } else {
@ -469,11 +457,15 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
                        }
                    })
            } {
-                tracing::trace!(fansub_name, mikan_fansub_id, "subscribed fansub extracted");
-                let item = MikanBangumiMeta {
+                tracing::trace!(
+                    fansub_name,
+                    mikan_fansub_id,
+                    "subscribed fansub extracted"
+                );
+                yield MikanBangumiMeta {
                    homepage: build_mikan_bangumi_homepage(
                        mikan_base_url.clone(),
-                        mikan_bangumi_id,
+                        &mikan_bangumi_id,
                        Some(&mikan_fansub_id),
                    ),
                    bangumi_title: bangumi_title.to_string(),
@ -482,7 +474,6 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
                    fansub: Some(fansub_name),
                    origin_poster_src: origin_poster_src.clone(),
                };
-                yield item;
            }
        }
    }
@ -491,27 +482,31 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
 #[cfg(test)]
 mod test {
     #![allow(unused_variables)]
+    use color_eyre::eyre;
     use futures::{TryStreamExt, pin_mut};
     use http::header;
     use rstest::{fixture, rstest};
+    use secrecy::SecretString;
     use tracing::Level;
     use url::Url;
     use zune_image::{codecs::ImageFormat, image::Image};
 
     use super::*;
-    use crate::test_utils::{
-        app::UnitTestAppContext, mikan::build_testing_mikan_client,
-        tracing::try_init_testing_tracing,
-    };
+    use crate::{
+        extract::mikan::{
+            MikanAuthSecrecy, web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page,
+        },
+        test_utils::{mikan::build_testing_mikan_client, tracing::init_testing_tracing},
+    };
 
     #[fixture]
     fn before_each() {
-        try_init_testing_tracing(Level::INFO);
+        init_testing_tracing(Level::INFO);
    }
 
     #[rstest]
     #[tokio::test]
-    async fn test_extract_mikan_poster_from_src(before_each: ()) -> RResult<()> {
+    async fn test_extract_mikan_poster_from_src(before_each: ()) -> eyre::Result<()> {
         let mut mikan_server = mockito::Server::new_async().await;
         let mikan_base_url = Url::parse(&mikan_server.url())?;
         let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -542,7 +537,7 @@ mod test {
 
     #[rstest]
     #[tokio::test]
-    async fn test_extract_mikan_episode(before_each: ()) -> RResult<()> {
+    async fn test_extract_mikan_episode(before_each: ()) -> eyre::Result<()> {
         let mut mikan_server = mockito::Server::new_async().await;
         let mikan_base_url = Url::parse(&mikan_server.url())?;
         let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -582,7 +577,9 @@ mod test {
 
     #[rstest]
     #[tokio::test]
-    async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(before_each: ()) -> RResult<()> {
+    async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(
+        before_each: (),
+    ) -> eyre::Result<()> {
         let mut mikan_server = mockito::Server::new_async().await;
         let mikan_base_url = Url::parse(&mikan_server.url())?;
         let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
@ -619,18 +616,16 @@ mod test {
 
     #[rstest]
     #[tokio::test]
-    async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(before_each: ()) -> RResult<()> {
+    async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(
+        before_each: (),
+    ) -> eyre::Result<()> {
         let mut mikan_server = mockito::Server::new_async().await;
 
         let mikan_base_url = Url::parse(&mikan_server.url())?;
 
         let my_bangumi_page_url = mikan_base_url.join("/Home/MyBangumi")?;
 
-        let context = Arc::new(
-            UnitTestAppContext::builder()
-                .mikan(build_testing_mikan_client(mikan_base_url.clone()).await?)
-                .build(),
-        );
+        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
 
         {
             let my_bangumi_without_cookie_mock = mikan_server
@ -641,10 +636,8 @@ mod test {
                .await;
 
             let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
-                context.clone(),
+                &mikan_client,
                my_bangumi_page_url.clone(),
-                None,
-                &[],
            );
 
            pin_mut!(bangumi_metas);
@ -678,8 +671,8 @@ mod test {
                .create_async()
                .await;
 
-            let auth_secrecy = Some(MikanAuthSecrecy {
+            let mikan_client_with_cookie = mikan_client.fork_with_auth(MikanAuthSecrecy {
-                cookie: String::from(
+                cookie: SecretString::from(
                    "mikan-announcement=1; .AspNetCore.Antiforgery.abc=abc; \
                     .AspNetCore.Identity.Application=abc; ",
                ),
@ -687,13 +680,11 @@ mod test {
                    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like \
                     Gecko) Chrome/133.0.0.0 Safari/537.36 Edg/133.0.0.0",
                )),
-            });
+            })?;
 
            let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
-                context.clone(),
+                &mikan_client_with_cookie,
                my_bangumi_page_url,
-                auth_secrecy,
-                &[],
            );
            pin_mut!(bangumi_metas);
            let bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
@ -4,4 +4,4 @@ pub mod http;
 pub mod media;
 pub mod mikan;
 pub mod rawname;
-pub mod bittorrent;
+pub mod torrent;
@ -7,12 +7,8 @@ use itertools::Itertools;
 use lazy_static::lazy_static;
 use regex::Regex;
 use serde::{Deserialize, Serialize};
-use snafu::whatever;

-use crate::{
-    errors::app_error::RResult,
-    extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
-};
+use crate::extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE};

 const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";

@ -75,7 +71,10 @@ fn replace_ch_bracket_to_en(raw_name: &str) -> String {
     raw_name.replace('【', "[").replace('】', "]")
 }

-fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RResult<String> {
+fn title_body_pre_process(
+    title_body: &str,
+    fansub: Option<&str>,
+) -> color_eyre::eyre::Result<String> {
     let raw_without_fansub = if let Some(fansub) = fansub {
         let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
         fan_sub_re.replace_all(title_body, "")
@ -263,7 +262,7 @@ pub fn check_is_movie(title: &str) -> bool {
     MOVIE_TITLE_RE.is_match(title)
 }

-pub fn parse_episode_meta_from_raw_name(s: &str) -> RResult<RawEpisodeMeta> {
+pub fn parse_episode_meta_from_raw_name(s: &str) -> color_eyre::eyre::Result<RawEpisodeMeta> {
     let raw_title = s.trim();
     let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
     let fansub = extract_fansub(&raw_title_without_ch_brackets);
@ -316,7 +315,10 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> RResult<RawEpisodeMeta> {
             resolution,
         })
     } else {
-        whatever!("Can not parse episode meta from raw filename {}", raw_title)
+        Err(color_eyre::eyre::eyre!(
+            "Can not parse episode meta from raw filename {}",
+            raw_title
+        ))
     }
 }
3
apps/recorder/src/extract/torrent/mod.rs
Normal file
@ -0,0 +1,3 @@
+mod parser;
+
+pub use parser::*;
@ -1,14 +1,11 @@
+use color_eyre::eyre::OptionExt;
 use fancy_regex::Regex as FancyRegex;
 use lazy_static::lazy_static;
 use quirks_path::Path;
 use regex::Regex;
 use serde::{Deserialize, Serialize};
-use snafu::{OptionExt, whatever};

-use crate::{
-    errors::app_error::{RError, RResult},
-    extract::defs::SUBTITLE_LANG,
-};
+use crate::extract::defs::SUBTITLE_LANG;

 lazy_static! {
     static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
@ -104,12 +101,10 @@ pub fn parse_episode_media_meta_from_torrent(
     torrent_path: &Path,
     torrent_name: Option<&str>,
     season: Option<i32>,
-) -> RResult<TorrentEpisodeMediaMeta> {
+) -> color_eyre::eyre::Result<TorrentEpisodeMediaMeta> {
     let media_name = torrent_path
         .file_name()
-        .with_whatever_context::<_, _, RError>(|| {
-            format!("failed to get file name of {}", torrent_path)
-        })?;
+        .ok_or_else(|| color_eyre::eyre::eyre!("failed to get file name of {}", torrent_path))?;
     let mut match_obj = None;
     for rule in TORRENT_EP_PARSE_RULES.iter() {
         match_obj = if let Some(torrent_name) = torrent_name.as_ref() {
@ -124,7 +119,7 @@ pub fn parse_episode_media_meta_from_torrent(
     if let Some(match_obj) = match_obj {
         let group_season_and_title = match_obj
             .get(1)
-            .whatever_context::<_, RError>("should have 1 group")?
+            .ok_or_else(|| color_eyre::eyre::eyre!("should have 1 group"))?
             .as_str();
         let (fansub, season_and_title) = get_fansub(group_season_and_title);
         let (title, season) = if let Some(season) = season {
@ -135,7 +130,7 @@ pub fn parse_episode_media_meta_from_torrent(
         };
         let episode_index = match_obj
             .get(2)
-            .whatever_context::<_, RError>("should have 2 group")?
+            .ok_or_eyre("should have 2 group")?
             .as_str()
             .parse::<i32>()
             .unwrap_or(1);
@ -151,11 +146,11 @@ pub fn parse_episode_media_meta_from_torrent(
             extname,
         })
     } else {
-        whatever!(
+        Err(color_eyre::eyre::eyre!(
             "failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'",
             torrent_path,
             torrent_name
-        )
+        ))
     }
 }

@ -163,13 +158,11 @@ pub fn parse_episode_subtitle_meta_from_torrent(
     torrent_path: &Path,
     torrent_name: Option<&str>,
     season: Option<i32>,
-) -> RResult<TorrentEpisodeSubtitleMeta> {
+) -> color_eyre::eyre::Result<TorrentEpisodeSubtitleMeta> {
     let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
     let media_name = torrent_path
         .file_name()
-        .with_whatever_context::<_, _, RError>(|| {
-            format!("failed to get file name of {}", torrent_path)
-        })?;
+        .ok_or_else(|| color_eyre::eyre::eyre!("failed to get file name of {}", torrent_path))?;

     let lang = get_subtitle_lang(media_name);

@ -184,8 +177,8 @@ mod tests {
     use quirks_path::Path;

     use super::{
-        TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta, parse_episode_media_meta_from_torrent,
-        parse_episode_subtitle_meta_from_torrent,
+        parse_episode_media_meta_from_torrent, parse_episode_subtitle_meta_from_torrent,
+        TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta,
     };

     #[test]
@ -2,7 +2,7 @@ use bytes::Bytes;
 use reqwest::IntoUrl;

 use super::client::HttpClientTrait;
-use crate::errors::app_error::RError;
+use crate::errors::RError;

 pub async fn fetch_bytes<T: IntoUrl, H: HttpClientTrait>(
     client: &H,
@ -14,7 +14,7 @@ use reqwest_retry::{RetryTransientMiddleware, policies::ExponentialBackoff};
 use reqwest_tracing::TracingMiddleware;
 use serde::{Deserialize, Serialize};
 use serde_with::serde_as;
-use snafu::Snafu;
+use thiserror::Error;

 use super::HttpClientSecrecyDataTrait;
 use crate::fetch::get_random_mobile_ua;
@ -101,14 +101,14 @@ impl CacheManager for CacheBackend {
     }
 }

-#[derive(Debug, Snafu)]
+#[derive(Debug, Error)]
 pub enum HttpClientError {
-    #[snafu(transparent)]
-    ReqwestError { source: reqwest::Error },
-    #[snafu(transparent)]
-    ReqwestMiddlewareError { source: reqwest_middleware::Error },
-    #[snafu(transparent)]
-    HttpError { source: http::Error },
+    #[error(transparent)]
+    ReqwestError(#[from] reqwest::Error),
+    #[error(transparent)]
+    ReqwestMiddlewareError(#[from] reqwest_middleware::Error),
+    #[error(transparent)]
+    HttpError(#[from] http::Error),
 }

 pub trait HttpClientTrait: Deref<Target = ClientWithMiddleware> + Debug {}
@ -2,11 +2,12 @@ use std::sync::Arc;

 use cookie::Cookie;
 use reqwest::{ClientBuilder, cookie::Jar};
+use secrecy::zeroize::Zeroize;
 use url::Url;

-use crate::errors::app_error::RError;
+use crate::errors::RError;

-pub trait HttpClientSecrecyDataTrait {
+pub trait HttpClientSecrecyDataTrait: Zeroize {
     fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
         client_builder
     }
@ -36,6 +37,13 @@ impl HttpClientCookiesAuth {
     }
 }

+impl Zeroize for HttpClientCookiesAuth {
+    fn zeroize(&mut self) {
+        self.cookie_jar = Arc::new(Jar::default());
+        self.user_agent = None;
+    }
+}
+
 impl HttpClientSecrecyDataTrait for HttpClientCookiesAuth {
     fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
         let mut client_builder = client_builder.cookie_provider(self.cookie_jar.clone());
@ -1,7 +1,7 @@
 use reqwest::IntoUrl;

 use super::client::HttpClientTrait;
-use crate::errors::app_error::RError;
+use crate::errors::RError;

 pub async fn fetch_html<T: IntoUrl, H: HttpClientTrait>(
     client: &H,
@ -2,7 +2,7 @@ use bytes::Bytes;
 use reqwest::IntoUrl;

 use super::{bytes::fetch_bytes, client::HttpClientTrait};
-use crate::errors::app_error::RError;
+use crate::errors::RError;

 pub async fn fetch_image<T: IntoUrl, H: HttpClientTrait>(
     client: &H,
@ -2,7 +2,7 @@ use std::{future::Future, pin::Pin};

 use axum::http;

-use super::{HttpClient, client::HttpClientError};
+use super::{client::HttpClientError, HttpClient};

 impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {
     type Error = HttpClientError;
@ -30,7 +30,7 @@ impl<'c> openidconnect::AsyncHttpClient<'c> for HttpClient {

             builder
                 .body(response.bytes().await?.to_vec())
-                .map_err(HttpClientError::from)
+                .map_err(HttpClientError::HttpError)
         })
     }
 }
@ -3,7 +3,6 @@ pub mod filter;
 pub mod guard;
 pub mod schema_root;
 pub mod service;
-pub mod subscriptions;
 pub mod util;

 pub use config::GraphQLConfig;
@ -2,7 +2,7 @@ use async_graphql::dynamic::Schema;
 use sea_orm::DatabaseConnection;

 use super::{config::GraphQLConfig, schema_root};
-use crate::errors::app_error::RResult;
+use crate::errors::RResult;

 #[derive(Debug)]
 pub struct GraphQLService {
@ -5,16 +5,13 @@
     impl_trait_in_bindings,
     iterator_try_collect,
     async_fn_traits,
-    let_chains,
-    error_generic_member_access
+    let_chains
 )]
-#![feature(associated_type_defaults)]

 pub mod app;
 pub mod auth;
 pub mod cache;
 pub mod database;
-pub mod downloader;
 pub mod errors;
 pub mod extract;
 pub mod fetch;
@ -23,6 +20,7 @@ pub mod logger;
 pub mod migrations;
 pub mod models;
 pub mod storage;
+pub mod sync;
 pub mod tasks;
 #[cfg(test)]
 pub mod test_utils;
@ -1,6 +1,5 @@
 use std::sync::OnceLock;

-use snafu::prelude::*;
 use tracing_appender::non_blocking::WorkerGuard;
 use tracing_subscriber::{
     EnvFilter, Layer, Registry,
@ -10,7 +9,7 @@ use tracing_subscriber::{
 };

 use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
-use crate::errors::app_error::RResult;
+use crate::errors::{RError, RResult};

 // Function to initialize the logger based on the provided configuration
 const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];
@ -120,9 +119,9 @@ impl LoggerService {
         let file_appender_layer = if file_appender_config.non_blocking {
             let (non_blocking_file_appender, work_guard) =
                 tracing_appender::non_blocking(file_appender);
-            if NONBLOCKING_WORK_GUARD_KEEP.set(work_guard).is_err() {
-                whatever!("cannot lock for appender");
-            };
+            NONBLOCKING_WORK_GUARD_KEEP
+                .set(work_guard)
+                .map_err(|_| RError::CustomMessageStr("cannot lock for appender"))?;
             Self::init_layer(
                 non_blocking_file_appender,
                 &file_appender_config.format,
1
apps/recorder/src/main.css
Normal file
@ -0,0 +1 @@
+@import "tailwindcss";
96
apps/recorder/src/main.tsx
Normal file
@ -0,0 +1,96 @@
+import '@abraham/reflection';
+import { type Injector, ReflectiveInjector } from '@outposts/injection-js';
+import { RouterProvider, createRouter } from '@tanstack/react-router';
+import {
+  OidcSecurityService,
+  provideAuth,
+  withDefaultFeatures,
+} from 'oidc-client-rx';
+import {
+  InjectorContextVoidInjector,
+  InjectorProvider,
+} from 'oidc-client-rx/adapters/react';
+import { withTanstackRouter } from 'oidc-client-rx/adapters/tanstack-router';
+import React from 'react';
+import ReactDOM from 'react-dom/client';
+import { buildOidcConfig, isBasicAuth } from './auth/config';
+import { withCheckAuthResultEvent } from './auth/event';
+import { useAuth } from './auth/hooks';
+import { routeTree } from './routeTree.gen';
+import './main.css';
+
+const router = createRouter({
+  routeTree,
+  basepath: '/api/playground',
+  defaultPreload: 'intent',
+  context: {
+    isAuthenticated: isBasicAuth,
+    injector: InjectorContextVoidInjector,
+    oidcSecurityService: {} as OidcSecurityService,
+  },
+});
+
+// Register things for typesafety
+declare module '@tanstack/react-router' {
+  interface Register {
+    router: typeof router;
+  }
+}
+
+const injector: Injector = isBasicAuth
+  ? ReflectiveInjector.resolveAndCreate([])
+  : ReflectiveInjector.resolveAndCreate(
+      provideAuth(
+        {
+          config: buildOidcConfig(),
+        },
+        withDefaultFeatures({
+          router: { enabled: false },
+          securityStorage: { type: 'local-storage' },
+        }),
+        withTanstackRouter(router),
+        withCheckAuthResultEvent()
+      )
+    );
+
+// if needed, check when init
+let oidcSecurityService: OidcSecurityService | undefined;
+if (!isBasicAuth) {
+  oidcSecurityService = injector.get(OidcSecurityService);
+  oidcSecurityService.checkAuth().subscribe();
+}
+
+const AppWithBasicAuth = () => {
+  return <RouterProvider router={router} />;
+};
+
+const AppWithOidcAuth = () => {
+  const { isAuthenticated, oidcSecurityService, injector } = useAuth();
+  return (
+    <RouterProvider
+      router={router}
+      context={{
+        isAuthenticated,
+        oidcSecurityService,
+        injector,
+      }}
+    />
+  );
+};
+
+const App = isBasicAuth ? AppWithBasicAuth : AppWithOidcAuth;
+
+const rootEl = document.getElementById('root');
+
+if (rootEl) {
+  rootEl.classList.add('min-h-svh');
+  const root = ReactDOM.createRoot(rootEl);
+
+  root.render(
+    <React.StrictMode>
+      <InjectorProvider injector={injector}>
+        <App />
+      </InjectorProvider>
+    </React.StrictMode>
+  );
+}
@ -4,8 +4,8 @@ use serde::{Deserialize, Serialize};

 use super::subscribers::{self, SEED_SUBSCRIBER};
 use crate::{
-    app::AppContextTrait,
-    errors::app_error::{RError, RResult},
+    app::AppContext,
+    errors::{RError, RResult},
 };

 #[derive(
@ -57,8 +57,8 @@ impl Related<super::subscribers::Entity> for Entity {
 impl ActiveModelBehavior for ActiveModel {}

 impl Model {
-    pub async fn find_by_pid(ctx: &dyn AppContextTrait, pid: &str) -> RResult<Self> {
-        let db = ctx.db();
+    pub async fn find_by_pid(ctx: &AppContext, pid: &str) -> RResult<Self> {
+        let db = &ctx.db;
         let subscriber_auth = Entity::find()
             .filter(Column::Pid.eq(pid))
             .one(db)
@ -67,8 +67,8 @@ impl Model {
         Ok(subscriber_auth)
     }

-    pub async fn create_from_oidc(ctx: &dyn AppContextTrait, sub: String) -> RResult<Self> {
-        let db = ctx.db();
+    pub async fn create_from_oidc(ctx: &AppContext, sub: String) -> RResult<Self> {
+        let db = &ctx.db;

         let txn = db.begin().await?;

@ -4,7 +4,7 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::O
 use serde::{Deserialize, Serialize};

 use super::subscription_bangumi;
-use crate::{app::AppContextTrait, errors::app_error::RResult};
+use crate::{app::AppContext, errors::RResult};

 #[derive(
     Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
@ -113,7 +113,7 @@ pub enum RelatedEntity {

 impl Model {
     pub async fn get_or_insert_from_mikan<F>(
-        ctx: &dyn AppContextTrait,
+        ctx: &AppContext,
         subscriber_id: i32,
         subscription_id: i32,
         mikan_bangumi_id: String,
@ -123,7 +123,7 @@ impl Model {
     where
         F: AsyncFnOnce(&mut ActiveModel) -> RResult<()>,
     {
-        let db = ctx.db();
+        let db = &ctx.db;
         if let Some(existed) = Entity::find()
             .filter(
                 Column::MikanBangumiId
@ -6,8 +6,8 @@ use serde::{Deserialize, Serialize};

 use super::{bangumi, query::InsertManyReturningExt, subscription_episode};
 use crate::{
-    app::AppContextTrait,
-    errors::app_error::RResult,
+    app::AppContext,
+    errors::RResult,
     extract::{
         mikan::{MikanEpisodeMeta, build_mikan_episode_homepage},
         rawname::parse_episode_meta_from_raw_name,
@ -136,12 +136,12 @@ pub struct MikanEpsiodeCreation {

 impl Model {
     pub async fn add_episodes(
-        ctx: &dyn AppContextTrait,
+        ctx: &AppContext,
         subscriber_id: i32,
         subscription_id: i32,
         creations: impl IntoIterator<Item = MikanEpsiodeCreation>,
     ) -> RResult<()> {
-        let db = ctx.db();
+        let db = &ctx.db;
         let new_episode_active_modes = creations
             .into_iter()
             .map(|cr| ActiveModel::from_mikan_episode_meta(ctx, cr))
@ -189,9 +189,9 @@ impl Model {

 impl ActiveModel {
     pub fn from_mikan_episode_meta(
-        ctx: &dyn AppContextTrait,
+        ctx: &AppContext,
         creation: MikanEpsiodeCreation,
-    ) -> RResult<Self> {
+    ) -> color_eyre::eyre::Result<Self> {
         let item = creation.episode;
         let bgm = creation.bangumi;
         let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title)
@ -201,7 +201,7 @@ impl ActiveModel {
             .ok()
             .unwrap_or_default();
         let homepage =
-            build_mikan_episode_homepage(ctx.mikan().base_url().clone(), &item.mikan_episode_id);
+            build_mikan_episode_homepage(ctx.mikan.base_url().clone(), &item.mikan_episode_id);

         Ok(Self {
             mikan_episode_id: ActiveValue::Set(Some(item.mikan_episode_id)),
@ -8,5 +8,3 @@ pub mod subscribers;
 pub mod subscription_bangumi;
 pub mod subscription_episode;
 pub mod subscriptions;
-pub mod task_stream_item;
-pub mod tasks;
@ -4,8 +4,8 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, TransactionTrait, entity::prelud
 use serde::{Deserialize, Serialize};

 use crate::{
-    app::AppContextTrait,
-    errors::app_error::{RError, RResult},
+    app::AppContext,
+    errors::{RError, RResult},
 };

 pub const SEED_SUBSCRIBER: &str = "konobangu";
@ -95,13 +95,13 @@ pub struct SubscriberIdParams {
 impl ActiveModelBehavior for ActiveModel {}

 impl Model {
-    pub async fn find_seed_subscriber_id(ctx: &dyn AppContextTrait) -> RResult<i32> {
+    pub async fn find_seed_subscriber_id(ctx: &AppContext) -> RResult<i32> {
         let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER).await?;
         Ok(subscriber_auth.subscriber_id)
     }

-    pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RResult<Self> {
-        let db = ctx.db();
+    pub async fn find_by_id(ctx: &AppContext, id: i32) -> RResult<Self> {
+        let db = &ctx.db;

         let subscriber = Entity::find_by_id(id)
             .one(db)
@ -110,8 +110,8 @@ impl Model {
         Ok(subscriber)
     }

-    pub async fn create_root(ctx: &dyn AppContextTrait) -> RResult<Self> {
-        let db = ctx.db();
+    pub async fn create_root(ctx: &AppContext) -> RResult<Self> {
+        let db = &ctx.db;
         let txn = db.begin().await?;

         let user = ActiveModel {
@ -7,8 +7,8 @@ use serde::{Deserialize, Serialize};

 use super::{bangumi, episodes, query::filter_values_in};
 use crate::{
-    app::AppContextTrait,
-    errors::app_error::RResult,
+    app::AppContext,
+    errors::RResult,
     extract::{
         mikan::{
             build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link,
@ -179,22 +179,22 @@ impl ActiveModel {

 impl Model {
     pub async fn add_subscription(
-        ctx: &dyn AppContextTrait,
+        ctx: &AppContext,
         create_dto: SubscriptionCreateDto,
         subscriber_id: i32,
     ) -> RResult<Self> {
-        let db = ctx.db();
+        let db = &ctx.db;
         let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id);

         Ok(subscription.insert(db).await?)
     }

     pub async fn toggle_with_ids(
-        ctx: &dyn AppContextTrait,
+        ctx: &AppContext,
         ids: impl Iterator<Item = i32>,
         enabled: bool,
     ) -> RResult<()> {
-        let db = ctx.db();
+        let db = &ctx.db;
         Entity::update_many()
             .col_expr(Column::Enabled, Expr::value(enabled))
             .filter(Column::Id.is_in(ids))
@ -203,11 +203,8 @@ impl Model {
         Ok(())
     }

-    pub async fn delete_with_ids(
-        ctx: &dyn AppContextTrait,
-        ids: impl Iterator<Item = i32>,
-    ) -> RResult<()> {
-        let db = ctx.db();
+    pub async fn delete_with_ids(ctx: &AppContext, ids: impl Iterator<Item = i32>) -> RResult<()> {
+        let db = &ctx.db;
         Entity::delete_many()
             .filter(Column::Id.is_in(ids))
             .exec(db)
@ -215,16 +212,16 @@ impl Model {
         Ok(())
     }

-    pub async fn pull_subscription(&self, ctx: &dyn AppContextTrait) -> RResult<()> {
+    pub async fn pull_subscription(&self, ctx: &AppContext) -> RResult<()> {
         match &self.category {
             SubscriptionCategory::Mikan => {
-                let mikan_client = ctx.mikan();
+                let mikan_client = &ctx.mikan;
                 let channel =
                     extract_mikan_rss_channel_from_rss_link(mikan_client, &self.source_url).await?;

                 let items = channel.into_items();

-                let db = ctx.db();
+                let db = &ctx.db;
                 let items = items.into_iter().collect_vec();

                 let mut stmt = filter_values_in(
@ -269,7 +266,7 @@ impl Model {

                 for ((mikan_bangumi_id, mikan_fansub_id), new_ep_metas) in new_mikan_bangumi_groups
                 {
-                    let mikan_base_url = ctx.mikan().base_url();
+                    let mikan_base_url = ctx.mikan.base_url();
                     let bgm_homepage = build_mikan_bangumi_homepage(
                         mikan_base_url.clone(),
                         &mikan_bangumi_id,
@ -1,62 +0,0 @@
-use async_trait::async_trait;
-use sea_orm::entity::prelude::*;
-use serde::{Deserialize, Serialize};
-
-#[derive(
-    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
-)]
-#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_status")]
-#[serde(rename_all = "snake_case")]
-pub enum TaskStatus {
-    #[sea_orm(string_value = "r")]
-    Running,
-    #[sea_orm(string_value = "s")]
-    Success,
-    #[sea_orm(string_value = "f")]
-    Failed,
-}
-
-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
-#[sea_orm(table_name = "tasks")]
-pub struct Model {
-    #[sea_orm(primary_key)]
-    pub id: i32,
-    pub task_id: i32,
-    pub subscriber_id: i32,
-    pub item: serde_json::Value,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
-    #[sea_orm(
-        belongs_to = "super::subscribers::Entity",
-        from = "Column::SubscriberId",
-        to = "super::subscribers::Column::Id",
-        on_update = "Cascade",
-        on_delete = "Cascade"
-    )]
-    Subscriber,
-    #[sea_orm(
-        belongs_to = "super::tasks::Entity",
-        from = "Column::TaskId",
-        to = "super::tasks::Column::Id",
-        on_update = "Cascade",
-        on_delete = "Cascade"
-    )]
-    Task,
-}
-
-impl Related<super::subscribers::Entity> for Entity {
-    fn to() -> RelationDef {
-        Relation::Subscriber.def()
-    }
-}
-
-impl Related<super::tasks::Entity> for Entity {
-    fn to() -> RelationDef {
-        Relation::Task.def()
-    }
-}
-
-#[async_trait]
-impl ActiveModelBehavior for ActiveModel {}
@ -1,95 +0,0 @@
-use async_trait::async_trait;
-use sea_orm::{QuerySelect, entity::prelude::*};
-use serde::{Deserialize, Serialize};
-
-use crate::{app::AppContextTrait, errors::app_error::RResult};
-
-#[derive(
-    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
-)]
-#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_status")]
-#[serde(rename_all = "snake_case")]
-pub enum TaskStatus {
-    #[sea_orm(string_value = "p")]
-    Pending,
-    #[sea_orm(string_value = "r")]
-    Running,
-    #[sea_orm(string_value = "s")]
-    Success,
-    #[sea_orm(string_value = "f")]
-    Failed,
-}
-
-#[derive(
-    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
-)]
-#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_status")]
-#[serde(rename_all = "snake_case")]
-pub enum TaskMode {
-    #[sea_orm(string_value = "stream")]
-    Stream,
-    #[sea_orm(string_value = "future")]
-    Future,
-}
-
-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
-#[sea_orm(table_name = "tasks")]
-pub struct Model {
-    #[sea_orm(primary_key)]
-    pub id: i32,
-    pub subscriber_id: i32,
-    pub task_mode: TaskMode,
-    pub task_status: TaskStatus,
-    pub task_type: String,
-    pub state_data: serde_json::Value,
-    pub request_data: serde_json::Value,
-    pub error_data: serde_json::Value,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
-    #[sea_orm(has_many = "super::task_stream_item::Entity")]
-    StreamItem,
-    #[sea_orm(
-        belongs_to = "super::subscribers::Entity",
-        from = "Column::SubscriberId",
-        to = "super::subscribers::Column::Id",
-        on_update = "Cascade",
-        on_delete = "Cascade"
-    )]
-    Subscriber,
-}
-
-impl Related<super::subscribers::Entity> for Entity {
-    fn to() -> RelationDef {
-        Relation::Subscriber.def()
-    }
-}
-
-impl Related<super::task_stream_item::Entity> for Entity {
-    fn to() -> RelationDef {
-        Relation::StreamItem.def()
-    }
-}
-
-impl Model {
-    pub async fn find_stream_task_by_id(
-        ctx: &dyn AppContextTrait,
-        task_id: i32,
-    ) -> RResult<Option<(Model, Vec<super::task_stream_item::Model>)>> {
-        let db = ctx.db();
-        let res = Entity::find()
-            .filter(Column::Id.eq(task_id))
-            .filter(Column::TaskMode.eq(TaskMode::Stream))
-            .find_with_related(super::task_stream_item::Entity)
-            .limit(1)
-            .all(db)
-            .await?
-            .pop();
-
-        Ok(res)
-    }
-}
-
-#[async_trait]
-impl ActiveModelBehavior for ActiveModel {}
134
apps/recorder/src/routeTree.gen.ts
Normal file
@ -0,0 +1,134 @@
+/* eslint-disable */
+
+// @ts-nocheck
+
+// noinspection JSUnusedGlobalSymbols
+
+// This file was automatically generated by TanStack Router.
+// You should NOT make any changes in this file as it will be overwritten.
+// Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified.
+
+// Import Routes
+
+import { Route as rootRoute } from './web/controller/__root'
+import { Route as IndexImport } from './web/controller/index'
+import { Route as GraphqlIndexImport } from './web/controller/graphql/index'
+import { Route as OidcCallbackImport } from './web/controller/oidc/callback'
+
+// Create/Update Routes
+
+const IndexRoute = IndexImport.update({
+  id: '/',
+  path: '/',
+  getParentRoute: () => rootRoute,
+} as any)
+
+const GraphqlIndexRoute = GraphqlIndexImport.update({
+  id: '/graphql/',
+  path: '/graphql/',
+  getParentRoute: () => rootRoute,
+} as any)
+
+const OidcCallbackRoute = OidcCallbackImport.update({
+  id: '/oidc/callback',
+  path: '/oidc/callback',
+  getParentRoute: () => rootRoute,
+} as any)
+
+// Populate the FileRoutesByPath interface
+
+declare module '@tanstack/react-router' {
+  interface FileRoutesByPath {
+    '/': {
+      id: '/'
+      path: '/'
+      fullPath: '/'
+      preLoaderRoute: typeof IndexImport
+      parentRoute: typeof rootRoute
+    }
+    '/oidc/callback': {
+      id: '/oidc/callback'
+      path: '/oidc/callback'
+      fullPath: '/oidc/callback'
+      preLoaderRoute: typeof OidcCallbackImport
+      parentRoute: typeof rootRoute
+    }
+    '/graphql/': {
+      id: '/graphql/'
+      path: '/graphql'
+      fullPath: '/graphql'
+      preLoaderRoute: typeof GraphqlIndexImport
+      parentRoute: typeof rootRoute
+    }
+  }
+}
+
+// Create and export the route tree
+
+export interface FileRoutesByFullPath {
+  '/': typeof IndexRoute
+  '/oidc/callback': typeof OidcCallbackRoute
+  '/graphql': typeof GraphqlIndexRoute
+}
+
+export interface FileRoutesByTo {
+  '/': typeof IndexRoute
+  '/oidc/callback': typeof OidcCallbackRoute
+  '/graphql': typeof GraphqlIndexRoute
+}
+
+export interface FileRoutesById {
+  __root__: typeof rootRoute
+  '/': typeof IndexRoute
+  '/oidc/callback': typeof OidcCallbackRoute
+  '/graphql/': typeof GraphqlIndexRoute
+}
+
+export interface FileRouteTypes {
+  fileRoutesByFullPath: FileRoutesByFullPath
+  fullPaths: '/' | '/oidc/callback' | '/graphql'
+  fileRoutesByTo: FileRoutesByTo
+  to: '/' | '/oidc/callback' | '/graphql'
+  id: '__root__' | '/' | '/oidc/callback' | '/graphql/'
+  fileRoutesById: FileRoutesById
+}
+
+export interface RootRouteChildren {
+  IndexRoute: typeof IndexRoute
+  OidcCallbackRoute: typeof OidcCallbackRoute
+  GraphqlIndexRoute: typeof GraphqlIndexRoute
+}
+
+const rootRouteChildren: RootRouteChildren = {
+  IndexRoute: IndexRoute,
+  OidcCallbackRoute: OidcCallbackRoute,
+  GraphqlIndexRoute: GraphqlIndexRoute,
+}
+
+export const routeTree = rootRoute
+  ._addFileChildren(rootRouteChildren)
+  ._addFileTypes<FileRouteTypes>()
+
+/* ROUTE_MANIFEST_START
+{
+  "routes": {
+    "__root__": {
+      "filePath": "__root.tsx",
+      "children": [
+        "/",
+        "/oidc/callback",
+        "/graphql/"
+      ]
+    },
+    "/": {
+      "filePath": "index.tsx"
+    },
+    "/oidc/callback": {
+      "filePath": "oidc/callback.tsx"
+    },
+    "/graphql/": {
+      "filePath": "graphql/index.tsx"
+    }
+  }
+}
+ROUTE_MANIFEST_END */
@ -8,7 +8,7 @@ use url::Url;
 use uuid::Uuid;

 use super::StorageConfig;
-use crate::errors::app_error::{RError, RResult};
+use crate::errors::{RError, RResult};

 #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(rename_all = "snake_case")]
@ -142,7 +142,7 @@ impl StorageService {
     subscriber_pid: &str,
     bucket: Option<&str>,
     filename: &str,
-    ) -> RResult<Buffer> {
+    ) -> color_eyre::eyre::Result<Buffer> {
     match content_category {
         StorageContentCategory::Image => {
             let fullname = [
298
apps/recorder/src/sync/core.rs
Normal file
@ -0,0 +1,298 @@
+use std::fmt::Debug;
+
+use async_trait::async_trait;
+use itertools::Itertools;
+use lazy_static::lazy_static;
+use librqbit_core::{
+    magnet::Magnet,
+    torrent_metainfo::{TorrentMetaV1Owned, torrent_from_bytes},
+};
+use quirks_path::{Path, PathBuf};
+use regex::Regex;
+use serde::{Deserialize, Serialize};
+use url::Url;
+
+use super::{QbitTorrent, QbitTorrentContent, TorrentDownloadError};
+use crate::fetch::{HttpClientTrait, fetch_bytes};
+
+pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
+pub const MAGNET_SCHEMA: &str = "magnet";
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum TorrentFilter {
+    All,
+    Downloading,
+    Completed,
+    Paused,
+    Active,
+    Inactive,
+    Resumed,
+    Stalled,
+    StalledUploading,
+    StalledDownloading,
+    Errored,
+}
+
+lazy_static! {
+    static ref TORRENT_HASH_RE: Regex = Regex::new(r"[a-fA-F0-9]{40}").unwrap();
+    static ref TORRENT_EXT_RE: Regex = Regex::new(r"\.torrent$").unwrap();
+}
+
+#[derive(Clone, PartialEq, Eq)]
+pub enum TorrentSource {
+    MagnetUrl {
+        url: Url,
+        hash: String,
+    },
+    TorrentUrl {
+        url: Url,
+        hash: String,
+    },
+    TorrentFile {
+        torrent: Vec<u8>,
+        hash: String,
+        name: Option<String>,
+    },
+}
+
+impl TorrentSource {
+    pub async fn parse<H: HttpClientTrait>(
+        client: &H,
+        url: &str,
+    ) -> color_eyre::eyre::Result<Self> {
+        let url = Url::parse(url)?;
+        let source = if url.scheme() == MAGNET_SCHEMA {
+            TorrentSource::from_magnet_url(url)?
+        } else if let Some(basename) = url
+            .clone()
+            .path_segments()
+            .and_then(|mut segments| segments.next_back())
+        {
+            if let (Some(match_hash), true) = (
+                TORRENT_HASH_RE.find(basename),
+                TORRENT_EXT_RE.is_match(basename),
+            ) {
+                TorrentSource::from_torrent_url(url, match_hash.as_str().to_string())?
+            } else {
+                let contents = fetch_bytes(client, url).await?;
+                TorrentSource::from_torrent_file(contents.to_vec(), Some(basename.to_string()))?
+            }
+        } else {
+            let contents = fetch_bytes(client, url).await?;
+            TorrentSource::from_torrent_file(contents.to_vec(), None)?
+        };
+        Ok(source)
+    }
+
+    pub fn from_torrent_file(
+        file: Vec<u8>,
+        name: Option<String>,
+    ) -> color_eyre::eyre::Result<Self> {
+        let torrent: TorrentMetaV1Owned = torrent_from_bytes(&file)
+            .map_err(|_| TorrentDownloadError::InvalidTorrentFileFormat)?;
+        let hash = torrent.info_hash.as_string();
+        Ok(TorrentSource::TorrentFile {
+            torrent: file,
+            hash,
+            name,
+        })
+    }
+
+    pub fn from_magnet_url(url: Url) -> color_eyre::eyre::Result<Self> {
+        if url.scheme() != MAGNET_SCHEMA {
+            Err(TorrentDownloadError::InvalidUrlSchema {
+                found: url.scheme().to_string(),
+                expected: MAGNET_SCHEMA.to_string(),
+            }
+            .into())
+        } else {
+            let magnet = Magnet::parse(url.as_str()).map_err(|_| {
+                TorrentDownloadError::InvalidMagnetFormat {
+                    url: url.as_str().to_string(),
+                }
+            })?;
+
+            let hash = magnet
+                .as_id20()
+                .ok_or_else(|| TorrentDownloadError::InvalidMagnetFormat {
+                    url: url.as_str().to_string(),
+                })?
+                .as_string();
+            Ok(TorrentSource::MagnetUrl { url, hash })
+        }
+    }
+
+    pub fn from_torrent_url(url: Url, hash: String) -> color_eyre::eyre::Result<Self> {
+        Ok(TorrentSource::TorrentUrl { url, hash })
+    }
+
+    pub fn hash(&self) -> &str {
+        match self {
+            TorrentSource::MagnetUrl { hash, .. } => hash,
+            TorrentSource::TorrentUrl { hash, .. } => hash,
+            TorrentSource::TorrentFile { hash, .. } => hash,
+        }
+    }
+}
+
+impl Debug for TorrentSource {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            TorrentSource::MagnetUrl { url, .. } => {
+                write!(f, "MagnetUrl {{ url: {} }}", url.as_str())
+            }
+            TorrentSource::TorrentUrl { url, .. } => {
+                write!(f, "TorrentUrl {{ url: {} }}", url.as_str())
+            }
+            TorrentSource::TorrentFile { name, hash, .. } => write!(
+                f,
+                "TorrentFile {{ name: \"{}\", hash: \"{hash}\" }}",
+                name.as_deref().unwrap_or_default()
+            ),
+        }
+    }
+}
+
+pub trait TorrentContent {
+    fn get_name(&self) -> &str;
+
+    fn get_all_size(&self) -> u64;
+
+    fn get_progress(&self) -> f64;
+
+    fn get_curr_size(&self) -> u64;
+}
+
+impl TorrentContent for QbitTorrentContent {
+    fn get_name(&self) -> &str {
+        self.name.as_str()
+    }
+
+    fn get_all_size(&self) -> u64 {
+        self.size
+    }
+
+    fn get_progress(&self) -> f64 {
+        self.progress
+    }
+
+    fn get_curr_size(&self) -> u64 {
+        u64::clamp(
+            f64::round(self.get_all_size() as f64 * self.get_progress()) as u64,
+            0,
+            self.get_all_size(),
+        )
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum Torrent {
+    Qbit {
+        torrent: QbitTorrent,
+        contents: Vec<QbitTorrentContent>,
+    },
+}
+
+impl Torrent {
+    pub fn iter_files(&self) -> impl Iterator<Item = &dyn TorrentContent> {
+        match self {
+            Torrent::Qbit { contents, .. } => {
+                contents.iter().map(|item| item as &dyn TorrentContent)
+            }
+        }
+    }
+
+    pub fn get_name(&self) -> Option<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.name.as_deref(),
+        }
+    }
+
+    pub fn get_hash(&self) -> Option<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.hash.as_deref(),
+        }
+    }
+
+    pub fn get_save_path(&self) -> Option<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.save_path.as_deref(),
+        }
+    }
+
+    pub fn get_content_path(&self) -> Option<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.content_path.as_deref(),
+        }
+    }
+
+    pub fn get_tags(&self) -> Vec<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.tags.as_deref().map_or_else(Vec::new, |s| {
+                s.split(',')
+                    .map(|s| s.trim())
+                    .filter(|s| !s.is_empty())
+                    .collect_vec()
+            }),
+        }
+    }
+
+    pub fn get_category(&self) -> Option<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.category.as_deref(),
+        }
+    }
+}
+
+#[async_trait]
+pub trait TorrentDownloader {
+    async fn get_torrents_info(
+        &self,
+        status_filter: TorrentFilter,
+        category: Option<String>,
+        tag: Option<String>,
+    ) -> color_eyre::eyre::Result<Vec<Torrent>>;
+
+    async fn add_torrents(
+        &self,
+        source: TorrentSource,
+        save_path: String,
+        category: Option<&str>,
+    ) -> color_eyre::eyre::Result<()>;
+
+    async fn delete_torrents(&self, hashes: Vec<String>) -> color_eyre::eyre::Result<()>;
+
+    async fn rename_torrent_file(
+        &self,
+        hash: &str,
+        old_path: &str,
+        new_path: &str,
+    ) -> color_eyre::eyre::Result<()>;
+
+    async fn move_torrents(
+        &self,
+        hashes: Vec<String>,
+        new_path: &str,
+    ) -> color_eyre::eyre::Result<()>;
+
+    async fn get_torrent_path(&self, hashes: String) -> color_eyre::eyre::Result<Option<String>>;
+
+    async fn check_connection(&self) -> color_eyre::eyre::Result<()>;
+
+    async fn set_torrents_category(
+        &self,
+        hashes: Vec<String>,
+        category: &str,
+    ) -> color_eyre::eyre::Result<()>;
+
+    async fn add_torrent_tags(
+        &self,
+        hashes: Vec<String>,
+        tags: Vec<String>,
+    ) -> color_eyre::eyre::Result<()>;
+
+    async fn add_category(&self, category: &str) -> color_eyre::eyre::Result<()>;
+
+    fn get_save_path(&self, sub_path: &Path) -> PathBuf;
+}
26 apps/recorder/src/sync/error.rs Normal file
@ -0,0 +1,26 @@
use std::{borrow::Cow, time::Duration};

use thiserror::Error;

#[derive(Error, Debug)]
pub enum TorrentDownloadError {
    #[error("Invalid mime (expected {expected:?}, got {found:?})")]
    InvalidMime { expected: String, found: String },
    #[error("Invalid url schema (expected {expected:?}, got {found:?})")]
    InvalidUrlSchema { expected: String, found: String },
    #[error("Invalid url parse: {0:?}")]
    InvalidUrlParse(#[from] url::ParseError),
    #[error("Invalid url format: {reason}")]
    InvalidUrlFormat { reason: Cow<'static, str> },
    #[error("QBit api error: {0:?}")]
    QBitAPIError(#[from] qbit_rs::Error),
    #[error("Timeout error ({action} timed out after {timeout:?})")]
    TimeoutError {
        action: Cow<'static, str>,
        timeout: Duration,
    },
    #[error("Invalid torrent file format")]
    InvalidTorrentFileFormat,
    #[error("Invalid magnet file format (url = {url})")]
    InvalidMagnetFormat { url: String },
}
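
The Display output of the timeout variant interpolates the action label and
Duration's Debug form; a quick check (illustrative test, not part of the diff):

    #[test]
    fn timeout_error_display() {
        use std::{borrow::Cow, time::Duration};
        let err = TorrentDownloadError::TimeoutError {
            action: Cow::Borrowed("example"),
            timeout: Duration::from_secs(10),
        };
        assert_eq!(
            err.to_string(),
            "Timeout error (example timed out after 10s)"
        );
    }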
15 apps/recorder/src/sync/mod.rs Normal file
@ -0,0 +1,15 @@
pub mod core;
pub mod error;
pub mod qbit;
mod utils;

pub use self::core::{
    Torrent, TorrentContent, TorrentDownloader, TorrentFilter, TorrentSource, BITTORRENT_MIME_TYPE,
    MAGNET_SCHEMA,
};

pub use error::TorrentDownloadError;
pub use qbit::{
    QBittorrentDownloader, QBittorrentDownloaderCreation, QbitTorrent, QbitTorrentContent,
    QbitTorrentFile, QbitTorrentFilter, QbitTorrentSource,
};
722 apps/recorder/src/sync/qbit.rs Normal file
@ -0,0 +1,722 @@
use std::{
    borrow::Cow, collections::HashSet, fmt::Debug, future::Future, sync::Arc, time::Duration,
};

use async_trait::async_trait;
use color_eyre::eyre::OptionExt;
use futures::future::try_join_all;
pub use qbit_rs::model::{
    Torrent as QbitTorrent, TorrentContent as QbitTorrentContent, TorrentFile as QbitTorrentFile,
    TorrentFilter as QbitTorrentFilter, TorrentSource as QbitTorrentSource,
};
use qbit_rs::{
    Qbit,
    model::{AddTorrentArg, Credential, GetTorrentListArg, NonEmptyStr, SyncData},
};
use quirks_path::{Path, PathBuf};
use tokio::time::sleep;
use tracing::instrument;
use url::Url;

use super::{
    Torrent, TorrentDownloadError, TorrentDownloader, TorrentFilter, TorrentSource,
    utils::path_equals_as_file_url,
};

impl From<TorrentSource> for QbitTorrentSource {
    fn from(value: TorrentSource) -> Self {
        match value {
            TorrentSource::MagnetUrl { url, .. } => QbitTorrentSource::Urls {
                urls: qbit_rs::model::Sep::from([url]),
            },
            TorrentSource::TorrentUrl { url, .. } => QbitTorrentSource::Urls {
                urls: qbit_rs::model::Sep::from([url]),
            },
            TorrentSource::TorrentFile {
                torrent: torrents,
                name,
                ..
            } => QbitTorrentSource::TorrentFiles {
                torrents: vec![QbitTorrentFile {
                    filename: name.unwrap_or_default(),
                    data: torrents,
                }],
            },
        }
    }
}

impl From<TorrentFilter> for QbitTorrentFilter {
    fn from(val: TorrentFilter) -> Self {
        match val {
            TorrentFilter::All => QbitTorrentFilter::All,
            TorrentFilter::Downloading => QbitTorrentFilter::Downloading,
            TorrentFilter::Completed => QbitTorrentFilter::Completed,
            TorrentFilter::Paused => QbitTorrentFilter::Paused,
            TorrentFilter::Active => QbitTorrentFilter::Active,
            TorrentFilter::Inactive => QbitTorrentFilter::Inactive,
            TorrentFilter::Resumed => QbitTorrentFilter::Resumed,
            TorrentFilter::Stalled => QbitTorrentFilter::Stalled,
            TorrentFilter::StalledUploading => QbitTorrentFilter::StalledUploading,
            TorrentFilter::StalledDownloading => QbitTorrentFilter::StalledDownloading,
            TorrentFilter::Errored => QbitTorrentFilter::Errored,
        }
    }
}

pub struct QBittorrentDownloaderCreation {
    pub endpoint: String,
    pub username: String,
    pub password: String,
    pub save_path: String,
    pub subscriber_id: i32,
}

pub struct QBittorrentDownloader {
    pub subscriber_id: i32,
    pub endpoint_url: Url,
    pub client: Arc<Qbit>,
    pub save_path: PathBuf,
    pub wait_sync_timeout: Duration,
}

impl QBittorrentDownloader {
    pub async fn from_creation(
        creation: QBittorrentDownloaderCreation,
    ) -> Result<Self, TorrentDownloadError> {
        let endpoint_url =
            Url::parse(&creation.endpoint).map_err(TorrentDownloadError::InvalidUrlParse)?;

        let credential = Credential::new(creation.username, creation.password);

        let client = Qbit::new(endpoint_url.clone(), credential);

        client
            .login(false)
            .await
            .map_err(TorrentDownloadError::QBitAPIError)?;

        client.sync(None).await?;

        Ok(Self {
            client: Arc::new(client),
            endpoint_url,
            subscriber_id: creation.subscriber_id,
            save_path: creation.save_path.into(),
            wait_sync_timeout: Duration::from_millis(10000),
        })
    }

    #[instrument(level = "debug")]
    pub async fn api_version(&self) -> color_eyre::eyre::Result<String> {
        let result = self.client.get_webapi_version().await?;
        Ok(result)
    }

    pub async fn wait_until<G, Fut, F, D, H, E>(
        &self,
        capture_fn: H,
        fetch_data_fn: G,
        mut stop_wait_fn: F,
        timeout: Option<Duration>,
    ) -> color_eyre::eyre::Result<()>
    where
        H: FnOnce() -> E,
        G: Fn(Arc<Qbit>, E) -> Fut,
        Fut: Future<Output = color_eyre::eyre::Result<D>>,
        F: FnMut(&D) -> bool,
        E: Clone,
        D: Debug + serde::Serialize,
    {
        let mut next_wait_ms = 32u64;
        let mut all_wait_ms = 0u64;
        let timeout = timeout.unwrap_or(self.wait_sync_timeout);
        let env = capture_fn();
        loop {
            sleep(Duration::from_millis(next_wait_ms)).await;
            all_wait_ms += next_wait_ms;
            if all_wait_ms >= timeout.as_millis() as u64 {
                // full update
                let sync_data = fetch_data_fn(self.client.clone(), env.clone()).await?;
                if stop_wait_fn(&sync_data) {
                    break;
                } else {
                    tracing::warn!(name = "wait_until timeout", sync_data = serde_json::to_string(&sync_data).unwrap(), timeout = ?timeout);
                    return Err(TorrentDownloadError::TimeoutError {
                        action: Cow::Borrowed("QBittorrentDownloader::wait_until"),
                        timeout,
                    }
                    .into());
                }
            }
            let sync_data = fetch_data_fn(self.client.clone(), env.clone()).await?;
            if stop_wait_fn(&sync_data) {
                break;
            }
            next_wait_ms *= 2;
        }
        Ok(())
    }
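    /// Illustrative helper, not part of the original diff: shows `wait_until`
    /// driven with a unit capture and the default timeout, blocking until any
    /// torrent at all is visible in the list.
    #[allow(dead_code)]
    async fn wait_for_any_torrent(&self) -> color_eyre::eyre::Result<()> {
        self.wait_until(
            || (),
            async move |client: Arc<Qbit>, _| -> color_eyre::eyre::Result<Vec<QbitTorrent>> {
                Ok(client.get_torrent_list(GetTorrentListArg::default()).await?)
            },
            |torrents: &Vec<QbitTorrent>| !torrents.is_empty(),
            None,
        )
        .await
    }
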
#[instrument(level = "trace", skip(self, stop_wait_fn))]
|
||||||
|
pub async fn wait_torrents_until<F>(
|
||||||
|
&self,
|
||||||
|
arg: GetTorrentListArg,
|
||||||
|
stop_wait_fn: F,
|
||||||
|
timeout: Option<Duration>,
|
||||||
|
) -> color_eyre::eyre::Result<()>
|
||||||
|
where
|
||||||
|
F: FnMut(&Vec<QbitTorrent>) -> bool,
|
||||||
|
{
|
||||||
|
self.wait_until(
|
||||||
|
|| arg,
|
||||||
|
async move |client: Arc<Qbit>,
|
||||||
|
arg: GetTorrentListArg|
|
||||||
|
-> color_eyre::eyre::Result<Vec<QbitTorrent>> {
|
||||||
|
let data = client.get_torrent_list(arg).await?;
|
||||||
|
Ok(data)
|
||||||
|
},
|
||||||
|
stop_wait_fn,
|
||||||
|
timeout,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(level = "debug", skip(self, stop_wait_fn))]
|
||||||
|
pub async fn wait_sync_until<F: FnMut(&SyncData) -> bool>(
|
||||||
|
&self,
|
||||||
|
stop_wait_fn: F,
|
||||||
|
timeout: Option<Duration>,
|
||||||
|
) -> color_eyre::eyre::Result<()> {
|
||||||
|
self.wait_until(
|
||||||
|
|| (),
|
||||||
|
async move |client: Arc<Qbit>, _| -> color_eyre::eyre::Result<SyncData> {
|
||||||
|
let data = client.sync(None).await?;
|
||||||
|
Ok(data)
|
||||||
|
},
|
||||||
|
stop_wait_fn,
|
||||||
|
timeout,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(level = "debug", skip(self, stop_wait_fn))]
|
||||||
|
async fn wait_torrent_contents_until<F: FnMut(&Vec<QbitTorrentContent>) -> bool>(
|
||||||
|
&self,
|
||||||
|
hash: &str,
|
||||||
|
stop_wait_fn: F,
|
||||||
|
timeout: Option<Duration>,
|
||||||
|
) -> color_eyre::eyre::Result<()> {
|
||||||
|
self.wait_until(
|
||||||
|
|| Arc::new(hash.to_string()),
|
||||||
|
async move |client: Arc<Qbit>,
|
||||||
|
hash_arc: Arc<String>|
|
||||||
|
-> color_eyre::eyre::Result<Vec<QbitTorrentContent>> {
|
||||||
|
let data = client.get_torrent_contents(hash_arc.as_str(), None).await?;
|
||||||
|
Ok(data)
|
||||||
|
},
|
||||||
|
stop_wait_fn,
|
||||||
|
timeout,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
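// The wait_* helpers above all inherit wait_until's exponential backoff: polls
// run at 32 ms, 64 ms, 128 ms, and so on, so with the default 10 s budget the
// ninth poll crosses the deadline and doubles as the final check before
// TimeoutError is returned.
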
#[async_trait]
impl TorrentDownloader for QBittorrentDownloader {
    #[instrument(level = "debug", skip(self))]
    async fn get_torrents_info(
        &self,
        status_filter: TorrentFilter,
        category: Option<String>,
        tag: Option<String>,
    ) -> color_eyre::eyre::Result<Vec<Torrent>> {
        let arg = GetTorrentListArg {
            filter: Some(status_filter.into()),
            category,
            tag,
            ..Default::default()
        };
        let torrent_list = self.client.get_torrent_list(arg).await?;
        let torrent_contents = try_join_all(torrent_list.iter().map(|s| async {
            if let Some(hash) = &s.hash {
                self.client.get_torrent_contents(hash as &str, None).await
            } else {
                Ok(vec![])
            }
        }))
        .await?;
        Ok(torrent_list
            .into_iter()
            .zip(torrent_contents)
            .map(|(torrent, contents)| Torrent::Qbit { torrent, contents })
            .collect::<Vec<_>>())
    }
#[instrument(level = "debug", skip(self))]
|
||||||
|
async fn add_torrents(
|
||||||
|
&self,
|
||||||
|
source: TorrentSource,
|
||||||
|
save_path: String,
|
||||||
|
category: Option<&str>,
|
||||||
|
) -> color_eyre::eyre::Result<()> {
|
||||||
|
let arg = AddTorrentArg {
|
||||||
|
source: source.clone().into(),
|
||||||
|
savepath: Some(save_path),
|
||||||
|
category: category.map(String::from),
|
||||||
|
auto_torrent_management: Some(false),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let add_result = self.client.add_torrent(arg.clone()).await;
|
||||||
|
if let (
|
||||||
|
Err(qbit_rs::Error::ApiError(qbit_rs::ApiError::CategoryNotFound)),
|
||||||
|
Some(category),
|
||||||
|
) = (&add_result, category)
|
||||||
|
{
|
||||||
|
self.add_category(category).await?;
|
||||||
|
self.client.add_torrent(arg).await?;
|
||||||
|
} else {
|
||||||
|
add_result?;
|
||||||
|
}
|
||||||
|
let source_hash = source.hash();
|
||||||
|
self.wait_sync_until(
|
||||||
|
|sync_data| {
|
||||||
|
sync_data
|
||||||
|
.torrents
|
||||||
|
.as_ref()
|
||||||
|
.is_some_and(|t| t.contains_key(source_hash))
|
||||||
|
},
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
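    // Recovery pattern worth noting in add_torrents above: a CategoryNotFound
    // error is healed by creating the category and retrying once, and the
    // sync-wait afterwards keys on the source hash, so the call returns only
    // once qBittorrent actually lists the new torrent.
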
#[instrument(level = "debug", skip(self))]
|
||||||
|
async fn delete_torrents(&self, hashes: Vec<String>) -> color_eyre::eyre::Result<()> {
|
||||||
|
self.client
|
||||||
|
.delete_torrents(hashes.clone(), Some(true))
|
||||||
|
.await?;
|
||||||
|
self.wait_torrents_until(
|
||||||
|
GetTorrentListArg::builder()
|
||||||
|
.hashes(hashes.join("|"))
|
||||||
|
.build(),
|
||||||
|
|torrents| -> bool { torrents.is_empty() },
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(level = "debug", skip(self))]
|
||||||
|
async fn rename_torrent_file(
|
||||||
|
&self,
|
||||||
|
hash: &str,
|
||||||
|
old_path: &str,
|
||||||
|
new_path: &str,
|
||||||
|
) -> color_eyre::eyre::Result<()> {
|
||||||
|
self.client.rename_file(hash, old_path, new_path).await?;
|
||||||
|
let new_path = self.save_path.join(new_path);
|
||||||
|
let save_path = self.save_path.as_path();
|
||||||
|
self.wait_torrent_contents_until(
|
||||||
|
hash,
|
||||||
|
|contents| -> bool {
|
||||||
|
contents.iter().any(|c| {
|
||||||
|
path_equals_as_file_url(save_path.join(&c.name), &new_path)
|
||||||
|
.inspect_err(|error| {
|
||||||
|
tracing::warn!(name = "path_equals_as_file_url", error = ?error);
|
||||||
|
})
|
||||||
|
.unwrap_or(false)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(level = "debug", skip(self))]
|
||||||
|
async fn move_torrents(
|
||||||
|
&self,
|
||||||
|
hashes: Vec<String>,
|
||||||
|
new_path: &str,
|
||||||
|
) -> color_eyre::eyre::Result<()> {
|
||||||
|
self.client
|
||||||
|
.set_torrent_location(hashes.clone(), new_path)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
self.wait_torrents_until(
|
||||||
|
GetTorrentListArg::builder()
|
||||||
|
.hashes(hashes.join("|"))
|
||||||
|
.build(),
|
||||||
|
|torrents| -> bool {
|
||||||
|
torrents.iter().flat_map(|t| t.save_path.as_ref()).any(|p| {
|
||||||
|
path_equals_as_file_url(p, new_path)
|
||||||
|
.inspect_err(|error| {
|
||||||
|
tracing::warn!(name = "path_equals_as_file_url", error = ?error);
|
||||||
|
})
|
||||||
|
.unwrap_or(false)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
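    // Both rename_torrent_file and move_torrents above confirm completion by
    // comparing paths as file:// URLs (path_equals_as_file_url), which
    // sidesteps platform differences in separators and encoding.
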
    async fn get_torrent_path(&self, hashes: String) -> color_eyre::eyre::Result<Option<String>> {
        let mut torrent_list = self
            .client
            .get_torrent_list(GetTorrentListArg {
                hashes: Some(hashes),
                ..Default::default()
            })
            .await?;
        let torrent = torrent_list.first_mut().ok_or_eyre("No torrent found")?;
        Ok(torrent.save_path.take())
    }

    #[instrument(level = "debug", skip(self))]
    async fn check_connection(&self) -> color_eyre::eyre::Result<()> {
        self.api_version().await?;
        Ok(())
    }
#[instrument(level = "debug", skip(self))]
|
||||||
|
async fn set_torrents_category(
|
||||||
|
&self,
|
||||||
|
hashes: Vec<String>,
|
||||||
|
category: &str,
|
||||||
|
) -> color_eyre::eyre::Result<()> {
|
||||||
|
let result = self
|
||||||
|
.client
|
||||||
|
.set_torrent_category(hashes.clone(), category)
|
||||||
|
.await;
|
||||||
|
if let Err(qbit_rs::Error::ApiError(qbit_rs::ApiError::CategoryNotFound)) = &result {
|
||||||
|
self.add_category(category).await?;
|
||||||
|
self.client
|
||||||
|
.set_torrent_category(hashes.clone(), category)
|
||||||
|
.await?;
|
||||||
|
} else {
|
||||||
|
result?;
|
||||||
|
}
|
||||||
|
self.wait_torrents_until(
|
||||||
|
GetTorrentListArg::builder()
|
||||||
|
.hashes(hashes.join("|"))
|
||||||
|
.build(),
|
||||||
|
|torrents| {
|
||||||
|
torrents
|
||||||
|
.iter()
|
||||||
|
.all(|t| t.category.as_ref().is_some_and(|c| c == category))
|
||||||
|
},
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(level = "debug", skip(self))]
|
||||||
|
async fn add_torrent_tags(
|
||||||
|
&self,
|
||||||
|
hashes: Vec<String>,
|
||||||
|
tags: Vec<String>,
|
||||||
|
) -> color_eyre::eyre::Result<()> {
|
||||||
|
if tags.is_empty() {
|
||||||
|
return Err(color_eyre::eyre::eyre!("add torrent tags can not be empty"));
|
||||||
|
}
|
||||||
|
self.client
|
||||||
|
.add_torrent_tags(hashes.clone(), tags.clone())
|
||||||
|
.await?;
|
||||||
|
let tag_sets = tags.iter().map(|s| s.as_str()).collect::<HashSet<&str>>();
|
||||||
|
self.wait_torrents_until(
|
||||||
|
GetTorrentListArg::builder()
|
||||||
|
.hashes(hashes.join("|"))
|
||||||
|
.build(),
|
||||||
|
|torrents| {
|
||||||
|
torrents.iter().all(|t| {
|
||||||
|
t.tags.as_ref().is_some_and(|t| {
|
||||||
|
t.split(',')
|
||||||
|
.map(|s| s.trim())
|
||||||
|
.filter(|s| !s.is_empty())
|
||||||
|
.collect::<HashSet<&str>>()
|
||||||
|
.is_superset(&tag_sets)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
},
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
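    // The readiness check in add_torrent_tags above treats qBittorrent's
    // comma-separated `tags` string as a set and requires a superset of the
    // requested tags, so it still succeeds when unrelated tags are already
    // attached to the torrent.
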
#[instrument(level = "debug", skip(self))]
|
||||||
|
async fn add_category(&self, category: &str) -> color_eyre::eyre::Result<()> {
|
||||||
|
self.client
|
||||||
|
.add_category(
|
||||||
|
NonEmptyStr::new(category).ok_or_eyre("category can not be empty")?,
|
||||||
|
self.save_path.as_str(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
self.wait_sync_until(
|
||||||
|
|sync_data| {
|
||||||
|
sync_data
|
||||||
|
.categories
|
||||||
|
.as_ref()
|
||||||
|
.is_some_and(|s| s.contains_key(category))
|
||||||
|
},
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_save_path(&self, sub_path: &Path) -> PathBuf {
|
||||||
|
self.save_path.join(sub_path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Debug for QBittorrentDownloader {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("QBittorrentDownloader")
            .field("subscriber_id", &self.subscriber_id)
            .field("client", &self.endpoint_url.as_str())
            .finish()
    }
}
#[cfg(test)]
pub mod tests {
    use itertools::Itertools;

    use super::*;
    use crate::test_utils::fetch::build_testing_http_client;

    fn get_tmp_qbit_test_folder() -> &'static str {
        if cfg!(all(windows, not(feature = "testcontainers"))) {
            "C:\\Windows\\Temp\\konobangu\\qbit"
        } else {
            "/tmp/konobangu/qbit"
        }
    }

    #[cfg(feature = "testcontainers")]
    pub async fn create_qbit_testcontainer()
    -> color_eyre::eyre::Result<testcontainers::ContainerRequest<testcontainers::GenericImage>>
    {
        use testcontainers::{
            GenericImage,
            core::{
                ContainerPort,
                // ReuseDirective,
                WaitFor,
            },
        };
        use testcontainers_modules::testcontainers::ImageExt;

        use crate::test_utils::testcontainers::ContainerRequestEnhancedExt;

        let container = GenericImage::new("linuxserver/qbittorrent", "latest")
            .with_wait_for(WaitFor::message_on_stderr("Connection to localhost"))
            .with_env_var("WEBUI_PORT", "8080")
            .with_env_var("TZ", "Asia/Singapore")
            .with_env_var("TORRENTING_PORT", "6881")
            .with_mapped_port(6881, ContainerPort::Tcp(6881))
            .with_mapped_port(8080, ContainerPort::Tcp(8080))
            // .with_reuse(ReuseDirective::Always)
            .with_default_log_consumer()
            .with_prune_existed_label("qbit-downloader", true, true)
            .await?;

        Ok(container)
    }

    #[cfg(not(feature = "testcontainers"))]
    #[tokio::test]
    async fn test_qbittorrent_downloader() {
        test_qbittorrent_downloader_impl(None, None)
            .await
            .expect("test should succeed");
    }

    #[cfg(feature = "testcontainers")]
    #[tokio::test(flavor = "multi_thread")]
    async fn test_qbittorrent_downloader() -> color_eyre::eyre::Result<()> {
        use testcontainers::runners::AsyncRunner;
        use tokio::io::AsyncReadExt;

        tracing_subscriber::fmt()
            .with_max_level(tracing::Level::DEBUG)
            .with_test_writer()
            .init();

        let image = create_qbit_testcontainer().await?;

        let container = image.start().await?;

        let mut logs = String::new();

        container.stdout(false).read_to_string(&mut logs).await?;

        let username = logs
            .lines()
            .find_map(|line| {
                if line.contains("The WebUI administrator username is") {
                    line.split_whitespace().last()
                } else {
                    None
                }
            })
            .expect("should have username")
            .trim();

        let password = logs
            .lines()
            .find_map(|line| {
                if line.contains("A temporary password is provided for this session") {
                    line.split_whitespace().last()
                } else {
                    None
                }
            })
            .expect("should have password")
            .trim();

        tracing::info!(username, password);

        test_qbittorrent_downloader_impl(Some(username), Some(password)).await?;

        Ok(())
    }

    async fn test_qbittorrent_downloader_impl(
        username: Option<&str>,
        password: Option<&str>,
    ) -> color_eyre::eyre::Result<()> {
        let http_client = build_testing_http_client()?;
        let base_save_path = Path::new(get_tmp_qbit_test_folder());

        let mut downloader = QBittorrentDownloader::from_creation(QBittorrentDownloaderCreation {
            endpoint: "http://127.0.0.1:8080".to_string(),
            password: password.unwrap_or_default().to_string(),
            username: username.unwrap_or_default().to_string(),
            subscriber_id: 0,
            save_path: base_save_path.to_string(),
        })
        .await?;

        downloader.wait_sync_timeout = Duration::from_secs(3);

        downloader.check_connection().await?;

        downloader
            .delete_torrents(vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()])
            .await?;

        let torrent_source = TorrentSource::parse(
            &http_client,
            "https://mikanani.me/Download/20240301/47ee2d69e7f19af783ad896541a07b012676f858.torrent"
        ).await?;

        let save_path = base_save_path.join(format!(
            "test_add_torrents_{}",
            chrono::Utc::now().timestamp()
        ));

        downloader
            .add_torrents(torrent_source, save_path.to_string(), Some("bangumi"))
            .await?;

        let get_torrent = async || -> color_eyre::eyre::Result<Torrent> {
            let torrent_infos = downloader
                .get_torrents_info(TorrentFilter::All, None, None)
                .await?;

            let result = torrent_infos
                .into_iter()
                .find(|t| t.get_hash() == Some("47ee2d69e7f19af783ad896541a07b012676f858"))
                .ok_or_eyre("no torrent")?;

            Ok(result)
        };

        let target_torrent = get_torrent().await?;

        let files = target_torrent.iter_files().collect_vec();
        assert!(!files.is_empty());

        let first_file = files[0];
        assert_eq!(
            first_file.get_name(),
            r#"[Nekomoe kissaten&LoliHouse] Boku no Kokoro no Yabai Yatsu - 20 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#
        );

        let test_tag = format!("test_tag_{}", chrono::Utc::now().timestamp());

        downloader
            .add_torrent_tags(
                vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()],
                vec![test_tag.clone()],
            )
            .await?;

        let target_torrent = get_torrent().await?;

        assert!(target_torrent.get_tags().iter().any(|s| s == &test_tag));

        let test_category = format!("test_category_{}", chrono::Utc::now().timestamp());

        downloader
            .set_torrents_category(
                vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()],
                &test_category,
            )
            .await?;

        let target_torrent = get_torrent().await?;

        assert_eq!(Some(test_category.as_str()), target_torrent.get_category());

        let moved_save_path = base_save_path.join(format!(
            "moved_test_add_torrents_{}",
            chrono::Utc::now().timestamp()
        ));

        downloader
            .move_torrents(
                vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()],
                moved_save_path.as_str(),
            )
            .await?;

        let target_torrent = get_torrent().await?;

        let content_path = target_torrent.iter_files().next().unwrap().get_name();

        let new_content_path = &format!("new_{}", content_path);

        downloader
            .rename_torrent_file(
                "47ee2d69e7f19af783ad896541a07b012676f858",
                content_path,
                new_content_path,
            )
            .await?;

        let target_torrent = get_torrent().await?;

        let content_path = target_torrent.iter_files().next().unwrap().get_name();

        assert_eq!(content_path, new_content_path);

        downloader
            .delete_torrents(vec!["47ee2d69e7f19af783ad896541a07b012676f858".to_string()])
            .await?;

        let torrent_infos1 = downloader
            .get_torrents_info(TorrentFilter::All, None, None)
            .await?;

        assert!(torrent_infos1.is_empty());

        Ok(())
    }
}
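
A minimal sketch of wiring the new module together (illustrative: the crate
path `recorder::sync` is inferred from the file layout above, and the endpoint,
credentials and save path are placeholders):

    use recorder::sync::{
        QBittorrentDownloader, QBittorrentDownloaderCreation, TorrentDownloader,
    };

    #[tokio::main]
    async fn main() -> color_eyre::eyre::Result<()> {
        let downloader = QBittorrentDownloader::from_creation(QBittorrentDownloaderCreation {
            endpoint: "http://127.0.0.1:8080".to_string(),
            username: "admin".to_string(),
            password: "adminadmin".to_string(),
            save_path: "/downloads".to_string(),
            subscriber_id: 0,
        })
        .await?;
        downloader.check_connection().await?;
        Ok(())
    }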
@ -1,277 +1,16 @@
-use std::{borrow::Cow, sync::Arc};
-
-use async_stream::stream;
-use futures::{Stream, StreamExt, pin_mut};
-use serde::{Serialize, de::DeserializeOwned};
-use tokio::sync::{RwLock, mpsc};
-
-use crate::{
-    app::AppContextTrait,
-    errors::app_error::{RError, RResult},
-    models,
-};
-
-pub struct TaskMeta {
-    pub subscriber_id: i32,
-    pub task_id: i32,
-    pub task_kind: Cow<'static, str>,
-}
-
-pub struct ReplayChannel<T: Send + Sync + Clone + 'static> {
-    sender: mpsc::UnboundedSender<T>,
-    channels: Arc<RwLock<Vec<mpsc::UnboundedSender<T>>>>,
-    buffer: Arc<RwLock<Vec<T>>>,
-}
-
-impl<T: Send + Sync + Clone + 'static> ReplayChannel<T> {
-    pub fn new(history: Vec<T>) -> Self {
-        let (tx, mut rx) = mpsc::unbounded_channel::<T>();
-        let channels = Arc::new(RwLock::new(Vec::<mpsc::UnboundedSender<T>>::new()));
-        let buffer = Arc::new(RwLock::new(history));
-        {
-            let channels = channels.clone();
-            let buffer = buffer.clone();
-            tokio::spawn(async move {
-                loop {
-                    match rx.recv().await {
-                        Some(value) => {
-                            let mut w = buffer.write().await;
-                            let senders = channels.read().await;
-                            for s in senders.iter() {
-                                if !s.is_closed() {
-                                    if let Err(err) = s.send(value.clone()) {
-                                        tracing::error!(err = %err, "replay-channel broadcast to other subscribers error");
-                                    }
-                                }
-                            }
-                            w.push(value);
-                        }
-                        None => {
-                            drop(rx);
-                            let mut cs = channels.write().await;
-                            cs.clear();
-                            break;
-                        }
-                    }
-                }
-            });
-        }
-
-        Self {
-            sender: tx,
-            channels,
-            buffer,
-        }
-    }
-
-    pub fn sender(&self) -> &mpsc::UnboundedSender<T> {
-        &self.sender
-    }
-
-    pub async fn receiver(&self) -> mpsc::UnboundedReceiver<T> {
-        let (tx, rx) = mpsc::unbounded_channel();
-        let items = self.buffer.read().await;
-        for item in items.iter() {
-            if let Err(err) = tx.send(item.clone()) {
-                tracing::error!(err = %err, "replay-channel send replay value to other subscribers error");
-            }
-        }
-        if !self.sender.is_closed() {
-            let mut sw = self.channels.write().await;
-            sw.push(tx);
-        }
-        rx
-    }
-
-    pub async fn close(&self) {
-        let mut senders = self.channels.write().await;
-        senders.clear();
-    }
-}
-
-pub trait StreamTaskCoreTrait: Sized {
-    type Request: Serialize + DeserializeOwned;
-    type Item: Serialize + DeserializeOwned;
-
-    fn task_id(&self) -> i32;
-
-    fn task_kind(&self) -> &str;
-
-    fn new(meta: TaskMeta, request: Self::Request) -> Self;
-
-    fn request(&self) -> &Self::Request;
-}
-
-pub trait StreamTaskReplayLayoutTrait: StreamTaskCoreTrait {
-    fn history(&self) -> &[Arc<RResult<Self::Item>>];
-
-    fn resume_from_model(
-        task: models::tasks::Model,
-        stream_items: Vec<models::task_stream_item::Model>,
-    ) -> RResult<Self>;
-
-    fn running_receiver(
-        &self,
-    ) -> impl Future<Output = Option<mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>>>;
-
-    #[allow(clippy::type_complexity)]
-    fn init_receiver(
-        &self,
-    ) -> impl Future<
-        Output = (
-            mpsc::UnboundedSender<Arc<RResult<Self::Item>>>,
-            mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>,
-        ),
-    >;
-
-    fn serialize_request(request: Self::Request) -> RResult<serde_json::Value> {
-        serde_json::to_value(request).map_err(RError::from)
-    }
-
-    fn serialize_item(item: RResult<Self::Item>) -> RResult<serde_json::Value> {
-        serde_json::to_value(item).map_err(RError::from)
-    }
-
-    fn deserialize_request(request: serde_json::Value) -> RResult<Self::Request> {
-        serde_json::from_value(request).map_err(RError::from)
-    }
-
-    fn deserialize_item(item: serde_json::Value) -> RResult<RResult<Self::Item>> {
-        serde_json::from_value(item).map_err(RError::from)
-    }
-}
-
-pub trait StreamTaskRunnerTrait: StreamTaskCoreTrait {
-    fn run(
-        context: Arc<dyn AppContextTrait>,
-        request: &Self::Request,
-        history: &[Arc<RResult<Self::Item>>],
-    ) -> impl Stream<Item = RResult<Self::Item>>;
-}
-
-pub trait StreamTaskReplayRunnerTrait: StreamTaskRunnerTrait + StreamTaskReplayLayoutTrait {
-    fn run_shared(
-        &self,
-        context: Arc<dyn AppContextTrait>,
-    ) -> impl Stream<Item = Arc<RResult<Self::Item>>> {
-        stream! {
-            if let Some(mut receiver) = self.running_receiver().await {
-                while let Some(item) = receiver.recv().await {
-                    yield item
-                }
-            } else {
-                let (tx, _) = self.init_receiver().await;
-                let stream = Self::run(context, self.request(), self.history());
-
-                pin_mut!(stream);
-
-                while let Some(item) = stream.next().await {
-                    let item = Arc::new(item);
-                    if let Err(err) = tx.send(item.clone()) {
-                        tracing::error!(task_id = self.task_id(), task_kind = self.task_kind(), err = %err, "run shared send error");
-                    }
-                    yield item
-                }
-            };
-        }
-    }
-}
-
-pub struct StandardStreamTaskReplayLayout<Request, Item>
-where
-    Request: Serialize + DeserializeOwned,
-    Item: Serialize + DeserializeOwned + Sync + Send + 'static,
-{
-    pub meta: TaskMeta,
-    pub request: Request,
-    pub history: Vec<Arc<RResult<Item>>>,
-    #[allow(clippy::type_complexity)]
-    pub channel: Arc<RwLock<Option<ReplayChannel<Arc<RResult<Item>>>>>>,
-}
-
-impl<Request, Item> StreamTaskCoreTrait for StandardStreamTaskReplayLayout<Request, Item>
-where
-    Request: Serialize + DeserializeOwned,
-    Item: Serialize + DeserializeOwned + Sync + Send + 'static,
-{
-    type Request = Request;
-    type Item = Item;
-
-    fn task_id(&self) -> i32 {
-        self.meta.task_id
-    }
-
-    fn request(&self) -> &Self::Request {
-        &self.request
-    }
-
-    fn task_kind(&self) -> &str {
-        &self.meta.task_kind
-    }
-
-    fn new(meta: TaskMeta, request: Self::Request) -> Self {
-        Self {
-            meta,
-            request,
-            history: vec![],
-            channel: Arc::new(RwLock::new(None)),
-        }
-    }
-}
-
-impl<Request, Item> StreamTaskReplayLayoutTrait for StandardStreamTaskReplayLayout<Request, Item>
-where
-    Request: Serialize + DeserializeOwned,
-    Item: Serialize + DeserializeOwned + Sync + Send + 'static,
-{
-    fn history(&self) -> &[Arc<RResult<Self::Item>>] {
-        &self.history
-    }
-
-    fn resume_from_model(
-        task: models::tasks::Model,
-        stream_items: Vec<models::task_stream_item::Model>,
-    ) -> RResult<Self> {
-        Ok(Self {
-            meta: TaskMeta {
-                task_id: task.id,
-                subscriber_id: task.subscriber_id,
-                task_kind: Cow::Owned(task.task_type),
-            },
-            request: Self::deserialize_request(task.request_data)?,
-            history: stream_items
-                .into_iter()
-                .map(|m| Self::deserialize_item(m.item).map(Arc::new))
-                .collect::<RResult<Vec<_>>>()?,
-            channel: Arc::new(RwLock::new(None)),
-        })
-    }
-
-    async fn running_receiver(&self) -> Option<mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>> {
-        if let Some(channel) = self.channel.read().await.as_ref() {
-            Some(channel.receiver().await)
-        } else {
-            None
-        }
-    }
-
-    async fn init_receiver(
-        &self,
-    ) -> (
-        mpsc::UnboundedSender<Arc<RResult<Self::Item>>>,
-        mpsc::UnboundedReceiver<Arc<RResult<Self::Item>>>,
-    ) {
-        let channel = ReplayChannel::new(self.history.clone());
-        let rx = channel.receiver().await;
-        let sender = channel.sender().clone();
-
-        {
-            {
-                let mut w = self.channel.write().await;
-                *w = Some(channel);
-            }
-        }
-        (sender, rx)
-    }
-}
+use std::borrow::Cow;
+
+use async_trait::async_trait;
+
+use crate::{app::AppContext, errors::RResult};
+
+pub struct TaskVars {}
+
+#[async_trait]
+pub trait Task: Send + Sync {
+    fn task_name() -> Cow<'static, str>;
+
+    fn task_id(&self) -> &str;
+
+    async fn run(&self, app_context: &AppContext, vars: &TaskVars) -> RResult<()>;
+}
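
The deleted ReplayChannel above buffered every broadcast item so that a
late-attached receiver still observes the full stream; a sketch of that
guarantee as a test (illustrative, written against the removed API):

    #[tokio::test]
    async fn replay_channel_replays_history_to_late_receivers() {
        let channel = ReplayChannel::<u32>::new(vec![1, 2]);
        // A receiver created after construction still sees the buffered items.
        let mut rx = channel.receiver().await;
        assert_eq!(rx.recv().await, Some(1));
        assert_eq!(rx.recv().await, Some(2));
    }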
@ -0,0 +1,49 @@
use std::borrow::Cow;

use futures::{TryStreamExt, pin_mut};

use super::core::{Task, TaskVars};
use crate::{
    app::AppContext,
    errors::RResult,
    extract::mikan::{
        MikanAuthSecrecy, web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page,
    },
};

#[derive(Debug)]
pub struct CreateMikanRSSFromMyBangumiTask {
    pub subscriber_id: i32,
    pub task_id: String,
    pub auth_secrecy: MikanAuthSecrecy,
}

#[async_trait::async_trait]
impl Task for CreateMikanRSSFromMyBangumiTask {
    fn task_name() -> Cow<'static, str> {
        Cow::Borrowed("create-mikan-rss-from-my-bangumi")
    }

    fn task_id(&self) -> &str {
        &self.task_id
    }

    async fn run(&self, app_context: &AppContext, _vars: &TaskVars) -> RResult<()> {
        let mikan_client = app_context
            .mikan
            .fork_with_auth(self.auth_secrecy.clone())?;

        {
            let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
                &mikan_client,
                mikan_client.base_url().join("/Home/MyBangumi")?,
            );

            pin_mut!(bangumi_metas);

            let _bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
        }

        Ok(())
    }
}
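
A hypothetical dispatcher for the new Task trait (only Task, TaskVars,
AppContext and RResult come from the diff; the helper itself is made up for
illustration):

    async fn run_task<T: Task>(task: &T, ctx: &AppContext) -> RResult<()> {
        tracing::info!(task_name = %T::task_name(), task_id = task.task_id(), "running task");
        task.run(ctx, &TaskVars {}).await
    }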
@ -1,37 +0,0 @@
-use std::sync::Arc;
-
-use futures::Stream;
-use serde::{Deserialize, Serialize};
-use url::Url;
-
-use crate::{
-    app::AppContextTrait,
-    errors::app_error::RResult,
-    extract::mikan::{MikanAuthSecrecy, MikanBangumiMeta, web_extract},
-    tasks::core::{StandardStreamTaskReplayLayout, StreamTaskRunnerTrait},
-};
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct ExtractMikanBangumisMetaFromMyBangumiRequest {
-    pub my_bangumi_page_url: Url,
-    pub auth_secrecy: Option<MikanAuthSecrecy>,
-}
-
-pub type ExtractMikanBangumisMetaFromMyBangumiTask =
-    StandardStreamTaskReplayLayout<ExtractMikanBangumisMetaFromMyBangumiRequest, MikanBangumiMeta>;
-
-impl StreamTaskRunnerTrait for ExtractMikanBangumisMetaFromMyBangumiTask {
-    fn run(
-        context: Arc<dyn AppContextTrait>,
-        request: &Self::Request,
-        history: &[Arc<RResult<Self::Item>>],
-    ) -> impl Stream<Item = RResult<Self::Item>> {
-        let context = context.clone();
-        web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page(
-            context,
-            request.my_bangumi_page_url.clone(),
-            request.auth_secrecy.clone(),
-            history,
-        )
-    }
-}
@ -1 +0,0 @@
-pub mod extract_mikan_bangumis_meta_from_my_bangumi;
@ -1,4 +1,2 @@
 pub mod core;
-pub mod mikan;
-pub mod service;
-pub mod registry;
+pub mod create_mikan_bangumi_subscriptions_from_my_bangumi_page;
@ -1 +0,0 @@
-
@ -1,4 +0,0 @@
-#[derive(Debug)]
-pub struct TaskService {}
-
-impl TaskService {}
@ -1,62 +0,0 @@
-use typed_builder::TypedBuilder;
-
-use crate::app::AppContextTrait;
-
-#[derive(TypedBuilder)]
-#[builder(field_defaults(default, setter(strip_option)))]
-pub struct UnitTestAppContext {
-    logger: Option<crate::logger::LoggerService>,
-    db: Option<crate::database::DatabaseService>,
-    config: Option<crate::app::AppConfig>,
-    cache: Option<crate::cache::CacheService>,
-    mikan: Option<crate::extract::mikan::MikanClient>,
-    auth: Option<crate::auth::AuthService>,
-    graphql: Option<crate::graphql::GraphQLService>,
-    storage: Option<crate::storage::StorageService>,
-    #[builder(default = Some(String::from(env!("CARGO_MANIFEST_DIR"))))]
-    working_dir: Option<String>,
-    #[builder(default = crate::app::Environment::Testing, setter(!strip_option))]
-    environment: crate::app::Environment,
-}
-
-impl AppContextTrait for UnitTestAppContext {
-    fn logger(&self) -> &crate::logger::LoggerService {
-        self.logger.as_ref().expect("should set logger")
-    }
-
-    fn db(&self) -> &crate::database::DatabaseService {
-        self.db.as_ref().expect("should set db")
-    }
-
-    fn config(&self) -> &crate::app::AppConfig {
-        self.config.as_ref().expect("should set config")
-    }
-
-    fn cache(&self) -> &crate::cache::CacheService {
-        self.cache.as_ref().expect("should set cache")
-    }
-
-    fn mikan(&self) -> &crate::extract::mikan::MikanClient {
-        self.mikan.as_ref().expect("should set mikan")
-    }
-
-    fn auth(&self) -> &crate::auth::AuthService {
-        self.auth.as_ref().expect("should set auth")
-    }
-
-    fn graphql(&self) -> &crate::graphql::GraphQLService {
-        self.graphql.as_ref().expect("should set graphql")
-    }
-
-    fn storage(&self) -> &crate::storage::StorageService {
-        self.storage.as_ref().expect("should set storage")
-    }
-
-    fn environment(&self) -> &crate::app::Environment {
-        &self.environment
-    }
-
-    fn working_dir(&self) -> &String {
-        self.working_dir.as_ref().expect("should set working_dir")
-    }
-}
@ -1,6 +1,8 @@
-use crate::{errors::app_error::RResult, fetch::HttpClient};
+use color_eyre::eyre;
+
+use crate::fetch::HttpClient;
 
-pub fn build_testing_http_client() -> RResult<HttpClient> {
+pub fn build_testing_http_client() -> eyre::Result<HttpClient> {
     let mikan_client = HttpClient::default();
     Ok(mikan_client)
 }
@ -1,7 +1,7 @@
 use reqwest::IntoUrl;
 
 use crate::{
-    errors::app_error::RResult,
+    errors::RResult,
     extract::mikan::{MikanClient, MikanConfig},
     fetch::HttpClientConfig,
 };
@ -1,4 +1,3 @@
-pub mod app;
 pub mod fetch;
 pub mod mikan;
 #[cfg(feature = "testcontainers")]
|||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user