Compare commits
6 commits: 2844e1fc32...2f5b001bb6

2f5b001bb6
50243db5dc
035d4e20dd
8a03dc28a2
5e51b2752d
4f124c9c0f
@@ -1,9 +1,9 @@
 [alias]
-recorder = "run -p recorder --bin recorder_cli -- --environment recorder.development"
-recorder-playground = "run -p recorder --example playground -- --environment recorder.development"
+recorder = "run -p recorder --bin recorder_cli -- --environment development"
+recorder-playground = "run -p recorder --example playground -- --environment development"
 
 [build]
-rustflags = ["-Zthreads=8"]
+rustflags = ["-Zthreads=12", "-Clink-arg=-fuse-ld=lld"]
+
+[target.x86_64-unknown-linux-gnu]
+linker = "clang"
.gitignore (vendored, 7 changes)
@@ -112,7 +112,7 @@ coverage
 # nyc tests coverage
 .nyc_output
 
-# Grunt intermediate dal (https://gruntjs.com/creating-plugins#storing-task-files)
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
 .grunt
 
 # Bower dependency directory (https://bower.io/)
@@ -221,4 +221,7 @@ index.d.ts.map
 /*.session.sql
 
 /temp
-/rustc-ice-*
+/rustc-ice-*
+/crates/recorder/config/test.local.env
+**/*.local.yaml
+**/*.local.env
Cargo.lock (generated, 954 changes)
File diff suppressed because it is too large.
Cargo.toml (10 changes)
@@ -1,3 +1,11 @@
+cargo-features = ["codegen-backend"]
 [workspace]
-members = ["crates/quirks_path", "crates/recorder"]
+members = [
+    "crates/quirks_path",
+    "crates/recorder"
+]
 resolver = "2"
+
+[profile.dev]
+#debug = 0
+codegen-backend = "cranelift"
crates/recorder/.gitignore (vendored, 2 changes)
@@ -15,3 +15,5 @@ Cargo.lock
 
 # MSVC Windows builds of rustc generate these, which store debugging information
 *.pdb
+
+/data
@@ -18,30 +18,28 @@ default = []
 testcontainers = []
 
 [dependencies]
-loco-rs = { version = "0.3.1" }
+loco-rs = { version = "0.3.2" }
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"
 eyre = "0.6"
-tokio = { version = "1.33.0", default-features = false }
-async-trait = "0.1.74"
+tokio = { version = "1.36.0", default-features = false }
+async-trait = "0.1.79"
 tracing = "0.1.40"
 chrono = "0.4"
-validator = { version = "0.16" }
-sea-orm = { version = "1.0.0-rc.1", features = [
+validator = { version = "0.17" }
+sea-orm = { version = "1.0.0-rc.3", features = [
     "sqlx-sqlite",
     "sqlx-postgres",
     "runtime-tokio-rustls",
     "macros",
 ] }
 
-axum = "0.7.1"
-include_dir = "0.7"
-uuid = { version = "1.6.0", features = ["v4"] }
+axum = "0.7.5"
+uuid = { version = "1.8.0", features = ["v4"] }
 tracing-subscriber = { version = "0.3.17", features = ["env-filter", "json"] }
-sea-orm-migration = { version = "1.0.0-rc.1", features = [
+sea-orm-migration = { version = "1.0.0-rc.3", features = [
     "runtime-tokio-rustls",
 ] }
-reqwest = "0.11.24"
+reqwest = { version = "0.12.2", features = ["json"] }
 thiserror = "1.0.57"
 rss = "2.0.7"
 bytes = "1.5.0"
@@ -56,14 +54,20 @@ maplit = "1.0.2"
 tl = { version = "0.7.8", features = ["simd"] }
 lightningcss = "1.0.0-alpha.54"
 html-escape = "0.2.13"
-opendal = "0.45.0"
-librqbit-core = "3.5.0"
+opendal = "0.45.1"
+librqbit-core = "3.6.1"
 quirks_path = { path = "../quirks_path" }
 tokio-utils = "0.1.2"
+oxilangtag = { version = "0.1.5", features = ["serde"] }
+dateparser = "0.2.1"
+dotenv = "0.15.0"
+weak-table = "0.3.2"
+base64 = "0.22.0"
 
 [dev-dependencies]
-serial_test = "2.0.0"
+serial_test = "3.0.0"
 rstest = "0.18.2"
-loco-rs = { version = "0.3.1", features = ["testing"] }
-insta = { version = "1.34.0", features = ["redactions", "yaml", "filters"] }
+loco-rs = { version = "0.3.2", features = ["testing"] }
+insta = { version = "1.3", features = ["redactions", "yaml", "filters"] }
 testcontainers = { version = "0.15.0" }
-testcontainers-modules = { version = "0.3.5" }
+testcontainers-modules = { version = "0.3.6" }
@@ -65,23 +65,7 @@ workers:
   # - BackgroundQueue - Workers operate asynchronously in the background, processing queued.
   # - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
   # - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
-  mode: BackgroundQueue
-
-# Mailer Configuration.
-mailer:
-  # SMTP mailer configuration.
-  smtp:
-    # Enable/Disable smtp mailer.
-    enable: true
-    # SMTP server host. e.x localhost, smtp.gmail.com
-    host: '{{ get_env(name="MAILER_HOST", default="localhost") }}'
-    # SMTP server port
-    port: 1025
-    # Use secure connection (SSL/TLS).
-    secure: false
-    # auth:
-    #   user:
-    #   password:
+  mode: BackgroundAsync
 
 # Database Configuration
 database:
@@ -104,10 +88,6 @@ database:
   # Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
   dangerously_recreate: false
 
-# Redis Configuration
-redis:
-  # Redis connection URI
-  uri: '{{ get_env(name="REDIS_URL", default="redis://127.0.0.1:6379") }}'
-  # Dangerously flush all data in Redis on startup. dangerous operation, make sure that you using this flag only on dev environments or test mode
-  dangerously_flush: false
-
+settings:
+  dal:
+    fs_root: "data/dal/"
crates/recorder/config/test.env (new file, 1 line)
@@ -0,0 +1 @@
+TMDB_API_TOKEN=""
@@ -1,9 +1,7 @@
 # Loco configuration file documentation
 
 # Application logging configuration
 logger:
   # Enable or disable logging.
-  enable: false
+  enable: true
   # Log level, options: trace, debug, info, warn or error.
   level: debug
   # Define the logging format. options: compact, pretty or Json
@@ -62,29 +60,12 @@ workers:
   # - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
   mode: ForegroundBlocking
 
-# Mailer Configuration.
-mailer:
-  # SMTP mailer configuration.
-  smtp:
-    # Enable/Disable smtp mailer.
-    enable: true
-    # SMTP server host. e.x localhost, smtp.gmail.com
-    host: localhost
-    # SMTP server port
-    port: 1025
-    # Use secure connection (SSL/TLS).
-    secure: false
-    # auth:
-    #   user:
-    #   password:
-  stub: true
-
 # Database Configuration
 database:
   # Database connection URI
-  uri: {{get_env(name="DATABASE_URL", default="postgres://loco:loco@localhost:5432/recorder_test")}}
+  uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu") }}'
   # When enabled, the sql query will be logged.
-  enable_logging: false
+  enable_logging: true
   # Set the timeout duration when acquiring a connection.
   connect_timeout: 500
   # Set the idle duration before closing a connection.
@@ -98,21 +79,8 @@ database:
   # Truncate database when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
   dangerously_truncate: true
   # Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
-  dangerously_recreate: false
-
-# Redis Configuration
-redis:
-  # Redis connection URI
-  uri: {{get_env(name="REDIS_URL", default="redis://127.0.0.1")}}
-  # Dangerously flush all data in Redis on startup. dangerous operation, make sure that you using this flag only on dev environments or test mode
-  dangerously_flush: false
-
-# Authentication Configuration
-auth:
-  # JWT authentication
-  jwt:
-    # Secret key for token generation and verification
-    secret: ZknFYqXpnDgaWcKJZ5J5
-    # Token expiration time in seconds
-    expiration: 604800 # 7 days
+  dangerously_recreate: true
+
+settings:
+  dal:
+    fs_root: "data/dal"
@@ -1,21 +1,25 @@
-use std::path::Path;
-
 use async_trait::async_trait;
 use loco_rs::{
-    app::{AppContext, Hooks},
+    app::Hooks,
     boot::{create_app, BootResult, StartMode},
+    config::Config,
     controller::AppRoutes,
     db::truncate_table,
     environment::Environment,
+    prelude::*,
     task::Tasks,
-    worker::{AppWorker, Processor},
-    Result,
+    worker::Processor,
 };
-use sea_orm::DatabaseConnection;
+use sea_orm::prelude::*;
+use tracing_subscriber::EnvFilter;
 
 use crate::{
-    controllers, migrations::Migrator, models::entities::subscribers,
-    workers::subscription_worker::SubscriptionWorker,
+    controllers,
+    migrations::Migrator,
+    models::{bangumi, downloaders, episodes, resources, subscribers, subscriptions},
+    storage::AppDalInitializer,
+    utils::cli::hack_env_to_fit_workspace,
+    workers::subscription::SubscriptionWorker,
 };
 
 pub struct App;
@@ -37,6 +41,7 @@ impl Hooks for App {
     }
 
     async fn boot(mode: StartMode, environment: &Environment) -> Result<BootResult> {
+        hack_env_to_fit_workspace()?;
         create_app::<Self, Migrator>(mode, environment).await
     }
 
@@ -53,11 +58,66 @@ impl Hooks for App {
     fn register_tasks(_tasks: &mut Tasks) {}
 
     async fn truncate(db: &DatabaseConnection) -> Result<()> {
-        truncate_table(db, subscribers::Entity).await?;
+        futures::try_join!(
+            subscribers::Entity::delete_many()
+                .filter(subscribers::Column::Pid.ne(subscribers::ROOT_SUBSCRIBER_NAME))
+                .exec(db),
+            truncate_table(db, subscriptions::Entity),
+            truncate_table(db, resources::Entity),
+            truncate_table(db, downloaders::Entity),
+            truncate_table(db, bangumi::Entity),
+            truncate_table(db, episodes::Entity),
+        )?;
         Ok(())
     }
 
-    async fn seed(_db: &DatabaseConnection, _base: &Path) -> Result<()> {
+    async fn seed(_db: &DatabaseConnection, _base: &std::path::Path) -> Result<()> {
         Ok(())
     }
+
+    async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
+        Ok(vec![Box::new(AppDalInitializer)])
+    }
+
+    fn init_logger(app_config: &Config, _env: &Environment) -> Result<bool> {
+        let config = &app_config.logger;
+        if config.enable {
+            let filter = EnvFilter::try_from_default_env()
+                .or_else(|_| {
+                    // user wanted a specific filter, don't care about our internal whitelist
+                    // or, if no override give them the default whitelisted filter (most common)
+                    config.override_filter.as_ref().map_or_else(
+                        || {
+                            EnvFilter::try_new(
+                                ["loco_rs", "sea_orm_migration", "tower_http", "sqlx::query"]
+                                    .iter()
+                                    .map(|m| format!("{}={}", m, config.level))
+                                    .chain(std::iter::once(format!(
+                                        "{}={}",
+                                        App::app_name(),
+                                        config.level
+                                    )))
+                                    .collect::<Vec<_>>()
+                                    .join(","),
+                            )
+                        },
+                        EnvFilter::try_new,
+                    )
+                })
+                .expect("logger initialization failed");
+
+            let builder = tracing_subscriber::FmtSubscriber::builder().with_env_filter(filter);
+
+            match serde_json::to_string(&config.format)
+                .expect("init logger format can serialized")
+                .trim_matches('"')
+            {
+                "pretty" => builder.pretty().init(),
+                "json" => builder.json().init(),
+                _ => builder.compact().init(),
+            };
+        }
+
+        Ok(true)
+    }
 }
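Note: the new `init_logger` hook builds its default `tracing` filter by joining a whitelist of module targets into one comma-separated directive string, as shown above. A minimal sketch of just that construction (the module list and level are copied from the hook; the function wrapper and values are illustrative):

// Sketch: build "loco_rs=info,...,recorder=info"-style EnvFilter directives.
fn whitelist_directives(app_name: &str, level: &str) -> String {
    ["loco_rs", "sea_orm_migration", "tower_http", "sqlx::query"]
        .iter()
        .map(|m| format!("{}={}", m, level))
        .chain(std::iter::once(format!("{}={}", app_name, level)))
        .collect::<Vec<_>>()
        .join(",")
}

fn main() {
    // Prints: loco_rs=info,sea_orm_migration=info,tower_http=info,sqlx::query=info,recorder=info
    println!("{}", whitelist_directives("recorder", "info"));
}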
@@ -1,8 +1,8 @@
 use loco_rs::cli;
-use recorder::migrations::Migrator;
-use recorder::app::App;
+use recorder::{app::App, migrations::Migrator, utils::cli::hack_env_to_fit_workspace};
 
 #[tokio::main]
 async fn main() -> eyre::Result<()> {
+    hack_env_to_fit_workspace()?;
     cli::main::<App, Migrator>().await
 }
@@ -4,7 +4,7 @@ use eyre::OptionExt;
 use itertools::Itertools;
 use serde::{de::DeserializeOwned, Deserialize, Serialize};
 
-pub const DAL_CONF_KEY: &str = "dal";
+pub const DAL_CONF_KEY: &str = "storage";
 
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct AppCustomConf {
@@ -1,6 +1,6 @@
 use loco_rs::prelude::*;
 
-use crate::{models::entities::subscribers, views::subscribers::CurrentResponse};
+use crate::{models::subscribers, views::subscribers::CurrentResponse};
 
 async fn current(State(ctx): State<AppContext>) -> Result<Json<CurrentResponse>> {
     let subscriber = subscribers::Model::find_root(&ctx.db).await?;
@@ -1,74 +0,0 @@
-use bytes::Bytes;
-use opendal::{layers::LoggingLayer, services, Operator};
-use quirks_path::{Path, PathBuf};
-use serde::{Deserialize, Serialize};
-use url::Url;
-use uuid::Uuid;
-
-use crate::config::AppDalConf;
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-pub enum AppDalContentCategory {
-    Poster,
-}
-
-impl AsRef<str> for AppDalContentCategory {
-    fn as_ref(&self) -> &str {
-        match self {
-            Self::Poster => "poster",
-        }
-    }
-}
-
-#[derive(Debug, Clone)]
-pub struct AppDalContext {
-    pub config: AppDalConf,
-}
-
-pub enum DalStoredUrl {
-    RelativePath { path: String },
-    Absolute { url: Url },
-}
-
-impl AppDalContext {
-    pub fn new(app_dal_conf: AppDalConf) -> Self {
-        Self {
-            config: app_dal_conf,
-        }
-    }
-
-    pub async fn store_blob(
-        &self,
-        content_category: AppDalContentCategory,
-        extname: &str,
-        data: Bytes,
-        subscriber_pid: &str,
-    ) -> eyre::Result<DalStoredUrl> {
-        let basename = format!("{}{}", Uuid::new_v4(), extname);
-        let mut dirname = [subscriber_pid, content_category.as_ref()]
-            .into_iter()
-            .map(Path::new)
-            .collect::<PathBuf>();
-
-        let mut fs_builder = services::Fs::default();
-        fs_builder.root(self.config.fs_root.as_str());
-
-        let fs_op = Operator::new(fs_builder)?
-            .layer(LoggingLayer::default())
-            .finish();
-
-        fs_op.create_dir(dirname.as_str()).await?;
-
-        let fullname = {
-            dirname.push(basename);
-            dirname
-        };
-
-        fs_op.write_with(fullname.as_str(), data).await?;
-
-        Ok(DalStoredUrl::RelativePath {
-            path: fullname.to_string(),
-        })
-    }
-}
crates/recorder/src/downloaders/api_client.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
+use axum::http::HeaderMap;
+use bytes::Bytes;
+use serde::de::DeserializeOwned;
+use tokio_utils::RateLimiter;
+
+use crate::downloaders::defs::DEFAULT_USER_AGENT;
+
+pub struct ApiClient {
+    headers: HeaderMap,
+    rate_limiter: RateLimiter,
+    fetch_client: reqwest::Client,
+}
+
+impl ApiClient {
+    pub fn new(
+        throttle_duration: std::time::Duration,
+        override_headers: Option<HeaderMap>,
+    ) -> eyre::Result<Self> {
+        Ok(Self {
+            headers: override_headers.unwrap_or_else(HeaderMap::new),
+            rate_limiter: RateLimiter::new(throttle_duration),
+            fetch_client: reqwest::Client::builder()
+                .user_agent(DEFAULT_USER_AGENT)
+                .build()?,
+        })
+    }
+
+    pub async fn fetch_json<R, F>(&self, f: F) -> Result<R, reqwest::Error>
+    where
+        F: FnOnce(&reqwest::Client) -> reqwest::RequestBuilder,
+        R: DeserializeOwned,
+    {
+        self.rate_limiter
+            .throttle(|| async {
+                f(&self.fetch_client)
+                    .headers(self.headers.clone())
+                    .send()
+                    .await?
+                    .json::<R>()
+                    .await
+            })
+            .await
+    }
+
+    pub async fn fetch_bytes<F>(&self, f: F) -> Result<Bytes, reqwest::Error>
+    where
+        F: FnOnce(&reqwest::Client) -> reqwest::RequestBuilder,
+    {
+        self.rate_limiter
+            .throttle(|| async {
+                f(&self.fetch_client)
+                    .headers(self.headers.clone())
+                    .send()
+                    .await?
+                    .bytes()
+                    .await
+            })
+            .await
+    }
+
+    pub async fn fetch_text<F>(&self, f: F) -> Result<String, reqwest::Error>
+    where
+        F: FnOnce(&reqwest::Client) -> reqwest::RequestBuilder,
+    {
+        self.rate_limiter
+            .throttle(|| async {
+                f(&self.fetch_client)
+                    .headers(self.headers.clone())
+                    .send()
+                    .await?
+                    .text()
+                    .await
+            })
+            .await
+    }
+}
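Note: `ApiClient` routes every request through one shared `RateLimiter`, so a caller only supplies a closure that builds the request. A hypothetical caller might look like the sketch below; the endpoint URL, query parameter, and response shape are illustrative assumptions, not part of this commit:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct SearchItem {
    id: u64,
    title: String,
}

// Hypothetical: fetch a JSON list through the throttled client defined above.
async fn search_example(client: &ApiClient, q: &str) -> Result<Vec<SearchItem>, reqwest::Error> {
    client
        .fetch_json(|c| c.get("https://api.example.com/search").query(&[("q", q)]))
        .await
}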
@@ -1,12 +0,0 @@
-use bytes::Bytes;
-use reqwest::IntoUrl;
-
-use super::defs::DEFAULT_USER_AGENT;
-
-pub async fn download_bytes<T: IntoUrl>(url: T) -> eyre::Result<Bytes> {
-    let request_client = reqwest::Client::builder()
-        .user_agent(DEFAULT_USER_AGENT)
-        .build()?;
-    let bytes = request_client.get(url).send().await?.bytes().await?;
-    Ok(bytes)
-}
@@ -1,249 +1,18 @@
-use itertools::Itertools;
-use lazy_static::lazy_static;
-use librqbit_core::{
-    magnet::Magnet,
-    torrent_metainfo::{torrent_from_bytes, TorrentMetaV1Owned},
-};
+use bytes::Bytes;
 pub use qbit_rs::model::{
     Torrent as QbitTorrent, TorrentContent as QbitTorrentContent,
     TorrentFilter as QbitTorrentFilter, TorrentSource as QbitTorrentSource,
 };
-use regex::Regex;
-use serde::{Deserialize, Serialize};
-use url::Url;
+use reqwest::IntoUrl;
 
-use crate::downloaders::{bytes::download_bytes, error::DownloaderError};
+pub(crate) async fn download_bytes<T: IntoUrl>(url: T) -> eyre::Result<Bytes> {
+    let request_client = reqwest::Client::builder()
+        .user_agent(DEFAULT_USER_AGENT)
+        .build()?;
+    let bytes = request_client.get(url).send().await?.bytes().await?;
+    Ok(bytes)
+}
 
 pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
 pub const MAGNET_SCHEMA: &str = "magnet";
 pub const DEFAULT_USER_AGENT: &str = "Wget/1.13.4 (linux-gnu)";
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-pub enum TorrentFilter {
-    All,
-    Downloading,
-    Completed,
-    Paused,
-    Active,
-    Inactive,
-    Resumed,
-    Stalled,
-    StalledUploading,
-    StalledDownloading,
-    Errored,
-}
-
-impl From<TorrentFilter> for QbitTorrentFilter {
-    fn from(val: TorrentFilter) -> Self {
-        match val {
-            TorrentFilter::All => QbitTorrentFilter::All,
-            TorrentFilter::Downloading => QbitTorrentFilter::Downloading,
-            TorrentFilter::Completed => QbitTorrentFilter::Completed,
-            TorrentFilter::Paused => QbitTorrentFilter::Paused,
-            TorrentFilter::Active => QbitTorrentFilter::Active,
-            TorrentFilter::Inactive => QbitTorrentFilter::Inactive,
-            TorrentFilter::Resumed => QbitTorrentFilter::Resumed,
-            TorrentFilter::Stalled => QbitTorrentFilter::Stalled,
-            TorrentFilter::StalledUploading => QbitTorrentFilter::StalledUploading,
-            TorrentFilter::StalledDownloading => QbitTorrentFilter::StalledDownloading,
-            TorrentFilter::Errored => QbitTorrentFilter::Errored,
-        }
-    }
-}
-
-lazy_static! {
-    static ref TORRENT_HASH_RE: Regex = Regex::new(r"[a-fA-F0-9]{40}").unwrap();
-    static ref TORRENT_EXT_RE: Regex = Regex::new(r"\.torrent$").unwrap();
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum TorrentSource {
-    MagnetUrl {
-        url: Url,
-        hash: String,
-    },
-    TorrentUrl {
-        url: Url,
-        hash: String,
-    },
-    TorrentFile {
-        torrent: Vec<u8>,
-        hash: String,
-        name: Option<String>,
-    },
-}
-
-impl TorrentSource {
-    pub async fn parse(url: &str) -> eyre::Result<Self> {
-        let url = Url::parse(url)?;
-        let source = if url.scheme() == MAGNET_SCHEMA {
-            TorrentSource::from_magnet_url(url)?
-        } else if let Some(basename) = url
-            .clone()
-            .path_segments()
-            .and_then(|segments| segments.last())
-        {
-            if let (Some(match_hash), true) = (
-                TORRENT_HASH_RE.find(basename),
-                TORRENT_EXT_RE.is_match(basename),
-            ) {
-                TorrentSource::from_torrent_url(url, match_hash.as_str().to_string())?
-            } else {
-                let contents = download_bytes(url).await?;
-                TorrentSource::from_torrent_file(contents.to_vec(), Some(basename.to_string()))?
-            }
-        } else {
-            let contents = download_bytes(url).await?;
-            TorrentSource::from_torrent_file(contents.to_vec(), None)?
-        };
-        Ok(source)
-    }
-
-    pub fn from_torrent_file(file: Vec<u8>, name: Option<String>) -> eyre::Result<Self> {
-        let torrent: TorrentMetaV1Owned =
-            torrent_from_bytes(&file).map_err(|_| DownloaderError::InvalidTorrentFileFormat)?;
-        let hash = torrent.info_hash.as_string();
-        Ok(TorrentSource::TorrentFile {
-            torrent: file,
-            hash,
-            name,
-        })
-    }
-
-    pub fn from_magnet_url(url: Url) -> eyre::Result<Self> {
-        if url.scheme() != MAGNET_SCHEMA {
-            Err(DownloaderError::InvalidUrlSchema {
-                found: url.scheme().to_string(),
-                expected: MAGNET_SCHEMA.to_string(),
-            }
-            .into())
-        } else {
-            let magnet =
-                Magnet::parse(url.as_str()).map_err(|_| DownloaderError::InvalidMagnetFormat {
-                    url: url.as_str().to_string(),
-                })?;
-            let hash = magnet.info_hash.as_string();
-            Ok(TorrentSource::MagnetUrl { url, hash })
-        }
-    }
-
-    pub fn from_torrent_url(url: Url, hash: String) -> eyre::Result<Self> {
-        Ok(TorrentSource::TorrentUrl { url, hash })
-    }
-
-    pub fn hash(&self) -> &str {
-        match self {
-            TorrentSource::MagnetUrl { hash, .. } => hash,
-            TorrentSource::TorrentUrl { hash, .. } => hash,
-            TorrentSource::TorrentFile { hash, .. } => hash,
-        }
-    }
-}
-
-impl From<TorrentSource> for QbitTorrentSource {
-    fn from(value: TorrentSource) -> Self {
-        match value {
-            TorrentSource::MagnetUrl { url, .. } => QbitTorrentSource::Urls {
-                urls: qbit_rs::model::Sep::from([url]),
-            },
-            TorrentSource::TorrentUrl { url, .. } => QbitTorrentSource::Urls {
-                urls: qbit_rs::model::Sep::from([url]),
-            },
-            TorrentSource::TorrentFile {
-                torrent: torrents, ..
-            } => QbitTorrentSource::TorrentFiles { torrents },
-        }
-    }
-}
-
-pub trait TorrentContent {
-    fn get_name(&self) -> &str;
-
-    fn get_all_size(&self) -> u64;
-
-    fn get_progress(&self) -> f64;
-
-    fn get_curr_size(&self) -> u64;
-}
-
-impl TorrentContent for QbitTorrentContent {
-    fn get_name(&self) -> &str {
-        self.name.as_str()
-    }
-
-    fn get_all_size(&self) -> u64 {
-        self.size
-    }
-
-    fn get_progress(&self) -> f64 {
-        self.progress
-    }
-
-    fn get_curr_size(&self) -> u64 {
-        u64::clamp(
-            f64::round(self.get_all_size() as f64 * self.get_progress()) as u64,
-            0,
-            self.get_all_size(),
-        )
-    }
-}
-
-#[derive(Debug, Clone)]
-pub enum Torrent {
-    Qbit {
-        torrent: QbitTorrent,
-        contents: Vec<QbitTorrentContent>,
-    },
-}
-
-impl Torrent {
-    pub fn iter_files(&self) -> impl Iterator<Item = &dyn TorrentContent> {
-        match self {
-            Torrent::Qbit { contents, .. } => {
-                contents.iter().map(|item| item as &dyn TorrentContent)
-            }
-        }
-    }
-
-    pub fn get_name(&self) -> Option<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.name.as_deref(),
-        }
-    }
-
-    pub fn get_hash(&self) -> Option<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.hash.as_deref(),
-        }
-    }
-
-    pub fn get_save_path(&self) -> Option<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.save_path.as_deref(),
-        }
-    }
-
-    pub fn get_content_path(&self) -> Option<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.content_path.as_deref(),
-        }
-    }
-
-    pub fn get_tags(&self) -> Vec<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.tags.as_deref().map_or_else(Vec::new, |s| {
-                s.split(',')
-                    .map(|s| s.trim())
-                    .filter(|s| !s.is_empty())
-                    .collect_vec()
-            }),
-        }
-    }
-
-    pub fn get_category(&self) -> Option<&str> {
-        match self {
-            Torrent::Qbit { torrent, .. } => torrent.category.as_deref(),
-        }
-    }
-}
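Note: after this change, defs.rs keeps only what the downloader modules share: the crate-internal `download_bytes` helper, the MIME and magnet-schema constants, and the re-exported `qbit_rs` model types; everything torrent-specific moves to the new torrent.rs below. A hedged usage sketch of the helper (the URL is a made-up placeholder):

// Illustrative crate-internal call to the helper kept in defs.rs.
async fn fetch_example() -> eyre::Result<()> {
    let bytes = download_bytes("https://example.com/some.torrent").await?;
    println!("fetched {} bytes", bytes.len());
    Ok(())
}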
@@ -1,11 +0,0 @@
-use reqwest::IntoUrl;
-
-use super::defs::DEFAULT_USER_AGENT;
-
-pub async fn download_html<U: IntoUrl>(url: U) -> eyre::Result<String> {
-    let request_client = reqwest::Client::builder()
-        .user_agent(DEFAULT_USER_AGENT)
-        .build()?;
-    let content = request_client.get(url).send().await?.text().await?;
-    Ok(content)
-}
@@ -1,8 +0,0 @@
-use bytes::Bytes;
-use reqwest::IntoUrl;
-
-use super::bytes::download_bytes;
-
-pub async fn download_image<U: IntoUrl>(url: U) -> eyre::Result<Bytes> {
-    download_bytes(url).await
-}
@@ -1,7 +1,7 @@
-pub mod bytes;
+pub mod api_client;
 pub mod defs;
 pub mod error;
-pub mod html;
 pub mod qbitorrent;
-pub mod torrent_downloader;
-pub mod image;
+pub mod torrent;
+
+pub use api_client::ApiClient;
@@ -17,14 +17,13 @@ use quirks_path::{path_equals_as_file_url, Path, PathBuf};
 use tokio::time::sleep;
 use url::Url;
 
-use super::{
-    defs::{Torrent, TorrentFilter, TorrentSource},
-    error::DownloaderError,
-    torrent_downloader::TorrentDownloader,
-};
+use super::error::DownloaderError;
 use crate::{
-    downloaders::defs::{QbitTorrent, QbitTorrentContent, TorrentContent},
-    models::{entities::downloaders, prelude::DownloaderCategory},
+    downloaders::{
+        defs::{QbitTorrent, QbitTorrentContent},
+        torrent::{Torrent, TorrentContent, TorrentDownloader, TorrentFilter, TorrentSource},
+    },
+    models::{downloaders, prelude::DownloaderCategory},
 };
 
 pub struct SyncDataCache {
@@ -414,7 +413,7 @@ impl Debug for QBittorrentDownloader {
 }
 
 #[cfg(test)]
-pub mod tests {
+pub(crate) mod tests {
     use itertools::Itertools;
 
     use super::*;
crates/recorder/src/downloaders/torrent.rs (new file, 341 lines)
@@ -0,0 +1,341 @@
+use eyre::OptionExt;
+use itertools::Itertools;
+use lazy_static::lazy_static;
+use librqbit_core::{
+    magnet::Magnet,
+    torrent_metainfo::{torrent_from_bytes, TorrentMetaV1Owned},
+};
+use quirks_path::{Path, PathBuf};
+use regex::Regex;
+use sea_orm::{ActiveModelTrait, ActiveValue, DatabaseConnection, IntoActiveModel};
+use serde::{Deserialize, Serialize};
+use url::Url;
+
+use super::{
+    defs::{
+        download_bytes, QbitTorrent, QbitTorrentContent, QbitTorrentFilter, QbitTorrentSource,
+        MAGNET_SCHEMA,
+    },
+    error::DownloaderError,
+    qbitorrent::QBittorrentDownloader,
+};
+use crate::{
+    models::{bangumi, downloaders, downloaders::DownloaderCategory, resources},
+    path::torrent_path::gen_bangumi_sub_path,
+};
+
+lazy_static! {
+    static ref TORRENT_HASH_RE: Regex = Regex::new(r"[a-fA-F0-9]{40}").unwrap();
+    static ref TORRENT_EXT_RE: Regex = Regex::new(r"\.torrent$").unwrap();
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum TorrentFilter {
+    All,
+    Downloading,
+    Completed,
+    Paused,
+    Active,
+    Inactive,
+    Resumed,
+    Stalled,
+    StalledUploading,
+    StalledDownloading,
+    Errored,
+}
+
+impl From<TorrentFilter> for QbitTorrentFilter {
+    fn from(val: TorrentFilter) -> Self {
+        match val {
+            TorrentFilter::All => QbitTorrentFilter::All,
+            TorrentFilter::Downloading => QbitTorrentFilter::Downloading,
+            TorrentFilter::Completed => QbitTorrentFilter::Completed,
+            TorrentFilter::Paused => QbitTorrentFilter::Paused,
+            TorrentFilter::Active => QbitTorrentFilter::Active,
+            TorrentFilter::Inactive => QbitTorrentFilter::Inactive,
+            TorrentFilter::Resumed => QbitTorrentFilter::Resumed,
+            TorrentFilter::Stalled => QbitTorrentFilter::Stalled,
+            TorrentFilter::StalledUploading => QbitTorrentFilter::StalledUploading,
+            TorrentFilter::StalledDownloading => QbitTorrentFilter::StalledDownloading,
+            TorrentFilter::Errored => QbitTorrentFilter::Errored,
+        }
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum TorrentSource {
+    MagnetUrl {
+        url: Url,
+        hash: String,
+    },
+    TorrentUrl {
+        url: Url,
+        hash: String,
+    },
+    TorrentFile {
+        torrent: Vec<u8>,
+        hash: String,
+        name: Option<String>,
+    },
+}
+
+impl TorrentSource {
+    pub async fn parse(url: &str) -> eyre::Result<Self> {
+        let url = Url::parse(url)?;
+        let source = if url.scheme() == MAGNET_SCHEMA {
+            TorrentSource::from_magnet_url(url)?
+        } else if let Some(basename) = url
+            .clone()
+            .path_segments()
+            .and_then(|segments| segments.last())
+        {
+            if let (Some(match_hash), true) = (
+                TORRENT_HASH_RE.find(basename),
+                TORRENT_EXT_RE.is_match(basename),
+            ) {
+                TorrentSource::from_torrent_url(url, match_hash.as_str().to_string())?
+            } else {
+                let contents = download_bytes(url).await?;
+                TorrentSource::from_torrent_file(contents.to_vec(), Some(basename.to_string()))?
+            }
+        } else {
+            let contents = download_bytes(url).await?;
+            TorrentSource::from_torrent_file(contents.to_vec(), None)?
+        };
+        Ok(source)
+    }
+
+    pub fn from_torrent_file(file: Vec<u8>, name: Option<String>) -> eyre::Result<Self> {
+        let torrent: TorrentMetaV1Owned =
+            torrent_from_bytes(&file).map_err(|_| DownloaderError::InvalidTorrentFileFormat)?;
+        let hash = torrent.info_hash.as_string();
+        Ok(TorrentSource::TorrentFile {
+            torrent: file,
+            hash,
+            name,
+        })
+    }
+
+    pub fn from_magnet_url(url: Url) -> eyre::Result<Self> {
+        if url.scheme() != MAGNET_SCHEMA {
+            Err(DownloaderError::InvalidUrlSchema {
+                found: url.scheme().to_string(),
+                expected: MAGNET_SCHEMA.to_string(),
+            }
+            .into())
+        } else {
+            let magnet =
+                Magnet::parse(url.as_str()).map_err(|_| DownloaderError::InvalidMagnetFormat {
+                    url: url.as_str().to_string(),
+                })?;
+            let hash = magnet
+                .as_id20()
+                .ok_or_eyre("no info hash found")?
+                .as_string();
+            Ok(TorrentSource::MagnetUrl { url, hash })
+        }
+    }
+
+    pub fn from_torrent_url(url: Url, hash: String) -> eyre::Result<Self> {
+        Ok(TorrentSource::TorrentUrl { url, hash })
+    }
+
+    pub fn hash(&self) -> &str {
+        match self {
+            TorrentSource::MagnetUrl { hash, .. } => hash,
+            TorrentSource::TorrentUrl { hash, .. } => hash,
+            TorrentSource::TorrentFile { hash, .. } => hash,
+        }
+    }
+}
+
+impl From<TorrentSource> for QbitTorrentSource {
+    fn from(value: TorrentSource) -> Self {
+        match value {
+            TorrentSource::MagnetUrl { url, .. } => QbitTorrentSource::Urls {
+                urls: qbit_rs::model::Sep::from([url]),
+            },
+            TorrentSource::TorrentUrl { url, .. } => QbitTorrentSource::Urls {
+                urls: qbit_rs::model::Sep::from([url]),
+            },
+            TorrentSource::TorrentFile {
+                torrent: torrents, ..
+            } => QbitTorrentSource::TorrentFiles { torrents },
+        }
+    }
+}
+
+pub trait TorrentContent {
+    fn get_name(&self) -> &str;
+
+    fn get_all_size(&self) -> u64;
+
+    fn get_progress(&self) -> f64;
+
+    fn get_curr_size(&self) -> u64;
+}
+
+impl TorrentContent for QbitTorrentContent {
+    fn get_name(&self) -> &str {
+        self.name.as_str()
+    }
+
+    fn get_all_size(&self) -> u64 {
+        self.size
+    }
+
+    fn get_progress(&self) -> f64 {
+        self.progress
+    }
+
+    fn get_curr_size(&self) -> u64 {
+        u64::clamp(
+            f64::round(self.get_all_size() as f64 * self.get_progress()) as u64,
+            0,
+            self.get_all_size(),
+        )
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum Torrent {
+    Qbit {
+        torrent: QbitTorrent,
+        contents: Vec<QbitTorrentContent>,
+    },
+}
+
+impl Torrent {
+    pub fn iter_files(&self) -> impl Iterator<Item = &dyn TorrentContent> {
+        match self {
+            Torrent::Qbit { contents, .. } => {
+                contents.iter().map(|item| item as &dyn TorrentContent)
+            }
+        }
+    }
+
+    pub fn get_name(&self) -> Option<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.name.as_deref(),
+        }
+    }
+
+    pub fn get_hash(&self) -> Option<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.hash.as_deref(),
+        }
+    }
+
+    pub fn get_save_path(&self) -> Option<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.save_path.as_deref(),
+        }
+    }
+
+    pub fn get_content_path(&self) -> Option<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.content_path.as_deref(),
+        }
+    }
+
+    pub fn get_tags(&self) -> Vec<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.tags.as_deref().map_or_else(Vec::new, |s| {
+                s.split(',')
+                    .map(|s| s.trim())
+                    .filter(|s| !s.is_empty())
+                    .collect_vec()
+            }),
+        }
+    }
+
+    pub fn get_category(&self) -> Option<&str> {
+        match self {
+            Torrent::Qbit { torrent, .. } => torrent.category.as_deref(),
+        }
+    }
+}
+
+#[async_trait::async_trait]
+pub trait TorrentDownloader {
+    async fn get_torrents_info(
+        &self,
+        status_filter: TorrentFilter,
+        category: Option<String>,
+        tag: Option<String>,
+    ) -> eyre::Result<Vec<Torrent>>;
+
+    async fn add_torrents(
+        &self,
+        source: TorrentSource,
+        save_path: String,
+        category: Option<&str>,
+    ) -> eyre::Result<()>;
+
+    async fn delete_torrents(&self, hashes: Vec<String>) -> eyre::Result<()>;
+
+    async fn rename_torrent_file(
+        &self,
+        hash: &str,
+        old_path: &str,
+        new_path: &str,
+    ) -> eyre::Result<()>;
+
+    async fn move_torrents(&self, hashes: Vec<String>, new_path: &str) -> eyre::Result<()>;
+
+    async fn get_torrent_path(&self, hashes: String) -> eyre::Result<Option<String>>;
+
+    async fn check_connection(&self) -> eyre::Result<()>;
+
+    async fn set_torrents_category(&self, hashes: Vec<String>, category: &str) -> eyre::Result<()>;
+
+    async fn add_torrent_tags(&self, hashes: Vec<String>, tags: Vec<String>) -> eyre::Result<()>;
+
+    async fn add_category(&self, category: &str) -> eyre::Result<()>;
+
+    fn get_save_path(&self, sub_path: &Path) -> PathBuf;
+
+    async fn add_resources_for_bangumi<'a, 'b>(
+        &self,
+        db: &'a DatabaseConnection,
+        resources: &[&resources::Model],
+        mut bangumi: bangumi::Model,
+    ) -> eyre::Result<bangumi::Model> {
+        if bangumi.save_path.is_none() {
+            let gen_sub_path = gen_bangumi_sub_path(&bangumi);
+            let mut bangumi_active = bangumi.into_active_model();
+            bangumi_active.save_path = ActiveValue::Set(Some(gen_sub_path.to_string()));
+            bangumi = bangumi_active.update(db).await?;
+        }
+
+        let sub_path = bangumi
+            .save_path
+            .as_ref()
+            .unwrap_or_else(|| unreachable!("must have a sub path"));
+
+        let mut torrent_urls = vec![];
+        for m in resources.iter() {
+            torrent_urls.push(Url::parse(&m.url as &str)?);
+        }
+
+        // make sequence to prevent too fast to be banned
+        for d in resources.iter() {
+            let source = TorrentSource::parse(&d.url).await?;
+            self.add_torrents(source, sub_path.clone(), Some("bangumi"))
+                .await?;
+        }
+
+        Ok(bangumi)
+    }
+}
+
+pub async fn build_torrent_downloader_from_downloader_model(
+    model: downloaders::Model,
+) -> eyre::Result<Box<dyn TorrentDownloader>> {
+    Ok(Box::new(match &model.category {
+        DownloaderCategory::QBittorrent => {
+            QBittorrentDownloader::from_downloader_model(model).await?
+        }
+    }))
+}
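Note: `TorrentSource::parse` dispatches on the URL shape: magnet links go through `Magnet::parse` (now reading the hash via `as_id20()` rather than the old `info_hash` field), URLs whose basename matches `<40 hex chars>.torrent` are kept as torrent URLs, and anything else is downloaded and parsed as a torrent file. A sketch of the magnet branch, assuming the module is in scope; the info hash below is a made-up 40-hex placeholder:

#[tokio::main]
async fn main() -> eyre::Result<()> {
    // No network access: the magnet branch only parses the URL.
    let url = "magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567";
    let source = TorrentSource::parse(url).await?;
    assert_eq!(source.hash(), "0123456789abcdef0123456789abcdef01234567");
    Ok(())
}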
@@ -1,96 +0,0 @@
-use downloaders::DownloaderCategory;
-use quirks_path::{Path, PathBuf};
-use sea_orm::{ActiveModelTrait, ActiveValue, DatabaseConnection, IntoActiveModel};
-use url::Url;
-
-use super::{
-    defs::{Torrent, TorrentFilter, TorrentSource},
-    qbitorrent::QBittorrentDownloader,
-};
-use crate::{
-    models::{bangumi, downloaders, downloads},
-    path::torrent_path::gen_bangumi_sub_path,
-};
-
-#[async_trait::async_trait]
-pub trait TorrentDownloader {
-    async fn get_torrents_info(
-        &self,
-        status_filter: TorrentFilter,
-        category: Option<String>,
-        tag: Option<String>,
-    ) -> eyre::Result<Vec<Torrent>>;
-
-    async fn add_torrents(
-        &self,
-        source: TorrentSource,
-        save_path: String,
-        category: Option<&str>,
-    ) -> eyre::Result<()>;
-
-    async fn delete_torrents(&self, hashes: Vec<String>) -> eyre::Result<()>;
-
-    async fn rename_torrent_file(
-        &self,
-        hash: &str,
-        old_path: &str,
-        new_path: &str,
-    ) -> eyre::Result<()>;
-
-    async fn move_torrents(&self, hashes: Vec<String>, new_path: &str) -> eyre::Result<()>;
-
-    async fn get_torrent_path(&self, hashes: String) -> eyre::Result<Option<String>>;
-
-    async fn check_connection(&self) -> eyre::Result<()>;
-
-    async fn set_torrents_category(&self, hashes: Vec<String>, category: &str) -> eyre::Result<()>;
-
-    async fn add_torrent_tags(&self, hashes: Vec<String>, tags: Vec<String>) -> eyre::Result<()>;
-
-    async fn add_category(&self, category: &str) -> eyre::Result<()>;
-
-    fn get_save_path(&self, sub_path: &Path) -> PathBuf;
-
-    async fn add_downloads_for_bangumi<'a, 'b>(
-        &self,
-        db: &'a DatabaseConnection,
-        downloads: &[&downloads::Model],
-        mut bangumi: bangumi::Model,
-    ) -> eyre::Result<bangumi::Model> {
-        if bangumi.save_path.is_none() {
-            let gen_sub_path = gen_bangumi_sub_path(&bangumi);
-            let mut bangumi_active = bangumi.into_active_model();
-            bangumi_active.save_path = ActiveValue::Set(Some(gen_sub_path.to_string()));
-            bangumi = bangumi_active.update(db).await?;
-        }
-
-        let sub_path = bangumi
-            .save_path
-            .as_ref()
-            .unwrap_or_else(|| unreachable!("must have a sub path"));
-
-        let mut torrent_urls = vec![];
-        for m in downloads.iter() {
-            torrent_urls.push(Url::parse(&m.url as &str)?);
-        }
-
-        // make sequence to prevent too fast to be banned
-        for d in downloads.iter() {
-            let source = TorrentSource::parse(&d.url).await?;
-            self.add_torrents(source, sub_path.clone(), Some("bangumi"))
-                .await?;
-        }
-
-        Ok(bangumi)
-    }
-}
-
-pub async fn build_torrent_downloader_from_downloader_model(
-    model: downloaders::Model,
-) -> eyre::Result<Box<dyn TorrentDownloader>> {
-    Ok(Box::new(match &model.category {
-        DownloaderCategory::QBittorrent => {
-            QBittorrentDownloader::from_downloader_model(model).await?
-        }
-    }))
-}
crates/recorder/src/i18n/mod.rs (new file, 105 lines)
@@ -0,0 +1,105 @@
+use lazy_static::lazy_static;
+use oxilangtag::LanguageTag;
+use serde::{Deserialize, Serialize};
+
+use crate::parsers::errors::ParseError;
+
+lazy_static! {
+    static ref LANGTAG_ADHOC_ALIAS_PAIRS: Vec<(&'static str, &'static str)> = {
+        vec![
+            ("tc", "zh-TW"),
+            ("zh-tw", "zh-TW"),
+            ("cht", "zh-TW"),
+            ("繁", "zh-TW"),
+            ("sc", "zh-CN"),
+            ("chs", "zh-CN"),
+            ("简", "zh-CN"),
+            ("zh-cn", "zh-CN"),
+            ("eng", "en"),
+            ("英", "en"),
+            ("jp", "ja-JP"),
+            ("jpn", "ja-JP"),
+            ("日", "ja"),
+        ]
+    };
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+pub enum LanguagePresetName {
+    #[serde(rename = "zh-TW")]
+    ZhCN,
+    #[serde(rename = "zh-CN")]
+    ZhTW,
+    #[serde(rename = "zh")]
+    Zh,
+    #[serde(rename = "en")]
+    En,
+    #[serde(rename = "ja")]
+    Ja,
+}
+
+#[derive(Debug, Clone)]
+pub struct LanguagePreset {
+    name: LanguagePresetName,
+    tag: LanguageTag<String>,
+}
+
+impl LanguagePreset {
+    pub fn parse<S: AsRef<str>>(s: S) -> Result<Self, ParseError> {
+        let s = s.as_ref();
+        let s_lower = s.to_lowercase();
+        let mut s_rc = s;
+        for (alias, v) in LANGTAG_ADHOC_ALIAS_PAIRS.iter() {
+            if s_lower.contains(alias) {
+                s_rc = v;
+                break;
+            }
+        }
+        let lang_tag = LanguageTag::parse(s_rc.to_string())?;
+
+        let primary = lang_tag.primary_language();
+        let region = lang_tag.region();
+
+        let kind = match (primary, region) {
+            ("zh", Some("TW")) => LanguagePresetName::ZhTW,
+            ("zh", Some("CN")) => LanguagePresetName::ZhCN,
+            ("zh", _) => LanguagePresetName::Zh,
+            ("en", _) => LanguagePresetName::En,
+            ("ja", _) => LanguagePresetName::Ja,
+            _ => Err(ParseError::UnsupportedLanguagePreset(s_rc.to_string()))?,
+        };
+
+        Ok(Self {
+            name: kind,
+            tag: lang_tag,
+        })
+    }
+
+    pub fn name(&self) -> &LanguagePresetName {
+        &self.name
+    }
+
+    pub fn name_str(&self) -> &str {
+        &self.name.as_ref()
+    }
+
+    pub fn tag(&self) -> &LanguageTag<String> {
+        &self.tag
+    }
+
+    pub fn tag_str(&self) -> &str {
+        &self.tag.as_str()
+    }
+}
+
+impl AsRef<str> for LanguagePresetName {
+    fn as_ref(&self) -> &str {
+        match self {
+            Self::ZhTW => "zh-TW",
+            Self::ZhCN => "zh-CN",
+            Self::Zh => "zh",
+            Self::En => "en",
+            Self::Ja => "ja",
+        }
+    }
+}
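Note: `LanguagePreset::parse` first rewrites ad-hoc fansub language markers ("chs", "繁", "jpn", ...) onto BCP-47 tags via the alias table, then classifies the tag by primary language and region. A sketch, assuming the module is in scope:

// Illustrative: "CHS" lowercases to "chs", which the alias table maps
// to "zh-CN" before BCP-47 parsing and classification.
fn demo() -> Result<(), ParseError> {
    let preset = LanguagePreset::parse("CHS")?;
    assert_eq!(preset.name_str(), "zh-CN");
    assert_eq!(preset.tag_str(), "zh-CN");
    Ok(())
}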
@@ -3,12 +3,18 @@
 pub mod app;
 pub mod config;
 pub mod controllers;
-pub mod dal;
 pub mod downloaders;
 pub mod migrations;
 pub mod models;
 pub mod parsers;
 pub mod path;
+pub mod search;
+pub mod storage;
 pub mod tasks;
 pub mod views;
 pub mod workers;
+
+pub mod i18n;
+
+pub mod subscribe;
+pub mod utils;
@@ -1,6 +1,7 @@
-use std::{collections::HashSet, fmt::Display};
+use std::collections::HashSet;
 
-use sea_orm::{DeriveIden, Statement};
+use itertools::Itertools;
+use sea_orm::{ActiveEnum, DeriveIden, Statement};
 use sea_orm_migration::prelude::{extension::postgres::IntoTypeRef, *};
 
 use crate::migrations::extension::postgres::Type;
@@ -9,6 +10,7 @@ use crate::migrations::extension::postgres::Type;
 pub enum GeneralIds {
     CreatedAt,
     UpdatedAt,
+    Id,
 }
 
 #[derive(DeriveIden)]
@@ -18,6 +20,7 @@ pub enum Subscribers {
     Pid,
     DisplayName,
     DownloaderId,
+    BangumiConf,
 }
 
 #[derive(DeriveIden)]
@@ -36,32 +39,59 @@ pub enum Subscriptions {
 pub enum Bangumi {
     Table,
     Id,
-    DisplayName,
     SubscriptionId,
+    DisplayName,
+    OfficialTitle,
+    Fansub,
+    Season,
+    Filter,
+    PosterLink,
+    SavePath,
+    LastEp,
+    BangumiConfOverride,
 }
 
 #[derive(DeriveIden)]
 pub enum Episodes {
     Table,
     Id,
     OriginTitle,
+    OfficialTitle,
     DisplayName,
+    NameZh,
+    NameJp,
+    NameEn,
+    SNameZh,
+    SNameJp,
+    SNameEn,
     BangumiId,
     OutputName,
-    DownloadId,
+    ResourceId,
     SavePath,
+    Resolution,
+    Season,
+    SeasonRaw,
+    Fansub,
+    PosterLink,
+    Homepage,
+    Subtitle,
+    Source,
+    EpIndex,
 }
 
 #[derive(DeriveIden)]
-pub enum Downloads {
+pub enum Resources {
     Table,
     Id,
     SubscriptionId,
-    OriginalName,
+    OriginTitle,
     DisplayName,
     Status,
     CurrSize,
     AllSize,
-    Mime,
+    Category,
     Url,
+    Homepage,
+    SavePath,
 }
 
 #[derive(DeriveIden)]
@@ -73,7 +103,7 @@ pub enum Downloaders {
     Password,
     Username,
     SubscriberId,
-    DownloadPath,
+    SavePath,
 }
 
 #[async_trait::async_trait]
@@ -151,7 +181,7 @@ pub trait CustomSchemaManagerExt {
 
     async fn create_postgres_enum_for_active_enum<
         E: IntoTypeRef + IntoIden + Send + Clone,
-        T: Display + Send,
+        T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
         I: IntoIterator<Item = T> + Send,
     >(
         &self,
@@ -161,7 +191,7 @@ pub trait CustomSchemaManagerExt {
 
     async fn add_postgres_enum_values_for_active_enum<
         E: IntoTypeRef + IntoIden + Send + Clone,
-        T: Display + Send,
+        T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
        I: IntoIterator<Item = T> + Send,
     >(
         &self,
@@ -183,6 +213,48 @@ pub trait CustomSchemaManagerExt {
         &self,
         enum_name: E,
     ) -> Result<HashSet<String>, DbErr>;
+
+    async fn create_convention_index<
+        T: IntoTableRef + Send,
+        I: IntoIterator<Item = C> + Send,
+        C: IntoIndexColumn + Send,
+    >(
+        &self,
+        table: T,
+        columns: I,
+    ) -> Result<(), DbErr>;
+
+    fn build_convention_index<
+        T: IntoTableRef + Send,
+        I: IntoIterator<Item = C> + Send,
+        C: IntoIndexColumn + Send,
+    >(
+        &self,
+        table: T,
+        columns: I,
+    ) -> IndexCreateStatement {
+        let table = table.into_table_ref();
+        let table_name = match &table {
+            TableRef::Table(s) => s.to_string(),
+            _ => panic!("unsupported table ref"),
+        };
+        let columns = columns
+            .into_iter()
+            .map(|c| c.into_index_column())
+            .collect_vec();
+        let mut stmt = Index::create();
+        stmt.table(table);
+        for c in columns {
+            stmt.col(c);
+        }
+        let index_name = format!(
+            "idx_{}_{}",
+            table_name,
+            stmt.get_index_spec().get_column_names().join("-")
+        );
+        stmt.name(&index_name);
+        stmt
+    }
 }
 
 #[async_trait::async_trait]
@@ -190,7 +262,8 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
     async fn create_postgres_auto_update_ts_fn(&self, col_name: &str) -> Result<(), DbErr> {
         let sql = format!(
             "CREATE OR REPLACE FUNCTION update_{col_name}_column() RETURNS TRIGGER AS $$ BEGIN \
-             NEW.{col_name} = current_timestamp; RETURN NEW; END; $$ language 'plpgsql';"
+             NEW.\"{col_name}\" = current_timestamp; RETURN NEW; END; $$ language 'plpgsql';",
+            col_name = col_name
         );
 
         self.get_connection()
@@ -239,7 +312,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
 
     async fn create_postgres_enum_for_active_enum<
         E: IntoTypeRef + IntoIden + Send + Clone,
-        T: Display + Send,
+        T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
         I: IntoIterator<Item = T> + Send,
     >(
         &self,
@@ -250,7 +323,10 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
         if !existed {
             let idents = values
                 .into_iter()
-                .map(|v| Alias::new(v.to_string()))
+                .map(|v| {
+                    let v = v.to_value();
+                    Alias::new(v)
+                })
                 .collect::<Vec<_>>();
             self.create_type(Type::create().as_enum(enum_name).values(idents).to_owned())
                 .await?;
@@ -263,7 +339,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
 
     async fn add_postgres_enum_values_for_active_enum<
         E: IntoTypeRef + IntoIden + Send + Clone,
-        T: Display + Send,
+        T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
         I: IntoIterator<Item = T> + Send,
     >(
         &self,
@@ -273,7 +349,8 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
         let exists_values = self.get_postgres_enum_values(enum_name.clone()).await?;
         let to_add_values = values
             .into_iter()
-            .filter(|v| !exists_values.contains(&v.to_string()))
+            .map(|v| v.to_value())
+            .filter(|v| !exists_values.contains(v))
            .collect::<Vec<_>>();
 
         if to_add_values.is_empty() {
@@ -283,6 +360,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
         let mut type_alter = Type::alter().name(enum_name);
 
         for v in to_add_values {
+            let v: Value = v.into();
             type_alter = type_alter.add_value(Alias::new(v.to_string()));
         }
 
@@ -318,8 +396,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
     ) -> Result<HashSet<String>, DbErr> {
         let enum_name: String = enum_name.into_iden().to_string();
         let sql = format!(
-            "SELECT pg_enum.enumlabel AS enumlabel FROM pg_type JOIN pg_enum ON pg_enum.enumtypid \
-             = pg_type.oid WHERE pg_type.typname = '{enum_name}';"
+            r#"SELECT pg_enum.enumlabel AS enumlabel FROM pg_type JOIN pg_enum ON pg_enum.enumtypid = pg_type.oid WHERE pg_type.typname = '{enum_name}';"#
         );
 
         let results = self
@@ -334,4 +411,17 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
 
         Ok(items)
     }
+
+    async fn create_convention_index<
+        T: IntoTableRef + Send,
+        I: IntoIterator<Item = C> + Send,
+        C: IntoIndexColumn + Send,
+    >(
+        &self,
+        table: T,
+        columns: I,
+    ) -> Result<(), DbErr> {
+        let stmt = self.build_convention_index(table, columns);
+        self.create_index(stmt.to_owned()).await
+    }
 }
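Note: the new `build_convention_index` derives every index name from the table name plus the joined column names, which is what the unique `(official_title, fansub, season)` index in the migration below relies on. A sketch reproducing just the naming convention (table and column strings are illustrative):

// Illustrative: reproduces the "idx_<table>_<col-col-...>" convention.
fn convention_index_name(table: &str, columns: &[&str]) -> String {
    format!("idx_{}_{}", table, columns.join("-"))
}

fn main() {
    assert_eq!(
        convention_index_name("bangumi", &["official_title", "fansub", "season"]),
        "idx_bangumi_official_title-fansub-season"
    );
}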
@ -1,9 +1,10 @@
|
||||
use loco_rs::schema::jsonb_null;
|
||||
use sea_orm_migration::{prelude::*, schema::*};
|
||||
|
||||
use super::defs::{
|
||||
Bangumi, CustomSchemaManagerExt, Episodes, GeneralIds, Subscribers, Subscriptions,
|
||||
};
|
||||
use crate::models::{subscribers::ROOT_SUBSCRIBER, subscriptions};
|
||||
use crate::models::{subscribers::ROOT_SUBSCRIBER_NAME, subscriptions};
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
@ -20,6 +21,7 @@ impl MigrationTrait for Migration {
|
||||
.col(pk_auto(Subscribers::Id))
|
||||
.col(string_len_uniq(Subscribers::Pid, 64))
.col(string(Subscribers::DisplayName))
.col(jsonb_null(Subscribers::BangumiConf))
.to_owned(),
)
.await?;
@ -33,16 +35,16 @@ impl MigrationTrait for Migration {
let insert = Query::insert()
.into_table(Subscribers::Table)
.columns([Subscribers::Pid, Subscribers::DisplayName])
.values_panic([ROOT_SUBSCRIBER.into(), ROOT_SUBSCRIBER.into()])
.values_panic([ROOT_SUBSCRIBER_NAME.into(), ROOT_SUBSCRIBER_NAME.into()])
.to_owned();
manager.exec_stmt(insert).await?;

manager
.create_postgres_enum_for_active_enum(
subscriptions::SubscriptionCategoryEnum,
&[
[
subscriptions::SubscriptionCategory::Mikan,
subscriptions::SubscriptionCategory::Manual,
subscriptions::SubscriptionCategory::Tmdb,
],
)
.await?;
@ -79,13 +81,20 @@ impl MigrationTrait for Migration {
GeneralIds::UpdatedAt,
)
.await?;

manager
.create_table(
table_auto(Bangumi::Table)
.col(pk_auto(Bangumi::Id))
.col(text(Bangumi::DisplayName))
.col(integer(Bangumi::SubscriptionId))
.col(text(Bangumi::DisplayName))
.col(text(Bangumi::OfficialTitle))
.col(string_null(Bangumi::Fansub))
.col(unsigned(Bangumi::Season))
.col(jsonb_null(Bangumi::Filter))
.col(text_null(Bangumi::PosterLink))
.col(text_null(Bangumi::SavePath))
.col(unsigned(Bangumi::LastEp))
.col(jsonb_null(Bangumi::BangumiConfOverride))
.foreign_key(
ForeignKey::create()
.name("fk_bangumi_subscription_id")
@ -94,10 +103,24 @@ impl MigrationTrait for Migration {
.on_update(ForeignKeyAction::Restrict)
.on_delete(ForeignKeyAction::Cascade),
)
.index(
manager
.build_convention_index(
Bangumi::Table,
[Bangumi::OfficialTitle, Bangumi::Fansub, Bangumi::Season],
)
.unique(),
)
.to_owned(),
)
.await?;

futures::try_join!(
manager.create_convention_index(Bangumi::Table, [Bangumi::Fansub]),
manager.create_convention_index(Bangumi::Table, [Bangumi::Season]),
manager.create_convention_index(Bangumi::Table, [Bangumi::OfficialTitle]),
)?;

manager
.create_postgres_auto_update_ts_trigger_for_col(Bangumi::Table, GeneralIds::UpdatedAt)
.await?;
@ -106,9 +129,26 @@ impl MigrationTrait for Migration {
.create_table(
table_auto(Episodes::Table)
.col(pk_auto(Episodes::Id))
.col(text(Episodes::OriginTitle))
.col(text(Episodes::OfficialTitle))
.col(text(Episodes::DisplayName))
.col(text_null(Episodes::NameZh))
.col(text_null(Episodes::NameJp))
.col(text_null(Episodes::NameEn))
.col(text_null(Episodes::SNameZh))
.col(text_null(Episodes::SNameJp))
.col(text_null(Episodes::SNameEn))
.col(integer(Episodes::BangumiId))
.col(text(Episodes::OutputName))
.col(text_null(Episodes::SavePath))
.col(string_null(Episodes::Resolution))
.col(integer(Episodes::Season))
.col(string_null(Episodes::SeasonRaw))
.col(string_null(Episodes::Fansub))
.col(text_null(Episodes::PosterLink))
.col(text_null(Episodes::Homepage))
.col(array_null(Episodes::Subtitle, ColumnType::Text))
.col(text_null(Episodes::Source))
.col(unsigned(Episodes::EpIndex))
.foreign_key(
ForeignKey::create()
.name("fk_episode_bangumi_id")
@ -121,6 +161,13 @@ impl MigrationTrait for Migration {
)
.await?;

futures::try_join!(
manager.create_convention_index(Episodes::Table, [Episodes::OfficialTitle]),
manager.create_convention_index(Episodes::Table, [Episodes::Fansub]),
manager.create_convention_index(Episodes::Table, [Episodes::Season]),
manager.create_convention_index(Episodes::Table, [Episodes::EpIndex]),
)?;

manager
.create_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt)
.await?;
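Note: the unique index over (OfficialTitle, Fansub, Season) added above is what later lets bangumi rows be upserted by their natural key. A minimal sketch of the idea, using sea-orm's sea_query OnConflict (`build_convention_index` itself is a project-local helper, not part of sea-orm-migration):

// Sketch: with that unique index in place, an insert can become an upsert
// that only refreshes the mutable column. `bangumi` is the model module.
use sea_orm::sea_query::OnConflict;

let on_conflict = OnConflict::columns([
    bangumi::Column::OfficialTitle,
    bangumi::Column::Season,
    bangumi::Column::Fansub,
])
.update_columns([bangumi::Column::LastEp])
.to_owned();

This is exactly the conflict target used by `subscriptions::Model::pull_one` further down in this diff.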
@ -2,9 +2,8 @@ use loco_rs::schema::table_auto;
use sea_orm_migration::{prelude::*, schema::*};

use super::defs::*;
use crate::models::prelude::{
downloads::{DownloadMimeEnum, DownloadStatusEnum},
DownloadMime, DownloadStatus,
use crate::models::resources::{
DownloadStatus, DownloadStatusEnum, ResourceCategory, ResourceCategoryEnum,
};

#[derive(DeriveMigrationName)]
@ -15,15 +14,19 @@ impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_postgres_enum_for_active_enum(
DownloadMimeEnum,
&[DownloadMime::OctetStream, DownloadMime::BitTorrent],
ResourceCategoryEnum,
[
ResourceCategory::BitTorrent,
ResourceCategory::OctetStream,
ResourceCategory::Poster,
],
)
.await?;

manager
.create_postgres_enum_for_active_enum(
DownloadStatusEnum,
&[
[
DownloadStatus::Pending,
DownloadStatus::Downloading,
DownloadStatus::Completed,
@ -36,42 +39,47 @@ impl MigrationTrait for Migration {

manager
.create_table(
table_auto(Downloads::Table)
.col(pk_auto(Downloads::Id))
.col(string(Downloads::OriginalName))
.col(string(Downloads::DisplayName))
.col(integer(Downloads::SubscriptionId))
table_auto(Resources::Table)
.col(pk_auto(Resources::Id))
.col(text(Resources::OriginTitle))
.col(text(Resources::DisplayName))
.col(integer(Resources::SubscriptionId))
.col(enumeration(
Downloads::Status,
Resources::Status,
DownloadStatusEnum,
DownloadMime::iden_values(),
ResourceCategory::iden_values(),
))
.col(enumeration(
Downloads::Mime,
DownloadMimeEnum,
DownloadMime::iden_values(),
Resources::Category,
ResourceCategoryEnum,
ResourceCategory::iden_values(),
))
.col(big_unsigned(Downloads::AllSize))
.col(big_unsigned(Downloads::CurrSize))
.col(text(Downloads::Url))
.index(
Index::create()
.table(Downloads::Table)
.col(Downloads::Url)
.name("idx_download_url"),
)
.col(big_integer_null(Resources::AllSize))
.col(big_integer_null(Resources::CurrSize))
.col(text(Resources::Url))
.col(text_null(Resources::Homepage))
.col(text_null(Resources::SavePath))
.foreign_key(
ForeignKey::create()
.name("fk_download_subscription_id")
.from(Downloads::Table, Downloads::SubscriptionId)
.from(Resources::Table, Resources::SubscriptionId)
.to(Subscriptions::Table, Subscriptions::Id)
.on_update(ForeignKeyAction::Restrict)
.on_delete(ForeignKeyAction::Cascade),
)
.index(
manager
.build_convention_index(Resources::Table, [Resources::Url])
.unique(),
)
.to_owned(),
)
.await?;

futures::try_join!(
manager.create_convention_index(Resources::Table, [Resources::Homepage]),
)?;

manager
.create_postgres_auto_update_ts_fn_for_col(GeneralIds::UpdatedAt)
.await?;
@ -80,14 +88,14 @@ impl MigrationTrait for Migration {
.alter_table(
Table::alter()
.table(Episodes::Table)
.add_column_if_not_exists(integer_null(Episodes::DownloadId))
.add_column_if_not_exists(integer_null(Episodes::ResourceId))
.add_foreign_key(
TableForeignKey::new()
.name("fk_episode_download_id")
.name("fk_episode_resource_id")
.from_tbl(Episodes::Table)
.from_col(Episodes::DownloadId)
.to_tbl(Downloads::Table)
.to_col(Downloads::Id)
.from_col(Episodes::ResourceId)
.to_tbl(Resources::Table)
.to_col(Resources::Id)
.on_update(ForeignKeyAction::Restrict)
.on_delete(ForeignKeyAction::SetNull),
)
@ -103,8 +111,8 @@ impl MigrationTrait for Migration {
.alter_table(
Table::alter()
.table(Episodes::Table)
.drop_foreign_key(Alias::new("fk_episode_download_id"))
.drop_column(Episodes::DownloadId)
.drop_foreign_key(Alias::new("fk_episode_resource_id"))
.drop_column(Episodes::ResourceId)
.to_owned(),
)
.await?;
@ -114,11 +122,11 @@ impl MigrationTrait for Migration {
.await?;

manager
.drop_table(Table::drop().table(Downloads::Table).to_owned())
.drop_table(Table::drop().table(Resources::Table).to_owned())
.await?;

manager
.drop_postgres_enum_for_active_enum(DownloadMimeEnum)
.drop_postgres_enum_for_active_enum(ResourceCategoryEnum)
.await?;
manager
.drop_postgres_enum_for_active_enum(DownloadStatusEnum)
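`create_postgres_enum_for_active_enum` / `drop_postgres_enum_for_active_enum` come from the project-local `CustomSchemaManagerExt` trait (see `migrations::defs`). A plausible minimal sketch of what the create side wraps, assuming sea-orm's `Schema::create_enum_from_active_enum`:

// Sketch only: derive a Postgres CREATE TYPE from an ActiveEnum and run it.
use sea_orm::{ActiveEnum, DbBackend, DbErr, Schema};
use sea_orm_migration::SchemaManager;

async fn create_enum_for<A: ActiveEnum>(manager: &SchemaManager<'_>) -> Result<(), DbErr> {
    let schema = Schema::new(DbBackend::Postgres);
    manager
        .create_type(schema.create_enum_from_active_enum::<A>())
        .await
}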
@ -2,7 +2,7 @@ use sea_orm_migration::{prelude::*, schema::*};

use crate::{
migrations::defs::{CustomSchemaManagerExt, Downloaders, GeneralIds, Subscribers},
models::{downloaders::DownloaderCategoryEnum, prelude::DownloaderCategory},
models::downloaders::{DownloaderCategory, DownloaderCategoryEnum},
};

#[derive(DeriveMigrationName)]
@ -14,7 +14,7 @@ impl MigrationTrait for Migration {
manager
.create_postgres_enum_for_active_enum(
DownloaderCategoryEnum,
&[DownloaderCategory::QBittorrent],
[DownloaderCategory::QBittorrent],
)
.await?;

@ -30,7 +30,7 @@ impl MigrationTrait for Migration {
DownloaderCategoryEnum,
DownloaderCategory::iden_values(),
))
.col(text(Downloaders::DownloadPath))
.col(text(Downloaders::SavePath))
.col(integer(Downloaders::SubscriberId))
.foreign_key(
ForeignKey::create()
@ -2,7 +2,7 @@ pub use sea_orm_migration::prelude::*;

pub mod defs;
pub mod m20220101_000001_init;
pub mod m20240224_082543_add_downloads;
pub mod m20240224_082543_add_resources;
pub mod m20240225_060853_subscriber_add_downloader;

pub struct Migrator;
@ -12,7 +12,7 @@ impl MigratorTrait for Migrator {
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
vec![
Box::new(m20220101_000001_init::Migration),
Box::new(m20240224_082543_add_downloads::Migration),
Box::new(m20240224_082543_add_resources::Migration),
Box::new(m20240225_060853_subscriber_add_downloader::Migration),
]
}
@ -1,6 +1,154 @@
use sea_orm::entity::prelude::*;
use std::collections::HashSet;

pub use super::entities::bangumi::*;
use itertools::Itertools;
use regex::Regex;
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};

#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "bangumi_distribution"
)]
#[serde(rename_all = "snake_case")]
pub enum BangumiDistribution {
#[sea_orm(string_value = "movie")]
Movie,
#[sea_orm(string_value = "ova")]
Ova,
#[sea_orm(string_value = "oad")]
Oad,
#[sea_orm(string_value = "sp")]
Sp,
#[sea_orm(string_value = "ex")]
Ex,
#[sea_orm(string_value = "tv")]
Tv,
#[sea_orm(string_value = "unknown")]
Unknown,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
#[serde(rename_all = "snake_case")]
pub enum BangumiRenameMethod {
Pn,
Advance,
SubtitlePn,
SubtitleAdvance,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscribeBangumiConfigOverride {
pub leading_fansub_tag: Option<bool>,
pub complete_history_episodes: Option<bool>,
pub rename_method: Option<BangumiRenameMethod>,
pub remove_bad_torrent: Option<bool>,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct BangumiFilter {
pub plaintext_filters: Option<Vec<String>>,
pub regex_filters: Option<Vec<String>>,
}

#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct BangumiUniqueKey {
pub official_title: String,
pub season: i32,
pub fansub: Option<String>,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "bangumi")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub subscription_id: i32,
pub display_name: String,
pub official_title: String,
pub fansub: Option<String>,
pub season: i32,
pub filter: Option<BangumiFilter>,
pub poster_link: Option<String>,
pub save_path: Option<String>,
pub last_ep: i32,
pub bangumi_conf_override: Option<SubscribeBangumiConfigOverride>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}

impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}

impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

impl BangumiFilter {
pub fn is_match(&self, title: &str) -> eyre::Result<bool> {
if let Some(regex_filters) = &self.regex_filters {
let combined_regex = Regex::new(&regex_filters.join("|"))?;
if combined_regex.is_match(title) {
return Ok(true);
}
} else if let Some(plain_filters) = &self.plaintext_filters {
for f in plain_filters {
if title.contains(f) {
return Ok(true);
}
}
}
Ok(false)
}
}

impl Model {
pub fn get_unique_key(&self) -> BangumiUniqueKey {
BangumiUniqueKey {
official_title: self.official_title.clone(),
season: self.season,
fansub: self.fansub.clone(),
}
}

pub async fn find_by_unique_keys(
db: &DatabaseConnection,
unique_keys: impl Iterator<Item = &BangumiUniqueKey>,
) -> eyre::Result<Vec<Self>> {
let unique_keys = unique_keys.collect::<HashSet<_>>();
let mut found = Entity::find()
.filter(Column::OfficialTitle.is_in(unique_keys.iter().map(|k| &k.official_title)))
.all(db)
.await?;

found = found
.into_iter()
.filter(|m| unique_keys.contains(&m.get_unique_key()))
.collect_vec();

Ok(found)
}
}
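A small usage sketch of `BangumiFilter::is_match` as defined above. Worth noting: because of the `else if`, `plaintext_filters` is only consulted when `regex_filters` is `None`, and all regex filters are OR-ed into a single pattern:

fn filter_demo() -> eyre::Result<()> {
    let filter = BangumiFilter {
        plaintext_filters: Some(vec!["简日".to_string()]),
        regex_filters: Some(vec![r"1080[pP]".to_string(), r"HEVC".to_string()]),
    };
    // Joined as "1080[pP]|HEVC", so either alternative matches.
    assert!(filter.is_match("[Fansub] Title - 01 [1080p]")?);
    // The plaintext branch is skipped entirely while regex_filters is Some.
    assert!(!filter.is_match("[Fansub] Title - 01 [简日]")?);
    Ok(())
}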
@ -1,7 +1,51 @@
use sea_orm::prelude::*;
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use url::Url;

pub use crate::models::entities::downloaders::*;
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "downloader_category"
)]
#[serde(rename_all = "snake_case")]
pub enum DownloaderCategory {
#[sea_orm(string_value = "qbittorrent")]
QBittorrent,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloaders")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub category: DownloaderCategory,
pub endpoint: String,
pub password: String,
pub username: String,
pub subscriber_id: i32,
pub save_path: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
}

impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
@ -1,90 +0,0 @@
use loco_rs::app::AppContext;
use sea_orm::{prelude::*, sea_query::OnConflict, ActiveValue, Condition, QueryOrder, QuerySelect};

pub use crate::models::entities::downloads::*;
use crate::{
models::subscriptions::{self, SubscriptionCategory},
parsers::mikan::{parse_mikan_rss_items_from_rss_link, MikanRssItem},
};

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

impl ActiveModel {
pub fn from_mikan_rss_item(m: MikanRssItem, subscription_id: i32) -> Self {
let _ = Self {
origin_name: ActiveValue::Set(m.title.clone()),
display_name: ActiveValue::Set(m.title),
subscription_id: ActiveValue::Set(subscription_id),
status: ActiveValue::Set(DownloadStatus::Pending),
mime: ActiveValue::Set(DownloadMime::BitTorrent),
url: ActiveValue::Set(m.url),
curr_size: ActiveValue::Set(m.content_length.as_ref().map(|_| 0)),
all_size: ActiveValue::Set(m.content_length),
homepage: ActiveValue::Set(m.homepage),
..Default::default()
};
todo!()
}
}

impl Model {
pub async fn pull_subscription(
ctx: AppContext,
item: &subscriptions::Model,
) -> eyre::Result<Vec<i32>> {
let db = &ctx.db;
match &item.category {
SubscriptionCategory::Mikan => {
let items = parse_mikan_rss_items_from_rss_link(&item.source_url).await?;
let all_items = items.collect::<Vec<_>>();

let last_old_id = {
Entity::find()
.select_only()
.column(Column::Id)
.order_by_desc(Column::Id)
.filter(Column::SubscriptionId.eq(item.id))
.one(db)
.await?
}
.map(|i| i.id);

if all_items.is_empty() {
return Ok(vec![]);
}

let new_items = all_items
.into_iter()
.map(|i| ActiveModel::from_mikan_rss_item(i, item.id));

let insert_result = Entity::insert_many(new_items)
.on_conflict(OnConflict::column(Column::Url).do_nothing().to_owned())
.exec(db)
.await?;

let insert_ids = Entity::find()
.select_only()
.column(Column::Id)
.filter({
let mut cond = Condition::all()
.add(Column::SubscriptionId.eq(item.id))
.add(Column::Id.lte(insert_result.last_insert_id));

if let Some(last_old_id) = last_old_id {
cond = cond.add(Column::Id.gt(last_old_id))
}

cond
})
.all(db)
.await?;

Ok(insert_ids.into_iter().map(|i| i.id).collect::<Vec<_>>())
}
_ => {
todo!("other subscription categories")
}
}
}
}
@ -1,52 +0,0 @@
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct BangumiFilter {
pub name: Option<Vec<String>>,
pub group: Option<Vec<String>>,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "bangumi")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub subscription_id: i32,
pub display_name: String,
pub official_title: String,
pub season: i32,
pub season_raw: Option<String>,
pub fansub: Option<String>,
pub filter: Option<BangumiFilter>,
pub rss_link: Option<String>,
pub poster_link: Option<String>,
pub save_path: Option<String>,
pub deleted: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}

impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}

impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
@ -1,45 +0,0 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "downloader_type")]
#[serde(rename_all = "snake_case")]
pub enum DownloaderCategory {
#[sea_orm(string_value = "qbittorrent")]
QBittorrent,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloaders")]
pub struct Model {
#[sea_orm(column_type = "Timestamp")]
pub created_at: DateTime,
#[sea_orm(column_type = "Timestamp")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub category: DownloaderCategory,
pub endpoint: String,
pub password: String,
pub username: String,
pub subscriber_id: i32,
pub save_path: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
}

impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
@ -1,77 +0,0 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_status")]
#[serde(rename_all = "snake_case")]
pub enum DownloadStatus {
#[sea_orm(string_value = "pending")]
Pending,
#[sea_orm(string_value = "downloading")]
Downloading,
#[sea_orm(string_value = "paused")]
Paused,
#[sea_orm(string_value = "completed")]
Completed,
#[sea_orm(string_value = "failed")]
Failed,
#[sea_orm(string_value = "deleted")]
Deleted,
}

#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_mime")]
pub enum DownloadMime {
#[sea_orm(string_value = "application/octet-stream")]
#[serde(rename = "application/octet-stream")]
OctetStream,
#[sea_orm(string_value = "application/x-bittorrent")]
#[serde(rename = "application/x-bittorrent")]
BitTorrent,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloads")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub origin_name: String,
pub display_name: String,
pub subscription_id: i32,
pub status: DownloadStatus,
pub mime: DownloadMime,
pub url: String,
pub all_size: Option<u64>,
pub curr_size: Option<u64>,
pub homepage: Option<String>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}

impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}

impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}
@ -1,62 +0,0 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "episodes")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub raw_name: String,
pub official_title: String,
pub display_name: String,
pub name_zh: Option<String>,
pub name_jp: Option<String>,
pub name_en: Option<String>,
pub s_name_zh: Option<String>,
pub s_name_jp: Option<String>,
pub s_name_en: Option<String>,
pub bangumi_id: i32,
pub download_id: i32,
pub save_path: String,
pub resolution: Option<String>,
pub season: i32,
pub season_raw: Option<String>,
pub fansub: Option<String>,
pub poster_link: Option<String>,
pub home_page: Option<String>,
pub subtitle: Option<Vec<String>>,
pub deleted: bool,
pub source: Option<String>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::bangumi::Entity",
from = "Column::BangumiId",
to = "super::bangumi::Column::Id"
)]
Bangumi,
#[sea_orm(
belongs_to = "super::downloads::Entity",
from = "Column::DownloadId",
to = "super::downloads::Column::Id"
)]
Downloads,
}

impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}

impl Related<super::downloads::Entity> for Entity {
fn to() -> RelationDef {
Relation::Downloads.def()
}
}
@ -1,7 +0,0 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.4
pub mod bangumi;
pub mod downloads;
pub mod episodes;
pub mod subscribers;
pub mod subscriptions;
pub mod downloaders;
@ -1,47 +0,0 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2

use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscriberBangumiConfig {
pub leading_group_tag: Option<bool>,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscribers")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
#[sea_orm(unique)]
pub pid: String,
pub display_name: String,
pub downloader_id: Option<i32>,
pub bangumi_conf: SubscriberBangumiConfig,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(
belongs_to = "super::downloaders::Entity",
from = "Column::DownloaderId",
to = "super::downloaders::Column::Id"
)]
Downloader,
}

impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}

impl Related<super::downloaders::Entity> for Entity {
fn to() -> RelationDef {
Relation::Downloader.def()
}
}
@ -1,59 +0,0 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
#[sea_orm(string_value = "mikan")]
Mikan,
#[sea_orm(string_value = "manual")]
Manual,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
#[sea_orm(column_type = "Timestamp")]
pub created_at: DateTime,
#[sea_orm(column_type = "Timestamp")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub display_name: String,
pub subscriber_id: i32,
pub category: SubscriptionCategory,
pub source_url: String,
pub aggregate: bool,
pub enabled: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
#[sea_orm(has_many = "super::bangumi::Entity")]
Bangumi,
}

impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}

impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}
@ -1,6 +1,102 @@
use sea_orm::entity::prelude::*;
use sea_orm::{entity::prelude::*, ActiveValue};
use serde::{Deserialize, Serialize};

pub use super::entities::episodes::*;
use crate::{
models::resources,
parsers::{mikan::MikanEpisodeMeta, raw::RawEpisodeMeta},
};

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "episodes")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub origin_title: String,
pub official_title: String,
pub display_name: String,
pub name_zh: Option<String>,
pub name_jp: Option<String>,
pub name_en: Option<String>,
pub s_name_zh: Option<String>,
pub s_name_jp: Option<String>,
pub s_name_en: Option<String>,
pub bangumi_id: i32,
pub resource_id: Option<i32>,
pub save_path: Option<String>,
pub resolution: Option<String>,
pub season: i32,
pub season_raw: Option<String>,
pub fansub: Option<String>,
pub poster_link: Option<String>,
pub homepage: Option<String>,
pub subtitle: Option<Vec<String>>,
pub source: Option<String>,
pub ep_index: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::bangumi::Entity",
from = "Column::BangumiId",
to = "super::bangumi::Column::Id"
)]
Bangumi,
#[sea_orm(
belongs_to = "super::resources::Entity",
from = "Column::ResourceId",
to = "super::resources::Column::Id"
)]
Resources,
}

impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}

impl Related<super::resources::Entity> for Entity {
fn to() -> RelationDef {
Relation::Resources.def()
}
}

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

impl ActiveModel {
pub fn from_mikan_meta(
bangumi_id: i32,
resource: resources::Model,
raw_meta: RawEpisodeMeta,
mikan_meta: MikanEpisodeMeta,
mikan_poster: Option<String>,
) -> Self {
Self {
origin_title: ActiveValue::Set(resource.origin_title),
official_title: ActiveValue::Set(mikan_meta.official_title.clone()),
display_name: ActiveValue::Set(mikan_meta.official_title),
name_zh: ActiveValue::Set(raw_meta.name_zh),
name_jp: ActiveValue::Set(raw_meta.name_jp),
name_en: ActiveValue::Set(raw_meta.name_en),
s_name_zh: ActiveValue::Set(raw_meta.s_name_zh),
s_name_jp: ActiveValue::Set(raw_meta.s_name_jp),
s_name_en: ActiveValue::Set(raw_meta.s_name_en),
bangumi_id: ActiveValue::Set(bangumi_id),
resource_id: ActiveValue::Set(Some(resource.id)),
resolution: ActiveValue::Set(raw_meta.resolution),
season: ActiveValue::Set(raw_meta.season),
season_raw: ActiveValue::Set(raw_meta.season_raw),
fansub: ActiveValue::Set(raw_meta.fansub),
poster_link: ActiveValue::Set(mikan_poster),
homepage: ActiveValue::Set(resource.homepage),
subtitle: ActiveValue::Set(raw_meta.sub),
source: ActiveValue::Set(raw_meta.source),
ep_index: ActiveValue::Set(raw_meta.episode_index),
..Default::default()
}
}
}
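A hypothetical call site for `from_mikan_meta`, to make the field mapping above concrete (the variable names are illustrative; `raw_meta` and `mikan_meta` come from the parsers imported at the top of the file):

// Sketch: id, created_at/updated_at and save_path stay NotSet via
// `..Default::default()`, so the database and ActiveModelBehavior fill them.
let ep = episodes::ActiveModel::from_mikan_meta(
    bgm.id,              // parent bangumi row
    resource,            // resources::Model the episode was parsed from
    raw_meta,            // RawEpisodeMeta from parse_episode_meta_from_raw_name
    mikan_meta,          // MikanEpisodeMeta from the mikan homepage parser
    poster_link.clone(), // Option<String> save path of a poster resource
);
episodes::Entity::insert(ep).exec(db).await?;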
@ -1,9 +1,8 @@
pub mod bangumi;
pub mod downloaders;
pub mod downloads;
pub mod entities;
pub mod episodes;
pub mod notifications;
pub mod prelude;
pub mod resources;
pub mod subscribers;
pub mod subscriptions;
@ -4,6 +4,6 @@ use serde::{Deserialize, Serialize};
pub struct Notification {
official_title: String,
season: i32,
episode_size: u32,
episode_size: i32,
poster_url: Option<String>,
}
@ -1,8 +1,8 @@
pub use super::{
bangumi::{self, Entity as Bangumi},
downloaders::{self, DownloaderCategory, Entity as Downloader},
downloads::{self, DownloadMime, DownloadStatus, Entity as Download},
episodes::{self, Entity as Episode},
resources::{self, DownloadStatus, Entity as Download, ResourceCategory},
subscribers::{self, Entity as Subscriber},
subscriptions::{self, Entity as Subscription, SubscriptionCategory},
};
187 crates/recorder/src/models/resources.rs Normal file
@ -0,0 +1,187 @@
use std::future::Future;

use bytes::Bytes;
use loco_rs::app::AppContext;
use sea_orm::{entity::prelude::*, ActiveValue, TryIntoModel};
use serde::{Deserialize, Serialize};
use url::Url;

use crate::{
parsers::{errors::ParseError, mikan::MikanRssItem},
path::extract_extname_from_url,
storage::{AppContextDalExt, DalContentType},
};

#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_status")]
#[serde(rename_all = "snake_case")]
pub enum DownloadStatus {
#[sea_orm(string_value = "pending")]
Pending,
#[sea_orm(string_value = "downloading")]
Downloading,
#[sea_orm(string_value = "paused")]
Paused,
#[sea_orm(string_value = "completed")]
Completed,
#[sea_orm(string_value = "failed")]
Failed,
#[sea_orm(string_value = "deleted")]
Deleted,
}

#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "resource_category")]
pub enum ResourceCategory {
#[sea_orm(string_value = "octet-stream")]
#[serde(rename = "octet-stream")]
OctetStream,
#[sea_orm(string_value = "bittorrent")]
#[serde(rename = "bittorrent")]
BitTorrent,
#[sea_orm(string_value = "poster")]
#[serde(rename = "poster")]
Poster,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "resources")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub origin_title: String,
pub display_name: String,
pub subscription_id: i32,
pub status: DownloadStatus,
pub category: ResourceCategory,
pub url: String,
pub all_size: Option<i64>,
pub curr_size: Option<i64>,
pub homepage: Option<String>,
pub save_path: Option<String>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}

impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}

impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

impl ActiveModel {
pub fn from_mikan_rss_item(rss_item: MikanRssItem, subscription_id: i32) -> Self {
let resource_category = rss_item.get_resource_category();
Self {
origin_title: ActiveValue::Set(rss_item.title.clone()),
display_name: ActiveValue::Set(rss_item.title),
subscription_id: ActiveValue::Set(subscription_id),
status: ActiveValue::Set(DownloadStatus::Pending),
category: ActiveValue::Set(resource_category),
url: ActiveValue::Set(rss_item.url),
all_size: ActiveValue::Set(rss_item.content_length),
curr_size: ActiveValue::Set(Some(0)),
homepage: ActiveValue::Set(rss_item.homepage),
..Default::default()
}
}

pub fn from_poster_url(
subscription_id: i32,
origin_title: String,
url: Url,
save_path: Option<String>,
content_length: i64,
) -> Self {
Self {
origin_title: ActiveValue::Set(origin_title.clone()),
display_name: ActiveValue::Set(origin_title),
subscription_id: ActiveValue::Set(subscription_id),
status: ActiveValue::Set(DownloadStatus::Completed),
category: ActiveValue::Set(ResourceCategory::Poster),
url: ActiveValue::Set(url.to_string()),
all_size: ActiveValue::Set(Some(content_length)),
curr_size: ActiveValue::Set(Some(content_length)),
save_path: ActiveValue::Set(save_path),
..Default::default()
}
}
}

impl Model {
pub async fn from_poster_url<F, R, E>(
ctx: &AppContext,
subscriber_pid: &str,
subscription_id: i32,
original_title: String,
url: Url,
fetch_fn: F,
) -> eyre::Result<Self>
where
F: FnOnce(Url) -> R,
R: Future<Output = Result<Bytes, E>>,
E: Into<eyre::Report>,
{
let db = &ctx.db;
let found = Entity::find()
.filter(
Column::SubscriptionId
.eq(subscription_id)
.and(Column::Url.eq(url.as_str())),
)
.one(db)
.await?;

let resource = if let Some(found) = found {
found
} else {
let bytes = fetch_fn(url.clone()).await.map_err(|e| e.into())?;
let content_length = bytes.len() as i64;
let dal = ctx.get_dal_unwrap().await;
let extname = extract_extname_from_url(&url)
.ok_or_else(|| ParseError::ParseExtnameError(url.to_string()))?;
let stored_url = dal
.store_blob(DalContentType::Poster, &extname, bytes, subscriber_pid)
.await?;
let saved_path = Some(stored_url.to_string());

let new_resource = ActiveModel::from_poster_url(
subscription_id,
original_title,
url,
saved_path,
content_length,
);

let new_resource = new_resource.save(db).await?;
new_resource.try_into_model()?
};

Ok(resource)
}
}
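A usage sketch for `Model::from_poster_url` above: the fetch closure is only awaited on a cache miss (no existing row with the same subscription id and url), which is what makes repeated poster downloads idempotent. The `mikan_client.fetch_bytes` call mirrors the one seen later in `subscriptions::Model::pull_one`:

// Sketch of the caller side.
let poster = resources::Model::from_poster_url(
    ctx,
    &subscriber.pid,
    subscription_id,
    official_title.clone(),
    poster_url,
    // Only invoked when the resource is not already recorded.
    |url| mikan_client.fetch_bytes(|f| f.get(url)),
)
.await?;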
@ -1,14 +1,70 @@
use loco_rs::model::{ModelError, ModelResult};
use sea_orm::{entity::prelude::*, ActiveValue, TransactionTrait};
use sea_orm::{entity::prelude::*, ActiveValue, FromJsonQueryResult, TransactionTrait};
use serde::{Deserialize, Serialize};

pub use super::entities::subscribers::*;
use super::bangumi::BangumiRenameMethod;

pub const ROOT_SUBSCRIBER: &str = "konobangu";
pub const ROOT_SUBSCRIBER_NAME: &str = "konobangu";

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscribeBangumiConfig {
pub leading_fansub_tag: bool,
pub complete_history_episodes: bool,
pub rename_method: BangumiRenameMethod,
pub remove_bad_torrent: bool,
}

impl Default for SubscribeBangumiConfig {
fn default() -> Self {
Self {
leading_fansub_tag: false,
complete_history_episodes: false,
rename_method: BangumiRenameMethod::Pn,
remove_bad_torrent: false,
}
}
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscribers")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub pid: String,
pub display_name: String,
pub downloader_id: Option<i32>,
pub bangumi_conf: Option<SubscribeBangumiConfig>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(
belongs_to = "super::downloaders::Entity",
from = "Column::DownloaderId",
to = "super::downloaders::Column::Id"
)]
Downloader,
}

impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}

impl Related<super::downloaders::Entity> for Entity {
fn to() -> RelationDef {
Relation::Downloader.def()
}
}

#[derive(Debug, Deserialize, Serialize)]
pub struct SubscriberIdParams {
pub id: String,
pub pid: String,
}

#[async_trait::async_trait]
@ -19,7 +75,9 @@ impl ActiveModelBehavior for ActiveModel {
{
if insert {
let mut this = self;
this.pid = ActiveValue::Set(Uuid::new_v4().to_string());
if this.pid.is_not_set() {
this.pid = ActiveValue::Set(Uuid::new_v4().to_string());
};
Ok(this)
} else {
Ok(self)
@ -28,36 +86,26 @@ impl ActiveModelBehavior for ActiveModel {
}

impl Model {
/// finds a user by the provided pid
///
/// # Errors
///
/// When could not find user or DB query error
pub async fn find_by_pid(db: &DatabaseConnection, pid: &str) -> ModelResult<Self> {
let parse_uuid = Uuid::parse_str(pid).map_err(|e| ModelError::Any(e.into()))?;
let subscriber = Entity::find()
.filter(Column::Pid.eq(parse_uuid))
.one(db)
.await?;
let subscriber = Entity::find().filter(Column::Pid.eq(pid)).one(db).await?;
subscriber.ok_or_else(|| ModelError::EntityNotFound)
}

pub async fn find_by_id(db: &DatabaseConnection, id: i32) -> ModelResult<Self> {
let subscriber = Entity::find().filter(Column::Id.eq(id)).one(db).await?;
subscriber.ok_or_else(|| ModelError::EntityNotFound)
}

pub async fn find_root(db: &DatabaseConnection) -> ModelResult<Self> {
Self::find_by_pid(db, ROOT_SUBSCRIBER).await
Self::find_by_pid(db, ROOT_SUBSCRIBER_NAME).await
}

/// Asynchronously creates a user with a password and saves it to the
/// database.
///
/// # Errors
///
/// When could not save the user into the DB
pub async fn create_root(db: &DatabaseConnection) -> ModelResult<Self> {
let txn = db.begin().await?;

let user = ActiveModel {
display_name: ActiveValue::set(ROOT_SUBSCRIBER.to_string()),
pid: ActiveValue::set(ROOT_SUBSCRIBER.to_string()),
display_name: ActiveValue::set(ROOT_SUBSCRIBER_NAME.to_string()),
pid: ActiveValue::set(ROOT_SUBSCRIBER_NAME.to_string()),
..Default::default()
}
.insert(&txn)
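The `before_save` change above makes the pid assignment conditional, so a caller may now pin a pid (as `create_root` does with ROOT_SUBSCRIBER_NAME) while ordinary inserts still get a random UUID. A sketch of both paths:

// Sketch: an explicit pid is preserved; an unset pid gets a v4 UUID on insert.
let root = ActiveModel {
    pid: ActiveValue::set(ROOT_SUBSCRIBER_NAME.to_string()), // kept as-is
    display_name: ActiveValue::set(ROOT_SUBSCRIBER_NAME.to_string()),
    ..Default::default()
};

let regular = ActiveModel {
    display_name: ActiveValue::set("someone".to_string()),
    ..Default::default() // pid is NotSet, so before_save generates a UUID
};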
@ -1,7 +1,83 @@
use sea_orm::{entity::prelude::*, ActiveValue};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

pub use super::entities::subscriptions::{self, *};
use itertools::Itertools;
use loco_rs::app::AppContext;
use sea_orm::{
entity::prelude::*,
sea_query::{InsertStatement, OnConflict},
ActiveValue,
};
use serde::{Deserialize, Serialize};
use tracing::{event, instrument, Level};

#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
#[sea_orm(string_value = "mikan")]
Mikan,
#[sea_orm(string_value = "tmdb")]
Tmdb,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
#[sea_orm(column_type = "Timestamp")]
pub created_at: DateTime,
#[sea_orm(column_type = "Timestamp")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub display_name: String,
pub subscriber_id: i32,
pub category: SubscriptionCategory,
pub source_url: String,
pub aggregate: bool,
pub enabled: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
#[sea_orm(has_many = "super::bangumi::Entity")]
Bangumi,
}

impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}

impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}

use crate::{
models::{bangumi, episodes, resources, subscribers},
parsers::{
mikan::{
parse_episode_meta_from_mikan_homepage, parse_mikan_rss_items_from_rss_link,
MikanClient, MikanEpisodeMeta,
},
raw::{parse_episode_meta_from_raw_name, RawEpisodeMeta},
},
utils::db::insert_many_with_returning_all,
};

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct SubscriptionCreateFromRssDto {
@ -12,7 +88,7 @@ pub struct SubscriptionCreateFromRssDto {
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(tag = "category")]
#[serde(tag = "category", rename_all = "snake_case")]
pub enum SubscriptionCreateDto {
Mikan(SubscriptionCreateFromRssDto),
}
@ -80,4 +156,188 @@ impl Model {
.await?;
Ok(())
}

#[instrument(
fields(subscriber_id = "self.subscriber_id", subscription_id = "self.id"),
skip(self, ctx)
)]
pub async fn pull_one(
&self,
ctx: &AppContext,
subscriber: &subscribers::Model,
) -> eyre::Result<()> {
let db = &ctx.db;
let subscription = self;
let subscription_id = subscription.id;
match &subscription.category {
SubscriptionCategory::Mikan => {
let subscriber_id = subscription.subscriber_id;
let mikan_client = MikanClient::new(subscriber_id).await?;
let mikan_rss_items =
parse_mikan_rss_items_from_rss_link(&mikan_client, &subscription.source_url)
.await?;
let all_items = mikan_rss_items.collect::<Vec<_>>();

if all_items.is_empty() {
return Ok(());
}

let new_resources = all_items
.into_iter()
.map(|rss_item| {
resources::ActiveModel::from_mikan_rss_item(rss_item, subscription.id)
})
.collect_vec();

// insert and filter out duplicated items
let new_resources: Vec<resources::Model> = insert_many_with_returning_all(
db,
new_resources,
|stat: &mut InsertStatement| {
stat.on_conflict(
OnConflict::column(resources::Column::Url)
.do_nothing()
.to_owned(),
);
},
)
.await?;

pub struct MikanEpMetaBundle {
pub resource: resources::Model,
pub mikan: MikanEpisodeMeta,
pub raw: RawEpisodeMeta,
pub poster: Option<String>,
}

let mut ep_metas: HashMap<bangumi::BangumiUniqueKey, Vec<MikanEpMetaBundle>> =
HashMap::new();
{
for r in new_resources {
let mut mikan_meta = if let Some(homepage) = r.homepage.as_deref() {
match parse_episode_meta_from_mikan_homepage(&mikan_client, homepage)
.await
{
Ok(mikan_meta) => mikan_meta,
Err(e) => {
let error: &dyn std::error::Error = e.as_ref();
event!(
Level::ERROR,
desc = "failed to parse episode meta from mikan homepage",
homepage = homepage,
error = error
);
continue;
}
}
} else {
continue;
};
let mikan_poster_link =
if let Some(poster_url) = mikan_meta.poster_url.take() {
let poster_url_str = poster_url.to_string();
let poster_resource_result = resources::Model::from_poster_url(
ctx,
&subscriber.pid,
subscription_id,
mikan_meta.official_title.clone(),
poster_url,
|url| mikan_client.fetch_bytes(|f| f.get(url)),
)
.await;
match poster_resource_result {
Ok(resource) => resource.save_path,
Err(e) => {
let error: &dyn std::error::Error = e.as_ref();
event!(
Level::ERROR,
desc = "failed to fetch mikan meta poster",
poster_url = poster_url_str,
error = error
);
None
}
}
} else {
None
};
let raw_meta = match parse_episode_meta_from_raw_name(&r.origin_title) {
Ok(raw_meta) => raw_meta,
Err(e) => {
let error: &dyn std::error::Error = e.as_ref();
event!(
Level::ERROR,
desc = "failed to parse episode meta from origin name",
origin_name = &r.origin_title,
error = error
);
continue;
}
};
let key = bangumi::BangumiUniqueKey {
official_title: mikan_meta.official_title.clone(),
season: raw_meta.season,
fansub: raw_meta.fansub.clone(),
};
let meta = MikanEpMetaBundle {
resource: r,
mikan: mikan_meta,
raw: raw_meta,
poster: mikan_poster_link,
};
ep_metas.entry(key).or_default().push(meta);
}
}

for (_, eps) in ep_metas {
let meta = eps.first().unwrap_or_else(|| {
unreachable!(
"subscriptions pull one bangumi must have at least one episode meta"
)
});
let last_ep = eps.iter().fold(0, |acc, ep| acc.max(ep.raw.episode_index));
let official_title = &meta.mikan.official_title;
let bgm = bangumi::ActiveModel {
subscription_id: ActiveValue::Set(subscription_id),
display_name: ActiveValue::Set(official_title.clone()),
official_title: ActiveValue::Set(official_title.clone()),
fansub: ActiveValue::Set(meta.raw.fansub.clone()),
season: ActiveValue::Set(meta.raw.season),
poster_link: ActiveValue::Set(meta.poster.clone()),
last_ep: ActiveValue::Set(last_ep),
..Default::default()
};

let bgm = bangumi::Entity::insert(bgm)
.on_conflict(
OnConflict::columns([
bangumi::Column::OfficialTitle,
bangumi::Column::Season,
bangumi::Column::Fansub,
])
.update_columns([bangumi::Column::LastEp])
.to_owned(),
)
.exec_with_returning(db)
.await?;

let eps = eps.into_iter().map(|ep| {
episodes::ActiveModel::from_mikan_meta(
bgm.id,
ep.resource,
ep.raw,
ep.mikan,
ep.poster,
)
});
episodes::Entity::insert_many(eps).exec(db).await?;
}

Ok(())
}
_ => {
todo!("other subscription categories")
}
}
}
}
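A sketch of how `pull_one` might be driven for a subscriber, using only the Entity/Column names from the model above (the driver itself is not part of this diff and is illustrative only):

// Hypothetical driver loop: pull every enabled subscription of a subscriber.
async fn pull_all(ctx: &AppContext, subscriber: &subscribers::Model) -> eyre::Result<()> {
    let subs = Entity::find()
        .filter(Column::SubscriberId.eq(subscriber.id))
        .filter(Column::Enabled.eq(true))
        .all(&ctx.db)
        .await?;
    for sub in subs {
        sub.pull_one(ctx, subscriber).await?;
    }
    Ok(())
}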
@ -5,11 +5,6 @@ use lazy_static::lazy_static;
use maplit::hashmap;
use regex::Regex;

const LANG_ZH_TW: &str = "zh-tw";
const LANG_ZH: &str = "zh";
const LANG_EN: &str = "en";
const LANG_JP: &str = "jp";

lazy_static! {
pub static ref SEASON_REGEX: Regex =
Regex::new(r"(S\|[Ss]eason\s+)(\d+)").expect("Invalid regex");
@ -26,14 +21,6 @@ lazy_static! {
FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
];
pub static ref SUBTITLE_LANG: Vec<(&'static str, Vec<&'static str>)> = {
vec![
(LANG_ZH_TW, vec!["tc", "cht", "繁", "zh-tw"]),
(LANG_ZH, vec!["sc", "chs", "简", "zh", "zh-cn"]),
(LANG_EN, vec!["en", "eng", "英"]),
(LANG_JP, vec!["jp", "jpn", "日"]),
]
};
pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
@ -12,4 +12,12 @@ pub enum ParseError {
expected: String,
found: String,
},
#[error("Parse language tag error: {0}")]
LanguageTagError(#[from] oxilangtag::LanguageTagParseError),
#[error("Unsupported language preset: {0}")]
UnsupportedLanguagePreset(String),
#[error("Parse episode meta error, get empty official title, homepage = {0}")]
MikanEpisodeMetaEmptyOfficialTitleError(String),
#[error("Parse extname error from source = {0}")]
ParseExtnameError(String),
}
31 crates/recorder/src/parsers/mikan/mikan_client.rs Normal file
@ -0,0 +1,31 @@
use std::{ops::Deref, sync::Arc};

use tokio::sync::OnceCell;

use crate::downloaders::ApiClient;

pub struct MikanClient {
api_client: ApiClient,
}

static MIKAN_CLIENT: OnceCell<Arc<MikanClient>> = OnceCell::const_new();

impl MikanClient {
pub async fn new(_subscriber_id: i32) -> eyre::Result<Arc<Self>> {
let res = MIKAN_CLIENT
.get_or_try_init(|| async {
ApiClient::new(std::time::Duration::from_millis(50), None)
.map(|api_client| Arc::new(Self { api_client }))
})
.await?;
Ok(res.clone())
}
}

impl Deref for MikanClient {
type Target = ApiClient;

fn deref(&self) -> &Self::Target {
&self.api_client
}
}
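The OnceCell above makes `MikanClient::new` a process-wide singleton: every call returns a clone of the same Arc, and `_subscriber_id` is currently ignored. A small sketch:

// Sketch: both handles point at the same underlying ApiClient.
let a = MikanClient::new(1).await?;
let b = MikanClient::new(2).await?;
assert!(Arc::ptr_eq(&a, &b));

// Via Deref, ApiClient methods are callable directly on the client,
// e.g. a.fetch_text(|f| f.get(url)), as the parsers below do.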
@ -3,17 +3,26 @@ use html_escape::decode_html_entities;
use lazy_static::lazy_static;
use lightningcss::{properties::Property, values::image::Image};
use regex::Regex;
use reqwest::IntoUrl;
use tracing::instrument;
use url::Url;

use crate::{
downloaders::{html::download_html, image::download_image},
parsers::html::{get_tag_style, query_selector_first_tag},
use crate::parsers::{
errors::ParseError,
html::{get_tag_style, query_selector_first_tag},
mikan::mikan_client::MikanClient,
};

#[derive(Clone, Debug)]
pub struct MikanEpisodeMetaPosterBlob {
pub origin_url: Url,
pub data: Bytes,
}

#[derive(Clone, Debug)]
pub struct MikanEpisodeMeta {
pub homepage: Url,
pub poster_data: Option<Bytes>,
pub origin_poster_src: Option<Url>,
pub poster_url: Option<Url>,
pub official_title: String,
}

@ -21,11 +30,14 @@ lazy_static! {
static ref MIKAN_TITLE_SEASON: Regex = Regex::new("第.*季").unwrap();
}

#[instrument(skip(client, url))]
pub async fn parse_episode_meta_from_mikan_homepage(
url: Url,
) -> eyre::Result<Option<MikanEpisodeMeta>> {
client: &MikanClient,
url: impl IntoUrl,
) -> eyre::Result<MikanEpisodeMeta> {
let url = url.into_url()?;
let url_host = url.origin().unicode_serialization();
let content = download_html(url.as_str()).await?;
let content = client.fetch_text(|f| f.get(url.clone())).await?;
let dom = tl::parse(&content, tl::ParserOptions::default())?;
let parser = dom.parser();
let poster_node = query_selector_first_tag(&dom, r"div.bangumi-poster", parser);
@ -61,12 +73,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
p.set_query(None);
p
});
let poster_data = if let Some(p) = origin_poster_src.as_ref() {
download_image(p.clone()).await.ok()
} else {
None
};
let meta = official_title_node
let official_title = official_title_node
.map(|s| s.inner_text(parser))
.and_then(|official_title| {
let title = MIKAN_TITLE_SEASON
@ -79,13 +86,13 @@ pub async fn parse_episode_meta_from_mikan_homepage(
Some(title)
}
})
.map(|title| MikanEpisodeMeta {
homepage: url,
poster_data,
official_title: title,
origin_poster_src,
});
Ok(meta)
.ok_or_else(|| ParseError::MikanEpisodeMetaEmptyOfficialTitleError(url.to_string()))?;

Ok(MikanEpisodeMeta {
homepage: url,
poster_url: origin_poster_src,
official_title,
})
}

#[cfg(test)]
@ -93,6 +100,7 @@ mod test {
use url::Url;

use super::parse_episode_meta_from_mikan_homepage;
use crate::parsers::mikan::mikan_client::MikanClient;

#[tokio::test]
async fn test_parse_mikan() {
@ -101,22 +109,18 @@ mod test {
"https://mikanani.me/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a";
let url = Url::parse(url_str)?;

if let Some(ep_meta) = parse_episode_meta_from_mikan_homepage(url.clone()).await? {
let client = MikanClient::new(0).await.expect("should get mikan client");

let ep_meta = parse_episode_meta_from_mikan_homepage(&client, url.clone()).await?;
{
assert_eq!(ep_meta.homepage, url);
assert_eq!(ep_meta.official_title, "葬送的芙莉莲");
assert_eq!(
ep_meta.origin_poster_src,
ep_meta.poster_url.clone(),
Some(Url::parse(
"https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
)?)
);
let u8_data = ep_meta.poster_data.expect("should have poster data");
assert!(
u8_data.starts_with(&[255, 216, 255, 224]),
"should start with valid jpeg data magic number"
);
} else {
panic!("can not find mikan episode title")
}

Ok(())
@ -3,8 +3,9 @@ use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};

use crate::{
    downloaders::{bytes::download_bytes, defs::BITTORRENT_MIME_TYPE},
    parsers::errors::ParseError,
    downloaders::defs::BITTORRENT_MIME_TYPE,
    models::prelude::ResourceCategory,
    parsers::{errors::ParseError, mikan::mikan_client::MikanClient},
};

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@ -12,11 +13,17 @@ pub struct MikanRssItem {
    pub title: String,
    pub homepage: Option<String>,
    pub url: String,
    pub content_length: Option<u64>,
    pub content_length: Option<i64>,
    pub mime: String,
    pub pub_date: Option<i64>,
}

impl MikanRssItem {
    pub fn get_resource_category(&self) -> ResourceCategory {
        ResourceCategory::BitTorrent
    }
}

impl TryFrom<rss::Item> for MikanRssItem {
    type Error = ParseError;

@ -50,9 +57,10 @@ impl TryFrom<rss::Item> for MikanRssItem {
}

pub async fn parse_mikan_rss_items_from_rss_link(
    client: &MikanClient,
    url: impl IntoUrl,
) -> eyre::Result<impl Iterator<Item = MikanRssItem>> {
    let bytes = download_bytes(url).await?;
    let bytes = client.fetch_bytes(|f| f.get(url)).await?;

    let channel = rss::Channel::read_from(&bytes[..])?;

@ -62,14 +70,17 @@ pub async fn parse_mikan_rss_items_from_rss_link(
#[cfg(test)]
mod tests {
    use super::parse_mikan_rss_items_from_rss_link;
    use crate::downloaders::defs::BITTORRENT_MIME_TYPE;
    use crate::{
        downloaders::defs::BITTORRENT_MIME_TYPE, parsers::mikan::mikan_client::MikanClient,
    };

    #[tokio::test]
    pub async fn test_mikan_subscription_items_from_rss_url() {
        let url = "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370";
        let items = parse_mikan_rss_items_from_rss_link(url)
        let client = MikanClient::new(0).await.expect("should get mikan client");
        let items = parse_mikan_rss_items_from_rss_link(&client, url)
            .await
            .expect("should get subscription items from rss url")
            .expect("should get subscription items from subscription url")
            .collect::<Vec<_>>();

        let first_sub_item = items
@ -1,5 +1,7 @@
pub mod mikan_client;
pub mod mikan_ep_parser;
pub mod mikan_rss_parser;

pub use mikan_client::MikanClient;
pub use mikan_ep_parser::{parse_episode_meta_from_mikan_homepage, MikanEpisodeMeta};
pub use mikan_rss_parser::{parse_mikan_rss_items_from_rss_link, MikanRssItem};

@ -3,5 +3,5 @@ pub mod errors;
pub mod html;
pub mod mikan;
pub mod raw;
pub mod title_parser;
pub mod tmdb;
pub mod torrent;
@ -17,7 +17,8 @@ lazy_static! {
    static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
    static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap();
    static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap();
    static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
    static ref SUB_RE: Regex = Regex::new(r"[简繁日英字幕]|CH|BIG5|GB").unwrap();
    static ref SUB_RE_EXCLUDE: Regex = Regex::new(r"字幕[社组]").unwrap();
    static ref PREFIX_RE: Regex =
        Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
    static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
@ -43,19 +44,19 @@ lazy_static! {

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct RawEpisodeMeta {
    name_en: Option<String>,
    name_en_no_season: Option<String>,
    name_jp: Option<String>,
    name_jp_no_season: Option<String>,
    name_zh: Option<String>,
    name_zh_no_season: Option<String>,
    season: i32,
    season_raw: Option<String>,
    episode_index: i32,
    sub: Option<String>,
    source: Option<String>,
    fansub: Option<String>,
    resolution: Option<String>,
    pub name_en: Option<String>,
    pub s_name_en: Option<String>,
    pub name_jp: Option<String>,
    pub s_name_jp: Option<String>,
    pub name_zh: Option<String>,
    pub s_name_zh: Option<String>,
    pub season: i32,
    pub season_raw: Option<String>,
    pub episode_index: i32,
    pub sub: Option<Vec<String>>,
    pub source: Option<String>,
    pub fansub: Option<String>,
    pub resolution: Option<String>,
}

fn extract_fansub(raw_name: &str) -> Option<&str> {
@ -122,7 +123,7 @@ fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>,
        return (title_body.to_string(), None, 1);
    }

    let mut season = 1;
    let mut season = 1i32;
    let mut season_raw = None;
    let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");

@ -215,13 +216,17 @@ fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32>
        .ok()
}

fn clear_sub(sub: Option<String>) -> Option<String> {
    sub.map(|s| CLEAR_SUB_RE.replace_all(&s, "").to_string())
fn clear_sub(sub: Option<Vec<String>>) -> Option<Vec<String>> {
    sub.map(|s| {
        s.into_iter()
            .map(|s| CLEAR_SUB_RE.replace_all(&s, "").to_string())
            .collect_vec()
    })
}

fn extract_tags_from_title_extra(
    title_extra: &str,
) -> (Option<String>, Option<String>, Option<String>) {
) -> (Option<Vec<String>>, Option<String>, Option<String>) {
    let replaced = TAGS_EXTRACT_SPLIT_RE.replace_all(title_extra, " ");
    let elements = replaced
        .split(' ')
@ -229,12 +234,19 @@ fn extract_tags_from_title_extra(
        .filter(|s| !s.is_empty())
        .collect_vec();

    let mut sub = None;
    let mut sub: Option<Vec<String>> = None;
    let mut resolution = None;
    let mut source = None;
    for element in elements.iter() {
        if SUB_RE.is_match(element) {
            sub = Some(element.to_string())
        if SUB_RE.is_match(element) && !SUB_RE_EXCLUDE.is_match(element) {
            let el = element.to_string();
            sub = Some(match sub {
                Some(mut res) => {
                    res.push(el);
                    res
                }
                None => vec![el],
            })
        } else if RESOLUTION_RE.is_match(element) {
            resolution = Some(element.to_string())
        } else if SOURCE_L1_RE.is_match(element) {
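The accumulation into `Option<Vec<String>>` above can also be written with `Option::get_or_insert_with`, which avoids rebuilding the option on every hit; a behavior-preserving sketch:

// Equivalent to the match above: create the Vec on the first hit, then push.
fn push_sub(sub: &mut Option<Vec<String>>, el: String) {
    sub.get_or_insert_with(Vec::new).push(el);
}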
@ -286,17 +298,17 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> eyre::Result<RawEpisodeMeta>
    let title_body = title_body_pre_process(&title_body, fansub)?;
    let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
    let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
    let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
    let (s_name_en, s_name_zh, s_name_jp) =
        extract_name_from_title_body_name_section(&name_without_season);
    let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1);
    let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
    Ok(RawEpisodeMeta {
        name_en,
        name_en_no_season,
        s_name_en,
        name_jp,
        name_jp_no_season,
        s_name_jp,
        name_zh,
        name_zh_no_season,
        s_name_zh,
        season,
        season_raw,
        episode_index,
@ -334,11 +346,11 @@ mod tests {
        r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
        r#"{
            "name_zh": "我心里危险的东西",
            "name_zh_no_season": "我心里危险的东西",
            "s_name_zh": "我心里危险的东西",
            "season": 2,
            "season_raw": "第二季",
            "episode_index": 5,
            "sub": "简日双语",
            "sub": ["简日双语"],
            "source": null,
            "fansub": "新Sub",
            "resolution": "1080P"
@ -352,13 +364,13 @@ mod tests {
        r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
        r#"{
            "name_en": "Boku no Kokoro no Yabai Yatsu",
            "name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
            "s_name_en": "Boku no Kokoro no Yabai Yatsu",
            "name_zh": "我内心的糟糕念头",
            "name_zh_no_season": "我内心的糟糕念头",
            "s_name_zh": "我内心的糟糕念头",
            "season": 1,
            "season_raw": null,
            "episode_index": 18,
            "sub": "简日双语",
            "sub": ["简日双语"],
            "source": null,
            "fansub": "喵萌奶茶屋",
            "resolution": "1080p"
@ -372,13 +384,13 @@ mod tests {
        r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
        r#"{
            "name_en": "Shin no Nakama 2nd",
            "name_en_no_season": "Shin no Nakama",
            "s_name_en": "Shin no Nakama",
            "name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
            "name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
            "s_name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
            "season": 2,
            "season_raw": "2nd",
            "episode_index": 8,
            "sub": "简繁内封字幕",
            "sub": ["简繁内封字幕"],
            "source": "WebRip",
            "fansub": "LoliHouse",
            "resolution": "1080p"
@ -392,10 +404,10 @@ mod tests {
        r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
        r#"{
            "name_en": "THE MARGINAL SERVICE",
            "name_en_no_season": "THE MARGINAL SERVICE",
            "s_name_en": "THE MARGINAL SERVICE",
            "season": 1,
            "episode_index": 8,
            "sub": "简繁内封字幕",
            "sub": ["简繁内封字幕"],
            "source": "WebRip",
            "fansub": "动漫国字幕组&LoliHouse",
            "resolution": "1080p"
@ -409,13 +421,13 @@ mod tests {
        r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
        r#"{
            "name_en": "Nozomanu Fushi no Boukensha",
            "name_en_no_season": "Nozomanu Fushi no Boukensha",
            "s_name_en": "Nozomanu Fushi no Boukensha",
            "name_zh": "事与愿违的不死冒险者",
            "name_zh_no_season": "事与愿违的不死冒险者",
            "s_name_zh": "事与愿违的不死冒险者",
            "season": 1,
            "season_raw": null,
            "episode_index": 1,
            "sub": "简繁内封字幕",
            "sub": ["简繁内封字幕"],
            "source": "WebRip",
            "fansub": "LoliHouse",
            "resolution": "1080p"
@ -431,13 +443,13 @@ mod tests {
            "name_en": "Pon no Michi",
            "name_jp": "ぽんのみち",
            "name_zh": "碰之道",
            "name_en_no_season": "Pon no Michi",
            "name_jp_no_season": "ぽんのみち",
            "name_zh_no_season": "碰之道",
            "s_name_en": "Pon no Michi",
            "s_name_jp": "ぽんのみち",
            "s_name_zh": "碰之道",
            "season": 1,
            "season_raw": null,
            "episode_index": 7,
            "sub": "简繁日内封字幕",
            "sub": ["简繁日内封字幕"],
            "source": "WebRip",
            "fansub": "喵萌奶茶屋&LoliHouse",
            "resolution": "1080p"
@ -451,13 +463,13 @@ mod tests {
        r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
        r#"{
            "name_en": "Yowai Character Tomozakikun",
            "name_en_no_season": "Yowai Character Tomozakikun",
            "s_name_en": "Yowai Character Tomozakikun",
            "name_zh": "弱角友崎同学 2nd STAGE",
            "name_zh_no_season": "弱角友崎同学",
            "s_name_zh": "弱角友崎同学",
            "season": 2,
            "season_raw": "2nd",
            "episode_index": 9,
            "sub": "CHT",
            "sub": ["CHT"],
            "source": "Baha",
            "fansub": "ANi",
            "resolution": "1080P"
@ -471,13 +483,13 @@ mod tests {
        r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
        r#"{
            "name_en": "Kingdom S5",
            "name_en_no_season": "Kingdom",
            "s_name_en": "Kingdom",
            "name_zh": "王者天下 第五季",
            "name_zh_no_season": "王者天下",
            "s_name_zh": "王者天下",
            "season": 5,
            "season_raw": "第五季",
            "episode_index": 7,
            "sub": "简繁外挂字幕",
            "sub": ["简繁外挂字幕"],
            "source": "WebRip",
            "fansub": "豌豆字幕组&LoliHouse",
            "resolution": "1080p"
@ -491,12 +503,12 @@ mod tests {
        r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
        r#"{
            "name_en": "Alice to Therese no Maboroshi Koujou",
            "name_en_no_season": "Alice to Therese no Maboroshi Koujou",
            "s_name_en": "Alice to Therese no Maboroshi Koujou",
            "name_zh": "爱丽丝与特蕾丝的虚幻工厂",
            "name_zh_no_season": "爱丽丝与特蕾丝的虚幻工厂",
            "s_name_zh": "爱丽丝与特蕾丝的虚幻工厂",
            "season": 1,
            "episode_index": 1,
            "sub": "简繁内封",
            "sub": ["简繁内封"],
            "source": "WebRip",
            "fansub": "千夏字幕组",
            "resolution": "1080p"
@ -510,12 +522,12 @@ mod tests {
        r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
        r#"{
            "name_en": "Yuru Camp Movie",
            "name_en_no_season": "Yuru Camp Movie",
            "s_name_en": "Yuru Camp Movie",
            "name_zh": "电影 轻旅轻营 (摇曳露营)",
            "name_zh_no_season": "电影 轻旅轻营 (摇曳露营)",
            "s_name_zh": "电影 轻旅轻营 (摇曳露营)",
            "season": 1,
            "episode_index": 1,
            "sub": "繁体",
            "sub": ["繁体"],
            "source": "UHDRip",
            "fansub": "千夏字幕组&喵萌奶茶屋",
            "resolution": "2160p"
@ -529,12 +541,12 @@ mod tests {
        r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#,
        r#"{
            "name_en": "New Doraemon",
            "name_en_no_season": "New Doraemon",
            "s_name_en": "New Doraemon",
            "name_zh": "哆啦A梦新番",
            "name_zh_no_season": "哆啦A梦新番",
            "s_name_zh": "哆啦A梦新番",
            "season": 1,
            "episode_index": 747,
            "sub": "GB",
            "sub": ["GB"],
            "fansub": "梦蓝字幕组",
            "resolution": "1080P"
        }"#,
@ -547,12 +559,12 @@ mod tests {
        r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
        r#"{
            "name_en": "Yuru Camp",
            "name_en_no_season": "Yuru Camp",
            "s_name_en": "Yuru Camp",
            "name_zh": "剧场版-摇曳露营",
            "name_zh_no_season": "剧场版-摇曳露营",
            "s_name_zh": "剧场版-摇曳露营",
            "season": 1,
            "episode_index": 1,
            "sub": "简日双语",
            "sub": ["简日双语"],
            "fansub": "MCE汉化组",
            "resolution": "1080P"
        }"#,
@ -565,12 +577,12 @@ mod tests {
        r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#,
        r#"{
            "name_en": "NieR Automata Ver1.1a",
            "name_en_no_season": "NieR Automata Ver1.1a",
            "s_name_en": "NieR Automata Ver1.1a",
            "name_zh": "尼尔:机械纪元",
            "name_zh_no_season": "尼尔:机械纪元",
            "s_name_zh": "尼尔:机械纪元",
            "season": 1,
            "episode_index": 2,
            "sub": "简日双语",
            "sub": ["简日双语"],
            "fansub": "织梦字幕组",
            "resolution": "1080P"
        }"#,
@ -584,12 +596,12 @@ mod tests {
        r#"
        {
            "name_en": "Delicious in Dungeon",
            "name_en_no_season": "Delicious in Dungeon",
            "s_name_en": "Delicious in Dungeon",
            "name_zh": "迷宫饭",
            "name_zh_no_season": "迷宫饭",
            "s_name_zh": "迷宫饭",
            "season": 1,
            "episode_index": 3,
            "sub": "日语中字",
            "sub": ["日语中字"],
            "source": "NETFLIX",
            "fansub": "天月搬运组",
            "resolution": "1080P"
@ -604,12 +616,12 @@ mod tests {
        r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
        r#"{
            "name_en": "Dungeon Meshi",
            "name_en_no_season": "Dungeon Meshi",
            "s_name_en": "Dungeon Meshi",
            "name_zh": "迷宫饭",
            "name_zh_no_season": "迷宫饭",
            "s_name_zh": "迷宫饭",
            "season": 1,
            "episode_index": 1,
            "sub": "简日双语",
            "sub": ["简日双语"],
            "fansub": "爱恋字幕社",
            "resolution": "1080P"
        }"#,
@ -622,12 +634,12 @@ mod tests {
        r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
        r#"{
            "name_en": "Mahou Shoujo ni Akogarete",
            "name_en_no_season": "Mahou Shoujo ni Akogarete",
            "s_name_en": "Mahou Shoujo ni Akogarete",
            "name_zh": "梦想成为魔法少女 [年龄限制版]",
            "name_zh_no_season": "梦想成为魔法少女 [年龄限制版]",
            "s_name_zh": "梦想成为魔法少女 [年龄限制版]",
            "season": 1,
            "episode_index": 9,
            "sub": "CHT",
            "sub": ["CHT"],
            "source": "Baha",
            "fansub": "ANi",
            "resolution": "1080P"
@ -641,11 +653,11 @@ mod tests {
        r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
        r#"{
            "name_zh": "16bit 的感动 ANOTHER LAYER",
            "name_zh_no_season": "16bit 的感动 ANOTHER LAYER",
            "s_name_zh": "16bit 的感动 ANOTHER LAYER",
            "season": 1,
            "season_raw": null,
            "episode_index": 1,
            "sub": "CHT",
            "sub": ["CHT"],
            "source": "Baha",
            "fansub": "ANi",
            "resolution": "1080P"
@ -659,12 +671,12 @@ mod tests {
        r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#,
        r#"{
            "name_en": "~ Sugar Apple Fairy Tale ~",
            "name_en_no_season": "~ Sugar Apple Fairy Tale ~",
            "s_name_en": "~ Sugar Apple Fairy Tale ~",
            "name_zh": "银砂糖师与黑妖精",
            "name_zh_no_season": "银砂糖师与黑妖精",
            "s_name_zh": "银砂糖师与黑妖精",
            "season": 1,
            "episode_index": 13,
            "sub": "简日双语",
            "sub": ["简日双语"],
            "fansub": "喵萌奶茶屋",
            "resolution": "1080p"
        }"#,
@ -677,12 +689,12 @@ mod tests {
        r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4(字幕社招人内详)"#,
        r#"{
            "name_en": "Tengoku Daimakyou",
            "name_en_no_season": "Tengoku Daimakyou",
            "s_name_en": "Tengoku Daimakyou",
            "name_zh": "天国大魔境",
            "name_zh_no_season": "天国大魔境",
            "s_name_zh": "天国大魔境",
            "season": 1,
            "episode_index": 5,
            "sub": "字幕社招人内详",
            "sub": ["GB"],
            "source": null,
            "fansub": "极影字幕社",
            "resolution": "720P"
@ -696,9 +708,9 @@ mod tests {
        r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#,
        r#"{
            "name_jp": "仮面ライダーギーツ",
            "name_jp_no_season": "仮面ライダーギーツ",
            "s_name_jp": "仮面ライダーギーツ",
            "name_zh": "假面骑士Geats",
            "name_zh_no_season": "假面骑士Geats",
            "s_name_zh": "假面骑士Geats",
            "season": 1,
            "episode_index": 33,
            "source": "WEBDL",
@ -714,12 +726,12 @@ mod tests {
        r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对!☆PICO FEVER! / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#,
        r#"{
            "name_en": "Garupa Pico: Fever!",
            "name_en_no_season": "Garupa Pico: Fever!",
            "s_name_en": "Garupa Pico: Fever!",
            "name_zh": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
            "name_zh_no_season": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
            "s_name_zh": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
            "season": 1,
            "episode_index": 26,
            "sub": "简繁内封字幕",
            "sub": ["简繁内封字幕"],
            "source": "WebRip",
            "fansub": "百冬练习组&LoliHouse",
            "resolution": "1080p"
@ -734,11 +746,11 @@ mod tests {
        r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
        r#"{
            "name_zh": "摇曳露营△剧场版",
            "name_zh_no_season": "摇曳露营△剧场版",
            "s_name_zh": "摇曳露营△剧场版",
            "season": 1,
            "season_raw": null,
            "episode_index": 1,
            "sub": "简繁字幕",
            "sub": ["简繁字幕"],
            "source": "BDrip",
            "fansub": "7³ACG x 桜都字幕组",
            "resolution": "1080p"
@ -749,13 +761,13 @@ mod tests {
        r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#,
        r#"{
            "name_en": "第二季 Komi-san wa, Komyushou Desu. S02",
            "name_en_no_season": "Komi-san wa, Komyushou Desu.",
            "s_name_en": "Komi-san wa, Komyushou Desu.",
            "name_zh": "古见同学有交流障碍症",
            "name_zh_no_season": "古见同学有交流障碍症",
            "s_name_zh": "古见同学有交流障碍症",
            "season": 2,
            "season_raw": "第二季",
            "episode_index": 22,
            "sub": "GB",
            "sub": ["GB"],
            "fansub": "幻樱字幕组",
            "resolution": "1920X1080"
        }"#,
4
crates/recorder/src/parsers/tmdb/mod.rs
Normal file
@ -0,0 +1,4 @@
pub mod tmdb_bgm_parser;
pub mod tmdb_client;
pub mod tmdb_dtos;
pub mod tmdb_list_parser;
209
crates/recorder/src/parsers/tmdb/tmdb_bgm_parser.rs
Normal file
@ -0,0 +1,209 @@
use serde::{Deserialize, Serialize};

use super::tmdb_client::TMDB_API_ORIGIN;
use crate::{
    i18n::LanguagePreset,
    models::bangumi::BangumiDistribution,
    parsers::tmdb::{
        tmdb_client::TmdbApiClient,
        tmdb_dtos::{
            TmdbMediaDetailDto, TmdbMovieDetailDto, TmdbSearchMultiItemDto, TmdbSearchMultiPageDto,
            TmdbTvSeriesDetailDto,
        },
    },
};

impl BangumiDistribution {
    pub fn prefer_tmdb_media_type(&self) -> &str {
        match self {
            BangumiDistribution::Movie => "movie",
            BangumiDistribution::Tv => "tv",
            _ => "tv",
        }
    }

    pub fn from_tmdb_media_type(media_type: &str) -> Self {
        match media_type {
            "movie" => BangumiDistribution::Movie,
            _ => BangumiDistribution::Tv,
        }
    }
}

const TMDB_ANIMATION_GENRE_ID: i64 = 16;

#[inline]
fn build_tmdb_search_api_url(query: &str, lang: &LanguagePreset, page: u32) -> String {
    format!(
        "{endpoint}/3/search/multi?language={lang_tag}&query={query}&page={page}&\
         include_adult=true",
        endpoint = TMDB_API_ORIGIN,
        lang_tag = lang.name_str(),
        query = query,
        page = page
    )
}

#[inline]
fn build_tmdb_info_api_url(
    id: i64,
    lang: &LanguagePreset,
    distribution: &BangumiDistribution,
) -> String {
    let tmdb_media_type = match distribution {
        BangumiDistribution::Movie => "movie",
        BangumiDistribution::Tv => "tv",
        _ => "tv",
    };
    format!(
        "{endpoint}/3/{tmdb_media_type}/{id}?language={lang_tag}",
        endpoint = TMDB_API_ORIGIN,
        tmdb_media_type = tmdb_media_type,
        id = id,
        lang_tag = lang.name_str()
    )
}

fn tmdb_genres_is_match_animation(genre_ids: &[i64]) -> bool {
    genre_ids.contains(&TMDB_ANIMATION_GENRE_ID)
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct TmdbBangumiItem {
    pub id: i64,
    pub name: String,
    pub origin_name: String,
    pub last_season: i32,
    pub year: Option<String>,
    pub poster_link: Option<String>,
}

pub async fn search_tmdb_items_from_title_and_lang(
    tmdb_client: &TmdbApiClient,
    title: &str,
    lang: &LanguagePreset,
) -> eyre::Result<Vec<TmdbSearchMultiItemDto>> {
    let mut items = vec![];
    let page_num = {
        let search_url = build_tmdb_search_api_url(title, lang, 1);
        let first_page: TmdbSearchMultiPageDto = tmdb_client
            .fetch_json(|fetch| fetch.get(search_url))
            .await?;
        items.extend(first_page.results);
        first_page.total_pages
    };
    for i in 2..=page_num {
        let search_url = build_tmdb_search_api_url(title, lang, i);
        let page: TmdbSearchMultiPageDto = tmdb_client
            .fetch_json(|fetch| fetch.get(search_url))
            .await?;
        items.extend(page.results);
    }
    Ok(items)
}

pub async fn get_tmdb_info_from_id_lang_and_distribution(
    tmdb_client: &TmdbApiClient,
    id: i64,
    lang: &LanguagePreset,
    distribution: &BangumiDistribution,
) -> eyre::Result<TmdbMediaDetailDto> {
    let info_url = build_tmdb_info_api_url(id, lang, distribution);
    let info = if distribution == &BangumiDistribution::Movie {
        let info: Box<TmdbMovieDetailDto> =
            tmdb_client.fetch_json(|fetch| fetch.get(info_url)).await?;
        TmdbMediaDetailDto::Movie(info)
    } else {
        let info: Box<TmdbTvSeriesDetailDto> =
            tmdb_client.fetch_json(|fetch| fetch.get(info_url)).await?;
        TmdbMediaDetailDto::Tv(info)
    };
    Ok(info)
}

pub async fn parse_tmdb_bangumi_from_title_and_lang(
    tmdb_client: &TmdbApiClient,
    title: &str,
    lang: &LanguagePreset,
    distribution: &BangumiDistribution,
) -> eyre::Result<Option<TmdbBangumiItem>> {
    let mut search_result = search_tmdb_items_from_title_and_lang(tmdb_client, title, lang).await?;
    if search_result.is_empty() {
        search_result =
            search_tmdb_items_from_title_and_lang(tmdb_client, &title.replace(' ', ""), lang)
                .await?;
    }
    if search_result.is_empty() {
        Ok(None)
    } else {
        let mut target_and_priority: Option<(&TmdbSearchMultiItemDto, u32)> = None;
        for item in search_result.iter() {
            let is_animation = tmdb_genres_is_match_animation(&item.genre_ids);
            let is_prefer_media_type = item.media_type == distribution.prefer_tmdb_media_type();
            let priority =
                (if is_prefer_media_type { 10 } else { 0 }) + (if is_animation { 1 } else { 0 });
            if let Some((last_target, last_priority)) = target_and_priority.as_mut() {
                if priority > *last_priority {
                    *last_target = item;
                    // also track the new priority, otherwise a later lower-priority
                    // item could overwrite a better match
                    *last_priority = priority;
                }
            } else {
                target_and_priority = Some((item, priority));
            }
        }
        if let Some((target, _)) = target_and_priority {
            let info = get_tmdb_info_from_id_lang_and_distribution(
                tmdb_client,
                target.id,
                lang,
                &BangumiDistribution::from_tmdb_media_type(&target.media_type),
            )
            .await?;
            match info {
                TmdbMediaDetailDto::Movie(info) => Ok(Some(TmdbBangumiItem {
                    id: info.id,
                    name: info.name,
                    origin_name: info.original_name,
                    last_season: 1,
                    year: Some(info.release_date),
                    poster_link: info.poster_path,
                })),
                TmdbMediaDetailDto::Tv(info) => Ok(Some(TmdbBangumiItem {
                    id: info.id,
                    name: info.name,
                    origin_name: info.original_name,
                    last_season: info.number_of_seasons,
                    year: info.first_air_date,
                    poster_link: info.poster_path,
                })),
            }
        } else {
            Ok(None)
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::parsers::tmdb::{
        tmdb_bgm_parser::parse_tmdb_bangumi_from_title_and_lang,
        tmdb_client::tests::prepare_tmdb_api_client,
    };

    #[tokio::test]
    async fn test_parse_tmdb_bangumi_from_title_and_lang() {
        let client = prepare_tmdb_api_client().await;
        let result = parse_tmdb_bangumi_from_title_and_lang(
            client.as_ref(),
            "青春猪头",
            &crate::i18n::LanguagePreset::parse("zh-CN").expect("failed to create language preset"),
            &crate::models::bangumi::BangumiDistribution::Tv,
        )
        .await
        .expect("failed to parse tmdb bangumi from title and lang");

        assert_eq!(
            result.as_ref().map_or("", |item| &item.name),
            "青春猪头少年不会梦到兔女郎学姐"
        );
    }
}
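The best-candidate loop above scores each search hit (preferred media type is worth 10, the animation genre 1) and keeps the highest. For reference, a near-equivalent sketch with `Iterator::max_by_key`; note that on ties `max_by_key` returns the last maximum while the loop keeps the first seen, so this is a sketch rather than a drop-in replacement:

let target = search_result.iter().max_by_key(|item| {
    let is_animation = tmdb_genres_is_match_animation(&item.genre_ids);
    let is_prefer = item.media_type == distribution.prefer_tmdb_media_type();
    (if is_prefer { 10u32 } else { 0 }) + (if is_animation { 1 } else { 0 })
});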
82
crates/recorder/src/parsers/tmdb/tmdb_client.rs
Normal file
@ -0,0 +1,82 @@
use std::{
    ops::Deref,
    sync::{Arc, Weak},
};

use lazy_static::lazy_static;
use reqwest::header::{HeaderMap, HeaderValue, ACCEPT, AUTHORIZATION};
use tokio::sync::RwLock;
use weak_table::WeakValueHashMap;

use crate::downloaders::ApiClient;

pub(crate) const TMDB_API_ORIGIN: &str = "https://api.themoviedb.org";

pub struct TmdbApiClient {
    api_token: String,
    api_client: ApiClient,
}

lazy_static! {
    static ref TMDB_API_CLIENT_MAP: Arc<RwLock<WeakValueHashMap<String, Weak<TmdbApiClient>>>> =
        Arc::new(RwLock::new(WeakValueHashMap::new()));
}

impl TmdbApiClient {
    pub async fn new<S: AsRef<str>>(api_token: S) -> eyre::Result<Arc<Self>> {
        let api_token = api_token.as_ref();
        {
            let map_read = TMDB_API_CLIENT_MAP.read().await;
            if let Some(client) = map_read.get(api_token) {
                return Ok(client.clone());
            }
        }
        let client = Arc::new(TmdbApiClient {
            api_token: api_token.to_string(),
            api_client: ApiClient::new(
                std::time::Duration::from_millis(50),
                Some({
                    let mut header_map = HeaderMap::new();
                    header_map.insert(ACCEPT, HeaderValue::from_static("application/json"));
                    header_map.insert(
                        AUTHORIZATION,
                        HeaderValue::from_str(&format!("Bearer {api_token}"))?,
                    );
                    header_map
                }),
            )?,
        });
        {
            let mut map_write = TMDB_API_CLIENT_MAP.write().await;
            map_write.insert(api_token.to_string(), client.clone());
        }
        Ok(client)
    }

    pub fn get_api_token(&self) -> &str {
        &self.api_token
    }
}

impl Deref for TmdbApiClient {
    type Target = ApiClient;

    fn deref(&self) -> &Self::Target {
        &self.api_client
    }
}

#[cfg(test)]
pub(crate) mod tests {
    use std::{env, sync::Arc};

    use crate::{parsers::tmdb::tmdb_client::TmdbApiClient, utils::test::load_test_env_panic};

    pub async fn prepare_tmdb_api_client() -> Arc<TmdbApiClient> {
        load_test_env_panic();
        let tmdb_api_token = env::var("TMDB_API_TOKEN").expect("TMDB_API_TOKEN is not set");
        TmdbApiClient::new(tmdb_api_token)
            .await
            .expect("failed to create tmdb api client")
    }
}
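Because the registry is a `WeakValueHashMap`, clients are deduplicated per token only while somebody still holds an `Arc` to them; once the last `Arc` drops, the entry falls out of the map on its own and the next `new` builds a fresh client. A sketch of that contract (the token value is a placeholder):

// Two lookups with the same live token yield the same Arc.
async fn demo() -> eyre::Result<()> {
    let a = TmdbApiClient::new("tmdb-token").await?;
    let b = TmdbApiClient::new("tmdb-token").await?;
    assert!(std::sync::Arc::ptr_eq(&a, &b));
    Ok(())
}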
170
crates/recorder/src/parsers/tmdb/tmdb_dtos.rs
Normal file
@ -0,0 +1,170 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TmdbListItemDto {
    pub id: i64,
    #[serde(alias = "title")]
    pub name: String,
    #[serde(alias = "original_title")]
    pub original_name: String,
    pub original_language: String,
    pub adult: bool,
    pub poster_path: Option<String>,
    pub backdrop_path: Option<String>,
    pub media_type: String,
    pub overview: String,
    pub genre_ids: Vec<i64>,
    pub popularity: f32,
    pub first_air_date: String,
    pub origin_country: Option<Vec<String>>,
    pub vote_average: f32,
    pub vote_count: i32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TmdbListPageDto {
    pub id: i64,
    pub page: u32,
    pub sort_by: Option<String>,
    pub total_pages: u32,
    pub total_results: u32,
    pub name: String,
    pub results: Vec<TmdbListItemDto>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TmdbGenresObjDto {
    pub id: i64,
    pub name: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TmdbEpisodeAirDto {
    pub id: i64,
    pub name: String,
    pub overview: String,
    pub vote_average: f32,
    pub vote_count: i32,
    pub air_date: String,
    pub episode_number: i32,
    pub episode_type: String,
    pub production_code: String,
    pub runtime: Option<i32>,
    pub season_number: i32,
    pub show_id: i64,
    pub still_path: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TmdbSeasonDto {
    pub air_date: String,
    pub episode_count: i32,
    pub id: i64,
    pub name: String,
    pub overview: String,
    pub poster_path: Option<String>,
    pub season_number: i32,
    pub vote_average: f32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TmdbSpokenLanguageDto {
    pub iso_639_1: String,
    pub name: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TmdbTvSeriesDetailDto {
    pub adult: bool,
    pub id: i64,
    #[serde(alias = "title")]
    pub name: String,
    #[serde(alias = "original_title")]
    pub original_name: String,
    pub original_language: String,
    pub backdrop_path: Option<String>,
    pub episode_run_time: Option<Vec<i32>>,
    pub genres: Vec<TmdbGenresObjDto>,
    pub first_air_date: Option<String>,
    pub homepage: Option<String>,
    pub in_production: bool,
    pub languages: Vec<String>,
    pub last_air_date: Option<String>,
    pub last_episode_to_air: Option<TmdbEpisodeAirDto>,
    pub next_episode_to_air: Option<TmdbEpisodeAirDto>,
    pub number_of_episodes: i32,
    pub number_of_seasons: i32,
    pub origin_country: Option<Vec<String>>,
    pub overview: String,
    pub popularity: f32,
    pub poster_path: Option<String>,
    pub seasons: Vec<TmdbSeasonDto>,
    pub spoken_languages: Vec<TmdbSpokenLanguageDto>,
    pub status: String,
    pub tagline: String,
    pub vote_average: f32,
    pub vote_count: i32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TmdbMovieDetailDto {
    #[serde(alias = "title")]
    pub name: String,
    #[serde(alias = "original_title")]
    pub original_name: String,
    pub adult: bool,
    pub backdrop_path: Option<String>,
    pub homepage: Option<String>,
    pub id: i64,
    pub budget: i64,
    pub imdb_id: Option<String>,
    pub original_language: String,
    pub overview: String,
    pub popularity: f32,
    pub poster_path: Option<String>,
    pub release_date: String,
    pub revenue: i32,
    pub runtime: Option<i32>,
    pub spoken_languages: Vec<TmdbSpokenLanguageDto>,
    pub status: String,
    pub tagline: String,
    pub video: bool,
    pub vote_average: f32,
    pub vote_count: i32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TmdbSearchMultiItemDto {
    pub adult: bool,
    pub backdrop_path: Option<String>,
    pub id: i64,
    #[serde(alias = "title")]
    pub name: String,
    #[serde(alias = "original_title")]
    pub original_name: String,
    pub original_language: String,
    pub overview: String,
    pub poster_path: Option<String>,
    pub media_type: String,
    pub genre_ids: Vec<i64>,
    pub popularity: f32,
    pub first_air_date: Option<String>,
    pub vote_average: f32,
    pub vote_count: i32,
    pub origin_country: Option<Vec<String>>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "media_type", rename_all = "snake_case")]
pub enum TmdbMediaDetailDto {
    Tv(Box<TmdbTvSeriesDetailDto>),
    Movie(Box<TmdbMovieDetailDto>),
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TmdbSearchMultiPageDto {
    pub total_results: u32,
    pub total_pages: u32,
    pub page: u32,
    pub results: Vec<TmdbSearchMultiItemDto>,
}
66
crates/recorder/src/parsers/tmdb/tmdb_list_parser.rs
Normal file
@ -0,0 +1,66 @@
use super::tmdb_client::TMDB_API_ORIGIN;
use crate::{
    i18n::LanguagePreset,
    parsers::tmdb::{
        tmdb_client::TmdbApiClient,
        tmdb_dtos::{TmdbListItemDto, TmdbListPageDto},
    },
};

#[inline]
fn build_tmdb_list_api_url(list_id: i64, lang: &LanguagePreset, page: u32) -> String {
    format!(
        "{endpoint}/4/list/{list_id}?language={lang_tag}&page={page}",
        endpoint = TMDB_API_ORIGIN,
        list_id = list_id,
        lang_tag = lang.name_str(),
        page = page
    )
}

pub async fn parse_tmdb_list_items_from_list_api(
    tmdb_client: &TmdbApiClient,
    list_id: i64,
    lang: &LanguagePreset,
) -> eyre::Result<Vec<TmdbListItemDto>> {
    let mut items: Vec<TmdbListItemDto> = vec![];

    let page_num = {
        let first_page: TmdbListPageDto = tmdb_client
            .fetch_json(|fetch| fetch.get(build_tmdb_list_api_url(list_id, lang, 1)))
            .await?;

        items.extend(first_page.results);

        first_page.total_pages
    };

    for i in 2..=page_num {
        let page: TmdbListPageDto = tmdb_client
            .fetch_json(|fetch| fetch.get(build_tmdb_list_api_url(list_id, lang, i)))
            .await?;

        items.extend(page.results);
    }

    Ok(items)
}

#[cfg(test)]
mod tests {
    use super::super::tmdb_client::tests::prepare_tmdb_api_client;

    #[tokio::test]
    async fn test_parse_tmdb_list_items_from_list_api() {
        let client = prepare_tmdb_api_client().await;
        let items = super::parse_tmdb_list_items_from_list_api(
            client.as_ref(),
            8294054,
            &crate::i18n::LanguagePreset::parse("zh-CN").expect("failed to create language preset"),
        )
        .await
        .expect("failed to parse tmdb list items from list api");

        assert!(items.iter().any(|item| item.name == "葬送的芙莉莲"));
    }
}
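Both `search_tmdb_items_from_title_and_lang` and `parse_tmdb_list_items_from_list_api` repeat the same "fetch page 1 for `total_pages`, then loop over `2..=total_pages`" shape. A hypothetical helper (not part of this diff) that both could share, with the page fetch abstracted into a closure returning `(total_pages, results)`:

async fn fetch_all_pages<T, F, Fut>(mut fetch_page: F) -> eyre::Result<Vec<T>>
where
    F: FnMut(u32) -> Fut,
    Fut: std::future::Future<Output = eyre::Result<(u32, Vec<T>)>>,
{
    // Page 1 tells us how many pages exist; the rest are appended in order.
    let (total_pages, mut items) = fetch_page(1).await?;
    for i in 2..=total_pages {
        let (_, page_items) = fetch_page(i).await?;
        items.extend(page_items);
    }
    Ok(items)
}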
@ -5,7 +5,7 @@ use quirks_path::Path;
use regex::Regex;
use serde::{Deserialize, Serialize};

use crate::parsers::defs::SUBTITLE_LANG;
use crate::i18n::LanguagePreset;

lazy_static! {
    static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
@ -52,7 +52,7 @@ fn get_fansub(group_and_title: &str) -> (Option<&str>, &str) {
        .filter(|s| !s.is_empty())
        .collect::<Vec<_>>();

    match (n.get(0), n.get(1)) {
    match (n.first(), n.get(1)) {
        (None, None) => (None, ""),
        (Some(n0), None) => (None, *n0),
        (Some(n0), Some(n1)) => {
@ -84,17 +84,10 @@ fn get_season_and_title(season_and_title: &str) -> (String, i32) {
    (title, season)
}

fn get_subtitle_lang(media_name: &str) -> Option<&str> {
    let media_name_lower = media_name.to_lowercase();
    for (lang, lang_aliases) in SUBTITLE_LANG.iter() {
        if lang_aliases
            .iter()
            .any(|alias| media_name_lower.contains(alias))
        {
            return Some(lang);
        }
    }
    return None;
fn get_subtitle_lang(subtitle_str: &str) -> Option<LanguagePreset> {
    let lowercase = subtitle_str.to_lowercase();
    let media_name_lower = lowercase.trim();
    LanguagePreset::parse(media_name_lower).ok()
}

pub fn parse_episode_media_meta_from_torrent(
@ -168,7 +161,7 @@ pub fn parse_episode_subtitle_meta_from_torrent(

    Ok(TorrentEpisodeSubtitleMeta {
        media: media_meta,
        lang: lang.map(|s| s.to_string()),
        lang: lang.map(|s| s.name_str().to_string()),
    })
}

@ -272,7 +265,7 @@ mod tests {
        let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
        let found_raw =
            parse_episode_subtitle_meta_from_torrent(Path::new(raw_name), None, None);
        let found = found_raw.as_ref().ok().map(|s| s.clone());
        let found = found_raw.as_ref().ok().cloned();

        if expected != found {
            if found_raw.is_ok() {
@ -293,7 +286,7 @@ mod tests {
        } else {
            let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
            let found_raw = parse_episode_media_meta_from_torrent(Path::new(raw_name), None, None);
            let found = found_raw.as_ref().ok().map(|s| s.clone());
            let found = found_raw.as_ref().ok().cloned();

            if expected != found {
                if found_raw.is_ok() {

@ -1 +1,4 @@
pub mod torrent_path;
pub mod url_utils;

pub use url_utils::{extract_extname_from_url, extract_filename_from_url};
@ -3,7 +3,7 @@ use std::collections::HashSet;
use quirks_path::{Path, PathBuf};

use crate::{
    downloaders::defs::Torrent,
    downloaders::torrent::Torrent,
    models::{bangumi, subscribers},
    parsers::defs::SEASON_REGEX,
};
@ -70,8 +70,8 @@ pub fn gen_bangumi_sub_path(data: &bangumi::Model) -> PathBuf {
    PathBuf::from(data.official_title.to_string()).join(format!("Season {}", data.season))
}

pub fn rule_name(bgm: &bangumi::Model, conf: &subscribers::SubscriberBangumiConfig) -> String {
    if let (Some(true), Some(group_name)) = (conf.leading_group_tag, &bgm.fansub) {
pub fn rule_name(bgm: &bangumi::Model, conf: &subscribers::SubscribeBangumiConfig) -> String {
    if let (true, Some(group_name)) = (conf.leading_fansub_tag, &bgm.fansub) {
        format!("[{}] {} S{}", group_name, bgm.official_title, bgm.season)
    } else {
        format!("{} S{}", bgm.official_title, bgm.season)
19
crates/recorder/src/path/url_utils.rs
Normal file
@ -0,0 +1,19 @@
use quirks_path::Path;
use url::Url;

pub fn extract_filename_from_url(url: &Url) -> Option<&str> {
    url.path_segments().and_then(|s| s.last()).and_then(|last| {
        if last.is_empty() {
            None
        } else {
            Some(last)
        }
    })
}

pub fn extract_extname_from_url(url: &Url) -> Option<String> {
    let filename = extract_filename_from_url(url);
    filename
        .and_then(|f| Path::new(f).extension())
        .map(|ext| format!(".{}", ext))
}
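Expected behavior of the two helpers, as a sketch (the URL reuses the poster link from the Mikan test earlier in this diff):

fn demo() -> eyre::Result<()> {
    let url = Url::parse("https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg")?;
    assert_eq!(extract_filename_from_url(&url), Some("5ce9fed1.jpg"));
    assert_eq!(extract_extname_from_url(&url), Some(".jpg".to_string()));
    Ok(())
}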
0
crates/recorder/src/search/mod.rs
Normal file
130
crates/recorder/src/storage/dal.rs
Normal file
@ -0,0 +1,130 @@
use std::fmt::Display;

use bytes::Bytes;
use opendal::{layers::LoggingLayer, services, Operator};
use quirks_path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use url::Url;
use uuid::Uuid;

use crate::config::AppDalConf;

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum DalContentType {
    Poster,
}

impl AsRef<str> for DalContentType {
    fn as_ref(&self) -> &str {
        match self {
            Self::Poster => "poster",
        }
    }
}

#[derive(Debug, Clone)]
pub struct DalContext {
    pub config: AppDalConf,
}

#[derive(Debug, Clone)]
pub enum DalStoredUrl {
    RelativePath { path: String },
    Absolute { url: Url },
}

impl DalStoredUrl {
    pub fn as_str(&self) -> &str {
        match self {
            Self::RelativePath { path } => path.as_str(),
            Self::Absolute { url } => url.as_str(),
        }
    }
}

impl AsRef<str> for DalStoredUrl {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}

impl Display for DalStoredUrl {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

impl DalContext {
    pub fn new(dal_conf: AppDalConf) -> Self {
        Self { config: dal_conf }
    }

    pub async fn store_blob(
        &self,
        content_category: DalContentType,
        extname: &str,
        data: Bytes,
        subscriber_pid: &str,
    ) -> eyre::Result<DalStoredUrl> {
        let basename = format!("{}{}", Uuid::new_v4(), extname);
        let mut dirname = [subscriber_pid, content_category.as_ref()]
            .into_iter()
            .map(Path::new)
            .collect::<PathBuf>();

        let mut fs_builder = services::Fs::default();
        fs_builder.root(self.config.fs_root.as_str());

        let fs_op = Operator::new(fs_builder)?
            .layer(LoggingLayer::default())
            .finish();

        let dirpath = format!("{}/", dirname.as_str());
        fs_op.create_dir(&dirpath).await?;

        let fullname = {
            dirname.push(basename);
            dirname
        };

        fs_op.write_with(fullname.as_str(), data).await?;

        Ok(DalStoredUrl::RelativePath {
            path: fullname.to_string(),
        })
    }
}

#[cfg(test)]
mod tests {
    use base64::Engine;

    use crate::{
        config::AppDalConf, models::subscribers::ROOT_SUBSCRIBER_NAME, storage::DalContext,
    };

    #[tokio::test]
    async fn test_dal_context() {
        let dal_context = DalContext::new(AppDalConf {
            fs_root: "data/dal".to_string(),
        });

        let a = dal_context
            .store_blob(
                crate::storage::DalContentType::Poster,
                ".jpg",
                bytes::Bytes::from(
                    base64::engine::general_purpose::STANDARD.decode("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8BQDwAEhQGAhKmMIQAAAABJRU5ErkJggg==").expect("should decode as vec u8")
                ),
                ROOT_SUBSCRIBER_NAME,
            )
            .await
            .expect("dal context should store blob");

        assert!(
            matches!(a, crate::storage::DalStoredUrl::RelativePath { .. }),
            "dal context should store blob as relative path"
        );
    }
}
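`store_blob` namespaces blobs as `<subscriber_pid>/<content_category>/<uuid4><extname>` under the configured `fs_root`, and the returned `DalStoredUrl::RelativePath` stays relative so the root can move without rewriting stored URLs. A sketch of the resulting shape (the pid value is a placeholder):

let stored = dal_context
    .store_blob(DalContentType::Poster, ".jpg", data, "some-subscriber-pid")
    .await?;
// e.g. "some-subscriber-pid/poster/<random-uuid>.jpg"
assert!(stored.as_str().starts_with("some-subscriber-pid/poster/"));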
40
crates/recorder/src/storage/dal_ext.rs
Normal file
@ -0,0 +1,40 @@
use std::sync::Arc;

use eyre::Context;
use loco_rs::app::AppContext;
use tokio::sync::OnceCell;

use crate::{
    config::{deserialize_key_path_from_loco_rs_config, AppDalConf},
    storage::DalContext,
};

static APP_DAL_CONTEXT: OnceCell<Arc<DalContext>> = OnceCell::const_new();

#[async_trait::async_trait]
pub trait AppContextDalExt {
    async fn get_dal(&self) -> eyre::Result<Arc<DalContext>>;
    async fn get_dal_unwrap(&self) -> Arc<DalContext>;
    async fn init_dal(&self) -> eyre::Result<Arc<DalContext>> {
        self.get_dal().await.wrap_err("dal context failed to init")
    }
}

#[async_trait::async_trait]
impl AppContextDalExt for AppContext {
    async fn get_dal(&self) -> eyre::Result<Arc<DalContext>> {
        let context = APP_DAL_CONTEXT
            .get_or_try_init(|| async {
                deserialize_key_path_from_loco_rs_config::<AppDalConf>(&["dal"], &self.config)
                    .map(|dal_conf| Arc::new(DalContext::new(dal_conf)))
            })
            .await?;
        Ok(context.clone())
    }

    async fn get_dal_unwrap(&self) -> Arc<DalContext> {
        self.get_dal()
            .await
            .unwrap_or_else(|e| panic!("dal context failed to init: {}", e))
    }
}
26
crates/recorder/src/storage/dal_initializer.rs
Normal file
@ -0,0 +1,26 @@
use axum::Router as AxumRouter;
use loco_rs::app::{AppContext, Initializer};

use crate::storage::AppContextDalExt;

pub struct AppDalInitializer;

#[async_trait::async_trait]
impl Initializer for AppDalInitializer {
    fn name(&self) -> String {
        "AppDalInitializer".to_string()
    }

    async fn before_run(&self, ctx: &AppContext) -> loco_rs::Result<()> {
        ctx.init_dal().await?;
        Ok(())
    }

    async fn after_routes(
        &self,
        router: AxumRouter,
        _ctx: &AppContext,
    ) -> loco_rs::Result<AxumRouter> {
        Ok(router)
    }
}
7
crates/recorder/src/storage/mod.rs
Normal file
@ -0,0 +1,7 @@
pub mod dal;
pub mod dal_ext;
pub mod dal_initializer;

pub use dal::{DalContentType, DalContext, DalStoredUrl};
pub use dal_ext::AppContextDalExt;
pub use dal_initializer::AppDalInitializer;
8
crates/recorder/src/utils/cli.rs
Normal file
@ -0,0 +1,8 @@
pub fn hack_env_to_fit_workspace() -> std::io::Result<()> {
    if cfg!(test) || cfg!(debug_assertions) {
        let package_dir = env!("CARGO_MANIFEST_DIR");
        let package_dir = std::path::Path::new(package_dir);
        std::env::set_current_dir(package_dir)?;
    }
    Ok(())
}
114
crates/recorder/src/utils/db.rs
Normal file
@ -0,0 +1,114 @@
use sea_orm::{
    sea_query::{Expr, InsertStatement, IntoIden, Query, SimpleExpr},
    ActiveModelTrait, ActiveValue, ColumnTrait, ConnectionTrait, EntityName, EntityTrait,
    FromQueryResult, Iterable, SelectModel, SelectorRaw, TryGetable,
};

use crate::migrations::{defs::GeneralIds, ColumnRef};

#[derive(FromQueryResult)]
pub(crate) struct OnlyIdsModel<Id>
where
    Id: TryGetable,
{
    pub id: Id,
}

pub(crate) async fn insert_many_with_returning_columns<M, D, V, T, F>(
    db: &D,
    insert_values: impl IntoIterator<Item = V>,
    returning_columns: impl IntoIterator<Item = T>,
    extra_config: F,
) -> eyre::Result<Vec<M>>
where
    D: ConnectionTrait,
    V: ActiveModelTrait,
    T: Into<SimpleExpr>,
    F: FnOnce(&mut InsertStatement),
    M: FromQueryResult,
{
    let db_backend = db.get_database_backend();
    assert!(
        db_backend.support_returning(),
        "db backend must support returning!"
    );
    let ent = V::Entity::default();
    let mut insert = Query::insert();
    let insert_statement = insert
        .into_table(ent.table_ref())
        .returning(Query::returning().exprs(returning_columns));

    {
        extra_config(insert_statement);
    }

    for new_item in insert_values {
        let mut columns = vec![];
        let mut values = vec![];
        for c in <V::Entity as EntityTrait>::Column::iter() {
            let av = new_item.get(c);
            match av {
                ActiveValue::Set(value) => {
                    values.push(c.save_as(Expr::val(value)));
                    columns.push(c);
                }
                ActiveValue::Unchanged(value) => {
                    values.push(c.save_as(Expr::val(value)));
                    columns.push(c);
                }
                _ => {}
            }
        }
        insert_statement.columns(columns);
        insert_statement.values(values)?;
    }

    let result = SelectorRaw::<SelectModel<M>>::from_statement(db_backend.build(insert_statement))
        .all(db)
        .await?;

    Ok(result)
}

pub(crate) async fn insert_many_with_returning_all<D, V, F>(
    db: &D,
    insert_values: impl IntoIterator<Item = V>,
    extra_config: F,
) -> eyre::Result<Vec<<V::Entity as EntityTrait>::Model>>
where
    D: ConnectionTrait,
    V: ActiveModelTrait,
    F: FnOnce(&mut InsertStatement),
{
    let result: Vec<<V::Entity as EntityTrait>::Model> = insert_many_with_returning_columns(
        db,
        insert_values,
        <V::Entity as EntityTrait>::Column::iter().map(|c| c.select_as(Expr::col(c))),
        extra_config,
    )
    .await?;

    Ok(result)
}

pub(crate) async fn insert_many_with_returning_id<D, V, F, I>(
    db: &D,
    insert_values: impl IntoIterator<Item = V>,
    extra_config: F,
) -> eyre::Result<Vec<OnlyIdsModel<I>>>
where
    D: ConnectionTrait,
    V: ActiveModelTrait,
    F: FnOnce(&mut InsertStatement),
    I: TryGetable,
{
    let result: Vec<OnlyIdsModel<I>> = insert_many_with_returning_columns(
        db,
        insert_values,
        [Expr::col(ColumnRef::Column(GeneralIds::Id.into_iden()))],
        extra_config,
    )
    .await?;

    Ok(result)
}
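The `extra_config` hook hands the caller the raw `InsertStatement` before it is built, so statement-level tweaks such as an ON CONFLICT clause can be attached. A hypothetical call site (the `episodes` entity and its `Url` column are placeholders, not part of this diff):

use sea_orm::sea_query::OnConflict;

let ids: Vec<OnlyIdsModel<i32>> = insert_many_with_returning_id(
    db,
    new_episode_models,
    |stmt| {
        // Skip rows whose unique url already exists instead of erroring.
        stmt.on_conflict(OnConflict::column(episodes::Column::Url).do_nothing().to_owned());
    },
)
.await?;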
5
crates/recorder/src/utils/mod.rs
Normal file
@ -0,0 +1,5 @@
pub mod cli;
pub mod db;

#[cfg(test)]
pub mod test;
20
crates/recorder/src/utils/test.rs
Normal file
@ -0,0 +1,20 @@
use std::path::Path;

pub fn load_test_env() -> Result<(), dotenv::Error> {
    let package_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
    let env_files = vec![
        package_dir.join("configs/test.local.env"),
        package_dir.join("configs/test.env"),
    ];
    for env_file in env_files {
        if env_file.exists() {
            dotenv::from_path(env_file)?;
            break;
        }
    }
    Ok(())
}

pub fn load_test_env_panic() {
    load_test_env().expect("failed to load test env")
}
@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize};

use crate::models::entities::subscribers;
use crate::models::subscribers;

#[derive(Debug, Deserialize, Serialize)]
pub struct CurrentResponse {
42
crates/recorder/src/workers/collect.rs
Normal file
@ -0,0 +1,42 @@
use loco_rs::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::info;
|
||||
|
||||
use crate::models::bangumi;
|
||||
|
||||
pub struct CollectHistoryEpisodesWorker {
|
||||
pub ctx: AppContext,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub enum CollectHistoryEpisodesWorkerArgs {
|
||||
CollectFromBangumiEntity(),
|
||||
}
|
||||
|
||||
impl CollectHistoryEpisodesWorker {
|
||||
pub async fn collect_history_episodes(bangumi: &bangumi::Model, _only_season: bool) {
|
||||
info!(
|
||||
"Start collecting {} season {}...",
|
||||
bangumi.official_title, bangumi.season
|
||||
);
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl worker::AppWorker<CollectHistoryEpisodesWorkerArgs> for CollectHistoryEpisodesWorker {
|
||||
fn build(ctx: &AppContext) -> Self {
|
||||
Self { ctx: ctx.clone() }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl worker::Worker<CollectHistoryEpisodesWorkerArgs> for CollectHistoryEpisodesWorker {
|
||||
async fn perform(&self, _args: CollectHistoryEpisodesWorkerArgs) -> worker::Result<()> {
|
||||
println!("================================================");
|
||||
|
||||
// let db = &self.ctx.db;
|
||||
|
||||
println!("================================================");
|
||||
Ok(())
|
||||
}
|
||||
}
|
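With the AppWorker impl above, enqueueing this job could look roughly like the following sketch, assuming loco-rs's perform_later helper on AppWorker; the call site itself is hypothetical.

// Hypothetical call site, e.g. inside a controller or scheduled task:
CollectHistoryEpisodesWorker::perform_later(
    &ctx,
    CollectHistoryEpisodesWorkerArgs::CollectFromBangumiEntity(),
)
.await?;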
@ -1 +1,4 @@
pub mod subscription_worker;
pub mod collect;
pub mod rename;
pub mod subscription;
pub mod torrent;
0 crates/recorder/src/workers/rename.rs Normal file
@ -20,11 +20,11 @@ impl worker::AppWorker<SubscriptionWorkerArgs> for SubscriptionWorker {

#[async_trait]
impl worker::Worker<SubscriptionWorkerArgs> for SubscriptionWorker {
    async fn perform(&self, args: SubscriptionWorkerArgs) -> worker::Result<()> {
    async fn perform(&self, _args: SubscriptionWorkerArgs) -> worker::Result<()> {
        println!("================================================");

        let db = &self.ctx.db;
        let storage = &self.ctx.storage;
        // let db = &self.ctx.db;
        // let storage = &self.ctx.storage;

        println!("================================================");
        Ok(())
0 crates/recorder/src/workers/torrent.rs Normal file
@ -1 +1,2 @@
mod subscribers;
mod subscriptions;
@ -1,7 +0,0 @@
---
source: tests/models/subscribers.rs
expression: non_existing_subscriber_results
---
Err(
    EntityNotFound,
)
@ -0,0 +1,13 @@
---
source: tests/models/subscription.rs
expression: existing_subscription
---
Ok(
    Model {
        created_at: 2023-11-12T12:34:56.789,
        updated_at: 2023-11-12T12:34:56.789,
        id: 1,
        pid: "11111111-1111-1111-1111-111111111111",
        display_name: "user1"
    },
)
@ -0,0 +1,6 @@
---
source: crates/recorder/tests/models/subscriptions.rs
assertion_line: 55
expression: a
---
1
@ -7,7 +7,7 @@ macro_rules! configure_insta {
    ($($expr:expr),*) => {
        let mut settings = insta::Settings::clone_current();
        settings.set_prepend_module_to_snapshot(false);
        settings.set_snapshot_suffix("users");
        settings.set_snapshot_suffix("subscribers");
        let _guard = settings.bind_to_scope();
    };
}
62 crates/recorder/tests/models/subscriptions.rs Normal file
@ -0,0 +1,62 @@
// use insta::assert_debug_snapshot;
use loco_rs::{app::Hooks, testing};
use recorder::{
    app::App,
    models::{
        subscribers::{self},
        subscriptions,
    },
};
use sea_orm::{ActiveModelTrait, TryIntoModel};
use serial_test::serial;

macro_rules! configure_insta {
    ($($expr:expr),*) => {
        let mut settings = insta::Settings::clone_current();
        settings.set_prepend_module_to_snapshot(false);
        settings.set_snapshot_suffix("subscriptions");
        let _guard = settings.bind_to_scope();
    };
}

#[tokio::test]
#[serial]
async fn can_pull_subscription() {
    configure_insta!();

    let boot = testing::boot_test::<App>().await.unwrap();
    App::init_logger(&boot.app_context.config, &boot.app_context.environment).unwrap();
    testing::seed::<App>(&boot.app_context.db).await.unwrap();
    let db = &boot.app_context.db;

    let create_rss = serde_json::from_str(
        r#"{
            "rss_link": "https://mikanani.me/RSS/Bangumi?bangumiId=3271&subgroupid=370",
            "display_name": "Mikan Project - 我心里危险的东西 第二季",
            "aggregate": false,
            "enabled": true,
            "category": "mikan"
        }"#,
    )
    .expect("should parse create rss dto from json");

    let subscriber = subscribers::Model::find_by_pid(db, subscribers::ROOT_SUBSCRIBER_NAME)
        .await
        .expect("should find subscriber");

    let subscription = subscriptions::ActiveModel::from_create_dto(create_rss, subscriber.id);

    let subscription = subscription
        .save(&boot.app_context.db)
        .await
        .expect("should save subscription")
        .try_into_model()
        .expect("should convert to model");

    subscription
        .pull_one(&boot.app_context, &subscriber)
        .await
        .expect("should pull subscription");

    // assert_debug_snapshot!(a);
}
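Usage note: since the test is marked #[serial] and boots the full app against the seeded database, it is usually run by name, e.g. cargo test -p recorder can_pull_subscription (cargo's standard test-name filter; the exact invocation depends on the local setup).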