Compare commits

...

9 Commits

83 changed files with 5404 additions and 2151 deletions

View File

@ -40,6 +40,7 @@
}
],
"rust-analyzer.cargo.features": "all",
"rust-analyzer.testExplorer": true
// https://github.com/rust-lang/rust/issues/141540
// "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
// "rust-analyzer.check.extraEnv": {

Cargo.lock generated (1648 changes)

File diff suppressed because it is too large

View File

@ -26,7 +26,6 @@ util-derive = { path = "./packages/util-derive" }
fetch = { path = "./packages/fetch" }
downloader = { path = "./packages/downloader" }
recorder = { path = "./apps/recorder" }
proxy = { path = "./apps/proxy" }
reqwest = { version = "0.12.20", features = [
"charset",
@ -62,11 +61,30 @@ regex = "1.11"
lazy_static = "1.5"
axum = { version = "0.8.3", features = ["macros"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
axum-extra = "0.10"
axum-extra = { version = "0.10", features = ["typed-header"] }
mockito = { version = "1.6.1" }
convert_case = "0.8"
color-eyre = "0.6.5"
inquire = "0.7.5"
image = "0.25.6"
uuid = { version = "1.6.0", features = ["v4"] }
maplit = "1.0.2"
once_cell = "1.20.2"
rand = "0.9.1"
rust_decimal = "1.37.2"
base64 = "0.22.1"
nom = "8.0.0"
percent-encoding = "2.3.1"
num-traits = "0.2.19"
http = "1.2.0"
async-stream = "0.3.6"
serde_variant = "0.1.3"
tracing-appender = "0.2.3"
clap = "4.5.40"
ipnetwork = "0.21.1"
typed-builder = "0.21.0"
nanoid = "0.4.0"
webp = "0.3.0"
[patch.crates-io]
seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }

View File

@ -1 +1 @@
^https://mikanani.me/*** http://127.0.0.1:5005/$1
^https://mikanani.me/*** http://127.0.0.1:5005/$1 excludeFilter://^**/***.svg excludeFilter://^**/***.css excludeFilter://^**/***.js

View File

@ -14,7 +14,7 @@ path = "src/bin/main.rs"
required-features = []
[features]
default = []
default = ["jxl"]
playground = ["dep:inquire", "dep:color-eyre"]
testcontainers = [
"dep:testcontainers",
@ -23,6 +23,7 @@ testcontainers = [
"downloader/testcontainers",
"testcontainers-modules/postgres",
]
jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
[dependencies]
downloader = { workspace = true }
@ -55,6 +56,28 @@ moka = { workspace = true }
chrono = { workspace = true }
tracing-subscriber = { workspace = true }
mockito = { workspace = true }
color-eyre = { workspace = true, optional = true }
inquire = { workspace = true, optional = true }
convert_case = { workspace = true }
image = { workspace = true }
uuid = { workspace = true }
maplit = { workspace = true }
once_cell = { workspace = true }
rand = { workspace = true }
rust_decimal = { workspace = true }
base64 = { workspace = true }
nom = { workspace = true }
percent-encoding = { workspace = true }
num-traits = { workspace = true }
http = { workspace = true }
async-stream = { workspace = true }
serde_variant = { workspace = true }
tracing-appender = { workspace = true }
clap = { workspace = true }
ipnetwork = { workspace = true }
typed-builder = { workspace = true }
nanoid = { workspace = true }
webp = { workspace = true }
sea-orm = { version = "1.1", features = [
"sqlx-sqlite",
@ -64,19 +87,13 @@ sea-orm = { version = "1.1", features = [
"debug-print",
] }
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
uuid = { version = "1.6.0", features = ["v4"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
rss = "2"
fancy-regex = "0.14"
maplit = "1.0.2"
lightningcss = "1.0.0-alpha.66"
html-escape = "0.2.13"
opendal = { version = "0.53", features = ["default", "services-fs"] }
zune-image = "0.4.15"
once_cell = "1.20.2"
scraper = "0.23"
log = "0.4"
async-graphql = { version = "7", features = ["dynamic-schema"] }
async-graphql-axum = "7"
seaography = { version = "1.1", features = [
@ -89,7 +106,6 @@ seaography = { version = "1.1", features = [
"with-postgres-array",
"with-json-as-scalar",
] }
base64 = "0.22.1"
tower = "0.5.2"
tower-http = { version = "0.6", features = [
"trace",
@ -104,35 +120,26 @@ tower-http = { version = "0.6", features = [
tera = "1.20.0"
openidconnect = { version = "4" }
dotenvy = "0.15.7"
http = "1.2.0"
async-stream = "0.3.6"
serde_variant = "0.1.3"
tracing-appender = "0.2.3"
clap = "4.5.31"
ipnetwork = "0.21.1"
typed-builder = "0.21.0"
apalis = { version = "0.7", features = [
"limit",
"tracing",
"catch-panic",
"retry",
] }
jpegxl-rs = { version = "0.11.2", optional = true }
jpegxl-sys = { version = "0.11.2", optional = true }
apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
apalis-sql = { version = "0.7", features = ["postgres"] }
cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
rand = "0.9.1"
rust_decimal = "1.37.1"
reqwest_cookie_store = "0.8.0"
nanoid = "0.4.0"
jwtk = "0.4.0"
color-eyre = { workspace = true, optional = true }
inquire = { workspace = true, optional = true }
percent-encoding = "2.3.1"
mime_guess = "2.0.5"
icu_properties = "2.0.1"
icu = "2.0.0"
tracing-tree = "0.4.0"
num_cpus = "1.17.0"
headers-accept = "0.1.4"
[dev-dependencies]
inquire = { workspace = true }
color-eyre = { workspace = true }
serial_test = "3"
insta = { version = "1", features = ["redactions", "toml", "filters"] }
rstest = "0.25"
ctor = "0.4.0"
inquire = { workspace = true }
color-eyre = { workspace = true }

View File

@ -26,25 +26,25 @@ host = '{{ get_env(name="HOST", default="localhost") }}'
enable = true
# Generate a unique request ID and enhance logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
[server.middleware.request_id]
[server.middlewares.request_id]
enable = true
[server.middleware.logger]
[server.middlewares.logger]
enable = true
# When your code panics, the request still returns a 500 status code.
[server.middleware.catch_panic]
[server.middlewares.catch_panic]
enable = true
# Timeout middleware for incoming requests. Requests that take longer than the configured duration will be cut off and a 408 status code will be returned.
[server.middleware.timeout_request]
[server.middlewares.timeout_request]
enable = false
# Duration time in milliseconds.
timeout = 5000
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://loco.rs
# - https://konobangu.com
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
@ -53,7 +53,10 @@ timeout = 5000
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
[server.middleware.cors]
[server.middlewares.cors]
enable = true
[server.middlewares.compression]
enable = true
# Database Configuration

View File

@ -21,6 +21,9 @@ pub struct MainCliArgs {
/// Explicit environment
#[arg(short, long)]
environment: Option<Environment>,
#[arg(long)]
graceful_shutdown: Option<bool>,
}
pub struct AppBuilder {
@ -28,6 +31,7 @@ pub struct AppBuilder {
config_file: Option<String>,
working_dir: String,
environment: Environment,
pub graceful_shutdown: bool,
}
impl AppBuilder {
@ -61,7 +65,8 @@ impl AppBuilder {
builder = builder
.config_file(args.config_file)
.dotenv_file(args.dotenv_file)
.environment(environment);
.environment(environment)
.graceful_shutdown(args.graceful_shutdown.unwrap_or(true));
Ok(builder)
}
@ -118,6 +123,12 @@ impl AppBuilder {
ret
}
pub fn graceful_shutdown(self, graceful_shutdown: bool) -> Self {
let mut ret = self;
ret.graceful_shutdown = graceful_shutdown;
ret
}
pub fn dotenv_file(self, dotenv_file: Option<String>) -> Self {
let mut ret = self;
ret.dotenv_file = dotenv_file;
@ -141,6 +152,7 @@ impl Default for AppBuilder {
dotenv_file: None,
config_file: None,
working_dir: String::from("."),
graceful_shutdown: true,
}
}
}
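
A minimal usage sketch (not part of the diff) of the new graceful-shutdown switch, assuming the AppBuilder API shown above; the CLI equivalent would pass `--graceful-shutdown false`:

// Sketch: opting out of graceful shutdown for a one-shot run.
// Assumes the recorder crate's AppBuilder from the hunk above; the fn name is illustrative.
fn builder_for_one_shot_run() -> AppBuilder {
    // Default sets graceful_shutdown: true (see the Default impl above),
    // so only the opt-out needs to be spelled out.
    AppBuilder::default().graceful_shutdown(false)
}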

View File

@ -11,6 +11,7 @@ leaky_bucket_initial_tokens = 0
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500
[mikan.http_client.proxy]
[mikan.http_client.proxy.headers]
@ -26,3 +27,5 @@ complexity_limit = inf
[task]
[message]
[media]

View File

@ -11,8 +11,8 @@ use super::env::Environment;
use crate::{
auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
logger::LoggerConfig, message::MessageConfig, storage::StorageConfig, task::TaskConfig,
web::WebServerConfig,
logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
task::TaskConfig, web::WebServerConfig,
};
const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
@ -27,6 +27,7 @@ pub struct AppConfig {
pub mikan: MikanConfig,
pub crypto: CryptoConfig,
pub graphql: GraphQLConfig,
pub media: MediaConfig,
pub logger: LoggerConfig,
pub database: DatabaseConfig,
pub task: TaskConfig,

View File

@ -4,16 +4,9 @@ use tokio::sync::OnceCell;
use super::{Environment, config::AppConfig};
use crate::{
auth::AuthService,
cache::CacheService,
crypto::CryptoService,
database::DatabaseService,
errors::RecorderResult,
extract::mikan::MikanClient,
graphql::GraphQLService,
logger::LoggerService,
message::MessageService,
storage::{StorageService, StorageServiceTrait},
auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
task::TaskService,
};
@ -25,12 +18,13 @@ pub trait AppContextTrait: Send + Sync + Debug {
fn mikan(&self) -> &MikanClient;
fn auth(&self) -> &AuthService;
fn graphql(&self) -> &GraphQLService;
fn storage(&self) -> &dyn StorageServiceTrait;
fn storage(&self) -> &StorageService;
fn working_dir(&self) -> &String;
fn environment(&self) -> &Environment;
fn crypto(&self) -> &CryptoService;
fn task(&self) -> &TaskService;
fn message(&self) -> &MessageService;
fn media(&self) -> &MediaService;
}
pub struct AppContext {
@ -45,6 +39,7 @@ pub struct AppContext {
working_dir: String,
environment: Environment,
message: MessageService,
media: MediaService,
task: OnceCell<TaskService>,
graphql: OnceCell<GraphQLService>,
}
@ -65,6 +60,7 @@ impl AppContext {
let auth = AuthService::from_conf(config.auth).await?;
let mikan = MikanClient::from_config(config.mikan).await?;
let crypto = CryptoService::from_config(config.crypto).await?;
let media = MediaService::from_config(config.media).await?;
let ctx = Arc::new(AppContext {
config: config_cloned,
@ -78,6 +74,7 @@ impl AppContext {
working_dir: working_dir.to_string(),
crypto,
message,
media,
task: OnceCell::new(),
graphql: OnceCell::new(),
});
@ -126,7 +123,7 @@ impl AppContextTrait for AppContext {
fn graphql(&self) -> &GraphQLService {
self.graphql.get().expect("graphql should be set")
}
fn storage(&self) -> &dyn StorageServiceTrait {
fn storage(&self) -> &StorageService {
&self.storage
}
fn working_dir(&self) -> &String {
@ -144,4 +141,7 @@ impl AppContextTrait for AppContext {
fn message(&self) -> &MessageService {
&self.message
}
fn media(&self) -> &MediaService {
&self.media
}
}

View File

@ -51,20 +51,23 @@ impl App {
let mut router = Router::<Arc<dyn AppContextTrait>>::new();
let (graphql_c, oidc_c, metadata_c) = futures::try_join!(
let (graphql_c, oidc_c, metadata_c, static_c) = futures::try_join!(
controller::graphql::create(context.clone()),
controller::oidc::create(context.clone()),
controller::metadata::create(context.clone())
controller::metadata::create(context.clone()),
controller::r#static::create(context.clone()),
)?;
for c in [graphql_c, oidc_c, metadata_c] {
for c in [graphql_c, oidc_c, metadata_c, static_c] {
router = c.apply_to(router);
}
let middlewares = default_middleware_stack(context.clone());
for mid in middlewares {
router = mid.apply(router)?;
tracing::info!(name = mid.name(), "+middleware");
if mid.is_enabled() {
router = mid.apply(router)?;
tracing::info!(name = mid.name(), "+middleware");
}
}
let router = router
@ -73,26 +76,40 @@ impl App {
let task = context.task();
let graceful_shutdown = self.builder.graceful_shutdown;
tokio::try_join!(
async {
axum::serve(listener, router)
.with_graceful_shutdown(async move {
Self::shutdown_signal().await;
tracing::info!("axum shutting down...");
})
.await?;
let axum_serve = axum::serve(listener, router);
if graceful_shutdown {
axum_serve
.with_graceful_shutdown(async move {
Self::shutdown_signal().await;
tracing::info!("axum shutting down...");
})
.await?;
} else {
axum_serve.await?;
}
Ok::<(), RecorderError>(())
},
async {
let monitor = task.setup_monitor().await?;
monitor
.run_with_signal(async move {
Self::shutdown_signal().await;
tracing::info!("apalis shutting down...");
Ok(())
})
.await?;
{
let monitor = task.setup_monitor().await?;
if graceful_shutdown {
monitor
.run_with_signal(async move {
Self::shutdown_signal().await;
tracing::info!("apalis shutting down...");
Ok(())
})
.await?;
} else {
monitor.run().await?;
}
}
Ok::<(), RecorderError>(())
},

View File

@ -11,13 +11,14 @@ use openidconnect::{
};
use serde::{Deserialize, Serialize};
use snafu::prelude::*;
use util::OptDynErr;
use crate::models::auth::AuthType;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum AuthError {
#[snafu(display("Permission denied"))]
PermissionError,
#[snafu(display("Not support auth method"))]
NotSupportAuthMethod {
supported: Vec<AuthType>,
@ -93,12 +94,6 @@ pub enum AuthError {
column: String,
context_path: String,
},
#[snafu(display("GraphQL permission denied since {field}"))]
GraphqlStaticPermissionError {
#[snafu(source)]
source: OptDynErr,
field: String,
},
}
impl AuthError {

View File

@ -5,8 +5,7 @@ use axum::{
response::{IntoResponse, Response},
};
use fetch::{FetchError, HttpClientError, reqwest, reqwest_middleware};
use http::StatusCode;
use serde::{Deserialize, Deserializer, Serialize};
use http::{HeaderMap, StatusCode};
use snafu::Snafu;
use crate::{
@ -19,6 +18,24 @@ use crate::{
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RecorderError {
#[snafu(display(
"HTTP {status} {reason}, source = {source:?}",
status = status,
reason = status.canonical_reason().unwrap_or("Unknown")
))]
HttpResponseError {
status: StatusCode,
headers: Option<HeaderMap>,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(transparent)]
ImageError { source: image::ImageError },
#[cfg(feature = "jxl")]
#[snafu(transparent)]
JxlEncodeError { source: jpegxl_rs::EncodeError },
#[snafu(transparent, context(false))]
HttpError { source: http::Error },
#[snafu(transparent, context(false))]
FancyRegexError {
#[snafu(source(from(fancy_regex::Error, Box::new)))]
@ -28,12 +45,10 @@ pub enum RecorderError {
NetAddrParseError { source: std::net::AddrParseError },
#[snafu(transparent)]
RegexError { source: regex::Error },
#[snafu(transparent)]
InvalidMethodError { source: http::method::InvalidMethod },
#[snafu(transparent)]
InvalidHeaderNameError {
source: http::header::InvalidHeaderName,
},
#[snafu(display("Invalid method"))]
InvalidMethodError,
#[snafu(display("Invalid header name"))]
InvalidHeaderNameError,
#[snafu(transparent)]
TracingAppenderInitError {
source: tracing_appender::rolling::InitError,
@ -72,10 +87,8 @@ pub enum RecorderError {
#[snafu(source(from(opendal::Error, Box::new)))]
source: Box<opendal::Error>,
},
#[snafu(transparent)]
InvalidHeaderValueError {
source: http::header::InvalidHeaderValue,
},
#[snafu(display("Invalid header value"))]
InvalidHeaderValueError,
#[snafu(transparent)]
HttpClientError { source: HttpClientError },
#[cfg(feature = "testcontainers")]
@ -103,7 +116,7 @@ pub enum RecorderError {
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Model Entity {entity} not found"))]
#[snafu(display("Model Entity {entity} not found or not belong to subscriber"))]
ModelEntityNotFound { entity: Cow<'static, str> },
#[snafu(transparent)]
FetchError { source: FetchError },
@ -123,9 +136,27 @@ pub enum RecorderError {
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
source: OptDynErr,
},
#[snafu(display("Invalid task id: {message}"))]
InvalidTaskId { message: String },
}
impl RecorderError {
pub fn from_status(status: StatusCode) -> Self {
Self::HttpResponseError {
status,
headers: None,
source: None.into(),
}
}
pub fn from_status_and_headers(status: StatusCode, headers: HeaderMap) -> Self {
Self::HttpResponseError {
status,
headers: Some(headers),
source: None.into(),
}
}
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
@ -175,10 +206,48 @@ impl snafu::FromString for RecorderError {
}
}
impl From<StatusCode> for RecorderError {
fn from(status: StatusCode) -> Self {
Self::HttpResponseError {
status,
headers: None,
source: None.into(),
}
}
}
impl From<(StatusCode, HeaderMap)> for RecorderError {
fn from((status, headers): (StatusCode, HeaderMap)) -> Self {
Self::HttpResponseError {
status,
headers: Some(headers),
source: None.into(),
}
}
}
impl IntoResponse for RecorderError {
fn into_response(self) -> Response {
match self {
Self::AuthError { source: auth_error } => auth_error.into_response(),
Self::HttpResponseError {
status,
headers,
source,
} => {
let message = source
.into_inner()
.map(|s| s.to_string())
.unwrap_or_else(|| {
String::from(status.canonical_reason().unwrap_or("Unknown"))
});
(
status,
headers,
Json::<StandardErrorResponse>(StandardErrorResponse::from(message)),
)
.into_response()
}
err => (
StatusCode::INTERNAL_SERVER_ERROR,
Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
@ -188,28 +257,6 @@ impl IntoResponse for RecorderError {
}
}
impl Serialize for RecorderError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
impl<'de> Deserialize<'de> for RecorderError {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(Self::Whatever {
message: s,
source: None.into(),
})
}
}
impl From<reqwest::Error> for RecorderError {
fn from(error: reqwest::Error) -> Self {
FetchError::from(error).into()
@ -222,4 +269,22 @@ impl From<reqwest_middleware::Error> for RecorderError {
}
}
impl From<http::header::InvalidHeaderValue> for RecorderError {
fn from(_error: http::header::InvalidHeaderValue) -> Self {
Self::InvalidHeaderValueError
}
}
impl From<http::header::InvalidHeaderName> for RecorderError {
fn from(_error: http::header::InvalidHeaderName) -> Self {
Self::InvalidHeaderNameError
}
}
impl From<http::method::InvalidMethod> for RecorderError {
fn from(_error: http::method::InvalidMethod) -> Self {
Self::InvalidMethodError
}
}
pub type RecorderResult<T> = Result<T, RecorderError>;
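
A small sketch (an assumption, not from the diff) of how the new HttpResponseError conversions can be used when forwarding an upstream failure; the function name is illustrative:

use http::{HeaderMap, StatusCode};

// Sketch: keep the upstream status and headers on the error via the new From impls.
fn check_upstream(status: StatusCode, headers: HeaderMap) -> RecorderResult<()> {
    if !status.is_success() {
        // Uses From<(StatusCode, HeaderMap)>; RecorderError::from_status(status)
        // would build the same variant without headers.
        return Err((status, headers).into());
    }
    Ok(())
}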

View File

@ -268,8 +268,8 @@ mod tests {
)
}
pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) {
let extname = Path::new(raw_name)
pub fn test_torrent_ep_parser(origin_name: &str, expected: &str) {
let extname = Path::new(origin_name)
.extension()
.map(|e| format!(".{e}"))
.unwrap_or_default()
@ -278,7 +278,7 @@ mod tests {
if extname == ".srt" || extname == ".ass" {
let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
let found_raw =
parse_episode_subtitle_meta_from_torrent(Path::new(raw_name), None, None);
parse_episode_subtitle_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {
@ -299,7 +299,8 @@ mod tests {
assert_eq!(expected, found);
} else {
let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
let found_raw = parse_episode_media_meta_from_torrent(Path::new(raw_name), None, None);
let found_raw =
parse_episode_media_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {

View File

@ -1,8 +1,5 @@
use std::collections::HashMap;
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use maplit::hashmap;
use regex::Regex;
const LANG_ZH_TW: &str = "zh-tw";
@ -34,40 +31,4 @@ lazy_static! {
(LANG_JP, vec!["jp", "jpn", ""]),
]
};
pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
hashmap! {
"" => 0,
"" => 1,
"" => 2,
"" => 3,
"" => 4,
"" => 5,
"" => 6,
"" => 7,
"" => 8,
"" => 9,
"" => 10,
"廿" => 20,
"" => 100,
"" => 1000,
"" => 0,
"" => 1,
"" => 2,
"" => 3,
"" => 4,
"" => 5,
"" => 6,
"" => 7,
"" => 8,
"" => 9,
"" => 10,
"" => 20,
"" => 100,
"" => 1000,
}
};
pub static ref ZH_NUM_RE: Regex =
Regex::new(r"[〇一二三四五六七八九十廿百千零壹贰叁肆伍陆柒捌玖拾念佰仟]").unwrap();
}

View File

@ -2,10 +2,6 @@ use url::Url;
pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
let mut image_url = base_url.join(image_src).ok()?;
if let Some((_, value)) = image_url.query_pairs().find(|(key, _)| key == "webp") {
image_url.set_query(Some(&format!("webp={value}")));
} else {
image_url.set_query(None);
}
image_url.set_query(None);
Some(image_url)
}
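
After this simplification the query string is always dropped. A small illustrative sketch of the expected behavior (the query parameter here is made up; the image path is one that appears elsewhere in this diff):

use url::Url;

// Sketch: the returned URL keeps only the joined path; any query is removed.
fn example() {
    let base = Url::parse("https://mikanani.me/").unwrap();
    let src = extract_image_src_from_str("/images/Bangumi/202309/5ce9fed1.jpg?width=400", &base);
    assert_eq!(
        src.map(|u| u.to_string()),
        Some("https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg".to_string())
    );
}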

View File

@ -5,6 +5,7 @@ use std::{
};
use async_graphql::{InputObject, SimpleObject};
use async_stream::try_stream;
use fetch::fetch_bytes;
use futures::{Stream, TryStreamExt, pin_mut, try_join};
use maplit::hashmap;
@ -292,17 +293,19 @@ impl SubscriptionTrait for MikanSeasonSubscription {
}
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
let rss_item_list = self
.get_rss_item_list_from_subsribed_url_rss_link(ctx.as_ref())
.await?;
let rss_item_stream = self.get_rss_item_stream_from_subsribed_url_rss_link(ctx.as_ref());
sync_mikan_feeds_from_rss_item_list(
ctx.as_ref(),
rss_item_list,
self.get_subscriber_id(),
self.get_subscription_id(),
)
.await?;
pin_mut!(rss_item_stream);
while let Some(rss_item_chunk_list) = rss_item_stream.try_next().await? {
sync_mikan_feeds_from_rss_item_list(
ctx.as_ref(),
rss_item_chunk_list,
self.get_subscriber_id(),
self.get_subscription_id(),
)
.await?;
}
Ok(())
}
@ -393,48 +396,53 @@ impl MikanSeasonSubscription {
)
}
#[tracing::instrument(err, skip(ctx))]
async fn get_rss_item_list_from_subsribed_url_rss_link(
fn get_rss_item_stream_from_subsribed_url_rss_link(
&self,
ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
let db = ctx.db();
) -> impl Stream<Item = RecorderResult<Vec<MikanRssEpisodeItem>>> {
try_stream! {
let subscribed_bangumi_list = bangumi::Entity::find()
.filter(
Condition::all()
.add(subscription_bangumi::Column::SubscriptionId.eq(self.subscription_id)),
)
.join_rev(
JoinType::InnerJoin,
subscription_bangumi::Relation::Bangumi.def(),
)
.all(db)
.await?;
let db = ctx.db();
let mut rss_item_list = vec![];
for subscribed_bangumi in subscribed_bangumi_list {
let rss_url = subscribed_bangumi
.rss_link
.with_whatever_context::<_, String, RecorderError>(|| {
format!(
"rss_link is required, subscription_id = {}, bangumi_name = {}",
self.subscription_id, subscribed_bangumi.display_name
)
})?;
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
let subscribed_bangumi_list = bangumi::Entity::find()
.filter(
Condition::all()
.add(subscription_bangumi::Column::SubscriptionId.eq(self.subscription_id)),
)
.join_rev(
JoinType::InnerJoin,
subscription_bangumi::Relation::Bangumi.def(),
)
.all(db)
.await?;
let channel = rss::Channel::read_from(&bytes[..])?;
for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssEpisodeItem::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}")
for subscribed_bangumi in subscribed_bangumi_list {
let rss_url = subscribed_bangumi
.rss_link
.with_whatever_context::<_, String, RecorderError>(|| {
format!(
"rss_link is required, subscription_id = {}, bangumi_name = {}",
self.subscription_id, subscribed_bangumi.display_name
)
})?;
rss_item_list.push(item);
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
let channel = rss::Channel::read_from(&bytes[..])?;
let mut rss_item_list = vec![];
for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssEpisodeItem::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}")
})?;
rss_item_list.push(item);
}
yield rss_item_list;
}
}
Ok(rss_item_list)
}
}
@ -548,13 +556,8 @@ mod tests {
subscriptions::{self, SubscriptionTrait},
},
test_utils::{
app::TestingAppContext,
crypto::build_testing_crypto_service,
database::build_testing_database_service,
mikan::{
MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential_form,
},
storage::build_testing_storage_service,
app::{TestingAppContext, TestingAppContextPreset},
mikan::{MikanMockServer, build_testing_mikan_credential_form},
tracing::try_init_testing_tracing,
},
};
@ -569,20 +572,11 @@ mod tests {
let mikan_base_url = mikan_server.base_url().clone();
let app_ctx = {
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
let db_service = build_testing_database_service(Default::default()).await?;
let crypto_service = build_testing_crypto_service().await?;
let storage_service = build_testing_storage_service().await?;
let app_ctx = TestingAppContext::builder()
.mikan(mikan_client)
.db(db_service)
.crypto(crypto_service)
.storage(storage_service)
.build();
Arc::new(app_ctx)
};
let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
mikan_base_url: mikan_base_url.to_string(),
database_config: None,
})
.await?;
Ok(TestingResources {
app_ctx,

View File

@ -28,7 +28,12 @@ use crate::{
MIKAN_YEAR_QUERY_KEY, MikanClient,
},
},
storage::{StorageContentCategory, StorageServiceTrait},
media::{
AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
EncodeWebpOptions,
},
storage::StorageContentCategory,
task::{OptimizeImageTask, SystemTask},
};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@ -738,48 +743,92 @@ pub async fn scrape_mikan_poster_data_from_image_url(
#[instrument(skip_all, fields(origin_poster_src_url = origin_poster_src_url.as_str()))]
pub async fn scrape_mikan_poster_meta_from_image_url(
mikan_client: &MikanClient,
storage_service: &dyn StorageServiceTrait,
ctx: &dyn AppContextTrait,
origin_poster_src_url: Url,
subscriber_id: i32,
) -> RecorderResult<MikanBangumiPosterMeta> {
if let Some(poster_src) = storage_service
.exists_object(
StorageContentCategory::Image,
subscriber_id,
Some(MIKAN_POSTER_BUCKET_KEY),
&origin_poster_src_url
.path()
.replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
)
.await?
{
return Ok(MikanBangumiPosterMeta {
let storage_service = ctx.storage();
let media_service = ctx.media();
let mikan_client = ctx.mikan();
let task_service = ctx.task();
let storage_path = storage_service.build_public_object_path(
StorageContentCategory::Image,
MIKAN_POSTER_BUCKET_KEY,
&origin_poster_src_url
.path()
.replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
);
let meta = if let Some(poster_src) = storage_service.exists(&storage_path).await? {
MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
poster_src: Some(poster_src.to_string()),
});
}
}
} else {
let poster_data =
scrape_mikan_poster_data_from_image_url(mikan_client, origin_poster_src_url.clone())
.await?;
let poster_data =
scrape_mikan_poster_data_from_image_url(mikan_client, origin_poster_src_url.clone())
let poster_str = storage_service
.write(storage_path.clone(), poster_data)
.await?;
let poster_str = storage_service
.store_object(
StorageContentCategory::Image,
subscriber_id,
Some(MIKAN_POSTER_BUCKET_KEY),
&origin_poster_src_url
.path()
.replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
poster_data,
)
.await?;
tracing::warn!(
poster_str = poster_str.to_string(),
"mikan poster meta extracted"
);
Ok(MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
poster_src: Some(poster_str.to_string()),
})
MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
poster_src: Some(poster_str.to_string()),
}
};
if meta.poster_src.is_some()
&& storage_path
.extension()
.is_some_and(|ext| media_service.is_legacy_image_format(ext))
{
let auto_optimize_formats = &media_service.config.auto_optimize_formats;
if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Webp) {
let webp_storage_path = storage_path.with_extension("webp");
if storage_service.exists(&webp_storage_path).await?.is_none() {
task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
source_path: storage_path.clone().to_string(),
target_path: webp_storage_path.to_string(),
format_options: EncodeImageOptions::Webp(EncodeWebpOptions::default()),
}))
.await?;
}
}
if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Avif) {
let avif_storage_path = storage_path.with_extension("avif");
if storage_service.exists(&avif_storage_path).await?.is_none() {
task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
source_path: storage_path.clone().to_string(),
target_path: avif_storage_path.to_string(),
format_options: EncodeImageOptions::Avif(EncodeAvifOptions::default()),
}))
.await?;
}
}
if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Jxl) {
let jxl_storage_path = storage_path.with_extension("jxl");
if storage_service.exists(&jxl_storage_path).await?.is_none() {
task_service
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
source_path: storage_path.clone().to_string(),
target_path: jxl_storage_path.to_string(),
format_options: EncodeImageOptions::Jxl(EncodeJxlOptions::default()),
}))
.await?;
}
}
}
Ok(meta)
}
pub fn extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(
@ -1006,24 +1055,23 @@ pub async fn scrape_mikan_bangumi_meta_list_from_season_flow_url(
#[cfg(test)]
mod test {
#![allow(unused_variables)]
use std::{fs, sync::Arc};
use std::{fs, io::Cursor, sync::Arc};
use futures::StreamExt;
use image::{ImageFormat, ImageReader};
use rstest::{fixture, rstest};
use tracing::Level;
use url::Url;
use zune_image::{codecs::ImageFormat, image::Image};
use super::*;
use crate::test_utils::{
app::TestingAppContext,
app::{TestingAppContext, TestingAppContextPreset},
crypto::build_testing_crypto_service,
database::build_testing_database_service,
mikan::{
MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential,
build_testing_mikan_credential_form,
},
storage::build_testing_storage_service,
tracing::try_init_testing_tracing,
};
@ -1048,12 +1096,14 @@ mod test {
scrape_mikan_poster_data_from_image_url(&mikan_client, bangumi_poster_url).await?;
resources_mock.shared_resource_mock.expect(1);
let image = Image::read(bgm_poster_data.to_vec(), Default::default());
let image = {
let c = Cursor::new(bgm_poster_data);
ImageReader::new(c)
};
let image_format = image.with_guessed_format().ok().and_then(|i| i.format());
assert!(
image.is_ok_and(|img| img
.metadata()
.get_image_format()
.is_some_and(|fmt| matches!(fmt, ImageFormat::JPEG))),
image_format.is_some_and(|fmt| matches!(fmt, ImageFormat::Jpeg)),
"should start with valid jpeg data magic number"
);
@ -1067,39 +1117,47 @@ mod test {
let mikan_base_url = mikan_server.base_url().clone();
let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
mikan_base_url: mikan_base_url.to_string(),
database_config: None,
})
.await?;
let resources_mock = mikan_server.mock_resources_with_doppel();
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
let storage_service = build_testing_storage_service().await?;
let storage_operator = storage_service.get_operator()?;
let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;
let bgm_poster = scrape_mikan_poster_meta_from_image_url(
&mikan_client,
&storage_service,
bangumi_poster_url,
1,
)
.await?;
let bgm_poster =
scrape_mikan_poster_meta_from_image_url(app_ctx.as_ref(), bangumi_poster_url).await?;
resources_mock.shared_resource_mock.expect(1);
let storage_fullname = storage_service.get_fullname(
let storage_service = app_ctx.storage();
let storage_fullname = storage_service.build_public_object_path(
StorageContentCategory::Image,
1,
Some(MIKAN_POSTER_BUCKET_KEY),
MIKAN_POSTER_BUCKET_KEY,
"202309/5ce9fed1.jpg",
);
let storage_fullename_str = storage_fullname.as_str();
assert!(storage_operator.exists(storage_fullename_str).await?);
assert!(
storage_service.exists(&storage_fullname).await?.is_some(),
"storage_fullename_str = {}, list public = {:?}",
&storage_fullname,
storage_service.list_public().await?
);
let expected_data =
fs::read("tests/resources/mikan/doppel/images/Bangumi/202309/5ce9fed1.jpg")?;
let found_data = storage_operator.read(storage_fullename_str).await?.to_vec();
assert_eq!(expected_data, found_data);
let bgm_poster_data = storage_service.read(&storage_fullname).await?;
let image = {
let c = Cursor::new(bgm_poster_data.to_vec());
ImageReader::new(c)
};
let image_format = image.with_guessed_format().ok().and_then(|i| i.format());
assert!(
image_format.is_some_and(|fmt| matches!(fmt, ImageFormat::Jpeg)),
"should start with valid jpeg data magic number"
);
Ok(())
}

View File

@ -1,7 +1,7 @@
pub mod bittorrent;
pub mod defs;
pub mod html;
pub mod http;
pub mod media;
pub mod mikan;
pub mod rawname;
pub mod bittorrent;
pub mod origin;

File diff suppressed because it is too large

View File

@ -1,5 +0,0 @@
pub mod parser;
pub use parser::{
RawEpisodeMeta, extract_episode_meta_from_raw_name, extract_season_from_title_body,
};

View File

@ -1,845 +0,0 @@
/**
* @TODO: rewrite with nom
*/
use std::borrow::Cow;
use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::whatever;
use crate::{
errors::RecorderResult,
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
};
const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";
lazy_static! {
static ref TITLE_RE: Regex = Regex::new(
r#"(.*|\[.*])( -? \d+|\[\d+]|\[\d+.?[vV]\d]|第\d+[话話集]|\[第?\d+[话話集]]|\[\d+.?END]|[Ee][Pp]?\d+|\[\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*[话話集]\s*])(.*)"#
).unwrap();
static ref EP_COLLECTION_RE:Regex = Regex::new(r#"\[?\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*合?[话話集]\s*]?"#).unwrap();
static ref MOVIE_TITLE_RE:Regex = Regex::new(r#"(.*|\[.*])(剧场版|[Mm]ovie|电影)(.*?)$"#).unwrap();
static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap();
static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap();
static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
static ref PREFIX_RE: Regex =
Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
static ref MOVIE_SEASON_EXTRACT_RE: Regex = Regex::new(r"剧场版|Movie|电影").unwrap();
static ref MAIN_TITLE_PREFIX_PROCESS_RE1: Regex = Regex::new(r"新番|月?番").unwrap();
static ref MAIN_TITLE_PREFIX_PROCESS_RE2: Regex = Regex::new(r"[港澳台]{1,3}地区").unwrap();
static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE: Regex = Regex::new(r"\[.+\]").unwrap();
static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1: Regex = Regex::new(r"^.*?\[").unwrap();
static ref SEASON_EXTRACT_SEASON_ALL_RE: Regex = Regex::new(r"S\d{1,2}|Season \d{1,2}|[第].[季期]|1st|2nd|3rd|\d{1,2}th").unwrap();
static ref SEASON_EXTRACT_SEASON_EN_PREFIX_RE: Regex = Regex::new(r"Season|S").unwrap();
static ref SEASON_EXTRACT_SEASON_EN_NTH_RE: Regex = Regex::new(r"1st|2nd|3rd|\d{1,2}th").unwrap();
static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_RE: Regex = Regex::new(r"[第 ].*[季期(部分)]|部分").unwrap();
static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE: Regex = Regex::new(r"[第季期 ]").unwrap();
static ref NAME_EXTRACT_REMOVE_RE: Regex = Regex::new(r"[(]仅限[港澳台]{1,3}地区[)]").unwrap();
static ref NAME_EXTRACT_SPLIT_RE: Regex = Regex::new(r"/|\s{2}|-\s{2}|\]\[").unwrap();
static ref NAME_EXTRACT_REPLACE_ADHOC1_RE: Regex = Regex::new(r"([\p{scx=Han}\s\(\)]{5,})_([a-zA-Z]{2,})").unwrap();
static ref NAME_JP_TEST: Regex = Regex::new(r"[\p{scx=Hira}\p{scx=Kana}]{2,}").unwrap();
static ref NAME_ZH_TEST: Regex = Regex::new(r"[\p{scx=Han}]{2,}").unwrap();
static ref NAME_EN_TEST: Regex = Regex::new(r"[a-zA-Z]{3,}").unwrap();
static ref TAGS_EXTRACT_SPLIT_RE: Regex = Regex::new(r"[\[\]()_]").unwrap();
static ref CLEAR_SUB_RE: Regex = Regex::new(r"_MP4|_MKV").unwrap();
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
pub struct RawEpisodeMeta {
pub name_en: Option<String>,
pub name_en_no_season: Option<String>,
pub name_jp: Option<String>,
pub name_jp_no_season: Option<String>,
pub name_zh: Option<String>,
pub name_zh_no_season: Option<String>,
pub season: i32,
pub season_raw: Option<String>,
pub episode_index: i32,
pub subtitle: Option<String>,
pub source: Option<String>,
pub fansub: Option<String>,
pub resolution: Option<String>,
}
fn extract_fansub(raw_name: &str) -> Option<&str> {
let mut groups = EN_BRACKET_SPLIT_RE.splitn(raw_name, 3);
groups.nth(1)
}
fn replace_ch_bracket_to_en(raw_name: &str) -> String {
raw_name.replace('【', "[").replace('】', "]")
}
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RecorderResult<String> {
let raw_without_fansub = if let Some(fansub) = fansub {
let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
fan_sub_re.replace_all(title_body, "")
} else {
Cow::Borrowed(title_body)
};
let raw_with_prefix_replaced = PREFIX_RE.replace_all(&raw_without_fansub, "/");
let mut arg_group = raw_with_prefix_replaced
.split('/')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>();
if arg_group.len() == 1 {
arg_group = arg_group.first_mut().unwrap().split(' ').collect();
}
let mut raw = raw_without_fansub.to_string();
for arg in arg_group.iter() {
if (arg_group.len() <= 5 && MAIN_TITLE_PREFIX_PROCESS_RE1.is_match(arg))
|| (MAIN_TITLE_PREFIX_PROCESS_RE2.is_match(arg))
{
let sub = Regex::new(&format!(".{arg}."))?;
raw = sub.replace_all(&raw, "").to_string();
}
}
if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw)
&& m.len() as f32 > (raw.len() as f32) * 0.5
{
let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
.replace(&raw, "")
.chars()
.collect_vec();
while let Some(ch) = raw1.pop() {
if ch == ']' {
break;
}
}
raw = raw1.into_iter().collect();
}
Ok(raw.to_string())
}
pub fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
let seasons = SEASON_EXTRACT_SEASON_ALL_RE
.find(&name_and_season)
.into_iter()
.map(|s| s.as_str())
.collect_vec();
if seasons.is_empty() {
return (title_body.to_string(), None, 1);
}
let mut season = 1;
let mut season_raw = None;
let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");
for s in seasons {
season_raw = Some(s);
if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s)
&& let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
.replace_all(m.as_str(), "")
.parse::<i32>()
{
season = s;
break;
}
if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s)
&& let Some(s) = DIGIT_1PLUS_REG
.find(m.as_str())
.and_then(|s| s.as_str().parse::<i32>().ok())
{
season = s;
break;
}
if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
.replace(m.as_str(), "")
.parse::<i32>()
{
season = s;
break;
}
if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
season = ZH_NUM_MAP[m.as_str()];
break;
}
}
}
(name.to_string(), season_raw.map(|s| s.to_string()), season)
}
fn extract_name_from_title_body_name_section(
title_body_name_section: &str,
) -> (Option<String>, Option<String>, Option<String>) {
let mut name_en = None;
let mut name_zh = None;
let mut name_jp = None;
let replaced1 = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE
.replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
let trimmed = replaced2.trim();
let mut split = NAME_EXTRACT_SPLIT_RE
.split(trimmed)
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.map(|s| s.to_string())
.collect_vec();
if split.len() == 1 {
let mut split_space = split[0].split(' ').collect_vec();
let mut search_indices = vec![0];
if split_space.len() > 1 {
search_indices.push(split_space.len() - 1);
}
for i in search_indices {
if NAME_ZH_TEST.is_match(split_space[i]) {
let chs = split_space[i];
split_space.remove(i);
split = vec![chs.to_string(), split_space.join(" ")];
break;
}
}
}
for item in split {
if NAME_JP_TEST.is_match(&item) && name_jp.is_none() {
name_jp = Some(item);
} else if NAME_ZH_TEST.is_match(&item) && name_zh.is_none() {
name_zh = Some(item);
} else if NAME_EN_TEST.is_match(&item) && name_en.is_none() {
name_en = Some(item);
}
}
(name_en, name_zh, name_jp)
}
fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
DIGIT_1PLUS_REG
.find(title_episode)?
.as_str()
.parse::<i32>()
.ok()
}
fn clear_sub(sub: Option<String>) -> Option<String> {
sub.map(|s| CLEAR_SUB_RE.replace_all(&s, "").to_string())
}
fn extract_tags_from_title_extra(
title_extra: &str,
) -> (Option<String>, Option<String>, Option<String>) {
let replaced = TAGS_EXTRACT_SPLIT_RE.replace_all(title_extra, " ");
let elements = replaced
.split(' ')
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.collect_vec();
let mut sub = None;
let mut resolution = None;
let mut source = None;
for element in elements.iter() {
if SUB_RE.is_match(element) {
sub = Some(element.to_string())
} else if RESOLUTION_RE.is_match(element) {
resolution = Some(element.to_string())
} else if SOURCE_L1_RE.is_match(element) {
source = Some(element.to_string())
}
}
if source.is_none() {
for element in elements {
if SOURCE_L2_RE.is_match(element) {
source = Some(element.to_string())
}
}
}
(clear_sub(sub), resolution, source)
}
pub fn check_is_movie(title: &str) -> bool {
MOVIE_TITLE_RE.is_match(title)
}
pub fn extract_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
let raw_title = s.trim();
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
let fansub = extract_fansub(&raw_title_without_ch_brackets);
let movie_capture = check_is_movie(&raw_title_without_ch_brackets);
if let Some(title_re_match_obj) = MOVIE_TITLE_RE
.captures(&raw_title_without_ch_brackets)
.or(TITLE_RE.captures(&raw_title_without_ch_brackets))
{
let mut title_body = title_re_match_obj
.get(1)
.map(|s| s.as_str().trim())
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"))
.to_string();
let mut title_episode = title_re_match_obj
.get(2)
.map(|s| s.as_str().trim())
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
let title_extra = title_re_match_obj
.get(3)
.map(|s| s.as_str().trim())
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
if movie_capture {
title_body += title_episode;
title_episode = "";
} else if EP_COLLECTION_RE.is_match(title_episode) {
title_episode = "";
}
let title_body = title_body_pre_process(&title_body, fansub)?;
let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
extract_name_from_title_body_name_section(&name_without_season);
let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1);
let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
Ok(RawEpisodeMeta {
name_en,
name_en_no_season,
name_jp,
name_jp_no_season,
name_zh,
name_zh_no_season,
season,
season_raw,
episode_index,
subtitle: sub,
source,
fansub: fansub.map(|s| s.to_string()),
resolution,
})
} else {
whatever!("Can not parse episode meta from raw filename {}", raw_title)
}
}
#[cfg(test)]
mod tests {
use super::{RawEpisodeMeta, extract_episode_meta_from_raw_name};
fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
let expected: Option<RawEpisodeMeta> = serde_json::from_str(expected).unwrap_or_default();
let found = extract_episode_meta_from_raw_name(raw_name).ok();
if expected != found {
println!(
"expected {} and found {} are not equal",
serde_json::to_string_pretty(&expected).unwrap(),
serde_json::to_string_pretty(&found).unwrap()
)
}
assert_eq!(expected, found);
}
#[test]
fn test_parse_ep_with_all_parts_wrapped() {
test_raw_ep_parser_case(
r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
r#"{
"name_zh": "我心里危险的东西",
"name_zh_no_season": "我心里危险的东西",
"season": 2,
"season_raw": "第二季",
"episode_index": 5,
"subtitle": "简日双语",
"source": null,
"fansub": "新Sub",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_title_wrapped_by_one_square_bracket_and_season_prefix() {
test_raw_ep_parser_case(
r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
r#"{
"name_en": "Boku no Kokoro no Yabai Yatsu",
"name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
"name_zh": "我内心的糟糕念头",
"name_zh_no_season": "我内心的糟糕念头",
"season": 1,
"season_raw": null,
"episode_index": 18,
"subtitle": "简日双语",
"source": null,
"fansub": "喵萌奶茶屋",
"resolution": "1080p"
}"#,
);
}
#[test]
fn test_parse_ep_with_ep_and_version() {
test_raw_ep_parser_case(
r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{
"name_en": "Shin no Nakama 2nd",
"name_en_no_season": "Shin no Nakama",
"name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
"name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
"season": 2,
"season_raw": "2nd",
"episode_index": 8,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_en_title_only() {
test_raw_ep_parser_case(
r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
r#"{
"name_en": "THE MARGINAL SERVICE",
"name_en_no_season": "THE MARGINAL SERVICE",
"season": 1,
"episode_index": 8,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "动漫国字幕组&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_two_zh_title() {
test_raw_ep_parser_case(
r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{
"name_en": "Nozomanu Fushi no Boukensha",
"name_en_no_season": "Nozomanu Fushi no Boukensha",
"name_zh": "事与愿违的不死冒险者",
"name_zh_no_season": "事与愿违的不死冒险者",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_en_zh_jp_titles() {
test_raw_ep_parser_case(
r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
r#"{
"name_en": "Pon no Michi",
"name_jp": "ぽんのみち",
"name_zh": "碰之道",
"name_en_no_season": "Pon no Michi",
"name_jp_no_season": "ぽんのみち",
"name_zh_no_season": "碰之道",
"season": 1,
"season_raw": null,
"episode_index": 7,
"subtitle": "简繁日内封字幕",
"source": "WebRip",
"fansub": "喵萌奶茶屋&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_nth_season() {
test_raw_ep_parser_case(
r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_en": "Yowai Character Tomozakikun",
"name_en_no_season": "Yowai Character Tomozakikun",
"name_zh": "弱角友崎同学 2nd STAGE",
"name_zh_no_season": "弱角友崎同学",
"season": 2,
"season_raw": "2nd",
"episode_index": 9,
"subtitle": "CHT",
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_season_en_and_season_zh() {
test_raw_ep_parser_case(
r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
r#"{
"name_en": "Kingdom S5",
"name_en_no_season": "Kingdom",
"name_zh": "王者天下 第五季",
"name_zh_no_season": "王者天下",
"season": 5,
"season_raw": "第五季",
"episode_index": 7,
"subtitle": "简繁外挂字幕",
"source": "WebRip",
"fansub": "豌豆字幕组&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_airota_fansub_style_case1() {
test_raw_ep_parser_case(
r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
r#"{
"name_en": "Alice to Therese no Maboroshi Koujou",
"name_en_no_season": "Alice to Therese no Maboroshi Koujou",
"name_zh": "爱丽丝与特蕾丝的虚幻工厂",
"name_zh_no_season": "爱丽丝与特蕾丝的虚幻工厂",
"season": 1,
"episode_index": 1,
"subtitle": "简繁内封",
"source": "WebRip",
"fansub": "千夏字幕组",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_airota_fansub_style_case2() {
test_raw_ep_parser_case(
r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
r#"{
"name_en": "Yuru Camp Movie",
"name_en_no_season": "Yuru Camp Movie",
"name_zh": "电影 轻旅轻营 (摇曳露营)",
"name_zh_no_season": "电影 轻旅轻营 (摇曳露营)",
"season": 1,
"episode_index": 1,
"subtitle": "繁体",
"source": "UHDRip",
"fansub": "千夏字幕组&喵萌奶茶屋",
"resolution": "2160p"
}"#,
)
}
#[test]
fn test_parse_ep_with_large_episode_style() {
test_raw_ep_parser_case(
r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#,
r#"{
"name_en": "New Doraemon",
"name_en_no_season": "New Doraemon",
"name_zh": "哆啦A梦新番",
"name_zh_no_season": "哆啦A梦新番",
"season": 1,
"episode_index": 747,
"subtitle": "GB",
"fansub": "梦蓝字幕组",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_many_square_brackets_split_title() {
test_raw_ep_parser_case(
r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
r#"{
"name_en": "Yuru Camp",
"name_en_no_season": "Yuru Camp",
"name_zh": "剧场版-摇曳露营",
"name_zh_no_season": "剧场版-摇曳露营",
"season": 1,
"episode_index": 1,
"subtitle": "简日双语",
"fansub": "MCE汉化组",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_implicit_lang_title_sep() {
test_raw_ep_parser_case(
r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#,
r#"{
"name_en": "NieR Automata Ver1.1a",
"name_en_no_season": "NieR Automata Ver1.1a",
"name_zh": "尼尔:机械纪元",
"name_zh_no_season": "尼尔:机械纪元",
"season": 1,
"episode_index": 2,
"subtitle": "简日双语",
"fansub": "织梦字幕组",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_square_brackets_wrapped_and_space_split() {
test_raw_ep_parser_case(
r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
r#"
{
"name_en": "Delicious in Dungeon",
"name_en_no_season": "Delicious in Dungeon",
"name_zh": "迷宫饭",
"name_zh_no_season": "迷宫饭",
"season": 1,
"episode_index": 3,
"subtitle": "日语中字",
"source": "NETFLIX",
"fansub": "天月搬运组",
"resolution": "1080P"
}
"#,
)
}
#[test]
fn test_parse_ep_with_start_with_brackets_wrapped_season_info_prefix() {
test_raw_ep_parser_case(
r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
r#"{
"name_en": "Dungeon Meshi",
"name_en_no_season": "Dungeon Meshi",
"name_zh": "迷宫饭",
"name_zh_no_season": "迷宫饭",
"season": 1,
"episode_index": 1,
"subtitle": "简日双语",
"fansub": "爱恋字幕社",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_with_small_no_title_extra_brackets_case() {
test_raw_ep_parser_case(
r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_en": "Mahou Shoujo ni Akogarete",
"name_en_no_season": "Mahou Shoujo ni Akogarete",
"name_zh": "梦想成为魔法少女 [年龄限制版]",
"name_zh_no_season": "梦想成为魔法少女 [年龄限制版]",
"season": 1,
"episode_index": 9,
"subtitle": "CHT",
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_title_leading_space_style() {
test_raw_ep_parser_case(
r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_zh": "16bit 的感动 ANOTHER LAYER",
"name_zh_no_season": "16bit 的感动 ANOTHER LAYER",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "CHT",
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
}"#,
)
}
#[test]
fn test_parse_ep_title_leading_month_and_wrapped_brackets_style() {
test_raw_ep_parser_case(
r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#,
r#"{
"name_en": "~ Sugar Apple Fairy Tale ~",
"name_en_no_season": "~ Sugar Apple Fairy Tale ~",
"name_zh": "银砂糖师与黑妖精",
"name_zh_no_season": "银砂糖师与黑妖精",
"season": 1,
"episode_index": 13,
"subtitle": "简日双语",
"fansub": "喵萌奶茶屋",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_title_leading_month_style() {
test_raw_ep_parser_case(
r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4字幕社招人内详"#,
r#"{
"name_en": "Tengoku Daimakyou",
"name_en_no_season": "Tengoku Daimakyou",
"name_zh": "天国大魔境",
"name_zh_no_season": "天国大魔境",
"season": 1,
"episode_index": 5,
"subtitle": "字幕社招人内详",
"source": null,
"fansub": "极影字幕社",
"resolution": "720P"
}"#,
)
}
#[test]
fn test_parse_ep_tokusatsu_style() {
test_raw_ep_parser_case(
r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#,
r#"{
"name_jp": "仮面ライダーギーツ",
"name_jp_no_season": "仮面ライダーギーツ",
"name_zh": "假面骑士Geats",
"name_zh_no_season": "假面骑士Geats",
"season": 1,
"episode_index": 33,
"source": "WEBDL",
"fansub": "MagicStar",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_parse_ep_with_multi_lang_zh_title() {
test_raw_ep_parser_case(
r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对☆PICO FEVER / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#,
r#"{
"name_en": "Garupa Pico: Fever!",
"name_en_no_season": "Garupa Pico: Fever!",
"name_zh": "BanG Dream! 少女乐团派对☆PICO FEVER",
"name_zh_no_season": "BanG Dream! 少女乐团派对☆PICO FEVER",
"season": 1,
"episode_index": 26,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "百冬练习组&LoliHouse",
"resolution": "1080p"
}"#,
)
}
#[test]
fn test_ep_collections() {
test_raw_ep_parser_case(
r#"[奶²&LoliHouse] 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简日内封字幕]"#,
r#"{
"name_en": "Kinokoinu: Mushroom Pup",
"name_en_no_season": "Kinokoinu: Mushroom Pup",
"name_zh": "蘑菇狗",
"name_zh_no_season": "蘑菇狗",
"season": 1,
"episode_index": 1,
"subtitle": "简日内封字幕",
"source": "WebRip",
"fansub": "奶²&LoliHouse",
"resolution": "1080p",
"name": " 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集]"
}"#,
);
test_raw_ep_parser_case(
r#"[LoliHouse] 叹气的亡灵想隐退 / Nageki no Bourei wa Intai shitai [01-13 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
r#"{
"name_en": "Nageki no Bourei wa Intai shitai",
"name_en_no_season": "Nageki no Bourei wa Intai shitai",
"name_jp": null,
"name_jp_no_season": null,
"name_zh": "叹气的亡灵想隐退",
"name_zh_no_season": "叹气的亡灵想隐退",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
);
test_raw_ep_parser_case(
r#"[LoliHouse] 精灵幻想记 第二季 / Seirei Gensouki S2 [01-12 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
r#"{
"name_en": "Seirei Gensouki S2",
"name_en_no_season": "Seirei Gensouki",
"name_zh": "精灵幻想记 第二季",
"name_zh_no_season": "精灵幻想记",
"season": 2,
"season_raw": "第二季",
"episode_index": 1,
"subtitle": "简繁内封字幕",
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
}"#,
);
test_raw_ep_parser_case(
r#"[喵萌奶茶屋&LoliHouse] 超自然武装当哒当 / 胆大党 / Dandadan [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简繁日内封字幕][Fin]"#,
r#" {
"name_en": "Dandadan",
"name_en_no_season": "Dandadan",
"name_zh": "超自然武装当哒当",
"name_zh_no_season": "超自然武装当哒当",
"season": 1,
"episode_index": 1,
"subtitle": "简繁日内封字幕",
"source": "WebRip",
"fansub": "喵萌奶茶屋&LoliHouse",
"resolution": "1080p"
}"#,
);
}
// TODO: FIXME
#[test]
fn test_bad_cases() {
test_raw_ep_parser_case(
r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
r#"{
"name_zh": "摇曳露营△剧场版",
"name_zh_no_season": "摇曳露营△剧场版",
"season": 1,
"season_raw": null,
"episode_index": 1,
"subtitle": "简繁字幕",
"source": "BDrip",
"fansub": "7³ACG x 桜都字幕组",
"resolution": "1080p"
}"#,
);
test_raw_ep_parser_case(
r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#,
r#"{
"name_en": "第二季 Komi-san wa, Komyushou Desu. S02",
"name_en_no_season": "Komi-san wa, Komyushou Desu.",
"name_zh": "古见同学有交流障碍症",
"name_zh_no_season": "古见同学有交流障碍症",
"season": 2,
"season_raw": "第二季",
"episode_index": 22,
"subtitle": "GB",
"fansub": "幻樱字幕组",
"resolution": "1920X1080"
}"#,
);
}
}

View File

@ -1,19 +1,151 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use std::{ops::Deref, sync::Arc};
use crate::{
graphql::infra::json::restrict_jsonb_filter_input_for_entity, models::subscriber_tasks,
use async_graphql::dynamic::{FieldValue, TypeRef};
use sea_orm::{
ColumnTrait, ConnectionTrait, EntityTrait, QueryFilter, QuerySelect, QueryTrait, prelude::Expr,
sea_query::Query,
};
use seaography::{
Builder as SeaographyBuilder, BuilderContext, EntityDeleteMutationBuilder, EntityObjectBuilder,
EntityQueryFieldBuilder, get_filter_conditions,
};
use crate::{
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::{
custom::generate_entity_filter_mutation_field,
json::{convert_jsonb_output_case_for_entity, restrict_jsonb_filter_input_for_entity},
},
},
models::subscriber_tasks,
task::{ApalisJobs, ApalisSchema},
};
pub fn register_subscriber_tasks_entity_mutations(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
let context = builder.context;
{
        let entity_delete_mutation_builder = EntityDeleteMutationBuilder { context };
let delete_mutation = generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
context,
            entity_delete_mutation_builder.type_name::<subscriber_tasks::Entity>(),
TypeRef::named_nn(TypeRef::INT),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
resolver_ctx,
context,
filters,
);
Box::pin(async move {
let db = app_ctx.db();
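                    // Resolve matching task ids with a subquery, then delete the corresponding rows from the apalis jobs table.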
let select_subquery = subscriber_tasks::Entity::find()
.select_only()
.column(subscriber_tasks::Column::Id)
.filter(filters_condition);
let delete_query = Query::delete()
.from_table((ApalisSchema::Schema, ApalisJobs::Table))
.and_where(
Expr::col(ApalisJobs::Id).in_subquery(select_subquery.into_query()),
)
.to_owned();
let db_backend = db.deref().get_database_backend();
let delete_statement = db_backend.build(&delete_query);
let result = db.execute(delete_statement).await?;
Ok::<_, RecorderError>(Some(FieldValue::value(result.rows_affected() as i32)))
})
}),
);
builder.mutations.push(delete_mutation);
}
{
let entity_object_builder = EntityObjectBuilder { context };
let entity_query_field = EntityQueryFieldBuilder { context };
let entity_retry_one_mutation_name = format!(
"{}RetryOne",
entity_query_field.type_name::<subscriber_tasks::Entity>()
);
let retry_one_mutation =
generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
context,
entity_retry_one_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
resolver_ctx,
context,
filters,
);
Box::pin(async move {
let db = app_ctx.db();
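                        // Look up the id of the first task matching the filter, ask the task service to retry it, then return the refreshed task row.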
let job_id = subscriber_tasks::Entity::find()
.filter(filters_condition)
.select_only()
.column(subscriber_tasks::Column::Id)
.into_tuple::<String>()
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
let task = app_ctx.task();
task.retry_subscriber_task(job_id.clone()).await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(&job_id))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(retry_one_mutation);
}
builder
}
pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::SubscriberId,
);
restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
);
convert_jsonb_output_case_for_entity::<subscriber_tasks::Entity>(
context,
&subscriber_tasks::Column::Job,
);
}
pub fn register_subscriber_tasks_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.register_entity::<subscriber_tasks::Entity>(
<subscriber_tasks::RelatedEntity as sea_orm::Iterable>::iter()
.map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(subscriber_tasks::Entity, tokio::spawn);
builder =
builder.register_entity_dataloader_one_to_many(subscriber_tasks::Entity, tokio::spawn);
builder = register_subscriber_tasks_entity_mutations(builder);
builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
builder
}

View File

@ -1,104 +1,58 @@
use std::sync::Arc;
use async_graphql::dynamic::{
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
use async_graphql::dynamic::{FieldValue, TypeRef};
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
use seaography::{
Builder as SeaographyBuilder, EntityObjectBuilder, EntityQueryFieldBuilder,
get_filter_conditions,
};
use seaography::Builder as SeaographyBuilder;
use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql;
use crate::{
app::AppContextTrait,
auth::AuthUserInfo,
models::subscriptions::{self, SubscriptionTrait},
errors::RecorderError,
graphql::infra::custom::generate_entity_filter_mutation_field,
models::{
subscriber_tasks,
subscriptions::{self, SubscriptionTrait},
},
task::SubscriberTask,
};
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
struct SyncOneSubscriptionFilterInput {
pub id: i32,
}
impl SyncOneSubscriptionFilterInput {
fn input_type_name() -> &'static str {
"SyncOneSubscriptionFilterInput"
}
fn arg_name() -> &'static str {
"filter"
}
fn generate_input_object() -> InputObject {
InputObject::new(Self::input_type_name())
.description("The input of the subscriptionSyncOne series of mutations")
.field(InputValue::new(
SyncOneSubscriptionFilterInputFieldEnum::Id.as_str(),
TypeRef::named_nn(TypeRef::INT),
))
}
}
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct SyncOneSubscriptionInfo {
pub task_id: String,
}
impl SyncOneSubscriptionInfo {
fn object_type_name() -> &'static str {
"SyncOneSubscriptionInfo"
}
fn generate_output_object() -> Object {
Object::new(Self::object_type_name())
.description("The output of the subscriptionSyncOne series of mutations")
.field(Field::new(
SyncOneSubscriptionInfoFieldEnum::TaskId,
TypeRef::named_nn(TypeRef::STRING),
move |ctx| {
FieldFuture::new(async move {
let subscription_info = ctx.parent_value.try_downcast_ref::<Self>()?;
Ok(Some(async_graphql::Value::from(
subscription_info.task_id.as_str(),
)))
})
},
))
}
}
pub fn register_subscriptions_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.schema = builder
.schema
.register(SyncOneSubscriptionFilterInput::generate_input_object());
builder.schema = builder
.schema
.register(SyncOneSubscriptionInfo::generate_output_object());
let context = builder.context;
builder.mutations.push(
Field::new(
"subscriptionSyncOneFeedsIncremental",
TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
let entity_object_builder = EntityObjectBuilder { context };
let entity_query_field = EntityQueryFieldBuilder { context };
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;
{
let sync_one_feeds_incremental_mutation_name = format!(
"{}SyncOneFeedsIncremental",
entity_query_field.type_name::<subscriptions::Entity>()
);
let filter_input: SyncOneSubscriptionFilterInput = ctx
.args
.get(SyncOneSubscriptionFilterInput::arg_name())
.unwrap()
.deserialize()?;
let sync_one_feeds_incremental_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_incremental_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
filter_input.id,
subscriber_id,
)
.await?;
Box::pin(async move {
let db = app_ctx.db();
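                    // Resolve the target subscription from the filter, enqueue the incremental feeds sync task, then return the newly created task row.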
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
@ -107,48 +61,56 @@ pub fn register_subscriptions_to_schema_builder(
let task_id = task_service
.add_subscriber_task(
auth_user_info.subscriber_auth.subscriber_id,
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsIncremental(
subscription.into(),
),
)
.await?;
Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
task_id: task_id.to_string(),
})))
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
},
)
.argument(InputValue::new(
SyncOneSubscriptionFilterInput::arg_name(),
TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
)),
);
}),
);
builder.mutations.push(
Field::new(
"subscriptionSyncOneFeedsFull",
TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
builder.mutations.push(sync_one_feeds_incremental_mutation);
}
{
let sync_one_feeds_full_mutation_name = format!(
"{}SyncOneFeedsFull",
entity_query_field.type_name::<subscriptions::Entity>()
);
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;
let sync_one_feeds_full_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_full_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
let filter_input: SyncOneSubscriptionFilterInput = ctx
.args
.get(SyncOneSubscriptionFilterInput::arg_name())
.unwrap()
.deserialize()?;
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
filter_input.id,
subscriber_id,
)
.await?;
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
@ -157,46 +119,55 @@ pub fn register_subscriptions_to_schema_builder(
let task_id = task_service
.add_subscriber_task(
auth_user_info.subscriber_auth.subscriber_id,
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsFull(subscription.into()),
)
.await?;
Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
task_id: task_id.to_string(),
})))
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
},
)
.argument(InputValue::new(
SyncOneSubscriptionFilterInput::arg_name(),
TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
)),
);
}),
);
builder.mutations.push(
Field::new(
"subscriptionSyncOneSources",
TypeRef::named_nn(SyncOneSubscriptionInfo::object_type_name()),
move |ctx| {
FieldFuture::new(async move {
let auth_user_info = ctx.data::<AuthUserInfo>()?;
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
builder.mutations.push(sync_one_feeds_full_mutation);
}
let subscriber_id = auth_user_info.subscriber_auth.subscriber_id;
{
let sync_one_sources_mutation_name = format!(
"{}SyncOneSources",
entity_query_field.type_name::<subscriptions::Entity>()
);
let filter_input: SyncOneSubscriptionFilterInput = ctx
.args
.get(SyncOneSubscriptionFilterInput::arg_name())
.unwrap()
.deserialize()?;
let sync_one_sources_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_sources_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
let subscription_model = subscriptions::Model::find_by_id_and_subscriber_id(
app_ctx.as_ref(),
filter_input.id,
subscriber_id,
)
.await?;
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
@ -205,22 +176,26 @@ pub fn register_subscriptions_to_schema_builder(
let task_id = task_service
.add_subscriber_task(
auth_user_info.subscriber_auth.subscriber_id,
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionSources(subscription.into()),
)
.await?;
Ok(Some(FieldValue::owned_any(SyncOneSubscriptionInfo {
task_id: task_id.to_string(),
})))
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
},
)
.argument(InputValue::new(
SyncOneSubscriptionFilterInput::arg_name(),
TypeRef::named_nn(SyncOneSubscriptionFilterInput::input_type_name()),
)),
);
}),
);
builder.mutations.push(sync_one_sources_mutation);
}
builder
}

View File

@ -0,0 +1,76 @@
use std::{pin::Pin, sync::Arc};
use async_graphql::dynamic::{
Field, FieldFuture, FieldValue, InputValue, ResolverContext, TypeRef, ValueAccessor,
};
use sea_orm::EntityTrait;
use seaography::{BuilderContext, EntityObjectBuilder, FilterInputBuilder, GuardAction};
use crate::{app::AppContextTrait, errors::RecorderResult};
pub type FilterMutationFn = Arc<
dyn for<'a> Fn(
&ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Option<ValueAccessor<'_>>,
) -> Pin<
Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>,
> + Send
+ Sync,
>;
pub fn generate_entity_filter_mutation_field<T, N, R>(
builder_context: &'static BuilderContext,
field_name: N,
type_ref: R,
mutation_fn: FilterMutationFn,
) -> Field
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
N: Into<String>,
R: Into<TypeRef>,
{
let entity_filter_input_builder = FilterInputBuilder {
context: builder_context,
};
let entity_object_builder = EntityObjectBuilder {
context: builder_context,
};
let object_name: String = entity_object_builder.type_name::<T>();
let context = builder_context;
let guard = builder_context.guards.entity_guards.get(&object_name);
Field::new(field_name, type_ref, move |ctx| {
let mutation_fn = mutation_fn.clone();
FieldFuture::new(async move {
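            // Run the entity guard (if one is registered) before executing the mutation; blocked requests surface as a GraphQL error.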
let guard_flag = if let Some(guard) = guard {
(*guard)(&ctx)
} else {
GuardAction::Allow
};
if let GuardAction::Block(reason) = guard_flag {
return Err::<Option<_>, async_graphql::Error>(async_graphql::Error::new(
reason.unwrap_or("Entity guard triggered.".into()),
));
}
let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
let filters = ctx.args.get(&context.entity_delete_mutation.filter_field);
let result = mutation_fn(&ctx, app_ctx.clone(), filters)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(result)
})
})
.argument(InputValue::new(
&context.entity_delete_mutation.filter_field,
TypeRef::named(entity_filter_input_builder.type_name(&object_name)),
))
}

View File

@ -3,6 +3,7 @@ use async_graphql::{
dynamic::{ResolverContext, Scalar, SchemaError},
to_value,
};
use convert_case::Case;
use itertools::Itertools;
use rust_decimal::{Decimal, prelude::FromPrimitive};
use sea_orm::{
@ -12,9 +13,13 @@ use sea_orm::{
use seaography::{
Builder as SeaographyBuilder, BuilderContext, FilterType, FnFilterCondition, SeaographyError,
};
use serde::{Serialize, de::DeserializeOwned};
use serde_json::Value as JsonValue;
use crate::{errors::RecorderResult, graphql::infra::util::get_entity_column_key};
use crate::{
errors::RecorderResult, graphql::infra::util::get_entity_column_key,
utils::json::convert_json_keys,
};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Copy)]
pub enum JsonbFilterOperation {
@ -948,6 +953,64 @@ where
);
}
pub fn validate_jsonb_input_for_entity<T, S>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
S: DeserializeOwned + Serialize,
{
let entity_column_key = get_entity_column_key::<T>(context, column);
context.types.input_conversions.insert(
entity_column_key.clone(),
Box::new(move |_resolve_context, accessor| {
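            // Round-trip the input through the strongly typed S to validate its shape before storing it as JSONB.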
let deserialized = accessor.deserialize::<S>().map_err(|err| {
SeaographyError::TypeConversionError(
err.message,
format!("Json - {entity_column_key}"),
)
})?;
let json_value = serde_json::to_value(deserialized).map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_key}"),
)
})?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}),
);
}
pub fn convert_jsonb_output_case_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_column_key = get_entity_column_key::<T>(context, column);
context.types.output_conversions.insert(
entity_column_key.clone(),
Box::new(move |value| {
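            // Convert JSONB object keys to camelCase before exposing the value through GraphQL.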
if let sea_orm::Value::Json(Some(json)) = value {
let result = async_graphql::Value::from_json(convert_json_keys(
json.as_ref().clone(),
Case::Camel,
))
.map_err(|err| {
SeaographyError::TypeConversionError(
err.to_string(),
format!("Json - {entity_column_key}"),
)
})?;
Ok(result)
} else {
Err(SeaographyError::TypeConversionError(
"value should be json".to_string(),
format!("Json - {entity_column_key}"),
))
}
}),
);
}
#[cfg(test)]
mod tests {
use std::assert_matches::assert_matches;

View File

@ -1,2 +1,3 @@
pub mod custom;
pub mod json;
pub mod util;

View File

@ -42,10 +42,6 @@ pub fn build_schema(
register_subscribers_to_schema_context(&mut context);
{
restrict_subscriber_for_entity::<bangumi::Entity>(
&mut context,
&bangumi::Column::SubscriberId,
);
restrict_subscriber_for_entity::<downloaders::Entity>(
&mut context,
&downloaders::Column::SubscriberId,
@ -74,10 +70,6 @@ pub fn build_schema(
&mut context,
&subscription_episode::Column::SubscriberId,
);
restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
&mut context,
&subscriber_tasks::Column::SubscriberId,
);
restrict_subscriber_for_entity::<credential_3rd::Entity>(
&mut context,
&credential_3rd::Column::SubscriberId,
@ -110,7 +102,6 @@ pub fn build_schema(
subscription_bangumi,
subscription_episode,
subscriptions,
subscriber_tasks,
credential_3rd
]
);
@ -121,7 +112,6 @@ pub fn build_schema(
builder.register_enumeration::<downloaders::DownloaderCategory>();
builder.register_enumeration::<downloads::DownloadMime>();
builder.register_enumeration::<credential_3rd::Credential3rdType>();
builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
}
builder = register_subscriptions_to_schema_builder(builder);

View File

@ -21,10 +21,12 @@ pub mod errors;
pub mod extract;
pub mod graphql;
pub mod logger;
pub mod media;
pub mod message;
pub mod migrations;
pub mod models;
pub mod storage;
pub mod task;
pub mod test_utils;
pub mod utils;
pub mod web;

View File

@ -0,0 +1,105 @@
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum AutoOptimizeImageFormat {
#[serde(rename = "image/webp")]
Webp,
#[serde(rename = "image/avif")]
Avif,
#[serde(rename = "image/jxl")]
Jxl,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeWebpOptions {
pub quality: Option<f32>,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeAvifOptions {
pub quality: Option<u8>,
pub speed: Option<u8>,
pub threads: Option<u8>,
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeJxlOptions {
pub quality: Option<f32>,
pub speed: Option<u8>,
}
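// Internally tagged by MIME type; a webp request serializes roughly as
// { "mime_type": "image/webp", "quality": 80.0 }.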
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(tag = "mime_type")]
pub enum EncodeImageOptions {
#[serde(rename = "image/webp")]
Webp(EncodeWebpOptions),
#[serde(rename = "image/avif")]
Avif(EncodeAvifOptions),
#[serde(rename = "image/jxl")]
Jxl(EncodeJxlOptions),
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MediaConfig {
#[serde(default = "default_webp_quality")]
pub webp_quality: f32,
#[serde(default = "default_avif_quality")]
pub avif_quality: u8,
#[serde(default = "default_avif_speed")]
pub avif_speed: u8,
#[serde(default = "default_avif_threads")]
pub avif_threads: u8,
#[serde(default = "default_jxl_quality")]
pub jxl_quality: f32,
#[serde(default = "default_jxl_speed")]
pub jxl_speed: u8,
#[serde(default = "default_auto_optimize_formats")]
pub auto_optimize_formats: Vec<AutoOptimizeImageFormat>,
}
impl Default for MediaConfig {
fn default() -> Self {
Self {
webp_quality: default_webp_quality(),
avif_quality: default_avif_quality(),
avif_speed: default_avif_speed(),
avif_threads: default_avif_threads(),
jxl_quality: default_jxl_quality(),
jxl_speed: default_jxl_speed(),
auto_optimize_formats: default_auto_optimize_formats(),
}
}
}
fn default_webp_quality() -> f32 {
80.0
}
fn default_avif_quality() -> u8 {
80
}
fn default_avif_speed() -> u8 {
6
}
fn default_avif_threads() -> u8 {
1
}
fn default_jxl_quality() -> f32 {
80.0
}
fn default_jxl_speed() -> u8 {
7
}
fn default_auto_optimize_formats() -> Vec<AutoOptimizeImageFormat> {
vec![
AutoOptimizeImageFormat::Webp,
        // AutoOptimizeImageFormat::Avif, // TOO SLOW
#[cfg(feature = "jxl")]
AutoOptimizeImageFormat::Jxl,
]
}

View File

@ -0,0 +1,8 @@
mod config;
mod service;
pub use config::{
AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
EncodeWebpOptions, MediaConfig,
};
pub use service::MediaService;

View File

@ -0,0 +1,199 @@
use std::io::Cursor;
use bytes::Bytes;
use image::{GenericImageView, ImageEncoder, ImageReader, codecs::avif::AvifEncoder};
use quirks_path::Path;
use snafu::ResultExt;
use crate::{
errors::{RecorderError, RecorderResult},
media::{EncodeAvifOptions, EncodeJxlOptions, EncodeWebpOptions, MediaConfig},
};
#[derive(Debug)]
pub struct MediaService {
pub config: MediaConfig,
}
impl MediaService {
pub async fn from_config(config: MediaConfig) -> RecorderResult<Self> {
Ok(Self { config })
}
pub fn is_legacy_image_format(&self, ext: &str) -> bool {
matches!(ext, "jpeg" | "jpg" | "png")
}
pub async fn optimize_image_to_webp(
&self,
path: impl AsRef<Path>,
data: impl Into<Bytes>,
options: Option<EncodeWebpOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.and_then(|o| o.quality)
.unwrap_or(self.config.webp_quality);
let data = data.into();
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let img = image_reader.decode()?;
let (width, height) = (img.width(), img.height());
let color = img.color();
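            // Keep the alpha channel when the source image has one; otherwise encode from plain RGB.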
let webp_data = if color.has_alpha() {
let rgba_image = img.into_rgba8();
let encoder = webp::Encoder::from_rgba(&rgba_image, width, height);
encoder.encode(quality)
} else {
                let rgb_image = img.into_rgb8();
                let encoder = webp::Encoder::from_rgb(&rgb_image, width, height);
encoder.encode(quality)
};
Ok(Bytes::from(webp_data.to_vec()))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to webp: {}",
path.as_ref().display()
)
})?
}
pub async fn optimize_image_to_avif(
&self,
path: impl AsRef<Path>,
data: Bytes,
options: Option<EncodeAvifOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.as_ref()
.and_then(|o| o.quality)
.unwrap_or(self.config.avif_quality);
let speed = options
.as_ref()
.and_then(|o| o.speed)
.unwrap_or(self.config.avif_speed);
let threads = options
.as_ref()
.and_then(|o| o.threads)
.unwrap_or(self.config.avif_threads);
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
let mut buf = vec![];
{
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let img = image_reader.decode()?;
let (width, height) = img.dimensions();
let color_type = img.color();
let encoder = AvifEncoder::new_with_speed_quality(&mut buf, speed, quality)
.with_num_threads(Some(threads as usize));
encoder.write_image(img.as_bytes(), width, height, color_type.into())?;
}
Ok(Bytes::from(buf))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to avif: {}",
path.as_ref().display()
)
})?
}
#[cfg(feature = "jxl")]
pub async fn optimize_image_to_jxl(
&self,
path: impl AsRef<Path>,
data: Bytes,
options: Option<EncodeJxlOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.as_ref()
.and_then(|o| o.quality)
.unwrap_or(self.config.jxl_quality);
let speed = options
.as_ref()
.and_then(|o| o.speed)
.unwrap_or(self.config.jxl_speed);
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
use jpegxl_rs::encode::{ColorEncoding, EncoderResult, EncoderSpeed};
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let image = image_reader.decode()?;
let (width, height) = image.dimensions();
let color = image.color();
let has_alpha = color.has_alpha();
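            // Map the numeric speed setting (0 = fastest) onto libjxl's named effort presets.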
let libjxl_speed = {
match speed {
0 | 1 => EncoderSpeed::Lightning,
2 => EncoderSpeed::Thunder,
3 => EncoderSpeed::Falcon,
4 => EncoderSpeed::Cheetah,
5 => EncoderSpeed::Hare,
6 => EncoderSpeed::Wombat,
7 => EncoderSpeed::Squirrel,
8 => EncoderSpeed::Kitten,
_ => EncoderSpeed::Tortoise,
}
};
let mut encoder_builder = jpegxl_rs::encoder_builder()
.lossless(false)
.has_alpha(has_alpha)
.color_encoding(ColorEncoding::Srgb)
.speed(libjxl_speed)
.jpeg_quality(quality)
.build()?;
            let buffer: EncoderResult<u8> = if has_alpha {
let sample = image.into_rgba8();
encoder_builder.encode(&sample, width, height)?
} else {
let sample = image.into_rgb8();
encoder_builder.encode(&sample, width, height)?
};
Ok(Bytes::from(buffer.data))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to avif: {}",
path.as_ref().display()
)
})?
}
#[cfg(not(feature = "jxl"))]
pub async fn optimize_image_to_jxl(
&self,
_path: impl AsRef<Path>,
_data: Bytes,
_options: Option<EncodeJxlOptions>,
) -> RecorderResult<Bytes> {
Err(RecorderError::Whatever {
message: "jxl feature is not enabled".to_string(),
source: None.into(),
})
}
}

View File

@ -43,7 +43,7 @@ pub enum Bangumi {
MikanBangumiId,
DisplayName,
SubscriberId,
RawName,
OriginName,
Season,
SeasonRaw,
Fansub,
@ -51,6 +51,7 @@ pub enum Bangumi {
Filter,
RssLink,
PosterLink,
OriginPosterLink,
SavePath,
Homepage,
}
@ -69,7 +70,7 @@ pub enum Episodes {
Table,
Id,
MikanEpisodeId,
RawName,
OriginName,
DisplayName,
BangumiId,
SubscriberId,
@ -80,6 +81,7 @@ pub enum Episodes {
SeasonRaw,
Fansub,
PosterLink,
OriginPosterLink,
EpisodeIndex,
Homepage,
Subtitle,
@ -100,7 +102,7 @@ pub enum SubscriptionEpisode {
pub enum Downloads {
Table,
Id,
RawName,
OriginName,
DisplayName,
SubscriberId,
DownloaderId,

View File

@ -96,7 +96,7 @@ impl MigrationTrait for Migration {
.col(text_null(Bangumi::MikanBangumiId))
.col(integer(Bangumi::SubscriberId))
.col(text(Bangumi::DisplayName))
.col(text(Bangumi::RawName))
.col(text(Bangumi::OriginName))
.col(integer(Bangumi::Season))
.col(text_null(Bangumi::SeasonRaw))
.col(text_null(Bangumi::Fansub))
@ -104,6 +104,7 @@ impl MigrationTrait for Migration {
.col(json_binary_null(Bangumi::Filter))
.col(text_null(Bangumi::RssLink))
.col(text_null(Bangumi::PosterLink))
.col(text_null(Bangumi::OriginPosterLink))
.col(text_null(Bangumi::SavePath))
.col(text_null(Bangumi::Homepage))
.foreign_key(
@ -220,7 +221,7 @@ impl MigrationTrait for Migration {
table_auto_z(Episodes::Table)
.col(pk_auto(Episodes::Id))
.col(text_null(Episodes::MikanEpisodeId))
.col(text(Episodes::RawName))
.col(text(Episodes::OriginName))
.col(text(Episodes::DisplayName))
.col(integer(Episodes::BangumiId))
.col(integer(Episodes::SubscriberId))
@ -230,6 +231,7 @@ impl MigrationTrait for Migration {
.col(text_null(Episodes::SeasonRaw))
.col(text_null(Episodes::Fansub))
.col(text_null(Episodes::PosterLink))
.col(text_null(Episodes::OriginPosterLink))
.col(integer(Episodes::EpisodeIndex))
.col(text_null(Episodes::Homepage))
.col(text_null(Episodes::Subtitle))

View File

@ -80,7 +80,7 @@ impl MigrationTrait for Migration {
.create_table(
table_auto_z(Downloads::Table)
.col(pk_auto(Downloads::Id))
.col(string(Downloads::RawName))
.col(string(Downloads::OriginName))
.col(string(Downloads::DisplayName))
.col(integer(Downloads::SubscriberId))
.col(integer(Downloads::DownloaderId))

View File

@ -99,7 +99,9 @@ impl Model {
..Default::default()
};
let new_item: Model = new_item.save(&txn).await?.try_into()?;
let new_item: Model = new_item.insert(&txn).await?;
txn.commit().await?;
Ok(new_item)
}

View File

@ -17,7 +17,7 @@ use crate::{
MikanBangumiHash, MikanBangumiMeta, build_mikan_bangumi_subscription_rss_url,
scrape_mikan_poster_meta_from_image_url,
},
rawname::extract_season_from_title_body,
origin::{OriginCompTrait, SeasonComp},
},
};
@ -41,7 +41,7 @@ pub struct Model {
pub mikan_bangumi_id: Option<String>,
pub subscriber_id: i32,
pub display_name: String,
pub raw_name: String,
pub origin_name: String,
pub season: i32,
pub season_raw: Option<String>,
pub fansub: Option<String>,
@ -49,6 +49,7 @@ pub struct Model {
pub filter: Option<BangumiFilter>,
pub rss_link: Option<String>,
pub poster_link: Option<String>,
pub origin_poster_link: Option<String>,
pub save_path: Option<String>,
pub homepage: Option<String>,
}
@ -120,9 +121,12 @@ impl ActiveModel {
_subscription_id: i32,
) -> RecorderResult<Self> {
let mikan_client = ctx.mikan();
let storage_service = ctx.storage();
let mikan_base_url = mikan_client.base_url();
let (_, season_raw, season_index) = extract_season_from_title_body(&meta.bangumi_title);
let season_comp = SeasonComp::parse_comp(&meta.bangumi_title)
.ok()
.map(|(_, s)| s);
let season_index = season_comp.as_ref().map(|s| s.num).unwrap_or(1);
let season_raw = season_comp.map(|s| s.source.into_owned());
let rss_url = build_mikan_bangumi_subscription_rss_url(
mikan_base_url.clone(),
@ -130,14 +134,9 @@ impl ActiveModel {
Some(&meta.mikan_fansub_id),
);
let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src {
let poster_meta = scrape_mikan_poster_meta_from_image_url(
mikan_client,
storage_service,
origin_poster_src,
subscriber_id,
)
.await?;
let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src.clone() {
let poster_meta =
scrape_mikan_poster_meta_from_image_url(ctx, origin_poster_src).await?;
poster_meta.poster_src
} else {
None
@ -148,11 +147,12 @@ impl ActiveModel {
mikan_fansub_id: ActiveValue::Set(Some(meta.mikan_fansub_id)),
subscriber_id: ActiveValue::Set(subscriber_id),
display_name: ActiveValue::Set(meta.bangumi_title.clone()),
raw_name: ActiveValue::Set(meta.bangumi_title),
origin_name: ActiveValue::Set(meta.bangumi_title),
season: ActiveValue::Set(season_index),
season_raw: ActiveValue::Set(season_raw),
fansub: ActiveValue::Set(Some(meta.fansub)),
poster_link: ActiveValue::Set(poster_link),
origin_poster_link: ActiveValue::Set(meta.origin_poster_src.map(|src| src.to_string())),
homepage: ActiveValue::Set(Some(meta.homepage.to_string())),
rss_link: ActiveValue::Set(Some(rss_url.to_string())),
..Default::default()
@ -228,7 +228,7 @@ impl Model {
Column::SubscriberId,
])
.update_columns([
Column::RawName,
Column::OriginName,
Column::Fansub,
Column::PosterLink,
Column::Season,

View File

@ -44,7 +44,7 @@ pub struct Model {
pub updated_at: DateTimeUtc,
#[sea_orm(primary_key)]
pub id: i32,
pub raw_name: String,
pub origin_name: String,
pub display_name: String,
pub downloader_id: i32,
pub episode_id: i32,

View File

@ -10,7 +10,7 @@ use crate::{
errors::RecorderResult,
extract::{
mikan::{MikanEpisodeHash, MikanEpisodeMeta, build_mikan_episode_homepage_url},
rawname::extract_episode_meta_from_raw_name,
origin::{OriginCompTrait, OriginNameRoot},
},
};
@ -25,7 +25,7 @@ pub struct Model {
pub id: i32,
#[sea_orm(indexed)]
pub mikan_episode_id: Option<String>,
pub raw_name: String,
pub origin_name: String,
pub display_name: String,
pub bangumi_id: i32,
pub subscriber_id: i32,
@ -35,6 +35,7 @@ pub struct Model {
pub season_raw: Option<String>,
pub fansub: Option<String>,
pub poster_link: Option<String>,
pub origin_poster_link: Option<String>,
pub episode_index: i32,
pub homepage: Option<String>,
pub subtitle: Option<String>,
@ -123,7 +124,7 @@ impl ActiveModel {
episode: MikanEpisodeMeta,
) -> RecorderResult<Self> {
let mikan_base_url = ctx.mikan().base_url().clone();
let episode_extention_meta = extract_episode_meta_from_raw_name(&episode.episode_title)
let episode_extention_meta = OriginNameRoot::parse_comp(&episode.episode_title)
.inspect_err(|err| {
tracing::error!(
err = ?err,
@ -131,12 +132,13 @@ impl ActiveModel {
"Failed to parse episode extension meta from episode title, skip"
);
})
.map(|(_, e)| e.into_meta())
.ok();
let homepage = build_mikan_episode_homepage_url(mikan_base_url, &episode.mikan_episode_id);
let mut episode_active_model = Self {
mikan_episode_id: ActiveValue::Set(Some(episode.mikan_episode_id)),
raw_name: ActiveValue::Set(episode.episode_title.clone()),
origin_name: ActiveValue::Set(episode.episode_title.clone()),
display_name: ActiveValue::Set(episode.episode_title.clone()),
bangumi_id: ActiveValue::Set(bangumi.id),
subscriber_id: ActiveValue::Set(bangumi.subscriber_id),
@ -145,6 +147,7 @@ impl ActiveModel {
season: ActiveValue::Set(bangumi.season),
fansub: ActiveValue::Set(bangumi.fansub.clone()),
poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
origin_poster_link: ActiveValue::Set(bangumi.origin_poster_link.clone()),
episode_index: ActiveValue::Set(0),
..Default::default()
};
@ -231,7 +234,7 @@ impl Model {
let new_episode_ids = Entity::insert_many(new_episode_active_modes)
.on_conflict(
OnConflict::columns([Column::MikanEpisodeId, Column::SubscriberId])
.update_columns([Column::RawName, Column::PosterLink, Column::Homepage])
.update_columns([Column::OriginName, Column::PosterLink, Column::Homepage])
.to_owned(),
)
.exec_with_returning_columns(db, [Column::Id])

View File

@ -186,19 +186,13 @@ impl Model {
let subscription_model = Entity::find_by_id(subscription_id)
.one(db)
.await?
.ok_or_else(|| RecorderError::DbError {
source: DbErr::RecordNotFound(format!(
"Subscription id {subscription_id} not found or not belong to subscriber \
{subscriber_id}",
)),
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
if subscription_model.subscriber_id != subscriber_id {
Err(RecorderError::DbError {
source: DbErr::RecordNotFound(format!(
"Subscription id {subscription_id} not found or not belong to subscriber \
{subscriber_id}",
)),
Err(RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
}

View File

@ -1,13 +1,24 @@
use std::fmt;
use std::{borrow::Cow, fmt};
use async_stream::try_stream;
use axum::{body::Body, response::Response};
use axum_extra::{TypedHeader, headers::Range};
use bytes::Bytes;
use opendal::{Buffer, Operator, layers::LoggingLayer};
use futures::{Stream, StreamExt};
use headers_accept::Accept;
use http::{HeaderValue, StatusCode, header};
use opendal::{Buffer, Metadata, Operator, Reader, Writer, layers::LoggingLayer};
use quirks_path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use tracing::instrument;
use url::Url;
use uuid::Uuid;
use super::StorageConfig;
use crate::errors::app_error::RecorderResult;
use crate::{
errors::{RecorderError, RecorderResult},
utils::http::{bound_range_to_content_range, build_no_satisfiable_content_range},
};
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
@ -43,108 +54,313 @@ impl fmt::Display for StorageStoredUrl {
}
}
#[async_trait::async_trait]
pub trait StorageServiceTrait: Sync {
fn get_operator(&self) -> RecorderResult<Operator>;
fn get_fullname(
&self,
content_category: StorageContentCategory,
subscriber_id: i32,
bucket: Option<&str>,
filename: &str,
) -> PathBuf {
[
&subscriber_id.to_string(),
content_category.as_ref(),
bucket.unwrap_or_default(),
filename,
]
.into_iter()
.map(Path::new)
.collect::<PathBuf>()
}
async fn store_object(
&self,
content_category: StorageContentCategory,
subscriber_id: i32,
bucket: Option<&str>,
filename: &str,
data: Bytes,
) -> RecorderResult<StorageStoredUrl> {
let fullname = self.get_fullname(content_category, subscriber_id, bucket, filename);
let operator = self.get_operator()?;
if let Some(dirname) = fullname.parent() {
let dirname = dirname.join("/");
operator.create_dir(dirname.as_str()).await?;
}
operator.write(fullname.as_str(), data).await?;
Ok(StorageStoredUrl::RelativePath {
path: fullname.to_string(),
})
}
async fn exists_object(
&self,
content_category: StorageContentCategory,
subscriber_id: i32,
bucket: Option<&str>,
filename: &str,
) -> RecorderResult<Option<StorageStoredUrl>> {
let fullname = self.get_fullname(content_category, subscriber_id, bucket, filename);
let operator = self.get_operator()?;
if operator.exists(fullname.as_str()).await? {
Ok(Some(StorageStoredUrl::RelativePath {
path: fullname.to_string(),
}))
} else {
Ok(None)
}
}
async fn load_object(
&self,
content_category: StorageContentCategory,
subscriber_id: i32,
bucket: Option<&str>,
filename: &str,
) -> RecorderResult<Buffer> {
let fullname = self.get_fullname(content_category, subscriber_id, bucket, filename);
let operator = self.get_operator()?;
let data = operator.read(fullname.as_str()).await?;
Ok(data)
}
}
#[derive(Debug, Clone)]
pub struct StorageService {
pub data_dir: String,
pub operator: Operator,
}
impl StorageService {
pub async fn from_config(config: StorageConfig) -> RecorderResult<Self> {
Ok(Self {
data_dir: config.data_dir.to_string(),
operator: Self::get_operator(&config.data_dir)?,
})
}
}
#[async_trait::async_trait]
impl StorageServiceTrait for StorageService {
fn get_operator(&self) -> RecorderResult<Operator> {
let fs_op = Operator::new(opendal::services::Fs::default().root(&self.data_dir))?
.layer(LoggingLayer::default())
.finish();
pub fn get_operator(data_dir: &str) -> Result<Operator, opendal::Error> {
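        // Tests use the in-memory backend so nothing is written to disk; otherwise back the operator with the local filesystem rooted at data_dir.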
let op = if cfg!(test) {
Operator::new(opendal::services::Memory::default())?
.layer(LoggingLayer::default())
.finish()
} else {
Operator::new(opendal::services::Fs::default().root(data_dir))?
.layer(LoggingLayer::default())
.finish()
};
Ok(fs_op)
Ok(op)
}
pub fn build_subscriber_path(&self, subscriber_id: i32, path: impl AsRef<Path>) -> PathBuf {
let mut p = PathBuf::from("/subscribers");
p.push(subscriber_id.to_string());
p.push(path);
p
}
pub fn build_public_path(&self, path: impl AsRef<Path>) -> PathBuf {
let mut p = PathBuf::from("/public");
p.push(path);
p
}
pub fn build_subscriber_object_path(
&self,
subscriber_id: i32,
content_category: StorageContentCategory,
bucket: &str,
object_name: &str,
) -> PathBuf {
self.build_subscriber_path(
subscriber_id,
[content_category.as_ref(), bucket, object_name]
.iter()
.collect::<PathBuf>(),
)
}
pub fn build_public_object_path(
&self,
content_category: StorageContentCategory,
bucket: &str,
object_name: &str,
) -> PathBuf {
self.build_public_path(
[content_category.as_ref(), bucket, object_name]
.iter()
.collect::<PathBuf>(),
)
}
pub async fn write<P: Into<PathBuf> + Send>(
&self,
path: P,
data: Bytes,
) -> Result<StorageStoredUrl, opendal::Error> {
let operator = &self.operator;
let path = path.into();
if let Some(dirname) = path.parent() {
let dirname = dirname.join("/");
operator.create_dir(dirname.as_str()).await?;
}
operator.write(path.as_str(), data).await?;
Ok(StorageStoredUrl::RelativePath {
path: path.to_string(),
})
}
pub async fn exists<P: ToString + Send>(
&self,
path: P,
) -> Result<Option<StorageStoredUrl>, opendal::Error> {
let operator = &self.operator;
let path = path.to_string();
if operator.exists(&path).await? {
Ok(Some(StorageStoredUrl::RelativePath { path }))
} else {
Ok(None)
}
}
pub async fn read(&self, path: impl AsRef<str>) -> Result<Buffer, opendal::Error> {
let operator = &self.operator;
let data = operator.read(path.as_ref()).await?;
Ok(data)
}
pub async fn reader(&self, path: impl AsRef<str>) -> Result<Reader, opendal::Error> {
let operator = &self.operator;
let reader = operator.reader(path.as_ref()).await?;
Ok(reader)
}
pub async fn writer(&self, path: impl AsRef<str>) -> Result<Writer, opendal::Error> {
let operator = &self.operator;
let writer = operator.writer(path.as_ref()).await?;
Ok(writer)
}
pub async fn stat(&self, path: impl AsRef<str>) -> Result<Metadata, opendal::Error> {
let operator = &self.operator;
let metadata = operator.stat(path.as_ref()).await?;
Ok(metadata)
}
#[cfg(test)]
pub async fn list_public(&self) -> Result<Vec<opendal::Entry>, opendal::Error> {
use futures::TryStreamExt;
let lister = self.operator.lister_with("public/").recursive(true).await?;
lister.try_collect().await
}
#[cfg(test)]
pub async fn list_subscribers(&self) -> Result<Vec<opendal::Entry>, opendal::Error> {
use futures::TryStreamExt;
let lister = self
.operator
.lister_with("subscribers/")
.recursive(true)
.await?;
lister.try_collect().await
}
#[instrument(skip_all, err, fields(storage_path = %storage_path.as_ref(), range = ?range, accept = ?accept))]
pub async fn serve_optimized_image(
&self,
storage_path: impl AsRef<Path>,
range: Option<TypedHeader<Range>>,
accept: Accept,
) -> RecorderResult<Response> {
let storage_path = Path::new(storage_path.as_ref());
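        // Content negotiation: if the client accepts webp/avif/jxl and a pre-optimized variant exists next to the original, serve that variant instead.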
for mime_type in accept.media_types() {
            let acceptable_path = match mime_type.subty().as_str() {
"webp" => Some(storage_path.with_extension("webp")),
"avif" => Some(storage_path.with_extension("avif")),
"jxl" => Some(storage_path.with_extension("jxl")),
_ => None,
};
            if let Some(acceptable_path) = acceptable_path
                && self.exists(&acceptable_path).await?.is_some()
                && self.stat(&acceptable_path).await?.is_file()
            {
                return self.serve_file(acceptable_path, range).await;
}
}
self.serve_file(storage_path, range).await
}
#[instrument(skip_all, err, fields(storage_path = %storage_path.as_ref(), range = ?range))]
pub async fn serve_file(
&self,
storage_path: impl AsRef<str>,
range: Option<TypedHeader<Range>>,
) -> RecorderResult<Response> {
let metadata = self
.stat(&storage_path)
.await
.map_err(|_| RecorderError::from_status(StatusCode::NOT_FOUND))?;
if !metadata.is_file() {
return Err(RecorderError::from_status(StatusCode::NOT_FOUND));
}
let mime_type = mime_guess::from_path(storage_path.as_ref()).first_or_octet_stream();
let content_type = HeaderValue::from_str(mime_type.as_ref())?;
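        // Prefer the backend-provided ETag; otherwise derive a weak one from the last-modified timestamp and content length.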
let etag = metadata.etag().map(Cow::Borrowed).or_else(|| {
let len = metadata.content_length();
let lm = metadata.last_modified()?.timestamp();
Some(Cow::Owned(format!("\"{lm:x}-{len:x}\"")))
});
let last_modified = metadata.last_modified().map(|lm| lm.to_rfc2822());
let response = if let Some(TypedHeader(range)) = range {
let ranges = range
.satisfiable_ranges(metadata.content_length())
.map(|r| -> Option<(_, _)> {
let a = bound_range_to_content_range(&r, metadata.content_length())?;
Some((r, a))
})
.collect::<Option<Vec<_>>>();
if let Some(mut ranges) = ranges {
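                // More than one satisfiable range gets a multipart/byteranges response; a single range gets a plain 206 with Content-Range.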
if ranges.len() > 1 {
let boundary = Uuid::new_v4().to_string();
let reader = self.reader(storage_path.as_ref()).await?;
let stream: impl Stream<Item = Result<Bytes, RecorderError>> = {
let boundary = boundary.clone();
try_stream! {
for (r, content_range) in ranges {
let part_header = format!("--{boundary}\r\nContent-Type: {}\r\nContent-Range: {}\r\n\r\n",
mime_type.as_ref(),
content_range.clone().to_str().unwrap(),
);
yield part_header.into();
let mut part_stream = reader.clone().into_bytes_stream(r).await?;
while let Some(chunk) = part_stream.next().await {
yield chunk?;
}
yield "\r\n".into();
}
yield format!("--{boundary}--").into();
}
};
let body = Body::from_stream(stream);
let mut builder = Response::builder()
.status(StatusCode::PARTIAL_CONTENT)
.header(
header::CONTENT_TYPE,
HeaderValue::from_str(
format!("multipart/byteranges; boundary={boundary}").as_str(),
)
.unwrap(),
);
if let Some(etag) = etag {
builder = builder.header(header::ETAG, etag.to_string());
}
if let Some(last_modified) = last_modified {
builder = builder.header(header::LAST_MODIFIED, last_modified);
}
builder.body(body)?
} else if let Some((r, content_range)) = ranges.pop() {
let reader = self.reader(storage_path.as_ref()).await?;
let stream = reader.into_bytes_stream(r).await?;
let mut builder = Response::builder()
.status(StatusCode::PARTIAL_CONTENT)
.header(header::CONTENT_TYPE, content_type.clone())
.header(header::CONTENT_RANGE, content_range);
if let Some(etag) = metadata.etag() {
builder = builder.header(header::ETAG, etag);
}
if let Some(last_modified) = last_modified {
builder = builder.header(header::LAST_MODIFIED, last_modified);
}
builder.body(Body::from_stream(stream))?
} else {
unreachable!("ranges length should be greater than 0")
}
} else {
Response::builder()
.status(StatusCode::RANGE_NOT_SATISFIABLE)
.header(header::CONTENT_TYPE, content_type)
.header(
header::CONTENT_RANGE,
build_no_satisfiable_content_range(metadata.content_length()),
)
.body(Body::empty())?
}
} else {
let reader = self.reader(storage_path.as_ref()).await?;
let stream = reader.into_bytes_stream(..).await?;
let mut builder = Response::builder()
.status(StatusCode::OK)
.header(header::CONTENT_TYPE, content_type);
if let Some(etag) = etag {
builder = builder.header(header::ETAG, etag.to_string());
}
if let Some(last_modified) = last_modified {
builder = builder.header(header::LAST_MODIFIED, last_modified);
}
builder.body(Body::from_stream(stream))?
};
Ok(response)
}
}

View File

@ -1,4 +1,4 @@
mod client;
mod config;
pub use client::{StorageContentCategory, StorageService, StorageServiceTrait, StorageStoredUrl};
pub use client::{StorageContentCategory, StorageService, StorageStoredUrl};
pub use config::StorageConfig;

View File

@ -1,4 +1,50 @@
use std::time::Duration;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskConfig {}
pub struct TaskConfig {
#[serde(default = "default_subscriber_task_workers")]
pub subscriber_task_concurrency: u32,
#[serde(default = "default_system_task_workers")]
pub system_task_concurrency: u32,
#[serde(default = "default_subscriber_task_timeout")]
pub subscriber_task_timeout: Duration,
#[serde(default = "default_system_task_timeout")]
pub system_task_timeout: Duration,
}
impl Default for TaskConfig {
fn default() -> Self {
Self {
subscriber_task_concurrency: default_subscriber_task_workers(),
system_task_concurrency: default_system_task_workers(),
subscriber_task_timeout: default_subscriber_task_timeout(),
system_task_timeout: default_system_task_timeout(),
}
}
}
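// Default to half the physical CPU cores, with at least one worker; tests always use a single worker.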
pub fn default_subscriber_task_workers() -> u32 {
if cfg!(test) {
1
} else {
((num_cpus::get_physical() as f32 / 2.0).floor() as u32).max(1)
}
}
pub fn default_system_task_workers() -> u32 {
if cfg!(test) {
1
} else {
((num_cpus::get_physical() as f32 / 2.0).floor() as u32).max(1)
}
}
pub fn default_subscriber_task_timeout() -> Duration {
Duration::from_secs(3600)
}
pub fn default_system_task_timeout() -> Duration {
Duration::from_secs(3600)
}

View File

@ -5,10 +5,11 @@ use serde::{Serialize, de::DeserializeOwned};
use crate::{app::AppContextTrait, errors::RecorderResult};
pub const SYSTEM_TASK_APALIS_NAME: &str = "system_task";
pub const SUBSCRIBER_TASK_APALIS_NAME: &str = "subscriber_task";
#[async_trait::async_trait]
pub trait SubscriberAsyncTaskTrait: Serialize + DeserializeOwned + Sized {
pub trait AsyncTaskTrait: Serialize + DeserializeOwned + Sized {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
@ -19,7 +20,7 @@ pub trait SubscriberAsyncTaskTrait: Serialize + DeserializeOwned + Sized {
}
#[async_trait::async_trait]
pub trait SubscriberStreamTaskTrait: Serialize + DeserializeOwned + Sized {
pub trait StreamTaskTrait: Serialize + DeserializeOwned + Sized {
type Yield: Serialize + DeserializeOwned + Send;
fn run_stream(

View File

@ -0,0 +1,16 @@
use sea_orm::sea_query;
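// Identifiers for apalis' own job table (apalis.jobs), referenced when manipulating task rows directly.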
#[derive(sea_query::Iden)]
pub enum ApalisSchema {
#[iden = "apalis"]
Schema,
}
#[derive(sea_query::Iden)]
pub enum ApalisJobs {
#[iden = "jobs"]
Table,
Id,
}

View File

@ -1,14 +1,19 @@
mod config;
mod core;
mod r#extern;
mod registry;
mod service;
pub use core::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberAsyncTaskTrait, SubscriberStreamTaskTrait};
pub use core::{
AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, StreamTaskTrait,
};
pub use config::TaskConfig;
pub use r#extern::{ApalisJobs, ApalisSchema};
pub use registry::{
SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant,
SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask,
OptimizeImageTask, SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum,
SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask, SystemTask,
SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant, SystemTaskTypeVariantIter,
};
pub use service::TaskService;

View File

@ -0,0 +1,53 @@
use std::sync::Arc;
use quirks_path::Path;
use serde::{Deserialize, Serialize};
use tracing::instrument;
use crate::{
app::AppContextTrait, errors::RecorderResult, media::EncodeImageOptions, task::AsyncTaskTrait,
};
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OptimizeImageTask {
pub source_path: String,
pub target_path: String,
pub format_options: EncodeImageOptions,
}
#[async_trait::async_trait]
impl AsyncTaskTrait for OptimizeImageTask {
#[instrument(err, skip(ctx))]
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
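        // Read the source image from storage, re-encode it with the requested codec, and write the result to the target path.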
let storage = ctx.storage();
let source_path = Path::new(&self.source_path);
let media_service = ctx.media();
let image_data = storage.read(source_path).await?;
match self.format_options {
EncodeImageOptions::Webp(options) => {
let data = media_service
.optimize_image_to_webp(source_path, image_data.to_bytes(), Some(options))
.await?;
storage.write(self.target_path, data).await?;
}
EncodeImageOptions::Avif(options) => {
let data = media_service
.optimize_image_to_avif(source_path, image_data.to_bytes(), Some(options))
.await?;
storage.write(self.target_path, data).await?;
}
EncodeImageOptions::Jxl(options) => {
let data = media_service
.optimize_image_to_jxl(source_path, image_data.to_bytes(), Some(options))
.await?;
storage.write(self.target_path, data).await?;
}
};
Ok(())
}
}

View File

@ -1,6 +1,8 @@
mod media;
mod subscription;
use std::sync::Arc;
pub use media::OptimizeImageTask;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
pub use subscription::{
@ -8,11 +10,11 @@ pub use subscription::{
SyncOneSubscriptionSourcesTask,
};
use super::SubscriberAsyncTaskTrait;
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
models::subscriptions::SubscriptionTrait,
task::AsyncTaskTrait,
};
#[derive(
@ -97,3 +99,36 @@ impl SubscriberTask {
}
}
}
#[derive(
Clone,
Debug,
Serialize,
Deserialize,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum SystemTaskType {
#[serde(rename = "optimize_image")]
#[sea_orm(string_value = "optimize_image")]
OptimizeImage,
}
#[derive(Clone, Debug, Serialize, Deserialize, FromJsonQueryResult)]
pub enum SystemTask {
#[serde(rename = "optimize_image")]
OptimizeImage(OptimizeImageTask),
}
impl SystemTask {
pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::OptimizeImage(task) => task.run(ctx).await,
}
}
}

View File

@ -7,7 +7,7 @@ use crate::{
app::AppContextTrait,
errors::RecorderResult,
models::subscriptions::{self, SubscriptionTrait},
task::SubscriberAsyncTaskTrait,
task::AsyncTaskTrait,
};
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
@ -20,7 +20,7 @@ impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsIncrementalTa
}
#[async_trait::async_trait]
impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsIncrementalTask {
impl AsyncTaskTrait for SyncOneSubscriptionFeedsIncrementalTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_feeds_incremental(ctx).await?;
Ok(())
@ -37,7 +37,7 @@ impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsFullTask {
}
#[async_trait::async_trait]
impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
impl AsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_feeds_full(ctx).await?;
Ok(())
@ -48,7 +48,7 @@ impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
pub struct SyncOneSubscriptionSourcesTask(pub subscriptions::Subscription);
#[async_trait::async_trait]
impl SubscriberAsyncTaskTrait for SyncOneSubscriptionSourcesTask {
impl AsyncTaskTrait for SyncOneSubscriptionSourcesTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_sources(ctx).await?;
Ok(())

View File

@ -1,4 +1,4 @@
use std::{ops::Deref, sync::Arc};
use std::{ops::Deref, str::FromStr, sync::Arc};
use apalis::prelude::*;
use apalis_sql::{
@ -10,29 +10,48 @@ use tokio::sync::RwLock;
use crate::{
app::AppContextTrait,
errors::RecorderResult,
task::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberTask, TaskConfig},
errors::{RecorderError, RecorderResult},
task::{
SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, SubscriberTask, TaskConfig,
config::{default_subscriber_task_workers, default_system_task_workers},
registry::SystemTask,
},
};
pub struct TaskService {
pub config: TaskConfig,
ctx: Arc<dyn AppContextTrait>,
subscriber_task_storage: Arc<RwLock<PostgresStorage<SubscriberTask>>>,
system_task_storage: Arc<RwLock<PostgresStorage<SystemTask>>>,
}
impl TaskService {
pub async fn from_config_and_ctx(
config: TaskConfig,
mut config: TaskConfig,
ctx: Arc<dyn AppContextTrait>,
) -> RecorderResult<Self> {
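        // A configured concurrency of 0 falls back to the default worker count.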
if config.subscriber_task_concurrency == 0 {
config.subscriber_task_concurrency = default_subscriber_task_workers();
};
if config.system_task_concurrency == 0 {
config.system_task_concurrency = default_system_task_workers();
};
let pool = ctx.db().get_postgres_connection_pool().clone();
let storage_config = Config::new(SUBSCRIBER_TASK_APALIS_NAME);
let subscriber_task_storage = PostgresStorage::new_with_config(pool, storage_config);
let subscriber_task_storage_config =
Config::new(SUBSCRIBER_TASK_APALIS_NAME).set_keep_alive(config.subscriber_task_timeout);
let system_task_storage_config =
Config::new(SYSTEM_TASK_APALIS_NAME).set_keep_alive(config.system_task_timeout);
let subscriber_task_storage =
PostgresStorage::new_with_config(pool.clone(), subscriber_task_storage_config);
let system_task_storage =
PostgresStorage::new_with_config(pool, system_task_storage_config);
Ok(Self {
config,
ctx,
subscriber_task_storage: Arc::new(RwLock::new(subscriber_task_storage)),
system_task_storage: Arc::new(RwLock::new(system_task_storage)),
})
}
@ -45,6 +64,40 @@ impl TaskService {
job.run(ctx).await
}
async fn run_system_task(
job: SystemTask,
data: Data<Arc<dyn AppContextTrait>>,
) -> RecorderResult<()> {
let ctx = data.deref().clone();
job.run(ctx).await
}
pub async fn retry_subscriber_task(&self, job_id: String) -> RecorderResult<()> {
{
let mut storage = self.subscriber_task_storage.write().await;
let task_id =
TaskId::from_str(&job_id).map_err(|err| RecorderError::InvalidTaskId {
message: err.to_string(),
})?;
let worker_id = WorkerId::new(SUBSCRIBER_TASK_APALIS_NAME);
storage.retry(&worker_id, &task_id).await?;
}
Ok(())
}
pub async fn retry_system_task(&self, job_id: String) -> RecorderResult<()> {
{
let mut storage = self.system_task_storage.write().await;
let task_id =
TaskId::from_str(&job_id).map_err(|err| RecorderError::InvalidTaskId {
message: err.to_string(),
})?;
let worker_id = WorkerId::new(SYSTEM_TASK_APALIS_NAME);
storage.retry(&worker_id, &task_id).await?;
}
Ok(())
}
pub async fn add_subscriber_task(
&self,
_subscriber_id: i32,
@ -64,27 +117,66 @@ impl TaskService {
Ok(task_id)
}
pub async fn setup_monitor(&self) -> RecorderResult<Monitor> {
let monitor = Monitor::new();
let worker = WorkerBuilder::new(SUBSCRIBER_TASK_APALIS_NAME)
.catch_panic()
.enable_tracing()
.data(self.ctx.clone())
.backend(self.subscriber_task_storage.read().await.clone())
.build_fn(Self::run_subscriber_task);
pub async fn add_system_task(&self, system_task: SystemTask) -> RecorderResult<TaskId> {
let task_id = {
let mut storage = self.system_task_storage.write().await;
let sql_context = {
let mut c = SqlContext::default();
c.set_max_attempts(1);
c
};
let request = Request::new_with_ctx(system_task, sql_context);
storage.push_request(request).await?.task_id
};
Ok(monitor.register(worker))
Ok(task_id)
}
pub async fn setup_monitor(&self) -> RecorderResult<Monitor> {
let mut monitor = Monitor::new();
{
let subscriber_task_worker = WorkerBuilder::new(SUBSCRIBER_TASK_APALIS_NAME)
.concurrency(self.config.subscriber_task_concurrency as usize)
.catch_panic()
.enable_tracing()
.data(self.ctx.clone())
.backend({
let storage = self.subscriber_task_storage.read().await;
storage.clone()
})
.build_fn(Self::run_subscriber_task);
let system_task_worker = WorkerBuilder::new(SYSTEM_TASK_APALIS_NAME)
.concurrency(self.config.system_task_concurrency as usize)
.catch_panic()
.enable_tracing()
.data(self.ctx.clone())
.backend(self.system_task_storage.read().await.clone())
.build_fn(Self::run_system_task);
monitor = monitor
.register(subscriber_task_worker)
.register(system_task_worker);
}
Ok(monitor)
}
pub async fn setup_listener(&self) -> RecorderResult<PgListen> {
let pool = self.ctx.db().get_postgres_connection_pool().clone();
let mut subscriber_task_listener = PgListen::new(pool).await?;
let mut task_listener = PgListen::new(pool).await?;
{
let mut subscriber_task_storage = self.subscriber_task_storage.write().await;
subscriber_task_listener.subscribe_with(&mut subscriber_task_storage);
task_listener.subscribe_with(&mut subscriber_task_storage);
}
Ok(subscriber_task_listener)
{
let mut system_task_storage = self.system_task_storage.write().await;
task_listener.subscribe_with(&mut system_task_storage);
}
Ok(task_listener)
}
}
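
For orientation, a minimal startup sketch showing how the monitor and listener built above are usually driven together. It assumes `AppContextTrait::task()` exposes this service and that apalis' `Monitor::run` and `PgListen::listen` are the run entry points; neither is confirmed by this diff.

use std::sync::Arc;

use crate::app::AppContextTrait;

// Hypothetical wiring: drive the worker monitor and the Postgres NOTIFY listener
// together so pushed subscriber/system tasks are picked up without polling.
pub async fn run_task_workers(ctx: Arc<dyn AppContextTrait>) {
    let task_service = ctx.task();
    let monitor = task_service.setup_monitor().await.expect("monitor setup");
    let listener = task_service.setup_listener().await.expect("listener setup");

    let (monitor_res, listener_res) = tokio::join!(monitor.run(), listener.listen());
    monitor_res.expect("task monitor exited with an error");
    listener_res.expect("task listener exited with an error");
}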

View File

@ -3,7 +3,17 @@ use std::{fmt::Debug, sync::Arc};
use once_cell::sync::OnceCell;
use typed_builder::TypedBuilder;
use crate::{app::AppContextTrait, test_utils::storage::TestingStorageService};
use crate::{
app::AppContextTrait,
test_utils::{
crypto::build_testing_crypto_service,
database::{TestingDatabaseServiceConfig, build_testing_database_service},
media::build_testing_media_service,
mikan::build_testing_mikan_client,
storage::build_testing_storage_service,
task::build_testing_task_service,
},
};
#[derive(TypedBuilder)]
#[builder(field_defaults(default, setter(strip_option)))]
@ -15,8 +25,9 @@ pub struct TestingAppContext {
mikan: Option<crate::extract::mikan::MikanClient>,
auth: Option<crate::auth::AuthService>,
graphql: Option<crate::graphql::GraphQLService>,
storage: Option<TestingStorageService>,
storage: Option<crate::storage::StorageService>,
crypto: Option<crate::crypto::CryptoService>,
media: Option<crate::media::MediaService>,
#[builder(default = Arc::new(OnceCell::new()), setter(!strip_option))]
task: Arc<OnceCell<crate::task::TaskService>>,
message: Option<crate::message::MessageService>,
@ -30,6 +41,32 @@ impl TestingAppContext {
pub fn set_task(&self, task: crate::task::TaskService) {
self.task.get_or_init(|| task);
}
pub async fn from_preset(
preset: TestingAppContextPreset,
) -> crate::errors::RecorderResult<Arc<Self>> {
let mikan_client = build_testing_mikan_client(preset.mikan_base_url.clone()).await?;
let db_service =
build_testing_database_service(preset.database_config.unwrap_or_default()).await?;
let crypto_service = build_testing_crypto_service().await?;
let storage_service = build_testing_storage_service().await?;
let media_service = build_testing_media_service().await?;
let app_ctx = Arc::new(
TestingAppContext::builder()
.mikan(mikan_client)
.db(db_service)
.crypto(crypto_service)
.storage(storage_service)
.media(media_service)
.build(),
);
let task_service = build_testing_task_service(app_ctx.clone()).await?;
app_ctx.set_task(task_service);
Ok(app_ctx)
}
}
impl Debug for TestingAppContext {
@ -67,7 +104,7 @@ impl AppContextTrait for TestingAppContext {
self.graphql.as_ref().expect("should set graphql")
}
fn storage(&self) -> &dyn crate::storage::StorageServiceTrait {
fn storage(&self) -> &crate::storage::StorageService {
self.storage.as_ref().expect("should set storage")
}
@ -90,4 +127,13 @@ impl AppContextTrait for TestingAppContext {
fn message(&self) -> &crate::message::MessageService {
self.message.as_ref().expect("should set message")
}
fn media(&self) -> &crate::media::MediaService {
self.media.as_ref().expect("should set media")
}
}
pub struct TestingAppContextPreset {
pub mikan_base_url: String,
pub database_config: Option<TestingDatabaseServiceConfig>,
}
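
A short test-setup sketch using the preset path above; the mikan base URL is a placeholder, and the accessor names (`db`, `storage`) follow the trait methods shown in this diff.

use crate::test_utils::app::{TestingAppContext, TestingAppContextPreset};

#[tokio::test]
async fn builds_testing_context_from_preset() -> crate::errors::RecorderResult<()> {
    let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
        // Placeholder; real tests usually point this at a mock mikan server.
        mikan_base_url: "http://127.0.0.1:5005".to_string(),
        database_config: None, // falls back to TestingDatabaseServiceConfig::default()
    })
    .await?;

    // All core services are wired, and the task service was attached via set_task.
    let _db = app_ctx.db();
    let _storage = app_ctx.storage();
    Ok(())
}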

View File

@ -17,6 +17,10 @@ impl Default for TestingDatabaseServiceConfig {
pub async fn build_testing_database_service(
config: TestingDatabaseServiceConfig,
) -> RecorderResult<DatabaseService> {
tracing::info!(
"testcontainers feature enabled, building testing database service in testcontainers..."
);
use testcontainers::{ImageExt, runners::AsyncRunner};
use testcontainers_ext::{ImageDefaultLogConsumerExt, ImagePruneExistedLabelExt};
use testcontainers_modules::postgres::Postgres;
@ -38,6 +42,11 @@ pub async fn build_testing_database_service(
let connection_string =
format!("postgres://konobangu:konobangu@{host_ip}:{host_port}/konobangu");
tracing::debug!(
"testing database service connection string: {}",
connection_string
);
let mut db_service = DatabaseService::from_config(DatabaseConfig {
uri: connection_string,
enable_logging: true,

View File

@ -0,0 +1,8 @@
use crate::{
errors::RecorderResult,
media::{MediaConfig, MediaService},
};
pub async fn build_testing_media_service() -> RecorderResult<MediaService> {
MediaService::from_config(MediaConfig::default()).await
}

View File

@ -1,6 +1,7 @@
pub mod app;
pub mod crypto;
pub mod database;
pub mod media;
pub mod mikan;
pub mod storage;
pub mod task;

View File

@ -1,28 +1,13 @@
use opendal::{Operator, layers::LoggingLayer};
use crate::{
errors::RecorderResult,
storage::{StorageConfig, StorageService},
};
use crate::{errors::RecorderResult, storage::StorageServiceTrait};
pub async fn build_testing_storage_service() -> RecorderResult<StorageService> {
let service = StorageService::from_config(StorageConfig {
data_dir: "tests/data".to_string(),
})
.await?;
pub struct TestingStorageService {
operator: Operator,
}
impl TestingStorageService {
pub fn new() -> RecorderResult<Self> {
let op = Operator::new(opendal::services::Memory::default())?
.layer(LoggingLayer::default())
.finish();
Ok(Self { operator: op })
}
}
#[async_trait::async_trait]
impl StorageServiceTrait for TestingStorageService {
fn get_operator(&self) -> RecorderResult<Operator> {
Ok(self.operator.clone())
}
}
pub async fn build_testing_storage_service() -> RecorderResult<TestingStorageService> {
TestingStorageService::new()
Ok(service)
}

View File

@ -9,7 +9,7 @@ use crate::{
pub async fn build_testing_task_service(
ctx: Arc<dyn AppContextTrait>,
) -> RecorderResult<TaskService> {
let config = TaskConfig {};
let config = TaskConfig::default();
let task_service = TaskService::from_config_and_ctx(config, ctx).await?;
Ok(task_service)
}

View File

@ -1,18 +1,36 @@
use tracing::Level;
use tracing_subscriber::EnvFilter;
use tracing_subscriber::{EnvFilter, layer::SubscriberExt, util::SubscriberInitExt};
use tracing_tree::HierarchicalLayer;
use crate::logger::MODULE_WHITELIST;
pub fn try_init_testing_tracing(level: Level) {
fn build_testing_tracing_filter(level: Level) -> EnvFilter {
let crate_name = env!("CARGO_PKG_NAME");
let level = level.as_str().to_lowercase();
let mut filter = EnvFilter::new(format!("{crate_name}[]={level}"));
let mut modules = vec!["mockito"];
let mut modules = vec!["mockito", "testcontainers"];
modules.extend(MODULE_WHITELIST.iter());
for module in modules {
filter = filter.add_directive(format!("{module}[]={level}").parse().unwrap());
}
let _ = tracing_subscriber::fmt().with_env_filter(filter).try_init();
filter
}
pub fn try_init_testing_tracing(level: Level) {
let _ = tracing_subscriber::fmt()
.with_env_filter(build_testing_tracing_filter(level))
.try_init();
}
pub fn try_init_testing_tracing_only_leaf(level: Level) {
let _ = tracing_subscriber::registry()
.with(build_testing_tracing_filter(level))
.with(
HierarchicalLayer::new(2)
.with_targets(true)
.with_bracketed_fields(true),
)
.try_init();
}
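
A usage sketch for the hierarchical variant (the module path is assumed from the test_utils layout above):

use tracing::Level;

use crate::test_utils::tracing::try_init_testing_tracing_only_leaf;

#[tokio::test]
async fn example_with_hierarchical_tracing() {
    // Safe to call in every test: the initializer swallows the "subscriber
    // already set" error, so only the first call actually installs it.
    try_init_testing_tracing_only_leaf(Level::DEBUG);

    tracing::debug!("test setup complete");
}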

View File

@ -0,0 +1,23 @@
use std::ops::Bound;
use http::HeaderValue;
pub fn build_no_satisfiable_content_range(len: u64) -> HeaderValue {
HeaderValue::from_str(&format!("bytes */{len}"))
.unwrap_or_else(|e| unreachable!("Invalid content range: {e}"))
}
pub fn bound_range_to_content_range(r: &(Bound<u64>, Bound<u64>), l: u64) -> Option<HeaderValue> {
match r {
(Bound::Included(start), Bound::Included(end)) => Some(format!("bytes {start}-{end}/{l}")),
(Bound::Included(start), Bound::Excluded(end)) => {
Some(format!("bytes {start}-{}/{l}", end - 1))
}
(Bound::Included(start), Bound::Unbounded) => Some(format!(
"bytes {start}-{}/{l}",
if l > 0 { l - 1 } else { 0 }
)),
_ => None,
}
.and_then(|s| HeaderValue::from_str(&s).ok())
}
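
A quick sketch of the mapping these helpers perform (module path assumed; values chosen for illustration):

use std::ops::Bound;

use crate::utils::http::{bound_range_to_content_range, build_no_satisfiable_content_range};

fn content_range_examples() {
    let total_len = 1000u64;

    // A request for bytes=0-499 becomes "bytes 0-499/1000".
    let head = bound_range_to_content_range(&(Bound::Included(0), Bound::Included(499)), total_len);
    assert_eq!(head.unwrap().to_str().unwrap(), "bytes 0-499/1000");

    // An open-ended bytes=500- becomes "bytes 500-999/1000".
    let tail = bound_range_to_content_range(&(Bound::Included(500), Bound::Unbounded), total_len);
    assert_eq!(tail.unwrap().to_str().unwrap(), "bytes 500-999/1000");

    // For an unsatisfiable range, the 416 response carries "bytes */1000".
    let unsatisfiable = build_no_satisfiable_content_range(total_len);
    assert_eq!(unsatisfiable.to_str().unwrap(), "bytes */1000");
}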

View File

@ -0,0 +1,20 @@
use convert_case::{Case, Casing};
use serde_json::Value;
pub fn convert_json_keys(json: Value, case: Case) -> Value {
match json {
Value::Object(object) => Value::Object(
object
.into_iter()
.map(|(key, value)| (key.to_case(case), convert_json_keys(value, case)))
.collect(),
),
Value::Array(array) => Value::Array(
array
.into_iter()
.map(|item| convert_json_keys(item, case))
.collect(),
),
_ => json,
}
}
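
A short sketch of the key conversion (module path assumed; only object keys change, values are left alone):

use convert_case::Case;
use serde_json::json;

use crate::utils::json::convert_json_keys;

fn convert_json_keys_example() {
    let value = json!({
        "task_type": "sync_one_subscription_feeds_incremental",
        "nested": [{ "max_attempts": 3 }]
    });

    // Keys are recursively converted to camelCase; string values stay untouched.
    let camel = convert_json_keys(value, Case::Camel);
    assert_eq!(camel["taskType"], "sync_one_subscription_feeds_incremental");
    assert_eq!(camel["nested"][0]["maxAttempts"], 3);
}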

View File

@ -0,0 +1,3 @@
pub mod http;
pub mod json;
pub mod nom;

View File

@ -0,0 +1,261 @@
use std::collections::HashMap;
use icu::properties::{CodePointMapData, props::Script};
use lazy_static::lazy_static;
use maplit::hashmap;
use nom::{
IResult, Parser,
branch::alt,
bytes::complete::tag,
character::complete::{anychar, digit1, none_of, satisfy},
combinator::{map, opt, recognize, value, verify},
error::ParseError,
multi::many1,
sequence::{delimited, preceded},
};
use num_traits::{PrimInt, Signed};
lazy_static! {
pub static ref ZH_DIGIT_MAP: HashMap<char, u32> = {
hashmap! {
'〇' => 0,
'零' => 0,
'一' => 1,
'壹' => 1,
'二' => 2,
'贰' => 2,
'三' => 3,
'叁' => 3,
'四' => 4,
'肆' => 4,
'五' => 5,
'伍' => 5,
'六' => 6,
'陆' => 6,
'七' => 7,
'柒' => 7,
'八' => 8,
'捌' => 8,
'九' => 9,
'玖' => 9,
'十' => 10,
'拾' => 10,
'廿' => 20,
'念' => 20,
'百' => 100,
'佰' => 100,
'千' => 1000,
'仟' => 1000,
'万' => 10000,
'萬' => 10000,
'亿' => 100000000,
'億' => 100000000,
}
};
}
pub fn with_recognized<'a, F, O, E>(
mut parser: F,
) -> impl FnMut(&'a str) -> IResult<&'a str, (O, &'a str), E>
where
F: Parser<&'a str, Output = O, Error = E>,
E: ParseError<&'a str>,
{
move |input: &'a str| {
let i = input;
let (rest, output) = parser.parse(i)?;
let consumed_len = i.len() - rest.len();
Ok((rest, (output, &i[..consumed_len])))
}
}
pub fn is_some_unicode_scx(input: &str, script: Script) -> IResult<&str, char> {
let script_data = CodePointMapData::<Script>::new();
verify(anychar, |&c| script_data.get(c) == script).parse(input)
}
pub fn is_han_scx(input: &str) -> IResult<&str, char> {
is_some_unicode_scx(input, Script::Han)
}
pub fn is_hira_scx(input: &str) -> IResult<&str, char> {
is_some_unicode_scx(input, Script::Hiragana)
}
pub fn is_kana_scx(input: &str) -> IResult<&str, char> {
is_some_unicode_scx(input, Script::Katakana)
}
pub fn delimited_by_brackets(input: &str) -> IResult<&str, &str> {
alt((
delimited(tag("["), recognize(many1(none_of("[]"))), tag("]")),
delimited(tag("【"), recognize(many1(none_of("【】"))), tag("】")),
))
.parse(input)
}
pub struct ZhNum {
pub int: i32,
}
impl ZhNum {
fn parse_digit<'a>(
max_value: u32,
) -> impl Parser<&'a str, Output = u32, Error = nom::error::Error<&'a str>> {
map(
satisfy(move |c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v <= max_value)),
|c| *ZH_DIGIT_MAP.get(&c).unwrap(),
)
}
fn parse_个(input: &str) -> IResult<&str, u32> {
Self::parse_digit(9).parse(input)
}
fn parse_十(input: &str) -> IResult<&str, u32> {
let (input, (p, o, s)) = (
opt(Self::parse_个),
map(
satisfy(|c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v == 10 || *v == 20)),
|c| *ZH_DIGIT_MAP.get(&c).unwrap(),
),
opt(Self::parse_个),
)
.parse(input)?;
let value = p.unwrap_or(1) * o + s.unwrap_or(0);
Ok((input, value))
}
pub fn parse_百(input: &str) -> IResult<&str, u32> {
let (input, (p, o, s)) = (
opt(Self::parse_个),
map(
satisfy(|c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v == 100 || *v == 200)),
|c| *ZH_DIGIT_MAP.get(&c).unwrap(),
),
opt(Self::parse_十),
)
.parse(input)?;
let value = p.unwrap_or(1) * o + s.unwrap_or(0);
Ok((input, value))
}
pub fn parse_千(input: &str) -> IResult<&str, u32> {
let (input, (p, o, s)) = (
opt(Self::parse_个),
value(
1000u32,
satisfy(|c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v == 1000)),
),
opt(Self::parse_百),
)
.parse(input)?;
let value = p.unwrap_or(1) * o + s.unwrap_or(0);
Ok((input, value))
}
pub fn parse_万(input: &str) -> IResult<&str, u32> {
let (input, (p, o, s)) = (
opt(Self::parse_千),
value(
10000u32,
satisfy(|c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v == 10000)),
),
opt(Self::parse_千),
)
.parse(input)?;
let value = p.unwrap_or(1) * o + s.unwrap_or(0);
Ok((input, value))
}
pub fn parse_亿(input: &str) -> IResult<&str, u32> {
let (input, (p, o, s)) = (
opt(Self::parse_万),
value(
100000000u32,
satisfy(|c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v == 100000000)),
),
opt(Self::parse_万),
)
.parse(input)?;
let value = p.unwrap_or(1) * o + s.unwrap_or(0);
Ok((input, value))
}
pub fn parse_uint(input: &str) -> IResult<&str, u32> {
preceded(
opt(tag("")),
alt((
Self::parse_个,
Self::parse_十,
Self::parse_百,
Self::parse_千,
Self::parse_万,
Self::parse_亿,
)),
)
.parse(input)
}
pub fn parse_int(input: &str) -> IResult<&str, i32> {
let (input, (sign, value)) = (
opt(alt((value(1, tag("")), value(-1, tag(""))))),
alt((
Self::parse_个,
Self::parse_十,
Self::parse_百,
Self::parse_千,
Self::parse_万,
Self::parse_亿,
)),
)
.parse(input)?;
Ok((input, sign.unwrap_or(1) * value as i32))
}
}
pub fn parse_uint<T: PrimInt>(input: &str) -> IResult<&str, T> {
let (input, value) = preceded(opt(tag("+")), digit1).parse(input)?;
let value = T::from_str_radix(value, 10).map_err(|_| {
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Digit))
})?;
Ok((input, value))
}
pub fn parse_int<T: PrimInt + Signed>(input: &str) -> IResult<&str, T> {
let (input, value) = recognize((
opt(alt((
value(T::one(), tag("+")),
value(T::one().neg(), tag("-")),
))),
digit1,
))
.parse(input)?;
let value = T::from_str_radix(value, 10).map_err(|_| {
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Digit))
})?;
Ok((input, value))
}
pub fn parse_month_num(input: &str) -> IResult<&str, u32> {
verify(alt((ZhNum::parse_uint, parse_uint::<u32>)), |v| {
*v <= 12 && *v > 0
})
.parse(input)
}
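
A few illustrative calls against the parsers above (module path assumed):

use crate::utils::nom::{ZhNum, parse_month_num, parse_uint};

fn chinese_numeral_examples() {
    // "十二" parses to 12, leaving the rest of the input untouched.
    let (rest, value) = ZhNum::parse_uint("十二月").unwrap();
    assert_eq!((rest, value), ("月", 12));

    // ASCII digits go through the generic integer parser.
    assert_eq!(parse_uint::<u32>("08").unwrap().1, 8);

    // parse_month_num accepts both forms but rejects values outside 1..=12.
    assert_eq!(parse_month_num("十一").unwrap().1, 11);
    assert!(parse_month_num("十三").is_err());
}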

View File

@ -2,5 +2,6 @@ pub mod core;
pub mod graphql;
pub mod metadata;
pub mod oidc;
pub mod r#static;
pub use core::{Controller, ControllerTrait, PrefixController};

View File

@ -0,0 +1,103 @@
use std::sync::Arc;
use axum::{
Extension, Router,
extract::{Path, Query, State},
middleware::from_fn_with_state,
response::Response,
routing::get,
};
use axum_extra::{TypedHeader, headers::Range};
use headers_accept::Accept;
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
auth::{AuthError, AuthUserInfo, auth_middleware},
errors::RecorderResult,
web::controller::Controller,
};
pub const CONTROLLER_PREFIX: &str = "/api/static";
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub enum OptimizeType {
#[serde(rename = "accept")]
AcceptHeader,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct StaticQuery {
optimize: Option<OptimizeType>,
}
async fn serve_subscriber_static(
State(ctx): State<Arc<dyn AppContextTrait>>,
Path((subscriber_id, path)): Path<(i32, String)>,
Extension(auth_user_info): Extension<AuthUserInfo>,
Query(query): Query<StaticQuery>,
range: Option<TypedHeader<Range>>,
accept: Option<TypedHeader<Accept>>,
) -> RecorderResult<Response> {
if subscriber_id != auth_user_info.subscriber_auth.id {
Err(AuthError::PermissionError)?;
}
let storage = ctx.storage();
let media = ctx.media();
let storage_path = storage.build_subscriber_path(subscriber_id, &path);
if query
.optimize
.is_some_and(|optimize| optimize == OptimizeType::AcceptHeader)
&& storage_path
.extension()
.is_some_and(|ext| media.is_legacy_image_format(ext))
&& let Some(TypedHeader(accept)) = accept
{
storage
.serve_optimized_image(storage_path, range, accept)
.await
} else {
storage.serve_file(storage_path, range).await
}
}
async fn serve_public_static(
State(ctx): State<Arc<dyn AppContextTrait>>,
Path(path): Path<String>,
Query(query): Query<StaticQuery>,
range: Option<TypedHeader<Range>>,
accept: Option<TypedHeader<Accept>>,
) -> RecorderResult<Response> {
let storage = ctx.storage();
let media = ctx.media();
let storage_path = storage.build_public_path(&path);
if query
.optimize
.is_some_and(|optimize| optimize == OptimizeType::AcceptHeader)
&& storage_path
.extension()
.is_some_and(|ext| media.is_legacy_image_format(ext))
&& let Some(TypedHeader(accept)) = accept
{
storage
.serve_optimized_image(storage_path, range, accept)
.await
} else {
storage.serve_file(storage_path, range).await
}
}
pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
let router = Router::<Arc<dyn AppContextTrait>>::new()
.route(
"/subscribers/{subscriber_id}/{*path}",
get(serve_subscriber_static).layer(from_fn_with_state(ctx, auth_middleware)),
)
.route("/public/{*path}", get(serve_public_static));
Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))
}
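
A client-side sketch of the new route; the base URL and asset path are placeholders, not values from this diff.

use reqwest::header::{ACCEPT, RANGE};

async fn fetch_optimized_poster() -> Result<(), reqwest::Error> {
    let resp = reqwest::Client::new()
        .get("http://127.0.0.1:5001/api/static/public/posters/example.jpg?optimize=accept")
        .header(ACCEPT, "image/avif,image/webp,image/*;q=0.8")
        .header(RANGE, "bytes=0-1023")
        .send()
        .await?;

    // With optimize=accept and a legacy image on disk, the server re-encodes to a
    // format advertised in Accept; otherwise it serves the original bytes,
    // honoring the Range header.
    println!("{} {:?}", resp.status(), resp.headers().get("content-type"));
    Ok(())
}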

View File

@ -1,6 +0,0 @@
---
source: apps/recorder/src/web/middleware/request_id.rs
assertion_line: 126
expression: id
---
"foo-barbaz"

View File

@ -77,7 +77,6 @@
"tw-animate-css": "^1.3.4",
"type-fest": "^4.41.0",
"vaul": "^1.1.2",
"es-toolkit": "^1.39.3",
"@tanstack/react-router": "^1.121.2"
},
"devDependencies": {

View File

@ -4,7 +4,7 @@ import {
SidebarMenuItem,
} from '@/components/ui/sidebar';
import { Image } from '@/components/ui/image';
import { Img } from '@/components/ui/img';
export function AppIcon() {
return (
@ -16,7 +16,7 @@ export function AppIcon() {
>
<div className="flex size-8 items-center justify-center rounded-lg bg-sidebar-primary text-sidebar-primary-foreground">
<div className="relative size-8">
<Image
<Img
src="/assets/favicon.png"
alt="App Logo"
className="object-cover"

View File

@ -1,9 +0,0 @@
import type { ComponentProps } from 'react';
export type ImageProps = Omit<ComponentProps<'img'>, 'alt'> &
Required<Pick<ComponentProps<'img'>, 'alt'>>;
export const Image = (props: ImageProps) => {
// biome-ignore lint/nursery/noImgElement: <explanation>
return <img {...props} alt={props.alt} />;
};

View File

@ -0,0 +1,45 @@
import { useInject } from "@/infra/di/inject";
import { DOCUMENT } from "@/infra/platform/injection";
import { type ComponentProps, useMemo } from "react";
const URL_PARSE_REGEX = /^([^?#]*)(\?[^#]*)?(#.*)?$/;
function parseURL(url: string) {
const match = url.match(URL_PARSE_REGEX);
if (!match) {
return { other: url, search: "", hash: "" };
}
return {
other: match[1] || "",
search: match[2] || "",
hash: match[3] || "",
};
}
export type ImgProps = Omit<ComponentProps<"img">, "alt"> &
Required<Pick<ComponentProps<"img">, "alt">> & {
optimize?: "accept";
};
export const Img = ({
src: propsSrc,
optimize = "accept",
...props
}: ImgProps) => {
const document = useInject(DOCUMENT);
const src = useMemo(() => {
const baseURI = document?.baseURI;
if (!propsSrc || !baseURI) {
return propsSrc;
}
const { other, search, hash } = parseURL(propsSrc);
const searchParams = new URLSearchParams(search);
searchParams.set("optimize", optimize);
return `${other}?${searchParams.toString()}${hash}`;
}, [propsSrc, optimize, document?.baseURI]);
// biome-ignore lint/nursery/noImgElement: <explanation>
return <img {...props} alt={props.alt} src={src} />;
};

View File

@ -106,7 +106,6 @@ query GetSubscriptionDetail ($id: Int!) {
id
mikanBangumiId
displayName
rawName
season
seasonRaw
fansub
@ -123,25 +122,25 @@ query GetSubscriptionDetail ($id: Int!) {
`;
export const SYNC_SUBSCRIPTION_FEEDS_INCREMENTAL = gql`
mutation SyncSubscriptionFeedsIncremental($id: Int!) {
subscriptionSyncOneFeedsIncremental(filter: { id: $id }) {
taskId
mutation SyncSubscriptionFeedsIncremental($filter: SubscriptionsFilterInput!) {
subscriptionsSyncOneFeedsIncremental(filter: $filter) {
id
}
}
`;
export const SYNC_SUBSCRIPTION_FEEDS_FULL = gql`
mutation SyncSubscriptionFeedsFull($id: Int!) {
subscriptionSyncOneFeedsFull(filter: { id: $id }) {
taskId
mutation SyncSubscriptionFeedsFull($filter: SubscriptionsFilterInput!) {
subscriptionsSyncOneFeedsFull(filter: $filter) {
id
}
}
`;
export const SYNC_SUBSCRIPTION_SOURCES = gql`
mutation SyncSubscriptionSources($id: Int!) {
subscriptionSyncOneSources(filter: { id: $id }) {
taskId
mutation SyncSubscriptionSources($filter: SubscriptionsFilterInput!) {
subscriptionsSyncOneSources(filter: $filter) {
id
}
}
`;

View File

@ -35,6 +35,31 @@ export const GET_TASKS = gql`
}
`;
export const DELETE_TASKS = gql`
mutation DeleteTasks($filters: SubscriberTasksFilterInput!) {
subscriberTasksDelete(filter: $filters)
}
`;
export const RETRY_TASKS = gql`
mutation RetryTasks($filters: SubscriberTasksFilterInput!) {
subscriberTasksRetryOne(filter: $filters) {
id,
job,
taskType,
status,
attempts,
maxAttempts,
runAt,
lastError,
lockAt,
lockBy,
doneAt,
priority
}
}
`;
export const TaskTypedSyncOneSubscriptionFeedsIncrementalSchema = type({
taskType: `'${SubscriberTaskTypeEnum.SyncOneSubscriptionFeedsIncremental}'`,
}).and(SubscriptionSchema);

View File

@ -24,11 +24,13 @@ type Documents = {
"\n mutation InsertSubscription($data: SubscriptionsInsertInput!) {\n subscriptionsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n credentialId\n }\n }\n": typeof types.InsertSubscriptionDocument,
"\n mutation UpdateSubscriptions(\n $data: SubscriptionsUpdateInput!,\n $filters: SubscriptionsFilterInput!,\n ) {\n subscriptionsUpdate (\n data: $data\n filter: $filters\n ) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n }\n}\n": typeof types.UpdateSubscriptionsDocument,
"\n mutation DeleteSubscriptions($filters: SubscriptionsFilterInput) {\n subscriptionsDelete(filter: $filters)\n }\n": typeof types.DeleteSubscriptionsDocument,
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n rawName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n": typeof types.GetSubscriptionDetailDocument,
"\n mutation SyncSubscriptionFeedsIncremental($id: Int!) {\n subscriptionSyncOneFeedsIncremental(filter: { id: $id }) {\n taskId\n }\n }\n": typeof types.SyncSubscriptionFeedsIncrementalDocument,
"\n mutation SyncSubscriptionFeedsFull($id: Int!) {\n subscriptionSyncOneFeedsFull(filter: { id: $id }) {\n taskId\n }\n }\n": typeof types.SyncSubscriptionFeedsFullDocument,
"\n mutation SyncSubscriptionSources($id: Int!) {\n subscriptionSyncOneSources(filter: { id: $id }) {\n taskId\n }\n }\n": typeof types.SyncSubscriptionSourcesDocument,
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n": typeof types.GetSubscriptionDetailDocument,
"\n mutation SyncSubscriptionFeedsIncremental($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsIncremental(filter: $filter) {\n id\n }\n }\n": typeof types.SyncSubscriptionFeedsIncrementalDocument,
"\n mutation SyncSubscriptionFeedsFull($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsFull(filter: $filter) {\n id\n }\n }\n": typeof types.SyncSubscriptionFeedsFullDocument,
"\n mutation SyncSubscriptionSources($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneSources(filter: $filter) {\n id\n }\n }\n": typeof types.SyncSubscriptionSourcesDocument,
"\n query GetTasks($filters: SubscriberTasksFilterInput!, $orderBy: SubscriberTasksOrderInput!, $pagination: PaginationInput!) {\n subscriberTasks(\n pagination: $pagination\n filters: $filters\n orderBy: $orderBy\n ) {\n nodes {\n id,\n job,\n taskType,\n status,\n attempts,\n maxAttempts,\n runAt,\n lastError,\n lockAt,\n lockBy,\n doneAt,\n priority\n }\n paginationInfo {\n total\n pages\n }\n }\n }\n": typeof types.GetTasksDocument,
"\n mutation DeleteTasks($filters: SubscriberTasksFilterInput!) {\n subscriberTasksDelete(filter: $filters)\n }\n": typeof types.DeleteTasksDocument,
"\n mutation RetryTasks($filters: SubscriberTasksFilterInput!) {\n subscriberTasksRetryOne(filter: $filters) {\n id,\n job,\n taskType,\n status,\n attempts,\n maxAttempts,\n runAt,\n lastError,\n lockAt,\n lockBy,\n doneAt,\n priority\n }\n }\n": typeof types.RetryTasksDocument,
};
const documents: Documents = {
"\n query GetCredential3rd($filters: Credential3rdFilterInput!, $orderBy: Credential3rdOrderInput, $pagination: PaginationInput) {\n credential3rd(filters: $filters, orderBy: $orderBy, pagination: $pagination) {\n nodes {\n id\n cookies\n username\n password\n userAgent\n createdAt\n updatedAt\n credentialType\n }\n paginationInfo {\n total\n pages\n }\n }\n }\n": types.GetCredential3rdDocument,
@ -41,11 +43,13 @@ const documents: Documents = {
"\n mutation InsertSubscription($data: SubscriptionsInsertInput!) {\n subscriptionsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n credentialId\n }\n }\n": types.InsertSubscriptionDocument,
"\n mutation UpdateSubscriptions(\n $data: SubscriptionsUpdateInput!,\n $filters: SubscriptionsFilterInput!,\n ) {\n subscriptionsUpdate (\n data: $data\n filter: $filters\n ) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n }\n}\n": types.UpdateSubscriptionsDocument,
"\n mutation DeleteSubscriptions($filters: SubscriptionsFilterInput) {\n subscriptionsDelete(filter: $filters)\n }\n": types.DeleteSubscriptionsDocument,
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n rawName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n": types.GetSubscriptionDetailDocument,
"\n mutation SyncSubscriptionFeedsIncremental($id: Int!) {\n subscriptionSyncOneFeedsIncremental(filter: { id: $id }) {\n taskId\n }\n }\n": types.SyncSubscriptionFeedsIncrementalDocument,
"\n mutation SyncSubscriptionFeedsFull($id: Int!) {\n subscriptionSyncOneFeedsFull(filter: { id: $id }) {\n taskId\n }\n }\n": types.SyncSubscriptionFeedsFullDocument,
"\n mutation SyncSubscriptionSources($id: Int!) {\n subscriptionSyncOneSources(filter: { id: $id }) {\n taskId\n }\n }\n": types.SyncSubscriptionSourcesDocument,
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n": types.GetSubscriptionDetailDocument,
"\n mutation SyncSubscriptionFeedsIncremental($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsIncremental(filter: $filter) {\n id\n }\n }\n": types.SyncSubscriptionFeedsIncrementalDocument,
"\n mutation SyncSubscriptionFeedsFull($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsFull(filter: $filter) {\n id\n }\n }\n": types.SyncSubscriptionFeedsFullDocument,
"\n mutation SyncSubscriptionSources($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneSources(filter: $filter) {\n id\n }\n }\n": types.SyncSubscriptionSourcesDocument,
"\n query GetTasks($filters: SubscriberTasksFilterInput!, $orderBy: SubscriberTasksOrderInput!, $pagination: PaginationInput!) {\n subscriberTasks(\n pagination: $pagination\n filters: $filters\n orderBy: $orderBy\n ) {\n nodes {\n id,\n job,\n taskType,\n status,\n attempts,\n maxAttempts,\n runAt,\n lastError,\n lockAt,\n lockBy,\n doneAt,\n priority\n }\n paginationInfo {\n total\n pages\n }\n }\n }\n": types.GetTasksDocument,
"\n mutation DeleteTasks($filters: SubscriberTasksFilterInput!) {\n subscriberTasksDelete(filter: $filters)\n }\n": types.DeleteTasksDocument,
"\n mutation RetryTasks($filters: SubscriberTasksFilterInput!) {\n subscriberTasksRetryOne(filter: $filters) {\n id,\n job,\n taskType,\n status,\n attempts,\n maxAttempts,\n runAt,\n lastError,\n lockAt,\n lockBy,\n doneAt,\n priority\n }\n }\n": types.RetryTasksDocument,
};
/**
@ -105,23 +109,31 @@ export function gql(source: "\n mutation DeleteSubscriptions($filters: Subscr
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n rawName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n"): (typeof documents)["\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n rawName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n"];
export function gql(source: "\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n"): (typeof documents)["\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n"];
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\n mutation SyncSubscriptionFeedsIncremental($id: Int!) {\n subscriptionSyncOneFeedsIncremental(filter: { id: $id }) {\n taskId\n }\n }\n"): (typeof documents)["\n mutation SyncSubscriptionFeedsIncremental($id: Int!) {\n subscriptionSyncOneFeedsIncremental(filter: { id: $id }) {\n taskId\n }\n }\n"];
export function gql(source: "\n mutation SyncSubscriptionFeedsIncremental($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsIncremental(filter: $filter) {\n id\n }\n }\n"): (typeof documents)["\n mutation SyncSubscriptionFeedsIncremental($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsIncremental(filter: $filter) {\n id\n }\n }\n"];
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\n mutation SyncSubscriptionFeedsFull($id: Int!) {\n subscriptionSyncOneFeedsFull(filter: { id: $id }) {\n taskId\n }\n }\n"): (typeof documents)["\n mutation SyncSubscriptionFeedsFull($id: Int!) {\n subscriptionSyncOneFeedsFull(filter: { id: $id }) {\n taskId\n }\n }\n"];
export function gql(source: "\n mutation SyncSubscriptionFeedsFull($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsFull(filter: $filter) {\n id\n }\n }\n"): (typeof documents)["\n mutation SyncSubscriptionFeedsFull($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsFull(filter: $filter) {\n id\n }\n }\n"];
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\n mutation SyncSubscriptionSources($id: Int!) {\n subscriptionSyncOneSources(filter: { id: $id }) {\n taskId\n }\n }\n"): (typeof documents)["\n mutation SyncSubscriptionSources($id: Int!) {\n subscriptionSyncOneSources(filter: { id: $id }) {\n taskId\n }\n }\n"];
export function gql(source: "\n mutation SyncSubscriptionSources($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneSources(filter: $filter) {\n id\n }\n }\n"): (typeof documents)["\n mutation SyncSubscriptionSources($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneSources(filter: $filter) {\n id\n }\n }\n"];
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\n query GetTasks($filters: SubscriberTasksFilterInput!, $orderBy: SubscriberTasksOrderInput!, $pagination: PaginationInput!) {\n subscriberTasks(\n pagination: $pagination\n filters: $filters\n orderBy: $orderBy\n ) {\n nodes {\n id,\n job,\n taskType,\n status,\n attempts,\n maxAttempts,\n runAt,\n lastError,\n lockAt,\n lockBy,\n doneAt,\n priority\n }\n paginationInfo {\n total\n pages\n }\n }\n }\n"): (typeof documents)["\n query GetTasks($filters: SubscriberTasksFilterInput!, $orderBy: SubscriberTasksOrderInput!, $pagination: PaginationInput!) {\n subscriberTasks(\n pagination: $pagination\n filters: $filters\n orderBy: $orderBy\n ) {\n nodes {\n id,\n job,\n taskType,\n status,\n attempts,\n maxAttempts,\n runAt,\n lastError,\n lockAt,\n lockBy,\n doneAt,\n priority\n }\n paginationInfo {\n total\n pages\n }\n }\n }\n"];
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\n mutation DeleteTasks($filters: SubscriberTasksFilterInput!) {\n subscriberTasksDelete(filter: $filters)\n }\n"): (typeof documents)["\n mutation DeleteTasks($filters: SubscriberTasksFilterInput!) {\n subscriberTasksDelete(filter: $filters)\n }\n"];
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\n mutation RetryTasks($filters: SubscriberTasksFilterInput!) {\n subscriberTasksRetryOne(filter: $filters) {\n id,\n job,\n taskType,\n status,\n attempts,\n maxAttempts,\n runAt,\n lastError,\n lockAt,\n lockBy,\n doneAt,\n priority\n }\n }\n"): (typeof documents)["\n mutation RetryTasks($filters: SubscriberTasksFilterInput!) {\n subscriberTasksRetryOne(filter: $filters) {\n id,\n job,\n taskType,\n status,\n attempts,\n maxAttempts,\n runAt,\n lastError,\n lockAt,\n lockBy,\n doneAt,\n priority\n }\n }\n"];
export function gql(source: string) {
return (documents as any)[source] ?? {};

View File

@ -30,8 +30,9 @@ export type Bangumi = {
id: Scalars['Int']['output'];
mikanBangumiId?: Maybe<Scalars['String']['output']>;
mikanFansubId?: Maybe<Scalars['String']['output']>;
originName: Scalars['String']['output'];
originPosterLink?: Maybe<Scalars['String']['output']>;
posterLink?: Maybe<Scalars['String']['output']>;
rawName: Scalars['String']['output'];
rssLink?: Maybe<Scalars['String']['output']>;
savePath?: Maybe<Scalars['String']['output']>;
season: Scalars['Int']['output'];
@ -74,8 +75,9 @@ export type BangumiBasic = {
id: Scalars['Int']['output'];
mikanBangumiId?: Maybe<Scalars['String']['output']>;
mikanFansubId?: Maybe<Scalars['String']['output']>;
originName: Scalars['String']['output'];
originPosterLink?: Maybe<Scalars['String']['output']>;
posterLink?: Maybe<Scalars['String']['output']>;
rawName: Scalars['String']['output'];
rssLink?: Maybe<Scalars['String']['output']>;
savePath?: Maybe<Scalars['String']['output']>;
season: Scalars['Int']['output'];
@ -108,13 +110,14 @@ export type BangumiFilterInput = {
mikanBangumiId?: InputMaybe<StringFilterInput>;
mikanFansubId?: InputMaybe<StringFilterInput>;
or?: InputMaybe<Array<BangumiFilterInput>>;
originName?: InputMaybe<StringFilterInput>;
originPosterLink?: InputMaybe<StringFilterInput>;
posterLink?: InputMaybe<StringFilterInput>;
rawName?: InputMaybe<StringFilterInput>;
rssLink?: InputMaybe<StringFilterInput>;
savePath?: InputMaybe<StringFilterInput>;
season?: InputMaybe<IntegerFilterInput>;
seasonRaw?: InputMaybe<StringFilterInput>;
subscriberId?: InputMaybe<SubscriberIdFilterInput>;
subscriberId?: InputMaybe<IntegerFilterInput>;
updatedAt?: InputMaybe<TextFilterInput>;
};
@ -127,13 +130,14 @@ export type BangumiInsertInput = {
id?: InputMaybe<Scalars['Int']['input']>;
mikanBangumiId?: InputMaybe<Scalars['String']['input']>;
mikanFansubId?: InputMaybe<Scalars['String']['input']>;
originName: Scalars['String']['input'];
originPosterLink?: InputMaybe<Scalars['String']['input']>;
posterLink?: InputMaybe<Scalars['String']['input']>;
rawName: Scalars['String']['input'];
rssLink?: InputMaybe<Scalars['String']['input']>;
savePath?: InputMaybe<Scalars['String']['input']>;
season: Scalars['Int']['input'];
seasonRaw?: InputMaybe<Scalars['String']['input']>;
subscriberId?: InputMaybe<Scalars['Int']['input']>;
subscriberId: Scalars['Int']['input'];
updatedAt?: InputMaybe<Scalars['String']['input']>;
};
@ -146,8 +150,9 @@ export type BangumiOrderInput = {
id?: InputMaybe<OrderByEnum>;
mikanBangumiId?: InputMaybe<OrderByEnum>;
mikanFansubId?: InputMaybe<OrderByEnum>;
originName?: InputMaybe<OrderByEnum>;
originPosterLink?: InputMaybe<OrderByEnum>;
posterLink?: InputMaybe<OrderByEnum>;
rawName?: InputMaybe<OrderByEnum>;
rssLink?: InputMaybe<OrderByEnum>;
savePath?: InputMaybe<OrderByEnum>;
season?: InputMaybe<OrderByEnum>;
@ -165,12 +170,14 @@ export type BangumiUpdateInput = {
id?: InputMaybe<Scalars['Int']['input']>;
mikanBangumiId?: InputMaybe<Scalars['String']['input']>;
mikanFansubId?: InputMaybe<Scalars['String']['input']>;
originName?: InputMaybe<Scalars['String']['input']>;
originPosterLink?: InputMaybe<Scalars['String']['input']>;
posterLink?: InputMaybe<Scalars['String']['input']>;
rawName?: InputMaybe<Scalars['String']['input']>;
rssLink?: InputMaybe<Scalars['String']['input']>;
savePath?: InputMaybe<Scalars['String']['input']>;
season?: InputMaybe<Scalars['Int']['input']>;
seasonRaw?: InputMaybe<Scalars['String']['input']>;
subscriberId?: InputMaybe<Scalars['Int']['input']>;
updatedAt?: InputMaybe<Scalars['String']['input']>;
};
@ -491,7 +498,7 @@ export type Downloads = {
homepage?: Maybe<Scalars['String']['output']>;
id: Scalars['Int']['output'];
mime: DownloadMimeEnum;
rawName: Scalars['String']['output'];
originName: Scalars['String']['output'];
savePath?: Maybe<Scalars['String']['output']>;
status: DownloadStatusEnum;
subscriber?: Maybe<Subscribers>;
@ -511,7 +518,7 @@ export type DownloadsBasic = {
homepage?: Maybe<Scalars['String']['output']>;
id: Scalars['Int']['output'];
mime: DownloadMimeEnum;
rawName: Scalars['String']['output'];
originName: Scalars['String']['output'];
savePath?: Maybe<Scalars['String']['output']>;
status: DownloadStatusEnum;
subscriberId: Scalars['Int']['output'];
@ -545,7 +552,7 @@ export type DownloadsFilterInput = {
id?: InputMaybe<IntegerFilterInput>;
mime?: InputMaybe<DownloadMimeEnumFilterInput>;
or?: InputMaybe<Array<DownloadsFilterInput>>;
rawName?: InputMaybe<StringFilterInput>;
originName?: InputMaybe<StringFilterInput>;
savePath?: InputMaybe<StringFilterInput>;
status?: InputMaybe<DownloadStatusEnumFilterInput>;
subscriberId?: InputMaybe<SubscriberIdFilterInput>;
@ -563,7 +570,7 @@ export type DownloadsInsertInput = {
homepage?: InputMaybe<Scalars['String']['input']>;
id?: InputMaybe<Scalars['Int']['input']>;
mime: DownloadMimeEnum;
rawName: Scalars['String']['input'];
originName: Scalars['String']['input'];
savePath?: InputMaybe<Scalars['String']['input']>;
status: DownloadStatusEnum;
subscriberId?: InputMaybe<Scalars['Int']['input']>;
@ -581,7 +588,7 @@ export type DownloadsOrderInput = {
homepage?: InputMaybe<OrderByEnum>;
id?: InputMaybe<OrderByEnum>;
mime?: InputMaybe<OrderByEnum>;
rawName?: InputMaybe<OrderByEnum>;
originName?: InputMaybe<OrderByEnum>;
savePath?: InputMaybe<OrderByEnum>;
status?: InputMaybe<OrderByEnum>;
subscriberId?: InputMaybe<OrderByEnum>;
@ -599,7 +606,7 @@ export type DownloadsUpdateInput = {
homepage?: InputMaybe<Scalars['String']['input']>;
id?: InputMaybe<Scalars['Int']['input']>;
mime?: InputMaybe<DownloadMimeEnum>;
rawName?: InputMaybe<Scalars['String']['input']>;
originName?: InputMaybe<Scalars['String']['input']>;
savePath?: InputMaybe<Scalars['String']['input']>;
status?: InputMaybe<DownloadStatusEnum>;
updatedAt?: InputMaybe<Scalars['String']['input']>;
@ -618,8 +625,9 @@ export type Episodes = {
homepage?: Maybe<Scalars['String']['output']>;
id: Scalars['Int']['output'];
mikanEpisodeId?: Maybe<Scalars['String']['output']>;
originName: Scalars['String']['output'];
originPosterLink?: Maybe<Scalars['String']['output']>;
posterLink?: Maybe<Scalars['String']['output']>;
rawName: Scalars['String']['output'];
resolution?: Maybe<Scalars['String']['output']>;
savePath?: Maybe<Scalars['String']['output']>;
season: Scalars['Int']['output'];
@ -664,8 +672,9 @@ export type EpisodesBasic = {
homepage?: Maybe<Scalars['String']['output']>;
id: Scalars['Int']['output'];
mikanEpisodeId?: Maybe<Scalars['String']['output']>;
originName: Scalars['String']['output'];
originPosterLink?: Maybe<Scalars['String']['output']>;
posterLink?: Maybe<Scalars['String']['output']>;
rawName: Scalars['String']['output'];
resolution?: Maybe<Scalars['String']['output']>;
savePath?: Maybe<Scalars['String']['output']>;
season: Scalars['Int']['output'];
@ -701,8 +710,9 @@ export type EpisodesFilterInput = {
id?: InputMaybe<IntegerFilterInput>;
mikanEpisodeId?: InputMaybe<StringFilterInput>;
or?: InputMaybe<Array<EpisodesFilterInput>>;
originName?: InputMaybe<StringFilterInput>;
originPosterLink?: InputMaybe<StringFilterInput>;
posterLink?: InputMaybe<StringFilterInput>;
rawName?: InputMaybe<StringFilterInput>;
resolution?: InputMaybe<StringFilterInput>;
savePath?: InputMaybe<StringFilterInput>;
season?: InputMaybe<IntegerFilterInput>;
@ -722,8 +732,9 @@ export type EpisodesInsertInput = {
homepage?: InputMaybe<Scalars['String']['input']>;
id?: InputMaybe<Scalars['Int']['input']>;
mikanEpisodeId?: InputMaybe<Scalars['String']['input']>;
originName: Scalars['String']['input'];
originPosterLink?: InputMaybe<Scalars['String']['input']>;
posterLink?: InputMaybe<Scalars['String']['input']>;
rawName: Scalars['String']['input'];
resolution?: InputMaybe<Scalars['String']['input']>;
savePath?: InputMaybe<Scalars['String']['input']>;
season: Scalars['Int']['input'];
@ -743,8 +754,9 @@ export type EpisodesOrderInput = {
homepage?: InputMaybe<OrderByEnum>;
id?: InputMaybe<OrderByEnum>;
mikanEpisodeId?: InputMaybe<OrderByEnum>;
originName?: InputMaybe<OrderByEnum>;
originPosterLink?: InputMaybe<OrderByEnum>;
posterLink?: InputMaybe<OrderByEnum>;
rawName?: InputMaybe<OrderByEnum>;
resolution?: InputMaybe<OrderByEnum>;
savePath?: InputMaybe<OrderByEnum>;
season?: InputMaybe<OrderByEnum>;
@ -764,8 +776,9 @@ export type EpisodesUpdateInput = {
homepage?: InputMaybe<Scalars['String']['input']>;
id?: InputMaybe<Scalars['Int']['input']>;
mikanEpisodeId?: InputMaybe<Scalars['String']['input']>;
originName?: InputMaybe<Scalars['String']['input']>;
originPosterLink?: InputMaybe<Scalars['String']['input']>;
posterLink?: InputMaybe<Scalars['String']['input']>;
rawName?: InputMaybe<Scalars['String']['input']>;
resolution?: InputMaybe<Scalars['String']['input']>;
savePath?: InputMaybe<Scalars['String']['input']>;
season?: InputMaybe<Scalars['Int']['input']>;
@ -813,10 +826,8 @@ export type Mutation = {
episodesCreateOne: EpisodesBasic;
episodesDelete: Scalars['Int']['output'];
episodesUpdate: Array<EpisodesBasic>;
subscriberTasksCreateBatch: Array<SubscriberTasksBasic>;
subscriberTasksCreateOne: SubscriberTasksBasic;
subscriberTasksDelete: Scalars['Int']['output'];
subscriberTasksUpdate: Array<SubscriberTasksBasic>;
subscriberTasksRetryOne: SubscriberTasks;
subscriptionBangumiCreateBatch: Array<SubscriptionBangumiBasic>;
subscriptionBangumiCreateOne: SubscriptionBangumiBasic;
subscriptionBangumiDelete: Scalars['Int']['output'];
@ -825,12 +836,12 @@ export type Mutation = {
subscriptionEpisodeCreateOne: SubscriptionEpisodeBasic;
subscriptionEpisodeDelete: Scalars['Int']['output'];
subscriptionEpisodeUpdate: Array<SubscriptionEpisodeBasic>;
subscriptionSyncOneFeedsFull: SyncOneSubscriptionInfo;
subscriptionSyncOneFeedsIncremental: SyncOneSubscriptionInfo;
subscriptionSyncOneSources: SyncOneSubscriptionInfo;
subscriptionsCreateBatch: Array<SubscriptionsBasic>;
subscriptionsCreateOne: SubscriptionsBasic;
subscriptionsDelete: Scalars['Int']['output'];
subscriptionsSyncOneFeedsFull: SubscriberTasks;
subscriptionsSyncOneFeedsIncremental: SubscriberTasks;
subscriptionsSyncOneSources: SubscriberTasks;
subscriptionsUpdate: Array<SubscriptionsBasic>;
};
@ -940,23 +951,12 @@ export type MutationEpisodesUpdateArgs = {
};
export type MutationSubscriberTasksCreateBatchArgs = {
data: Array<SubscriberTasksInsertInput>;
};
export type MutationSubscriberTasksCreateOneArgs = {
data: SubscriberTasksInsertInput;
};
export type MutationSubscriberTasksDeleteArgs = {
filter?: InputMaybe<SubscriberTasksFilterInput>;
};
export type MutationSubscriberTasksUpdateArgs = {
data: SubscriberTasksUpdateInput;
export type MutationSubscriberTasksRetryOneArgs = {
filter?: InputMaybe<SubscriberTasksFilterInput>;
};
@ -1003,21 +1003,6 @@ export type MutationSubscriptionEpisodeUpdateArgs = {
};
export type MutationSubscriptionSyncOneFeedsFullArgs = {
filter: SyncOneSubscriptionFilterInput;
};
export type MutationSubscriptionSyncOneFeedsIncrementalArgs = {
filter: SyncOneSubscriptionFilterInput;
};
export type MutationSubscriptionSyncOneSourcesArgs = {
filter: SyncOneSubscriptionFilterInput;
};
export type MutationSubscriptionsCreateBatchArgs = {
data: Array<SubscriptionsInsertInput>;
};
@ -1033,6 +1018,21 @@ export type MutationSubscriptionsDeleteArgs = {
};
export type MutationSubscriptionsSyncOneFeedsFullArgs = {
filter?: InputMaybe<SubscriptionsFilterInput>;
};
export type MutationSubscriptionsSyncOneFeedsIncrementalArgs = {
filter?: InputMaybe<SubscriptionsFilterInput>;
};
export type MutationSubscriptionsSyncOneSourcesArgs = {
filter?: InputMaybe<SubscriptionsFilterInput>;
};
export type MutationSubscriptionsUpdateArgs = {
data: SubscriptionsUpdateInput;
filter?: InputMaybe<SubscriptionsFilterInput>;
@ -1231,23 +1231,6 @@ export type SubscriberTasks = {
taskType: SubscriberTaskTypeEnum;
};
export type SubscriberTasksBasic = {
__typename?: 'SubscriberTasksBasic';
attempts: Scalars['Int']['output'];
doneAt?: Maybe<Scalars['String']['output']>;
id: Scalars['String']['output'];
job: Scalars['Json']['output'];
lastError?: Maybe<Scalars['String']['output']>;
lockAt?: Maybe<Scalars['String']['output']>;
lockBy?: Maybe<Scalars['String']['output']>;
maxAttempts: Scalars['Int']['output'];
priority: Scalars['Int']['output'];
runAt: Scalars['String']['output'];
status: SubscriberTaskStatusEnum;
subscriberId: Scalars['Int']['output'];
taskType: SubscriberTaskTypeEnum;
};
export type SubscriberTasksConnection = {
__typename?: 'SubscriberTasksConnection';
edges: Array<SubscriberTasksEdge>;
@ -1280,22 +1263,6 @@ export type SubscriberTasksFilterInput = {
taskType?: InputMaybe<StringFilterInput>;
};
export type SubscriberTasksInsertInput = {
attempts: Scalars['Int']['input'];
doneAt?: InputMaybe<Scalars['String']['input']>;
id?: InputMaybe<Scalars['String']['input']>;
job: Scalars['Json']['input'];
lastError?: InputMaybe<Scalars['String']['input']>;
lockAt?: InputMaybe<Scalars['String']['input']>;
lockBy?: InputMaybe<Scalars['String']['input']>;
maxAttempts: Scalars['Int']['input'];
priority: Scalars['Int']['input'];
runAt: Scalars['String']['input'];
status: SubscriberTaskStatusEnum;
subscriberId?: InputMaybe<Scalars['Int']['input']>;
taskType: SubscriberTaskTypeEnum;
};
export type SubscriberTasksOrderInput = {
attempts?: InputMaybe<OrderByEnum>;
doneAt?: InputMaybe<OrderByEnum>;
@ -1312,21 +1279,6 @@ export type SubscriberTasksOrderInput = {
taskType?: InputMaybe<OrderByEnum>;
};
export type SubscriberTasksUpdateInput = {
attempts?: InputMaybe<Scalars['Int']['input']>;
doneAt?: InputMaybe<Scalars['String']['input']>;
id?: InputMaybe<Scalars['String']['input']>;
job?: InputMaybe<Scalars['Json']['input']>;
lastError?: InputMaybe<Scalars['String']['input']>;
lockAt?: InputMaybe<Scalars['String']['input']>;
lockBy?: InputMaybe<Scalars['String']['input']>;
maxAttempts?: InputMaybe<Scalars['Int']['input']>;
priority?: InputMaybe<Scalars['Int']['input']>;
runAt?: InputMaybe<Scalars['String']['input']>;
status?: InputMaybe<SubscriberTaskStatusEnum>;
taskType?: InputMaybe<SubscriberTaskTypeEnum>;
};
export type Subscribers = {
__typename?: 'Subscribers';
bangumi: BangumiConnection;
@ -1672,17 +1624,6 @@ export type SubscriptionsUpdateInput = {
updatedAt?: InputMaybe<Scalars['String']['input']>;
};
/** The input of the subscriptionSyncOne series of mutations */
export type SyncOneSubscriptionFilterInput = {
id: Scalars['Int']['input'];
};
/** The output of the subscriptionSyncOne series of mutations */
export type SyncOneSubscriptionInfo = {
__typename?: 'SyncOneSubscriptionInfo';
taskId: Scalars['String']['output'];
};
export type TextFilterInput = {
between?: InputMaybe<Array<Scalars['String']['input']>>;
eq?: InputMaybe<Scalars['String']['input']>;
@ -1779,28 +1720,28 @@ export type GetSubscriptionDetailQueryVariables = Exact<{
}>;
export type GetSubscriptionDetailQuery = { __typename?: 'Query', subscriptions: { __typename?: 'SubscriptionsConnection', nodes: Array<{ __typename?: 'Subscriptions', id: number, displayName: string, createdAt: string, updatedAt: string, category: SubscriptionCategoryEnum, sourceUrl: string, enabled: boolean, credential3rd?: { __typename?: 'Credential3rd', id: number, username?: string | null } | null, bangumi: { __typename?: 'BangumiConnection', nodes: Array<{ __typename?: 'Bangumi', createdAt: string, updatedAt: string, id: number, mikanBangumiId?: string | null, displayName: string, rawName: string, season: number, seasonRaw?: string | null, fansub?: string | null, mikanFansubId?: string | null, rssLink?: string | null, posterLink?: string | null, savePath?: string | null, homepage?: string | null }> } }> } };
export type GetSubscriptionDetailQuery = { __typename?: 'Query', subscriptions: { __typename?: 'SubscriptionsConnection', nodes: Array<{ __typename?: 'Subscriptions', id: number, displayName: string, createdAt: string, updatedAt: string, category: SubscriptionCategoryEnum, sourceUrl: string, enabled: boolean, credential3rd?: { __typename?: 'Credential3rd', id: number, username?: string | null } | null, bangumi: { __typename?: 'BangumiConnection', nodes: Array<{ __typename?: 'Bangumi', createdAt: string, updatedAt: string, id: number, mikanBangumiId?: string | null, displayName: string, season: number, seasonRaw?: string | null, fansub?: string | null, mikanFansubId?: string | null, rssLink?: string | null, posterLink?: string | null, savePath?: string | null, homepage?: string | null }> } }> } };
export type SyncSubscriptionFeedsIncrementalMutationVariables = Exact<{
id: Scalars['Int']['input'];
filter: SubscriptionsFilterInput;
}>;
export type SyncSubscriptionFeedsIncrementalMutation = { __typename?: 'Mutation', subscriptionSyncOneFeedsIncremental: { __typename?: 'SyncOneSubscriptionInfo', taskId: string } };
export type SyncSubscriptionFeedsIncrementalMutation = { __typename?: 'Mutation', subscriptionsSyncOneFeedsIncremental: { __typename?: 'SubscriberTasks', id: string } };
export type SyncSubscriptionFeedsFullMutationVariables = Exact<{
id: Scalars['Int']['input'];
filter: SubscriptionsFilterInput;
}>;
export type SyncSubscriptionFeedsFullMutation = { __typename?: 'Mutation', subscriptionSyncOneFeedsFull: { __typename?: 'SyncOneSubscriptionInfo', taskId: string } };
export type SyncSubscriptionFeedsFullMutation = { __typename?: 'Mutation', subscriptionsSyncOneFeedsFull: { __typename?: 'SubscriberTasks', id: string } };
export type SyncSubscriptionSourcesMutationVariables = Exact<{
id: Scalars['Int']['input'];
filter: SubscriptionsFilterInput;
}>;
export type SyncSubscriptionSourcesMutation = { __typename?: 'Mutation', subscriptionSyncOneSources: { __typename?: 'SyncOneSubscriptionInfo', taskId: string } };
export type SyncSubscriptionSourcesMutation = { __typename?: 'Mutation', subscriptionsSyncOneSources: { __typename?: 'SubscriberTasks', id: string } };
export type GetTasksQueryVariables = Exact<{
filters: SubscriberTasksFilterInput;
@ -1811,6 +1752,20 @@ export type GetTasksQueryVariables = Exact<{
export type GetTasksQuery = { __typename?: 'Query', subscriberTasks: { __typename?: 'SubscriberTasksConnection', nodes: Array<{ __typename?: 'SubscriberTasks', id: string, job: any, taskType: SubscriberTaskTypeEnum, status: SubscriberTaskStatusEnum, attempts: number, maxAttempts: number, runAt: string, lastError?: string | null, lockAt?: string | null, lockBy?: string | null, doneAt?: string | null, priority: number }>, paginationInfo?: { __typename?: 'PaginationInfo', total: number, pages: number } | null } };
export type DeleteTasksMutationVariables = Exact<{
filters: SubscriberTasksFilterInput;
}>;
export type DeleteTasksMutation = { __typename?: 'Mutation', subscriberTasksDelete: number };
export type RetryTasksMutationVariables = Exact<{
filters: SubscriberTasksFilterInput;
}>;
export type RetryTasksMutation = { __typename?: 'Mutation', subscriberTasksRetryOne: { __typename?: 'SubscriberTasks', id: string, job: any, taskType: SubscriberTaskTypeEnum, status: SubscriberTaskStatusEnum, attempts: number, maxAttempts: number, runAt: string, lastError?: string | null, lockAt?: string | null, lockBy?: string | null, doneAt?: string | null, priority: number } };
export const GetCredential3rdDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetCredential3rd"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Credential3rdFilterInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"Credential3rdOrderInput"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"pagination"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"PaginationInput"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"credential3rd"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}},{"kind":"Argument","name":{"kind":"Name","value":"orderBy"},"value":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}}},{"kind":"Argument","name":{"kind":"Name","value":"pagination"},"value":{"kind":"Variable","name":{"kind":"Name","value":"pagination"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"cookies"}},{"kind":"Field","name":{"kind":"Name","value":"username"}},{"kind":"Field","name":{"kind":"Name","value":"password"}},{"kind":"Field","name":{"kind":"Name","value":"userAgent"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"credentialType"}}]}},{"kind":"Field","name":{"kind":"Name","value":"paginationInfo"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"total"}},{"kind":"Field","name":{"kind":"Name","value":"pages"}}]}}]}}]}}]} as unknown as DocumentNode<GetCredential3rdQuery, GetCredential3rdQueryVariables>;
export const InsertCredential3rdDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"InsertCredential3rd"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"data"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Credential3rdInsertInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"credential3rdCreateOne"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"data"},"value":{"kind":"Variable","name":{"kind":"Name","value":"data"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"cookies"}},{"kind":"Field","name":{"kind":"Name","value":"username"}},{"kind":"Field","name":{"kind":"Name","value":"password"}},{"kind":"Field","name":{"kind":"Name","value":"userAgent"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"credentialType"}}]}}]}}]} as unknown as DocumentNode<InsertCredential3rdMutation, InsertCredential3rdMutationVariables>;
@ -1822,8 +1777,10 @@ export const GetSubscriptionsDocument = {"kind":"Document","definitions":[{"kind
export const InsertSubscriptionDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"InsertSubscription"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"data"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsInsertInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsCreateOne"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"data"},"value":{"kind":"Variable","name":{"kind":"Name","value":"data"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"credentialId"}}]}}]}}]} as unknown as DocumentNode<InsertSubscriptionMutation, InsertSubscriptionMutationVariables>;
export const UpdateSubscriptionsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"UpdateSubscriptions"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"data"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsUpdateInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsUpdate"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"data"},"value":{"kind":"Variable","name":{"kind":"Name","value":"data"}}},{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}}]}}]}}]} as unknown as DocumentNode<UpdateSubscriptionsMutation, UpdateSubscriptionsMutationVariables>;
export const DeleteSubscriptionsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"DeleteSubscriptions"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsDelete"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}]}]}}]} as unknown as DocumentNode<DeleteSubscriptionsMutation, DeleteSubscriptionsMutationVariables>;
export const GetSubscriptionDetailDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetSubscriptionDetail"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptions"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"eq"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"credential3rd"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"username"}}]}},{"kind":"Field","name":{"kind":"Name","value":"bangumi"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"mikanBangumiId"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"rawName"}},{"kind":"Field","name":{"kind":"Name","value":"season"}},{"kind":"Field","name":{"kind":"Name","value":"seasonRaw"}},{"kind":"Field","name":{"kind":"Name","value":"fansub"}},{"kind":"Field","name":{"kind":"Name","value":"mikanFansubId"}},{"kind":"Field","name":{"kind":"Name","value":"rssLink"}},{"kind":"Field","name":{"kind":"Name","value":"posterLink"}},{"kind":"Field","name":{"kind":"Name","value":"savePath"}},{"kind":"Field","name":{"kind":"Name","value":"homepage"}}]}}]}}]}}]}}]}}]} as unknown as DocumentNode<GetSubscriptionDetailQuery, GetSubscriptionDetailQueryVariables>;
export const SyncSubscriptionFeedsIncrementalDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionFeedsIncremental"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionSyncOneFeedsIncremental"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"taskId"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionFeedsIncrementalMutation, SyncSubscriptionFeedsIncrementalMutationVariables>;
export const SyncSubscriptionFeedsFullDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionFeedsFull"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionSyncOneFeedsFull"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"taskId"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionFeedsFullMutation, SyncSubscriptionFeedsFullMutationVariables>;
export const SyncSubscriptionSourcesDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionSources"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionSyncOneSources"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"taskId"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionSourcesMutation, SyncSubscriptionSourcesMutationVariables>;
export const GetTasksDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetTasks"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriberTasksFilterInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriberTasksOrderInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"pagination"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PaginationInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriberTasks"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"pagination"},"value":{"kind":"Variable","name":{"kind":"Name","value":"pagination"}}},{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}},{"kind":"Argument","name":{"kind":"Name","value":"orderBy"},"value":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"job"}},{"kind":"Field","name":{"kind":"Name","value":"taskType"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"attempts"}},{"kind":"Field","name":{"kind":"Name","value":"maxAttempts"}},{"kind":"Field","name":{"kind":"Name","value":"runAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"lockAt"}},{"kind":"Field","name":{"kind":"Name","value":"lockBy"}},{"kind":"Field","name":{"kind":"Name","value":"doneAt"}},{"kind":"Field","name":{"kind":"Name","value":"priority"}}]}},{"kind":"Field","name":{"kind":"Name","value":"paginationInfo"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"total"}},{"kind":"Field","name":{"kind":"Name","value":"pages"}}]}}]}}]}}]} as unknown as DocumentNode<GetTasksQuery, GetTasksQueryVariables>;
export const GetSubscriptionDetailDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetSubscriptionDetail"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptions"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"eq"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"credential3rd"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"username"}}]}},{"kind":"Field","name":{"kind":"Name","value":"bangumi"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"mikanBangumiId"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"season"}},{"kind":"Field","name":{"kind":"Name","value":"seasonRaw"}},{"kind":"Field","name":{"kind":"Name","value":"fansub"}},{"kind":"Field","name":{"kind":"Name","value":"mikanFansubId"}},{"kind":"Field","name":{"kind":"Name","value":"rssLink"}},{"kind":"Field","name":{"kind":"Name","value":"posterLink"}},{"kind":"Field","name":{"kind":"Name","value":"savePath"}},{"kind":"Field","name":{"kind":"Name","value":"homepage"}}]}}]}}]}}]}}]}}]} as unknown as DocumentNode<GetSubscriptionDetailQuery, GetSubscriptionDetailQueryVariables>;
export const SyncSubscriptionFeedsIncrementalDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionFeedsIncremental"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filter"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsSyncOneFeedsIncremental"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filter"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionFeedsIncrementalMutation, SyncSubscriptionFeedsIncrementalMutationVariables>;
export const SyncSubscriptionFeedsFullDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionFeedsFull"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filter"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsSyncOneFeedsFull"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filter"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionFeedsFullMutation, SyncSubscriptionFeedsFullMutationVariables>;
export const SyncSubscriptionSourcesDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionSources"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filter"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsSyncOneSources"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filter"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionSourcesMutation, SyncSubscriptionSourcesMutationVariables>;
export const GetTasksDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetTasks"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriberTasksFilterInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriberTasksOrderInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"pagination"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PaginationInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriberTasks"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"pagination"},"value":{"kind":"Variable","name":{"kind":"Name","value":"pagination"}}},{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}},{"kind":"Argument","name":{"kind":"Name","value":"orderBy"},"value":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"job"}},{"kind":"Field","name":{"kind":"Name","value":"taskType"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"attempts"}},{"kind":"Field","name":{"kind":"Name","value":"maxAttempts"}},{"kind":"Field","name":{"kind":"Name","value":"runAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"lockAt"}},{"kind":"Field","name":{"kind":"Name","value":"lockBy"}},{"kind":"Field","name":{"kind":"Name","value":"doneAt"}},{"kind":"Field","name":{"kind":"Name","value":"priority"}}]}},{"kind":"Field","name":{"kind":"Name","value":"paginationInfo"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"total"}},{"kind":"Field","name":{"kind":"Name","value":"pages"}}]}}]}}]}}]} as unknown as DocumentNode<GetTasksQuery, GetTasksQueryVariables>;
export const DeleteTasksDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"DeleteTasks"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriberTasksFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriberTasksDelete"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}]}]}}]} as unknown as DocumentNode<DeleteTasksMutation, DeleteTasksMutationVariables>;
export const RetryTasksDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"RetryTasks"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriberTasksFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriberTasksRetryOne"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"job"}},{"kind":"Field","name":{"kind":"Name","value":"taskType"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"attempts"}},{"kind":"Field","name":{"kind":"Name","value":"maxAttempts"}},{"kind":"Field","name":{"kind":"Name","value":"runAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"lockAt"}},{"kind":"Field","name":{"kind":"Name","value":"lockBy"}},{"kind":"Field","name":{"kind":"Name","value":"doneAt"}},{"kind":"Field","name":{"kind":"Name","value":"priority"}}]}}]}}]} as unknown as DocumentNode<RetryTasksMutation, RetryTasksMutationVariables>;
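The regenerated documents above follow the renamed sync mutations: subscriptionSyncOneFeedsIncremental, subscriptionSyncOneFeedsFull and subscriptionSyncOneSources, which took an Int id and selected taskId, are replaced by subscriptionsSyncOneFeedsIncremental, subscriptionsSyncOneFeedsFull and subscriptionsSyncOneSources, which take a SubscriptionsFilterInput and select the created task's id. A minimal sketch of the operation source implied by the new SyncSubscriptionFeedsIncrementalDocument AST (reconstructed from the generated document above, not copied from the schema file; the other two sync mutations follow the same shape):

import { gql } from '@apollo/client';

// Operation shape read off the generated AST above.
export const SYNC_SUBSCRIPTION_FEEDS_INCREMENTAL = gql`
  mutation SyncSubscriptionFeedsIncremental($filter: SubscriptionsFilterInput!) {
    subscriptionsSyncOneFeedsIncremental(filter: $filter) {
      id
    }
  }
`;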

View File

@ -26,7 +26,7 @@ import { memo, useCallback } from 'react';
import { toast } from 'sonner';
export type SubscriptionSyncViewCompletePayload = {
taskId: string;
id: string;
};
export interface SubscriptionSyncViewProps {
@ -43,7 +43,7 @@ export const SubscriptionSyncView = memo(
>(SYNC_SUBSCRIPTION_FEEDS_INCREMENTAL, {
onCompleted: (data) => {
toast.success('Sync completed');
onComplete(data.subscriptionSyncOneFeedsIncremental);
onComplete(data.subscriptionsSyncOneFeedsIncremental);
},
onError: (error) => {
toast.error('Failed to sync subscription', {
@ -58,7 +58,7 @@ export const SubscriptionSyncView = memo(
>(SYNC_SUBSCRIPTION_FEEDS_FULL, {
onCompleted: (data) => {
toast.success('Sync completed');
onComplete(data.subscriptionSyncOneFeedsFull);
onComplete(data.subscriptionsSyncOneFeedsFull);
},
onError: (error) => {
toast.error('Failed to sync subscription', {
@ -73,7 +73,7 @@ export const SubscriptionSyncView = memo(
>(SYNC_SUBSCRIPTION_SOURCES, {
onCompleted: (data) => {
toast.success('Sync completed');
onComplete(data.subscriptionSyncOneSources);
onComplete(data.subscriptionsSyncOneSources);
},
onError: (error) => {
toast.error('Failed to sync subscription', {
@ -89,7 +89,11 @@ export const SubscriptionSyncView = memo(
<Button
size="lg"
variant="outline"
onClick={() => syncSubscriptionSources({ variables: { id } })}
onClick={() =>
syncSubscriptionSources({
variables: { filter: { id: { eq: id } } },
})
}
>
<RefreshCcwIcon className="h-4 w-4" />
<span>Sources</span>
@ -98,7 +102,9 @@ export const SubscriptionSyncView = memo(
size="lg"
variant="outline"
onClick={() =>
syncSubscriptionFeedsIncremental({ variables: { id } })
syncSubscriptionFeedsIncremental({
variables: { filter: { id: { eq: id } } },
})
}
>
<RefreshCcwIcon className="h-4 w-4" />
@ -107,7 +113,11 @@ export const SubscriptionSyncView = memo(
<Button
size="lg"
variant="outline"
onClick={() => syncSubscriptionFeedsFull({ variables: { id } })}
onClick={() =>
syncSubscriptionFeedsFull({
variables: { filter: { id: { eq: id } } },
})
}
>
<RefreshCcwIcon className="h-4 w-4" />
<span>Full Feeds</span>
@ -138,7 +148,7 @@ export const SubscriptionSyncDialogContent = memo(
navigate({
to: '/tasks/detail/$id',
params: {
id: `${payload.taskId}`,
id: `${payload.id}`,
},
});
},
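The completion payload mirrors the new selection set: the sync mutations now return the created subscriber task row, so the dialog forwards its id to the /tasks/detail/$id route, which loads the task from subscriberTasks by that same id. A short sketch of the assumed handoff (handleComplete is a hypothetical name for the callback passed as onComplete):

// Hypothetical wiring: payload.id comes from subscriptionsSyncOne*, and the
// detail route queries subscriberTasks filtered by it.
const handleComplete = (payload: SubscriptionSyncViewCompletePayload) =>
  navigate({ to: '/tasks/detail/$id', params: { id: `${payload.id}` } });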

View File

@ -10,6 +10,7 @@ import {
} from '@/components/ui/card';
import { DetailEmptyView } from '@/components/ui/detail-empty-view';
import { Dialog, DialogTrigger } from '@/components/ui/dialog';
import { Img } from '@/components/ui/img';
import { Label } from '@/components/ui/label';
import { QueryErrorView } from '@/components/ui/query-error-view';
import { Separator } from '@/components/ui/separator';
@ -324,7 +325,18 @@ function SubscriptionDetailRouteComponent() {
<div className="space-y-3">
{subscription.bangumi.nodes.map((bangumi) => (
<Card key={bangumi.id} className="p-4">
<div className="grid grid-cols-1 gap-4 md:grid-cols-2">
<div className="grid grid-cols-2 gap-4 md:grid-cols-3">
<div className="col-span-1 row-span-2 space-y-2">
<div className="flex h-full items-center justify-center overflow-hidden rounded-md bg-muted">
{bangumi.posterLink && (
<Img
src={`/api/static${bangumi.posterLink}`}
alt="Poster"
className="h-full w-full object-cover"
/>
)}
</div>
</div>
<div className="space-y-2">
<Label className="font-medium text-muted-foreground text-xs">
Display Name
@ -333,14 +345,6 @@ function SubscriptionDetailRouteComponent() {
{bangumi.displayName}
</div>
</div>
<div className="space-y-2">
<Label className="font-medium text-muted-foreground text-xs">
Season
</Label>
<div className="text-sm">
{bangumi.season || '-'}
</div>
</div>
<div className="space-y-2">
<Label className="font-medium text-muted-foreground text-xs">
Fansub
@ -351,10 +355,21 @@ function SubscriptionDetailRouteComponent() {
</div>
<div className="space-y-2">
<Label className="font-medium text-muted-foreground text-xs">
Save Path
Season
</Label>
<div className="text-sm">
{bangumi.season || '-'}
</div>
</div>
<div className="space-y-2">
<Label className="font-medium text-muted-foreground text-xs">
Updated At
</Label>
<div className="font-mono text-sm">
{bangumi.savePath || '-'}
{format(
new Date(bangumi.updatedAt),
'yyyy-MM-dd'
)}
</div>
</div>
</div>

View File

@ -25,13 +25,9 @@ import {
apolloErrorToMessage,
getApolloQueryError,
} from '@/infra/errors/apollo';
import type {
GetSubscriptionsQuery,
SubscriptionsUpdateInput,
} from '@/infra/graphql/gql/graphql';
import type { GetSubscriptionsQuery } from '@/infra/graphql/gql/graphql';
import type { RouteStateDataOption } from '@/infra/routes/traits';
import { useDebouncedSkeleton } from '@/presentation/hooks/use-debounded-skeleton';
import { useEvent } from '@/presentation/hooks/use-event';
import { cn } from '@/presentation/utils';
import { useMutation, useQuery } from '@apollo/client';
import { createFileRoute } from '@tanstack/react-router';
@ -39,7 +35,6 @@ import { useNavigate } from '@tanstack/react-router';
import {
type ColumnDef,
type PaginationState,
type Row,
type SortingState,
type VisibilityState,
flexRender,
@ -131,29 +126,6 @@ function SubscriptionManageRouteComponent() {
const subscriptions = data?.subscriptions;
const handleUpdateRecord = useEvent(
(row: Row<SubscriptionDto>) => async (data: SubscriptionsUpdateInput) => {
await updateSubscription({
variables: {
data,
filters: {
id: {
eq: row.original.id,
},
},
},
});
}
);
const handleDeleteRecord = useEvent(
(row: Row<SubscriptionDto>) => async () => {
await deleteSubscription({
variables: { filters: { id: { eq: row.original.id } } },
});
}
);
const columns = useMemo(() => {
const cs: ColumnDef<SubscriptionDto>[] = [
{
@ -166,7 +138,18 @@ function SubscriptionManageRouteComponent() {
<Switch
checked={enabled}
onCheckedChange={(enabled) =>
handleUpdateRecord(row)({ enabled: enabled })
updateSubscription({
variables: {
data: {
enabled,
},
filters: {
id: {
eq: row.original.id,
},
},
},
})
}
/>
</div>
@ -242,7 +225,11 @@ function SubscriptionManageRouteComponent() {
params: { id: `${row.original.id}` },
});
}}
onDelete={handleDeleteRecord(row)}
onDelete={() =>
deleteSubscription({
variables: { filters: { id: { eq: row.original.id } } },
})
}
>
<Dialog>
<DialogTrigger asChild>
@ -257,7 +244,7 @@ function SubscriptionManageRouteComponent() {
},
];
return cs;
}, [handleUpdateRecord, handleDeleteRecord, navigate]);
}, [updateSubscription, deleteSubscription, navigate]);
const table = useReactTable({
data: useMemo(() => subscriptions?.nodes ?? [], [subscriptions]),
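With the useEvent wrappers removed, the mutate functions returned by useMutation are called directly inside the cell renderers and listed as useMemo dependencies. A minimal sketch of the pattern, under the assumption that the mutate function identities are stable across renders (or that an occasional rebuild of the column array is acceptable):

// Sketch: no per-row handler factories; updateSubscription and
// deleteSubscription are invoked inline and tracked as memo dependencies.
const columns = useMemo<ColumnDef<SubscriptionDto>[]>(
  () => [
    /* ...cells call updateSubscription / deleteSubscription directly... */
  ],
  [updateSubscription, deleteSubscription, navigate]
);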

View File

@ -1,5 +0,0 @@
import { memo } from 'react';
export const TaskActionsView = memo(() => {
return null;
});

View File

@ -12,14 +12,18 @@ import { DetailEmptyView } from '@/components/ui/detail-empty-view';
import { Label } from '@/components/ui/label';
import { QueryErrorView } from '@/components/ui/query-error-view';
import { Separator } from '@/components/ui/separator';
import { GET_TASKS } from '@/domains/recorder/schema/tasks';
import { GET_TASKS, RETRY_TASKS } from '@/domains/recorder/schema/tasks';
import { getApolloQueryError } from '@/infra/errors/apollo';
import { apolloErrorToMessage } from '@/infra/errors/apollo';
import {
type GetTasksQuery,
type GetTasksQueryVariables,
type RetryTasksMutation,
type RetryTasksMutationVariables,
SubscriberTaskStatusEnum,
} from '@/infra/graphql/gql/graphql';
import type { RouteStateDataOption } from '@/infra/routes/traits';
import { useQuery } from '@apollo/client';
import { useMutation, useQuery } from '@apollo/client';
import {
createFileRoute,
useCanGoBack,
@ -28,6 +32,7 @@ import {
} from '@tanstack/react-router';
import { format } from 'date-fns';
import { ArrowLeft, RefreshCw } from 'lucide-react';
import { toast } from 'sonner';
import { getStatusBadge } from './-status-badge';
export const Route = createFileRoute('/_app/tasks/detail/$id')({
@ -76,6 +81,28 @@ function TaskDetailRouteComponent() {
const task = data?.subscriberTasks?.nodes?.[0];
const [retryTasks] = useMutation<
RetryTasksMutation,
RetryTasksMutationVariables
>(RETRY_TASKS, {
onCompleted: async () => {
const refetchResult = await refetch();
const error = getApolloQueryError(refetchResult);
if (error) {
toast.error('Failed to retry task', {
description: apolloErrorToMessage(error),
});
return;
}
toast.success('Task retried successfully');
},
onError: (error) => {
toast.error('Failed to retry task', {
description: apolloErrorToMessage(error),
});
},
});
if (loading) {
return <DetailCardSkeleton />;
}
@ -123,6 +150,21 @@ function TaskDetailRouteComponent() {
</div>
<div className="flex items-center gap-2">
{getStatusBadge(task.status)}
{(task.status === SubscriberTaskStatusEnum.Killed ||
task.status === SubscriberTaskStatusEnum.Failed) && (
<Button
variant="ghost"
size="sm"
onClick={() =>
retryTasks({
variables: { filters: { id: { eq: task.id } } },
})
}
>
Retry
</Button>
)}
</div>
</div>
</CardHeader>
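Retrying uses the same id-equality filter shape as the other task mutations, and the follow-up refetch is treated as part of the operation's error surface via getApolloQueryError. A condensed sketch of the assumed flow (canRetry is a hypothetical local; the component inlines the check):

// Retry is only offered for terminal states, then the task is refetched.
const canRetry =
  task.status === SubscriberTaskStatusEnum.Killed ||
  task.status === SubscriberTaskStatusEnum.Failed;

if (canRetry) {
  retryTasks({ variables: { filters: { id: { eq: task.id } } } });
}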

View File

@ -5,14 +5,23 @@ import { DetailEmptyView } from '@/components/ui/detail-empty-view';
import { DropdownMenuActions } from '@/components/ui/dropdown-menu-actions';
import { QueryErrorView } from '@/components/ui/query-error-view';
import { Skeleton } from '@/components/ui/skeleton';
import { GET_TASKS, type TaskDto } from '@/domains/recorder/schema/tasks';
import {
DELETE_TASKS,
GET_TASKS,
RETRY_TASKS,
type TaskDto,
} from '@/domains/recorder/schema/tasks';
import {
type DeleteTasksMutation,
type DeleteTasksMutationVariables,
type GetTasksQuery,
type RetryTasksMutation,
type RetryTasksMutationVariables,
SubscriberTaskStatusEnum,
} from '@/infra/graphql/gql/graphql';
import type { RouteStateDataOption } from '@/infra/routes/traits';
import { useDebouncedSkeleton } from '@/presentation/hooks/use-debounded-skeleton';
import { useQuery } from '@apollo/client';
import { useMutation, useQuery } from '@apollo/client';
import { createFileRoute, useNavigate } from '@tanstack/react-router';
import {
type ColumnDef,
@ -26,7 +35,13 @@ import {
import { format } from 'date-fns';
import { RefreshCw } from 'lucide-react';
import { DropdownMenuItem } from '@/components/ui/dropdown-menu';
import {
apolloErrorToMessage,
getApolloQueryError,
} from '@/infra/errors/apollo';
import { useMemo, useState } from 'react';
import { toast } from 'sonner';
import { getStatusBadge } from './-status-badge';
export const Route = createFileRoute('/_app/tasks/manage')({
@ -70,6 +85,42 @@ function TaskManageRouteComponent() {
const tasks = data?.subscriberTasks;
const [deleteTasks] = useMutation<
DeleteTasksMutation,
DeleteTasksMutationVariables
>(DELETE_TASKS, {
onCompleted: async () => {
const refetchResult = await refetch();
const error = getApolloQueryError(refetchResult);
if (error) {
toast.error('Failed to delete tasks', {
description: apolloErrorToMessage(error),
});
return;
}
toast.success('Tasks deleted');
},
onError: (error) => {
toast.error('Failed to delete tasks', {
description: error.message,
});
},
});
const [retryTasks] = useMutation<
RetryTasksMutation,
RetryTasksMutationVariables
>(RETRY_TASKS, {
onCompleted: () => {
toast.success('Tasks retried');
},
onError: (error) => {
toast.error('Failed to retry tasks', {
description: error.message,
});
},
});
const columns = useMemo(() => {
const cs: ColumnDef<TaskDto>[] = [
{
@ -167,7 +218,39 @@ function TaskManageRouteComponent() {
params: { id: task.id },
});
}}
/>
showDelete
onDelete={() =>
deleteTasks({
variables: {
filters: {
id: {
eq: task.id,
},
},
},
})
}
>
{(task.status === SubscriberTaskStatusEnum.Killed ||
task.status === SubscriberTaskStatusEnum.Failed) && (
<DropdownMenuItem
onSelect={() =>
retryTasks({
variables: {
filters: {
id: {
eq: task.id,
},
},
},
})
}
>
Retry
</DropdownMenuItem>
)}
</DropdownMenuActions>
</div>
</div>
@ -206,6 +289,7 @@ function TaskManageRouteComponent() {
</div>
</div>
{/* Job */}
{task.job && (
<div className="text-sm">
<span className="text-muted-foreground">Job: </span>

View File

@ -4,13 +4,16 @@ set dotenv-load := true
prepare-dev:
cargo install cargo-binstall
cargo binstall sea-orm-cli cargo-llvm-cov cargo-nextest
# <package-manager> install watchexec just zellij
# <package-manager> install watchexec just zellij nasm libjxl
prepare-dev-testcontainers:
docker pull linuxserver/qbittorrent:latest
docker pull ghcr.io/dumtruck/konobangu-testing-torrents:latest
docker pull postgres:17-alpine
dev-optimize-images:
npx -y zx apps/recorder/examples/optimize_image.mjs
dev-webui:
pnpm run --filter=webui dev
@ -19,7 +22,7 @@ dev-proxy:
pnpm run --parallel --filter=proxy dev
dev-recorder:
watchexec -r -e rs,toml,yaml,json,env -- cargo run -p recorder --bin recorder_cli -- --environment development
watchexec -r -e rs,toml,yaml,json,env -- cargo run -p recorder --bin recorder_cli -- --environment=development --graceful-shutdown=false
dev-recorder-migrate-down:
cargo run -p recorder --bin migrate_down -- --environment development

View File

@ -19,6 +19,9 @@
"engines": {
"node": ">=22"
},
"dependencies": {
"es-toolkit": "^1.39.3"
},
"devDependencies": {
"@biomejs/biome": "1.9.4",
"@types/node": "^24.0.1",

View File

@ -21,6 +21,10 @@ impl OptDynErr {
pub fn none() -> Self {
Self(None)
}
pub fn into_inner(self) -> Option<Box<dyn std::error::Error + Send + Sync>> {
self.0
}
}
impl Display for OptDynErr {

7
pnpm-lock.yaml generated
View File

@ -10,6 +10,10 @@ overrides:
importers:
.:
dependencies:
es-toolkit:
specifier: ^1.39.3
version: 1.39.3
devDependencies:
'@biomejs/biome':
specifier: 1.9.4
@ -209,9 +213,6 @@ importers:
embla-carousel-react:
specifier: ^8.6.0
version: 8.6.0(react@19.1.0)
es-toolkit:
specifier: ^1.39.3
version: 1.39.3
graphiql:
specifier: ^4.1.2
version: 4.1.2(@codemirror/language@6.11.1)(@emotion/is-prop-valid@0.8.8)(@types/node@24.0.1)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(graphql-ws@6.0.4(graphql@16.11.0)(ws@8.18.2(bufferutil@4.0.9)(utf-8-validate@6.0.5)))(graphql@16.11.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(use-sync-external-store@1.5.0(react@19.1.0))