Compare commits: 6726cafff4...master (10 commits)

| SHA1 |
|---|
| 9fd3ae6563 |
| cde3361458 |
| f055011b86 |
| 16429a44b4 |
| fe0b7e88e6 |
| 28dd9da6ac |
| 02c16a2972 |
| 324427513c |
| c12b9b360a |
| cc06142050 |
.vscode/settings.json (vendored, 1 changed line)
@@ -40,6 +40,7 @@
}
],
"rust-analyzer.cargo.features": "all",
"rust-analyzer.testExplorer": true
// https://github.com/rust-lang/rust/issues/141540
// "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
// "rust-analyzer.check.extraEnv": {

Cargo.lock (generated, 2598 changed lines)
File diff suppressed because it is too large.
Cargo.toml (19 changed lines)
@@ -66,6 +66,25 @@ mockito = { version = "1.6.1" }
convert_case = "0.8"
color-eyre = "0.6.5"
inquire = "0.7.5"
image = "0.25.6"
uuid = { version = "1.6.0", features = ["v4"] }
maplit = "1.0.2"
once_cell = "1.20.2"
rand = "0.9.1"
rust_decimal = "1.37.2"
base64 = "0.22.1"
nom = "8.0.0"
percent-encoding = "2.3.1"
num-traits = "0.2.19"
http = "1.2.0"
async-stream = "0.3.6"
serde_variant = "0.1.3"
tracing-appender = "0.2.3"
clap = "4.5.40"
ipnetwork = "0.21.1"
typed-builder = "0.21.0"
nanoid = "0.4.0"
webp = "0.3.0"

[patch.crates-io]
seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }

apps/recorder/.gitignore (vendored, 2 changed lines)
@@ -27,3 +27,5 @@ node_modules
dist/
temp/*
!temp/.gitkeep
tests/resources/mikan/classic_episodes/*/*
!tests/resources/mikan/classic_episodes/parquet/tiny.parquet
@@ -2,8 +2,20 @@
|
||||
name = "recorder"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[features]
|
||||
default = ["jxl"]
|
||||
playground = ["dep:inquire", "dep:color-eyre", "dep:polars"]
|
||||
testcontainers = [
|
||||
"dep:testcontainers",
|
||||
"dep:testcontainers-modules",
|
||||
"dep:testcontainers-ext",
|
||||
"downloader/testcontainers",
|
||||
"testcontainers-modules/postgres",
|
||||
]
|
||||
jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
|
||||
|
||||
[lib]
|
||||
name = "recorder"
|
||||
path = "src/lib.rs"
|
||||
@@ -13,16 +25,25 @@ name = "recorder_cli"
|
||||
path = "src/bin/main.rs"
|
||||
required-features = []
|
||||
|
||||
[features]
|
||||
default = []
|
||||
playground = ["dep:inquire", "dep:color-eyre"]
|
||||
testcontainers = [
|
||||
"dep:testcontainers",
|
||||
"dep:testcontainers-modules",
|
||||
"dep:testcontainers-ext",
|
||||
"downloader/testcontainers",
|
||||
"testcontainers-modules/postgres",
|
||||
]
|
||||
[[example]]
|
||||
name = "mikan_collect_classic_eps"
|
||||
path = "examples/mikan_collect_classic_eps.rs"
|
||||
required-features = ["playground"]
|
||||
|
||||
[[example]]
|
||||
name = "mikan_doppel_season_subscription"
|
||||
path = "examples/mikan_doppel_season_subscription.rs"
|
||||
required-features = ["playground"]
|
||||
|
||||
[[example]]
|
||||
name = "mikan_doppel_subscriber_subscription"
|
||||
path = "examples/mikan_doppel_subscriber_subscription.rs"
|
||||
required-features = ["playground"]
|
||||
|
||||
[[example]]
|
||||
name = "playground"
|
||||
path = "examples/playground.rs"
|
||||
required-features = ["playground"]
|
||||
|
||||
[dependencies]
|
||||
downloader = { workspace = true }
|
||||
@@ -58,6 +79,25 @@ mockito = { workspace = true }
|
||||
color-eyre = { workspace = true, optional = true }
|
||||
inquire = { workspace = true, optional = true }
|
||||
convert_case = { workspace = true }
|
||||
image = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
maplit = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
rust_decimal = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
nom = { workspace = true }
|
||||
percent-encoding = { workspace = true }
|
||||
num-traits = { workspace = true }
|
||||
http = { workspace = true }
|
||||
async-stream = { workspace = true }
|
||||
serde_variant = { workspace = true }
|
||||
tracing-appender = { workspace = true }
|
||||
clap = { workspace = true }
|
||||
ipnetwork = { workspace = true }
|
||||
typed-builder = { workspace = true }
|
||||
nanoid = { workspace = true }
|
||||
webp = { workspace = true }
|
||||
|
||||
sea-orm = { version = "1.1", features = [
|
||||
"sqlx-sqlite",
|
||||
@@ -67,19 +107,13 @@ sea-orm = { version = "1.1", features = [
|
||||
"debug-print",
|
||||
] }
|
||||
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
|
||||
uuid = { version = "1.6.0", features = ["v4"] }
|
||||
sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
|
||||
rss = "2"
|
||||
fancy-regex = "0.14"
|
||||
maplit = "1.0.2"
|
||||
lightningcss = "1.0.0-alpha.66"
|
||||
html-escape = "0.2.13"
|
||||
opendal = { version = "0.53", features = ["default", "services-fs"] }
|
||||
zune-image = "0.4.15"
|
||||
once_cell = "1.20.2"
|
||||
scraper = "0.23"
|
||||
|
||||
log = "0.4"
|
||||
scraper = "0.23.1"
|
||||
async-graphql = { version = "7", features = ["dynamic-schema"] }
|
||||
async-graphql-axum = "7"
|
||||
seaography = { version = "1.1", features = [
|
||||
@@ -92,7 +126,6 @@ seaography = { version = "1.1", features = [
|
||||
"with-postgres-array",
|
||||
"with-json-as-scalar",
|
||||
] }
|
||||
base64 = "0.22.1"
|
||||
tower = "0.5.2"
|
||||
tower-http = { version = "0.6", features = [
|
||||
"trace",
|
||||
@@ -107,34 +140,30 @@ tower-http = { version = "0.6", features = [
|
||||
tera = "1.20.0"
|
||||
openidconnect = { version = "4" }
|
||||
dotenvy = "0.15.7"
|
||||
http = "1.2.0"
|
||||
async-stream = "0.3.6"
|
||||
serde_variant = "0.1.3"
|
||||
tracing-appender = "0.2.3"
|
||||
clap = "4.5.31"
|
||||
ipnetwork = "0.21.1"
|
||||
typed-builder = "0.21.0"
|
||||
apalis = { version = "0.7", features = [
|
||||
"limit",
|
||||
"tracing",
|
||||
"catch-panic",
|
||||
"retry",
|
||||
] }
|
||||
jpegxl-rs = { version = "0.11.2", optional = true }
|
||||
jpegxl-sys = { version = "0.11.2", optional = true }
|
||||
|
||||
apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
|
||||
apalis-sql = { version = "0.7", features = ["postgres"] }
|
||||
cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
|
||||
rand = "0.9.1"
|
||||
rust_decimal = "1.37.1"
|
||||
reqwest_cookie_store = "0.8.0"
|
||||
nanoid = "0.4.0"
|
||||
jwtk = "0.4.0"
|
||||
percent-encoding = "2.3.1"
|
||||
mime_guess = "2.0.5"
|
||||
|
||||
icu_properties = "2.0.1"
|
||||
icu = "2.0.0"
|
||||
tracing-tree = "0.4.0"
|
||||
num_cpus = "1.17.0"
|
||||
headers-accept = "0.1.4"
|
||||
polars = { version = "0.49.1", features = [
|
||||
"parquet",
|
||||
"lazy",
|
||||
"diagonal_concat",
|
||||
], optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
inquire = { workspace = true }
|
||||
color-eyre = { workspace = true }
|
||||
serial_test = "3"
|
||||
insta = { version = "1", features = ["redactions", "toml", "filters"] }
|
||||
rstest = "0.25"
|
||||
ctor = "0.4.0"
|
||||
inquire = { workspace = true }
|
||||
color-eyre = { workspace = true }
|
||||
|
||||
apps/recorder/examples/mikan_collect_classic_eps.rs (new file, 584 lines)
@@ -0,0 +1,584 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
use chrono::{DateTime, Duration, FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
|
||||
use fetch::{HttpClientConfig, fetch_html};
|
||||
use itertools::Itertools;
|
||||
use lazy_static::lazy_static;
|
||||
use nom::{
|
||||
IResult, Parser,
|
||||
branch::alt,
|
||||
bytes::complete::{tag, take, take_till1},
|
||||
character::complete::space1,
|
||||
combinator::map,
|
||||
};
|
||||
use recorder::{
|
||||
errors::{RecorderError, RecorderResult},
|
||||
extract::{
|
||||
html::extract_inner_text_from_element_ref,
|
||||
mikan::{MikanClient, MikanConfig, MikanEpisodeHash, MikanFansubHash},
|
||||
},
|
||||
};
|
||||
use regex::Regex;
|
||||
use scraper::{ElementRef, Html, Selector};
|
||||
use snafu::FromString;
|
||||
use url::Url;
|
||||
|
||||
lazy_static! {
|
||||
static ref TEST_FOLDER: std::path::PathBuf =
|
||||
if cfg!(any(test, debug_assertions, feature = "playground")) {
|
||||
std::path::PathBuf::from(format!(
|
||||
"{}/tests/resources/mikan/classic_episodes",
|
||||
env!("CARGO_MANIFEST_DIR")
|
||||
))
|
||||
} else {
|
||||
std::path::PathBuf::from("tests/resources/mikan/classic_episodes")
|
||||
};
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref TOTAL_PAGE_REGEX: Regex =
|
||||
Regex::new(r#"\$\(\'\.classic-view-pagination2\'\)\.bootpag\(\{\s*total:\s*(\d+)"#)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
pub struct MikanClassicEpisodeTableRow {
|
||||
pub id: i32,
|
||||
pub publish_at: DateTime<Utc>,
|
||||
pub mikan_fansub_id: Option<String>,
|
||||
pub fansub_name: Option<String>,
|
||||
pub mikan_episode_id: String,
|
||||
pub original_name: String,
|
||||
pub magnet_link: Option<String>,
|
||||
pub file_size: Option<String>,
|
||||
pub torrent_link: Option<String>,
|
||||
}
|
||||
|
||||
impl MikanClassicEpisodeTableRow {
|
||||
fn timezone() -> FixedOffset {
|
||||
FixedOffset::east_opt(8 * 3600).unwrap()
|
||||
}
|
||||
|
||||
fn fixed_date_parser(input: &str) -> IResult<&str, NaiveDate> {
|
||||
alt((
|
||||
map(tag("今天"), move |_| {
|
||||
Utc::now().with_timezone(&Self::timezone()).date_naive()
|
||||
}),
|
||||
map(tag("昨天"), move |_| {
|
||||
Utc::now().with_timezone(&Self::timezone()).date_naive() - Duration::days(1)
|
||||
}),
|
||||
))
|
||||
.parse(input)
|
||||
}
|
||||
|
||||
fn formatted_date_parser(input: &str) -> IResult<&str, NaiveDate> {
|
||||
let (remain, date_str) = take_till1(|c: char| c.is_whitespace()).parse(input)?;
|
||||
let date = NaiveDate::parse_from_str(date_str, "%Y/%m/%d").map_err(|_| {
|
||||
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
|
||||
})?;
|
||||
Ok((remain, date))
|
||||
}
|
||||
|
||||
fn date_parser(input: &str) -> IResult<&str, NaiveDate> {
|
||||
alt((Self::fixed_date_parser, Self::formatted_date_parser)).parse(input)
|
||||
}
|
||||
|
||||
fn time_parser(input: &str) -> IResult<&str, NaiveTime> {
|
||||
let (remain, time_str) = take(5usize).parse(input)?;
|
||||
let time = NaiveTime::parse_from_str(time_str, "%H:%M").map_err(|_| {
|
||||
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
|
||||
})?;
|
||||
Ok((remain, time))
|
||||
}
|
||||
|
||||
fn extract_publish_at(text: &str) -> Option<DateTime<Utc>> {
|
||||
let (_, (date, _, time)) = (Self::date_parser, space1, Self::time_parser)
|
||||
.parse(text)
|
||||
.ok()?;
|
||||
let local_dt = Self::timezone()
|
||||
.from_local_datetime(&date.and_time(time))
|
||||
.single()?;
|
||||
Some(local_dt.with_timezone(&Utc))
|
||||
}
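// Illustrative expectations (added for clarity; not in the original file):
//   extract_publish_at("2024/01/05 20:30") => Some(2024-01-05T12:30:00Z)
//   extract_publish_at("今天 20:30")        => Some(today at 20:30 in UTC+8, converted to UTC)
// Both of Mikan's timestamp styles are parsed in UTC+8 and then normalized to UTC.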
|
||||
|
||||
pub fn from_element_ref(
|
||||
row: ElementRef<'_>,
|
||||
rev_id: i32,
|
||||
idx: i32,
|
||||
mikan_base_url: &Url,
|
||||
) -> RecorderResult<Self> {
|
||||
let publish_at_selector = &Selector::parse("td:nth-of-type(1)").unwrap();
|
||||
let fansub_selector = &Selector::parse("td:nth-of-type(2) > a").unwrap();
|
||||
let original_name_selector =
|
||||
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(1)").unwrap();
|
||||
let magnet_link_selector =
|
||||
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(2)").unwrap();
|
||||
let file_size_selector = &Selector::parse("td:nth-of-type(4)").unwrap();
|
||||
let torrent_link_selector = &Selector::parse("td:nth-of-type(5) > a").unwrap();
|
||||
|
||||
let publish_at = row
|
||||
.select(publish_at_selector)
|
||||
.next()
|
||||
.map(extract_inner_text_from_element_ref)
|
||||
.and_then(|e| Self::extract_publish_at(&e));
|
||||
|
||||
let (mikan_fansub_hash, fansub_name) = row
|
||||
.select(fansub_selector)
|
||||
.next()
|
||||
.and_then(|e| {
|
||||
e.attr("href")
|
||||
.and_then(|s| mikan_base_url.join(s).ok())
|
||||
.and_then(|u| MikanFansubHash::from_homepage_url(&u))
|
||||
.map(|h| (h, extract_inner_text_from_element_ref(e)))
|
||||
})
|
||||
.unzip();
|
||||
|
||||
let (mikan_episode_hash, original_name) = row
|
||||
.select(original_name_selector)
|
||||
.next()
|
||||
.and_then(|el| {
|
||||
el.attr("href")
|
||||
.and_then(|s| mikan_base_url.join(s).ok())
|
||||
.and_then(|u| MikanEpisodeHash::from_homepage_url(&u))
|
||||
.map(|h| (h, extract_inner_text_from_element_ref(el)))
|
||||
})
|
||||
.unzip();
|
||||
|
||||
let magnet_link = row
|
||||
.select(magnet_link_selector)
|
||||
.next()
|
||||
.and_then(|el| el.attr("data-clipboard-text"));
|
||||
|
||||
let file_size = row
|
||||
.select(file_size_selector)
|
||||
.next()
|
||||
.map(extract_inner_text_from_element_ref);
|
||||
|
||||
let torrent_link = row
|
||||
.select(torrent_link_selector)
|
||||
.next()
|
||||
.and_then(|el| el.attr("href"));
|
||||
|
||||
if let (Some(mikan_episode_hash), Some(original_name), Some(publish_at)) = (
|
||||
mikan_episode_hash.as_ref(),
|
||||
original_name.as_ref(),
|
||||
publish_at.as_ref(),
|
||||
) {
|
||||
Ok(Self {
|
||||
id: rev_id * 1000 + idx,
|
||||
publish_at: *publish_at,
|
||||
mikan_fansub_id: mikan_fansub_hash.map(|h| h.mikan_fansub_id.clone()),
|
||||
fansub_name,
|
||||
mikan_episode_id: mikan_episode_hash.mikan_episode_id.clone(),
|
||||
original_name: original_name.clone(),
|
||||
magnet_link: magnet_link.map(|s| s.to_string()),
|
||||
file_size: file_size.map(|s| s.to_string()),
|
||||
torrent_link: torrent_link.map(|s| s.to_string()),
|
||||
})
|
||||
} else {
|
||||
let mut missing_fields = vec![];
|
||||
if mikan_episode_hash.is_none() {
|
||||
missing_fields.push("mikan_episode_id");
|
||||
}
|
||||
if original_name.is_none() {
|
||||
missing_fields.push("original_name");
|
||||
}
|
||||
if publish_at.is_none() {
|
||||
missing_fields.push("publish_at");
|
||||
}
|
||||
Err(RecorderError::without_source(format!(
|
||||
"Failed to parse episode table row, missing fields: {missing_fields:?}, row \
|
||||
index: {idx}"
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MikanClassicEpisodeTablePage {
|
||||
pub page: i32,
|
||||
pub total: i32,
|
||||
pub html: String,
|
||||
pub rows: Vec<MikanClassicEpisodeTableRow>,
|
||||
}
|
||||
|
||||
impl MikanClassicEpisodeTablePage {
|
||||
pub fn from_html(
|
||||
html: String,
|
||||
mikan_base_url: &Url,
|
||||
page: i32,
|
||||
updated_info: Option<(i32, i32)>,
|
||||
) -> RecorderResult<Self> {
|
||||
let tr_selector = &Selector::parse("tbody tr").unwrap();
|
||||
let doc = Html::parse_document(&html);
|
||||
if let Some(mut total) = TOTAL_PAGE_REGEX
|
||||
.captures(&html)
|
||||
.and_then(|c| c.get(1))
|
||||
.and_then(|s| s.as_str().parse::<i32>().ok())
|
||||
{
|
||||
if let Some((_, update_total)) = updated_info {
|
||||
total = update_total;
|
||||
}
|
||||
|
||||
let rev_id = total - page;
|
||||
let rows = doc
|
||||
.select(tr_selector)
|
||||
.rev()
|
||||
.enumerate()
|
||||
.map(|(idx, tr)| {
|
||||
MikanClassicEpisodeTableRow::from_element_ref(
|
||||
tr,
|
||||
rev_id,
|
||||
idx as i32,
|
||||
mikan_base_url,
|
||||
)
|
||||
})
|
||||
.collect::<RecorderResult<Vec<_>>>()?;
|
||||
Ok(Self {
|
||||
page,
|
||||
total,
|
||||
html,
|
||||
rows,
|
||||
})
|
||||
} else {
|
||||
Err(RecorderError::without_source(
|
||||
"Failed to parse pagination meta and rows".into(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn save_to_files(&self) -> RecorderResult<()> {
|
||||
use polars::prelude::*;
|
||||
|
||||
let rev_id = self.total - self.page;
|
||||
let parquet_path = TEST_FOLDER.join(format!("parquet/rev_{rev_id}.parquet"));
|
||||
let csv_path = TEST_FOLDER.join(format!("csv/rev_{rev_id}.csv"));
|
||||
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
|
||||
|
||||
std::fs::write(html_path, self.html.clone())?;
|
||||
|
||||
let mut id_vec = Vec::new();
|
||||
let mut publish_at_vec = Vec::new();
|
||||
let mut mikan_fansub_id_vec = Vec::new();
|
||||
let mut fansub_name_vec = Vec::new();
|
||||
let mut mikan_episode_id_vec = Vec::new();
|
||||
let mut original_name_vec = Vec::new();
|
||||
let mut magnet_link_vec = Vec::new();
|
||||
let mut file_size_vec = Vec::new();
|
||||
let mut torrent_link_vec = Vec::new();
|
||||
|
||||
for row in &self.rows {
|
||||
id_vec.push(row.id);
|
||||
publish_at_vec.push(row.publish_at.to_rfc3339());
|
||||
mikan_fansub_id_vec.push(row.mikan_fansub_id.clone());
|
||||
fansub_name_vec.push(row.fansub_name.clone());
|
||||
mikan_episode_id_vec.push(row.mikan_episode_id.clone());
|
||||
original_name_vec.push(row.original_name.clone());
|
||||
magnet_link_vec.push(row.magnet_link.clone());
|
||||
file_size_vec.push(row.file_size.clone());
|
||||
torrent_link_vec.push(row.torrent_link.clone());
|
||||
}
|
||||
|
||||
let df = df! [
|
||||
"id" => id_vec,
|
||||
"publish_at_timestamp" => publish_at_vec,
|
||||
"mikan_fansub_id" => mikan_fansub_id_vec,
|
||||
"fansub_name" => fansub_name_vec,
|
||||
"mikan_episode_id" => mikan_episode_id_vec,
|
||||
"original_name" => original_name_vec,
|
||||
"magnet_link" => magnet_link_vec,
|
||||
"file_size" => file_size_vec,
|
||||
"torrent_link" => torrent_link_vec,
|
||||
]
|
||||
.map_err(|e| {
|
||||
let message = format!("Failed to create DataFrame: {e}");
|
||||
RecorderError::with_source(Box::new(e), message)
|
||||
})?;
|
||||
|
||||
let mut parquet_file = std::fs::File::create(&parquet_path)?;
|
||||
|
||||
ParquetWriter::new(&mut parquet_file)
|
||||
.finish(&mut df.clone())
|
||||
.map_err(|e| {
|
||||
let message = format!("Failed to write parquet file: {e}");
|
||||
RecorderError::with_source(Box::new(e), message)
|
||||
})?;
|
||||
|
||||
let mut csv_file = std::fs::File::create(&csv_path)?;
|
||||
|
||||
CsvWriter::new(&mut csv_file)
|
||||
.include_header(true)
|
||||
.with_quote_style(QuoteStyle::Always)
|
||||
.finish(&mut df.clone())
|
||||
.map_err(|e| {
|
||||
let message = format!("Failed to write csv file: {e}");
|
||||
RecorderError::with_source(Box::new(e), message)
|
||||
})?;
|
||||
|
||||
println!(
|
||||
"[{}/{}] Saved {} rows to rev_{}.{{parquet,html,csv}}",
|
||||
self.page,
|
||||
self.total,
|
||||
self.rows.len(),
|
||||
rev_id
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn waiting_rev_ids(total: i32) -> RecorderResult<Vec<i32>> {
|
||||
let dir = TEST_FOLDER.join("csv");
|
||||
|
||||
let files = std::fs::read_dir(dir)?;
|
||||
|
||||
let rev_ids = files
|
||||
.filter_map(|f| f.ok())
|
||||
.filter_map(|f| {
|
||||
f.path().file_stem().and_then(|s| {
|
||||
s.to_str().and_then(|s| {
|
||||
if s.starts_with("rev_") {
|
||||
s.replace("rev_", "").parse::<i32>().ok()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
Ok((0..total)
|
||||
.filter(|rev_id| !rev_ids.contains(rev_id))
|
||||
.collect::<Vec<_>>())
|
||||
}
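// Added note for clarity (not in the original file): rev ids are derived as total - page,
// and pages already saved as csv/rev_{N}.csv are filtered out above, so re-running the
// collector resumes from whichever pages are still missing.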
|
||||
}
|
||||
|
||||
async fn scrape_mikan_classic_episode_table_page(
|
||||
mikan_client: &MikanClient,
|
||||
page: i32,
|
||||
updated_info: Option<(i32, i32)>,
|
||||
) -> RecorderResult<MikanClassicEpisodeTablePage> {
|
||||
let mikan_base_url = mikan_client.base_url();
|
||||
let url = mikan_base_url.join(&format!("/Home/Classic/{page}"))?;
|
||||
|
||||
if let Some((rev_id, update_total)) = updated_info.as_ref() {
|
||||
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
|
||||
if html_path.exists() {
|
||||
let html = std::fs::read_to_string(&html_path)?;
|
||||
println!("[{page}/{update_total}] html exists, skipping fetch");
|
||||
return MikanClassicEpisodeTablePage::from_html(
|
||||
html,
|
||||
mikan_base_url,
|
||||
page,
|
||||
updated_info,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let total = if let Some((_, update_total)) = updated_info.as_ref() {
|
||||
update_total.to_string()
|
||||
} else {
|
||||
"Unknown".to_string()
|
||||
};
|
||||
|
||||
println!("[{page}/{total}] fetching html...");
|
||||
|
||||
let html = fetch_html(mikan_client, url).await?;
|
||||
|
||||
println!("[{page}/{total}] fetched html done");
|
||||
|
||||
std::fs::write(TEST_FOLDER.join("html/temp.html"), html.clone())?;
|
||||
|
||||
MikanClassicEpisodeTablePage::from_html(html, mikan_base_url, page, updated_info)
|
||||
}
|
||||
|
||||
async fn scrape_mikan_classic_episode_table_page_from_rev_id(
|
||||
mikan_client: &MikanClient,
|
||||
total: i32,
|
||||
rev_idx: i32,
|
||||
) -> RecorderResult<MikanClassicEpisodeTablePage> {
|
||||
let page = total - rev_idx;
|
||||
|
||||
scrape_mikan_classic_episode_table_page(mikan_client, page, Some((rev_idx, total))).await
|
||||
}
|
||||
|
||||
async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()> {
|
||||
use polars::prelude::*;
|
||||
|
||||
let dir = TEST_FOLDER.join("parquet");
|
||||
let files = std::fs::read_dir(dir)?;
|
||||
|
||||
let parquet_paths = files
|
||||
.filter_map(|f| f.ok())
|
||||
.filter_map(|f| {
|
||||
let path = f.path();
|
||||
if let Some(ext) = path.extension()
|
||||
&& ext == "parquet"
|
||||
&& path
|
||||
.file_stem()
|
||||
.is_some_and(|f| f.to_string_lossy().starts_with("rev_"))
|
||||
{
|
||||
Some(path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if parquet_paths.is_empty() {
|
||||
return Err(RecorderError::without_source(
|
||||
"No parquet files found to merge".into(),
|
||||
));
|
||||
}
|
||||
|
||||
println!("Found {} parquet files to merge", parquet_paths.len());
|
||||
|
||||
// Read and merge all parquet files
|
||||
let mut all_dfs = Vec::new();
|
||||
for path in &parquet_paths {
|
||||
println!("Reading {path:?}");
|
||||
let file = std::fs::File::open(path)?;
|
||||
let df = ParquetReader::new(file).finish().map_err(|e| {
|
||||
let message = format!("Failed to read parquet file {path:?}: {e}");
|
||||
RecorderError::with_source(Box::new(e), message)
|
||||
})?;
|
||||
all_dfs.push(df);
|
||||
}
|
||||
|
||||
let lazy_frames: Vec<LazyFrame> = all_dfs.into_iter().map(|df| df.lazy()).collect();
|
||||
|
||||
let merged_df = concat_lf_diagonal(&lazy_frames, UnionArgs::default())
|
||||
.map_err(|e| {
|
||||
let message = format!("Failed to concat DataFrames: {e}");
|
||||
RecorderError::with_source(Box::new(e), message)
|
||||
})?
|
||||
.sort(
|
||||
["publish_at_timestamp"],
|
||||
SortMultipleOptions::default().with_order_descending(true),
|
||||
)
|
||||
.unique(
|
||||
Some(vec![
|
||||
"mikan_fansub_id".to_string(),
|
||||
"mikan_episode_id".to_string(),
|
||||
]),
|
||||
UniqueKeepStrategy::First,
|
||||
)
|
||||
.collect()
|
||||
.map_err(|e| {
|
||||
let message = format!("Failed to collect lazy DataFrame: {e}");
|
||||
RecorderError::with_source(Box::new(e), message)
|
||||
})?;
|
||||
|
||||
fn select_columns_and_write(
|
||||
merged_df: DataFrame,
|
||||
name: &str,
|
||||
columns: &[&str],
|
||||
) -> RecorderResult<()> {
|
||||
let result_df = merged_df
|
||||
.lazy()
|
||||
.sort(["publish_at_timestamp"], SortMultipleOptions::default())
|
||||
.select(columns.iter().map(|c| col(*c)).collect_vec())
|
||||
.collect()
|
||||
.map_err(|e| {
|
||||
let message = format!("Failed to sort and select columns: {e}");
|
||||
RecorderError::with_source(Box::new(e), message)
|
||||
})?;
|
||||
|
||||
let output_path = TEST_FOLDER.join(format!("parquet/{name}.parquet"));
|
||||
let mut output_file = std::fs::File::create(&output_path)?;
|
||||
|
||||
ParquetWriter::new(&mut output_file)
|
||||
.set_parallel(true)
|
||||
.with_compression(ParquetCompression::Zstd(Some(
|
||||
ZstdLevel::try_new(22).unwrap(),
|
||||
)))
|
||||
.finish(&mut result_df.clone())
|
||||
.map_err(|e| {
|
||||
let message = format!("Failed to write merged parquet file: {e}");
|
||||
RecorderError::with_source(Box::new(e), message)
|
||||
})?;
|
||||
|
||||
println!("Merged {} rows into {output_path:?}", result_df.height());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
|
||||
// select_columns_and_write(
|
||||
// merged_df.clone(),
|
||||
// "lite",
|
||||
// &[
|
||||
// "mikan_fansub_id",
|
||||
// "fansub_name",
|
||||
// "mikan_episode_id",
|
||||
// "original_name",
|
||||
// ],
|
||||
// )?;
|
||||
// select_columns_and_write(
|
||||
// merged_df,
|
||||
// "full",
|
||||
// &[
|
||||
// "id",
|
||||
// "publish_at_timestamp",
|
||||
// "mikan_fansub_id",
|
||||
// "fansub_name",
|
||||
// "mikan_episode_id",
|
||||
// "original_name",
|
||||
// "magnet_link",
|
||||
// "file_size",
|
||||
// "torrent_link",
|
||||
// ],
|
||||
// )?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> RecorderResult<()> {
|
||||
std::fs::create_dir_all(TEST_FOLDER.join("html"))?;
|
||||
std::fs::create_dir_all(TEST_FOLDER.join("parquet"))?;
|
||||
std::fs::create_dir_all(TEST_FOLDER.join("csv"))?;
|
||||
|
||||
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
|
||||
http_client: HttpClientConfig {
|
||||
exponential_backoff_max_retries: Some(3),
|
||||
leaky_bucket_max_tokens: Some(2),
|
||||
leaky_bucket_initial_tokens: Some(1),
|
||||
leaky_bucket_refill_tokens: Some(1),
|
||||
leaky_bucket_refill_interval: Some(std::time::Duration::from_millis(1000)),
|
||||
user_agent: Some(
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
|
||||
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
|
||||
.to_string(),
|
||||
),
|
||||
..Default::default()
|
||||
},
|
||||
base_url: Url::parse("https://mikanani.me")?,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let first_page_and_pagination_info =
|
||||
scrape_mikan_classic_episode_table_page(&mikan_scrape_client, 1, None).await?;
|
||||
|
||||
let total_page = first_page_and_pagination_info.total;
|
||||
|
||||
first_page_and_pagination_info.save_to_files()?;
|
||||
|
||||
let next_rev_ids = MikanClassicEpisodeTablePage::waiting_rev_ids(total_page)?;
|
||||
|
||||
for todo_rev_id in next_rev_ids {
|
||||
let page = scrape_mikan_classic_episode_table_page_from_rev_id(
|
||||
&mikan_scrape_client,
|
||||
total_page,
|
||||
todo_rev_id,
|
||||
)
|
||||
.await?;
|
||||
|
||||
page.save_to_files()?;
|
||||
}
|
||||
|
||||
// Merge all parquet files
|
||||
println!("\nMerging all parquet files...");
|
||||
|
||||
merge_mikan_classic_episodes_and_strip_columns().await?;
|
||||
|
||||
println!("Merge completed!");
|
||||
|
||||
Ok(())
|
||||
}
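Assuming the `[[example]]` entry with `required-features = ["playground"]` added to the recorder Cargo.toml earlier in this diff, the collector would be run with something like:

cargo run -p recorder --example mikan_collect_classic_eps --features playground

The `-p recorder` selector is inferred from the package's `name = "recorder"` field; adjust it if the workspace uses a different invocation.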
|
||||
@@ -212,7 +212,7 @@ async fn main() -> Result<()> {
|
||||
};
|
||||
}
|
||||
{
|
||||
let episode_torrent_url = rss_item.url;
|
||||
let episode_torrent_url = rss_item.torrent_link;
|
||||
let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
|
||||
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
|
||||
if !episode_torrent_doppel_path.exists_any() {
|
||||
|
||||
@@ -72,7 +72,7 @@ async fn main() -> RecorderResult<()> {
|
||||
}?;
|
||||
|
||||
{
|
||||
let episode_torrent_url = rss_item.url;
|
||||
let episode_torrent_url = rss_item.torrent_link;
|
||||
let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
|
||||
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
|
||||
if !episode_torrent_doppel_path.exists_any() {
|
||||
@@ -173,7 +173,7 @@ async fn main() -> RecorderResult<()> {
|
||||
};
|
||||
|
||||
{
|
||||
let episode_torrent_url = rss_item.url;
|
||||
let episode_torrent_url = rss_item.torrent_link;
|
||||
let episode_torrent_doppel_path =
|
||||
MikanDoppelPath::new(episode_torrent_url.clone());
|
||||
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
|
||||
|
||||
@@ -26,18 +26,18 @@ host = '{{ get_env(name="HOST", default="localhost") }}'
enable = true

# Generates a unique request ID and enhances logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
[server.middleware.request_id]
[server.middlewares.request_id]
enable = true

[server.middleware.logger]
[server.middlewares.logger]
enable = true

# When your code panics, the request still returns a 500 status code.
[server.middleware.catch_panic]
[server.middlewares.catch_panic]
enable = true

# Timeout middleware for incoming requests. Requests that take longer than the configured duration are cut off and a 408 status code is returned.
[server.middleware.timeout_request]
[server.middlewares.timeout_request]
enable = false
# Duration time in milliseconds.
timeout = 5000
@@ -53,7 +53,10 @@ timeout = 5000
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
[server.middleware.cors]
[server.middlewares.cors]
enable = true

[server.middlewares.compression]
enable = true

# Database Configuration
|
||||
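The hunks above rename the middleware config namespace from `server.middleware.*` to `server.middlewares.*`. A minimal sketch of the resulting layout, assuming the keys shown in this diff are the full set (existing config files would need the same rename):

[server.middlewares.request_id]
enable = true

[server.middlewares.logger]
enable = true

[server.middlewares.catch_panic]
enable = true

[server.middlewares.timeout_request]
enable = false
timeout = 5000

[server.middlewares.cors]
enable = true

[server.middlewares.compression]
enable = true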
@@ -21,6 +21,9 @@ pub struct MainCliArgs {
|
||||
/// Explicit environment
|
||||
#[arg(short, long)]
|
||||
environment: Option<Environment>,
|
||||
|
||||
#[arg(long)]
|
||||
graceful_shutdown: Option<bool>,
|
||||
}
|
||||
|
||||
pub struct AppBuilder {
|
||||
@@ -28,6 +31,7 @@ pub struct AppBuilder {
|
||||
config_file: Option<String>,
|
||||
working_dir: String,
|
||||
environment: Environment,
|
||||
pub graceful_shutdown: bool,
|
||||
}
|
||||
|
||||
impl AppBuilder {
|
||||
@@ -61,7 +65,8 @@ impl AppBuilder {
|
||||
builder = builder
|
||||
.config_file(args.config_file)
|
||||
.dotenv_file(args.dotenv_file)
|
||||
.environment(environment);
|
||||
.environment(environment)
|
||||
.graceful_shutdown(args.graceful_shutdown.unwrap_or(true));
|
||||
|
||||
Ok(builder)
|
||||
}
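With the optional `graceful_shutdown` argument wired into the builder above, a run that disables graceful shutdown would presumably look like `recorder_cli --graceful-shutdown false`: clap derives a `--graceful-shutdown <bool>` flag from the `#[arg(long)] graceful_shutdown: Option<bool>` field, and `unwrap_or(true)` keeps graceful shutdown enabled when the flag is omitted. The binary name is taken from the `name = "recorder_cli"` entry visible in the recorder Cargo.toml hunk header.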
|
||||
@@ -118,6 +123,12 @@ impl AppBuilder {
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn graceful_shutdown(self, graceful_shutdown: bool) -> Self {
|
||||
let mut ret = self;
|
||||
ret.graceful_shutdown = graceful_shutdown;
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn dotenv_file(self, dotenv_file: Option<String>) -> Self {
|
||||
let mut ret = self;
|
||||
ret.dotenv_file = dotenv_file;
|
||||
@@ -141,6 +152,7 @@ impl Default for AppBuilder {
|
||||
dotenv_file: None,
|
||||
config_file: None,
|
||||
working_dir: String::from("."),
|
||||
graceful_shutdown: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ leaky_bucket_initial_tokens = 0
|
||||
leaky_bucket_refill_tokens = 1
|
||||
leaky_bucket_refill_interval = 500
|
||||
|
||||
|
||||
[mikan.http_client.proxy]
|
||||
|
||||
[mikan.http_client.proxy.headers]
|
||||
@@ -26,3 +27,5 @@ complexity_limit = inf
|
||||
[task]
|
||||
|
||||
[message]
|
||||
|
||||
[media]
|
||||
|
||||
@@ -11,8 +11,8 @@ use super::env::Environment;
|
||||
use crate::{
|
||||
auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
|
||||
errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
|
||||
logger::LoggerConfig, message::MessageConfig, storage::StorageConfig, task::TaskConfig,
|
||||
web::WebServerConfig,
|
||||
logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
|
||||
task::TaskConfig, web::WebServerConfig,
|
||||
};
|
||||
|
||||
const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
|
||||
@@ -27,6 +27,7 @@ pub struct AppConfig {
|
||||
pub mikan: MikanConfig,
|
||||
pub crypto: CryptoConfig,
|
||||
pub graphql: GraphQLConfig,
|
||||
pub media: MediaConfig,
|
||||
pub logger: LoggerConfig,
|
||||
pub database: DatabaseConfig,
|
||||
pub task: TaskConfig,
|
||||
|
||||
@@ -6,7 +6,8 @@ use super::{Environment, config::AppConfig};
|
||||
use crate::{
|
||||
auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
|
||||
errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
|
||||
logger::LoggerService, message::MessageService, storage::StorageService, task::TaskService,
|
||||
logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
|
||||
task::TaskService,
|
||||
};
|
||||
|
||||
pub trait AppContextTrait: Send + Sync + Debug {
|
||||
@@ -23,6 +24,7 @@ pub trait AppContextTrait: Send + Sync + Debug {
|
||||
fn crypto(&self) -> &CryptoService;
|
||||
fn task(&self) -> &TaskService;
|
||||
fn message(&self) -> &MessageService;
|
||||
fn media(&self) -> &MediaService;
|
||||
}
|
||||
|
||||
pub struct AppContext {
|
||||
@@ -37,6 +39,7 @@ pub struct AppContext {
|
||||
working_dir: String,
|
||||
environment: Environment,
|
||||
message: MessageService,
|
||||
media: MediaService,
|
||||
task: OnceCell<TaskService>,
|
||||
graphql: OnceCell<GraphQLService>,
|
||||
}
|
||||
@@ -57,6 +60,7 @@ impl AppContext {
|
||||
let auth = AuthService::from_conf(config.auth).await?;
|
||||
let mikan = MikanClient::from_config(config.mikan).await?;
|
||||
let crypto = CryptoService::from_config(config.crypto).await?;
|
||||
let media = MediaService::from_config(config.media).await?;
|
||||
|
||||
let ctx = Arc::new(AppContext {
|
||||
config: config_cloned,
|
||||
@@ -70,6 +74,7 @@ impl AppContext {
|
||||
working_dir: working_dir.to_string(),
|
||||
crypto,
|
||||
message,
|
||||
media,
|
||||
task: OnceCell::new(),
|
||||
graphql: OnceCell::new(),
|
||||
});
|
||||
@@ -136,4 +141,7 @@ impl AppContextTrait for AppContext {
|
||||
fn message(&self) -> &MessageService {
|
||||
&self.message
|
||||
}
|
||||
fn media(&self) -> &MediaService {
|
||||
&self.media
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,6 +13,8 @@ use crate::{
|
||||
},
|
||||
};
|
||||
|
||||
pub const PROJECT_NAME: &str = "konobangu";
|
||||
|
||||
pub struct App {
|
||||
pub context: Arc<dyn AppContextTrait>,
|
||||
pub builder: AppBuilder,
|
||||
@@ -51,21 +53,24 @@ impl App {
|
||||
|
||||
let mut router = Router::<Arc<dyn AppContextTrait>>::new();
|
||||
|
||||
let (graphql_c, oidc_c, metadata_c, static_c) = futures::try_join!(
|
||||
let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
|
||||
controller::graphql::create(context.clone()),
|
||||
controller::oidc::create(context.clone()),
|
||||
controller::metadata::create(context.clone()),
|
||||
controller::r#static::create(context.clone()),
|
||||
controller::feeds::create(context.clone()),
|
||||
)?;
|
||||
|
||||
for c in [graphql_c, oidc_c, metadata_c, static_c] {
|
||||
for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
|
||||
router = c.apply_to(router);
|
||||
}
|
||||
|
||||
let middlewares = default_middleware_stack(context.clone());
|
||||
for mid in middlewares {
|
||||
router = mid.apply(router)?;
|
||||
tracing::info!(name = mid.name(), "+middleware");
|
||||
if mid.is_enabled() {
|
||||
router = mid.apply(router)?;
|
||||
tracing::info!(name = mid.name(), "+middleware");
|
||||
}
|
||||
}
|
||||
|
||||
let router = router
|
||||
@@ -73,26 +78,40 @@ impl App {
|
||||
.into_make_service_with_connect_info::<SocketAddr>();
|
||||
|
||||
let task = context.task();
|
||||
|
||||
let graceful_shutdown = self.builder.graceful_shutdown;
|
||||
|
||||
tokio::try_join!(
|
||||
async {
|
||||
axum::serve(listener, router)
|
||||
.with_graceful_shutdown(async move {
|
||||
Self::shutdown_signal().await;
|
||||
tracing::info!("axum shutting down...");
|
||||
})
|
||||
.await?;
|
||||
let axum_serve = axum::serve(listener, router);
|
||||
|
||||
if graceful_shutdown {
|
||||
axum_serve
|
||||
.with_graceful_shutdown(async move {
|
||||
Self::shutdown_signal().await;
|
||||
tracing::info!("axum shutting down...");
|
||||
})
|
||||
.await?;
|
||||
} else {
|
||||
axum_serve.await?;
|
||||
}
|
||||
|
||||
Ok::<(), RecorderError>(())
|
||||
},
|
||||
async {
|
||||
{
|
||||
let monitor = task.setup_monitor().await?;
|
||||
monitor
|
||||
.run_with_signal(async move {
|
||||
Self::shutdown_signal().await;
|
||||
tracing::info!("apalis shutting down...");
|
||||
Ok(())
|
||||
})
|
||||
.await?;
|
||||
if graceful_shutdown {
|
||||
monitor
|
||||
.run_with_signal(async move {
|
||||
Self::shutdown_signal().await;
|
||||
tracing::info!("apalis shutting down...");
|
||||
Ok(())
|
||||
})
|
||||
.await?;
|
||||
} else {
|
||||
monitor.run().await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok::<(), RecorderError>(())
|
||||
@@ -135,7 +154,7 @@ impl App {
|
||||
#[cfg(not(unix))]
|
||||
let terminate = std::future::pending::<()>();
|
||||
|
||||
#[cfg(all(not(unix), debug_assertions))]
|
||||
#[cfg(not(all(unix, debug_assertions)))]
|
||||
let quit = std::future::pending::<()>();
|
||||
|
||||
tokio::select! {
|
||||
|
||||
@@ -4,7 +4,7 @@ pub mod context;
|
||||
pub mod core;
|
||||
pub mod env;
|
||||
|
||||
pub use core::App;
|
||||
pub use core::{App, PROJECT_NAME};
|
||||
|
||||
pub use builder::AppBuilder;
|
||||
pub use config::AppConfig;
|
||||
|
||||
@@ -9,7 +9,7 @@ use super::{
|
||||
service::{AuthServiceTrait, AuthUserInfo},
|
||||
};
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
app::{AppContextTrait, PROJECT_NAME},
|
||||
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
|
||||
};
|
||||
|
||||
@@ -86,7 +86,7 @@ impl AuthServiceTrait for BasicAuthService {
|
||||
}
|
||||
|
||||
fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
|
||||
Some(HeaderValue::from_static(r#"Basic realm="konobangu""#))
|
||||
Some(HeaderValue::from_str(format!("Basic realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
|
||||
}
|
||||
|
||||
fn auth_type(&self) -> AuthType {
|
||||
|
||||
@@ -32,7 +32,11 @@ use super::{
|
||||
errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
|
||||
service::{AuthServiceTrait, AuthUserInfo},
|
||||
};
|
||||
use crate::{app::AppContextTrait, errors::RecorderError, models::auth::AuthType};
|
||||
use crate::{
|
||||
app::{AppContextTrait, PROJECT_NAME},
|
||||
errors::RecorderError,
|
||||
models::auth::AuthType,
|
||||
};
|
||||
|
||||
pub struct OidcHttpClient(pub Arc<HttpClient>);
|
||||
|
||||
@@ -351,7 +355,7 @@ impl AuthServiceTrait for OidcAuthService {
|
||||
}
|
||||
|
||||
fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
|
||||
Some(HeaderValue::from_static(r#"Bearer realm="konobangu""#))
|
||||
Some(HeaderValue::from_str(format!("Bearer realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
|
||||
}
|
||||
|
||||
fn auth_type(&self) -> AuthType {
|
||||
|
||||
@@ -29,6 +29,11 @@ pub enum RecorderError {
|
||||
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
|
||||
source: OptDynErr,
|
||||
},
|
||||
#[snafu(transparent)]
|
||||
ImageError { source: image::ImageError },
|
||||
#[cfg(feature = "jxl")]
|
||||
#[snafu(transparent)]
|
||||
JxlEncodeError { source: jpegxl_rs::EncodeError },
|
||||
#[snafu(transparent, context(false))]
|
||||
HttpError { source: http::Error },
|
||||
#[snafu(transparent, context(false))]
|
||||
@@ -42,8 +47,12 @@ pub enum RecorderError {
|
||||
RegexError { source: regex::Error },
|
||||
#[snafu(display("Invalid method"))]
|
||||
InvalidMethodError,
|
||||
#[snafu(display("Invalid header value"))]
|
||||
InvalidHeaderValueError,
|
||||
#[snafu(display("Invalid header name"))]
|
||||
InvalidHeaderNameError,
|
||||
#[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
|
||||
MissingOriginError,
|
||||
#[snafu(transparent)]
|
||||
TracingAppenderInitError {
|
||||
source: tracing_appender::rolling::InitError,
|
||||
@@ -82,8 +91,6 @@ pub enum RecorderError {
|
||||
#[snafu(source(from(opendal::Error, Box::new)))]
|
||||
source: Box<opendal::Error>,
|
||||
},
|
||||
#[snafu(display("Invalid header value"))]
|
||||
InvalidHeaderValueError,
|
||||
#[snafu(transparent)]
|
||||
HttpClientError { source: HttpClientError },
|
||||
#[cfg(feature = "testcontainers")]
|
||||
@@ -243,6 +250,11 @@ impl IntoResponse for RecorderError {
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
Self::ModelEntityNotFound { entity } => (
|
||||
StatusCode::NOT_FOUND,
|
||||
Json::<StandardErrorResponse>(StandardErrorResponse::from(entity.to_string())),
|
||||
)
|
||||
.into_response(),
|
||||
err => (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use fancy_regex::Regex as FancyRegex;
|
||||
use lazy_static::lazy_static;
|
||||
use quirks_path::Path;
|
||||
@@ -33,6 +34,14 @@ lazy_static! {
|
||||
Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct EpisodeEnclosureMeta {
|
||||
pub magnet_link: Option<String>,
|
||||
pub torrent_link: Option<String>,
|
||||
pub pub_date: Option<DateTime<Utc>>,
|
||||
pub content_length: Option<i64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
|
||||
pub struct TorrentEpisodeMediaMeta {
|
||||
pub fansub: Option<String>,
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use fancy_regex::Regex as FancyRegex;
|
||||
use lazy_static::lazy_static;
|
||||
use maplit::hashmap;
|
||||
use regex::Regex;
|
||||
|
||||
const LANG_ZH_TW: &str = "zh-tw";
|
||||
@@ -34,40 +31,4 @@ lazy_static! {
|
||||
(LANG_JP, vec!["jp", "jpn", "日"]),
|
||||
]
|
||||
};
|
||||
pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
|
||||
pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
|
||||
pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
|
||||
hashmap! {
|
||||
"〇" => 0,
|
||||
"一" => 1,
|
||||
"二" => 2,
|
||||
"三" => 3,
|
||||
"四" => 4,
|
||||
"五" => 5,
|
||||
"六" => 6,
|
||||
"七" => 7,
|
||||
"八" => 8,
|
||||
"九" => 9,
|
||||
"十" => 10,
|
||||
"廿" => 20,
|
||||
"百" => 100,
|
||||
"千" => 1000,
|
||||
"零" => 0,
|
||||
"壹" => 1,
|
||||
"贰" => 2,
|
||||
"叁" => 3,
|
||||
"肆" => 4,
|
||||
"伍" => 5,
|
||||
"陆" => 6,
|
||||
"柒" => 7,
|
||||
"捌" => 8,
|
||||
"玖" => 9,
|
||||
"拾" => 10,
|
||||
"念" => 20,
|
||||
"佰" => 100,
|
||||
"仟" => 1000,
|
||||
}
|
||||
};
|
||||
pub static ref ZH_NUM_RE: Regex =
|
||||
Regex::new(r"[〇一二三四五六七八九十廿百千零壹贰叁肆伍陆柒捌玖拾念佰仟]").unwrap();
|
||||
}
|
||||
|
||||
@@ -1,7 +1,12 @@
|
||||
use axum::http::{HeaderName, HeaderValue, Uri, header, request::Parts};
|
||||
use axum::{
|
||||
extract::FromRequestParts,
|
||||
http::{HeaderName, HeaderValue, Uri, header, request::Parts},
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use url::Url;
|
||||
|
||||
use crate::errors::RecorderError;
|
||||
|
||||
/// Fields from a "Forwarded" header per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4)
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ForwardedHeader {
|
||||
@@ -101,9 +106,13 @@ pub struct ForwardedRelatedInfo {
|
||||
pub origin: Option<String>,
|
||||
}
|
||||
|
||||
impl ForwardedRelatedInfo {
|
||||
pub fn from_request_parts(request_parts: &Parts) -> ForwardedRelatedInfo {
|
||||
let headers = &request_parts.headers;
|
||||
impl<T> FromRequestParts<T> for ForwardedRelatedInfo {
|
||||
type Rejection = RecorderError;
|
||||
fn from_request_parts(
|
||||
parts: &mut Parts,
|
||||
_state: &T,
|
||||
) -> impl Future<Output = Result<Self, Self::Rejection>> + Send {
|
||||
let headers = &parts.headers;
|
||||
let forwarded = headers
|
||||
.get(header::FORWARDED)
|
||||
.and_then(|s| ForwardedHeader::try_from(s.clone()).ok());
|
||||
@@ -132,17 +141,19 @@ impl ForwardedRelatedInfo {
|
||||
.get(header::ORIGIN)
|
||||
.and_then(|s| s.to_str().map(String::from).ok());
|
||||
|
||||
ForwardedRelatedInfo {
|
||||
futures::future::ready(Ok(ForwardedRelatedInfo {
|
||||
host,
|
||||
x_forwarded_for,
|
||||
x_forwarded_host,
|
||||
x_forwarded_proto,
|
||||
forwarded,
|
||||
uri: request_parts.uri.clone(),
|
||||
uri: parts.uri.clone(),
|
||||
origin,
|
||||
}
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
impl ForwardedRelatedInfo {
|
||||
pub fn resolved_protocol(&self) -> Option<&str> {
|
||||
self.forwarded
|
||||
.as_ref()
|
||||
|
||||
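A hypothetical usage sketch (handler name and return type are illustrative, not from the diff): with the new `FromRequestParts` implementation, the struct can be taken directly as a handler argument instead of calling `ForwardedRelatedInfo::from_request_parts` by hand:

async fn origin_info(info: ForwardedRelatedInfo) -> String {
    // resolved_protocol() is the helper kept on the inherent impl above
    info.resolved_protocol().unwrap_or("http").to_string()
}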
@@ -2,10 +2,6 @@ use url::Url;
|
||||
|
||||
pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
|
||||
let mut image_url = base_url.join(image_src).ok()?;
|
||||
if let Some((_, value)) = image_url.query_pairs().find(|(key, _)| key == "webp") {
|
||||
image_url.set_query(Some(&format!("webp={value}")));
|
||||
} else {
|
||||
image_url.set_query(None);
|
||||
}
|
||||
image_url.set_query(None);
|
||||
Some(image_url)
|
||||
}
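The removed branch used to preserve a `webp` query parameter; the helper now always strips the query string. An illustrative call (the URL is made up):

// assuming base_url = https://mikanani.me/
// extract_image_src_from_str("/images/Bangumi/202409/poster.jpg?width=400", &base_url)
//   => Some(https://mikanani.me/images/Bangumi/202409/poster.jpg)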
|
||||
|
||||
@@ -12,6 +12,7 @@ pub const MIKAN_BANGUMI_POSTER_PATH: &str = "/images/Bangumi";
|
||||
pub const MIKAN_EPISODE_TORRENT_PATH: &str = "/Download";
|
||||
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH: &str = "/RSS/MyBangumi";
|
||||
pub const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi";
|
||||
pub const MIKAN_FANSUB_HOMEPAGE_PATH: &str = "/Home/PublishGroup";
|
||||
pub const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId";
|
||||
pub const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid";
|
||||
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY: &str = "token";
|
||||
|
||||
@@ -11,10 +11,11 @@ pub use constants::{
|
||||
MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH,
|
||||
MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH,
|
||||
MIKAN_BANGUMI_RSS_PATH, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_EPISODE_TORRENT_PATH,
|
||||
MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH,
|
||||
MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
|
||||
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
|
||||
MIKAN_UNKNOWN_FANSUB_ID, MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
|
||||
MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH,
|
||||
MIKAN_LOGIN_PAGE_SEARCH, MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH,
|
||||
MIKAN_SEASON_STR_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH,
|
||||
MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_UNKNOWN_FANSUB_ID,
|
||||
MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
|
||||
};
|
||||
pub use credential::MikanCredentialForm;
|
||||
pub use subscription::{
|
||||
@@ -22,11 +23,12 @@ pub use subscription::{
|
||||
};
|
||||
pub use web::{
|
||||
MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
|
||||
MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssEpisodeItem,
|
||||
MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionRssUrlMeta,
|
||||
build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url,
|
||||
build_mikan_bangumi_subscription_rss_url, build_mikan_episode_homepage_url,
|
||||
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
|
||||
MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanFansubHash,
|
||||
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
|
||||
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_expand_subscribed_url,
|
||||
build_mikan_bangumi_homepage_url, build_mikan_bangumi_subscription_rss_url,
|
||||
build_mikan_episode_homepage_url, build_mikan_season_flow_url,
|
||||
build_mikan_subscriber_subscription_rss_url,
|
||||
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
|
||||
extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
|
||||
extract_mikan_episode_meta_from_episode_homepage_html,
|
||||
|
||||
@@ -5,6 +5,7 @@ use std::{
|
||||
};
|
||||
|
||||
use async_graphql::{InputObject, SimpleObject};
|
||||
use async_stream::try_stream;
|
||||
use fetch::fetch_bytes;
|
||||
use futures::{Stream, TryStreamExt, pin_mut, try_join};
|
||||
use maplit::hashmap;
|
||||
@@ -19,12 +20,15 @@ use super::scrape_mikan_bangumi_meta_stream_from_season_flow_url;
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::{RecorderError, RecorderResult},
|
||||
extract::mikan::{
|
||||
MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
|
||||
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
|
||||
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
|
||||
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
|
||||
scrape_mikan_episode_meta_from_episode_homepage_url,
|
||||
extract::{
|
||||
bittorrent::EpisodeEnclosureMeta,
|
||||
mikan::{
|
||||
MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
|
||||
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
|
||||
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
|
||||
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
|
||||
scrape_mikan_episode_meta_from_episode_homepage_url,
|
||||
},
|
||||
},
|
||||
models::{
|
||||
bangumi, episodes, subscription_bangumi, subscription_episode,
|
||||
@@ -53,7 +57,7 @@ async fn sync_mikan_feeds_from_rss_item_list(
|
||||
.map(|(episode_id, hash, bangumi_id)| (hash.mikan_episode_id, (episode_id, bangumi_id)))
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let mut new_episode_meta_list: Vec<MikanEpisodeMeta> = vec![];
|
||||
let mut new_episode_meta_list: Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)> = vec![];
|
||||
|
||||
let mikan_client = ctx.mikan();
|
||||
for to_insert_rss_item in rss_item_list.into_iter().filter(|rss_item| {
|
||||
@@ -64,7 +68,8 @@ async fn sync_mikan_feeds_from_rss_item_list(
|
||||
to_insert_rss_item.build_homepage_url(mikan_base_url.clone()),
|
||||
)
|
||||
.await?;
|
||||
new_episode_meta_list.push(episode_meta);
|
||||
let episode_enclosure_meta = EpisodeEnclosureMeta::from(to_insert_rss_item);
|
||||
new_episode_meta_list.push((episode_meta, episode_enclosure_meta));
|
||||
}
|
||||
|
||||
(new_episode_meta_list, existed_episode_hash2id_map)
|
||||
@@ -91,22 +96,22 @@ async fn sync_mikan_feeds_from_rss_item_list(
|
||||
|
||||
let new_episode_meta_list_group_by_bangumi_hash: HashMap<
|
||||
MikanBangumiHash,
|
||||
Vec<MikanEpisodeMeta>,
|
||||
Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)>,
|
||||
> = {
|
||||
let mut m = hashmap! {};
|
||||
for episode_meta in new_episode_meta_list {
|
||||
for (episode_meta, episode_enclosure_meta) in new_episode_meta_list {
|
||||
let bangumi_hash = episode_meta.bangumi_hash();
|
||||
|
||||
m.entry(bangumi_hash)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(episode_meta);
|
||||
.push((episode_meta, episode_enclosure_meta));
|
||||
}
|
||||
m
|
||||
};
|
||||
|
||||
for (group_bangumi_hash, group_episode_meta_list) in new_episode_meta_list_group_by_bangumi_hash
|
||||
{
|
||||
let first_episode_meta = group_episode_meta_list.first().unwrap();
|
||||
let (first_episode_meta, _) = group_episode_meta_list.first().unwrap();
|
||||
let group_bangumi_model = bangumi::Model::get_or_insert_from_mikan(
|
||||
ctx,
|
||||
group_bangumi_hash,
|
||||
@@ -125,9 +130,12 @@ async fn sync_mikan_feeds_from_rss_item_list(
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
let group_episode_creation_list = group_episode_meta_list
|
||||
.into_iter()
|
||||
.map(|episode_meta| (&group_bangumi_model, episode_meta));
|
||||
let group_episode_creation_list =
|
||||
group_episode_meta_list
|
||||
.into_iter()
|
||||
.map(|(episode_meta, episode_enclosure_meta)| {
|
||||
(&group_bangumi_model, episode_meta, episode_enclosure_meta)
|
||||
});
|
||||
|
||||
episodes::Model::add_mikan_episodes_for_subscription(
|
||||
ctx,
|
||||
@@ -272,7 +280,7 @@ impl MikanSubscriberSubscription {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
||||
pub struct MikanSeasonSubscription {
|
||||
pub subscription_id: i32,
|
||||
pub year: i32,
|
||||
@@ -292,17 +300,19 @@ impl SubscriptionTrait for MikanSeasonSubscription {
|
||||
}
|
||||
|
||||
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
let rss_item_list = self
|
||||
.get_rss_item_list_from_subsribed_url_rss_link(ctx.as_ref())
|
||||
.await?;
|
||||
let rss_item_stream = self.get_rss_item_stream_from_subsribed_url_rss_link(ctx.as_ref());
|
||||
|
||||
sync_mikan_feeds_from_rss_item_list(
|
||||
ctx.as_ref(),
|
||||
rss_item_list,
|
||||
self.get_subscriber_id(),
|
||||
self.get_subscription_id(),
|
||||
)
|
||||
.await?;
|
||||
pin_mut!(rss_item_stream);
|
||||
|
||||
while let Some(rss_item_chunk_list) = rss_item_stream.try_next().await? {
|
||||
sync_mikan_feeds_from_rss_item_list(
|
||||
ctx.as_ref(),
|
||||
rss_item_chunk_list,
|
||||
self.get_subscriber_id(),
|
||||
self.get_subscription_id(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -393,48 +403,53 @@ impl MikanSeasonSubscription {
|
||||
)
|
||||
}
|
||||
|
||||
#[tracing::instrument(err, skip(ctx))]
|
||||
async fn get_rss_item_list_from_subsribed_url_rss_link(
|
||||
fn get_rss_item_stream_from_subsribed_url_rss_link(
|
||||
&self,
|
||||
ctx: &dyn AppContextTrait,
|
||||
) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
|
||||
let db = ctx.db();
|
||||
) -> impl Stream<Item = RecorderResult<Vec<MikanRssEpisodeItem>>> {
|
||||
try_stream! {
|
||||
|
||||
let subscribed_bangumi_list = bangumi::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(subscription_bangumi::Column::SubscriptionId.eq(self.subscription_id)),
|
||||
)
|
||||
.join_rev(
|
||||
JoinType::InnerJoin,
|
||||
subscription_bangumi::Relation::Bangumi.def(),
|
||||
)
|
||||
.all(db)
|
||||
.await?;
|
||||
let db = ctx.db();
|
||||
|
||||
let mut rss_item_list = vec![];
|
||||
for subscribed_bangumi in subscribed_bangumi_list {
|
||||
let rss_url = subscribed_bangumi
|
||||
.rss_link
|
||||
.with_whatever_context::<_, String, RecorderError>(|| {
|
||||
format!(
|
||||
"rss_link is required, subscription_id = {}, bangumi_name = {}",
|
||||
self.subscription_id, subscribed_bangumi.display_name
|
||||
)
|
||||
})?;
|
||||
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
|
||||
let subscribed_bangumi_list = bangumi::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(subscription_bangumi::Column::SubscriptionId.eq(self.subscription_id)),
|
||||
)
|
||||
.join_rev(
|
||||
JoinType::InnerJoin,
|
||||
subscription_bangumi::Relation::Bangumi.def(),
|
||||
)
|
||||
.all(db)
|
||||
.await?;
|
||||
|
||||
let channel = rss::Channel::read_from(&bytes[..])?;
|
||||
|
||||
for (idx, item) in channel.items.into_iter().enumerate() {
|
||||
let item = MikanRssEpisodeItem::try_from(item)
|
||||
.with_whatever_context::<_, String, RecorderError>(|_| {
|
||||
format!("failed to extract rss item at idx {idx}")
|
||||
for subscribed_bangumi in subscribed_bangumi_list {
|
||||
let rss_url = subscribed_bangumi
|
||||
.rss_link
|
||||
.with_whatever_context::<_, String, RecorderError>(|| {
|
||||
format!(
|
||||
"rss_link is required, subscription_id = {}, bangumi_name = {}",
|
||||
self.subscription_id, subscribed_bangumi.display_name
|
||||
)
|
||||
})?;
|
||||
rss_item_list.push(item);
|
||||
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
|
||||
|
||||
let channel = rss::Channel::read_from(&bytes[..])?;
|
||||
|
||||
let mut rss_item_list = vec![];
|
||||
|
||||
for (idx, item) in channel.items.into_iter().enumerate() {
|
||||
let item = MikanRssEpisodeItem::try_from(item)
|
||||
.with_whatever_context::<_, String, RecorderError>(|_| {
|
||||
format!("failed to extract rss item at idx {idx}")
|
||||
})?;
|
||||
rss_item_list.push(item);
|
||||
}
|
||||
|
||||
yield rss_item_list;
|
||||
}
|
||||
}
|
||||
Ok(rss_item_list)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -548,13 +563,8 @@ mod tests {
|
||||
subscriptions::{self, SubscriptionTrait},
|
||||
},
|
||||
test_utils::{
|
||||
app::TestingAppContext,
|
||||
crypto::build_testing_crypto_service,
|
||||
database::build_testing_database_service,
|
||||
mikan::{
|
||||
MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential_form,
|
||||
},
|
||||
storage::build_testing_storage_service,
|
||||
app::{TestingAppContext, TestingAppContextPreset},
|
||||
mikan::{MikanMockServer, build_testing_mikan_credential_form},
|
||||
tracing::try_init_testing_tracing,
|
||||
},
|
||||
};
|
||||
@@ -569,20 +579,11 @@ mod tests {
        let mikan_base_url = mikan_server.base_url().clone();

        let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
            mikan_base_url: mikan_base_url.to_string(),
            database_config: None,
        })
        .await?;

        Ok(TestingResources {
            app_ctx,
@@ -2,7 +2,7 @@ use std::{borrow::Cow, fmt, str::FromStr, sync::Arc};
use async_stream::try_stream;
use bytes::Bytes;
use chrono::{DateTime, Utc};
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
use fetch::{html::fetch_html, image::fetch_image};
use futures::{Stream, TryStreamExt, pin_mut};
@@ -17,28 +17,35 @@ use crate::{
    app::AppContextTrait,
    errors::app_error::{RecorderError, RecorderResult},
    extract::{
        bittorrent::EpisodeEnclosureMeta,
        html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
        media::extract_image_src_from_str,
        mikan::{
            MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH, MIKAN_BANGUMI_HOMEPAGE_PATH,
            MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH, MIKAN_BANGUMI_RSS_PATH,
            MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
            MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
            MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
            MIKAN_YEAR_QUERY_KEY, MikanClient,
        },
    },
    media::{
        AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
        EncodeWebpOptions,
    },
    storage::StorageContentCategory,
    task::{OptimizeImageTask, SystemTask},
};

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanRssEpisodeItem {
    pub title: String,
    pub torrent_link: Url,
    pub content_length: Option<i64>,
    pub mime: String,
    pub pub_date: Option<DateTime<Utc>>,
    pub mikan_episode_id: String,
    pub magnet_link: Option<String>,
}

impl MikanRssEpisodeItem {
|
||||
@@ -88,20 +95,49 @@ impl TryFrom<rss::Item> for MikanRssEpisodeItem {
                RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
            })?;

        let pub_date = item
            .extensions
            .get("torrent")
            .and_then(|t| t.get("pubDate"))
            .and_then(|e| e.first())
            .and_then(|e| e.value.as_deref());

        Ok(MikanRssEpisodeItem {
            title,
            torrent_link: enclosure_url,
            content_length: enclosure.length.parse().ok(),
            mime: mime_type,
            pub_date: pub_date.and_then(|s| {
                DateTime::parse_from_rfc2822(s)
                    .ok()
                    .map(|s| s.with_timezone(&Utc))
                    .or_else(|| {
                        DateTime::parse_from_rfc3339(s)
                            .ok()
                            .map(|s| s.with_timezone(&Utc))
                    })
                    .or_else(|| {
                        DateTime::parse_from_rfc3339(&format!("{s}+08:00"))
                            .ok()
                            .map(|s| s.with_timezone(&Utc))
                    })
            }),
            mikan_episode_id,
            magnet_link: None,
        })
    }
}

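// Illustrative sketch, not part of the diff above: the same pub_date fallback chain as a
// standalone helper. Mikan's torrent-extension dates are tried as RFC 2822 first, then
// RFC 3339, then RFC 3339 with an assumed +08:00 offset for naive local timestamps.
// The helper name is hypothetical.
fn parse_mikan_pub_date(s: &str) -> Option<DateTime<Utc>> {
    DateTime::parse_from_rfc2822(s)
        .ok()
        .or_else(|| DateTime::parse_from_rfc3339(s).ok())
        .or_else(|| DateTime::parse_from_rfc3339(&format!("{s}+08:00")).ok())
        .map(|dt| dt.with_timezone(&Utc))
}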
impl From<MikanRssEpisodeItem> for EpisodeEnclosureMeta {
|
||||
fn from(item: MikanRssEpisodeItem) -> Self {
|
||||
Self {
|
||||
magnet_link: item.magnet_link,
|
||||
torrent_link: Some(item.torrent_link.to_string()),
|
||||
pub_date: item.pub_date,
|
||||
content_length: item.content_length,
|
||||
}
|
||||
}
|
||||
}
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
||||
pub struct MikanSubscriberSubscriptionRssUrlMeta {
|
||||
pub mikan_subscription_token: String,
|
||||
@@ -200,6 +236,32 @@ impl MikanBangumiMeta {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct MikanFansubHash {
|
||||
pub mikan_fansub_id: String,
|
||||
}
|
||||
|
||||
impl MikanFansubHash {
|
||||
pub fn from_homepage_url(url: &Url) -> Option<Self> {
|
||||
let path = url.path();
|
||||
if path.starts_with(MIKAN_FANSUB_HOMEPAGE_PATH) {
|
||||
let mikan_fansub_id = path.replace(&format!("{MIKAN_FANSUB_HOMEPAGE_PATH}/"), "");
|
||||
Some(Self { mikan_fansub_id })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_homepage_url(self, mikan_base_url: Url) -> Url {
|
||||
let mut url = mikan_base_url;
|
||||
url.set_path(&format!(
|
||||
"{MIKAN_FANSUB_HOMEPAGE_PATH}/{}",
|
||||
self.mikan_fansub_id
|
||||
));
|
||||
url
|
||||
}
|
||||
}
|
||||
|
||||
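// Illustrative sketch, not part of the diff above: round-tripping a fansub id through
// MikanFansubHash. The base URL and the "203" id are example values only, and the check
// assumes MIKAN_FANSUB_HOMEPAGE_PATH is an absolute path prefix.
fn fansub_hash_roundtrip_example() -> Option<()> {
    let mikan_base_url = Url::parse("https://mikanani.me/").ok()?;
    let hash = MikanFansubHash {
        mikan_fansub_id: "203".to_string(),
    };
    let homepage = hash.clone().build_homepage_url(mikan_base_url);
    // from_homepage_url should recover the same id that built the URL.
    assert_eq!(MikanFansubHash::from_homepage_url(&homepage), Some(hash));
    Some(())
}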
#[derive(Clone, Debug, PartialEq)]
|
||||
pub struct MikanEpisodeMeta {
|
||||
pub homepage: Url,
|
||||
@@ -738,49 +800,92 @@ pub async fn scrape_mikan_poster_data_from_image_url(
#[instrument(skip_all, fields(origin_poster_src_url = origin_poster_src_url.as_str()))]
pub async fn scrape_mikan_poster_meta_from_image_url(
    ctx: &dyn AppContextTrait,
    origin_poster_src_url: Url,
) -> RecorderResult<MikanBangumiPosterMeta> {
    let storage_service = ctx.storage();
    let media_service = ctx.media();
    let mikan_client = ctx.mikan();
    let task_service = ctx.task();

    let storage_path = storage_service.build_public_object_path(
        StorageContentCategory::Image,
        MIKAN_POSTER_BUCKET_KEY,
        &origin_poster_src_url
            .path()
            .replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
    );

    let meta = if let Some(poster_src) = storage_service.exists(&storage_path).await? {
        MikanBangumiPosterMeta {
            origin_poster_src: origin_poster_src_url,
            poster_src: Some(poster_src.to_string()),
        }
    } else {
        let poster_data =
            scrape_mikan_poster_data_from_image_url(mikan_client, origin_poster_src_url.clone())
                .await?;

        let poster_str = storage_service
            .write(storage_path.clone(), poster_data)
            .await?;

        MikanBangumiPosterMeta {
            origin_poster_src: origin_poster_src_url,
            poster_src: Some(poster_str.to_string()),
        }
    };
|
||||
|
||||
if meta.poster_src.is_some()
|
||||
&& storage_path
|
||||
.extension()
|
||||
.is_some_and(|ext| media_service.is_legacy_image_format(ext))
|
||||
{
|
||||
let auto_optimize_formats = &media_service.config.auto_optimize_formats;
|
||||
|
||||
if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Webp) {
|
||||
let webp_storage_path = storage_path.with_extension("webp");
|
||||
if storage_service.exists(&webp_storage_path).await?.is_none() {
|
||||
task_service
|
||||
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
|
||||
source_path: storage_path.clone().to_string(),
|
||||
target_path: webp_storage_path.to_string(),
|
||||
format_options: EncodeImageOptions::Webp(EncodeWebpOptions::default()),
|
||||
}))
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Avif) {
|
||||
let avif_storage_path = storage_path.with_extension("avif");
|
||||
if storage_service.exists(&avif_storage_path).await?.is_none() {
|
||||
task_service
|
||||
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
|
||||
source_path: storage_path.clone().to_string(),
|
||||
target_path: avif_storage_path.to_string(),
|
||||
format_options: EncodeImageOptions::Avif(EncodeAvifOptions::default()),
|
||||
}))
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Jxl) {
|
||||
let jxl_storage_path = storage_path.with_extension("jxl");
|
||||
if storage_service.exists(&jxl_storage_path).await?.is_none() {
|
||||
task_service
|
||||
.add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
|
||||
source_path: storage_path.clone().to_string(),
|
||||
target_path: jxl_storage_path.to_string(),
|
||||
format_options: EncodeImageOptions::Jxl(EncodeJxlOptions::default()),
|
||||
}))
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(meta)
|
||||
}
|
||||
|
||||
pub fn extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(
|
||||
@@ -1007,24 +1112,23 @@ pub async fn scrape_mikan_bangumi_meta_list_from_season_flow_url(
#[cfg(test)]
mod test {
    #![allow(unused_variables)]
    use std::{fs, io::Cursor, sync::Arc};

    use futures::StreamExt;
    use image::{ImageFormat, ImageReader};
    use rstest::{fixture, rstest};
    use tracing::Level;
    use url::Url;

    use super::*;
    use crate::test_utils::{
        app::{TestingAppContext, TestingAppContextPreset},
        crypto::build_testing_crypto_service,
        database::build_testing_database_service,
        mikan::{
            MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential,
            build_testing_mikan_credential_form,
        },
        storage::build_testing_storage_service,
        tracing::try_init_testing_tracing,
    };

@@ -1049,12 +1153,14 @@ mod test {
        scrape_mikan_poster_data_from_image_url(&mikan_client, bangumi_poster_url).await?;

        resources_mock.shared_resource_mock.expect(1);

        let image = {
            let c = Cursor::new(bgm_poster_data);
            ImageReader::new(c)
        };
        let image_format = image.with_guessed_format().ok().and_then(|i| i.format());
        assert!(
            image_format.is_some_and(|fmt| matches!(fmt, ImageFormat::Jpeg)),
            "should start with valid jpeg data magic number"
        );

@@ -1068,37 +1174,47 @@ mod test {
        let mikan_base_url = mikan_server.base_url().clone();

        let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
            mikan_base_url: mikan_base_url.to_string(),
            database_config: None,
        })
        .await?;

        let resources_mock = mikan_server.mock_resources_with_doppel();

        let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;

        let bgm_poster =
            scrape_mikan_poster_meta_from_image_url(app_ctx.as_ref(), bangumi_poster_url).await?;

        resources_mock.shared_resource_mock.expect(1);

        let storage_service = app_ctx.storage();

        let storage_fullname = storage_service.build_public_object_path(
            StorageContentCategory::Image,
            MIKAN_POSTER_BUCKET_KEY,
            "202309/5ce9fed1.jpg",
        );

        assert!(
            storage_service.exists(&storage_fullname).await?.is_some(),
            "storage_fullename_str = {}, list public = {:?}",
            &storage_fullname,
            storage_service.list_public().await?
        );

        let bgm_poster_data = storage_service.read(&storage_fullname).await?;

        let image = {
            let c = Cursor::new(bgm_poster_data.to_vec());
            ImageReader::new(c)
        };
        let image_format = image.with_guessed_format().ok().and_then(|i| i.format());
        assert!(
            image_format.is_some_and(|fmt| matches!(fmt, ImageFormat::Jpeg)),
            "should start with valid jpeg data magic number"
        );

        Ok(())
    }

File diff suppressed because it is too large
@@ -1,845 +0,0 @@
|
||||
/**
|
||||
* @TODO: rewrite with nom
|
||||
*/
|
||||
use std::borrow::Cow;
|
||||
|
||||
use itertools::Itertools;
|
||||
use lazy_static::lazy_static;
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use snafu::whatever;
|
||||
|
||||
use crate::{
|
||||
errors::RecorderResult,
|
||||
extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE},
|
||||
};
|
||||
|
||||
const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";
|
||||
|
||||
lazy_static! {
|
||||
static ref TITLE_RE: Regex = Regex::new(
|
||||
r#"(.*|\[.*])( -? \d+|\[\d+]|\[\d+.?[vV]\d]|第\d+[话話集]|\[第?\d+[话話集]]|\[\d+.?END]|[Ee][Pp]?\d+|\[\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*[话話集]\s*])(.*)"#
|
||||
).unwrap();
|
||||
static ref EP_COLLECTION_RE:Regex = Regex::new(r#"\[?\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*合?[话話集]\s*]?"#).unwrap();
|
||||
static ref MOVIE_TITLE_RE:Regex = Regex::new(r#"(.*|\[.*])(剧场版|[Mm]ovie|电影)(.*?)$"#).unwrap();
|
||||
static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
|
||||
static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap();
|
||||
static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap();
|
||||
static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
|
||||
static ref PREFIX_RE: Regex =
|
||||
Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
|
||||
static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
|
||||
static ref MOVIE_SEASON_EXTRACT_RE: Regex = Regex::new(r"剧场版|Movie|电影").unwrap();
|
||||
static ref MAIN_TITLE_PREFIX_PROCESS_RE1: Regex = Regex::new(r"新番|月?番").unwrap();
|
||||
static ref MAIN_TITLE_PREFIX_PROCESS_RE2: Regex = Regex::new(r"[港澳台]{1,3}地区").unwrap();
|
||||
static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE: Regex = Regex::new(r"\[.+\]").unwrap();
|
||||
static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1: Regex = Regex::new(r"^.*?\[").unwrap();
|
||||
static ref SEASON_EXTRACT_SEASON_ALL_RE: Regex = Regex::new(r"S\d{1,2}|Season \d{1,2}|[第].[季期]|1st|2nd|3rd|\d{1,2}th").unwrap();
|
||||
static ref SEASON_EXTRACT_SEASON_EN_PREFIX_RE: Regex = Regex::new(r"Season|S").unwrap();
|
||||
static ref SEASON_EXTRACT_SEASON_EN_NTH_RE: Regex = Regex::new(r"1st|2nd|3rd|\d{1,2}th").unwrap();
|
||||
static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_RE: Regex = Regex::new(r"[第 ].*[季期(部分)]|部分").unwrap();
|
||||
static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE: Regex = Regex::new(r"[第季期 ]").unwrap();
|
||||
static ref NAME_EXTRACT_REMOVE_RE: Regex = Regex::new(r"[((]仅限[港澳台]{1,3}地区[))]").unwrap();
|
||||
static ref NAME_EXTRACT_SPLIT_RE: Regex = Regex::new(r"/|\s{2}|-\s{2}|\]\[").unwrap();
|
||||
static ref NAME_EXTRACT_REPLACE_ADHOC1_RE: Regex = Regex::new(r"([\p{scx=Han}\s\(\)]{5,})_([a-zA-Z]{2,})").unwrap();
|
||||
static ref NAME_JP_TEST: Regex = Regex::new(r"[\p{scx=Hira}\p{scx=Kana}]{2,}").unwrap();
|
||||
static ref NAME_ZH_TEST: Regex = Regex::new(r"[\p{scx=Han}]{2,}").unwrap();
|
||||
static ref NAME_EN_TEST: Regex = Regex::new(r"[a-zA-Z]{3,}").unwrap();
|
||||
static ref TAGS_EXTRACT_SPLIT_RE: Regex = Regex::new(r"[\[\]()()_]").unwrap();
|
||||
static ref CLEAR_SUB_RE: Regex = Regex::new(r"_MP4|_MKV").unwrap();
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
|
||||
pub struct RawEpisodeMeta {
|
||||
pub name_en: Option<String>,
|
||||
pub name_en_no_season: Option<String>,
|
||||
pub name_jp: Option<String>,
|
||||
pub name_jp_no_season: Option<String>,
|
||||
pub name_zh: Option<String>,
|
||||
pub name_zh_no_season: Option<String>,
|
||||
pub season: i32,
|
||||
pub season_raw: Option<String>,
|
||||
pub episode_index: i32,
|
||||
pub subtitle: Option<String>,
|
||||
pub source: Option<String>,
|
||||
pub fansub: Option<String>,
|
||||
pub resolution: Option<String>,
|
||||
}
|
||||
|
||||
fn extract_fansub(raw_name: &str) -> Option<&str> {
|
||||
let mut groups = EN_BRACKET_SPLIT_RE.splitn(raw_name, 3);
|
||||
groups.nth(1)
|
||||
}
|
||||
|
||||
fn replace_ch_bracket_to_en(raw_name: &str) -> String {
|
||||
raw_name.replace('【', "[").replace('】', "]")
|
||||
}
|
||||
|
||||
fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> RecorderResult<String> {
|
||||
let raw_without_fansub = if let Some(fansub) = fansub {
|
||||
let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
|
||||
fan_sub_re.replace_all(title_body, "")
|
||||
} else {
|
||||
Cow::Borrowed(title_body)
|
||||
};
|
||||
let raw_with_prefix_replaced = PREFIX_RE.replace_all(&raw_without_fansub, "/");
|
||||
let mut arg_group = raw_with_prefix_replaced
|
||||
.split('/')
|
||||
.map(|s| s.trim())
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if arg_group.len() == 1 {
|
||||
arg_group = arg_group.first_mut().unwrap().split(' ').collect();
|
||||
}
|
||||
let mut raw = raw_without_fansub.to_string();
|
||||
for arg in arg_group.iter() {
|
||||
if (arg_group.len() <= 5 && MAIN_TITLE_PREFIX_PROCESS_RE1.is_match(arg))
|
||||
|| (MAIN_TITLE_PREFIX_PROCESS_RE2.is_match(arg))
|
||||
{
|
||||
let sub = Regex::new(&format!(".{arg}."))?;
|
||||
raw = sub.replace_all(&raw, "").to_string();
|
||||
}
|
||||
}
|
||||
if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw)
|
||||
&& m.len() as f32 > (raw.len() as f32) * 0.5
|
||||
{
|
||||
let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
|
||||
.replace(&raw, "")
|
||||
.chars()
|
||||
.collect_vec();
|
||||
while let Some(ch) = raw1.pop() {
|
||||
if ch == ']' {
|
||||
break;
|
||||
}
|
||||
}
|
||||
raw = raw1.into_iter().collect();
|
||||
}
|
||||
Ok(raw.to_string())
|
||||
}
|
||||
|
||||
pub fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
|
||||
let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
|
||||
let seasons = SEASON_EXTRACT_SEASON_ALL_RE
|
||||
.find(&name_and_season)
|
||||
.into_iter()
|
||||
.map(|s| s.as_str())
|
||||
.collect_vec();
|
||||
|
||||
if seasons.is_empty() {
|
||||
return (title_body.to_string(), None, 1);
|
||||
}
|
||||
|
||||
let mut season = 1;
|
||||
let mut season_raw = None;
|
||||
let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");
|
||||
|
||||
for s in seasons {
|
||||
season_raw = Some(s);
|
||||
if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s)
|
||||
&& let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
|
||||
.replace_all(m.as_str(), "")
|
||||
.parse::<i32>()
|
||||
{
|
||||
season = s;
|
||||
break;
|
||||
}
|
||||
if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s)
|
||||
&& let Some(s) = DIGIT_1PLUS_REG
|
||||
.find(m.as_str())
|
||||
.and_then(|s| s.as_str().parse::<i32>().ok())
|
||||
{
|
||||
season = s;
|
||||
break;
|
||||
}
|
||||
if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
|
||||
if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
|
||||
.replace(m.as_str(), "")
|
||||
.parse::<i32>()
|
||||
{
|
||||
season = s;
|
||||
break;
|
||||
}
|
||||
if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
|
||||
season = ZH_NUM_MAP[m.as_str()];
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(name.to_string(), season_raw.map(|s| s.to_string()), season)
|
||||
}
|
||||
|
||||
fn extract_name_from_title_body_name_section(
|
||||
title_body_name_section: &str,
|
||||
) -> (Option<String>, Option<String>, Option<String>) {
|
||||
let mut name_en = None;
|
||||
let mut name_zh = None;
|
||||
let mut name_jp = None;
|
||||
let replaced1 = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
|
||||
let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE
|
||||
.replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
|
||||
let trimmed = replaced2.trim();
|
||||
let mut split = NAME_EXTRACT_SPLIT_RE
|
||||
.split(trimmed)
|
||||
.map(|s| s.trim())
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(|s| s.to_string())
|
||||
.collect_vec();
|
||||
if split.len() == 1 {
|
||||
let mut split_space = split[0].split(' ').collect_vec();
|
||||
let mut search_indices = vec![0];
|
||||
if split_space.len() > 1 {
|
||||
search_indices.push(split_space.len() - 1);
|
||||
}
|
||||
for i in search_indices {
|
||||
if NAME_ZH_TEST.is_match(split_space[i]) {
|
||||
let chs = split_space[i];
|
||||
split_space.remove(i);
|
||||
split = vec![chs.to_string(), split_space.join(" ")];
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
for item in split {
|
||||
if NAME_JP_TEST.is_match(&item) && name_jp.is_none() {
|
||||
name_jp = Some(item);
|
||||
} else if NAME_ZH_TEST.is_match(&item) && name_zh.is_none() {
|
||||
name_zh = Some(item);
|
||||
} else if NAME_EN_TEST.is_match(&item) && name_en.is_none() {
|
||||
name_en = Some(item);
|
||||
}
|
||||
}
|
||||
(name_en, name_zh, name_jp)
|
||||
}
|
||||
|
||||
fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
|
||||
DIGIT_1PLUS_REG
|
||||
.find(title_episode)?
|
||||
.as_str()
|
||||
.parse::<i32>()
|
||||
.ok()
|
||||
}
|
||||
|
||||
fn clear_sub(sub: Option<String>) -> Option<String> {
|
||||
sub.map(|s| CLEAR_SUB_RE.replace_all(&s, "").to_string())
|
||||
}
|
||||
|
||||
fn extract_tags_from_title_extra(
|
||||
title_extra: &str,
|
||||
) -> (Option<String>, Option<String>, Option<String>) {
|
||||
let replaced = TAGS_EXTRACT_SPLIT_RE.replace_all(title_extra, " ");
|
||||
let elements = replaced
|
||||
.split(' ')
|
||||
.map(|s| s.trim())
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect_vec();
|
||||
|
||||
let mut sub = None;
|
||||
let mut resolution = None;
|
||||
let mut source = None;
|
||||
for element in elements.iter() {
|
||||
if SUB_RE.is_match(element) {
|
||||
sub = Some(element.to_string())
|
||||
} else if RESOLUTION_RE.is_match(element) {
|
||||
resolution = Some(element.to_string())
|
||||
} else if SOURCE_L1_RE.is_match(element) {
|
||||
source = Some(element.to_string())
|
||||
}
|
||||
}
|
||||
if source.is_none() {
|
||||
for element in elements {
|
||||
if SOURCE_L2_RE.is_match(element) {
|
||||
source = Some(element.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
(clear_sub(sub), resolution, source)
|
||||
}
|
||||
|
||||
pub fn check_is_movie(title: &str) -> bool {
|
||||
MOVIE_TITLE_RE.is_match(title)
|
||||
}
|
||||
|
||||
pub fn extract_episode_meta_from_origin_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
|
||||
let raw_title = s.trim();
|
||||
let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
|
||||
let fansub = extract_fansub(&raw_title_without_ch_brackets);
|
||||
let movie_capture = check_is_movie(&raw_title_without_ch_brackets);
|
||||
if let Some(title_re_match_obj) = MOVIE_TITLE_RE
|
||||
.captures(&raw_title_without_ch_brackets)
|
||||
.or(TITLE_RE.captures(&raw_title_without_ch_brackets))
|
||||
{
|
||||
let mut title_body = title_re_match_obj
|
||||
.get(1)
|
||||
.map(|s| s.as_str().trim())
|
||||
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"))
|
||||
.to_string();
|
||||
let mut title_episode = title_re_match_obj
|
||||
.get(2)
|
||||
.map(|s| s.as_str().trim())
|
||||
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
|
||||
let title_extra = title_re_match_obj
|
||||
.get(3)
|
||||
.map(|s| s.as_str().trim())
|
||||
.unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
|
||||
|
||||
if movie_capture {
|
||||
title_body += title_episode;
|
||||
title_episode = "";
|
||||
} else if EP_COLLECTION_RE.is_match(title_episode) {
|
||||
title_episode = "";
|
||||
}
|
||||
|
||||
let title_body = title_body_pre_process(&title_body, fansub)?;
|
||||
let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
|
||||
let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
|
||||
let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
|
||||
extract_name_from_title_body_name_section(&name_without_season);
|
||||
let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1);
|
||||
let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
|
||||
Ok(RawEpisodeMeta {
|
||||
name_en,
|
||||
name_en_no_season,
|
||||
name_jp,
|
||||
name_jp_no_season,
|
||||
name_zh,
|
||||
name_zh_no_season,
|
||||
season,
|
||||
season_raw,
|
||||
episode_index,
|
||||
subtitle: sub,
|
||||
source,
|
||||
fansub: fansub.map(|s| s.to_string()),
|
||||
resolution,
|
||||
})
|
||||
} else {
|
||||
whatever!("Can not parse episode meta from raw filename {}", raw_title)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::{RawEpisodeMeta, extract_episode_meta_from_origin_name};
|
||||
|
||||
fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
|
||||
let expected: Option<RawEpisodeMeta> = serde_json::from_str(expected).unwrap_or_default();
|
||||
let found = extract_episode_meta_from_origin_name(raw_name).ok();
|
||||
|
||||
if expected != found {
|
||||
println!(
|
||||
"expected {} and found {} are not equal",
|
||||
serde_json::to_string_pretty(&expected).unwrap(),
|
||||
serde_json::to_string_pretty(&found).unwrap()
|
||||
)
|
||||
}
|
||||
assert_eq!(expected, found);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_all_parts_wrapped() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
|
||||
r#"{
|
||||
"name_zh": "我心里危险的东西",
|
||||
"name_zh_no_season": "我心里危险的东西",
|
||||
"season": 2,
|
||||
"season_raw": "第二季",
|
||||
"episode_index": 5,
|
||||
"subtitle": "简日双语",
|
||||
"source": null,
|
||||
"fansub": "新Sub",
|
||||
"resolution": "1080P"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_title_wrapped_by_one_square_bracket_and_season_prefix() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
|
||||
r#"{
|
||||
"name_en": "Boku no Kokoro no Yabai Yatsu",
|
||||
"name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
|
||||
"name_zh": "我内心的糟糕念头",
|
||||
"name_zh_no_season": "我内心的糟糕念头",
|
||||
"season": 1,
|
||||
"season_raw": null,
|
||||
"episode_index": 18,
|
||||
"subtitle": "简日双语",
|
||||
"source": null,
|
||||
"fansub": "喵萌奶茶屋",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_ep_and_version() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
|
||||
r#"{
|
||||
"name_en": "Shin no Nakama 2nd",
|
||||
"name_en_no_season": "Shin no Nakama",
|
||||
"name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
|
||||
"name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
|
||||
"season": 2,
|
||||
"season_raw": "2nd",
|
||||
"episode_index": 8,
|
||||
"subtitle": "简繁内封字幕",
|
||||
"source": "WebRip",
|
||||
"fansub": "LoliHouse",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_en_title_only() {
|
||||
test_raw_ep_parser_case(
|
||||
r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
|
||||
r#"{
|
||||
"name_en": "THE MARGINAL SERVICE",
|
||||
"name_en_no_season": "THE MARGINAL SERVICE",
|
||||
"season": 1,
|
||||
"episode_index": 8,
|
||||
"subtitle": "简繁内封字幕",
|
||||
"source": "WebRip",
|
||||
"fansub": "动漫国字幕组&LoliHouse",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_two_zh_title() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
|
||||
r#"{
|
||||
"name_en": "Nozomanu Fushi no Boukensha",
|
||||
"name_en_no_season": "Nozomanu Fushi no Boukensha",
|
||||
"name_zh": "事与愿违的不死冒险者",
|
||||
"name_zh_no_season": "事与愿违的不死冒险者",
|
||||
"season": 1,
|
||||
"season_raw": null,
|
||||
"episode_index": 1,
|
||||
"subtitle": "简繁内封字幕",
|
||||
"source": "WebRip",
|
||||
"fansub": "LoliHouse",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_en_zh_jp_titles() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
|
||||
r#"{
|
||||
"name_en": "Pon no Michi",
|
||||
"name_jp": "ぽんのみち",
|
||||
"name_zh": "碰之道",
|
||||
"name_en_no_season": "Pon no Michi",
|
||||
"name_jp_no_season": "ぽんのみち",
|
||||
"name_zh_no_season": "碰之道",
|
||||
"season": 1,
|
||||
"season_raw": null,
|
||||
"episode_index": 7,
|
||||
"subtitle": "简繁日内封字幕",
|
||||
"source": "WebRip",
|
||||
"fansub": "喵萌奶茶屋&LoliHouse",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_nth_season() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
|
||||
r#"{
|
||||
"name_en": "Yowai Character Tomozakikun",
|
||||
"name_en_no_season": "Yowai Character Tomozakikun",
|
||||
"name_zh": "弱角友崎同学 2nd STAGE",
|
||||
"name_zh_no_season": "弱角友崎同学",
|
||||
"season": 2,
|
||||
"season_raw": "2nd",
|
||||
"episode_index": 9,
|
||||
"subtitle": "CHT",
|
||||
"source": "Baha",
|
||||
"fansub": "ANi",
|
||||
"resolution": "1080P"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_season_en_and_season_zh() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
|
||||
r#"{
|
||||
"name_en": "Kingdom S5",
|
||||
"name_en_no_season": "Kingdom",
|
||||
"name_zh": "王者天下 第五季",
|
||||
"name_zh_no_season": "王者天下",
|
||||
"season": 5,
|
||||
"season_raw": "第五季",
|
||||
"episode_index": 7,
|
||||
"subtitle": "简繁外挂字幕",
|
||||
"source": "WebRip",
|
||||
"fansub": "豌豆字幕组&LoliHouse",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_airota_fansub_style_case1() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
|
||||
r#"{
|
||||
"name_en": "Alice to Therese no Maboroshi Koujou",
|
||||
"name_en_no_season": "Alice to Therese no Maboroshi Koujou",
|
||||
"name_zh": "爱丽丝与特蕾丝的虚幻工厂",
|
||||
"name_zh_no_season": "爱丽丝与特蕾丝的虚幻工厂",
|
||||
"season": 1,
|
||||
"episode_index": 1,
|
||||
"subtitle": "简繁内封",
|
||||
"source": "WebRip",
|
||||
"fansub": "千夏字幕组",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_airota_fansub_style_case2() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
|
||||
r#"{
|
||||
"name_en": "Yuru Camp Movie",
|
||||
"name_en_no_season": "Yuru Camp Movie",
|
||||
"name_zh": "电影 轻旅轻营 (摇曳露营)",
|
||||
"name_zh_no_season": "电影 轻旅轻营 (摇曳露营)",
|
||||
"season": 1,
|
||||
"episode_index": 1,
|
||||
"subtitle": "繁体",
|
||||
"source": "UHDRip",
|
||||
"fansub": "千夏字幕组&喵萌奶茶屋",
|
||||
"resolution": "2160p"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_large_episode_style() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#,
|
||||
r#"{
|
||||
"name_en": "New Doraemon",
|
||||
"name_en_no_season": "New Doraemon",
|
||||
"name_zh": "哆啦A梦新番",
|
||||
"name_zh_no_season": "哆啦A梦新番",
|
||||
"season": 1,
|
||||
"episode_index": 747,
|
||||
"subtitle": "GB",
|
||||
"fansub": "梦蓝字幕组",
|
||||
"resolution": "1080P"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_many_square_brackets_split_title() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
|
||||
r#"{
|
||||
"name_en": "Yuru Camp",
|
||||
"name_en_no_season": "Yuru Camp",
|
||||
"name_zh": "剧场版-摇曳露营",
|
||||
"name_zh_no_season": "剧场版-摇曳露营",
|
||||
"season": 1,
|
||||
"episode_index": 1,
|
||||
"subtitle": "简日双语",
|
||||
"fansub": "MCE汉化组",
|
||||
"resolution": "1080P"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_implicit_lang_title_sep() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#,
|
||||
r#"{
|
||||
"name_en": "NieR Automata Ver1.1a",
|
||||
"name_en_no_season": "NieR Automata Ver1.1a",
|
||||
"name_zh": "尼尔:机械纪元",
|
||||
"name_zh_no_season": "尼尔:机械纪元",
|
||||
"season": 1,
|
||||
"episode_index": 2,
|
||||
"subtitle": "简日双语",
|
||||
"fansub": "织梦字幕组",
|
||||
"resolution": "1080P"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_square_brackets_wrapped_and_space_split() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
|
||||
r#"
|
||||
{
|
||||
"name_en": "Delicious in Dungeon",
|
||||
"name_en_no_season": "Delicious in Dungeon",
|
||||
"name_zh": "迷宫饭",
|
||||
"name_zh_no_season": "迷宫饭",
|
||||
"season": 1,
|
||||
"episode_index": 3,
|
||||
"subtitle": "日语中字",
|
||||
"source": "NETFLIX",
|
||||
"fansub": "天月搬运组",
|
||||
"resolution": "1080P"
|
||||
}
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_start_with_brackets_wrapped_season_info_prefix() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
|
||||
r#"{
|
||||
"name_en": "Dungeon Meshi",
|
||||
"name_en_no_season": "Dungeon Meshi",
|
||||
"name_zh": "迷宫饭",
|
||||
"name_zh_no_season": "迷宫饭",
|
||||
"season": 1,
|
||||
"episode_index": 1,
|
||||
"subtitle": "简日双语",
|
||||
"fansub": "爱恋字幕社",
|
||||
"resolution": "1080P"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_small_no_title_extra_brackets_case() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
|
||||
r#"{
|
||||
"name_en": "Mahou Shoujo ni Akogarete",
|
||||
"name_en_no_season": "Mahou Shoujo ni Akogarete",
|
||||
"name_zh": "梦想成为魔法少女 [年龄限制版]",
|
||||
"name_zh_no_season": "梦想成为魔法少女 [年龄限制版]",
|
||||
"season": 1,
|
||||
"episode_index": 9,
|
||||
"subtitle": "CHT",
|
||||
"source": "Baha",
|
||||
"fansub": "ANi",
|
||||
"resolution": "1080P"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_title_leading_space_style() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
|
||||
r#"{
|
||||
"name_zh": "16bit 的感动 ANOTHER LAYER",
|
||||
"name_zh_no_season": "16bit 的感动 ANOTHER LAYER",
|
||||
"season": 1,
|
||||
"season_raw": null,
|
||||
"episode_index": 1,
|
||||
"subtitle": "CHT",
|
||||
"source": "Baha",
|
||||
"fansub": "ANi",
|
||||
"resolution": "1080P"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_title_leading_month_and_wrapped_brackets_style() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#,
|
||||
r#"{
|
||||
"name_en": "~ Sugar Apple Fairy Tale ~",
|
||||
"name_en_no_season": "~ Sugar Apple Fairy Tale ~",
|
||||
"name_zh": "银砂糖师与黑妖精",
|
||||
"name_zh_no_season": "银砂糖师与黑妖精",
|
||||
"season": 1,
|
||||
"episode_index": 13,
|
||||
"subtitle": "简日双语",
|
||||
"fansub": "喵萌奶茶屋",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_title_leading_month_style() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4(字幕社招人内详)"#,
|
||||
r#"{
|
||||
"name_en": "Tengoku Daimakyou",
|
||||
"name_en_no_season": "Tengoku Daimakyou",
|
||||
"name_zh": "天国大魔境",
|
||||
"name_zh_no_season": "天国大魔境",
|
||||
"season": 1,
|
||||
"episode_index": 5,
|
||||
"subtitle": "字幕社招人内详",
|
||||
"source": null,
|
||||
"fansub": "极影字幕社",
|
||||
"resolution": "720P"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_tokusatsu_style() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#,
|
||||
r#"{
|
||||
"name_jp": "仮面ライダーギーツ",
|
||||
"name_jp_no_season": "仮面ライダーギーツ",
|
||||
"name_zh": "假面骑士Geats",
|
||||
"name_zh_no_season": "假面骑士Geats",
|
||||
"season": 1,
|
||||
"episode_index": 33,
|
||||
"source": "WEBDL",
|
||||
"fansub": "MagicStar",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ep_with_multi_lang_zh_title() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对!☆PICO FEVER! / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#,
|
||||
r#"{
|
||||
"name_en": "Garupa Pico: Fever!",
|
||||
"name_en_no_season": "Garupa Pico: Fever!",
|
||||
"name_zh": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
|
||||
"name_zh_no_season": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
|
||||
"season": 1,
|
||||
"episode_index": 26,
|
||||
"subtitle": "简繁内封字幕",
|
||||
"source": "WebRip",
|
||||
"fansub": "百冬练习组&LoliHouse",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ep_collections() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[奶²&LoliHouse] 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简日内封字幕]"#,
|
||||
r#"{
|
||||
"name_en": "Kinokoinu: Mushroom Pup",
|
||||
"name_en_no_season": "Kinokoinu: Mushroom Pup",
|
||||
"name_zh": "蘑菇狗",
|
||||
"name_zh_no_season": "蘑菇狗",
|
||||
"season": 1,
|
||||
"episode_index": 1,
|
||||
"subtitle": "简日内封字幕",
|
||||
"source": "WebRip",
|
||||
"fansub": "奶²&LoliHouse",
|
||||
"resolution": "1080p",
|
||||
"name": " 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集]"
|
||||
}"#,
|
||||
);
|
||||
|
||||
test_raw_ep_parser_case(
|
||||
r#"[LoliHouse] 叹气的亡灵想隐退 / Nageki no Bourei wa Intai shitai [01-13 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
|
||||
r#"{
|
||||
"name_en": "Nageki no Bourei wa Intai shitai",
|
||||
"name_en_no_season": "Nageki no Bourei wa Intai shitai",
|
||||
"name_jp": null,
|
||||
"name_jp_no_season": null,
|
||||
"name_zh": "叹气的亡灵想隐退",
|
||||
"name_zh_no_season": "叹气的亡灵想隐退",
|
||||
"season": 1,
|
||||
"season_raw": null,
|
||||
"episode_index": 1,
|
||||
"subtitle": "简繁内封字幕",
|
||||
"source": "WebRip",
|
||||
"fansub": "LoliHouse",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
);
|
||||
|
||||
test_raw_ep_parser_case(
|
||||
r#"[LoliHouse] 精灵幻想记 第二季 / Seirei Gensouki S2 [01-12 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
|
||||
r#"{
|
||||
"name_en": "Seirei Gensouki S2",
|
||||
"name_en_no_season": "Seirei Gensouki",
|
||||
"name_zh": "精灵幻想记 第二季",
|
||||
"name_zh_no_season": "精灵幻想记",
|
||||
"season": 2,
|
||||
"season_raw": "第二季",
|
||||
"episode_index": 1,
|
||||
"subtitle": "简繁内封字幕",
|
||||
"source": "WebRip",
|
||||
"fansub": "LoliHouse",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
);
|
||||
|
||||
test_raw_ep_parser_case(
|
||||
r#"[喵萌奶茶屋&LoliHouse] 超自然武装当哒当 / 胆大党 / Dandadan [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简繁日内封字幕][Fin]"#,
|
||||
r#" {
|
||||
"name_en": "Dandadan",
|
||||
"name_en_no_season": "Dandadan",
|
||||
"name_zh": "超自然武装当哒当",
|
||||
"name_zh_no_season": "超自然武装当哒当",
|
||||
"season": 1,
|
||||
"episode_index": 1,
|
||||
"subtitle": "简繁日内封字幕",
|
||||
"source": "WebRip",
|
||||
"fansub": "喵萌奶茶屋&LoliHouse",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
);
|
||||
}
|
||||
|
||||
// TODO: FIXME
|
||||
#[test]
|
||||
fn test_bad_cases() {
|
||||
test_raw_ep_parser_case(
|
||||
r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
|
||||
r#"{
|
||||
"name_zh": "摇曳露营△剧场版",
|
||||
"name_zh_no_season": "摇曳露营△剧场版",
|
||||
"season": 1,
|
||||
"season_raw": null,
|
||||
"episode_index": 1,
|
||||
"subtitle": "简繁字幕",
|
||||
"source": "BDrip",
|
||||
"fansub": "7³ACG x 桜都字幕组",
|
||||
"resolution": "1080p"
|
||||
}"#,
|
||||
);
|
||||
|
||||
test_raw_ep_parser_case(
|
||||
r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#,
|
||||
r#"{
|
||||
"name_en": "第二季 Komi-san wa, Komyushou Desu. S02",
|
||||
"name_en_no_season": "Komi-san wa, Komyushou Desu.",
|
||||
"name_zh": "古见同学有交流障碍症",
|
||||
"name_zh_no_season": "古见同学有交流障碍症",
|
||||
"season": 2,
|
||||
"season_raw": "第二季",
|
||||
"episode_index": 22,
|
||||
"subtitle": "GB",
|
||||
"fansub": "幻樱字幕组",
|
||||
"resolution": "1920X1080"
|
||||
}"#,
|
||||
);
|
||||
}
|
||||
}
|
||||
14
apps/recorder/src/graphql/domains/bangumi.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
use seaography::{Builder as SeaographyBuilder, BuilderContext};
|
||||
|
||||
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::bangumi};
|
||||
|
||||
pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
|
||||
restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId);
|
||||
}
|
||||
|
||||
pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
|
||||
builder.register_enumeration::<bangumi::BangumiType>();
|
||||
seaography::register_entity!(builder, bangumi);
|
||||
|
||||
builder
|
||||
}
|
||||
@@ -3,12 +3,22 @@ use std::sync::Arc;
|
||||
use async_graphql::dynamic::{
|
||||
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
|
||||
};
|
||||
use seaography::Builder as SeaographyBuilder;
|
||||
use seaography::{Builder as SeaographyBuilder, BuilderContext};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util_derive::DynamicGraphql;
|
||||
|
||||
use crate::{
|
||||
app::AppContextTrait, auth::AuthUserInfo, errors::RecorderError, models::credential_3rd,
|
||||
app::AppContextTrait,
|
||||
auth::AuthUserInfo,
|
||||
errors::RecorderError,
|
||||
graphql::{
|
||||
domains::subscribers::restrict_subscriber_for_entity,
|
||||
infra::crypto::{
|
||||
register_crypto_column_input_conversion_to_schema_context,
|
||||
register_crypto_column_output_conversion_to_schema_context,
|
||||
},
|
||||
},
|
||||
models::credential_3rd,
|
||||
};
|
||||
|
||||
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
|
||||
@@ -63,9 +73,52 @@ impl Credential3rdCheckAvailableInfo {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register_credential3rd_to_schema_context(
|
||||
context: &mut BuilderContext,
|
||||
ctx: Arc<dyn AppContextTrait>,
|
||||
) {
|
||||
restrict_subscriber_for_entity::<credential_3rd::Entity>(
|
||||
context,
|
||||
&credential_3rd::Column::SubscriberId,
|
||||
);
|
||||
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
|
||||
context,
|
||||
ctx.clone(),
|
||||
&credential_3rd::Column::Cookies,
|
||||
);
|
||||
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
|
||||
context,
|
||||
ctx.clone(),
|
||||
&credential_3rd::Column::Username,
|
||||
);
|
||||
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
|
||||
context,
|
||||
ctx.clone(),
|
||||
&credential_3rd::Column::Password,
|
||||
);
|
||||
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
|
||||
context,
|
||||
ctx.clone(),
|
||||
&credential_3rd::Column::Cookies,
|
||||
);
|
||||
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
|
||||
context,
|
||||
ctx.clone(),
|
||||
&credential_3rd::Column::Username,
|
||||
);
|
||||
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
|
||||
context,
|
||||
ctx,
|
||||
&credential_3rd::Column::Password,
|
||||
);
|
||||
}
|
||||
|
||||
pub fn register_credential3rd_to_schema_builder(
|
||||
mut builder: SeaographyBuilder,
|
||||
) -> SeaographyBuilder {
|
||||
builder.register_enumeration::<credential_3rd::Credential3rdType>();
|
||||
seaography::register_entity!(builder, credential_3rd);
|
||||
|
||||
builder.schema = builder
|
||||
.schema
|
||||
.register(Credential3rdCheckAvailableInput::generate_input_object());
|
||||
|
||||
17
apps/recorder/src/graphql/domains/downloaders.rs
Normal file
@@ -0,0 +1,17 @@
|
||||
use seaography::{Builder as SeaographyBuilder, BuilderContext};
|
||||
|
||||
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloaders};
|
||||
|
||||
pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
|
||||
restrict_subscriber_for_entity::<downloaders::Entity>(
|
||||
context,
|
||||
&downloaders::Column::SubscriberId,
|
||||
);
|
||||
}
|
||||
|
||||
pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
|
||||
builder.register_enumeration::<downloaders::DownloaderCategory>();
|
||||
seaography::register_entity!(builder, downloaders);
|
||||
|
||||
builder
|
||||
}
|
||||
15
apps/recorder/src/graphql/domains/downloads.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
use seaography::{Builder as SeaographyBuilder, BuilderContext};
|
||||
|
||||
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloads};
|
||||
|
||||
pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
|
||||
restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId);
|
||||
}
|
||||
|
||||
pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
|
||||
builder.register_enumeration::<downloads::DownloadStatus>();
|
||||
builder.register_enumeration::<downloads::DownloadMime>();
|
||||
seaography::register_entity!(builder, downloads);
|
||||
|
||||
builder
|
||||
}
|
||||
14
apps/recorder/src/graphql/domains/episodes.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
use seaography::{Builder as SeaographyBuilder, BuilderContext};
|
||||
|
||||
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::episodes};
|
||||
|
||||
pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
|
||||
restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId);
|
||||
}
|
||||
|
||||
pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
|
||||
builder.register_enumeration::<episodes::EpisodeType>();
|
||||
seaography::register_entity!(builder, episodes);
|
||||
|
||||
builder
|
||||
}
|
||||
56
apps/recorder/src/graphql/domains/feeds.rs
Normal file
@@ -0,0 +1,56 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_graphql::dynamic::ResolverContext;
|
||||
use sea_orm::Value as SeaValue;
|
||||
use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};
|
||||
|
||||
use crate::{
|
||||
graphql::{
|
||||
domains::subscribers::restrict_subscriber_for_entity,
|
||||
infra::util::{get_entity_column_key, get_entity_key},
|
||||
},
|
||||
models::feeds,
|
||||
};
|
||||
|
||||
pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
|
||||
restrict_subscriber_for_entity::<feeds::Entity>(context, &feeds::Column::SubscriberId);
|
||||
{
|
||||
let entity_column_key =
|
||||
get_entity_column_key::<feeds::Entity>(context, &feeds::Column::Token);
|
||||
let entity_key = get_entity_key::<feeds::Entity>(context);
|
||||
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
|
||||
let entity_create_one_mutation_field_name = Arc::new(format!(
|
||||
"{}{}",
|
||||
entity_name, context.entity_create_one_mutation.mutation_suffix
|
||||
));
|
||||
let entity_create_batch_mutation_field_name = Arc::new(format!(
|
||||
"{}{}",
|
||||
entity_name,
|
||||
context.entity_create_batch_mutation.mutation_suffix.clone()
|
||||
));
|
||||
|
||||
context.types.input_none_conversions.insert(
|
||||
entity_column_key,
|
||||
Box::new(
|
||||
move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
|
||||
let field_name = context.field().name();
|
||||
if field_name == entity_create_one_mutation_field_name.as_str()
|
||||
|| field_name == entity_create_batch_mutation_field_name.as_str()
|
||||
{
|
||||
Ok(Some(SeaValue::String(Some(Box::new(nanoid::nanoid!())))))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
|
||||
builder.register_enumeration::<feeds::FeedType>();
|
||||
builder.register_enumeration::<feeds::FeedSource>();
|
||||
seaography::register_entity!(builder, feeds);
|
||||
|
||||
builder
|
||||
}
|
||||
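// Illustrative sketch, not part of the diff above: the effect of the input_none_conversion
// registered for feeds::Column::Token — a create-one or create-batch mutation that omits the
// token gets a freshly generated nanoid, while other operations leave the value untouched.
// The helper name is hypothetical.
fn default_feed_token(provided: Option<String>) -> String {
    provided.unwrap_or_else(|| nanoid::nanoid!())
}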
@@ -1,5 +1,12 @@
|
||||
pub mod credential_3rd;
|
||||
pub mod crypto;
|
||||
|
||||
pub mod bangumi;
|
||||
pub mod downloaders;
|
||||
pub mod downloads;
|
||||
pub mod episodes;
|
||||
pub mod feeds;
|
||||
pub mod subscriber_tasks;
|
||||
pub mod subscribers;
|
||||
pub mod subscription_bangumi;
|
||||
pub mod subscription_episode;
|
||||
pub mod subscriptions;
|
||||
|
||||
@@ -267,7 +267,6 @@ where
|
||||
Box::new(
|
||||
move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
|
||||
let field_name = context.field().name();
|
||||
tracing::warn!("field_name: {:?}", field_name);
|
||||
if field_name == entity_create_one_mutation_field_name.as_str()
|
||||
|| field_name == entity_create_batch_mutation_field_name.as_str()
|
||||
{
|
||||
@@ -320,6 +319,7 @@ where
|
||||
}
|
||||
|
||||
pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
|
||||
restrict_subscriber_for_entity::<subscribers::Entity>(context, &subscribers::Column::Id);
|
||||
for column in subscribers::Column::iter() {
|
||||
if !matches!(column, subscribers::Column::Id) {
|
||||
let key = get_entity_column_key::<subscribers::Entity>(context, &column);
|
||||
|
||||
20
apps/recorder/src/graphql/domains/subscription_bangumi.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use seaography::{Builder as SeaographyBuilder, BuilderContext};
|
||||
|
||||
use crate::{
|
||||
graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_bangumi,
|
||||
};
|
||||
|
||||
pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
|
||||
restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
|
||||
context,
|
||||
&subscription_bangumi::Column::SubscriberId,
|
||||
);
|
||||
}
|
||||
|
||||
pub fn register_subscription_bangumi_to_schema_builder(
|
||||
mut builder: SeaographyBuilder,
|
||||
) -> SeaographyBuilder {
|
||||
seaography::register_entity!(builder, subscription_bangumi);
|
||||
|
||||
builder
|
||||
}
|
||||
20
apps/recorder/src/graphql/domains/subscription_episode.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use seaography::{Builder as SeaographyBuilder, BuilderContext};
|
||||
|
||||
use crate::{
|
||||
graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_episode,
|
||||
};
|
||||
|
||||
pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
|
||||
restrict_subscriber_for_entity::<subscription_episode::Entity>(
|
||||
context,
|
||||
&subscription_episode::Column::SubscriberId,
|
||||
);
|
||||
}
|
||||
|
||||
pub fn register_subscription_episode_to_schema_builder(
|
||||
mut builder: SeaographyBuilder,
|
||||
) -> SeaographyBuilder {
|
||||
seaography::register_entity!(builder, subscription_episode);
|
||||
|
||||
builder
|
||||
}
|
||||
@@ -3,13 +3,16 @@ use std::sync::Arc;
|
||||
use async_graphql::dynamic::{FieldValue, TypeRef};
|
||||
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
|
||||
use seaography::{
|
||||
Builder as SeaographyBuilder, EntityObjectBuilder, EntityQueryFieldBuilder,
|
||||
Builder as SeaographyBuilder, BuilderContext, EntityObjectBuilder, EntityQueryFieldBuilder,
|
||||
get_filter_conditions,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
errors::RecorderError,
|
||||
graphql::infra::custom::generate_entity_filter_mutation_field,
|
||||
graphql::{
|
||||
domains::subscribers::restrict_subscriber_for_entity,
|
||||
infra::custom::generate_entity_filter_mutation_field,
|
||||
},
|
||||
models::{
|
||||
subscriber_tasks,
|
||||
subscriptions::{self, SubscriptionTrait},
|
||||
@@ -17,9 +20,19 @@ use crate::{
|
||||
task::SubscriberTask,
|
||||
};
|
||||
|
||||
pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) {
|
||||
restrict_subscriber_for_entity::<subscriptions::Entity>(
|
||||
context,
|
||||
&subscriptions::Column::SubscriberId,
|
||||
);
|
||||
}
|
||||
|
||||
pub fn register_subscriptions_to_schema_builder(
|
||||
mut builder: SeaographyBuilder,
|
||||
) -> SeaographyBuilder {
|
||||
builder.register_enumeration::<subscriptions::SubscriptionCategory>();
|
||||
seaography::register_entity!(builder, subscriptions);
|
||||
|
||||
let context = builder.context;
|
||||
|
||||
let entity_object_builder = EntityObjectBuilder { context };
|
||||
|
||||
@@ -7,10 +7,9 @@ use seaography::{BuilderContext, SeaResult};

use crate::{
    app::AppContextTrait,
    graphql::infra::util::{get_column_key, get_entity_key},
    models::credential_3rd,
};

fn register_crypto_column_input_conversion_to_schema_context<T>(
pub fn register_crypto_column_input_conversion_to_schema_context<T>(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
    column: &T::Column,
@@ -37,7 +36,7 @@ fn register_crypto_column_input_conversion_to_schema_context<T>(
    );
}

fn register_crypto_column_output_conversion_to_schema_context<T>(
pub fn register_crypto_column_output_conversion_to_schema_context<T>(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
    column: &T::Column,
@@ -68,39 +67,3 @@ fn register_crypto_column_output_conversion_to_schema_context<T>(
        ),
    );
}

pub fn register_crypto_to_schema_context(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
) {
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Cookies,
    );
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Username,
    );
    register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Password,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Cookies,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx.clone(),
        &credential_3rd::Column::Username,
    );
    register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
        context,
        ctx,
        &credential_3rd::Column::Password,
    );
}
@@ -1,3 +1,4 @@
pub mod crypto;
pub mod custom;
pub mod json;
pub mod util;
@@ -8,17 +8,36 @@ use crate::{
    app::AppContextTrait,
    graphql::{
        domains::{
            credential_3rd::register_credential3rd_to_schema_builder,
            crypto::register_crypto_to_schema_context,
            bangumi::{register_bangumi_to_schema_builder, register_bangumi_to_schema_context},
            credential_3rd::{
                register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context,
            },
            downloaders::{
                register_downloaders_to_schema_builder, register_downloaders_to_schema_context,
            },
            downloads::{
                register_downloads_to_schema_builder, register_downloads_to_schema_context,
            },
            episodes::{register_episodes_to_schema_builder, register_episodes_to_schema_context},
            feeds::{register_feeds_to_schema_builder, register_feeds_to_schema_context},
            subscriber_tasks::{
                register_subscriber_tasks_to_schema_builder,
                register_subscriber_tasks_to_schema_context,
            },
            subscribers::{
                register_subscribers_to_schema_builder, register_subscribers_to_schema_context,
                restrict_subscriber_for_entity,
            },
            subscriptions::register_subscriptions_to_schema_builder,
            subscription_bangumi::{
                register_subscription_bangumi_to_schema_builder,
                register_subscription_bangumi_to_schema_context,
            },
            subscription_episode::{
                register_subscription_episode_to_schema_builder,
                register_subscription_episode_to_schema_context,
            },
            subscriptions::{
                register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context,
            },
        },
        infra::json::register_jsonb_input_filter_to_schema_builder,
    },
@@ -31,7 +50,6 @@ pub fn build_schema(
    depth: Option<usize>,
    complexity: Option<usize>,
) -> Result<Schema, SchemaError> {
    use crate::models::*;
    let database = app_ctx.db().as_ref().clone();

    let context = CONTEXT.get_or_init(|| {
@@ -39,45 +57,17 @@ pub fn build_schema(

        {
            // domains
            register_feeds_to_schema_context(&mut context);
            register_subscribers_to_schema_context(&mut context);

            {
                restrict_subscriber_for_entity::<downloaders::Entity>(
                    &mut context,
                    &downloaders::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<downloads::Entity>(
                    &mut context,
                    &downloads::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<episodes::Entity>(
                    &mut context,
                    &episodes::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<subscriptions::Entity>(
                    &mut context,
                    &subscriptions::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<subscribers::Entity>(
                    &mut context,
                    &subscribers::Column::Id,
                );
                restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
                    &mut context,
                    &subscription_bangumi::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<subscription_episode::Entity>(
                    &mut context,
                    &subscription_episode::Column::SubscriberId,
                );
                restrict_subscriber_for_entity::<credential_3rd::Entity>(
                    &mut context,
                    &credential_3rd::Column::SubscriberId,
                );
            }

            register_crypto_to_schema_context(&mut context, app_ctx.clone());
            register_subscriptions_to_schema_context(&mut context);
            register_subscriber_tasks_to_schema_context(&mut context);
            register_credential3rd_to_schema_context(&mut context, app_ctx.clone());
            register_downloaders_to_schema_context(&mut context);
            register_downloads_to_schema_context(&mut context);
            register_episodes_to_schema_context(&mut context);
            register_subscription_bangumi_to_schema_context(&mut context);
            register_subscription_episode_to_schema_context(&mut context);
            register_bangumi_to_schema_context(&mut context);
        }
        context
    });
@@ -91,32 +81,16 @@ pub fn build_schema(
    {
        // domains
        builder = register_subscribers_to_schema_builder(builder);

        seaography::register_entities!(
            builder,
            [
                bangumi,
                downloaders,
                downloads,
                episodes,
                subscription_bangumi,
                subscription_episode,
                subscriptions,
                credential_3rd
            ]
        );

        {
            builder.register_enumeration::<downloads::DownloadStatus>();
            builder.register_enumeration::<subscriptions::SubscriptionCategory>();
            builder.register_enumeration::<downloaders::DownloaderCategory>();
            builder.register_enumeration::<downloads::DownloadMime>();
            builder.register_enumeration::<credential_3rd::Credential3rdType>();
        }

        builder = register_feeds_to_schema_builder(builder);
        builder = register_episodes_to_schema_builder(builder);
        builder = register_subscription_bangumi_to_schema_builder(builder);
        builder = register_subscription_episode_to_schema_builder(builder);
        builder = register_downloaders_to_schema_builder(builder);
        builder = register_downloads_to_schema_builder(builder);
        builder = register_subscriptions_to_schema_builder(builder);
        builder = register_credential3rd_to_schema_builder(builder);
        builder = register_subscriber_tasks_to_schema_builder(builder);
        builder = register_bangumi_to_schema_builder(builder);
    }

    let schema = builder.schema_builder();
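To make the refactor above easier to follow, here is a minimal sketch (not part of the diff) of the per-domain registration pattern that `build_schema` now delegates to; `my_domain` and its `SubscriberId` column are hypothetical placeholders following the same shape as the real domains.

```rust
// Hypothetical sketch of the per-domain registration pattern used above;
// `my_domain` is a made-up entity module, not part of this change set.
pub fn register_my_domain_to_schema_context(context: &mut BuilderContext) {
    // Scope every query and mutation on this entity to the current subscriber.
    restrict_subscriber_for_entity::<my_domain::Entity>(
        context,
        &my_domain::Column::SubscriberId,
    );
}

pub fn register_my_domain_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    // Expose the entity's generated queries and mutations on the GraphQL schema.
    seaography::register_entity!(builder, my_domain);
    builder
}
```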
@@ -21,6 +21,7 @@ pub mod errors;
pub mod extract;
pub mod graphql;
pub mod logger;
pub mod media;
pub mod message;
pub mod migrations;
pub mod models;
105
apps/recorder/src/media/config.rs
Normal file
@@ -0,0 +1,105 @@
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum AutoOptimizeImageFormat {
    #[serde(rename = "image/webp")]
    Webp,
    #[serde(rename = "image/avif")]
    Avif,
    #[serde(rename = "image/jxl")]
    Jxl,
}

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeWebpOptions {
    pub quality: Option<f32>,
}

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeAvifOptions {
    pub quality: Option<u8>,
    pub speed: Option<u8>,
    pub threads: Option<u8>,
}

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeJxlOptions {
    pub quality: Option<f32>,
    pub speed: Option<u8>,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(tag = "mime_type")]
pub enum EncodeImageOptions {
    #[serde(rename = "image/webp")]
    Webp(EncodeWebpOptions),
    #[serde(rename = "image/avif")]
    Avif(EncodeAvifOptions),
    #[serde(rename = "image/jxl")]
    Jxl(EncodeJxlOptions),
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MediaConfig {
    #[serde(default = "default_webp_quality")]
    pub webp_quality: f32,
    #[serde(default = "default_avif_quality")]
    pub avif_quality: u8,
    #[serde(default = "default_avif_speed")]
    pub avif_speed: u8,
    #[serde(default = "default_avif_threads")]
    pub avif_threads: u8,
    #[serde(default = "default_jxl_quality")]
    pub jxl_quality: f32,
    #[serde(default = "default_jxl_speed")]
    pub jxl_speed: u8,
    #[serde(default = "default_auto_optimize_formats")]
    pub auto_optimize_formats: Vec<AutoOptimizeImageFormat>,
}

impl Default for MediaConfig {
    fn default() -> Self {
        Self {
            webp_quality: default_webp_quality(),
            avif_quality: default_avif_quality(),
            avif_speed: default_avif_speed(),
            avif_threads: default_avif_threads(),
            jxl_quality: default_jxl_quality(),
            jxl_speed: default_jxl_speed(),
            auto_optimize_formats: default_auto_optimize_formats(),
        }
    }
}

fn default_webp_quality() -> f32 {
    80.0
}

fn default_avif_quality() -> u8 {
    80
}

fn default_avif_speed() -> u8 {
    6
}

fn default_avif_threads() -> u8 {
    1
}

fn default_jxl_quality() -> f32 {
    80.0
}

fn default_jxl_speed() -> u8 {
    7
}

fn default_auto_optimize_formats() -> Vec<AutoOptimizeImageFormat> {
    vec![
        AutoOptimizeImageFormat::Webp,
        // AutoOptimizeImageFormat::Avif, // TOO SLOW */
        #[cfg(feature = "jxl")]
        AutoOptimizeImageFormat::Jxl,
    ]
}
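A small usage sketch (not from the diff) showing how the serde defaults above behave; it assumes `serde_json` is available in the workspace.

```rust
// Hypothetical sketch: every field of MediaConfig falls back to its
// `default_*` function when omitted from the input document.
fn media_config_defaults_example() -> serde_json::Result<()> {
    let cfg: MediaConfig = serde_json::from_str("{}")?;
    assert_eq!(cfg.webp_quality, 80.0);
    assert_eq!(cfg.avif_speed, 6);

    // Overriding a single field keeps the remaining defaults intact.
    let cfg: MediaConfig = serde_json::from_str(r#"{"jxl_speed": 9}"#)?;
    assert_eq!(cfg.jxl_speed, 9);
    Ok(())
}
```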
8
apps/recorder/src/media/mod.rs
Normal file
@@ -0,0 +1,8 @@
mod config;
mod service;

pub use config::{
    AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
    EncodeWebpOptions, MediaConfig,
};
pub use service::MediaService;
199
apps/recorder/src/media/service.rs
Normal file
@@ -0,0 +1,199 @@
use std::io::Cursor;

use bytes::Bytes;
use image::{GenericImageView, ImageEncoder, ImageReader, codecs::avif::AvifEncoder};
use quirks_path::Path;
use snafu::ResultExt;

use crate::{
    errors::{RecorderError, RecorderResult},
    media::{EncodeAvifOptions, EncodeJxlOptions, EncodeWebpOptions, MediaConfig},
};

#[derive(Debug)]
pub struct MediaService {
    pub config: MediaConfig,
}

impl MediaService {
    pub async fn from_config(config: MediaConfig) -> RecorderResult<Self> {
        Ok(Self { config })
    }

    pub fn is_legacy_image_format(&self, ext: &str) -> bool {
        matches!(ext, "jpeg" | "jpg" | "png")
    }

    pub async fn optimize_image_to_webp(
        &self,
        path: impl AsRef<Path>,
        data: impl Into<Bytes>,
        options: Option<EncodeWebpOptions>,
    ) -> RecorderResult<Bytes> {
        let quality = options
            .and_then(|o| o.quality)
            .unwrap_or(self.config.webp_quality);

        let data = data.into();

        tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
            let cursor = Cursor::new(data);
            let image_reader = ImageReader::new(cursor).with_guessed_format()?;

            let img = image_reader.decode()?;

            let (width, height) = (img.width(), img.height());

            let color = img.color();

            let webp_data = if color.has_alpha() {
                let rgba_image = img.into_rgba8();

                let encoder = webp::Encoder::from_rgba(&rgba_image, width, height);

                encoder.encode(quality)
            } else {
                let rgba_image = img.into_rgb8();

                let encoder = webp::Encoder::from_rgb(&rgba_image, width, height);

                encoder.encode(quality)
            };

            Ok(Bytes::from(webp_data.to_vec()))
        })
        .await
        .with_whatever_context::<_, String, RecorderError>(|_| {
            format!(
                "failed to spawn blocking task to optimize legacy image to webp: {}",
                path.as_ref().display()
            )
        })?
    }

    pub async fn optimize_image_to_avif(
        &self,
        path: impl AsRef<Path>,
        data: Bytes,
        options: Option<EncodeAvifOptions>,
    ) -> RecorderResult<Bytes> {
        let quality = options
            .as_ref()
            .and_then(|o| o.quality)
            .unwrap_or(self.config.avif_quality);
        let speed = options
            .as_ref()
            .and_then(|o| o.speed)
            .unwrap_or(self.config.avif_speed);
        let threads = options
            .as_ref()
            .and_then(|o| o.threads)
            .unwrap_or(self.config.avif_threads);

        tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
            let mut buf = vec![];

            {
                let cursor = Cursor::new(data);
                let image_reader = ImageReader::new(cursor).with_guessed_format()?;

                let img = image_reader.decode()?;

                let (width, height) = img.dimensions();
                let color_type = img.color();
                let encoder = AvifEncoder::new_with_speed_quality(&mut buf, speed, quality)
                    .with_num_threads(Some(threads as usize));

                encoder.write_image(img.as_bytes(), width, height, color_type.into())?;
            }

            Ok(Bytes::from(buf))
        })
        .await
        .with_whatever_context::<_, String, RecorderError>(|_| {
            format!(
                "failed to spawn blocking task to optimize legacy image to avif: {}",
                path.as_ref().display()
            )
        })?
    }

    #[cfg(feature = "jxl")]
    pub async fn optimize_image_to_jxl(
        &self,
        path: impl AsRef<Path>,
        data: Bytes,
        options: Option<EncodeJxlOptions>,
    ) -> RecorderResult<Bytes> {
        let quality = options
            .as_ref()
            .and_then(|o| o.quality)
            .unwrap_or(self.config.jxl_quality);
        let speed = options
            .as_ref()
            .and_then(|o| o.speed)
            .unwrap_or(self.config.jxl_speed);
        tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
            use jpegxl_rs::encode::{ColorEncoding, EncoderResult, EncoderSpeed};
            let cursor = Cursor::new(data);
            let image_reader = ImageReader::new(cursor).with_guessed_format()?;

            let image = image_reader.decode()?;
            let (width, height) = image.dimensions();

            let color = image.color();
            let has_alpha = color.has_alpha();
            let libjxl_speed = {
                match speed {
                    0 | 1 => EncoderSpeed::Lightning,
                    2 => EncoderSpeed::Thunder,
                    3 => EncoderSpeed::Falcon,
                    4 => EncoderSpeed::Cheetah,
                    5 => EncoderSpeed::Hare,
                    6 => EncoderSpeed::Wombat,
                    7 => EncoderSpeed::Squirrel,
                    8 => EncoderSpeed::Kitten,
                    _ => EncoderSpeed::Tortoise,
                }
            };

            let mut encoder_builder = jpegxl_rs::encoder_builder()
                .lossless(false)
                .has_alpha(has_alpha)
                .color_encoding(ColorEncoding::Srgb)
                .speed(libjxl_speed)
                .jpeg_quality(quality)
                .build()?;

            let buffer: EncoderResult<u8> = if color.has_alpha() {
                let sample = image.into_rgba8();
                encoder_builder.encode(&sample, width, height)?
            } else {
                let sample = image.into_rgb8();
                encoder_builder.encode(&sample, width, height)?
            };

            Ok(Bytes::from(buffer.data))
        })
        .await
        .with_whatever_context::<_, String, RecorderError>(|_| {
            format!(
"failed to spawn blocking task to optimize legacy image to avif: {}",
|
||||
path.as_ref().display()
|
||||
)
|
||||
})?
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "jxl"))]
|
||||
pub async fn optimize_image_to_jxl(
|
||||
&self,
|
||||
_path: impl AsRef<Path>,
|
||||
_data: Bytes,
|
||||
_options: Option<EncodeJxlOptions>,
|
||||
) -> RecorderResult<Bytes> {
|
||||
Err(RecorderError::Whatever {
|
||||
message: "jxl feature is not enabled".to_string(),
|
||||
source: None.into(),
|
||||
})
|
||||
}
|
||||
}
|
||||
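A hedged caller sketch (not part of the diff) for the service above; the file name and byte source are made up, and only the `MediaService` API defined in this file is used.

```rust
// Hypothetical usage of MediaService: re-encode a legacy poster as WebP using
// the configured default quality (pass Some(options) to override per call).
async fn optimize_poster_example(
    service: &MediaService,
    poster_bytes: bytes::Bytes,
) -> RecorderResult<bytes::Bytes> {
    if service.is_legacy_image_format("jpg") {
        service
            .optimize_image_to_webp("posters/example.jpg", poster_bytes, None)
            .await
    } else {
        Ok(poster_bytes)
    }
}
```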
@@ -52,8 +52,12 @@ pub enum Bangumi {
    RssLink,
    PosterLink,
    OriginPosterLink,
    /**
     * @deprecated
     */
    SavePath,
    Homepage,
    BangumiType,
}

#[derive(DeriveIden)]
@@ -75,6 +79,9 @@ pub enum Episodes {
    BangumiId,
    SubscriberId,
    DownloadId,
    /**
     * @deprecated
     */
    SavePath,
    Resolution,
    Season,
@@ -86,7 +93,11 @@ pub enum Episodes {
    Homepage,
    Subtitle,
    Source,
    Extra,
    EpisodeType,
    EnclosureTorrentLink,
    EnclosureMagnetLink,
    EnclosurePubDate,
    EnclosureContentLength,
}

#[derive(DeriveIden)]
@@ -149,6 +160,17 @@ pub enum Credential3rd {
    UserAgent,
}

#[derive(DeriveIden)]
pub enum Feeds {
    Table,
    Id,
    Token,
    FeedType,
    FeedSource,
    SubscriberId,
    SubscriptionId,
}

macro_rules! create_postgres_enum_for_active_enum {
    ($manager: expr, $active_enum: expr, $($enum_value:expr),+) => {
        {

@@ -95,8 +95,8 @@ impl MigrationTrait for Migration {
                        DownloadMimeEnum,
                        DownloadMime::iden_values(),
                    ))
                    .col(big_unsigned(Downloads::AllSize))
                    .col(big_unsigned(Downloads::CurrSize))
                    .col(big_integer(Downloads::AllSize))
                    .col(big_integer(Downloads::CurrSize))
                    .col(text(Downloads::Url))
                    .col(text_null(Downloads::Homepage))
                    .col(text_null(Downloads::SavePath))
95
apps/recorder/src/migrations/m20250622_015618_feeds.rs
Normal file
@@ -0,0 +1,95 @@
|
||||
use async_trait::async_trait;
|
||||
use sea_orm_migration::{prelude::*, schema::*};
|
||||
|
||||
use crate::{
|
||||
migrations::defs::{
|
||||
CustomSchemaManagerExt, Feeds, GeneralIds, Subscribers, Subscriptions, table_auto_z,
|
||||
},
|
||||
models::feeds::{FeedSource, FeedSourceEnum, FeedType, FeedTypeEnum},
|
||||
};
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
create_postgres_enum_for_active_enum!(manager, FeedTypeEnum, FeedType::Rss).await?;
|
||||
create_postgres_enum_for_active_enum!(
|
||||
manager,
|
||||
FeedSourceEnum,
|
||||
FeedSource::SubscriptionEpisode
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_table(
|
||||
table_auto_z(Feeds::Table)
|
||||
.col(pk_auto(Feeds::Id))
|
||||
.col(text(Feeds::Token))
|
||||
.col(enumeration(
|
||||
Feeds::FeedType,
|
||||
FeedTypeEnum,
|
||||
FeedType::iden_values(),
|
||||
))
|
||||
.col(
|
||||
enumeration(Feeds::FeedSource, FeedSourceEnum, FeedSource::iden_values())
|
||||
.not_null(),
|
||||
)
|
||||
.col(integer_null(Feeds::SubscriberId))
|
||||
.col(integer_null(Feeds::SubscriptionId))
|
||||
.index(
|
||||
Index::create()
|
||||
.if_not_exists()
|
||||
.name("idx_feeds_token")
|
||||
.table(Feeds::Table)
|
||||
.col(Feeds::Token)
|
||||
.unique(),
|
||||
)
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("fk_feeds_subscriber_id")
|
||||
.from(Feeds::Table, Feeds::SubscriberId)
|
||||
.to(Subscribers::Table, Subscribers::Id)
|
||||
.on_update(ForeignKeyAction::Cascade)
|
||||
.on_delete(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("fk_feeds_subscription_id")
|
||||
.from(Feeds::Table, Feeds::SubscriptionId)
|
||||
.to(Subscriptions::Table, Subscriptions::Id)
|
||||
.on_update(ForeignKeyAction::Cascade)
|
||||
.on_delete(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_postgres_auto_update_ts_trigger_for_col(Feeds::Table, GeneralIds::UpdatedAt)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_postgres_auto_update_ts_trigger_for_col(Feeds::Table, GeneralIds::UpdatedAt)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.drop_table(Table::drop().if_exists().table(Feeds::Table).to_owned())
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.drop_postgres_enum_for_active_enum(FeedTypeEnum)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.drop_postgres_enum_for_active_enum(FeedSourceEnum)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,133 @@
|
||||
use async_trait::async_trait;
|
||||
use sea_orm_migration::{prelude::*, schema::*};
|
||||
|
||||
use crate::{
|
||||
migrations::defs::{Bangumi, CustomSchemaManagerExt, Episodes},
|
||||
models::{
|
||||
bangumi::{BangumiType, BangumiTypeEnum},
|
||||
episodes::{EpisodeType, EpisodeTypeEnum},
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan).await?;
|
||||
|
||||
{
|
||||
create_postgres_enum_for_active_enum!(manager, BangumiTypeEnum, BangumiType::Mikan)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Bangumi::Table)
|
||||
.add_column_if_not_exists(enumeration_null(
|
||||
Bangumi::BangumiType,
|
||||
BangumiTypeEnum,
|
||||
BangumiType::iden_values(),
|
||||
))
|
||||
.drop_column(Bangumi::SavePath)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.exec_stmt(
|
||||
UpdateStatement::new()
|
||||
.table(Bangumi::Table)
|
||||
.value(
|
||||
Bangumi::BangumiType,
|
||||
BangumiType::Mikan.as_enum(BangumiTypeEnum),
|
||||
)
|
||||
.and_where(Expr::col(Bangumi::BangumiType).is_null())
|
||||
.and_where(Expr::col(Bangumi::MikanBangumiId).is_not_null())
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Bangumi::Table)
|
||||
.modify_column(enumeration(
|
||||
Bangumi::BangumiType,
|
||||
BangumiTypeEnum,
|
||||
BangumiType::iden_values(),
|
||||
))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
{
|
||||
create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Episodes::Table)
|
||||
.add_column_if_not_exists(enumeration_null(
|
||||
Episodes::EpisodeType,
|
||||
EpisodeTypeEnum,
|
||||
EpisodeType::enum_type_name(),
|
||||
))
|
||||
.add_column_if_not_exists(text_null(Episodes::EnclosureMagnetLink))
|
||||
.add_column_if_not_exists(text_null(Episodes::EnclosureTorrentLink))
|
||||
.add_column_if_not_exists(timestamp_with_time_zone_null(
|
||||
Episodes::EnclosurePubDate,
|
||||
))
|
||||
.add_column_if_not_exists(big_integer_null(
|
||||
Episodes::EnclosureContentLength,
|
||||
))
|
||||
.drop_column(Episodes::SavePath)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.exec_stmt(
|
||||
UpdateStatement::new()
|
||||
.table(Episodes::Table)
|
||||
.value(
|
||||
Episodes::EpisodeType,
|
||||
EpisodeType::Mikan.as_enum(EpisodeTypeEnum),
|
||||
)
|
||||
.and_where(Expr::col(Episodes::EpisodeType).is_null())
|
||||
.and_where(Expr::col(Episodes::MikanEpisodeId).is_not_null())
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Episodes::Table)
|
||||
.modify_column(enumeration(
|
||||
Episodes::EpisodeType,
|
||||
EpisodeTypeEnum,
|
||||
EpisodeType::enum_type_name(),
|
||||
))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_postgres_enum_for_active_enum(BangumiTypeEnum)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.drop_postgres_enum_for_active_enum(EpisodeTypeEnum)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,8 @@ pub mod m20240224_082543_add_downloads;
pub mod m20241231_000001_auth;
pub mod m20250501_021523_credential_3rd;
pub mod m20250520_021135_subscriber_tasks;
pub mod m20250622_015618_feeds;
pub mod m20250622_020819_bangumi_and_episode_type;

pub struct Migrator;

@@ -20,6 +22,8 @@ impl MigratorTrait for Migrator {
            Box::new(m20241231_000001_auth::Migration),
            Box::new(m20250501_021523_credential_3rd::Migration),
            Box::new(m20250520_021135_subscriber_tasks::Migration),
            Box::new(m20250622_015618_feeds::Migration),
            Box::new(m20250622_020819_bangumi_and_episode_type::Migration),
        ]
    }
}
@@ -17,7 +17,7 @@ use crate::{
|
||||
MikanBangumiHash, MikanBangumiMeta, build_mikan_bangumi_subscription_rss_url,
|
||||
scrape_mikan_poster_meta_from_image_url,
|
||||
},
|
||||
origin::extract_season_from_title_body,
|
||||
origin::{BangumiComps, OriginCompTrait},
|
||||
},
|
||||
};
|
||||
|
||||
@@ -29,7 +29,14 @@ pub struct BangumiFilter {
|
||||
pub group: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
|
||||
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "bangumi_type")]
|
||||
pub enum BangumiType {
|
||||
#[sea_orm(string_value = "mikan")]
|
||||
Mikan,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "bangumi")]
|
||||
pub struct Model {
|
||||
#[sea_orm(default_expr = "Expr::current_timestamp()")]
|
||||
@@ -39,6 +46,7 @@ pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub mikan_bangumi_id: Option<String>,
|
||||
pub bangumi_type: BangumiType,
|
||||
pub subscriber_id: i32,
|
||||
pub display_name: String,
|
||||
pub origin_name: String,
|
||||
@@ -50,7 +58,6 @@ pub struct Model {
|
||||
pub rss_link: Option<String>,
|
||||
pub poster_link: Option<String>,
|
||||
pub origin_poster_link: Option<String>,
|
||||
pub save_path: Option<String>,
|
||||
pub homepage: Option<String>,
|
||||
}
|
||||
|
||||
@@ -121,9 +128,13 @@ impl ActiveModel {
|
||||
_subscription_id: i32,
|
||||
) -> RecorderResult<Self> {
|
||||
let mikan_client = ctx.mikan();
|
||||
let storage_service = ctx.storage();
|
||||
let mikan_base_url = mikan_client.base_url();
|
||||
let (_, season_raw, season_index) = extract_season_from_title_body(&meta.bangumi_title);
|
||||
let season_comp = BangumiComps::parse_comp(&meta.bangumi_title)
|
||||
.ok()
|
||||
.map(|(_, s)| s)
|
||||
.and_then(|s| s.season);
|
||||
let season_index = season_comp.as_ref().map(|s| s.num).unwrap_or(1);
|
||||
let season_raw = season_comp.map(|s| s.source.to_string());
|
||||
|
||||
let rss_url = build_mikan_bangumi_subscription_rss_url(
|
||||
mikan_base_url.clone(),
|
||||
@@ -132,12 +143,8 @@ impl ActiveModel {
|
||||
);
|
||||
|
||||
let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src.clone() {
|
||||
let poster_meta = scrape_mikan_poster_meta_from_image_url(
|
||||
mikan_client,
|
||||
storage_service,
|
||||
origin_poster_src,
|
||||
)
|
||||
.await?;
|
||||
let poster_meta =
|
||||
scrape_mikan_poster_meta_from_image_url(ctx, origin_poster_src).await?;
|
||||
poster_meta.poster_src
|
||||
} else {
|
||||
None
|
||||
@@ -156,6 +163,7 @@ impl ActiveModel {
|
||||
origin_poster_link: ActiveValue::Set(meta.origin_poster_src.map(|src| src.to_string())),
|
||||
homepage: ActiveValue::Set(Some(meta.homepage.to_string())),
|
||||
rss_link: ActiveValue::Set(Some(rss_url.to_string())),
|
||||
bangumi_type: ActiveValue::Set(BangumiType::Mikan),
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
@@ -232,6 +240,7 @@ impl Model {
|
||||
Column::OriginName,
|
||||
Column::Fansub,
|
||||
Column::PosterLink,
|
||||
Column::OriginPosterLink,
|
||||
Column::Season,
|
||||
Column::SeasonRaw,
|
||||
Column::RssLink,
|
||||
|
||||
@@ -52,8 +52,8 @@ pub struct Model {
    pub status: DownloadStatus,
    pub mime: DownloadMime,
    pub url: String,
    pub all_size: Option<u64>,
    pub curr_size: Option<u64>,
    pub all_size: Option<i64>,
    pub curr_size: Option<i64>,
    pub homepage: Option<String>,
    pub save_path: Option<String>,
}
@@ -9,11 +9,19 @@ use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::RecorderResult,
|
||||
extract::{
|
||||
bittorrent::EpisodeEnclosureMeta,
|
||||
mikan::{MikanEpisodeHash, MikanEpisodeMeta, build_mikan_episode_homepage_url},
|
||||
origin::extract_episode_meta_from_origin_name,
|
||||
origin::{OriginCompTrait, OriginNameRoot},
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
|
||||
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "episode_type")]
|
||||
pub enum EpisodeType {
|
||||
#[sea_orm(string_value = "mikan")]
|
||||
Mikan,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "episodes")]
|
||||
pub struct Model {
|
||||
@@ -25,11 +33,15 @@ pub struct Model {
|
||||
pub id: i32,
|
||||
#[sea_orm(indexed)]
|
||||
pub mikan_episode_id: Option<String>,
|
||||
pub enclosure_torrent_link: Option<String>,
|
||||
pub enclosure_magnet_link: Option<String>,
|
||||
pub enclosure_pub_date: Option<DateTimeUtc>,
|
||||
pub enclosure_content_length: Option<i64>,
|
||||
pub episode_type: EpisodeType,
|
||||
pub origin_name: String,
|
||||
pub display_name: String,
|
||||
pub bangumi_id: i32,
|
||||
pub subscriber_id: i32,
|
||||
pub save_path: Option<String>,
|
||||
pub resolution: Option<String>,
|
||||
pub season: i32,
|
||||
pub season_raw: Option<String>,
|
||||
@@ -122,9 +134,10 @@ impl ActiveModel {
|
||||
ctx: &dyn AppContextTrait,
|
||||
bangumi: &bangumi::Model,
|
||||
episode: MikanEpisodeMeta,
|
||||
enclosure_meta: EpisodeEnclosureMeta,
|
||||
) -> RecorderResult<Self> {
|
||||
let mikan_base_url = ctx.mikan().base_url().clone();
|
||||
let episode_extention_meta = extract_episode_meta_from_origin_name(&episode.episode_title)
|
||||
let episode_extention_meta = OriginNameRoot::parse_comp(&episode.episode_title)
|
||||
.inspect_err(|err| {
|
||||
tracing::error!(
|
||||
err = ?err,
|
||||
@@ -132,6 +145,7 @@ impl ActiveModel {
|
||||
"Failed to parse episode extension meta from episode title, skip"
|
||||
);
|
||||
})
|
||||
.map(|(_, e)| e.into_meta())
|
||||
.ok();
|
||||
let homepage = build_mikan_episode_homepage_url(mikan_base_url, &episode.mikan_episode_id);
|
||||
|
||||
@@ -148,6 +162,11 @@ impl ActiveModel {
|
||||
poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
|
||||
origin_poster_link: ActiveValue::Set(bangumi.origin_poster_link.clone()),
|
||||
episode_index: ActiveValue::Set(0),
|
||||
enclosure_torrent_link: ActiveValue::Set(enclosure_meta.torrent_link),
|
||||
enclosure_magnet_link: ActiveValue::Set(enclosure_meta.magnet_link),
|
||||
enclosure_pub_date: ActiveValue::Set(enclosure_meta.pub_date),
|
||||
enclosure_content_length: ActiveValue::Set(enclosure_meta.content_length),
|
||||
episode_type: ActiveValue::Set(EpisodeType::Mikan),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
@@ -215,14 +234,19 @@ impl Model {
|
||||
|
||||
pub async fn add_mikan_episodes_for_subscription(
|
||||
ctx: &dyn AppContextTrait,
|
||||
creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta)>,
|
||||
creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta, EpisodeEnclosureMeta)>,
|
||||
subscriber_id: i32,
|
||||
subscription_id: i32,
|
||||
) -> RecorderResult<()> {
|
||||
let db = ctx.db();
|
||||
let new_episode_active_modes: Vec<ActiveModel> = creations
|
||||
.map(|(bangumi, episode_meta)| {
|
||||
ActiveModel::from_mikan_bangumi_and_episode_meta(ctx, bangumi, episode_meta)
|
||||
.map(|(bangumi, episode_meta, enclosure_meta)| {
|
||||
ActiveModel::from_mikan_bangumi_and_episode_meta(
|
||||
ctx,
|
||||
bangumi,
|
||||
episode_meta,
|
||||
enclosure_meta,
|
||||
)
|
||||
})
|
||||
.collect::<Result<_, _>>()?;
|
||||
|
||||
@@ -233,7 +257,23 @@ impl Model {
|
||||
let new_episode_ids = Entity::insert_many(new_episode_active_modes)
|
||||
.on_conflict(
|
||||
OnConflict::columns([Column::MikanEpisodeId, Column::SubscriberId])
|
||||
.update_columns([Column::OriginName, Column::PosterLink, Column::Homepage])
|
||||
.update_columns([
|
||||
Column::OriginName,
|
||||
Column::PosterLink,
|
||||
Column::OriginPosterLink,
|
||||
Column::Homepage,
|
||||
Column::EnclosureContentLength,
|
||||
Column::EnclosurePubDate,
|
||||
Column::EnclosureTorrentLink,
|
||||
Column::EnclosureMagnetLink,
|
||||
Column::EpisodeIndex,
|
||||
Column::Subtitle,
|
||||
Column::Source,
|
||||
Column::Resolution,
|
||||
Column::Season,
|
||||
Column::SeasonRaw,
|
||||
Column::Fansub,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec_with_returning_columns(db, [Column::Id])
|
||||
|
||||
133
apps/recorder/src/models/feeds/mod.rs
Normal file
@@ -0,0 +1,133 @@
|
||||
mod registry;
|
||||
mod rss;
|
||||
mod subscription_episodes_feed;
|
||||
|
||||
use ::rss::Channel;
|
||||
use async_trait::async_trait;
|
||||
pub use registry::Feed;
|
||||
pub use rss::{RssFeedItemTrait, RssFeedTrait};
|
||||
use sea_orm::{ActiveValue, DeriveEntityModel, entity::prelude::*};
|
||||
use serde::{Deserialize, Serialize};
|
||||
pub use subscription_episodes_feed::SubscriptionEpisodesFeed;
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::{RecorderError, RecorderResult},
|
||||
};
|
||||
|
||||
#[derive(
|
||||
Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay,
|
||||
)]
|
||||
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "feed_type")]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum FeedType {
|
||||
#[sea_orm(string_value = "rss")]
|
||||
Rss,
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay,
|
||||
)]
|
||||
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "feed_source")]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum FeedSource {
|
||||
#[sea_orm(string_value = "subscription_episode")]
|
||||
SubscriptionEpisode,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "feeds")]
|
||||
pub struct Model {
|
||||
#[sea_orm(default_expr = "Expr::current_timestamp()")]
|
||||
pub created_at: DateTimeUtc,
|
||||
#[sea_orm(default_expr = "Expr::current_timestamp()")]
|
||||
pub updated_at: DateTimeUtc,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
#[sea_orm(indexed)]
|
||||
pub token: String,
|
||||
#[sea_orm(indexed)]
|
||||
pub feed_type: FeedType,
|
||||
#[sea_orm(indexed)]
|
||||
pub feed_source: FeedSource,
|
||||
pub subscriber_id: Option<i32>,
|
||||
pub subscription_id: Option<i32>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::subscriptions::Entity",
|
||||
from = "Column::SubscriptionId",
|
||||
to = "super::subscriptions::Column::Id",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
Subscription,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::subscribers::Entity",
|
||||
from = "Column::SubscriberId",
|
||||
to = "super::subscribers::Column::Id",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
Subscriber,
|
||||
}
|
||||
|
||||
impl Related<super::subscriptions::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Subscription.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::subscribers::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Subscriber.def()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
|
||||
pub enum RelatedEntity {
|
||||
#[sea_orm(entity = "super::subscribers::Entity")]
|
||||
Subscriber,
|
||||
#[sea_orm(entity = "super::subscriptions::Entity")]
|
||||
Subscription,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ActiveModelBehavior for ActiveModel {
|
||||
async fn before_save<C>(mut self, _db: &C, insert: bool) -> Result<Self, DbErr>
|
||||
where
|
||||
C: ConnectionTrait,
|
||||
{
|
||||
if insert && let ActiveValue::NotSet = self.token {
|
||||
let token = nanoid::nanoid!(10);
|
||||
self.token = ActiveValue::Set(token);
|
||||
}
|
||||
Ok(self)
|
||||
}
|
||||
}
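A minimal sketch (not from the diff) of the token behaviour defined in `before_save` above; the subscription id is a made-up value and the sea-orm imports are assumed to be in scope.

```rust
// Hypothetical sketch: inserting a feed without an explicit token lets
// `before_save` fill in a 10-character nanoid before the row is written.
async fn insert_feed_example<C: sea_orm::ConnectionTrait>(db: &C) -> Result<(), DbErr> {
    use sea_orm::ActiveModelTrait;

    let feed = ActiveModel {
        feed_type: ActiveValue::Set(FeedType::Rss),
        feed_source: ActiveValue::Set(FeedSource::SubscriptionEpisode),
        subscription_id: ActiveValue::Set(Some(1)), // hypothetical subscription id
        ..Default::default()
    };
    let inserted = feed.insert(db).await?;
    assert_eq!(inserted.token.len(), 10);
    Ok(())
}
```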
|
||||
|
||||
impl Model {
|
||||
pub async fn find_rss_feed_by_token(
|
||||
ctx: &dyn AppContextTrait,
|
||||
token: &str,
|
||||
api_base: &Url,
|
||||
) -> RecorderResult<Channel> {
|
||||
let db = ctx.db();
|
||||
|
||||
let feed_model = Entity::find()
|
||||
.filter(Column::Token.eq(token))
|
||||
.filter(Column::FeedType.eq(FeedType::Rss))
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or(RecorderError::ModelEntityNotFound {
|
||||
entity: "Feed".into(),
|
||||
})?;
|
||||
|
||||
let feed = Feed::from_model(ctx, feed_model).await?;
|
||||
|
||||
feed.into_rss_channel(ctx, api_base)
|
||||
}
|
||||
}
|
||||
65
apps/recorder/src/models/feeds/registry.rs
Normal file
@@ -0,0 +1,65 @@
use rss::Channel;
use sea_orm::{ColumnTrait, EntityTrait, JoinType, QueryFilter, QuerySelect, RelationTrait};
use url::Url;

use crate::{
    app::AppContextTrait,
    errors::{RecorderError, RecorderResult},
    models::{
        episodes,
        feeds::{self, FeedSource, RssFeedTrait, SubscriptionEpisodesFeed},
        subscription_episode, subscriptions,
    },
};

pub enum Feed {
    SubscritpionEpisodes(SubscriptionEpisodesFeed),
}

impl Feed {
    pub async fn from_model(ctx: &dyn AppContextTrait, m: feeds::Model) -> RecorderResult<Self> {
        match m.feed_source {
            FeedSource::SubscriptionEpisode => {
                let db = ctx.db();
                let (subscription, episodes) = if let Some(subscription_id) = m.subscription_id
                    && let Some(subscription) = subscriptions::Entity::find()
                        .filter(subscriptions::Column::Id.eq(subscription_id))
                        .one(db)
                        .await?
                {
                    let episodes = episodes::Entity::find()
                        .join(
                            JoinType::InnerJoin,
                            episodes::Relation::SubscriptionEpisode.def(),
                        )
                        .join(
                            JoinType::InnerJoin,
                            subscription_episode::Relation::Subscription.def(),
                        )
                        .filter(subscriptions::Column::Id.eq(subscription_id))
                        .all(db)
                        .await?;
                    (subscription, episodes)
                } else {
                    return Err(RecorderError::ModelEntityNotFound {
                        entity: "Subscription".into(),
                    });
                };

                Ok(Feed::SubscritpionEpisodes(
                    SubscriptionEpisodesFeed::from_model(m, subscription, episodes),
                ))
            }
        }
    }

    pub fn into_rss_channel(
        self,
        ctx: &dyn AppContextTrait,
        api_base: &Url,
    ) -> RecorderResult<Channel> {
        match self {
            Self::SubscritpionEpisodes(feed) => feed.into_channel(ctx, api_base),
        }
    }
}
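A hedged end-to-end sketch (not part of the diff) tying the registry above to `find_rss_feed_by_token` from `feeds/mod.rs`; the token value and base URL are made up.

```rust
// Hypothetical handler sketch: resolve a feed token to an RSS channel.
async fn serve_feed_example(ctx: &dyn AppContextTrait) -> RecorderResult<Channel> {
    let api_base = Url::parse("https://konobangu.example/api").expect("static URL is valid");
    // Internally this loads the feeds::Model, wraps it in `Feed` via
    // `Feed::from_model`, and renders it with `into_rss_channel`.
    feeds::Model::find_rss_feed_by_token(ctx, "V1StGXR8_Z", &api_base).await
}
```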
142
apps/recorder/src/models/feeds/rss.rs
Normal file
@@ -0,0 +1,142 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use downloader::bittorrent::BITTORRENT_MIME_TYPE;
|
||||
use maplit::btreemap;
|
||||
use rss::{
|
||||
Channel, ChannelBuilder, EnclosureBuilder, GuidBuilder, Item, ItemBuilder,
|
||||
extension::{ExtensionBuilder, ExtensionMap},
|
||||
};
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::{RecorderError, RecorderResult},
|
||||
};
|
||||
|
||||
pub trait RssFeedItemTrait: Sized {
|
||||
fn get_guid_value(&self) -> Cow<'_, str>;
|
||||
fn get_title(&self) -> Cow<'_, str>;
|
||||
fn get_description(&self) -> Cow<'_, str>;
|
||||
fn get_link(&self, ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>>;
|
||||
fn get_enclosure_mime(&self) -> Option<Cow<'_, str>>;
|
||||
fn get_enclosure_link(&self, ctx: &dyn AppContextTrait, api_base: &Url)
|
||||
-> Option<Cow<'_, str>>;
|
||||
fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>>;
|
||||
fn get_enclosure_content_length(&self) -> Option<i64>;
|
||||
fn into_item(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Item> {
|
||||
let enclosure_mime_type =
|
||||
self.get_enclosure_mime()
|
||||
.ok_or_else(|| RecorderError::MikanRssInvalidFieldError {
|
||||
field: "enclosure_mime_type".into(),
|
||||
source: None.into(),
|
||||
})?;
|
||||
let enclosure_link = self.get_enclosure_link(ctx, api_base).ok_or_else(|| {
|
||||
RecorderError::MikanRssInvalidFieldError {
|
||||
field: "enclosure_link".into(),
|
||||
source: None.into(),
|
||||
}
|
||||
})?;
|
||||
let enclosure_content_length = self.get_enclosure_content_length().ok_or_else(|| {
|
||||
RecorderError::MikanRssInvalidFieldError {
|
||||
field: "enclosure_content_length".into(),
|
||||
source: None.into(),
|
||||
}
|
||||
})?;
|
||||
let enclosure_pub_date = self.get_enclosure_pub_date();
|
||||
let link = self.get_link(ctx, api_base).ok_or_else(|| {
|
||||
RecorderError::MikanRssInvalidFieldError {
|
||||
field: "link".into(),
|
||||
source: None.into(),
|
||||
}
|
||||
})?;
|
||||
|
||||
let mut extensions = ExtensionMap::default();
|
||||
if enclosure_mime_type == BITTORRENT_MIME_TYPE {
|
||||
extensions.insert("torrent".to_string(), {
|
||||
let mut map = btreemap! {
|
||||
"link".to_string() => vec![
|
||||
ExtensionBuilder::default().name(
|
||||
"link"
|
||||
).value(enclosure_link.to_string()).build()
|
||||
],
|
||||
"contentLength".to_string() => vec![
|
||||
ExtensionBuilder::default().name(
|
||||
"contentLength"
|
||||
).value(enclosure_content_length.to_string()).build()
|
||||
],
|
||||
};
|
||||
if let Some(pub_date) = enclosure_pub_date {
|
||||
map.insert(
|
||||
"pubDate".to_string(),
|
||||
vec![
|
||||
ExtensionBuilder::default()
|
||||
.name("pubDate")
|
||||
.value(pub_date.to_rfc3339())
|
||||
.build(),
|
||||
],
|
||||
);
|
||||
}
|
||||
map
|
||||
});
|
||||
};
|
||||
|
||||
let enclosure = EnclosureBuilder::default()
|
||||
.mime_type(enclosure_mime_type)
|
||||
.url(enclosure_link.to_string())
|
||||
.length(enclosure_content_length.to_string())
|
||||
.build();
|
||||
|
||||
let guid = GuidBuilder::default()
|
||||
.value(self.get_guid_value())
|
||||
.permalink(false)
|
||||
.build();
|
||||
|
||||
let item = ItemBuilder::default()
|
||||
.guid(guid)
|
||||
.title(self.get_title().to_string())
|
||||
.description(self.get_description().to_string())
|
||||
.link(link.to_string())
|
||||
.enclosure(enclosure)
|
||||
.extensions(extensions)
|
||||
.build();
|
||||
|
||||
Ok(item)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait RssFeedTrait: Sized {
|
||||
type Item: RssFeedItemTrait;
|
||||
|
||||
fn get_description(&self) -> Cow<'_, str>;
|
||||
|
||||
fn get_title(&self) -> Cow<'_, str>;
|
||||
|
||||
fn get_link(&self, ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>>;
|
||||
|
||||
fn items(&self) -> impl Iterator<Item = &Self::Item>;
|
||||
|
||||
fn into_items(self) -> impl Iterator<Item = Self::Item>;
|
||||
|
||||
fn into_channel(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Channel> {
|
||||
let link = self.get_link(ctx, api_base).ok_or_else(|| {
|
||||
RecorderError::MikanRssInvalidFieldError {
|
||||
field: "link".into(),
|
||||
source: None.into(),
|
||||
}
|
||||
})?;
|
||||
|
||||
let channel = ChannelBuilder::default()
|
||||
.title(self.get_title())
|
||||
.link(link.to_string())
|
||||
.description(self.get_description())
|
||||
.items({
|
||||
self.into_items()
|
||||
.map(|item| item.into_item(ctx, api_base))
|
||||
.collect::<RecorderResult<Vec<_>>>()?
|
||||
})
|
||||
.build();
|
||||
|
||||
Ok(channel)
|
||||
}
|
||||
}
|
||||
114
apps/recorder/src/models/feeds/subscription_episodes_feed.rs
Normal file
@@ -0,0 +1,114 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use downloader::bittorrent::BITTORRENT_MIME_TYPE;
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
app::{AppContextTrait, PROJECT_NAME},
|
||||
models::{
|
||||
episodes,
|
||||
feeds::{
|
||||
self,
|
||||
rss::{RssFeedItemTrait, RssFeedTrait},
|
||||
},
|
||||
subscriptions,
|
||||
},
|
||||
web::controller,
|
||||
};
|
||||
|
||||
pub struct SubscriptionEpisodesFeed {
|
||||
pub feed: feeds::Model,
|
||||
pub subscription: subscriptions::Model,
|
||||
pub episodes: Vec<episodes::Model>,
|
||||
}
|
||||
|
||||
impl SubscriptionEpisodesFeed {
|
||||
pub fn from_model(
|
||||
feed: feeds::Model,
|
||||
subscription: subscriptions::Model,
|
||||
episodes: Vec<episodes::Model>,
|
||||
) -> Self {
|
||||
Self {
|
||||
feed,
|
||||
subscription,
|
||||
episodes,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RssFeedItemTrait for episodes::Model {
|
||||
fn get_guid_value(&self) -> Cow<'_, str> {
|
||||
Cow::Owned(format!("{PROJECT_NAME}:episode:{}", self.id))
|
||||
}
|
||||
|
||||
fn get_title(&self) -> Cow<'_, str> {
|
||||
Cow::Borrowed(&self.display_name)
|
||||
}
|
||||
|
||||
fn get_description(&self) -> Cow<'_, str> {
|
||||
Cow::Borrowed(&self.display_name)
|
||||
}
|
||||
|
||||
fn get_link(&self, _ctx: &dyn AppContextTrait, _api_base: &Url) -> Option<Cow<'_, str>> {
|
||||
self.homepage.as_deref().map(Cow::Borrowed)
|
||||
}
|
||||
|
||||
fn get_enclosure_mime(&self) -> Option<Cow<'_, str>> {
|
||||
if self.enclosure_torrent_link.is_some() {
|
||||
Some(Cow::Borrowed(BITTORRENT_MIME_TYPE))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn get_enclosure_link(
|
||||
&self,
|
||||
_ctx: &dyn AppContextTrait,
|
||||
_api_base: &Url,
|
||||
) -> Option<Cow<'_, str>> {
|
||||
self.enclosure_torrent_link.as_deref().map(Cow::Borrowed)
|
||||
}
|
||||
|
||||
fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>> {
|
||||
self.enclosure_pub_date
|
||||
}
|
||||
|
||||
fn get_enclosure_content_length(&self) -> Option<i64> {
|
||||
self.enclosure_content_length
|
||||
}
|
||||
}
|
||||
|
||||
impl RssFeedTrait for SubscriptionEpisodesFeed {
|
||||
type Item = episodes::Model;
|
||||
|
||||
fn get_description(&self) -> Cow<'_, str> {
|
||||
Cow::Owned(format!(
|
||||
"{PROJECT_NAME} - episodes of subscription {}",
|
||||
self.subscription.id
|
||||
))
|
||||
}
|
||||
|
||||
fn get_title(&self) -> Cow<'_, str> {
|
||||
Cow::Owned(format!("{PROJECT_NAME} - subscription episodes"))
|
||||
}
|
||||
|
||||
fn get_link(&self, _ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>> {
|
||||
let api_base = api_base
|
||||
.join(&format!(
|
||||
"{}/{}",
|
||||
controller::feeds::CONTROLLER_PREFIX,
|
||||
self.feed.token
|
||||
))
|
||||
.ok()?;
|
||||
Some(Cow::Owned(api_base.to_string()))
|
||||
}
|
||||
|
||||
fn items(&self) -> impl Iterator<Item = &Self::Item> {
|
||||
self.episodes.iter()
|
||||
}
|
||||
|
||||
fn into_items(self) -> impl Iterator<Item = Self::Item> {
|
||||
self.episodes.into_iter()
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,7 @@ pub mod credential_3rd;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
pub mod feeds;
pub mod query;
pub mod subscriber_tasks;
pub mod subscribers;
@@ -3,11 +3,11 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, TransactionTrait, entity::prelud
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
app::{AppContextTrait, PROJECT_NAME},
|
||||
errors::app_error::{RecorderError, RecorderResult},
|
||||
};
|
||||
|
||||
pub const SEED_SUBSCRIBER: &str = "konobangu";
|
||||
pub const SEED_SUBSCRIBER: &str = PROJECT_NAME;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
|
||||
pub struct SubscriberBangumiConfig {
|
||||
@@ -41,6 +41,10 @@ pub enum Relation {
|
||||
Auth,
|
||||
#[sea_orm(has_many = "super::credential_3rd::Entity")]
|
||||
Credential3rd,
|
||||
#[sea_orm(has_many = "super::feeds::Entity")]
|
||||
Feed,
|
||||
#[sea_orm(has_many = "super::subscriber_tasks::Entity")]
|
||||
SubscriberTask,
|
||||
}
|
||||
|
||||
impl Related<super::subscriptions::Entity> for Entity {
|
||||
@@ -79,6 +83,18 @@ impl Related<super::credential_3rd::Entity> for Entity {
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::feeds::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Feed.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::subscriber_tasks::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::SubscriberTask.def()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
|
||||
pub enum RelatedEntity {
|
||||
#[sea_orm(entity = "super::subscriptions::Entity")]
|
||||
@@ -91,6 +107,10 @@ pub enum RelatedEntity {
|
||||
Episode,
|
||||
#[sea_orm(entity = "super::credential_3rd::Entity")]
|
||||
Credential3rd,
|
||||
#[sea_orm(entity = "super::feeds::Entity")]
|
||||
Feed,
|
||||
#[sea_orm(entity = "super::subscriber_tasks::Entity")]
|
||||
SubscriberTask,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
|
||||
@@ -1,311 +0,0 @@
|
||||
use std::{fmt::Debug, sync::Arc};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::{RecorderError, RecorderResult},
|
||||
extract::mikan::{
|
||||
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(
|
||||
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
|
||||
)]
|
||||
#[sea_orm(
|
||||
rs_type = "String",
|
||||
db_type = "Enum",
|
||||
enum_name = "subscription_category"
|
||||
)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum SubscriptionCategory {
|
||||
#[sea_orm(string_value = "mikan_subscriber")]
|
||||
MikanSubscriber,
|
||||
#[sea_orm(string_value = "mikan_season")]
|
||||
MikanSeason,
|
||||
#[sea_orm(string_value = "mikan_bangumi")]
|
||||
MikanBangumi,
|
||||
#[sea_orm(string_value = "manual")]
|
||||
Manual,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "subscriptions")]
|
||||
pub struct Model {
|
||||
#[sea_orm(default_expr = "Expr::current_timestamp()")]
|
||||
pub created_at: DateTimeUtc,
|
||||
#[sea_orm(default_expr = "Expr::current_timestamp()")]
|
||||
pub updated_at: DateTimeUtc,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub display_name: String,
|
||||
pub subscriber_id: i32,
|
||||
pub category: SubscriptionCategory,
|
||||
pub source_url: String,
|
||||
pub enabled: bool,
|
||||
pub credential_id: Option<i32>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::subscribers::Entity",
|
||||
from = "Column::SubscriberId",
|
||||
to = "super::subscribers::Column::Id",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
Subscriber,
|
||||
#[sea_orm(has_many = "super::bangumi::Entity")]
|
||||
Bangumi,
|
||||
#[sea_orm(has_many = "super::episodes::Entity")]
|
||||
Episodes,
|
||||
#[sea_orm(has_many = "super::subscription_episode::Entity")]
|
||||
SubscriptionEpisode,
|
||||
#[sea_orm(has_many = "super::subscription_bangumi::Entity")]
|
||||
SubscriptionBangumi,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::credential_3rd::Entity",
|
||||
from = "Column::CredentialId",
|
||||
to = "super::credential_3rd::Column::Id",
|
||||
on_update = "Cascade",
|
||||
on_delete = "SetNull"
|
||||
)]
|
||||
Credential3rd,
|
||||
}
|
||||
|
||||
impl Related<super::subscribers::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Subscriber.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::subscription_bangumi::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::SubscriptionBangumi.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::subscription_episode::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::SubscriptionEpisode.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::bangumi::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
super::subscription_bangumi::Relation::Bangumi.def()
|
||||
}
|
||||
|
||||
fn via() -> Option<RelationDef> {
|
||||
Some(
|
||||
super::subscription_bangumi::Relation::Subscription
|
||||
.def()
|
||||
.rev(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::episodes::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
super::subscription_episode::Relation::Episode.def()
|
||||
}
|
||||
|
||||
fn via() -> Option<RelationDef> {
|
||||
Some(
|
||||
super::subscription_episode::Relation::Subscription
|
||||
.def()
|
||||
.rev(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::credential_3rd::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Credential3rd.def()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
|
||||
pub enum RelatedEntity {
|
||||
#[sea_orm(entity = "super::subscribers::Entity")]
|
||||
Subscriber,
|
||||
#[sea_orm(entity = "super::bangumi::Entity")]
|
||||
Bangumi,
|
||||
#[sea_orm(entity = "super::episodes::Entity")]
|
||||
Episode,
|
||||
#[sea_orm(entity = "super::subscription_episode::Entity")]
|
||||
SubscriptionEpisode,
|
||||
#[sea_orm(entity = "super::subscription_bangumi::Entity")]
|
||||
SubscriptionBangumi,
|
||||
#[sea_orm(entity = "super::credential_3rd::Entity")]
|
||||
Credential3rd,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
impl ActiveModel {}
|
||||
|
||||
impl Model {
|
||||
pub async fn toggle_with_ids(
|
||||
ctx: &dyn AppContextTrait,
|
||||
ids: impl Iterator<Item = i32>,
|
||||
enabled: bool,
|
||||
) -> RecorderResult<()> {
|
||||
let db = ctx.db();
|
||||
Entity::update_many()
|
||||
.col_expr(Column::Enabled, Expr::value(enabled))
|
||||
.filter(Column::Id.is_in(ids))
|
||||
.exec(db)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_with_ids(
|
||||
ctx: &dyn AppContextTrait,
|
||||
ids: impl Iterator<Item = i32>,
|
||||
) -> RecorderResult<()> {
|
||||
let db = ctx.db();
|
||||
Entity::delete_many()
|
||||
.filter(Column::Id.is_in(ids))
|
||||
.exec(db)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn find_by_id_and_subscriber_id(
|
||||
ctx: &dyn AppContextTrait,
|
||||
subscriber_id: i32,
|
||||
subscription_id: i32,
|
||||
) -> RecorderResult<Self> {
|
||||
let db = ctx.db();
|
||||
let subscription_model = Entity::find_by_id(subscription_id)
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or_else(|| RecorderError::ModelEntityNotFound {
|
||||
entity: "Subscription".into(),
|
||||
})?;
|
||||
|
||||
if subscription_model.subscriber_id != subscriber_id {
|
||||
Err(RecorderError::ModelEntityNotFound {
|
||||
entity: "Subscription".into(),
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(subscription_model)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait SubscriptionTrait: Sized + Debug {
|
||||
fn get_subscriber_id(&self) -> i32;
|
||||
|
||||
fn get_subscription_id(&self) -> i32;
|
||||
|
||||
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
|
||||
|
||||
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
|
||||
|
||||
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
|
||||
|
||||
fn try_from_model(model: &Model) -> RecorderResult<Self>;
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(tag = "category")]
|
||||
pub enum Subscription {
|
||||
#[serde(rename = "mikan_subscriber")]
|
||||
MikanSubscriber(MikanSubscriberSubscription),
|
||||
#[serde(rename = "mikan_season")]
|
||||
MikanSeason(MikanSeasonSubscription),
|
||||
#[serde(rename = "mikan_bangumi")]
|
||||
MikanBangumi(MikanBangumiSubscription),
|
||||
#[serde(rename = "manual")]
|
||||
Manual,
|
||||
}
|
||||
|
||||
impl Subscription {
|
||||
pub fn category(&self) -> SubscriptionCategory {
|
||||
match self {
|
||||
Self::MikanSubscriber(_) => SubscriptionCategory::MikanSubscriber,
|
||||
Self::MikanSeason(_) => SubscriptionCategory::MikanSeason,
|
||||
Self::MikanBangumi(_) => SubscriptionCategory::MikanBangumi,
|
||||
Self::Manual => SubscriptionCategory::Manual,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl SubscriptionTrait for Subscription {
|
||||
fn get_subscriber_id(&self) -> i32 {
|
||||
match self {
|
||||
Self::MikanSubscriber(subscription) => subscription.get_subscriber_id(),
|
||||
Self::MikanSeason(subscription) => subscription.get_subscriber_id(),
|
||||
Self::MikanBangumi(subscription) => subscription.get_subscriber_id(),
|
||||
Self::Manual => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_subscription_id(&self) -> i32 {
|
||||
match self {
|
||||
Self::MikanSubscriber(subscription) => subscription.get_subscription_id(),
|
||||
Self::MikanSeason(subscription) => subscription.get_subscription_id(),
|
||||
Self::MikanBangumi(subscription) => subscription.get_subscription_id(),
|
||||
Self::Manual => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
match self {
|
||||
Self::MikanSubscriber(subscription) => subscription.sync_feeds_incremental(ctx).await,
|
||||
Self::MikanSeason(subscription) => subscription.sync_feeds_incremental(ctx).await,
|
||||
Self::MikanBangumi(subscription) => subscription.sync_feeds_incremental(ctx).await,
|
||||
Self::Manual => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
match self {
|
||||
Self::MikanSubscriber(subscription) => subscription.sync_feeds_full(ctx).await,
|
||||
Self::MikanSeason(subscription) => subscription.sync_feeds_full(ctx).await,
|
||||
Self::MikanBangumi(subscription) => subscription.sync_feeds_full(ctx).await,
|
||||
Self::Manual => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
match self {
|
||||
Self::MikanSubscriber(subscription) => subscription.sync_sources(ctx).await,
|
||||
Self::MikanSeason(subscription) => subscription.sync_sources(ctx).await,
|
||||
Self::MikanBangumi(subscription) => subscription.sync_sources(ctx).await,
|
||||
Self::Manual => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
fn try_from_model(model: &Model) -> RecorderResult<Self> {
|
||||
match model.category {
|
||||
SubscriptionCategory::MikanSubscriber => {
|
||||
MikanSubscriberSubscription::try_from_model(model).map(Self::MikanSubscriber)
|
||||
}
|
||||
SubscriptionCategory::MikanSeason => {
|
||||
MikanSeasonSubscription::try_from_model(model).map(Self::MikanSeason)
|
||||
}
|
||||
SubscriptionCategory::MikanBangumi => {
|
||||
MikanBangumiSubscription::try_from_model(model).map(Self::MikanBangumi)
|
||||
}
|
||||
SubscriptionCategory::Manual => Ok(Self::Manual),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&Model> for Subscription {
|
||||
type Error = RecorderError;
|
||||
|
||||
fn try_from(model: &Model) -> Result<Self, Self::Error> {
|
||||
Self::try_from_model(model)
|
||||
}
|
||||
}
|
||||
20
apps/recorder/src/models/subscriptions/core.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use std::{fmt::Debug, sync::Arc};
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
||||
use crate::{app::AppContextTrait, errors::RecorderResult, models::subscriptions};
|
||||
|
||||
#[async_trait]
|
||||
pub trait SubscriptionTrait: Sized + Debug {
|
||||
fn get_subscriber_id(&self) -> i32;
|
||||
|
||||
fn get_subscription_id(&self) -> i32;
|
||||
|
||||
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
|
||||
|
||||
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
|
||||
|
||||
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
|
||||
|
||||
fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self>;
|
||||
}
|
||||
195
apps/recorder/src/models/subscriptions/mod.rs
Normal file
@@ -0,0 +1,195 @@
|
||||
mod core;
|
||||
mod registry;
|
||||
pub use core::SubscriptionTrait;
|
||||
use std::fmt::Debug;
|
||||
|
||||
use async_trait::async_trait;
|
||||
pub use registry::{
|
||||
Subscription, SubscriptionCategory, SubscriptionCategoryEnum, SubscriptionCategoryIter,
|
||||
SubscriptionCategoryVariant, SubscriptionCategoryVariantIter,
|
||||
};
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::{RecorderError, RecorderResult},
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "subscriptions")]
|
||||
pub struct Model {
|
||||
#[sea_orm(default_expr = "Expr::current_timestamp()")]
|
||||
pub created_at: DateTimeUtc,
|
||||
#[sea_orm(default_expr = "Expr::current_timestamp()")]
|
||||
pub updated_at: DateTimeUtc,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub display_name: String,
|
||||
pub subscriber_id: i32,
|
||||
pub category: SubscriptionCategory,
|
||||
pub source_url: String,
|
||||
pub enabled: bool,
|
||||
pub credential_id: Option<i32>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::subscribers::Entity",
|
||||
from = "Column::SubscriberId",
|
||||
to = "super::subscribers::Column::Id",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
Subscriber,
|
||||
#[sea_orm(has_many = "super::bangumi::Entity")]
|
||||
Bangumi,
|
||||
#[sea_orm(has_many = "super::episodes::Entity")]
|
||||
Episodes,
|
||||
#[sea_orm(has_many = "super::subscription_episode::Entity")]
|
||||
SubscriptionEpisode,
|
||||
#[sea_orm(has_many = "super::subscription_bangumi::Entity")]
|
||||
SubscriptionBangumi,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::credential_3rd::Entity",
|
||||
from = "Column::CredentialId",
|
||||
to = "super::credential_3rd::Column::Id",
|
||||
on_update = "Cascade",
|
||||
on_delete = "SetNull"
|
||||
)]
|
||||
Credential3rd,
|
||||
#[sea_orm(has_many = "super::feeds::Entity")]
|
||||
Feed,
|
||||
}
|
||||
|
||||
impl Related<super::subscribers::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Subscriber.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::subscription_bangumi::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::SubscriptionBangumi.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::subscription_episode::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::SubscriptionEpisode.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::bangumi::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
super::subscription_bangumi::Relation::Bangumi.def()
|
||||
}
|
||||
|
||||
fn via() -> Option<RelationDef> {
|
||||
Some(
|
||||
super::subscription_bangumi::Relation::Subscription
|
||||
.def()
|
||||
.rev(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::feeds::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Feed.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::episodes::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
super::subscription_episode::Relation::Episode.def()
|
||||
}
|
||||
|
||||
fn via() -> Option<RelationDef> {
|
||||
Some(
|
||||
super::subscription_episode::Relation::Subscription
|
||||
.def()
|
||||
.rev(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::credential_3rd::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Credential3rd.def()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
|
||||
pub enum RelatedEntity {
|
||||
#[sea_orm(entity = "super::subscribers::Entity")]
|
||||
Subscriber,
|
||||
#[sea_orm(entity = "super::bangumi::Entity")]
|
||||
Bangumi,
|
||||
#[sea_orm(entity = "super::episodes::Entity")]
|
||||
Episode,
|
||||
#[sea_orm(entity = "super::subscription_episode::Entity")]
|
||||
SubscriptionEpisode,
|
||||
#[sea_orm(entity = "super::subscription_bangumi::Entity")]
|
||||
SubscriptionBangumi,
|
||||
#[sea_orm(entity = "super::credential_3rd::Entity")]
|
||||
Credential3rd,
|
||||
#[sea_orm(entity = "super::feeds::Entity")]
|
||||
Feed,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
impl ActiveModel {}
|
||||
|
||||
impl Model {
|
||||
pub async fn toggle_with_ids(
|
||||
ctx: &dyn AppContextTrait,
|
||||
ids: impl Iterator<Item = i32>,
|
||||
enabled: bool,
|
||||
) -> RecorderResult<()> {
|
||||
let db = ctx.db();
|
||||
Entity::update_many()
|
||||
.col_expr(Column::Enabled, Expr::value(enabled))
|
||||
.filter(Column::Id.is_in(ids))
|
||||
.exec(db)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_with_ids(
|
||||
ctx: &dyn AppContextTrait,
|
||||
ids: impl Iterator<Item = i32>,
|
||||
) -> RecorderResult<()> {
|
||||
let db = ctx.db();
|
||||
Entity::delete_many()
|
||||
.filter(Column::Id.is_in(ids))
|
||||
.exec(db)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn find_by_id_and_subscriber_id(
|
||||
ctx: &dyn AppContextTrait,
|
||||
subscriber_id: i32,
|
||||
subscription_id: i32,
|
||||
) -> RecorderResult<Self> {
|
||||
let db = ctx.db();
|
||||
let subscription_model = Entity::find_by_id(subscription_id)
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or_else(|| RecorderError::ModelEntityNotFound {
|
||||
entity: "Subscription".into(),
|
||||
})?;
|
||||
|
||||
if subscription_model.subscriber_id != subscriber_id {
|
||||
Err(RecorderError::ModelEntityNotFound {
|
||||
entity: "Subscription".into(),
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(subscription_model)
|
||||
}
|
||||
}
|
||||
129
apps/recorder/src/models/subscriptions/registry.rs
Normal file
@@ -0,0 +1,129 @@
|
||||
use std::{fmt::Debug, sync::Arc};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::{RecorderError, RecorderResult},
|
||||
extract::mikan::{
|
||||
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
|
||||
},
|
||||
models::subscriptions::{self, SubscriptionTrait},
|
||||
};
|
||||
|
||||
#[derive(
|
||||
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
|
||||
)]
|
||||
#[sea_orm(
|
||||
rs_type = "String",
|
||||
db_type = "Enum",
|
||||
enum_name = "subscription_category"
|
||||
)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum SubscriptionCategory {
|
||||
#[sea_orm(string_value = "mikan_subscriber")]
|
||||
MikanSubscriber,
|
||||
#[sea_orm(string_value = "mikan_season")]
|
||||
MikanSeason,
|
||||
#[sea_orm(string_value = "mikan_bangumi")]
|
||||
MikanBangumi,
|
||||
#[sea_orm(string_value = "manual")]
|
||||
Manual,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(tag = "category")]
|
||||
pub enum Subscription {
|
||||
#[serde(rename = "mikan_subscriber")]
|
||||
MikanSubscriber(MikanSubscriberSubscription),
|
||||
#[serde(rename = "mikan_season")]
|
||||
MikanSeason(MikanSeasonSubscription),
|
||||
#[serde(rename = "mikan_bangumi")]
|
||||
MikanBangumi(MikanBangumiSubscription),
|
||||
#[serde(rename = "manual")]
|
||||
Manual,
|
||||
}
|
||||
|
||||
impl Subscription {
|
||||
pub fn category(&self) -> SubscriptionCategory {
|
||||
match self {
|
||||
Self::MikanSubscriber(_) => SubscriptionCategory::MikanSubscriber,
|
||||
Self::MikanSeason(_) => SubscriptionCategory::MikanSeason,
|
||||
Self::MikanBangumi(_) => SubscriptionCategory::MikanBangumi,
|
||||
Self::Manual => SubscriptionCategory::Manual,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl SubscriptionTrait for Subscription {
|
||||
fn get_subscriber_id(&self) -> i32 {
|
||||
match self {
|
||||
Self::MikanSubscriber(subscription) => subscription.get_subscriber_id(),
|
||||
Self::MikanSeason(subscription) => subscription.get_subscriber_id(),
|
||||
Self::MikanBangumi(subscription) => subscription.get_subscriber_id(),
|
||||
Self::Manual => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_subscription_id(&self) -> i32 {
|
||||
match self {
|
||||
Self::MikanSubscriber(subscription) => subscription.get_subscription_id(),
|
||||
Self::MikanSeason(subscription) => subscription.get_subscription_id(),
|
||||
Self::MikanBangumi(subscription) => subscription.get_subscription_id(),
|
||||
Self::Manual => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
match self {
|
||||
Self::MikanSubscriber(subscription) => subscription.sync_feeds_incremental(ctx).await,
|
||||
Self::MikanSeason(subscription) => subscription.sync_feeds_incremental(ctx).await,
|
||||
Self::MikanBangumi(subscription) => subscription.sync_feeds_incremental(ctx).await,
|
||||
Self::Manual => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
match self {
|
||||
Self::MikanSubscriber(subscription) => subscription.sync_feeds_full(ctx).await,
|
||||
Self::MikanSeason(subscription) => subscription.sync_feeds_full(ctx).await,
|
||||
Self::MikanBangumi(subscription) => subscription.sync_feeds_full(ctx).await,
|
||||
Self::Manual => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
match self {
|
||||
Self::MikanSubscriber(subscription) => subscription.sync_sources(ctx).await,
|
||||
Self::MikanSeason(subscription) => subscription.sync_sources(ctx).await,
|
||||
Self::MikanBangumi(subscription) => subscription.sync_sources(ctx).await,
|
||||
Self::Manual => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
|
||||
match model.category {
|
||||
SubscriptionCategory::MikanSubscriber => {
|
||||
MikanSubscriberSubscription::try_from_model(model).map(Self::MikanSubscriber)
|
||||
}
|
||||
SubscriptionCategory::MikanSeason => {
|
||||
MikanSeasonSubscription::try_from_model(model).map(Self::MikanSeason)
|
||||
}
|
||||
SubscriptionCategory::MikanBangumi => {
|
||||
MikanBangumiSubscription::try_from_model(model).map(Self::MikanBangumi)
|
||||
}
|
||||
SubscriptionCategory::Manual => Ok(Self::Manual),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&subscriptions::Model> for Subscription {
|
||||
type Error = RecorderError;
|
||||
|
||||
fn try_from(model: &subscriptions::Model) -> Result<Self, Self::Error> {
|
||||
Self::try_from_model(model)
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,11 @@
|
||||
use std::fmt;
|
||||
use std::{borrow::Cow, fmt};
|
||||
|
||||
use async_stream::try_stream;
|
||||
use axum::{body::Body, response::Response};
|
||||
use axum_extra::{TypedHeader, headers::Range};
|
||||
use bytes::Bytes;
|
||||
use futures::{Stream, StreamExt};
|
||||
use headers_accept::Accept;
|
||||
use http::{HeaderValue, StatusCode, header};
|
||||
use opendal::{Buffer, Metadata, Operator, Reader, Writer, layers::LoggingLayer};
|
||||
use quirks_path::{Path, PathBuf};
|
||||
@@ -56,22 +57,24 @@ impl fmt::Display for StorageStoredUrl {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct StorageService {
|
||||
pub data_dir: String,
|
||||
pub operator: Operator,
|
||||
}
|
||||
|
||||
impl StorageService {
|
||||
pub async fn from_config(config: StorageConfig) -> RecorderResult<Self> {
|
||||
Ok(Self {
|
||||
data_dir: config.data_dir.to_string(),
|
||||
operator: Self::get_operator(&config.data_dir)?,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_operator(&self) -> Result<Operator, opendal::Error> {
|
||||
pub fn get_operator(data_dir: &str) -> Result<Operator, opendal::Error> {
|
||||
let op = if cfg!(test) {
|
||||
Operator::new(opendal::services::Memory::default())?
|
||||
.layer(LoggingLayer::default())
|
||||
.finish()
|
||||
} else {
|
||||
Operator::new(opendal::services::Fs::default().root(&self.data_dir))?
|
||||
Operator::new(opendal::services::Fs::default().root(data_dir))?
|
||||
.layer(LoggingLayer::default())
|
||||
.finish()
|
||||
};
|
||||
@@ -125,7 +128,7 @@ impl StorageService {
|
||||
path: P,
|
||||
data: Bytes,
|
||||
) -> Result<StorageStoredUrl, opendal::Error> {
|
||||
let operator = self.get_operator()?;
|
||||
let operator = &self.operator;
|
||||
|
||||
let path = path.into();
|
||||
|
||||
@@ -145,7 +148,7 @@ impl StorageService {
|
||||
&self,
|
||||
path: P,
|
||||
) -> Result<Option<StorageStoredUrl>, opendal::Error> {
|
||||
let operator = self.get_operator()?;
|
||||
let operator = &self.operator;
|
||||
|
||||
let path = path.to_string();
|
||||
|
||||
@@ -157,7 +160,7 @@ impl StorageService {
|
||||
}
|
||||
|
||||
pub async fn read(&self, path: impl AsRef<str>) -> Result<Buffer, opendal::Error> {
|
||||
let operator = self.get_operator()?;
|
||||
let operator = &self.operator;
|
||||
|
||||
let data = operator.read(path.as_ref()).await?;
|
||||
|
||||
@@ -165,7 +168,7 @@ impl StorageService {
|
||||
}
|
||||
|
||||
pub async fn reader(&self, path: impl AsRef<str>) -> Result<Reader, opendal::Error> {
|
||||
let operator = self.get_operator()?;
|
||||
let operator = &self.operator;
|
||||
|
||||
let reader = operator.reader(path.as_ref()).await?;
|
||||
|
||||
@@ -173,7 +176,7 @@ impl StorageService {
|
||||
}
|
||||
|
||||
pub async fn writer(&self, path: impl AsRef<str>) -> Result<Writer, opendal::Error> {
|
||||
let operator = self.get_operator()?;
|
||||
let operator = &self.operator;
|
||||
|
||||
let writer = operator.writer(path.as_ref()).await?;
|
||||
|
||||
@@ -181,13 +184,57 @@ impl StorageService {
|
||||
}
|
||||
|
||||
pub async fn stat(&self, path: impl AsRef<str>) -> Result<Metadata, opendal::Error> {
|
||||
let operator = self.get_operator()?;
|
||||
let operator = &self.operator;
|
||||
|
||||
let metadata = operator.stat(path.as_ref()).await?;
|
||||
|
||||
Ok(metadata)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub async fn list_public(&self) -> Result<Vec<opendal::Entry>, opendal::Error> {
|
||||
use futures::TryStreamExt;
|
||||
let lister = self.operator.lister_with("public/").recursive(true).await?;
|
||||
lister.try_collect().await
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub async fn list_subscribers(&self) -> Result<Vec<opendal::Entry>, opendal::Error> {
|
||||
use futures::TryStreamExt;
|
||||
let lister = self
|
||||
.operator
|
||||
.lister_with("subscribers/")
|
||||
.recursive(true)
|
||||
.await?;
|
||||
lister.try_collect().await
|
||||
}
|
||||
|
||||
#[instrument(skip_all, err, fields(storage_path = %storage_path.as_ref(), range = ?range, accept = ?accept))]
|
||||
pub async fn serve_optimized_image(
|
||||
&self,
|
||||
storage_path: impl AsRef<Path>,
|
||||
range: Option<TypedHeader<Range>>,
|
||||
accept: Accept,
|
||||
) -> RecorderResult<Response> {
|
||||
let storage_path = Path::new(storage_path.as_ref());
|
||||
for mime_type in accept.media_types() {
|
||||
let acceptable_path = match mime_type.subty().as_str() {
|
||||
"webp" => Some(storage_path.with_extension("webp")),
|
||||
"avif" => Some(storage_path.with_extension("avif")),
|
||||
"jxl" => Some(storage_path.with_extension("jxl")),
|
||||
_ => None,
|
||||
};
|
||||
if let Some(acceptable_path) = acceptable_path
|
||||
&& self.exists(&acceptable_path).await?.is_some()
|
||||
&& self.stat(&acceptable_path).await?.is_file()
|
||||
{
|
||||
return self.serve_file(acceptable_path, range).await;
|
||||
}
|
||||
}
|
||||
|
||||
self.serve_file(storage_path, range).await
|
||||
}
|
||||
|
||||
#[instrument(skip_all, err, fields(storage_path = %storage_path.as_ref(), range = ?range))]
|
||||
pub async fn serve_file(
|
||||
&self,
|
||||
@@ -206,6 +253,12 @@ impl StorageService {
|
||||
let mime_type = mime_guess::from_path(storage_path.as_ref()).first_or_octet_stream();
|
||||
|
||||
let content_type = HeaderValue::from_str(mime_type.as_ref())?;
|
||||
let etag = metadata.etag().map(Cow::Borrowed).or_else(|| {
|
||||
let len = metadata.content_length();
|
||||
let lm = metadata.last_modified()?.timestamp();
|
||||
Some(Cow::Owned(format!("\"{lm:x}-{len:x}\"")))
|
||||
});
|
||||
let last_modified = metadata.last_modified().map(|lm| lm.to_rfc2822());
|
||||
|
||||
let response = if let Some(TypedHeader(range)) = range {
|
||||
let ranges = range
|
||||
@@ -240,7 +293,7 @@ impl StorageService {
|
||||
};
|
||||
let body = Body::from_stream(stream);
|
||||
|
||||
Response::builder()
|
||||
let mut builder = Response::builder()
|
||||
.status(StatusCode::PARTIAL_CONTENT)
|
||||
.header(
|
||||
header::CONTENT_TYPE,
|
||||
@@ -248,17 +301,34 @@ impl StorageService {
|
||||
format!("multipart/byteranges; boundary={boundary}").as_str(),
|
||||
)
|
||||
.unwrap(),
|
||||
)
|
||||
.body(body)?
|
||||
);
|
||||
|
||||
if let Some(etag) = etag {
|
||||
builder = builder.header(header::ETAG, etag.to_string());
|
||||
}
|
||||
|
||||
if let Some(last_modified) = last_modified {
|
||||
builder = builder.header(header::LAST_MODIFIED, last_modified);
|
||||
}
|
||||
|
||||
builder.body(body)?
|
||||
} else if let Some((r, content_range)) = ranges.pop() {
|
||||
let reader = self.reader(storage_path.as_ref()).await?;
|
||||
let stream = reader.into_bytes_stream(r).await?;
|
||||
|
||||
Response::builder()
|
||||
let mut builder = Response::builder()
|
||||
.status(StatusCode::PARTIAL_CONTENT)
|
||||
.header(header::CONTENT_TYPE, content_type.clone())
|
||||
.header(header::CONTENT_RANGE, content_range)
|
||||
.body(Body::from_stream(stream))?
|
||||
.header(header::CONTENT_RANGE, content_range);
|
||||
|
||||
if let Some(etag) = metadata.etag() {
|
||||
builder = builder.header(header::ETAG, etag);
|
||||
}
|
||||
if let Some(last_modified) = last_modified {
|
||||
builder = builder.header(header::LAST_MODIFIED, last_modified);
|
||||
}
|
||||
|
||||
builder.body(Body::from_stream(stream))?
|
||||
} else {
|
||||
unreachable!("ranges length should be greater than 0")
|
||||
}
|
||||
@@ -276,10 +346,19 @@ impl StorageService {
|
||||
let reader = self.reader(storage_path.as_ref()).await?;
|
||||
let stream = reader.into_bytes_stream(..).await?;
|
||||
|
||||
Response::builder()
|
||||
let mut builder = Response::builder()
|
||||
.status(StatusCode::OK)
|
||||
.header(header::CONTENT_TYPE, content_type)
|
||||
.body(Body::from_stream(stream))?
|
||||
.header(header::CONTENT_TYPE, content_type);
|
||||
|
||||
if let Some(etag) = etag {
|
||||
builder = builder.header(header::ETAG, etag.to_string());
|
||||
}
|
||||
|
||||
if let Some(last_modified) = last_modified {
|
||||
builder = builder.header(header::LAST_MODIFIED, last_modified);
|
||||
}
|
||||
|
||||
builder.body(Body::from_stream(stream))?
|
||||
};
|
||||
|
||||
Ok(response)
|
||||
|
||||
@@ -1,4 +1,50 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TaskConfig {}
|
||||
pub struct TaskConfig {
|
||||
#[serde(default = "default_subscriber_task_workers")]
|
||||
pub subscriber_task_concurrency: u32,
|
||||
#[serde(default = "default_system_task_workers")]
|
||||
pub system_task_concurrency: u32,
|
||||
#[serde(default = "default_subscriber_task_timeout")]
|
||||
pub subscriber_task_timeout: Duration,
|
||||
#[serde(default = "default_system_task_timeout")]
|
||||
pub system_task_timeout: Duration,
|
||||
}
|
||||
|
||||
impl Default for TaskConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
subscriber_task_concurrency: default_subscriber_task_workers(),
|
||||
system_task_concurrency: default_system_task_workers(),
|
||||
subscriber_task_timeout: default_subscriber_task_timeout(),
|
||||
system_task_timeout: default_system_task_timeout(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn default_subscriber_task_workers() -> u32 {
|
||||
if cfg!(test) {
|
||||
1
|
||||
} else {
|
||||
((num_cpus::get_physical() as f32 / 2.0).floor() as u32).max(1)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn default_system_task_workers() -> u32 {
|
||||
if cfg!(test) {
|
||||
1
|
||||
} else {
|
||||
((num_cpus::get_physical() as f32 / 2.0).floor() as u32).max(1)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn default_subscriber_task_timeout() -> Duration {
|
||||
Duration::from_secs(3600)
|
||||
}
|
||||
|
||||
pub fn default_system_task_timeout() -> Duration {
|
||||
Duration::from_secs(3600)
|
||||
}
|
||||
|
||||
@@ -5,10 +5,11 @@ use serde::{Serialize, de::DeserializeOwned};
|
||||
|
||||
use crate::{app::AppContextTrait, errors::RecorderResult};
|
||||
|
||||
pub const SYSTEM_TASK_APALIS_NAME: &str = "system_task";
|
||||
pub const SUBSCRIBER_TASK_APALIS_NAME: &str = "subscriber_task";
|
||||
|
||||
#[async_trait::async_trait]
|
||||
pub trait SubscriberAsyncTaskTrait: Serialize + DeserializeOwned + Sized {
|
||||
pub trait AsyncTaskTrait: Serialize + DeserializeOwned + Sized {
|
||||
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
|
||||
|
||||
async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
@@ -19,7 +20,7 @@ pub trait SubscriberAsyncTaskTrait: Serialize + DeserializeOwned + Sized {
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
pub trait SubscriberStreamTaskTrait: Serialize + DeserializeOwned + Sized {
|
||||
pub trait StreamTaskTrait: Serialize + DeserializeOwned + Sized {
|
||||
type Yield: Serialize + DeserializeOwned + Send;
|
||||
|
||||
fn run_stream(
|
||||
|
||||
@@ -4,13 +4,16 @@ mod r#extern;
|
||||
mod registry;
|
||||
mod service;
|
||||
|
||||
pub use core::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberAsyncTaskTrait, SubscriberStreamTaskTrait};
|
||||
pub use core::{
|
||||
AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, StreamTaskTrait,
|
||||
};
|
||||
|
||||
pub use config::TaskConfig;
|
||||
pub use r#extern::{ApalisJobs, ApalisSchema};
|
||||
pub use registry::{
|
||||
SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant,
|
||||
SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
|
||||
SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask,
|
||||
OptimizeImageTask, SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum,
|
||||
SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
|
||||
SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask, SystemTask,
|
||||
SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant, SystemTaskTypeVariantIter,
|
||||
};
|
||||
pub use service::TaskService;
|
||||
|
||||
53
apps/recorder/src/task/registry/media.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use quirks_path::Path;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::instrument;
|
||||
|
||||
use crate::{
|
||||
app::AppContextTrait, errors::RecorderResult, media::EncodeImageOptions, task::AsyncTaskTrait,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct OptimizeImageTask {
|
||||
pub source_path: String,
|
||||
pub target_path: String,
|
||||
pub format_options: EncodeImageOptions,
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl AsyncTaskTrait for OptimizeImageTask {
|
||||
#[instrument(err, skip(ctx))]
|
||||
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
let storage = ctx.storage();
|
||||
|
||||
let source_path = Path::new(&self.source_path);
|
||||
|
||||
let media_service = ctx.media();
|
||||
|
||||
let image_data = storage.read(source_path).await?;
|
||||
|
||||
match self.format_options {
|
||||
EncodeImageOptions::Webp(options) => {
|
||||
let data = media_service
|
||||
.optimize_image_to_webp(source_path, image_data.to_bytes(), Some(options))
|
||||
.await?;
|
||||
storage.write(self.target_path, data).await?;
|
||||
}
|
||||
EncodeImageOptions::Avif(options) => {
|
||||
let data = media_service
|
||||
.optimize_image_to_avif(source_path, image_data.to_bytes(), Some(options))
|
||||
.await?;
|
||||
storage.write(self.target_path, data).await?;
|
||||
}
|
||||
EncodeImageOptions::Jxl(options) => {
|
||||
let data = media_service
|
||||
.optimize_image_to_jxl(source_path, image_data.to_bytes(), Some(options))
|
||||
.await?;
|
||||
storage.write(self.target_path, data).await?;
|
||||
}
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
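A hedged sketch of how this task could be enqueued once the system-task plumbing below is in place; ctx.task() and a Default impl for the webp options are assumptions, not shown in this diff:

    // Hypothetical enqueue of an image-optimization task; the paths are illustrative only.
    let task = SystemTask::OptimizeImage(OptimizeImageTask {
        source_path: "public/covers/example.jpg".to_string(),
        target_path: "public/covers/example.webp".to_string(),
        format_options: EncodeImageOptions::Webp(Default::default()),
    });
    let _task_id = ctx.task().add_system_task(task).await?;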
|
||||
@@ -1,6 +1,8 @@
|
||||
mod media;
|
||||
mod subscription;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use media::OptimizeImageTask;
|
||||
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
|
||||
use serde::{Deserialize, Serialize};
|
||||
pub use subscription::{
|
||||
@@ -8,11 +10,11 @@ pub use subscription::{
|
||||
SyncOneSubscriptionSourcesTask,
|
||||
};
|
||||
|
||||
use super::SubscriberAsyncTaskTrait;
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::{RecorderError, RecorderResult},
|
||||
models::subscriptions::SubscriptionTrait,
|
||||
task::AsyncTaskTrait,
|
||||
};
|
||||
|
||||
#[derive(
|
||||
@@ -97,3 +99,36 @@ impl SubscriberTask {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Clone,
|
||||
Debug,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
PartialEq,
|
||||
Eq,
|
||||
Copy,
|
||||
DeriveActiveEnum,
|
||||
DeriveDisplay,
|
||||
EnumIter,
|
||||
)]
|
||||
#[sea_orm(rs_type = "String", db_type = "Text")]
|
||||
pub enum SystemTaskType {
|
||||
#[serde(rename = "optimize_image")]
|
||||
#[sea_orm(string_value = "optimize_image")]
|
||||
OptimizeImage,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, FromJsonQueryResult)]
|
||||
pub enum SystemTask {
|
||||
#[serde(rename = "optimize_image")]
|
||||
OptimizeImage(OptimizeImageTask),
|
||||
}
|
||||
|
||||
impl SystemTask {
|
||||
pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
match self {
|
||||
Self::OptimizeImage(task) => task.run(ctx).await,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::RecorderResult,
|
||||
models::subscriptions::{self, SubscriptionTrait},
|
||||
task::SubscriberAsyncTaskTrait,
|
||||
task::AsyncTaskTrait,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
|
||||
@@ -20,7 +20,7 @@ impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsIncrementalTa
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsIncrementalTask {
|
||||
impl AsyncTaskTrait for SyncOneSubscriptionFeedsIncrementalTask {
|
||||
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
self.0.sync_feeds_incremental(ctx).await?;
|
||||
Ok(())
|
||||
@@ -37,7 +37,7 @@ impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsFullTask {
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
|
||||
impl AsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
|
||||
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
self.0.sync_feeds_full(ctx).await?;
|
||||
Ok(())
|
||||
@@ -48,7 +48,7 @@ impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
|
||||
pub struct SyncOneSubscriptionSourcesTask(pub subscriptions::Subscription);
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl SubscriberAsyncTaskTrait for SyncOneSubscriptionSourcesTask {
|
||||
impl AsyncTaskTrait for SyncOneSubscriptionSourcesTask {
|
||||
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
|
||||
self.0.sync_sources(ctx).await?;
|
||||
Ok(())
|
||||
|
||||
@@ -11,28 +11,47 @@ use tokio::sync::RwLock;
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::{RecorderError, RecorderResult},
|
||||
task::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberTask, TaskConfig},
|
||||
task::{
|
||||
SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, SubscriberTask, TaskConfig,
|
||||
config::{default_subscriber_task_workers, default_system_task_workers},
|
||||
registry::SystemTask,
|
||||
},
|
||||
};
|
||||
|
||||
pub struct TaskService {
|
||||
pub config: TaskConfig,
|
||||
ctx: Arc<dyn AppContextTrait>,
|
||||
subscriber_task_storage: Arc<RwLock<PostgresStorage<SubscriberTask>>>,
|
||||
system_task_storage: Arc<RwLock<PostgresStorage<SystemTask>>>,
|
||||
}
|
||||
|
||||
impl TaskService {
|
||||
pub async fn from_config_and_ctx(
|
||||
config: TaskConfig,
|
||||
mut config: TaskConfig,
|
||||
ctx: Arc<dyn AppContextTrait>,
|
||||
) -> RecorderResult<Self> {
|
||||
if config.subscriber_task_concurrency == 0 {
|
||||
config.subscriber_task_concurrency = default_subscriber_task_workers();
|
||||
};
|
||||
if config.system_task_concurrency == 0 {
|
||||
config.system_task_concurrency = default_system_task_workers();
|
||||
};
|
||||
|
||||
let pool = ctx.db().get_postgres_connection_pool().clone();
|
||||
let storage_config = Config::new(SUBSCRIBER_TASK_APALIS_NAME);
|
||||
let subscriber_task_storage = PostgresStorage::new_with_config(pool, storage_config);
|
||||
let subscriber_task_storage_config =
|
||||
Config::new(SUBSCRIBER_TASK_APALIS_NAME).set_keep_alive(config.subscriber_task_timeout);
|
||||
let system_task_storage_config =
|
||||
Config::new(SYSTEM_TASK_APALIS_NAME).set_keep_alive(config.system_task_timeout);
|
||||
let subscriber_task_storage =
|
||||
PostgresStorage::new_with_config(pool.clone(), subscriber_task_storage_config);
|
||||
let system_task_storage =
|
||||
PostgresStorage::new_with_config(pool, system_task_storage_config);
|
||||
|
||||
Ok(Self {
|
||||
config,
|
||||
ctx,
|
||||
subscriber_task_storage: Arc::new(RwLock::new(subscriber_task_storage)),
|
||||
system_task_storage: Arc::new(RwLock::new(system_task_storage)),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -45,6 +64,14 @@ impl TaskService {
|
||||
job.run(ctx).await
|
||||
}
|
||||
|
||||
async fn run_system_task(
|
||||
job: SystemTask,
|
||||
data: Data<Arc<dyn AppContextTrait>>,
|
||||
) -> RecorderResult<()> {
|
||||
let ctx = data.deref().clone();
|
||||
job.run(ctx).await
|
||||
}
|
||||
|
||||
pub async fn retry_subscriber_task(&self, job_id: String) -> RecorderResult<()> {
|
||||
{
|
||||
let mut storage = self.subscriber_task_storage.write().await;
|
||||
@@ -58,6 +85,19 @@ impl TaskService {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn retry_system_task(&self, job_id: String) -> RecorderResult<()> {
|
||||
{
|
||||
let mut storage = self.system_task_storage.write().await;
|
||||
let task_id =
|
||||
TaskId::from_str(&job_id).map_err(|err| RecorderError::InvalidTaskId {
|
||||
message: err.to_string(),
|
||||
})?;
|
||||
let worker_id = WorkerId::new(SYSTEM_TASK_APALIS_NAME);
|
||||
storage.retry(&worker_id, &task_id).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn add_subscriber_task(
|
||||
&self,
|
||||
_subscriber_id: i32,
|
||||
@@ -77,11 +117,27 @@ impl TaskService {
|
||||
Ok(task_id)
|
||||
}
|
||||
|
||||
pub async fn add_system_task(&self, system_task: SystemTask) -> RecorderResult<TaskId> {
|
||||
let task_id = {
|
||||
let mut storage = self.system_task_storage.write().await;
|
||||
let sql_context = {
|
||||
let mut c = SqlContext::default();
|
||||
c.set_max_attempts(1);
|
||||
c
|
||||
};
|
||||
let request = Request::new_with_ctx(system_task, sql_context);
|
||||
storage.push_request(request).await?.task_id
|
||||
};
|
||||
|
||||
Ok(task_id)
|
||||
}
|
||||
|
||||
pub async fn setup_monitor(&self) -> RecorderResult<Monitor> {
|
||||
let mut monitor = Monitor::new();
|
||||
|
||||
{
|
||||
let subscriber_task_worker = WorkerBuilder::new(SUBSCRIBER_TASK_APALIS_NAME)
|
||||
.concurrency(self.config.subscriber_task_concurrency as usize)
|
||||
.catch_panic()
|
||||
.enable_tracing()
|
||||
.data(self.ctx.clone())
|
||||
@@ -91,7 +147,17 @@ impl TaskService {
|
||||
})
|
||||
.build_fn(Self::run_subscriber_task);
|
||||
|
||||
monitor = monitor.register(subscriber_task_worker);
|
||||
let system_task_worker = WorkerBuilder::new(SYSTEM_TASK_APALIS_NAME)
|
||||
.concurrency(self.config.system_task_concurrency as usize)
|
||||
.catch_panic()
|
||||
.enable_tracing()
|
||||
.data(self.ctx.clone())
|
||||
.backend(self.system_task_storage.read().await.clone())
|
||||
.build_fn(Self::run_system_task);
|
||||
|
||||
monitor = monitor
|
||||
.register(subscriber_task_worker)
|
||||
.register(system_task_worker);
|
||||
}
|
||||
|
||||
Ok(monitor)
|
||||
@@ -99,13 +165,18 @@ impl TaskService {
|
||||
|
||||
pub async fn setup_listener(&self) -> RecorderResult<PgListen> {
|
||||
let pool = self.ctx.db().get_postgres_connection_pool().clone();
|
||||
let mut subscriber_task_listener = PgListen::new(pool).await?;
|
||||
let mut task_listener = PgListen::new(pool).await?;
|
||||
|
||||
{
|
||||
let mut subscriber_task_storage = self.subscriber_task_storage.write().await;
|
||||
subscriber_task_listener.subscribe_with(&mut subscriber_task_storage);
|
||||
task_listener.subscribe_with(&mut subscriber_task_storage);
|
||||
}
|
||||
|
||||
Ok(subscriber_task_listener)
|
||||
{
|
||||
let mut system_task_storage = self.system_task_storage.write().await;
|
||||
task_listener.subscribe_with(&mut system_task_storage);
|
||||
}
|
||||
|
||||
Ok(task_listener)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,17 @@ use std::{fmt::Debug, sync::Arc};
|
||||
use once_cell::sync::OnceCell;
|
||||
use typed_builder::TypedBuilder;
|
||||
|
||||
use crate::app::AppContextTrait;
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
test_utils::{
|
||||
crypto::build_testing_crypto_service,
|
||||
database::{TestingDatabaseServiceConfig, build_testing_database_service},
|
||||
media::build_testing_media_service,
|
||||
mikan::build_testing_mikan_client,
|
||||
storage::build_testing_storage_service,
|
||||
task::build_testing_task_service,
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(TypedBuilder)]
|
||||
#[builder(field_defaults(default, setter(strip_option)))]
|
||||
@@ -17,6 +27,7 @@ pub struct TestingAppContext {
|
||||
graphql: Option<crate::graphql::GraphQLService>,
|
||||
storage: Option<crate::storage::StorageService>,
|
||||
crypto: Option<crate::crypto::CryptoService>,
|
||||
media: Option<crate::media::MediaService>,
|
||||
#[builder(default = Arc::new(OnceCell::new()), setter(!strip_option))]
|
||||
task: Arc<OnceCell<crate::task::TaskService>>,
|
||||
message: Option<crate::message::MessageService>,
|
||||
@@ -30,6 +41,32 @@ impl TestingAppContext {
|
||||
pub fn set_task(&self, task: crate::task::TaskService) {
|
||||
self.task.get_or_init(|| task);
|
||||
}
|
||||
|
||||
pub async fn from_preset(
|
||||
preset: TestingAppContextPreset,
|
||||
) -> crate::errors::RecorderResult<Arc<Self>> {
|
||||
let mikan_client = build_testing_mikan_client(preset.mikan_base_url.clone()).await?;
|
||||
let db_service =
|
||||
build_testing_database_service(preset.database_config.unwrap_or_default()).await?;
|
||||
let crypto_service = build_testing_crypto_service().await?;
|
||||
let storage_service = build_testing_storage_service().await?;
|
||||
let media_service = build_testing_media_service().await?;
|
||||
let app_ctx = Arc::new(
|
||||
TestingAppContext::builder()
|
||||
.mikan(mikan_client)
|
||||
.db(db_service)
|
||||
.crypto(crypto_service)
|
||||
.storage(storage_service)
|
||||
.media(media_service)
|
||||
.build(),
|
||||
);
|
||||
|
||||
let task_service = build_testing_task_service(app_ctx.clone()).await?;
|
||||
|
||||
app_ctx.set_task(task_service);
|
||||
|
||||
Ok(app_ctx)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for TestingAppContext {
|
||||
@@ -90,4 +127,13 @@ impl AppContextTrait for TestingAppContext {
|
||||
fn message(&self) -> &crate::message::MessageService {
|
||||
self.message.as_ref().expect("should set message")
|
||||
}
|
||||
|
||||
fn media(&self) -> &crate::media::MediaService {
|
||||
self.media.as_ref().expect("should set media")
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TestingAppContextPreset {
|
||||
pub mikan_base_url: String,
|
||||
pub database_config: Option<TestingDatabaseServiceConfig>,
|
||||
}
|
||||
|
||||
@@ -17,6 +17,10 @@ impl Default for TestingDatabaseServiceConfig {
|
||||
pub async fn build_testing_database_service(
|
||||
config: TestingDatabaseServiceConfig,
|
||||
) -> RecorderResult<DatabaseService> {
|
||||
tracing::info!(
|
||||
"enable testcontainers feature, build testing database service in testcontainers..."
|
||||
);
|
||||
|
||||
use testcontainers::{ImageExt, runners::AsyncRunner};
|
||||
use testcontainers_ext::{ImageDefaultLogConsumerExt, ImagePruneExistedLabelExt};
|
||||
use testcontainers_modules::postgres::Postgres;
|
||||
@@ -38,6 +42,11 @@ pub async fn build_testing_database_service(
|
||||
let connection_string =
|
||||
format!("postgres://konobangu:konobangu@{host_ip}:{host_port}/konobangu");
|
||||
|
||||
tracing::debug!(
|
||||
"testing database service connection string: {}",
|
||||
connection_string
|
||||
);
|
||||
|
||||
let mut db_service = DatabaseService::from_config(DatabaseConfig {
|
||||
uri: connection_string,
|
||||
enable_logging: true,
|
||||
|
||||
8
apps/recorder/src/test_utils/media.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
use crate::{
|
||||
errors::RecorderResult,
|
||||
media::{MediaConfig, MediaService},
|
||||
};
|
||||
|
||||
pub async fn build_testing_media_service() -> RecorderResult<MediaService> {
|
||||
MediaService::from_config(MediaConfig::default()).await
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
pub mod app;
|
||||
pub mod crypto;
|
||||
pub mod database;
|
||||
pub mod media;
|
||||
pub mod mikan;
|
||||
pub mod storage;
|
||||
pub mod task;
|
||||
|
||||
@@ -9,7 +9,7 @@ use crate::{
|
||||
pub async fn build_testing_task_service(
|
||||
ctx: Arc<dyn AppContextTrait>,
|
||||
) -> RecorderResult<TaskService> {
|
||||
let config = TaskConfig {};
|
||||
let config = TaskConfig::default();
|
||||
let task_service = TaskService::from_config_and_ctx(config, ctx).await?;
|
||||
Ok(task_service)
|
||||
}
|
||||
|
||||
@@ -1,18 +1,36 @@
|
||||
use tracing::Level;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
use tracing_subscriber::{EnvFilter, layer::SubscriberExt, util::SubscriberInitExt};
|
||||
use tracing_tree::HierarchicalLayer;
|
||||
|
||||
use crate::logger::MODULE_WHITELIST;
|
||||
|
||||
pub fn try_init_testing_tracing(level: Level) {
|
||||
fn build_testing_tracing_filter(level: Level) -> EnvFilter {
|
||||
let crate_name = env!("CARGO_PKG_NAME");
|
||||
let level = level.as_str().to_lowercase();
|
||||
let mut filter = EnvFilter::new(format!("{crate_name}[]={level}"));
|
||||
|
||||
let mut modules = vec!["mockito"];
|
||||
let mut modules = vec!["mockito", "testcontainers"];
|
||||
modules.extend(MODULE_WHITELIST.iter());
|
||||
for module in modules {
|
||||
filter = filter.add_directive(format!("{module}[]={level}").parse().unwrap());
|
||||
}
|
||||
|
||||
let _ = tracing_subscriber::fmt().with_env_filter(filter).try_init();
|
||||
filter
|
||||
}
|
||||
|
||||
pub fn try_init_testing_tracing(level: Level) {
|
||||
let _ = tracing_subscriber::fmt()
|
||||
.with_env_filter(build_testing_tracing_filter(level))
|
||||
.try_init();
|
||||
}
|
||||
|
||||
pub fn try_init_testing_tracing_only_leaf(level: Level) {
|
||||
let _ = tracing_subscriber::registry()
|
||||
.with(build_testing_tracing_filter(level))
|
||||
.with(
|
||||
HierarchicalLayer::new(2)
|
||||
.with_targets(true)
|
||||
.with_bracketed_fields(true),
|
||||
)
|
||||
.try_init();
|
||||
}
|
||||
|
||||
@@ -1,2 +1,3 @@
|
||||
pub mod http;
|
||||
pub mod json;
|
||||
pub mod nom;
|
||||
|
||||
261
apps/recorder/src/utils/nom.rs
Normal file
@@ -0,0 +1,261 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use icu::properties::{CodePointMapData, props::Script};
|
||||
use lazy_static::lazy_static;
|
||||
use maplit::hashmap;
|
||||
use nom::{
|
||||
IResult, Parser,
|
||||
branch::alt,
|
||||
bytes::complete::tag,
|
||||
character::complete::{anychar, digit1, none_of, satisfy},
|
||||
combinator::{map, opt, recognize, value, verify},
|
||||
error::ParseError,
|
||||
multi::many1,
|
||||
sequence::{delimited, preceded},
|
||||
};
|
||||
use num_traits::{PrimInt, Signed};
|
||||
|
||||
lazy_static! {
|
||||
pub static ref ZH_DIGIT_MAP: HashMap<char, u32> = {
|
||||
hashmap! {
|
||||
'〇' => 0,
|
||||
'零' => 0,
|
||||
'一' => 1,
|
||||
'壹' => 1,
|
||||
'二' => 2,
|
||||
'贰' => 2,
|
||||
'三' => 3,
|
||||
'叁' => 3,
|
||||
'四' => 4,
|
||||
'肆' => 4,
|
||||
'五' => 5,
|
||||
'伍' => 5,
|
||||
'六' => 6,
|
||||
'陆' => 6,
|
||||
'七' => 7,
|
||||
'柒' => 7,
|
||||
'八' => 8,
|
||||
'捌' => 8,
|
||||
'九' => 9,
|
||||
'玖' => 9,
|
||||
'十' => 10,
|
||||
'拾' => 10,
|
||||
'廿' => 20,
|
||||
'念' => 20,
|
||||
'百' => 100,
|
||||
'佰' => 100,
|
||||
'千' => 1000,
|
||||
'仟' => 1000,
|
||||
'万' => 10000,
|
||||
'萬' => 10000,
|
||||
'亿' => 100000000,
|
||||
'億' => 100000000,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub fn with_recognized<'a, F, O, E>(
|
||||
mut parser: F,
|
||||
) -> impl FnMut(&'a str) -> IResult<&'a str, (O, &'a str), E>
|
||||
where
|
||||
F: Parser<&'a str, Output = O, Error = E>,
|
||||
E: ParseError<&'a str>,
|
||||
{
|
||||
move |input: &'a str| {
|
||||
let i = input;
|
||||
let (rest, output) = parser.parse(i)?;
|
||||
let consumed_len = i.len() - rest.len();
|
||||
Ok((rest, (output, &i[..consumed_len])))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_some_unicode_scx(input: &str, script: Script) -> IResult<&str, char> {
|
||||
let script_data = CodePointMapData::<Script>::new();
|
||||
|
||||
verify(anychar, |&c| script_data.get(c) == script).parse(input)
|
||||
}
|
||||
|
||||
pub fn is_han_scx(input: &str) -> IResult<&str, char> {
|
||||
is_some_unicode_scx(input, Script::Han)
|
||||
}
|
||||
|
||||
pub fn is_hira_scx(input: &str) -> IResult<&str, char> {
|
||||
is_some_unicode_scx(input, Script::Hiragana)
|
||||
}
|
||||
|
||||
pub fn is_kana_scx(input: &str) -> IResult<&str, char> {
|
||||
is_some_unicode_scx(input, Script::Katakana)
|
||||
}
|
||||
|
||||
pub fn delimited_by_brackets(input: &str) -> IResult<&str, &str> {
|
||||
alt((
|
||||
delimited(tag("["), recognize(many1(none_of("[]"))), tag("]")),
|
||||
delimited(tag("【"), recognize(many1(none_of("【】"))), tag("】")),
|
||||
))
|
||||
.parse(input)
|
||||
}
|
||||
|
||||
pub struct ZhNum {
|
||||
pub int: i32,
|
||||
}
|
||||
|
||||
impl ZhNum {
|
||||
fn parse_digit<'a>(
|
||||
max_value: u32,
|
||||
) -> impl Parser<&'a str, Output = u32, Error = nom::error::Error<&'a str>> {
|
||||
map(
|
||||
satisfy(move |c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v <= max_value)),
|
||||
|c| *ZH_DIGIT_MAP.get(&c).unwrap(),
|
||||
)
|
||||
}
|
||||
|
||||
fn parse_个(input: &str) -> IResult<&str, u32> {
|
||||
Self::parse_digit(9).parse(input)
|
||||
}
|
||||
|
||||
fn parse_十(input: &str) -> IResult<&str, u32> {
|
||||
let (input, (p, o, s)) = (
|
||||
opt(Self::parse_个),
|
||||
map(
|
||||
satisfy(|c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v == 10 || *v == 20)),
|
||||
|c| *ZH_DIGIT_MAP.get(&c).unwrap(),
|
||||
),
|
||||
opt(Self::parse_个),
|
||||
)
|
||||
.parse(input)?;
|
||||
|
||||
let value = p.unwrap_or(1) * o + s.unwrap_or(0);
|
||||
|
||||
Ok((input, value))
|
||||
}
|
||||
|
||||
pub fn parse_百(input: &str) -> IResult<&str, u32> {
|
||||
let (input, (p, o, s)) = (
|
||||
opt(Self::parse_个),
|
||||
map(
|
||||
satisfy(|c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v == 100 || *v == 200)),
|
||||
|c| *ZH_DIGIT_MAP.get(&c).unwrap(),
|
||||
),
|
||||
opt(Self::parse_十),
|
||||
)
|
||||
.parse(input)?;
|
||||
|
||||
let value = p.unwrap_or(1) * o + s.unwrap_or(0);
|
||||
|
||||
Ok((input, value))
|
||||
}
|
||||
|
||||
pub fn parse_千(input: &str) -> IResult<&str, u32> {
|
||||
let (input, (p, o, s)) = (
|
||||
opt(Self::parse_个),
|
||||
value(
|
||||
1000u32,
|
||||
satisfy(|c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v == 1000)),
|
||||
),
|
||||
opt(Self::parse_百),
|
||||
)
|
||||
.parse(input)?;
|
||||
|
||||
let value = p.unwrap_or(1) * o + s.unwrap_or(0);
|
||||
|
||||
Ok((input, value))
|
||||
}
|
||||
|
||||
pub fn parse_万(input: &str) -> IResult<&str, u32> {
|
||||
let (input, (p, o, s)) = (
|
||||
opt(Self::parse_千),
|
||||
value(
|
||||
10000u32,
|
||||
satisfy(|c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v == 10000)),
|
||||
),
|
||||
opt(Self::parse_千),
|
||||
)
|
||||
.parse(input)?;
|
||||
|
||||
let value = p.unwrap_or(1) * o + s.unwrap_or(0);
|
||||
|
||||
Ok((input, value))
|
||||
}
|
||||
|
||||
pub fn parse_亿(input: &str) -> IResult<&str, u32> {
|
||||
let (input, (p, o, s)) = (
|
||||
opt(Self::parse_万),
|
||||
value(
|
||||
100000000u32,
|
||||
satisfy(|c| ZH_DIGIT_MAP.get(&c).is_some_and(|v| *v == 100000000)),
|
||||
),
|
||||
opt(Self::parse_万),
|
||||
)
|
||||
.parse(input)?;
|
||||
|
||||
let value = p.unwrap_or(1) * o + s.unwrap_or(0);
|
||||
|
||||
Ok((input, value))
|
||||
}
|
||||
|
||||
pub fn parse_uint(input: &str) -> IResult<&str, u32> {
|
||||
preceded(
|
||||
opt(tag("正")),
|
||||
alt((
|
||||
Self::parse_个,
|
||||
Self::parse_十,
|
||||
Self::parse_百,
|
||||
Self::parse_千,
|
||||
Self::parse_万,
|
||||
Self::parse_亿,
|
||||
)),
|
||||
)
|
||||
.parse(input)
|
||||
}
|
||||
|
||||
pub fn parse_int(input: &str) -> IResult<&str, i32> {
|
||||
let (input, (sign, value)) = (
|
||||
opt(alt((value(1, tag("正")), value(-1, tag("负"))))),
|
||||
alt((
|
||||
Self::parse_个,
|
||||
Self::parse_十,
|
||||
Self::parse_百,
|
||||
Self::parse_千,
|
||||
Self::parse_万,
|
||||
Self::parse_亿,
|
||||
)),
|
||||
)
|
||||
.parse(input)?;
|
||||
|
||||
Ok((input, sign.unwrap_or(1) * value as i32))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_uint<T: PrimInt>(input: &str) -> IResult<&str, T> {
|
||||
let (input, value) = preceded(opt(tag("+")), digit1).parse(input)?;
|
||||
|
||||
let value = T::from_str_radix(value, 10).map_err(|_| {
|
||||
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Digit))
|
||||
})?;
|
||||
|
||||
Ok((input, value))
|
||||
}
|
||||
|
||||
pub fn parse_int<T: PrimInt + Signed>(input: &str) -> IResult<&str, T> {
|
||||
let (input, value) = recognize((
|
||||
opt(alt((
|
||||
value(T::one(), tag("+")),
|
||||
value(T::one().neg(), tag("-")),
|
||||
))),
|
||||
digit1,
|
||||
))
|
||||
.parse(input)?;
|
||||
|
||||
let value = T::from_str_radix(value, 10).map_err(|_| {
|
||||
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Digit))
|
||||
})?;
|
||||
|
||||
Ok((input, value))
|
||||
}
|
||||
|
||||
pub fn parse_month_num(input: &str) -> IResult<&str, u32> {
|
||||
verify(alt((ZhNum::parse_uint, parse_uint::<u32>)), |v| {
|
||||
*v <= 12 && *v > 0
|
||||
})
|
||||
.parse(input)
|
||||
}
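A minimal usage sketch of the numeral parsers above (illustrative inputs; assumes this module's items are in scope):

    // Hypothetical usage; each parser returns a nom IResult of (remaining input, value).
    let (_rest, n) = ZhNum::parse_int("十二").unwrap();
    assert_eq!(n, 12);
    let (_rest, m) = parse_int::<i32>("-42").unwrap();
    assert_eq!(m, -42);
    let (_rest, month) = parse_month_num("十一").unwrap();
    assert_eq!(month, 11);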
|
||||
42
apps/recorder/src/web/controller/feeds/mod.rs
Normal file
@@ -0,0 +1,42 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
Router,
|
||||
extract::{Path, State},
|
||||
response::IntoResponse,
|
||||
routing::get,
|
||||
};
|
||||
use http::StatusCode;
|
||||
|
||||
use crate::{
|
||||
app::AppContextTrait,
|
||||
errors::{RecorderError, RecorderResult},
|
||||
extract::http::ForwardedRelatedInfo,
|
||||
models::feeds,
|
||||
web::controller::Controller,
|
||||
};
|
||||
|
||||
pub const CONTROLLER_PREFIX: &str = "/api/feeds";
|
||||
|
||||
async fn rss_handler(
|
||||
State(ctx): State<Arc<dyn AppContextTrait>>,
|
||||
Path(token): Path<String>,
|
||||
forwarded_info: ForwardedRelatedInfo,
|
||||
) -> RecorderResult<impl IntoResponse> {
|
||||
let api_base = forwarded_info
|
||||
.resolved_origin()
|
||||
.ok_or(RecorderError::MissingOriginError)?;
|
||||
let channel = feeds::Model::find_rss_feed_by_token(ctx.as_ref(), &token, &api_base).await?;
|
||||
|
||||
Ok((
|
||||
StatusCode::OK,
|
||||
[("Content-Type", "application/xml; charset=utf-8")],
|
||||
channel.to_string(),
|
||||
))
|
||||
}
|
||||
|
||||
pub async fn create(_ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
|
||||
let router = Router::<Arc<dyn AppContextTrait>>::new().route("/rss/{token}", get(rss_handler));
|
||||
|
||||
Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
pub mod core;
|
||||
pub mod feeds;
|
||||
pub mod graphql;
|
||||
pub mod metadata;
|
||||
pub mod oidc;
|
||||
|
||||
@@ -2,7 +2,7 @@ use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
Json, Router,
|
||||
extract::{Query, Request, State},
|
||||
extract::{Query, State},
|
||||
routing::get,
|
||||
};
|
||||
use snafu::ResultExt;
|
||||
@@ -42,12 +42,11 @@ async fn oidc_callback(
|
||||
|
||||
async fn oidc_auth(
|
||||
State(ctx): State<Arc<dyn AppContextTrait>>,
|
||||
request: Request,
|
||||
forwarded_info: ForwardedRelatedInfo,
|
||||
) -> Result<Json<OidcAuthRequest>, AuthError> {
|
||||
let auth_service = ctx.auth();
|
||||
if let AuthService::Oidc(oidc_auth_service) = auth_service {
|
||||
let (parts, _) = request.into_parts();
|
||||
let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts)
|
||||
let mut redirect_uri = forwarded_info
|
||||
.resolved_origin()
|
||||
.ok_or(url::ParseError::EmptyHost)
|
||||
.context(OidcRequestRedirectUriSnafu)?;
|
||||
|
||||
@@ -2,12 +2,14 @@ use std::sync::Arc;

 use axum::{
     Extension, Router,
-    extract::{Path, State},
+    extract::{Path, Query, State},
     middleware::from_fn_with_state,
     response::Response,
     routing::get,
 };
 use axum_extra::{TypedHeader, headers::Range};
 use headers_accept::Accept;
+use serde::{Deserialize, Serialize};

 use crate::{
     app::AppContextTrait,
@@ -18,33 +20,75 @@ use crate::{

 pub const CONTROLLER_PREFIX: &str = "/api/static";

+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
+pub enum OptimizeType {
+    #[serde(rename = "accept")]
+    AcceptHeader,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct StaticQuery {
+    optimize: Option<OptimizeType>,
+}
+
 async fn serve_subscriber_static(
     State(ctx): State<Arc<dyn AppContextTrait>>,
     Path((subscriber_id, path)): Path<(i32, String)>,
     Extension(auth_user_info): Extension<AuthUserInfo>,
+    Query(query): Query<StaticQuery>,
     range: Option<TypedHeader<Range>>,
+    accept: Option<TypedHeader<Accept>>,
 ) -> RecorderResult<Response> {
     if subscriber_id != auth_user_info.subscriber_auth.id {
         Err(AuthError::PermissionError)?;
     }

     let storage = ctx.storage();
+    let media = ctx.media();

     let storage_path = storage.build_subscriber_path(subscriber_id, &path);

-    storage.serve_file(storage_path, range).await
+    if query
+        .optimize
+        .is_some_and(|optimize| optimize == OptimizeType::AcceptHeader)
+        && storage_path
+            .extension()
+            .is_some_and(|ext| media.is_legacy_image_format(ext))
+        && let Some(TypedHeader(accept)) = accept
+    {
+        storage
+            .serve_optimized_image(storage_path, range, accept)
+            .await
+    } else {
+        storage.serve_file(storage_path, range).await
+    }
 }

 async fn serve_public_static(
     State(ctx): State<Arc<dyn AppContextTrait>>,
     Path(path): Path<String>,
+    Query(query): Query<StaticQuery>,
     range: Option<TypedHeader<Range>>,
+    accept: Option<TypedHeader<Accept>>,
 ) -> RecorderResult<Response> {
     let storage = ctx.storage();
+    let media = ctx.media();

     let storage_path = storage.build_public_path(&path);

-    storage.serve_file(storage_path, range).await
+    if query
+        .optimize
+        .is_some_and(|optimize| optimize == OptimizeType::AcceptHeader)
+        && storage_path
+            .extension()
+            .is_some_and(|ext| media.is_legacy_image_format(ext))
+        && let Some(TypedHeader(accept)) = accept
+    {
+        storage
+            .serve_optimized_image(storage_path, range, accept)
+            .await
+    } else {
+        storage.serve_file(storage_path, range).await
+    }
 }

 pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
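
With the new `StaticQuery` parameter, the handlers only re-encode when the caller opts in via `?optimize=accept`, the stored file has a legacy image extension, and the request carries an `Accept` header; otherwise they fall back to plain `serve_file`. A minimal client-side sketch of exercising the public route follows; the path segment after the `/api/static` prefix is an assumption here, not confirmed by this diff:

```ts
// Hypothetical helper: ask the recorder's static controller for an optimized poster.
// Only the "/api/static" prefix is shown in the diff; the rest of the route shape is assumed.
async function fetchOptimizedPoster(path: string): Promise<Blob> {
  const url = `/api/static/public/${path}?optimize=accept`;
  const res = await fetch(url, {
    headers: {
      // Advertising webp/avif is what lets serve_optimized_image kick in;
      // without this header (or without ?optimize=accept) the original file is served.
      Accept: "image/avif,image/webp,image/*;q=0.8",
    },
  });
  if (!res.ok) {
    throw new Error(`static fetch failed: ${res.status}`);
  }
  return res.blob();
}
```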
@@ -1,6 +0,0 @@
----
-source: apps/recorder/src/web/middleware/request_id.rs
-assertion_line: 126
-expression: id
----
-"foo-barbaz"

Binary file not shown.
@@ -77,7 +77,6 @@
    "tw-animate-css": "^1.3.4",
    "type-fest": "^4.41.0",
    "vaul": "^1.1.2",
    "es-toolkit": "^1.39.3",
    "@tanstack/react-router": "^1.121.2"
  },
  "devDependencies": {
@@ -1,9 +1,45 @@
-import type { ComponentProps } from "react";
+import { useInject } from "@/infra/di/inject";
+import { DOCUMENT } from "@/infra/platform/injection";
+import { type ComponentProps, useMemo } from "react";
+
+const URL_PARSE_REGEX = /^([^?#]*)(\?[^#]*)?(#.*)?$/;
+
+function parseURL(url: string) {
+  const match = url.match(URL_PARSE_REGEX);
+
+  if (!match) {
+    return { other: url, search: "", hash: "" };
+  }
+
+  return {
+    other: match[1] || "",
+    search: match[2] || "",
+    hash: match[3] || "",
+  };
+}

 export type ImgProps = Omit<ComponentProps<"img">, "alt"> &
-  Required<Pick<ComponentProps<"img">, "alt">>;
+  Required<Pick<ComponentProps<"img">, "alt">> & {
+    optimize?: "accept";
+  };

-export const Img = (props: ImgProps) => {
+export const Img = ({
+  src: propsSrc,
+  optimize = "accept",
+  ...props
+}: ImgProps) => {
+  const document = useInject(DOCUMENT);
+  const src = useMemo(() => {
+    const baseURI = document?.baseURI;
+    if (!propsSrc || !baseURI) {
+      return propsSrc;
+    }
+    const { other, search, hash } = parseURL(propsSrc);
+    const searchParams = new URLSearchParams(search);
+    searchParams.set("optimize", optimize);
+    return `${other}?${searchParams.toString()}${hash}`;
+  }, [propsSrc, optimize, document?.baseURI]);
+
   // biome-ignore lint/nursery/noImgElement: <explanation>
-  return <img {...props} alt={props.alt} />;
+  return <img {...props} alt={props.alt} src={src} />;
 };
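
Because `optimize` defaults to `"accept"`, every `Img` with a resolvable `baseURI` now emits a `src` carrying the `optimize=accept` query parameter, with any existing query string and hash preserved by `parseURL`. A small usage sketch; the poster path is illustrative only:

```tsx
import { Img } from "@/components/ui/img";

// Renders roughly <img src="/api/static/public/posters/1.jpg?optimize=accept" alt="Poster">
// once the injected DOCUMENT provides a baseURI; otherwise src passes through unchanged.
export function PosterCard() {
  return (
    <Img
      src="/api/static/public/posters/1.jpg"
      alt="Poster"
      loading="lazy"
    />
  );
}
```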
19  apps/webui/src/domains/recorder/schema/feeds.ts  Normal file
@@ -0,0 +1,19 @@
+import { gql } from '@apollo/client';
+
+export const INSERT_FEED = gql`
+  mutation InsertFeed($data: FeedsInsertInput!) {
+    feedsCreateOne(data: $data) {
+      id
+      createdAt
+      updatedAt
+      feedType
+      token
+    }
+  }
+`;
+
+export const DELETE_FEED = gql`
+  mutation DeleteFeed($filters: FeedsFilterInput!) {
+    feedsDelete(filter: $filters)
+  }
+`;
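
These documents are meant to be used through the usual Apollo hooks. A minimal sketch of wiring them into a component, with variable shapes following the generated `FeedsInsertInput`/`FeedsFilterInput` types; the `rss` and `subscription_episode` values come from the generated enums:

```ts
import { useMutation } from "@apollo/client";
import { DELETE_FEED, INSERT_FEED } from "@/domains/recorder/schema/feeds";

// Illustrative hook: create or delete RSS feeds bound to one subscription.
export function useSubscriptionFeedActions(subscriptionId: number) {
  const [insertFeed] = useMutation(INSERT_FEED);
  const [deleteFeed] = useMutation(DELETE_FEED);

  // Create an RSS feed sourced from the subscription's episode stream.
  const createFeed = () =>
    insertFeed({
      variables: {
        data: {
          feedType: "rss",
          feedSource: "subscription_episode",
          subscriptionId,
        },
      },
    });

  // Delete a feed by its id.
  const removeFeed = (feedId: number) =>
    deleteFeed({ variables: { filters: { id: { eq: feedId } } } });

  return { createFeed, removeFeed };
}
```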
@@ -95,6 +95,16 @@ query GetSubscriptionDetail ($id: Int!) {
       category
       sourceUrl
       enabled
+      feed {
+        nodes {
+          id
+          createdAt
+          updatedAt
+          token
+          feedType
+          feedSource
+        }
+      }
       credential3rd {
         id
         username
@@ -112,7 +122,6 @@ query GetSubscriptionDetail ($id: Int!) {
           mikanFansubId
           rssLink
           posterLink
-          savePath
           homepage
         }
       }
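
On the subscription detail page, the added selection arrives as `feed.nodes` on each subscription node. A small sketch of flattening those nodes into display rows; the RSS URL built from `token` is an assumption, since the route that serves feed tokens is not shown in this diff:

```ts
// Local structural type mirroring the Feeds fields selected above.
interface FeedNode {
  id: number;
  createdAt: string;
  updatedAt: string;
  token: string;
  feedType: string;
  feedSource: string;
}

// Map the newly selected feed nodes into rows for a table or list.
export function toFeedRows(feedNodes: FeedNode[]) {
  return feedNodes.map((feed) => ({
    id: feed.id,
    type: feed.feedType,
    source: feed.feedSource,
    // Assumed URL shape for serving the feed token; not defined in this diff.
    rssUrl: `/api/feeds/rss/${feed.token}`,
    createdAt: feed.createdAt,
  }));
}
```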
@@ -20,11 +20,13 @@ type Documents = {
|
||||
"\n mutation DeleteCredential3rd($filters: Credential3rdFilterInput!) {\n credential3rdDelete(filter: $filters)\n }\n": typeof types.DeleteCredential3rdDocument,
|
||||
"\n query GetCredential3rdDetail($id: Int!) {\n credential3rd(filters: { id: { eq: $id } }) {\n nodes {\n id\n cookies\n username\n password\n userAgent\n createdAt\n updatedAt\n credentialType\n }\n }\n }\n": typeof types.GetCredential3rdDetailDocument,
|
||||
"\n query CheckCredential3rdAvailable($id: Int!) {\n credential3rdCheckAvailable(filter: { id: $id }) {\n available\n }\n }\n": typeof types.CheckCredential3rdAvailableDocument,
|
||||
"\n mutation InsertFeed($data: FeedsInsertInput!) {\n feedsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n feedType\n token\n }\n }\n": typeof types.InsertFeedDocument,
|
||||
"\n mutation DeleteFeed($filters: FeedsFilterInput!) {\n feedsDelete(filter: $filters)\n }\n": typeof types.DeleteFeedDocument,
|
||||
"\n query GetSubscriptions($filters: SubscriptionsFilterInput!, $orderBy: SubscriptionsOrderInput!, $pagination: PaginationInput!) {\n subscriptions(\n pagination: $pagination\n filters: $filters\n orderBy: $orderBy\n ) {\n nodes {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n credentialId\n }\n paginationInfo {\n total\n pages\n }\n }\n }\n": typeof types.GetSubscriptionsDocument,
|
||||
"\n mutation InsertSubscription($data: SubscriptionsInsertInput!) {\n subscriptionsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n credentialId\n }\n }\n": typeof types.InsertSubscriptionDocument,
|
||||
"\n mutation UpdateSubscriptions(\n $data: SubscriptionsUpdateInput!,\n $filters: SubscriptionsFilterInput!,\n ) {\n subscriptionsUpdate (\n data: $data\n filter: $filters\n ) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n }\n}\n": typeof types.UpdateSubscriptionsDocument,
|
||||
"\n mutation DeleteSubscriptions($filters: SubscriptionsFilterInput) {\n subscriptionsDelete(filter: $filters)\n }\n": typeof types.DeleteSubscriptionsDocument,
|
||||
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n": typeof types.GetSubscriptionDetailDocument,
|
||||
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n feed {\n nodes {\n id\n createdAt\n updatedAt\n token\n feedType\n feedSource\n }\n }\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n homepage\n }\n }\n }\n }\n}\n": typeof types.GetSubscriptionDetailDocument,
|
||||
"\n mutation SyncSubscriptionFeedsIncremental($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsIncremental(filter: $filter) {\n id\n }\n }\n": typeof types.SyncSubscriptionFeedsIncrementalDocument,
|
||||
"\n mutation SyncSubscriptionFeedsFull($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsFull(filter: $filter) {\n id\n }\n }\n": typeof types.SyncSubscriptionFeedsFullDocument,
|
||||
"\n mutation SyncSubscriptionSources($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneSources(filter: $filter) {\n id\n }\n }\n": typeof types.SyncSubscriptionSourcesDocument,
|
||||
@@ -39,11 +41,13 @@ const documents: Documents = {
|
||||
"\n mutation DeleteCredential3rd($filters: Credential3rdFilterInput!) {\n credential3rdDelete(filter: $filters)\n }\n": types.DeleteCredential3rdDocument,
|
||||
"\n query GetCredential3rdDetail($id: Int!) {\n credential3rd(filters: { id: { eq: $id } }) {\n nodes {\n id\n cookies\n username\n password\n userAgent\n createdAt\n updatedAt\n credentialType\n }\n }\n }\n": types.GetCredential3rdDetailDocument,
|
||||
"\n query CheckCredential3rdAvailable($id: Int!) {\n credential3rdCheckAvailable(filter: { id: $id }) {\n available\n }\n }\n": types.CheckCredential3rdAvailableDocument,
|
||||
"\n mutation InsertFeed($data: FeedsInsertInput!) {\n feedsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n feedType\n token\n }\n }\n": types.InsertFeedDocument,
|
||||
"\n mutation DeleteFeed($filters: FeedsFilterInput!) {\n feedsDelete(filter: $filters)\n }\n": types.DeleteFeedDocument,
|
||||
"\n query GetSubscriptions($filters: SubscriptionsFilterInput!, $orderBy: SubscriptionsOrderInput!, $pagination: PaginationInput!) {\n subscriptions(\n pagination: $pagination\n filters: $filters\n orderBy: $orderBy\n ) {\n nodes {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n credentialId\n }\n paginationInfo {\n total\n pages\n }\n }\n }\n": types.GetSubscriptionsDocument,
|
||||
"\n mutation InsertSubscription($data: SubscriptionsInsertInput!) {\n subscriptionsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n credentialId\n }\n }\n": types.InsertSubscriptionDocument,
|
||||
"\n mutation UpdateSubscriptions(\n $data: SubscriptionsUpdateInput!,\n $filters: SubscriptionsFilterInput!,\n ) {\n subscriptionsUpdate (\n data: $data\n filter: $filters\n ) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n }\n}\n": types.UpdateSubscriptionsDocument,
|
||||
"\n mutation DeleteSubscriptions($filters: SubscriptionsFilterInput) {\n subscriptionsDelete(filter: $filters)\n }\n": types.DeleteSubscriptionsDocument,
|
||||
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n": types.GetSubscriptionDetailDocument,
|
||||
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n feed {\n nodes {\n id\n createdAt\n updatedAt\n token\n feedType\n feedSource\n }\n }\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n homepage\n }\n }\n }\n }\n}\n": types.GetSubscriptionDetailDocument,
|
||||
"\n mutation SyncSubscriptionFeedsIncremental($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsIncremental(filter: $filter) {\n id\n }\n }\n": types.SyncSubscriptionFeedsIncrementalDocument,
|
||||
"\n mutation SyncSubscriptionFeedsFull($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsFull(filter: $filter) {\n id\n }\n }\n": types.SyncSubscriptionFeedsFullDocument,
|
||||
"\n mutation SyncSubscriptionSources($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneSources(filter: $filter) {\n id\n }\n }\n": types.SyncSubscriptionSourcesDocument,
|
||||
@@ -90,6 +94,14 @@ export function gql(source: "\n query GetCredential3rdDetail($id: Int!) {\n
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function gql(source: "\n query CheckCredential3rdAvailable($id: Int!) {\n credential3rdCheckAvailable(filter: { id: $id }) {\n available\n }\n }\n"): (typeof documents)["\n query CheckCredential3rdAvailable($id: Int!) {\n credential3rdCheckAvailable(filter: { id: $id }) {\n available\n }\n }\n"];
|
||||
/**
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function gql(source: "\n mutation InsertFeed($data: FeedsInsertInput!) {\n feedsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n feedType\n token\n }\n }\n"): (typeof documents)["\n mutation InsertFeed($data: FeedsInsertInput!) {\n feedsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n feedType\n token\n }\n }\n"];
|
||||
/**
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function gql(source: "\n mutation DeleteFeed($filters: FeedsFilterInput!) {\n feedsDelete(filter: $filters)\n }\n"): (typeof documents)["\n mutation DeleteFeed($filters: FeedsFilterInput!) {\n feedsDelete(filter: $filters)\n }\n"];
|
||||
/**
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
@@ -109,7 +121,7 @@ export function gql(source: "\n mutation DeleteSubscriptions($filters: Subscr
|
||||
/**
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function gql(source: "\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n"): (typeof documents)["\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n"];
|
||||
export function gql(source: "\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n feed {\n nodes {\n id\n createdAt\n updatedAt\n token\n feedType\n feedSource\n }\n }\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n homepage\n }\n }\n }\n }\n}\n"): (typeof documents)["\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n feed {\n nodes {\n id\n createdAt\n updatedAt\n token\n feedType\n feedSource\n }\n }\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n homepage\n }\n }\n }\n }\n}\n"];
|
||||
/**
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
|
||||
@@ -21,6 +21,7 @@ export type Scalars = {
|
||||
|
||||
export type Bangumi = {
|
||||
__typename?: 'Bangumi';
|
||||
bangumiType: BangumiTypeEnum;
|
||||
createdAt: Scalars['String']['output'];
|
||||
displayName: Scalars['String']['output'];
|
||||
episode: EpisodesConnection;
|
||||
@@ -34,7 +35,6 @@ export type Bangumi = {
|
||||
originPosterLink?: Maybe<Scalars['String']['output']>;
|
||||
posterLink?: Maybe<Scalars['String']['output']>;
|
||||
rssLink?: Maybe<Scalars['String']['output']>;
|
||||
savePath?: Maybe<Scalars['String']['output']>;
|
||||
season: Scalars['Int']['output'];
|
||||
seasonRaw?: Maybe<Scalars['String']['output']>;
|
||||
subscriber?: Maybe<Subscribers>;
|
||||
@@ -67,6 +67,7 @@ export type BangumiSubscriptionBangumiArgs = {
|
||||
|
||||
export type BangumiBasic = {
|
||||
__typename?: 'BangumiBasic';
|
||||
bangumiType: BangumiTypeEnum;
|
||||
createdAt: Scalars['String']['output'];
|
||||
displayName: Scalars['String']['output'];
|
||||
fansub?: Maybe<Scalars['String']['output']>;
|
||||
@@ -79,7 +80,6 @@ export type BangumiBasic = {
|
||||
originPosterLink?: Maybe<Scalars['String']['output']>;
|
||||
posterLink?: Maybe<Scalars['String']['output']>;
|
||||
rssLink?: Maybe<Scalars['String']['output']>;
|
||||
savePath?: Maybe<Scalars['String']['output']>;
|
||||
season: Scalars['Int']['output'];
|
||||
seasonRaw?: Maybe<Scalars['String']['output']>;
|
||||
subscriberId: Scalars['Int']['output'];
|
||||
@@ -102,6 +102,7 @@ export type BangumiEdge = {
|
||||
|
||||
export type BangumiFilterInput = {
|
||||
and?: InputMaybe<Array<BangumiFilterInput>>;
|
||||
bangumiType?: InputMaybe<BangumiTypeEnumFilterInput>;
|
||||
createdAt?: InputMaybe<TextFilterInput>;
|
||||
displayName?: InputMaybe<StringFilterInput>;
|
||||
fansub?: InputMaybe<StringFilterInput>;
|
||||
@@ -114,14 +115,14 @@ export type BangumiFilterInput = {
|
||||
originPosterLink?: InputMaybe<StringFilterInput>;
|
||||
posterLink?: InputMaybe<StringFilterInput>;
|
||||
rssLink?: InputMaybe<StringFilterInput>;
|
||||
savePath?: InputMaybe<StringFilterInput>;
|
||||
season?: InputMaybe<IntegerFilterInput>;
|
||||
seasonRaw?: InputMaybe<StringFilterInput>;
|
||||
subscriberId?: InputMaybe<IntegerFilterInput>;
|
||||
subscriberId?: InputMaybe<SubscriberIdFilterInput>;
|
||||
updatedAt?: InputMaybe<TextFilterInput>;
|
||||
};
|
||||
|
||||
export type BangumiInsertInput = {
|
||||
bangumiType: BangumiTypeEnum;
|
||||
createdAt?: InputMaybe<Scalars['String']['input']>;
|
||||
displayName: Scalars['String']['input'];
|
||||
fansub?: InputMaybe<Scalars['String']['input']>;
|
||||
@@ -134,14 +135,14 @@ export type BangumiInsertInput = {
|
||||
originPosterLink?: InputMaybe<Scalars['String']['input']>;
|
||||
posterLink?: InputMaybe<Scalars['String']['input']>;
|
||||
rssLink?: InputMaybe<Scalars['String']['input']>;
|
||||
savePath?: InputMaybe<Scalars['String']['input']>;
|
||||
season: Scalars['Int']['input'];
|
||||
seasonRaw?: InputMaybe<Scalars['String']['input']>;
|
||||
subscriberId: Scalars['Int']['input'];
|
||||
subscriberId?: InputMaybe<Scalars['Int']['input']>;
|
||||
updatedAt?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
export type BangumiOrderInput = {
|
||||
bangumiType?: InputMaybe<OrderByEnum>;
|
||||
createdAt?: InputMaybe<OrderByEnum>;
|
||||
displayName?: InputMaybe<OrderByEnum>;
|
||||
fansub?: InputMaybe<OrderByEnum>;
|
||||
@@ -154,14 +155,32 @@ export type BangumiOrderInput = {
|
||||
originPosterLink?: InputMaybe<OrderByEnum>;
|
||||
posterLink?: InputMaybe<OrderByEnum>;
|
||||
rssLink?: InputMaybe<OrderByEnum>;
|
||||
savePath?: InputMaybe<OrderByEnum>;
|
||||
season?: InputMaybe<OrderByEnum>;
|
||||
seasonRaw?: InputMaybe<OrderByEnum>;
|
||||
subscriberId?: InputMaybe<OrderByEnum>;
|
||||
updatedAt?: InputMaybe<OrderByEnum>;
|
||||
};
|
||||
|
||||
export const BangumiTypeEnum = {
|
||||
Mikan: 'mikan'
|
||||
} as const;
|
||||
|
||||
export type BangumiTypeEnum = typeof BangumiTypeEnum[keyof typeof BangumiTypeEnum];
|
||||
export type BangumiTypeEnumFilterInput = {
|
||||
eq?: InputMaybe<BangumiTypeEnum>;
|
||||
gt?: InputMaybe<BangumiTypeEnum>;
|
||||
gte?: InputMaybe<BangumiTypeEnum>;
|
||||
is_in?: InputMaybe<Array<BangumiTypeEnum>>;
|
||||
is_not_in?: InputMaybe<Array<BangumiTypeEnum>>;
|
||||
is_not_null?: InputMaybe<BangumiTypeEnum>;
|
||||
is_null?: InputMaybe<BangumiTypeEnum>;
|
||||
lt?: InputMaybe<BangumiTypeEnum>;
|
||||
lte?: InputMaybe<BangumiTypeEnum>;
|
||||
ne?: InputMaybe<BangumiTypeEnum>;
|
||||
};
|
||||
|
||||
export type BangumiUpdateInput = {
|
||||
bangumiType?: InputMaybe<BangumiTypeEnum>;
|
||||
createdAt?: InputMaybe<Scalars['String']['input']>;
|
||||
displayName?: InputMaybe<Scalars['String']['input']>;
|
||||
fansub?: InputMaybe<Scalars['String']['input']>;
|
||||
@@ -174,10 +193,8 @@ export type BangumiUpdateInput = {
|
||||
originPosterLink?: InputMaybe<Scalars['String']['input']>;
|
||||
posterLink?: InputMaybe<Scalars['String']['input']>;
|
||||
rssLink?: InputMaybe<Scalars['String']['input']>;
|
||||
savePath?: InputMaybe<Scalars['String']['input']>;
|
||||
season?: InputMaybe<Scalars['Int']['input']>;
|
||||
seasonRaw?: InputMaybe<Scalars['String']['input']>;
|
||||
subscriberId?: InputMaybe<Scalars['Int']['input']>;
|
||||
updatedAt?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
@@ -613,6 +630,24 @@ export type DownloadsUpdateInput = {
|
||||
url?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
export const EpisodeTypeEnum = {
|
||||
Mikan: 'mikan'
|
||||
} as const;
|
||||
|
||||
export type EpisodeTypeEnum = typeof EpisodeTypeEnum[keyof typeof EpisodeTypeEnum];
|
||||
export type EpisodeTypeEnumFilterInput = {
|
||||
eq?: InputMaybe<EpisodeTypeEnum>;
|
||||
gt?: InputMaybe<EpisodeTypeEnum>;
|
||||
gte?: InputMaybe<EpisodeTypeEnum>;
|
||||
is_in?: InputMaybe<Array<EpisodeTypeEnum>>;
|
||||
is_not_in?: InputMaybe<Array<EpisodeTypeEnum>>;
|
||||
is_not_null?: InputMaybe<EpisodeTypeEnum>;
|
||||
is_null?: InputMaybe<EpisodeTypeEnum>;
|
||||
lt?: InputMaybe<EpisodeTypeEnum>;
|
||||
lte?: InputMaybe<EpisodeTypeEnum>;
|
||||
ne?: InputMaybe<EpisodeTypeEnum>;
|
||||
};
|
||||
|
||||
export type Episodes = {
|
||||
__typename?: 'Episodes';
|
||||
bangumi?: Maybe<Bangumi>;
|
||||
@@ -620,7 +655,12 @@ export type Episodes = {
|
||||
createdAt: Scalars['String']['output'];
|
||||
displayName: Scalars['String']['output'];
|
||||
download: SubscriptionsConnection;
|
||||
enclosureContentLength?: Maybe<Scalars['Int']['output']>;
|
||||
enclosureMagnetLink?: Maybe<Scalars['String']['output']>;
|
||||
enclosurePubDate?: Maybe<Scalars['String']['output']>;
|
||||
enclosureTorrentLink?: Maybe<Scalars['String']['output']>;
|
||||
episodeIndex: Scalars['Int']['output'];
|
||||
episodeType: EpisodeTypeEnum;
|
||||
fansub?: Maybe<Scalars['String']['output']>;
|
||||
homepage?: Maybe<Scalars['String']['output']>;
|
||||
id: Scalars['Int']['output'];
|
||||
@@ -629,7 +669,6 @@ export type Episodes = {
|
||||
originPosterLink?: Maybe<Scalars['String']['output']>;
|
||||
posterLink?: Maybe<Scalars['String']['output']>;
|
||||
resolution?: Maybe<Scalars['String']['output']>;
|
||||
savePath?: Maybe<Scalars['String']['output']>;
|
||||
season: Scalars['Int']['output'];
|
||||
seasonRaw?: Maybe<Scalars['String']['output']>;
|
||||
source?: Maybe<Scalars['String']['output']>;
|
||||
@@ -667,7 +706,12 @@ export type EpisodesBasic = {
|
||||
bangumiId: Scalars['Int']['output'];
|
||||
createdAt: Scalars['String']['output'];
|
||||
displayName: Scalars['String']['output'];
|
||||
enclosureContentLength?: Maybe<Scalars['Int']['output']>;
|
||||
enclosureMagnetLink?: Maybe<Scalars['String']['output']>;
|
||||
enclosurePubDate?: Maybe<Scalars['String']['output']>;
|
||||
enclosureTorrentLink?: Maybe<Scalars['String']['output']>;
|
||||
episodeIndex: Scalars['Int']['output'];
|
||||
episodeType: EpisodeTypeEnum;
|
||||
fansub?: Maybe<Scalars['String']['output']>;
|
||||
homepage?: Maybe<Scalars['String']['output']>;
|
||||
id: Scalars['Int']['output'];
|
||||
@@ -676,7 +720,6 @@ export type EpisodesBasic = {
|
||||
originPosterLink?: Maybe<Scalars['String']['output']>;
|
||||
posterLink?: Maybe<Scalars['String']['output']>;
|
||||
resolution?: Maybe<Scalars['String']['output']>;
|
||||
savePath?: Maybe<Scalars['String']['output']>;
|
||||
season: Scalars['Int']['output'];
|
||||
seasonRaw?: Maybe<Scalars['String']['output']>;
|
||||
source?: Maybe<Scalars['String']['output']>;
|
||||
@@ -704,7 +747,12 @@ export type EpisodesFilterInput = {
|
||||
bangumiId?: InputMaybe<IntegerFilterInput>;
|
||||
createdAt?: InputMaybe<TextFilterInput>;
|
||||
displayName?: InputMaybe<StringFilterInput>;
|
||||
enclosureContentLength?: InputMaybe<IntegerFilterInput>;
|
||||
enclosureMagnetLink?: InputMaybe<StringFilterInput>;
|
||||
enclosurePubDate?: InputMaybe<TextFilterInput>;
|
||||
enclosureTorrentLink?: InputMaybe<StringFilterInput>;
|
||||
episodeIndex?: InputMaybe<IntegerFilterInput>;
|
||||
episodeType?: InputMaybe<EpisodeTypeEnumFilterInput>;
|
||||
fansub?: InputMaybe<StringFilterInput>;
|
||||
homepage?: InputMaybe<StringFilterInput>;
|
||||
id?: InputMaybe<IntegerFilterInput>;
|
||||
@@ -714,7 +762,6 @@ export type EpisodesFilterInput = {
|
||||
originPosterLink?: InputMaybe<StringFilterInput>;
|
||||
posterLink?: InputMaybe<StringFilterInput>;
|
||||
resolution?: InputMaybe<StringFilterInput>;
|
||||
savePath?: InputMaybe<StringFilterInput>;
|
||||
season?: InputMaybe<IntegerFilterInput>;
|
||||
seasonRaw?: InputMaybe<StringFilterInput>;
|
||||
source?: InputMaybe<StringFilterInput>;
|
||||
@@ -727,7 +774,12 @@ export type EpisodesInsertInput = {
|
||||
bangumiId: Scalars['Int']['input'];
|
||||
createdAt?: InputMaybe<Scalars['String']['input']>;
|
||||
displayName: Scalars['String']['input'];
|
||||
enclosureContentLength?: InputMaybe<Scalars['Int']['input']>;
|
||||
enclosureMagnetLink?: InputMaybe<Scalars['String']['input']>;
|
||||
enclosurePubDate?: InputMaybe<Scalars['String']['input']>;
|
||||
enclosureTorrentLink?: InputMaybe<Scalars['String']['input']>;
|
||||
episodeIndex: Scalars['Int']['input'];
|
||||
episodeType: EpisodeTypeEnum;
|
||||
fansub?: InputMaybe<Scalars['String']['input']>;
|
||||
homepage?: InputMaybe<Scalars['String']['input']>;
|
||||
id?: InputMaybe<Scalars['Int']['input']>;
|
||||
@@ -736,7 +788,6 @@ export type EpisodesInsertInput = {
|
||||
originPosterLink?: InputMaybe<Scalars['String']['input']>;
|
||||
posterLink?: InputMaybe<Scalars['String']['input']>;
|
||||
resolution?: InputMaybe<Scalars['String']['input']>;
|
||||
savePath?: InputMaybe<Scalars['String']['input']>;
|
||||
season: Scalars['Int']['input'];
|
||||
seasonRaw?: InputMaybe<Scalars['String']['input']>;
|
||||
source?: InputMaybe<Scalars['String']['input']>;
|
||||
@@ -749,7 +800,12 @@ export type EpisodesOrderInput = {
|
||||
bangumiId?: InputMaybe<OrderByEnum>;
|
||||
createdAt?: InputMaybe<OrderByEnum>;
|
||||
displayName?: InputMaybe<OrderByEnum>;
|
||||
enclosureContentLength?: InputMaybe<OrderByEnum>;
|
||||
enclosureMagnetLink?: InputMaybe<OrderByEnum>;
|
||||
enclosurePubDate?: InputMaybe<OrderByEnum>;
|
||||
enclosureTorrentLink?: InputMaybe<OrderByEnum>;
|
||||
episodeIndex?: InputMaybe<OrderByEnum>;
|
||||
episodeType?: InputMaybe<OrderByEnum>;
|
||||
fansub?: InputMaybe<OrderByEnum>;
|
||||
homepage?: InputMaybe<OrderByEnum>;
|
||||
id?: InputMaybe<OrderByEnum>;
|
||||
@@ -758,7 +814,6 @@ export type EpisodesOrderInput = {
|
||||
originPosterLink?: InputMaybe<OrderByEnum>;
|
||||
posterLink?: InputMaybe<OrderByEnum>;
|
||||
resolution?: InputMaybe<OrderByEnum>;
|
||||
savePath?: InputMaybe<OrderByEnum>;
|
||||
season?: InputMaybe<OrderByEnum>;
|
||||
seasonRaw?: InputMaybe<OrderByEnum>;
|
||||
source?: InputMaybe<OrderByEnum>;
|
||||
@@ -771,7 +826,12 @@ export type EpisodesUpdateInput = {
|
||||
bangumiId?: InputMaybe<Scalars['Int']['input']>;
|
||||
createdAt?: InputMaybe<Scalars['String']['input']>;
|
||||
displayName?: InputMaybe<Scalars['String']['input']>;
|
||||
enclosureContentLength?: InputMaybe<Scalars['Int']['input']>;
|
||||
enclosureMagnetLink?: InputMaybe<Scalars['String']['input']>;
|
||||
enclosurePubDate?: InputMaybe<Scalars['String']['input']>;
|
||||
enclosureTorrentLink?: InputMaybe<Scalars['String']['input']>;
|
||||
episodeIndex?: InputMaybe<Scalars['Int']['input']>;
|
||||
episodeType?: InputMaybe<EpisodeTypeEnum>;
|
||||
fansub?: InputMaybe<Scalars['String']['input']>;
|
||||
homepage?: InputMaybe<Scalars['String']['input']>;
|
||||
id?: InputMaybe<Scalars['Int']['input']>;
|
||||
@@ -780,7 +840,6 @@ export type EpisodesUpdateInput = {
|
||||
originPosterLink?: InputMaybe<Scalars['String']['input']>;
|
||||
posterLink?: InputMaybe<Scalars['String']['input']>;
|
||||
resolution?: InputMaybe<Scalars['String']['input']>;
|
||||
savePath?: InputMaybe<Scalars['String']['input']>;
|
||||
season?: InputMaybe<Scalars['Int']['input']>;
|
||||
seasonRaw?: InputMaybe<Scalars['String']['input']>;
|
||||
source?: InputMaybe<Scalars['String']['input']>;
|
||||
@@ -788,6 +847,127 @@ export type EpisodesUpdateInput = {
|
||||
updatedAt?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
export const FeedSourceEnum = {
|
||||
SubscriptionEpisode: 'subscription_episode'
|
||||
} as const;
|
||||
|
||||
export type FeedSourceEnum = typeof FeedSourceEnum[keyof typeof FeedSourceEnum];
|
||||
export type FeedSourceEnumFilterInput = {
|
||||
eq?: InputMaybe<FeedSourceEnum>;
|
||||
gt?: InputMaybe<FeedSourceEnum>;
|
||||
gte?: InputMaybe<FeedSourceEnum>;
|
||||
is_in?: InputMaybe<Array<FeedSourceEnum>>;
|
||||
is_not_in?: InputMaybe<Array<FeedSourceEnum>>;
|
||||
is_not_null?: InputMaybe<FeedSourceEnum>;
|
||||
is_null?: InputMaybe<FeedSourceEnum>;
|
||||
lt?: InputMaybe<FeedSourceEnum>;
|
||||
lte?: InputMaybe<FeedSourceEnum>;
|
||||
ne?: InputMaybe<FeedSourceEnum>;
|
||||
};
|
||||
|
||||
export const FeedTypeEnum = {
|
||||
Rss: 'rss'
|
||||
} as const;
|
||||
|
||||
export type FeedTypeEnum = typeof FeedTypeEnum[keyof typeof FeedTypeEnum];
|
||||
export type FeedTypeEnumFilterInput = {
|
||||
eq?: InputMaybe<FeedTypeEnum>;
|
||||
gt?: InputMaybe<FeedTypeEnum>;
|
||||
gte?: InputMaybe<FeedTypeEnum>;
|
||||
is_in?: InputMaybe<Array<FeedTypeEnum>>;
|
||||
is_not_in?: InputMaybe<Array<FeedTypeEnum>>;
|
||||
is_not_null?: InputMaybe<FeedTypeEnum>;
|
||||
is_null?: InputMaybe<FeedTypeEnum>;
|
||||
lt?: InputMaybe<FeedTypeEnum>;
|
||||
lte?: InputMaybe<FeedTypeEnum>;
|
||||
ne?: InputMaybe<FeedTypeEnum>;
|
||||
};
|
||||
|
||||
export type Feeds = {
|
||||
__typename?: 'Feeds';
|
||||
createdAt: Scalars['String']['output'];
|
||||
feedSource: FeedSourceEnum;
|
||||
feedType: FeedTypeEnum;
|
||||
id: Scalars['Int']['output'];
|
||||
subscriber?: Maybe<Subscribers>;
|
||||
subscriberId?: Maybe<Scalars['Int']['output']>;
|
||||
subscription?: Maybe<Subscriptions>;
|
||||
subscriptionId?: Maybe<Scalars['Int']['output']>;
|
||||
token: Scalars['String']['output'];
|
||||
updatedAt: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type FeedsBasic = {
|
||||
__typename?: 'FeedsBasic';
|
||||
createdAt: Scalars['String']['output'];
|
||||
feedSource: FeedSourceEnum;
|
||||
feedType: FeedTypeEnum;
|
||||
id: Scalars['Int']['output'];
|
||||
subscriberId?: Maybe<Scalars['Int']['output']>;
|
||||
subscriptionId?: Maybe<Scalars['Int']['output']>;
|
||||
token: Scalars['String']['output'];
|
||||
updatedAt: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type FeedsConnection = {
|
||||
__typename?: 'FeedsConnection';
|
||||
edges: Array<FeedsEdge>;
|
||||
nodes: Array<Feeds>;
|
||||
pageInfo: PageInfo;
|
||||
paginationInfo?: Maybe<PaginationInfo>;
|
||||
};
|
||||
|
||||
export type FeedsEdge = {
|
||||
__typename?: 'FeedsEdge';
|
||||
cursor: Scalars['String']['output'];
|
||||
node: Feeds;
|
||||
};
|
||||
|
||||
export type FeedsFilterInput = {
|
||||
and?: InputMaybe<Array<FeedsFilterInput>>;
|
||||
createdAt?: InputMaybe<TextFilterInput>;
|
||||
feedSource?: InputMaybe<FeedSourceEnumFilterInput>;
|
||||
feedType?: InputMaybe<FeedTypeEnumFilterInput>;
|
||||
id?: InputMaybe<IntegerFilterInput>;
|
||||
or?: InputMaybe<Array<FeedsFilterInput>>;
|
||||
subscriberId?: InputMaybe<SubscriberIdFilterInput>;
|
||||
subscriptionId?: InputMaybe<IntegerFilterInput>;
|
||||
token?: InputMaybe<StringFilterInput>;
|
||||
updatedAt?: InputMaybe<TextFilterInput>;
|
||||
};
|
||||
|
||||
export type FeedsInsertInput = {
|
||||
createdAt?: InputMaybe<Scalars['String']['input']>;
|
||||
feedSource: FeedSourceEnum;
|
||||
feedType: FeedTypeEnum;
|
||||
id?: InputMaybe<Scalars['Int']['input']>;
|
||||
subscriberId?: InputMaybe<Scalars['Int']['input']>;
|
||||
subscriptionId?: InputMaybe<Scalars['Int']['input']>;
|
||||
token?: InputMaybe<Scalars['String']['input']>;
|
||||
updatedAt?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
export type FeedsOrderInput = {
|
||||
createdAt?: InputMaybe<OrderByEnum>;
|
||||
feedSource?: InputMaybe<OrderByEnum>;
|
||||
feedType?: InputMaybe<OrderByEnum>;
|
||||
id?: InputMaybe<OrderByEnum>;
|
||||
subscriberId?: InputMaybe<OrderByEnum>;
|
||||
subscriptionId?: InputMaybe<OrderByEnum>;
|
||||
token?: InputMaybe<OrderByEnum>;
|
||||
updatedAt?: InputMaybe<OrderByEnum>;
|
||||
};
|
||||
|
||||
export type FeedsUpdateInput = {
|
||||
createdAt?: InputMaybe<Scalars['String']['input']>;
|
||||
feedSource?: InputMaybe<FeedSourceEnum>;
|
||||
feedType?: InputMaybe<FeedTypeEnum>;
|
||||
id?: InputMaybe<Scalars['Int']['input']>;
|
||||
subscriptionId?: InputMaybe<Scalars['Int']['input']>;
|
||||
token?: InputMaybe<Scalars['String']['input']>;
|
||||
updatedAt?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
export type IntegerFilterInput = {
|
||||
between?: InputMaybe<Array<Scalars['Int']['input']>>;
|
||||
eq?: InputMaybe<Scalars['Int']['input']>;
|
||||
@@ -826,6 +1006,10 @@ export type Mutation = {
|
||||
episodesCreateOne: EpisodesBasic;
|
||||
episodesDelete: Scalars['Int']['output'];
|
||||
episodesUpdate: Array<EpisodesBasic>;
|
||||
feedsCreateBatch: Array<FeedsBasic>;
|
||||
feedsCreateOne: FeedsBasic;
|
||||
feedsDelete: Scalars['Int']['output'];
|
||||
feedsUpdate: Array<FeedsBasic>;
|
||||
subscriberTasksDelete: Scalars['Int']['output'];
|
||||
subscriberTasksRetryOne: SubscriberTasks;
|
||||
subscriptionBangumiCreateBatch: Array<SubscriptionBangumiBasic>;
|
||||
@@ -951,6 +1135,27 @@ export type MutationEpisodesUpdateArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type MutationFeedsCreateBatchArgs = {
|
||||
data: Array<FeedsInsertInput>;
|
||||
};
|
||||
|
||||
|
||||
export type MutationFeedsCreateOneArgs = {
|
||||
data: FeedsInsertInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationFeedsDeleteArgs = {
|
||||
filter?: InputMaybe<FeedsFilterInput>;
|
||||
};
|
||||
|
||||
|
||||
export type MutationFeedsUpdateArgs = {
|
||||
data: FeedsUpdateInput;
|
||||
filter?: InputMaybe<FeedsFilterInput>;
|
||||
};
|
||||
|
||||
|
||||
export type MutationSubscriberTasksDeleteArgs = {
|
||||
filter?: InputMaybe<SubscriberTasksFilterInput>;
|
||||
};
|
||||
@@ -1085,6 +1290,7 @@ export type Query = {
|
||||
downloaders: DownloadersConnection;
|
||||
downloads: DownloadsConnection;
|
||||
episodes: EpisodesConnection;
|
||||
feeds: FeedsConnection;
|
||||
subscriberTasks: SubscriberTasksConnection;
|
||||
subscribers: SubscribersConnection;
|
||||
subscriptionBangumi: SubscriptionBangumiConnection;
|
||||
@@ -1138,6 +1344,13 @@ export type QueryEpisodesArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type QueryFeedsArgs = {
|
||||
filters?: InputMaybe<FeedsFilterInput>;
|
||||
orderBy?: InputMaybe<FeedsOrderInput>;
|
||||
pagination?: InputMaybe<PaginationInput>;
|
||||
};
|
||||
|
||||
|
||||
export type QuerySubscriberTasksArgs = {
|
||||
filters?: InputMaybe<SubscriberTasksFilterInput>;
|
||||
orderBy?: InputMaybe<SubscriberTasksOrderInput>;
|
||||
@@ -1288,7 +1501,9 @@ export type Subscribers = {
|
||||
displayName: Scalars['String']['output'];
|
||||
downloader: DownloadersConnection;
|
||||
episode: EpisodesConnection;
|
||||
feed: FeedsConnection;
|
||||
id: Scalars['Int']['output'];
|
||||
subscriberTask: SubscriberTasksConnection;
|
||||
subscription: SubscriptionsConnection;
|
||||
updatedAt: Scalars['String']['output'];
|
||||
};
|
||||
@@ -1322,6 +1537,20 @@ export type SubscribersEpisodeArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type SubscribersFeedArgs = {
|
||||
filters?: InputMaybe<FeedsFilterInput>;
|
||||
orderBy?: InputMaybe<FeedsOrderInput>;
|
||||
pagination?: InputMaybe<PaginationInput>;
|
||||
};
|
||||
|
||||
|
||||
export type SubscribersSubscriberTaskArgs = {
|
||||
filters?: InputMaybe<SubscriberTasksFilterInput>;
|
||||
orderBy?: InputMaybe<SubscriberTasksOrderInput>;
|
||||
pagination?: InputMaybe<PaginationInput>;
|
||||
};
|
||||
|
||||
|
||||
export type SubscribersSubscriptionArgs = {
|
||||
filters?: InputMaybe<SubscriptionsFilterInput>;
|
||||
orderBy?: InputMaybe<SubscriptionsOrderInput>;
|
||||
@@ -1511,6 +1740,7 @@ export type Subscriptions = {
|
||||
displayName: Scalars['String']['output'];
|
||||
enabled: Scalars['Boolean']['output'];
|
||||
episode: EpisodesConnection;
|
||||
feed: FeedsConnection;
|
||||
id: Scalars['Int']['output'];
|
||||
sourceUrl: Scalars['String']['output'];
|
||||
subscriber?: Maybe<Subscribers>;
|
||||
@@ -1535,6 +1765,13 @@ export type SubscriptionsEpisodeArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type SubscriptionsFeedArgs = {
|
||||
filters?: InputMaybe<FeedsFilterInput>;
|
||||
orderBy?: InputMaybe<FeedsOrderInput>;
|
||||
pagination?: InputMaybe<PaginationInput>;
|
||||
};
|
||||
|
||||
|
||||
export type SubscriptionsSubscriptionBangumiArgs = {
|
||||
filters?: InputMaybe<SubscriptionBangumiFilterInput>;
|
||||
orderBy?: InputMaybe<SubscriptionBangumiOrderInput>;
|
||||
@@ -1684,6 +1921,20 @@ export type CheckCredential3rdAvailableQueryVariables = Exact<{
|
||||
|
||||
export type CheckCredential3rdAvailableQuery = { __typename?: 'Query', credential3rdCheckAvailable: { __typename?: 'Credential3rdCheckAvailableInfo', available: boolean } };
|
||||
|
||||
export type InsertFeedMutationVariables = Exact<{
|
||||
data: FeedsInsertInput;
|
||||
}>;
|
||||
|
||||
|
||||
export type InsertFeedMutation = { __typename?: 'Mutation', feedsCreateOne: { __typename?: 'FeedsBasic', id: number, createdAt: string, updatedAt: string, feedType: FeedTypeEnum, token: string } };
|
||||
|
||||
export type DeleteFeedMutationVariables = Exact<{
|
||||
filters: FeedsFilterInput;
|
||||
}>;
|
||||
|
||||
|
||||
export type DeleteFeedMutation = { __typename?: 'Mutation', feedsDelete: number };
|
||||
|
||||
export type GetSubscriptionsQueryVariables = Exact<{
|
||||
filters: SubscriptionsFilterInput;
|
||||
orderBy: SubscriptionsOrderInput;
|
||||
@@ -1720,7 +1971,7 @@ export type GetSubscriptionDetailQueryVariables = Exact<{
|
||||
}>;
|
||||
|
||||
|
||||
export type GetSubscriptionDetailQuery = { __typename?: 'Query', subscriptions: { __typename?: 'SubscriptionsConnection', nodes: Array<{ __typename?: 'Subscriptions', id: number, displayName: string, createdAt: string, updatedAt: string, category: SubscriptionCategoryEnum, sourceUrl: string, enabled: boolean, credential3rd?: { __typename?: 'Credential3rd', id: number, username?: string | null } | null, bangumi: { __typename?: 'BangumiConnection', nodes: Array<{ __typename?: 'Bangumi', createdAt: string, updatedAt: string, id: number, mikanBangumiId?: string | null, displayName: string, season: number, seasonRaw?: string | null, fansub?: string | null, mikanFansubId?: string | null, rssLink?: string | null, posterLink?: string | null, savePath?: string | null, homepage?: string | null }> } }> } };
|
||||
export type GetSubscriptionDetailQuery = { __typename?: 'Query', subscriptions: { __typename?: 'SubscriptionsConnection', nodes: Array<{ __typename?: 'Subscriptions', id: number, displayName: string, createdAt: string, updatedAt: string, category: SubscriptionCategoryEnum, sourceUrl: string, enabled: boolean, feed: { __typename?: 'FeedsConnection', nodes: Array<{ __typename?: 'Feeds', id: number, createdAt: string, updatedAt: string, token: string, feedType: FeedTypeEnum, feedSource: FeedSourceEnum }> }, credential3rd?: { __typename?: 'Credential3rd', id: number, username?: string | null } | null, bangumi: { __typename?: 'BangumiConnection', nodes: Array<{ __typename?: 'Bangumi', createdAt: string, updatedAt: string, id: number, mikanBangumiId?: string | null, displayName: string, season: number, seasonRaw?: string | null, fansub?: string | null, mikanFansubId?: string | null, rssLink?: string | null, posterLink?: string | null, homepage?: string | null }> } }> } };
|
||||
|
||||
export type SyncSubscriptionFeedsIncrementalMutationVariables = Exact<{
|
||||
filter: SubscriptionsFilterInput;
|
||||
@@ -1773,11 +2024,13 @@ export const UpdateCredential3rdDocument = {"kind":"Document","definitions":[{"k
|
||||
export const DeleteCredential3rdDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"DeleteCredential3rd"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Credential3rdFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"credential3rdDelete"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}]}]}}]} as unknown as DocumentNode<DeleteCredential3rdMutation, DeleteCredential3rdMutationVariables>;
|
||||
export const GetCredential3rdDetailDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetCredential3rdDetail"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"credential3rd"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"eq"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"cookies"}},{"kind":"Field","name":{"kind":"Name","value":"username"}},{"kind":"Field","name":{"kind":"Name","value":"password"}},{"kind":"Field","name":{"kind":"Name","value":"userAgent"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"credentialType"}}]}}]}}]}}]} as unknown as DocumentNode<GetCredential3rdDetailQuery, GetCredential3rdDetailQueryVariables>;
|
||||
export const CheckCredential3rdAvailableDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"CheckCredential3rdAvailable"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"credential3rdCheckAvailable"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"available"}}]}}]}}]} as unknown as DocumentNode<CheckCredential3rdAvailableQuery, CheckCredential3rdAvailableQueryVariables>;
|
||||
export const InsertFeedDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"InsertFeed"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"data"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"FeedsInsertInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"feedsCreateOne"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"data"},"value":{"kind":"Variable","name":{"kind":"Name","value":"data"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"feedType"}},{"kind":"Field","name":{"kind":"Name","value":"token"}}]}}]}}]} as unknown as DocumentNode<InsertFeedMutation, InsertFeedMutationVariables>;
|
||||
export const DeleteFeedDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"DeleteFeed"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"FeedsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"feedsDelete"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}]}]}}]} as unknown as DocumentNode<DeleteFeedMutation, DeleteFeedMutationVariables>;
|
||||
export const GetSubscriptionsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetSubscriptions"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsOrderInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"pagination"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PaginationInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptions"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"pagination"},"value":{"kind":"Variable","name":{"kind":"Name","value":"pagination"}}},{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}},{"kind":"Argument","name":{"kind":"Name","value":"orderBy"},"value":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"credentialId"}}]}},{"kind":"Field","name":{"kind":"Name","value":"paginationInfo"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"total"}},{"kind":"Field","name":{"kind":"Name","value":"pages"}}]}}]}}]}}]} as unknown as DocumentNode<GetSubscriptionsQuery, GetSubscriptionsQueryVariables>;
|
||||
export const InsertSubscriptionDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"InsertSubscription"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"data"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsInsertInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsCreateOne"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"data"},"value":{"kind":"Variable","name":{"kind":"Name","value":"data"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"credentialId"}}]}}]}}]} as unknown as DocumentNode<InsertSubscriptionMutation, InsertSubscriptionMutationVariables>;
|
||||
export const UpdateSubscriptionsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"UpdateSubscriptions"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"data"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsUpdateInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsUpdate"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"data"},"value":{"kind":"Variable","name":{"kind":"Name","value":"data"}}},{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}}]}}]}}]} as unknown as DocumentNode<UpdateSubscriptionsMutation, UpdateSubscriptionsMutationVariables>;
|
||||
export const DeleteSubscriptionsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"DeleteSubscriptions"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsDelete"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}]}]}}]} as unknown as DocumentNode<DeleteSubscriptionsMutation, DeleteSubscriptionsMutationVariables>;
|
||||
export const GetSubscriptionDetailDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetSubscriptionDetail"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptions"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"eq"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"credential3rd"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"username"}}]}},{"kind":"Field","name":{"kind":"Name","value":"bangumi"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"mikanBangumiId"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"season"}},{"kind":"Field","name":{"kind":"Name","value":"seasonRaw"}},{"kind":"Field","name":{"kind":"Name","value":"fansub"}},{"kind":"Field","name":{"kind":"Name","value":"mikanFansubId"}},{"kind":"Field","name":{"kind":"Name","value":"rssLink"}},{"kind":"Field","name":{"kind":"Name","value":"posterLink"}},{"kind":"Field","name":{"kind":"Name","value":"savePath"}},{"kind":"Field","name":{"kind":"Name","value":"homepage"}}]}}]}}]}}]}}]}}]} as unknown as DocumentNode<GetSubscriptionDetailQuery, GetSubscriptionDetailQueryVariables>;
|
||||
export const GetSubscriptionDetailDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetSubscriptionDetail"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptions"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"eq"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"feed"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"token"}},{"kind":"Field","name":{"kind":"Name","value":"feedType"}},{"kind":"Field","name":{"kind":"Name","value":"feedSource"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"credential3rd"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"username"}}]}},{"kind":"Field","name":{"kind":"Name","value":"bangumi"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"mikanBangumiId"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"season"}},{"kind":"Field","name":{"kind":"Name","value":"seasonRaw"}},{"kind":"Field","name":{"kind":"Name","value":"fansub"}},{"kind":"Field","name":{"kind":"Name","value":"mikanFansubId"}},{"kind":"Field","name":{"kind":"Name","value":"rssLink"}},{"kind":"Field","name":{"kind":"Name","value":"posterLink"}},{"kind":"Field","name":{"kind":"Name","value":"homepage"}}]}}]}}]}}]}}]}}]} as unknown as DocumentNode<GetSubscriptionDetailQuery, GetSubscriptionDetailQueryVariables>;
export const SyncSubscriptionFeedsIncrementalDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionFeedsIncremental"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filter"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsSyncOneFeedsIncremental"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filter"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionFeedsIncrementalMutation, SyncSubscriptionFeedsIncrementalMutationVariables>;
export const SyncSubscriptionFeedsFullDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionFeedsFull"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filter"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsSyncOneFeedsFull"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filter"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionFeedsFullMutation, SyncSubscriptionFeedsFullMutationVariables>;
export const SyncSubscriptionSourcesDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionSources"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filter"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsSyncOneSources"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filter"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionSourcesMutation, SyncSubscriptionSourcesMutationVariables>;
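The three new mutation documents above wrap subscriptionsSyncOneFeedsIncremental, subscriptionsSyncOneFeedsFull, and subscriptionsSyncOneSources; each takes a required SubscriptionsFilterInput and selects only id. A minimal usage sketch with Apollo Client, assuming the documents are exported from the same generated module as the types and that SubscriptionsFilterInput accepts the same { id: { eq } } shape used by GetSubscriptionDetail:

import { useMutation } from '@apollo/client';
import { SyncSubscriptionFeedsIncrementalDocument } from '@/infra/graphql/gql/graphql';

// Because the constant is a TypedDocumentNode, useMutation infers the result
// and variable types declared above without extra generics.
function useSyncSubscriptionFeeds(subscriptionId: number) {
  const [syncFeeds, { loading }] = useMutation(
    SyncSubscriptionFeedsIncrementalDocument
  );

  // Assumed filter shape; adjust to the actual SubscriptionsFilterInput fields.
  const sync = () =>
    syncFeeds({ variables: { filter: { id: { eq: subscriptionId } } } });

  return { sync, loading };
}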
@@ -14,6 +14,7 @@ import { Img } from '@/components/ui/img';
import { Label } from '@/components/ui/label';
import { QueryErrorView } from '@/components/ui/query-error-view';
import { Separator } from '@/components/ui/separator';
import { DELETE_FEED, INSERT_FEED } from '@/domains/recorder/schema/feeds';
import { GET_SUBSCRIPTION_DETAIL } from '@/domains/recorder/schema/subscriptions';
import { SubscriptionService } from '@/domains/recorder/services/subscription.service';
import { useInject } from '@/infra/di/inject';
@@ -22,10 +23,16 @@ import {
  getApolloQueryError,
} from '@/infra/errors/apollo';
import {
  type DeleteFeedMutation,
  type DeleteFeedMutationVariables,
  FeedSourceEnum,
  FeedTypeEnum,
  type GetSubscriptionDetailQuery,
  type InsertFeedMutation,
  type InsertFeedMutationVariables,
  SubscriptionCategoryEnum,
} from '@/infra/graphql/gql/graphql';
import { useQuery } from '@apollo/client';
import { useMutation, useQuery } from '@apollo/client';
import {
  createFileRoute,
  useCanGoBack,
@@ -38,7 +45,9 @@ import {
  Edit,
  ExternalLink,
  ListIcon,
  PlusIcon,
  RefreshCcwIcon,
  Trash2,
} from 'lucide-react';
import { useMemo } from 'react';
import { toast } from 'sonner';
@@ -91,6 +100,50 @@ function SubscriptionDetailRouteComponent() {
    });
  };

  const [insertFeed] = useMutation<
    InsertFeedMutation,
    InsertFeedMutationVariables
  >(INSERT_FEED, {
    onCompleted: async () => {
      const result = await refetch();
      const error = getApolloQueryError(result);
      if (error) {
        toast.error('Failed to add feed', {
          description: apolloErrorToMessage(error),
        });
        return;
      }
      toast.success('Feed added');
    },
    onError: (error) => {
      toast.error('Failed to add feed', {
        description: apolloErrorToMessage(error),
      });
    },
  });

  const [deleteFeed] = useMutation<
    DeleteFeedMutation,
    DeleteFeedMutationVariables
  >(DELETE_FEED, {
    onCompleted: async () => {
      const result = await refetch();
      const error = getApolloQueryError(result);
      if (error) {
        toast.error('Failed to delete feed', {
          description: apolloErrorToMessage(error),
        });
        return;
      }
      toast.success('Feed deleted');
    },
    onError: (error) => {
      toast.error('Failed to delete feed', {
        description: apolloErrorToMessage(error),
      });
    },
  });

  const subscription = data?.subscriptions?.nodes?.[0];

  const sourceUrlMeta = useMemo(
@@ -314,6 +367,85 @@ function SubscriptionDetailRouteComponent() {
            </div>
          </div>

          <Separator />
          <div className="space-y-4">
            <div className="flex items-center justify-between">
              <Label className="font-medium text-sm">Associated Feeds</Label>
              <Button
                variant="outline"
                size="sm"
                onClick={() =>
                  insertFeed({
                    variables: {
                      data: {
                        subscriptionId: Number.parseInt(id),
                        feedType: FeedTypeEnum.Rss,
                        feedSource: FeedSourceEnum.SubscriptionEpisode,
                      },
                    },
                  })
                }
              >
                <PlusIcon className="h-4 w-4" />
                Add Feed
              </Button>
            </div>

            <div className="grid grid-cols-1 gap-3 sm:grid-cols-2 lg:grid-cols-3">
              {subscription.feed?.nodes &&
              subscription.feed.nodes.length > 0 ? (
                subscription.feed.nodes.map((feed) => (
                  <Card
                    key={feed.id}
                    className="group relative cursor-pointer p-4 transition-colors hover:bg-accent/50"
                    onClick={() => {
                      window.open(`/api/feeds/rss/${feed.token}`, '_blank');
                    }}
                  >
                    <div className="flex flex-col space-y-2">
                      <div className="flex items-center justify-between">
                        <Label className="whitespace-nowrap font-medium text-sm capitalize">
                          <span>{feed.feedType} Feed</span>
                        </Label>
                        <Button
                          variant="ghost"
                          size="sm"
                          className="h-6 w-6 p-0 opacity-0 transition-opacity group-hover:opacity-100"
                          onClick={(e) => {
                            e.stopPropagation();
                            deleteFeed({
                              variables: {
                                filters: {
                                  id: {
                                    eq: feed.id,
                                  },
                                },
                              },
                            });
                          }}
                        >
                          <Trash2 className="h-3 w-3 text-destructive" />
                        </Button>
                      </div>

                      <code className="break-all rounded bg-muted px-2 py-1 font-mono text-xs">
                        {feed.token}
                      </code>

                      <div className="text-muted-foreground text-xs">
                        {format(new Date(feed.createdAt), 'MM-dd HH:mm')}
                      </div>
                    </div>
                  </Card>
                ))
              ) : (
                <div className="col-span-full py-8 text-center text-muted-foreground">
                  No associated feeds now
                </div>
              )}
            </div>
          </div>

          {subscription.bangumi?.nodes &&
            subscription.bangumi.nodes.length > 0 && (
              <>

7
justfile
@@ -4,13 +4,16 @@ set dotenv-load := true
prepare-dev:
    cargo install cargo-binstall
    cargo binstall sea-orm-cli cargo-llvm-cov cargo-nextest
    # <package-manager> install watchexec just zellij
    # <package-manager> install watchexec just zellij nasm libjxl

prepare-dev-testcontainers:
    docker pull linuxserver/qbittorrent:latest
    docker pull ghcr.io/dumtruck/konobangu-testing-torrents:latest
    docker pull postgres:17-alpine

dev-optimize-images:
    npx -y zx apps/recorder/examples/optimize_image.mjs

dev-webui:
    pnpm run --filter=webui dev

@@ -19,7 +22,7 @@ dev-proxy:
    pnpm run --parallel --filter=proxy dev

dev-recorder:
    watchexec -r -e rs,toml,yaml,json,env -- cargo run -p recorder --bin recorder_cli -- --environment development
    watchexec -r -e rs,toml,yaml,json,env -- cargo run -p recorder --bin recorder_cli -- --environment=development --graceful-shutdown=false

dev-recorder-migrate-down:
    cargo run -p recorder --bin migrate_down -- --environment development

@@ -19,6 +19,9 @@
  "engines": {
    "node": ">=22"
  },
  "dependencies": {
    "es-toolkit": "^1.39.3"
  },
  "devDependencies": {
    "@biomejs/biome": "1.9.4",
    "@types/node": "^24.0.1",

7
pnpm-lock.yaml
generated
@@ -10,6 +10,10 @@ overrides:
importers:

  .:
    dependencies:
      es-toolkit:
        specifier: ^1.39.3
        version: 1.39.3
    devDependencies:
      '@biomejs/biome':
        specifier: 1.9.4
@@ -209,9 +213,6 @@ importers:
      embla-carousel-react:
        specifier: ^8.6.0
        version: 8.6.0(react@19.1.0)
      es-toolkit:
        specifier: ^1.39.3
        version: 1.39.3
      graphiql:
        specifier: ^4.1.2
        version: 4.1.2(@codemirror/language@6.11.1)(@emotion/is-prop-valid@0.8.8)(@types/node@24.0.1)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(graphql-ws@6.0.4(graphql@16.11.0)(ws@8.18.2(bufferutil@4.0.9)(utf-8-validate@6.0.5)))(graphql@16.11.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(use-sync-external-store@1.5.0(react@19.1.0))