Compare commits

...

6 Commits

59 changed files with 3668 additions and 555 deletions

995
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -27,3 +27,5 @@ node_modules
dist/
temp/*
!temp/.gitkeep
tests/resources/mikan/classic_episodes/*/*
!tests/resources/mikan/classic_episodes/parquet/tiny.parquet

View File

@ -2,8 +2,20 @@
name = "recorder"
version = "0.1.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = ["jxl"]
playground = ["dep:inquire", "dep:color-eyre", "dep:polars"]
testcontainers = [
"dep:testcontainers",
"dep:testcontainers-modules",
"dep:testcontainers-ext",
"downloader/testcontainers",
"testcontainers-modules/postgres",
]
jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
[lib]
name = "recorder"
path = "src/lib.rs"
@ -13,17 +25,25 @@ name = "recorder_cli"
path = "src/bin/main.rs"
required-features = []
[features]
default = ["jxl"]
playground = ["dep:inquire", "dep:color-eyre"]
testcontainers = [
"dep:testcontainers",
"dep:testcontainers-modules",
"dep:testcontainers-ext",
"downloader/testcontainers",
"testcontainers-modules/postgres",
]
jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
[[example]]
name = "mikan_collect_classic_eps"
path = "examples/mikan_collect_classic_eps.rs"
required-features = ["playground"]
[[example]]
name = "mikan_doppel_season_subscription"
path = "examples/mikan_doppel_season_subscription.rs"
required-features = ["playground"]
[[example]]
name = "mikan_doppel_subscriber_subscription"
path = "examples/mikan_doppel_subscriber_subscription.rs"
required-features = ["playground"]
[[example]]
name = "playground"
path = "examples/playground.rs"
required-features = ["playground"]
[dependencies]
downloader = { workspace = true }
@ -93,7 +113,7 @@ fancy-regex = "0.14"
lightningcss = "1.0.0-alpha.66"
html-escape = "0.2.13"
opendal = { version = "0.53", features = ["default", "services-fs"] }
scraper = "0.23"
scraper = "0.23.1"
async-graphql = { version = "7", features = ["dynamic-schema"] }
async-graphql-axum = "7"
seaography = { version = "1.1", features = [
@ -134,11 +154,15 @@ icu = "2.0.0"
tracing-tree = "0.4.0"
num_cpus = "1.17.0"
headers-accept = "0.1.4"
polars = { version = "0.49.1", features = [
"parquet",
"lazy",
"diagonal_concat",
], optional = true }
[dev-dependencies]
inquire = { workspace = true }
color-eyre = { workspace = true }
serial_test = "3"
insta = { version = "1", features = ["redactions", "toml", "filters"] }
rstest = "0.25"

View File

@ -0,0 +1,584 @@
use std::collections::HashSet;
use chrono::{DateTime, Duration, FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
use fetch::{HttpClientConfig, fetch_html};
use itertools::Itertools;
use lazy_static::lazy_static;
use nom::{
IResult, Parser,
branch::alt,
bytes::complete::{tag, take, take_till1},
character::complete::space1,
combinator::map,
};
use recorder::{
errors::{RecorderError, RecorderResult},
extract::{
html::extract_inner_text_from_element_ref,
mikan::{MikanClient, MikanConfig, MikanEpisodeHash, MikanFansubHash},
},
};
use regex::Regex;
use scraper::{ElementRef, Html, Selector};
use snafu::FromString;
use url::Url;
lazy_static! {
    // Root folder for scraped fixtures. In test/debug/playground builds it is
    // anchored at the crate root via CARGO_MANIFEST_DIR so the example can be
    // run from anywhere; otherwise it falls back to a path relative to the
    // current working directory.
    static ref TEST_FOLDER: std::path::PathBuf =
        if cfg!(any(test, debug_assertions, feature = "playground")) {
            std::path::PathBuf::from(format!(
                "{}/tests/resources/mikan/classic_episodes",
                env!("CARGO_MANIFEST_DIR")
            ))
        } else {
            std::path::PathBuf::from("tests/resources/mikan/classic_episodes")
        };
}
lazy_static! {
    // Captures the total page count from the inline bootpag() pagination
    // script embedded in the page, e.g.
    // `$('.classic-view-pagination2').bootpag({ total: 42, ... })` -> "42".
    static ref TOTAL_PAGE_REGEX: Regex =
        Regex::new(r#"\$\(\'\.classic-view-pagination2\'\)\.bootpag\(\{\s*total:\s*(\d+)"#)
            .unwrap();
}
/// One row of Mikan's classic episode listing table, scraped from HTML.
pub struct MikanClassicEpisodeTableRow {
    // Synthetic id: `rev_id * 1000 + row index` (see `from_element_ref`).
    pub id: i32,
    // Publish time, converted from Mikan's UTC+8 display time to UTC.
    pub publish_at: DateTime<Utc>,
    // Fansub group id parsed from the fansub homepage link, when present.
    pub mikan_fansub_id: Option<String>,
    // Human-readable fansub name, when the fansub link is present.
    pub fansub_name: Option<String>,
    // Episode id parsed from the episode homepage link.
    pub mikan_episode_id: String,
    // Raw release title as shown in the table.
    pub original_name: String,
    // Magnet URI from the copy-to-clipboard button, when present.
    pub magnet_link: Option<String>,
    // Human-readable file size text (e.g. "1.2GB"), when present.
    pub file_size: Option<String>,
    // Torrent download link, when present.
    pub torrent_link: Option<String>,
}
impl MikanClassicEpisodeTableRow {
    /// Mikan renders publish times in UTC+8 (China Standard Time).
    fn timezone() -> FixedOffset {
        FixedOffset::east_opt(8 * 3600).unwrap()
    }
    /// Parses the relative date labels "今天" (today) and "昨天" (yesterday),
    /// resolved against the current date in UTC+8.
    fn fixed_date_parser(input: &str) -> IResult<&str, NaiveDate> {
        alt((
            map(tag("今天"), move |_| {
                Utc::now().with_timezone(&Self::timezone()).date_naive()
            }),
            map(tag("昨天"), move |_| {
                Utc::now().with_timezone(&Self::timezone()).date_naive() - Duration::days(1)
            }),
        ))
        .parse(input)
    }
    /// Parses an absolute `YYYY/MM/DD` date token (everything up to the first
    /// whitespace character).
    fn formatted_date_parser(input: &str) -> IResult<&str, NaiveDate> {
        let (remain, date_str) = take_till1(|c: char| c.is_whitespace()).parse(input)?;
        let date = NaiveDate::parse_from_str(date_str, "%Y/%m/%d").map_err(|_| {
            nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
        })?;
        Ok((remain, date))
    }
    /// Date parser: try the relative labels first, then the absolute form.
    fn date_parser(input: &str) -> IResult<&str, NaiveDate> {
        alt((Self::fixed_date_parser, Self::formatted_date_parser)).parse(input)
    }
    /// Parses a fixed-width, 5-character `HH:MM` time.
    fn time_parser(input: &str) -> IResult<&str, NaiveTime> {
        let (remain, time_str) = take(5usize).parse(input)?;
        let time = NaiveTime::parse_from_str(time_str, "%H:%M").map_err(|_| {
            nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
        })?;
        Ok((remain, time))
    }
    /// Parses "<date> <time>" as a local UTC+8 datetime and converts it to
    /// UTC. Returns `None` on parse failure or when the local datetime is
    /// ambiguous/nonexistent.
    fn extract_publish_at(text: &str) -> Option<DateTime<Utc>> {
        let (_, (date, _, time)) = (Self::date_parser, space1, Self::time_parser)
            .parse(text)
            .ok()?;
        let local_dt = Self::timezone()
            .from_local_datetime(&date.and_time(time))
            .single()?;
        Some(local_dt.with_timezone(&Utc))
    }
    /// Extracts one episode row from a `<tr>` element of the classic table.
    ///
    /// `rev_id` is the page's reverse index (`total - page`) and `idx` the
    /// row's index within the page; the synthetic id is
    /// `rev_id * 1000 + idx`.
    /// NOTE(review): this id scheme assumes fewer than 1000 rows per page —
    /// confirm against the site's page size.
    ///
    /// # Errors
    /// Fails when any of the mandatory fields (episode id, title, publish
    /// time) cannot be parsed; the error names the missing fields.
    pub fn from_element_ref(
        row: ElementRef<'_>,
        rev_id: i32,
        idx: i32,
        mikan_base_url: &Url,
    ) -> RecorderResult<Self> {
        // Column layout: 1 = publish time, 2 = fansub link, 3 = title link +
        // magnet button, 4 = file size, 5 = torrent link.
        let publish_at_selector = &Selector::parse("td:nth-of-type(1)").unwrap();
        let fansub_selector = &Selector::parse("td:nth-of-type(2) > a").unwrap();
        let original_name_selector =
            &Selector::parse("td:nth-of-type(3) > a:nth-of-type(1)").unwrap();
        let magnet_link_selector =
            &Selector::parse("td:nth-of-type(3) > a:nth-of-type(2)").unwrap();
        let file_size_selector = &Selector::parse("td:nth-of-type(4)").unwrap();
        let torrent_link_selector = &Selector::parse("td:nth-of-type(5) > a").unwrap();
        let publish_at = row
            .select(publish_at_selector)
            .next()
            .map(extract_inner_text_from_element_ref)
            .and_then(|e| Self::extract_publish_at(&e));
        // Resolve the relative href against the base URL, then split the
        // Option<(hash, name)> into two Options with `unzip`.
        let (mikan_fansub_hash, fansub_name) = row
            .select(fansub_selector)
            .next()
            .and_then(|e| {
                e.attr("href")
                    .and_then(|s| mikan_base_url.join(s).ok())
                    .and_then(|u| MikanFansubHash::from_homepage_url(&u))
                    .map(|h| (h, extract_inner_text_from_element_ref(e)))
            })
            .unzip();
        let (mikan_episode_hash, original_name) = row
            .select(original_name_selector)
            .next()
            .and_then(|el| {
                el.attr("href")
                    .and_then(|s| mikan_base_url.join(s).ok())
                    .and_then(|u| MikanEpisodeHash::from_homepage_url(&u))
                    .map(|h| (h, extract_inner_text_from_element_ref(el)))
            })
            .unzip();
        // The magnet URI lives on the copy button's clipboard attribute.
        let magnet_link = row
            .select(magnet_link_selector)
            .next()
            .and_then(|el| el.attr("data-clipboard-text"));
        let file_size = row
            .select(file_size_selector)
            .next()
            .map(extract_inner_text_from_element_ref);
        let torrent_link = row
            .select(torrent_link_selector)
            .next()
            .and_then(|el| el.attr("href"));
        // Borrow the mandatory fields first so the `else` branch can still
        // inspect which ones were missing.
        if let (Some(mikan_episode_hash), Some(original_name), Some(publish_at)) = (
            mikan_episode_hash.as_ref(),
            original_name.as_ref(),
            publish_at.as_ref(),
        ) {
            Ok(Self {
                id: rev_id * 1000 + idx,
                publish_at: *publish_at,
                mikan_fansub_id: mikan_fansub_hash.map(|h| h.mikan_fansub_id.clone()),
                fansub_name,
                mikan_episode_id: mikan_episode_hash.mikan_episode_id.clone(),
                original_name: original_name.clone(),
                magnet_link: magnet_link.map(|s| s.to_string()),
                file_size: file_size.map(|s| s.to_string()),
                torrent_link: torrent_link.map(|s| s.to_string()),
            })
        } else {
            // Report every missing mandatory field in one error message.
            let mut missing_fields = vec![];
            if mikan_episode_hash.is_none() {
                missing_fields.push("mikan_episode_id");
            }
            if original_name.is_none() {
                missing_fields.push("original_name");
            }
            if publish_at.is_none() {
                missing_fields.push("publish_at");
            }
            Err(RecorderError::without_source(format!(
                "Failed to parse episode table row, missing fields: {missing_fields:?}, row \
                 index: {idx}"
            )))
        }
    }
}
/// One scraped page of the classic episode table plus pagination metadata.
pub struct MikanClassicEpisodeTablePage {
    // 1-based page number of this page.
    pub page: i32,
    // Total number of pages reported by the page's pagination script.
    pub total: i32,
    // Raw HTML of the page, kept so it can be persisted and replayed.
    pub html: String,
    // Parsed table rows, in reverse document order (see `from_html`).
    pub rows: Vec<MikanClassicEpisodeTableRow>,
}
impl MikanClassicEpisodeTablePage {
    /// Parses one classic-episode listing page.
    ///
    /// The total page count is read from the inline `bootpag` pagination
    /// script via `TOTAL_PAGE_REGEX`; when `updated_info = Some((rev_id,
    /// total))` is supplied (e.g. when replaying a cached page), that total
    /// overrides the scraped one so every page agrees on the same numbering.
    /// Rows are parsed in reverse document order so indices are stable as new
    /// episodes are prepended on the live site.
    ///
    /// # Errors
    /// Fails when the pagination meta cannot be found or any row fails to
    /// parse.
    pub fn from_html(
        html: String,
        mikan_base_url: &Url,
        page: i32,
        updated_info: Option<(i32, i32)>,
    ) -> RecorderResult<Self> {
        let tr_selector = &Selector::parse("tbody tr").unwrap();
        let doc = Html::parse_document(&html);
        if let Some(mut total) = TOTAL_PAGE_REGEX
            .captures(&html)
            .and_then(|c| c.get(1))
            .and_then(|s| s.as_str().parse::<i32>().ok())
        {
            if let Some((_, update_total)) = updated_info {
                total = update_total;
            }
            // Reverse page id: page `total` maps to rev 0, page 1 to `total - 1`.
            let rev_id = total - page;
            let rows = doc
                .select(tr_selector)
                .rev()
                .enumerate()
                .map(|(idx, tr)| {
                    MikanClassicEpisodeTableRow::from_element_ref(
                        tr,
                        rev_id,
                        idx as i32,
                        mikan_base_url,
                    )
                })
                .collect::<RecorderResult<Vec<_>>>()?;
            Ok(Self {
                page,
                total,
                html,
                rows,
            })
        } else {
            Err(RecorderError::without_source(
                "Failed to parse pagination meta and rows".into(),
            ))
        }
    }
    /// Persists this page under `TEST_FOLDER` as `html/rev_N.html`,
    /// `parquet/rev_N.parquet` and `csv/rev_N.csv`, where `N = total - page`.
    ///
    /// # Errors
    /// Fails on I/O errors or when the DataFrame cannot be built/written.
    pub fn save_to_files(&self) -> RecorderResult<()> {
        use polars::prelude::*;
        let rev_id = self.total - self.page;
        let parquet_path = TEST_FOLDER.join(format!("parquet/rev_{rev_id}.parquet"));
        let csv_path = TEST_FOLDER.join(format!("csv/rev_{rev_id}.csv"));
        let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
        // `fs::write` only needs a byte slice — no need to clone the HTML.
        std::fs::write(html_path, &self.html)?;
        // Column-wise buffers for the DataFrame below.
        let mut id_vec = Vec::new();
        let mut publish_at_vec = Vec::new();
        let mut mikan_fansub_id_vec = Vec::new();
        let mut fansub_name_vec = Vec::new();
        let mut mikan_episode_id_vec = Vec::new();
        let mut original_name_vec = Vec::new();
        let mut magnet_link_vec = Vec::new();
        let mut file_size_vec = Vec::new();
        let mut torrent_link_vec = Vec::new();
        for row in &self.rows {
            id_vec.push(row.id);
            publish_at_vec.push(row.publish_at.to_rfc3339());
            mikan_fansub_id_vec.push(row.mikan_fansub_id.clone());
            fansub_name_vec.push(row.fansub_name.clone());
            mikan_episode_id_vec.push(row.mikan_episode_id.clone());
            original_name_vec.push(row.original_name.clone());
            magnet_link_vec.push(row.magnet_link.clone());
            file_size_vec.push(row.file_size.clone());
            torrent_link_vec.push(row.torrent_link.clone());
        }
        let mut df = df! [
            "id" => id_vec,
            "publish_at_timestamp" => publish_at_vec,
            "mikan_fansub_id" => mikan_fansub_id_vec,
            "fansub_name" => fansub_name_vec,
            "mikan_episode_id" => mikan_episode_id_vec,
            "original_name" => original_name_vec,
            "magnet_link" => magnet_link_vec,
            "file_size" => file_size_vec,
            "torrent_link" => torrent_link_vec,
        ]
        .map_err(|e| {
            let message = format!("Failed to create DataFrame: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?;
        // Both writers take `&mut DataFrame`, so reuse the same frame instead
        // of cloning it once per writer as before.
        let mut parquet_file = std::fs::File::create(&parquet_path)?;
        ParquetWriter::new(&mut parquet_file)
            .finish(&mut df)
            .map_err(|e| {
                let message = format!("Failed to write parquet file: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;
        let mut csv_file = std::fs::File::create(&csv_path)?;
        CsvWriter::new(&mut csv_file)
            .include_header(true)
            .with_quote_style(QuoteStyle::Always)
            .finish(&mut df)
            .map_err(|e| {
                let message = format!("Failed to write csv file: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;
        println!(
            "[{}/{}] Saved {} rows to rev_{}.{{parquet,html,csv}}",
            self.page,
            self.total,
            self.rows.len(),
            rev_id
        );
        Ok(())
    }
    /// Returns the reverse ids in `0..total` that have no saved csv yet,
    /// i.e. the pages that still need to be scraped.
    ///
    /// # Errors
    /// Fails when the csv fixture directory cannot be read.
    pub fn waiting_rev_ids(total: i32) -> RecorderResult<Vec<i32>> {
        let dir = TEST_FOLDER.join("csv");
        let files = std::fs::read_dir(dir)?;
        let rev_ids = files
            .filter_map(|f| f.ok())
            .filter_map(|f| {
                f.path().file_stem().and_then(|s| {
                    s.to_str()
                        // `strip_prefix` matches only a leading "rev_"; the
                        // previous `starts_with` + `replace` would also have
                        // rewritten later occurrences of "rev_" in the stem.
                        .and_then(|s| s.strip_prefix("rev_"))
                        .and_then(|n| n.parse::<i32>().ok())
                })
            })
            .collect::<HashSet<_>>();
        Ok((0..total)
            .filter(|rev_id| !rev_ids.contains(rev_id))
            .collect::<Vec<_>>())
    }
}
/// Fetches (or replays from cache) one classic-episode listing page and
/// parses it.
///
/// When `updated_info = Some((rev_id, total))` and `html/rev_{rev_id}.html`
/// already exists under `TEST_FOLDER`, the cached HTML is parsed instead of
/// hitting the network. Freshly fetched HTML is also mirrored to
/// `html/temp.html` so a partially failed run can be inspected.
async fn scrape_mikan_classic_episode_table_page(
    mikan_client: &MikanClient,
    page: i32,
    updated_info: Option<(i32, i32)>,
) -> RecorderResult<MikanClassicEpisodeTablePage> {
    let mikan_base_url = mikan_client.base_url();
    let url = mikan_base_url.join(&format!("/Home/Classic/{page}"))?;
    // Cache hit: replay the previously saved HTML for this reverse id.
    if let Some((rev_id, update_total)) = updated_info.as_ref() {
        let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
        if html_path.exists() {
            let html = std::fs::read_to_string(&html_path)?;
            println!("[{page}/{update_total}] html exists, skipping fetch");
            return MikanClassicEpisodeTablePage::from_html(
                html,
                mikan_base_url,
                page,
                updated_info,
            );
        }
    }
    // Total is only known after the first page has been parsed.
    let total = if let Some((_, update_total)) = updated_info.as_ref() {
        update_total.to_string()
    } else {
        "Unknown".to_string()
    };
    println!("[{page}/{total}] fetching html...");
    let html = fetch_html(mikan_client, url).await?;
    println!("[{page}/{total}] fetched html done");
    std::fs::write(TEST_FOLDER.join("html/temp.html"), html.clone())?;
    MikanClassicEpisodeTablePage::from_html(html, mikan_base_url, page, updated_info)
}
/// Convenience wrapper: translates a reverse index (`rev_idx`, counted back
/// from the last page) into an absolute page number and scrapes that page,
/// forwarding the known `(rev_idx, total)` pair so cached HTML can be reused.
async fn scrape_mikan_classic_episode_table_page_from_rev_id(
    mikan_client: &MikanClient,
    total: i32,
    rev_idx: i32,
) -> RecorderResult<MikanClassicEpisodeTablePage> {
    scrape_mikan_classic_episode_table_page(mikan_client, total - rev_idx, Some((rev_idx, total)))
        .await
}
/// Merges every `parquet/rev_*.parquet` file into one deduplicated,
/// time-sorted DataFrame and writes stripped-down column subsets back out
/// (currently only the "tiny" variant; "lite"/"full" are disabled below).
///
/// NOTE(review): declared `async` but contains no `.await` — presumably kept
/// async for call-site symmetry in `main`; confirm before changing.
///
/// # Errors
/// Fails when no input files are found, or on any read/concat/write error.
async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()> {
    use polars::prelude::*;
    let dir = TEST_FOLDER.join("parquet");
    let files = std::fs::read_dir(dir)?;
    // Only pick up per-page dumps (`rev_*.parquet`), not previously merged
    // outputs like `tiny.parquet`.
    let parquet_paths = files
        .filter_map(|f| f.ok())
        .filter_map(|f| {
            let path = f.path();
            if let Some(ext) = path.extension()
                && ext == "parquet"
                && path
                    .file_stem()
                    .is_some_and(|f| f.to_string_lossy().starts_with("rev_"))
            {
                Some(path)
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    if parquet_paths.is_empty() {
        return Err(RecorderError::without_source(
            "No parquet files found to merge".into(),
        ));
    }
    println!("Found {} parquet files to merge", parquet_paths.len());
    // Read and merge all parquet files.
    let mut all_dfs = Vec::new();
    for path in &parquet_paths {
        println!("Reading {path:?}");
        let file = std::fs::File::open(path)?;
        let df = ParquetReader::new(file).finish().map_err(|e| {
            let message = format!("Failed to read parquet file {path:?}: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?;
        all_dfs.push(df);
    }
    // Diagonal concat tolerates column-set differences between revisions;
    // then keep only the newest row per (fansub, episode) pair.
    let lazy_frames: Vec<LazyFrame> = all_dfs.into_iter().map(|df| df.lazy()).collect();
    let merged_df = concat_lf_diagonal(&lazy_frames, UnionArgs::default())
        .map_err(|e| {
            let message = format!("Failed to concat DataFrames: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?
        .sort(
            ["publish_at_timestamp"],
            SortMultipleOptions::default().with_order_descending(true),
        )
        .unique(
            Some(vec![
                "mikan_fansub_id".to_string(),
                "mikan_episode_id".to_string(),
            ]),
            UniqueKeepStrategy::First,
        )
        .collect()
        .map_err(|e| {
            let message = format!("Failed to collect lazy DataFrame: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?;
    // Writes `parquet/{name}.parquet` containing only `columns`, re-sorted
    // ascending by publish time, with maximum zstd compression.
    fn select_columns_and_write(
        merged_df: DataFrame,
        name: &str,
        columns: &[&str],
    ) -> RecorderResult<()> {
        let result_df = merged_df
            .lazy()
            .sort(["publish_at_timestamp"], SortMultipleOptions::default())
            .select(columns.iter().map(|c| col(*c)).collect_vec())
            .collect()
            .map_err(|e| {
                let message = format!("Failed to sort and select columns: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;
        let output_path = TEST_FOLDER.join(format!("parquet/{name}.parquet"));
        let mut output_file = std::fs::File::create(&output_path)?;
        ParquetWriter::new(&mut output_file)
            .set_parallel(true)
            // zstd level 22 = maximum compression; these are test fixtures,
            // so write speed is irrelevant.
            .with_compression(ParquetCompression::Zstd(Some(
                ZstdLevel::try_new(22).unwrap(),
            )))
            .finish(&mut result_df.clone())
            .map_err(|e| {
                let message = format!("Failed to write merged parquet file: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;
        println!("Merged {} rows into {output_path:?}", result_df.height());
        Ok(())
    }
    select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
    // The larger "lite"/"full" variants are intentionally disabled for now.
    // select_columns_and_write(
    //     merged_df.clone(),
    //     "lite",
    //     &[
    //         "mikan_fansub_id",
    //         "fansub_name",
    //         "mikan_episode_id",
    //         "original_name",
    //     ],
    // )?;
    // select_columns_and_write(
    //     merged_df,
    //     "full",
    //     &[
    //         "id",
    //         "publish_at_timestamp",
    //         "mikan_fansub_id",
    //         "fansub_name",
    //         "mikan_episode_id",
    //         "original_name",
    //         "magnet_link",
    //         "file_size",
    //         "torrent_link",
    //     ],
    // )?;
    Ok(())
}
/// Entry point: scrapes every page of Mikan's classic episode listing into
/// per-revision html/parquet/csv fixtures, resuming from whatever is already
/// on disk, then merges the parquet dumps into the stripped test fixture.
#[tokio::main]
async fn main() -> RecorderResult<()> {
    std::fs::create_dir_all(TEST_FOLDER.join("html"))?;
    std::fs::create_dir_all(TEST_FOLDER.join("parquet"))?;
    std::fs::create_dir_all(TEST_FOLDER.join("csv"))?;
    // Polite scraping config: retries with backoff plus a leaky-bucket rate
    // limit of roughly one request per second.
    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(1),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(std::time::Duration::from_millis(1000)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;
    // Page 1 is always fetched first: it reveals the total page count that
    // the reverse-id numbering of all other pages depends on.
    let first_page_and_pagination_info =
        scrape_mikan_classic_episode_table_page(&mikan_scrape_client, 1, None).await?;
    let total_page = first_page_and_pagination_info.total;
    first_page_and_pagination_info.save_to_files()?;
    // Resume: only scrape reverse ids that have no saved csv yet.
    let next_rev_ids = MikanClassicEpisodeTablePage::waiting_rev_ids(total_page)?;
    for todo_rev_id in next_rev_ids {
        let page = scrape_mikan_classic_episode_table_page_from_rev_id(
            &mikan_scrape_client,
            total_page,
            todo_rev_id,
        )
        .await?;
        page.save_to_files()?;
    }
    // Merge all parquet files.
    println!("\nMerging all parquet files...");
    merge_mikan_classic_episodes_and_strip_columns().await?;
    println!("Merge completed!");
    Ok(())
}

View File

@ -212,7 +212,7 @@ async fn main() -> Result<()> {
};
}
{
let episode_torrent_url = rss_item.url;
let episode_torrent_url = rss_item.torrent_link;
let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
if !episode_torrent_doppel_path.exists_any() {

View File

@ -72,7 +72,7 @@ async fn main() -> RecorderResult<()> {
}?;
{
let episode_torrent_url = rss_item.url;
let episode_torrent_url = rss_item.torrent_link;
let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
if !episode_torrent_doppel_path.exists_any() {
@ -173,7 +173,7 @@ async fn main() -> RecorderResult<()> {
};
{
let episode_torrent_url = rss_item.url;
let episode_torrent_url = rss_item.torrent_link;
let episode_torrent_doppel_path =
MikanDoppelPath::new(episode_torrent_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode torrent...");

View File

@ -13,6 +13,8 @@ use crate::{
},
};
pub const PROJECT_NAME: &str = "konobangu";
pub struct App {
pub context: Arc<dyn AppContextTrait>,
pub builder: AppBuilder,
@ -51,14 +53,15 @@ impl App {
let mut router = Router::<Arc<dyn AppContextTrait>>::new();
let (graphql_c, oidc_c, metadata_c, static_c) = futures::try_join!(
let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
controller::graphql::create(context.clone()),
controller::oidc::create(context.clone()),
controller::metadata::create(context.clone()),
controller::r#static::create(context.clone()),
controller::feeds::create(context.clone()),
)?;
for c in [graphql_c, oidc_c, metadata_c, static_c] {
for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
router = c.apply_to(router);
}
@ -151,7 +154,7 @@ impl App {
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
#[cfg(all(not(unix), debug_assertions))]
#[cfg(not(all(unix, debug_assertions)))]
let quit = std::future::pending::<()>();
tokio::select! {

View File

@ -4,7 +4,7 @@ pub mod context;
pub mod core;
pub mod env;
pub use core::App;
pub use core::{App, PROJECT_NAME};
pub use builder::AppBuilder;
pub use config::AppConfig;

View File

@ -9,7 +9,7 @@ use super::{
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{
app::AppContextTrait,
app::{AppContextTrait, PROJECT_NAME},
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
};
@ -86,7 +86,7 @@ impl AuthServiceTrait for BasicAuthService {
}
fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
Some(HeaderValue::from_static(r#"Basic realm="konobangu""#))
Some(HeaderValue::from_str(format!("Basic realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
}
fn auth_type(&self) -> AuthType {

View File

@ -32,7 +32,11 @@ use super::{
errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{app::AppContextTrait, errors::RecorderError, models::auth::AuthType};
use crate::{
app::{AppContextTrait, PROJECT_NAME},
errors::RecorderError,
models::auth::AuthType,
};
pub struct OidcHttpClient(pub Arc<HttpClient>);
@ -351,7 +355,7 @@ impl AuthServiceTrait for OidcAuthService {
}
fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
Some(HeaderValue::from_static(r#"Bearer realm="konobangu""#))
Some(HeaderValue::from_str(format!("Bearer realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
}
fn auth_type(&self) -> AuthType {

View File

@ -47,8 +47,12 @@ pub enum RecorderError {
RegexError { source: regex::Error },
#[snafu(display("Invalid method"))]
InvalidMethodError,
#[snafu(display("Invalid header value"))]
InvalidHeaderValueError,
#[snafu(display("Invalid header name"))]
InvalidHeaderNameError,
#[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
MissingOriginError,
#[snafu(transparent)]
TracingAppenderInitError {
source: tracing_appender::rolling::InitError,
@ -87,8 +91,6 @@ pub enum RecorderError {
#[snafu(source(from(opendal::Error, Box::new)))]
source: Box<opendal::Error>,
},
#[snafu(display("Invalid header value"))]
InvalidHeaderValueError,
#[snafu(transparent)]
HttpClientError { source: HttpClientError },
#[cfg(feature = "testcontainers")]
@ -248,6 +250,11 @@ impl IntoResponse for RecorderError {
)
.into_response()
}
Self::ModelEntityNotFound { entity } => (
StatusCode::NOT_FOUND,
Json::<StandardErrorResponse>(StandardErrorResponse::from(entity.to_string())),
)
.into_response(),
err => (
StatusCode::INTERNAL_SERVER_ERROR,
Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),

View File

@ -1,3 +1,4 @@
use chrono::{DateTime, Utc};
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use quirks_path::Path;
@ -33,6 +34,14 @@ lazy_static! {
Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
}
#[derive(Clone, Debug)]
pub struct EpisodeEnclosureMeta {
pub magnet_link: Option<String>,
pub torrent_link: Option<String>,
pub pub_date: Option<DateTime<Utc>>,
pub content_length: Option<i64>,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeMediaMeta {
pub fansub: Option<String>,

View File

@ -1,7 +1,12 @@
use axum::http::{HeaderName, HeaderValue, Uri, header, request::Parts};
use axum::{
extract::FromRequestParts,
http::{HeaderName, HeaderValue, Uri, header, request::Parts},
};
use itertools::Itertools;
use url::Url;
use crate::errors::RecorderError;
/// Fields from a "Forwarded" header per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4)
#[derive(Debug, Clone)]
pub struct ForwardedHeader {
@ -101,9 +106,13 @@ pub struct ForwardedRelatedInfo {
pub origin: Option<String>,
}
impl ForwardedRelatedInfo {
pub fn from_request_parts(request_parts: &Parts) -> ForwardedRelatedInfo {
let headers = &request_parts.headers;
impl<T> FromRequestParts<T> for ForwardedRelatedInfo {
type Rejection = RecorderError;
fn from_request_parts(
parts: &mut Parts,
_state: &T,
) -> impl Future<Output = Result<Self, Self::Rejection>> + Send {
let headers = &parts.headers;
let forwarded = headers
.get(header::FORWARDED)
.and_then(|s| ForwardedHeader::try_from(s.clone()).ok());
@ -132,17 +141,19 @@ impl ForwardedRelatedInfo {
.get(header::ORIGIN)
.and_then(|s| s.to_str().map(String::from).ok());
ForwardedRelatedInfo {
futures::future::ready(Ok(ForwardedRelatedInfo {
host,
x_forwarded_for,
x_forwarded_host,
x_forwarded_proto,
forwarded,
uri: request_parts.uri.clone(),
uri: parts.uri.clone(),
origin,
}
}))
}
}
impl ForwardedRelatedInfo {
pub fn resolved_protocol(&self) -> Option<&str> {
self.forwarded
.as_ref()

View File

@ -12,6 +12,7 @@ pub const MIKAN_BANGUMI_POSTER_PATH: &str = "/images/Bangumi";
pub const MIKAN_EPISODE_TORRENT_PATH: &str = "/Download";
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH: &str = "/RSS/MyBangumi";
pub const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi";
pub const MIKAN_FANSUB_HOMEPAGE_PATH: &str = "/Home/PublishGroup";
pub const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId";
pub const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid";
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY: &str = "token";

View File

@ -11,10 +11,11 @@ pub use constants::{
MIKAN_ACCOUNT_MANAGE_PAGE_PATH, MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH,
MIKAN_BANGUMI_HOMEPAGE_PATH, MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH,
MIKAN_BANGUMI_RSS_PATH, MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_EPISODE_TORRENT_PATH,
MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH,
MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
MIKAN_UNKNOWN_FANSUB_ID, MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_LOGIN_PAGE_PATH,
MIKAN_LOGIN_PAGE_SEARCH, MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH,
MIKAN_SEASON_STR_QUERY_KEY, MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH,
MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY, MIKAN_UNKNOWN_FANSUB_ID,
MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
};
pub use credential::MikanCredentialForm;
pub use subscription::{
@ -22,11 +23,12 @@ pub use subscription::{
};
pub use web::{
MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssEpisodeItem,
MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionRssUrlMeta,
build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url,
build_mikan_bangumi_subscription_rss_url, build_mikan_episode_homepage_url,
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanFansubHash,
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_expand_subscribed_url,
build_mikan_bangumi_homepage_url, build_mikan_bangumi_subscription_rss_url,
build_mikan_episode_homepage_url, build_mikan_season_flow_url,
build_mikan_subscriber_subscription_rss_url,
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
extract_mikan_episode_meta_from_episode_homepage_html,

View File

@ -20,12 +20,15 @@ use super::scrape_mikan_bangumi_meta_stream_from_season_flow_url;
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
extract::mikan::{
MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
scrape_mikan_episode_meta_from_episode_homepage_url,
extract::{
bittorrent::EpisodeEnclosureMeta,
mikan::{
MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
scrape_mikan_episode_meta_from_episode_homepage_url,
},
},
models::{
bangumi, episodes, subscription_bangumi, subscription_episode,
@ -54,7 +57,7 @@ async fn sync_mikan_feeds_from_rss_item_list(
.map(|(episode_id, hash, bangumi_id)| (hash.mikan_episode_id, (episode_id, bangumi_id)))
.collect::<HashMap<_, _>>();
let mut new_episode_meta_list: Vec<MikanEpisodeMeta> = vec![];
let mut new_episode_meta_list: Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)> = vec![];
let mikan_client = ctx.mikan();
for to_insert_rss_item in rss_item_list.into_iter().filter(|rss_item| {
@ -65,7 +68,8 @@ async fn sync_mikan_feeds_from_rss_item_list(
to_insert_rss_item.build_homepage_url(mikan_base_url.clone()),
)
.await?;
new_episode_meta_list.push(episode_meta);
let episode_enclosure_meta = EpisodeEnclosureMeta::from(to_insert_rss_item);
new_episode_meta_list.push((episode_meta, episode_enclosure_meta));
}
(new_episode_meta_list, existed_episode_hash2id_map)
@ -92,22 +96,22 @@ async fn sync_mikan_feeds_from_rss_item_list(
let new_episode_meta_list_group_by_bangumi_hash: HashMap<
MikanBangumiHash,
Vec<MikanEpisodeMeta>,
Vec<(MikanEpisodeMeta, EpisodeEnclosureMeta)>,
> = {
let mut m = hashmap! {};
for episode_meta in new_episode_meta_list {
for (episode_meta, episode_enclosure_meta) in new_episode_meta_list {
let bangumi_hash = episode_meta.bangumi_hash();
m.entry(bangumi_hash)
.or_insert_with(Vec::new)
.push(episode_meta);
.push((episode_meta, episode_enclosure_meta));
}
m
};
for (group_bangumi_hash, group_episode_meta_list) in new_episode_meta_list_group_by_bangumi_hash
{
let first_episode_meta = group_episode_meta_list.first().unwrap();
let (first_episode_meta, _) = group_episode_meta_list.first().unwrap();
let group_bangumi_model = bangumi::Model::get_or_insert_from_mikan(
ctx,
group_bangumi_hash,
@ -126,9 +130,12 @@ async fn sync_mikan_feeds_from_rss_item_list(
},
)
.await?;
let group_episode_creation_list = group_episode_meta_list
.into_iter()
.map(|episode_meta| (&group_bangumi_model, episode_meta));
let group_episode_creation_list =
group_episode_meta_list
.into_iter()
.map(|(episode_meta, episode_enclosure_meta)| {
(&group_bangumi_model, episode_meta, episode_enclosure_meta)
});
episodes::Model::add_mikan_episodes_for_subscription(
ctx,
@ -273,7 +280,7 @@ impl MikanSubscriberSubscription {
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, InputObject, SimpleObject)]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSeasonSubscription {
pub subscription_id: i32,
pub year: i32,

View File

@ -2,7 +2,7 @@ use std::{borrow::Cow, fmt, str::FromStr, sync::Arc};
use async_stream::try_stream;
use bytes::Bytes;
use chrono::DateTime;
use chrono::{DateTime, Utc};
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
use fetch::{html::fetch_html, image::fetch_image};
use futures::{Stream, TryStreamExt, pin_mut};
@ -17,13 +17,14 @@ use crate::{
app::AppContextTrait,
errors::app_error::{RecorderError, RecorderResult},
extract::{
bittorrent::EpisodeEnclosureMeta,
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str,
mikan::{
MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH, MIKAN_BANGUMI_HOMEPAGE_PATH,
MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_POSTER_PATH, MIKAN_BANGUMI_RSS_PATH,
MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY, MIKAN_POSTER_BUCKET_KEY,
MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
MIKAN_YEAR_QUERY_KEY, MikanClient,
},
@ -39,11 +40,12 @@ use crate::{
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanRssEpisodeItem {
pub title: String,
pub url: Url,
pub content_length: Option<u64>,
pub torrent_link: Url,
pub content_length: Option<i64>,
pub mime: String,
pub pub_date: Option<i64>,
pub pub_date: Option<DateTime<Utc>>,
pub mikan_episode_id: String,
pub magnet_link: Option<String>,
}
impl MikanRssEpisodeItem {
@ -93,20 +95,49 @@ impl TryFrom<rss::Item> for MikanRssEpisodeItem {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
})?;
let pub_date = item
.extensions
.get("torrent")
.and_then(|t| t.get("pubDate"))
.and_then(|e| e.first())
.and_then(|e| e.value.as_deref());
Ok(MikanRssEpisodeItem {
title,
url: enclosure_url,
torrent_link: enclosure_url,
content_length: enclosure.length.parse().ok(),
mime: mime_type,
pub_date: item
.pub_date
.and_then(|s| DateTime::parse_from_rfc2822(&s).ok())
.map(|s| s.timestamp_millis()),
pub_date: pub_date.and_then(|s| {
DateTime::parse_from_rfc2822(s)
.ok()
.map(|s| s.with_timezone(&Utc))
.or_else(|| {
DateTime::parse_from_rfc3339(s)
.ok()
.map(|s| s.with_timezone(&Utc))
})
.or_else(|| {
DateTime::parse_from_rfc3339(&format!("{s}+08:00"))
.ok()
.map(|s| s.with_timezone(&Utc))
})
}),
mikan_episode_id,
magnet_link: None,
})
}
}
impl From<MikanRssEpisodeItem> for EpisodeEnclosureMeta {
fn from(item: MikanRssEpisodeItem) -> Self {
Self {
magnet_link: item.magnet_link,
torrent_link: Some(item.torrent_link.to_string()),
pub_date: item.pub_date,
content_length: item.content_length,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriberSubscriptionRssUrlMeta {
pub mikan_subscription_token: String,
@ -205,6 +236,32 @@ impl MikanBangumiMeta {
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct MikanFansubHash {
    pub mikan_fansub_id: String,
}

impl MikanFansubHash {
    /// Extract the fansub id from a fansub homepage URL.
    ///
    /// Returns `None` when the URL path does not start with
    /// `MIKAN_FANSUB_HOMEPAGE_PATH`.
    pub fn from_homepage_url(url: &Url) -> Option<Self> {
        let path = url.path();
        // `strip_prefix` anchors the match at the start of the path. The
        // previous `replace(&format!("{prefix}/"), "")` removed the prefix
        // anywhere it occurred and, when the trailing '/' was absent, left
        // the entire path (prefix included) as the id.
        let rest = path.strip_prefix(MIKAN_FANSUB_HOMEPAGE_PATH)?;
        let mikan_fansub_id = rest.strip_prefix('/').unwrap_or(rest).to_string();
        Some(Self { mikan_fansub_id })
    }

    /// Build the fansub homepage URL on top of the given Mikan base URL.
    pub fn build_homepage_url(self, mikan_base_url: Url) -> Url {
        let mut url = mikan_base_url;
        url.set_path(&format!(
            "{MIKAN_FANSUB_HOMEPAGE_PATH}/{}",
            self.mikan_fansub_id
        ));
        url
    }
}
#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeMeta {
pub homepage: Url,

View File

@ -0,0 +1,14 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::bangumi};
/// Restrict GraphQL access to the bangumi entity by its `subscriber_id`
/// column (see `restrict_subscriber_for_entity`).
pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId);
}
/// Register the bangumi entity and its `BangumiType` enum with the
/// seaography GraphQL schema builder.
pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<bangumi::BangumiType>();
    seaography::register_entity!(builder, bangumi);
    builder
}

View File

@ -3,12 +3,22 @@ use std::sync::Arc;
use async_graphql::dynamic::{
Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
};
use seaography::Builder as SeaographyBuilder;
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql;
use crate::{
app::AppContextTrait, auth::AuthUserInfo, errors::RecorderError, models::credential_3rd,
app::AppContextTrait,
auth::AuthUserInfo,
errors::RecorderError,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::crypto::{
register_crypto_column_input_conversion_to_schema_context,
register_crypto_column_output_conversion_to_schema_context,
},
},
models::credential_3rd,
};
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
@ -63,9 +73,52 @@ impl Credential3rdCheckAvailableInfo {
}
}
/// Wire the 3rd-party-credential entity into the shared `BuilderContext`:
/// restrict access by subscriber and install encrypt/decrypt conversions for
/// the sensitive columns (cookies, username, password).
pub fn register_credential3rd_to_schema_context(
    context: &mut BuilderContext,
    ctx: Arc<dyn AppContextTrait>,
) {
    restrict_subscriber_for_entity::<credential_3rd::Entity>(
        context,
        &credential_3rd::Column::SubscriberId,
    );
    // Each sensitive column needs both an input (encrypt on write) and an
    // output (decrypt on read) conversion.
    for column in [
        credential_3rd::Column::Cookies,
        credential_3rd::Column::Username,
        credential_3rd::Column::Password,
    ] {
        register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
            context,
            ctx.clone(),
            &column,
        );
        register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
            context,
            ctx.clone(),
            &column,
        );
    }
}
pub fn register_credential3rd_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.register_enumeration::<credential_3rd::Credential3rdType>();
seaography::register_entity!(builder, credential_3rd);
builder.schema = builder
.schema
.register(Credential3rdCheckAvailableInput::generate_input_object());

View File

@ -0,0 +1,17 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloaders};
/// Restrict GraphQL access to the downloaders entity by its `subscriber_id`
/// column (see `restrict_subscriber_for_entity`).
pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<downloaders::Entity>(
        context,
        &downloaders::Column::SubscriberId,
    );
}
/// Register the downloaders entity and its `DownloaderCategory` enum with the
/// seaography GraphQL schema builder.
pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<downloaders::DownloaderCategory>();
    seaography::register_entity!(builder, downloaders);
    builder
}

View File

@ -0,0 +1,15 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloads};
/// Restrict GraphQL access to the downloads entity by its `subscriber_id`
/// column (see `restrict_subscriber_for_entity`).
pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId);
}
/// Register the downloads entity and its `DownloadStatus` / `DownloadMime`
/// enums with the seaography GraphQL schema builder.
pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<downloads::DownloadStatus>();
    builder.register_enumeration::<downloads::DownloadMime>();
    seaography::register_entity!(builder, downloads);
    builder
}

View File

@ -0,0 +1,14 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::episodes};
/// Restrict GraphQL access to the episodes entity by its `subscriber_id`
/// column (see `restrict_subscriber_for_entity`).
pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId);
}
/// Register the episodes entity and its `EpisodeType` enum with the
/// seaography GraphQL schema builder.
pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<episodes::EpisodeType>();
    seaography::register_entity!(builder, episodes);
    builder
}

View File

@ -0,0 +1,56 @@
use std::sync::Arc;
use async_graphql::dynamic::ResolverContext;
use sea_orm::Value as SeaValue;
use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};
use crate::{
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::util::{get_entity_column_key, get_entity_key},
},
models::feeds,
};
/// Wire the feeds entity into the shared `BuilderContext`: restrict access by
/// subscriber and auto-generate a `token` value when a create mutation omits
/// it.
pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<feeds::Entity>(context, &feeds::Column::SubscriberId);
    {
        let entity_column_key =
            get_entity_column_key::<feeds::Entity>(context, &feeds::Column::Token);
        let entity_key = get_entity_key::<feeds::Entity>(context);
        // Resolve the GraphQL field names of the createOne / createBatch
        // mutations so the conversion below only fires for those operations.
        let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
        let entity_create_one_mutation_field_name = Arc::new(format!(
            "{}{}",
            entity_name, context.entity_create_one_mutation.mutation_suffix
        ));
        let entity_create_batch_mutation_field_name = Arc::new(format!(
            "{}{}",
            entity_name,
            context.entity_create_batch_mutation.mutation_suffix.clone()
        ));
        // `input_none_conversions` is consulted when the client supplied no
        // value for the column: on create mutations we substitute a random
        // nanoid token; elsewhere the column stays unset.
        context.types.input_none_conversions.insert(
            entity_column_key,
            Box::new(
                move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
                    let field_name = context.field().name();
                    if field_name == entity_create_one_mutation_field_name.as_str()
                        || field_name == entity_create_batch_mutation_field_name.as_str()
                    {
                        Ok(Some(SeaValue::String(Some(Box::new(nanoid::nanoid!())))))
                    } else {
                        Ok(None)
                    }
                },
            ),
        );
    }
}
/// Register the feeds entity and its `FeedType` / `FeedSource` enums with the
/// seaography GraphQL schema builder.
pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<feeds::FeedType>();
    builder.register_enumeration::<feeds::FeedSource>();
    seaography::register_entity!(builder, feeds);
    builder
}

View File

@ -1,5 +1,12 @@
pub mod credential_3rd;
pub mod crypto;
pub mod bangumi;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
pub mod feeds;
pub mod subscriber_tasks;
pub mod subscribers;
pub mod subscription_bangumi;
pub mod subscription_episode;
pub mod subscriptions;

View File

@ -267,7 +267,6 @@ where
Box::new(
move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
let field_name = context.field().name();
tracing::warn!("field_name: {:?}", field_name);
if field_name == entity_create_one_mutation_field_name.as_str()
|| field_name == entity_create_batch_mutation_field_name.as_str()
{
@ -320,6 +319,7 @@ where
}
pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<subscribers::Entity>(context, &subscribers::Column::Id);
for column in subscribers::Column::iter() {
if !matches!(column, subscribers::Column::Id) {
let key = get_entity_column_key::<subscribers::Entity>(context, &column);

View File

@ -0,0 +1,20 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_bangumi,
};
/// Restrict GraphQL access to the subscription-bangumi join entity by its
/// `subscriber_id` column (see `restrict_subscriber_for_entity`).
pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
        context,
        &subscription_bangumi::Column::SubscriberId,
    );
}
/// Register the subscription-bangumi join entity with the seaography GraphQL
/// schema builder.
pub fn register_subscription_bangumi_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    seaography::register_entity!(builder, subscription_bangumi);
    builder
}

View File

@ -0,0 +1,20 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_episode,
};
/// Restrict GraphQL access to the subscription-episode join entity by its
/// `subscriber_id` column (see `restrict_subscriber_for_entity`).
pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscription_episode::Entity>(
        context,
        &subscription_episode::Column::SubscriberId,
    );
}
/// Register the subscription-episode join entity with the seaography GraphQL
/// schema builder.
pub fn register_subscription_episode_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    seaography::register_entity!(builder, subscription_episode);
    builder
}

View File

@ -3,13 +3,16 @@ use std::sync::Arc;
use async_graphql::dynamic::{FieldValue, TypeRef};
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
use seaography::{
Builder as SeaographyBuilder, EntityObjectBuilder, EntityQueryFieldBuilder,
Builder as SeaographyBuilder, BuilderContext, EntityObjectBuilder, EntityQueryFieldBuilder,
get_filter_conditions,
};
use crate::{
errors::RecorderError,
graphql::infra::custom::generate_entity_filter_mutation_field,
graphql::{
domains::subscribers::restrict_subscriber_for_entity,
infra::custom::generate_entity_filter_mutation_field,
},
models::{
subscriber_tasks,
subscriptions::{self, SubscriptionTrait},
@ -17,9 +20,19 @@ use crate::{
task::SubscriberTask,
};
/// Restrict GraphQL access to the subscriptions entity by its `subscriber_id`
/// column (see `restrict_subscriber_for_entity`).
pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscriptions::Entity>(
        context,
        &subscriptions::Column::SubscriberId,
    );
}
pub fn register_subscriptions_to_schema_builder(
mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
builder.register_enumeration::<subscriptions::SubscriptionCategory>();
seaography::register_entity!(builder, subscriptions);
let context = builder.context;
let entity_object_builder = EntityObjectBuilder { context };

View File

@ -7,10 +7,9 @@ use seaography::{BuilderContext, SeaResult};
use crate::{
app::AppContextTrait,
graphql::infra::util::{get_column_key, get_entity_key},
models::credential_3rd,
};
fn register_crypto_column_input_conversion_to_schema_context<T>(
pub fn register_crypto_column_input_conversion_to_schema_context<T>(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
column: &T::Column,
@ -37,7 +36,7 @@ fn register_crypto_column_input_conversion_to_schema_context<T>(
);
}
fn register_crypto_column_output_conversion_to_schema_context<T>(
pub fn register_crypto_column_output_conversion_to_schema_context<T>(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
column: &T::Column,
@ -68,39 +67,3 @@ fn register_crypto_column_output_conversion_to_schema_context<T>(
),
);
}
pub fn register_crypto_to_schema_context(
context: &mut BuilderContext,
ctx: Arc<dyn AppContextTrait>,
) {
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Cookies,
);
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Username,
);
register_crypto_column_input_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Password,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Cookies,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx.clone(),
&credential_3rd::Column::Username,
);
register_crypto_column_output_conversion_to_schema_context::<credential_3rd::Entity>(
context,
ctx,
&credential_3rd::Column::Password,
);
}

View File

@ -1,3 +1,4 @@
pub mod crypto;
pub mod custom;
pub mod json;
pub mod util;

View File

@ -8,17 +8,36 @@ use crate::{
app::AppContextTrait,
graphql::{
domains::{
credential_3rd::register_credential3rd_to_schema_builder,
crypto::register_crypto_to_schema_context,
bangumi::{register_bangumi_to_schema_builder, register_bangumi_to_schema_context},
credential_3rd::{
register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context,
},
downloaders::{
register_downloaders_to_schema_builder, register_downloaders_to_schema_context,
},
downloads::{
register_downloads_to_schema_builder, register_downloads_to_schema_context,
},
episodes::{register_episodes_to_schema_builder, register_episodes_to_schema_context},
feeds::{register_feeds_to_schema_builder, register_feeds_to_schema_context},
subscriber_tasks::{
register_subscriber_tasks_to_schema_builder,
register_subscriber_tasks_to_schema_context,
},
subscribers::{
register_subscribers_to_schema_builder, register_subscribers_to_schema_context,
restrict_subscriber_for_entity,
},
subscriptions::register_subscriptions_to_schema_builder,
subscription_bangumi::{
register_subscription_bangumi_to_schema_builder,
register_subscription_bangumi_to_schema_context,
},
subscription_episode::{
register_subscription_episode_to_schema_builder,
register_subscription_episode_to_schema_context,
},
subscriptions::{
register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context,
},
},
infra::json::register_jsonb_input_filter_to_schema_builder,
},
@ -31,7 +50,6 @@ pub fn build_schema(
depth: Option<usize>,
complexity: Option<usize>,
) -> Result<Schema, SchemaError> {
use crate::models::*;
let database = app_ctx.db().as_ref().clone();
let context = CONTEXT.get_or_init(|| {
@ -39,45 +57,17 @@ pub fn build_schema(
{
// domains
register_feeds_to_schema_context(&mut context);
register_subscribers_to_schema_context(&mut context);
{
restrict_subscriber_for_entity::<downloaders::Entity>(
&mut context,
&downloaders::Column::SubscriberId,
);
restrict_subscriber_for_entity::<downloads::Entity>(
&mut context,
&downloads::Column::SubscriberId,
);
restrict_subscriber_for_entity::<episodes::Entity>(
&mut context,
&episodes::Column::SubscriberId,
);
restrict_subscriber_for_entity::<subscriptions::Entity>(
&mut context,
&subscriptions::Column::SubscriberId,
);
restrict_subscriber_for_entity::<subscribers::Entity>(
&mut context,
&subscribers::Column::Id,
);
restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
&mut context,
&subscription_bangumi::Column::SubscriberId,
);
restrict_subscriber_for_entity::<subscription_episode::Entity>(
&mut context,
&subscription_episode::Column::SubscriberId,
);
restrict_subscriber_for_entity::<credential_3rd::Entity>(
&mut context,
&credential_3rd::Column::SubscriberId,
);
}
register_crypto_to_schema_context(&mut context, app_ctx.clone());
register_subscriptions_to_schema_context(&mut context);
register_subscriber_tasks_to_schema_context(&mut context);
register_credential3rd_to_schema_context(&mut context, app_ctx.clone());
register_downloaders_to_schema_context(&mut context);
register_downloads_to_schema_context(&mut context);
register_episodes_to_schema_context(&mut context);
register_subscription_bangumi_to_schema_context(&mut context);
register_subscription_episode_to_schema_context(&mut context);
register_bangumi_to_schema_context(&mut context);
}
context
});
@ -91,32 +81,16 @@ pub fn build_schema(
{
// domains
builder = register_subscribers_to_schema_builder(builder);
seaography::register_entities!(
builder,
[
bangumi,
downloaders,
downloads,
episodes,
subscription_bangumi,
subscription_episode,
subscriptions,
credential_3rd
]
);
{
builder.register_enumeration::<downloads::DownloadStatus>();
builder.register_enumeration::<subscriptions::SubscriptionCategory>();
builder.register_enumeration::<downloaders::DownloaderCategory>();
builder.register_enumeration::<downloads::DownloadMime>();
builder.register_enumeration::<credential_3rd::Credential3rdType>();
}
builder = register_feeds_to_schema_builder(builder);
builder = register_episodes_to_schema_builder(builder);
builder = register_subscription_bangumi_to_schema_builder(builder);
builder = register_subscription_episode_to_schema_builder(builder);
builder = register_downloaders_to_schema_builder(builder);
builder = register_downloads_to_schema_builder(builder);
builder = register_subscriptions_to_schema_builder(builder);
builder = register_credential3rd_to_schema_builder(builder);
builder = register_subscriber_tasks_to_schema_builder(builder);
builder = register_bangumi_to_schema_builder(builder);
}
let schema = builder.schema_builder();

View File

@ -52,8 +52,12 @@ pub enum Bangumi {
RssLink,
PosterLink,
OriginPosterLink,
/**
* @deprecated
*/
SavePath,
Homepage,
BangumiType,
}
#[derive(DeriveIden)]
@ -75,6 +79,9 @@ pub enum Episodes {
BangumiId,
SubscriberId,
DownloadId,
/**
* @deprecated
*/
SavePath,
Resolution,
Season,
@ -86,7 +93,11 @@ pub enum Episodes {
Homepage,
Subtitle,
Source,
Extra,
EpisodeType,
EnclosureTorrentLink,
EnclosureMagnetLink,
EnclosurePubDate,
EnclosureContentLength,
}
#[derive(DeriveIden)]
@ -149,6 +160,17 @@ pub enum Credential3rd {
UserAgent,
}
/// `sea-query` identifiers for the `feeds` table and its columns; used by the
/// feeds migration.
#[derive(DeriveIden)]
pub enum Feeds {
    Table,
    Id,
    Token,
    FeedType,
    FeedSource,
    SubscriberId,
    SubscriptionId,
}
macro_rules! create_postgres_enum_for_active_enum {
($manager: expr, $active_enum: expr, $($enum_value:expr),+) => {
{

View File

@ -95,8 +95,8 @@ impl MigrationTrait for Migration {
DownloadMimeEnum,
DownloadMime::iden_values(),
))
.col(big_unsigned(Downloads::AllSize))
.col(big_unsigned(Downloads::CurrSize))
.col(big_integer(Downloads::AllSize))
.col(big_integer(Downloads::CurrSize))
.col(text(Downloads::Url))
.col(text_null(Downloads::Homepage))
.col(text_null(Downloads::SavePath))

View File

@ -0,0 +1,95 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{
CustomSchemaManagerExt, Feeds, GeneralIds, Subscribers, Subscriptions, table_auto_z,
},
models::feeds::{FeedSource, FeedSourceEnum, FeedType, FeedTypeEnum},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
    /// Create the `feeds` table, its postgres enums (`feed_type`,
    /// `feed_source`), a unique index on `token`, foreign keys to
    /// subscribers/subscriptions, and the `updated_at` auto-update trigger.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // The postgres enum types must exist before the columns that use them.
        create_postgres_enum_for_active_enum!(manager, FeedTypeEnum, FeedType::Rss).await?;
        create_postgres_enum_for_active_enum!(
            manager,
            FeedSourceEnum,
            FeedSource::SubscriptionEpisode
        )
        .await?;
        manager
            .create_table(
                table_auto_z(Feeds::Table)
                    .col(pk_auto(Feeds::Id))
                    .col(text(Feeds::Token))
                    .col(enumeration(
                        Feeds::FeedType,
                        FeedTypeEnum,
                        FeedType::iden_values(),
                    ))
                    // NOTE(review): `.not_null()` here is redundant if the
                    // `enumeration` helper is already NOT NULL (as assumed for
                    // FeedType above) — confirm and make the two consistent.
                    .col(
                        enumeration(Feeds::FeedSource, FeedSourceEnum, FeedSource::iden_values())
                            .not_null(),
                    )
                    // Nullable owners: a feed may outlive (or lack) a
                    // subscriber/subscription association.
                    .col(integer_null(Feeds::SubscriberId))
                    .col(integer_null(Feeds::SubscriptionId))
                    // Tokens address feeds publicly, so they must be unique.
                    .index(
                        Index::create()
                            .if_not_exists()
                            .name("idx_feeds_token")
                            .table(Feeds::Table)
                            .col(Feeds::Token)
                            .unique(),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_feeds_subscriber_id")
                            .from(Feeds::Table, Feeds::SubscriberId)
                            .to(Subscribers::Table, Subscribers::Id)
                            .on_update(ForeignKeyAction::Cascade)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_feeds_subscription_id")
                            .from(Feeds::Table, Feeds::SubscriptionId)
                            .to(Subscriptions::Table, Subscriptions::Id)
                            .on_update(ForeignKeyAction::Cascade)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;
        manager
            .create_postgres_auto_update_ts_trigger_for_col(Feeds::Table, GeneralIds::UpdatedAt)
            .await?;
        Ok(())
    }
    /// Reverse of `up`: drop the trigger, the table, then the enum types.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_postgres_auto_update_ts_trigger_for_col(Feeds::Table, GeneralIds::UpdatedAt)
            .await?;
        manager
            .drop_table(Table::drop().if_exists().table(Feeds::Table).to_owned())
            .await?;
        manager
            .drop_postgres_enum_for_active_enum(FeedTypeEnum)
            .await?;
        manager
            .drop_postgres_enum_for_active_enum(FeedSourceEnum)
            .await?;
        Ok(())
    }
}

View File

@ -0,0 +1,133 @@
use async_trait::async_trait;
use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{Bangumi, CustomSchemaManagerExt, Episodes},
models::{
bangumi::{BangumiType, BangumiTypeEnum},
episodes::{EpisodeType, EpisodeTypeEnum},
},
};
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait]
impl MigrationTrait for Migration {
    /// Add the `bangumi_type` / `episode_type` discriminator columns and the
    /// episode enclosure columns, backfill them for existing Mikan rows, then
    /// tighten the new discriminators to NOT NULL and drop the deprecated
    /// `save_path` columns.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        {
            create_postgres_enum_for_active_enum!(manager, BangumiTypeEnum, BangumiType::Mikan)
                .await?;
            manager
                .alter_table(
                    Table::alter()
                        .table(Bangumi::Table)
                        // Added nullable first so existing rows remain valid;
                        // backfilled and tightened to NOT NULL below.
                        .add_column_if_not_exists(enumeration_null(
                            Bangumi::BangumiType,
                            BangumiTypeEnum,
                            BangumiType::iden_values(),
                        ))
                        .drop_column(Bangumi::SavePath)
                        .to_owned(),
                )
                .await?;
            // Backfill: rows carrying a mikan bangumi id are Mikan bangumi.
            manager
                .exec_stmt(
                    UpdateStatement::new()
                        .table(Bangumi::Table)
                        .value(
                            Bangumi::BangumiType,
                            BangumiType::Mikan.as_enum(BangumiTypeEnum),
                        )
                        .and_where(Expr::col(Bangumi::BangumiType).is_null())
                        .and_where(Expr::col(Bangumi::MikanBangumiId).is_not_null())
                        .to_owned(),
                )
                .await?;
            manager
                .alter_table(
                    Table::alter()
                        .table(Bangumi::Table)
                        .modify_column(enumeration(
                            Bangumi::BangumiType,
                            BangumiTypeEnum,
                            BangumiType::iden_values(),
                        ))
                        .to_owned(),
                )
                .await?;
        }
        {
            // Fix: the original version also invoked this macro once more at
            // the top of `up`, creating the same postgres enum twice; the
            // duplicate call has been removed.
            create_postgres_enum_for_active_enum!(manager, EpisodeTypeEnum, EpisodeType::Mikan)
                .await?;
            manager
                .alter_table(
                    Table::alter()
                        .table(Episodes::Table)
                        // NOTE(review): the bangumi block passes
                        // `iden_values()` here while this uses
                        // `enum_type_name()` — confirm that is intended.
                        .add_column_if_not_exists(enumeration_null(
                            Episodes::EpisodeType,
                            EpisodeTypeEnum,
                            EpisodeType::enum_type_name(),
                        ))
                        .add_column_if_not_exists(text_null(Episodes::EnclosureMagnetLink))
                        .add_column_if_not_exists(text_null(Episodes::EnclosureTorrentLink))
                        .add_column_if_not_exists(timestamp_with_time_zone_null(
                            Episodes::EnclosurePubDate,
                        ))
                        .add_column_if_not_exists(big_integer_null(
                            Episodes::EnclosureContentLength,
                        ))
                        .drop_column(Episodes::SavePath)
                        .to_owned(),
                )
                .await?;
            // Backfill: rows carrying a mikan episode id are Mikan episodes.
            manager
                .exec_stmt(
                    UpdateStatement::new()
                        .table(Episodes::Table)
                        .value(
                            Episodes::EpisodeType,
                            EpisodeType::Mikan.as_enum(EpisodeTypeEnum),
                        )
                        .and_where(Expr::col(Episodes::EpisodeType).is_null())
                        .and_where(Expr::col(Episodes::MikanEpisodeId).is_not_null())
                        .to_owned(),
                )
                .await?;
            manager
                .alter_table(
                    Table::alter()
                        .table(Episodes::Table)
                        .modify_column(enumeration(
                            Episodes::EpisodeType,
                            EpisodeTypeEnum,
                            EpisodeType::enum_type_name(),
                        ))
                        .to_owned(),
                )
                .await?;
        }
        Ok(())
    }

    /// Drops only the enum types; the removed (deprecated) `save_path`
    /// columns and the added columns are not restored.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_postgres_enum_for_active_enum(BangumiTypeEnum)
            .await?;
        manager
            .drop_postgres_enum_for_active_enum(EpisodeTypeEnum)
            .await?;
        Ok(())
    }
}

View File

@ -8,6 +8,8 @@ pub mod m20240224_082543_add_downloads;
pub mod m20241231_000001_auth;
pub mod m20250501_021523_credential_3rd;
pub mod m20250520_021135_subscriber_tasks;
pub mod m20250622_015618_feeds;
pub mod m20250622_020819_bangumi_and_episode_type;
pub struct Migrator;
@ -20,6 +22,8 @@ impl MigratorTrait for Migrator {
Box::new(m20241231_000001_auth::Migration),
Box::new(m20250501_021523_credential_3rd::Migration),
Box::new(m20250520_021135_subscriber_tasks::Migration),
Box::new(m20250622_015618_feeds::Migration),
Box::new(m20250622_020819_bangumi_and_episode_type::Migration),
]
}
}

View File

@ -17,7 +17,7 @@ use crate::{
MikanBangumiHash, MikanBangumiMeta, build_mikan_bangumi_subscription_rss_url,
scrape_mikan_poster_meta_from_image_url,
},
origin::{OriginCompTrait, SeasonComp},
origin::{BangumiComps, OriginCompTrait},
},
};
@ -29,7 +29,14 @@ pub struct BangumiFilter {
pub group: Option<Vec<String>>,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
/// Source-platform discriminator for bangumi rows; stored in postgres as the
/// `bangumi_type` enum.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "bangumi_type")]
pub enum BangumiType {
    /// Bangumi originating from Mikan.
    #[sea_orm(string_value = "mikan")]
    Mikan,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "bangumi")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
@ -39,6 +46,7 @@ pub struct Model {
#[sea_orm(primary_key)]
pub id: i32,
pub mikan_bangumi_id: Option<String>,
pub bangumi_type: BangumiType,
pub subscriber_id: i32,
pub display_name: String,
pub origin_name: String,
@ -50,7 +58,6 @@ pub struct Model {
pub rss_link: Option<String>,
pub poster_link: Option<String>,
pub origin_poster_link: Option<String>,
pub save_path: Option<String>,
pub homepage: Option<String>,
}
@ -122,11 +129,12 @@ impl ActiveModel {
) -> RecorderResult<Self> {
let mikan_client = ctx.mikan();
let mikan_base_url = mikan_client.base_url();
let season_comp = SeasonComp::parse_comp(&meta.bangumi_title)
let season_comp = BangumiComps::parse_comp(&meta.bangumi_title)
.ok()
.map(|(_, s)| s);
.map(|(_, s)| s)
.and_then(|s| s.season);
let season_index = season_comp.as_ref().map(|s| s.num).unwrap_or(1);
let season_raw = season_comp.map(|s| s.source.into_owned());
let season_raw = season_comp.map(|s| s.source.to_string());
let rss_url = build_mikan_bangumi_subscription_rss_url(
mikan_base_url.clone(),
@ -155,6 +163,7 @@ impl ActiveModel {
origin_poster_link: ActiveValue::Set(meta.origin_poster_src.map(|src| src.to_string())),
homepage: ActiveValue::Set(Some(meta.homepage.to_string())),
rss_link: ActiveValue::Set(Some(rss_url.to_string())),
bangumi_type: ActiveValue::Set(BangumiType::Mikan),
..Default::default()
})
}
@ -231,6 +240,7 @@ impl Model {
Column::OriginName,
Column::Fansub,
Column::PosterLink,
Column::OriginPosterLink,
Column::Season,
Column::SeasonRaw,
Column::RssLink,

View File

@ -52,8 +52,8 @@ pub struct Model {
pub status: DownloadStatus,
pub mime: DownloadMime,
pub url: String,
pub all_size: Option<u64>,
pub curr_size: Option<u64>,
pub all_size: Option<i64>,
pub curr_size: Option<i64>,
pub homepage: Option<String>,
pub save_path: Option<String>,
}

View File

@ -9,11 +9,19 @@ use crate::{
app::AppContextTrait,
errors::RecorderResult,
extract::{
bittorrent::EpisodeEnclosureMeta,
mikan::{MikanEpisodeHash, MikanEpisodeMeta, build_mikan_episode_homepage_url},
origin::{OriginCompTrait, OriginNameRoot},
},
};
/// Source-platform discriminator for episode rows; stored in postgres as the
/// `episode_type` enum.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "episode_type")]
pub enum EpisodeType {
    /// Episodes originating from Mikan.
    #[sea_orm(string_value = "mikan")]
    Mikan,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "episodes")]
pub struct Model {
@ -25,11 +33,15 @@ pub struct Model {
pub id: i32,
#[sea_orm(indexed)]
pub mikan_episode_id: Option<String>,
pub enclosure_torrent_link: Option<String>,
pub enclosure_magnet_link: Option<String>,
pub enclosure_pub_date: Option<DateTimeUtc>,
pub enclosure_content_length: Option<i64>,
pub episode_type: EpisodeType,
pub origin_name: String,
pub display_name: String,
pub bangumi_id: i32,
pub subscriber_id: i32,
pub save_path: Option<String>,
pub resolution: Option<String>,
pub season: i32,
pub season_raw: Option<String>,
@ -122,6 +134,7 @@ impl ActiveModel {
ctx: &dyn AppContextTrait,
bangumi: &bangumi::Model,
episode: MikanEpisodeMeta,
enclosure_meta: EpisodeEnclosureMeta,
) -> RecorderResult<Self> {
let mikan_base_url = ctx.mikan().base_url().clone();
let episode_extention_meta = OriginNameRoot::parse_comp(&episode.episode_title)
@ -149,6 +162,11 @@ impl ActiveModel {
poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
origin_poster_link: ActiveValue::Set(bangumi.origin_poster_link.clone()),
episode_index: ActiveValue::Set(0),
enclosure_torrent_link: ActiveValue::Set(enclosure_meta.torrent_link),
enclosure_magnet_link: ActiveValue::Set(enclosure_meta.magnet_link),
enclosure_pub_date: ActiveValue::Set(enclosure_meta.pub_date),
enclosure_content_length: ActiveValue::Set(enclosure_meta.content_length),
episode_type: ActiveValue::Set(EpisodeType::Mikan),
..Default::default()
};
@ -216,14 +234,19 @@ impl Model {
pub async fn add_mikan_episodes_for_subscription(
ctx: &dyn AppContextTrait,
creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta)>,
creations: impl Iterator<Item = (&bangumi::Model, MikanEpisodeMeta, EpisodeEnclosureMeta)>,
subscriber_id: i32,
subscription_id: i32,
) -> RecorderResult<()> {
let db = ctx.db();
let new_episode_active_modes: Vec<ActiveModel> = creations
.map(|(bangumi, episode_meta)| {
ActiveModel::from_mikan_bangumi_and_episode_meta(ctx, bangumi, episode_meta)
.map(|(bangumi, episode_meta, enclosure_meta)| {
ActiveModel::from_mikan_bangumi_and_episode_meta(
ctx,
bangumi,
episode_meta,
enclosure_meta,
)
})
.collect::<Result<_, _>>()?;
@ -234,7 +257,23 @@ impl Model {
let new_episode_ids = Entity::insert_many(new_episode_active_modes)
.on_conflict(
OnConflict::columns([Column::MikanEpisodeId, Column::SubscriberId])
.update_columns([Column::OriginName, Column::PosterLink, Column::Homepage])
.update_columns([
Column::OriginName,
Column::PosterLink,
Column::OriginPosterLink,
Column::Homepage,
Column::EnclosureContentLength,
Column::EnclosurePubDate,
Column::EnclosureTorrentLink,
Column::EnclosureMagnetLink,
Column::EpisodeIndex,
Column::Subtitle,
Column::Source,
Column::Resolution,
Column::Season,
Column::SeasonRaw,
Column::Fansub,
])
.to_owned(),
)
.exec_with_returning_columns(db, [Column::Id])

View File

@ -0,0 +1,133 @@
mod registry;
mod rss;
mod subscription_episodes_feed;
use ::rss::Channel;
use async_trait::async_trait;
pub use registry::Feed;
pub use rss::{RssFeedItemTrait, RssFeedTrait};
use sea_orm::{ActiveValue, DeriveEntityModel, entity::prelude::*};
use serde::{Deserialize, Serialize};
pub use subscription_episodes_feed::SubscriptionEpisodesFeed;
use url::Url;
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
};
/// Serialization format of a feed; stored in postgres as the `feed_type`
/// enum. Currently only RSS is supported.
#[derive(
    Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "feed_type")]
#[serde(rename_all = "snake_case")]
pub enum FeedType {
    #[sea_orm(string_value = "rss")]
    Rss,
}
/// What the feed's items are derived from; stored in postgres as the
/// `feed_source` enum.
#[derive(
    Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "feed_source")]
#[serde(rename_all = "snake_case")]
pub enum FeedSource {
    /// Items come from the episodes of a subscription.
    #[sea_orm(string_value = "subscription_episode")]
    SubscriptionEpisode,
}
/// A generated feed row; addressed externally by its unique `token`
/// (see `find_rss_feed_by_token`).
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "feeds")]
pub struct Model {
    #[sea_orm(default_expr = "Expr::current_timestamp()")]
    pub created_at: DateTimeUtc,
    #[sea_orm(default_expr = "Expr::current_timestamp()")]
    pub updated_at: DateTimeUtc,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Lookup secret; auto-generated on insert when not supplied
    // (see `ActiveModelBehavior::before_save`).
    #[sea_orm(indexed)]
    pub token: String,
    #[sea_orm(indexed)]
    pub feed_type: FeedType,
    #[sea_orm(indexed)]
    pub feed_source: FeedSource,
    // Nullable: a feed may exist without an owning subscriber/subscription.
    pub subscriber_id: Option<i32>,
    pub subscription_id: Option<i32>,
}
/// sea-orm relations from feeds to their optional owners; both cascade on
/// update and delete, matching the migration's foreign keys.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::subscriptions::Entity",
        from = "Column::SubscriptionId",
        to = "super::subscriptions::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Subscription,
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Subscriber,
}
impl Related<super::subscriptions::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscription.def()
    }
}
impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}
/// Related-entity listing consumed by seaography for schema generation.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
    #[sea_orm(entity = "super::subscribers::Entity")]
    Subscriber,
    #[sea_orm(entity = "super::subscriptions::Entity")]
    Subscription,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {
    /// Fills in a random public token for freshly inserted feeds that did
    /// not specify one; updates never touch the token.
    async fn before_save<C>(mut self, _db: &C, insert: bool) -> Result<Self, DbErr>
    where
        C: ConnectionTrait,
    {
        // Only generate on insert, and only when the caller left it unset.
        if insert && matches!(self.token, ActiveValue::NotSet) {
            self.token = ActiveValue::Set(nanoid::nanoid!(10));
        }
        Ok(self)
    }
}
impl Model {
    /// Look up an RSS feed by its public token and render it as an RSS
    /// channel.
    ///
    /// `api_base` is the externally visible origin used to build absolute
    /// links inside the generated channel. Returns
    /// `RecorderError::ModelEntityNotFound` when no RSS-typed feed carries
    /// `token`.
    pub async fn find_rss_feed_by_token(
        ctx: &dyn AppContextTrait,
        token: &str,
        api_base: &Url,
    ) -> RecorderResult<Channel> {
        let db = ctx.db();
        // Constrain on both token and feed type so a token of another feed
        // type cannot be served as RSS.
        let feed_model = Entity::find()
            .filter(Column::Token.eq(token))
            .filter(Column::FeedType.eq(FeedType::Rss))
            .one(db)
            .await?
            .ok_or(RecorderError::ModelEntityNotFound {
                entity: "Feed".into(),
            })?;
        // Materialize the feed's content, then render it.
        let feed = Feed::from_model(ctx, feed_model).await?;
        feed.into_rss_channel(ctx, api_base)
    }
}

View File

@ -0,0 +1,65 @@
use rss::Channel;
use sea_orm::{ColumnTrait, EntityTrait, JoinType, QueryFilter, QuerySelect, RelationTrait};
use url::Url;
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
models::{
episodes,
feeds::{self, FeedSource, RssFeedTrait, SubscriptionEpisodesFeed},
subscription_episode, subscriptions,
},
};
/// Fully loaded feed content, one variant per `FeedSource`.
pub enum Feed {
    // NOTE(review): variant name is misspelled ("Subscritpion" should be
    // "Subscription"); it is referenced elsewhere, so renaming needs a
    // coordinated change rather than a local fix.
    SubscritpionEpisodes(SubscriptionEpisodesFeed),
}
impl Feed {
    /// Load the data behind a feed row and wrap it in the matching variant.
    ///
    /// For `SubscriptionEpisode` feeds this fetches the linked subscription
    /// and every episode attached to it through the `subscription_episode`
    /// join table. A feed whose `subscription_id` is missing or dangling
    /// yields `ModelEntityNotFound`.
    pub async fn from_model(ctx: &dyn AppContextTrait, m: feeds::Model) -> RecorderResult<Self> {
        match m.feed_source {
            FeedSource::SubscriptionEpisode => {
                let db = ctx.db();
                // Both the FK and the referenced subscription row must exist.
                let (subscription, episodes) = if let Some(subscription_id) = m.subscription_id
                    && let Some(subscription) = subscriptions::Entity::find()
                        .filter(subscriptions::Column::Id.eq(subscription_id))
                        .one(db)
                        .await?
                {
                    // episodes -> subscription_episode -> subscriptions,
                    // filtered down to this subscription.
                    let episodes = episodes::Entity::find()
                        .join(
                            JoinType::InnerJoin,
                            episodes::Relation::SubscriptionEpisode.def(),
                        )
                        .join(
                            JoinType::InnerJoin,
                            subscription_episode::Relation::Subscription.def(),
                        )
                        .filter(subscriptions::Column::Id.eq(subscription_id))
                        .all(db)
                        .await?;
                    (subscription, episodes)
                } else {
                    return Err(RecorderError::ModelEntityNotFound {
                        entity: "Subscription".into(),
                    });
                };
                Ok(Feed::SubscritpionEpisodes(
                    SubscriptionEpisodesFeed::from_model(m, subscription, episodes),
                ))
            }
        }
    }
    /// Render the loaded feed as an RSS channel, consuming `self`.
    pub fn into_rss_channel(
        self,
        ctx: &dyn AppContextTrait,
        api_base: &Url,
    ) -> RecorderResult<Channel> {
        match self {
            Self::SubscritpionEpisodes(feed) => feed.into_channel(ctx, api_base),
        }
    }
}

View File

@ -0,0 +1,142 @@
use std::borrow::Cow;
use chrono::{DateTime, Utc};
use downloader::bittorrent::BITTORRENT_MIME_TYPE;
use maplit::btreemap;
use rss::{
Channel, ChannelBuilder, EnclosureBuilder, GuidBuilder, Item, ItemBuilder,
extension::{ExtensionBuilder, ExtensionMap},
};
use url::Url;
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
};
/// Contract for a value that can be rendered as one RSS `<item>`.
pub trait RssFeedItemTrait: Sized {
    /// Stable identifier used as the item's GUID (not a permalink).
    fn get_guid_value(&self) -> Cow<'_, str>;
    /// Item title.
    fn get_title(&self) -> Cow<'_, str>;
    /// Item description.
    fn get_description(&self) -> Cow<'_, str>;
    /// Absolute link of the item, if one can be derived.
    fn get_link(&self, ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>>;
    /// MIME type of the enclosure payload.
    fn get_enclosure_mime(&self) -> Option<Cow<'_, str>>;
    /// URL of the enclosure payload.
    fn get_enclosure_link(&self, ctx: &dyn AppContextTrait, api_base: &Url)
    -> Option<Cow<'_, str>>;
    /// Publication time of the enclosure, when known.
    fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>>;
    /// Enclosure size in bytes, when known.
    fn get_enclosure_content_length(&self) -> Option<i64>;
    /// Build an RSS [`Item`] from this value.
    ///
    /// Fails with `MikanRssInvalidFieldError` naming the first missing
    /// required field (enclosure MIME type, enclosure link, content length,
    /// or item link); the pub date is the only optional piece.
    fn into_item(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Item> {
        let enclosure_mime_type =
            self.get_enclosure_mime()
                .ok_or_else(|| RecorderError::MikanRssInvalidFieldError {
                    field: "enclosure_mime_type".into(),
                    source: None.into(),
                })?;
        let enclosure_link = self.get_enclosure_link(ctx, api_base).ok_or_else(|| {
            RecorderError::MikanRssInvalidFieldError {
                field: "enclosure_link".into(),
                source: None.into(),
            }
        })?;
        let enclosure_content_length = self.get_enclosure_content_length().ok_or_else(|| {
            RecorderError::MikanRssInvalidFieldError {
                field: "enclosure_content_length".into(),
                source: None.into(),
            }
        })?;
        let enclosure_pub_date = self.get_enclosure_pub_date();
        let link = self.get_link(ctx, api_base).ok_or_else(|| {
            RecorderError::MikanRssInvalidFieldError {
                field: "link".into(),
                source: None.into(),
            }
        })?;
        let mut extensions = ExtensionMap::default();
        // For torrent enclosures, additionally expose the enclosure data as
        // a "torrent" extension namespace (link/contentLength, plus pubDate
        // when known) for readers that consume torrent-style RSS.
        if enclosure_mime_type == BITTORRENT_MIME_TYPE {
            extensions.insert("torrent".to_string(), {
                let mut map = btreemap! {
                    "link".to_string() => vec![
                        ExtensionBuilder::default().name(
                            "link"
                        ).value(enclosure_link.to_string()).build()
                    ],
                    "contentLength".to_string() => vec![
                        ExtensionBuilder::default().name(
                            "contentLength"
                        ).value(enclosure_content_length.to_string()).build()
                    ],
                };
                if let Some(pub_date) = enclosure_pub_date {
                    map.insert(
                        "pubDate".to_string(),
                        vec![
                            ExtensionBuilder::default()
                                .name("pubDate")
                                .value(pub_date.to_rfc3339())
                                .build(),
                        ],
                    );
                }
                map
            });
        };
        let enclosure = EnclosureBuilder::default()
            .mime_type(enclosure_mime_type)
            .url(enclosure_link.to_string())
            .length(enclosure_content_length.to_string())
            .build();
        // GUIDs are synthetic ids, not resolvable URLs.
        let guid = GuidBuilder::default()
            .value(self.get_guid_value())
            .permalink(false)
            .build();
        let item = ItemBuilder::default()
            .guid(guid)
            .title(self.get_title().to_string())
            .description(self.get_description().to_string())
            .link(link.to_string())
            .enclosure(enclosure)
            .extensions(extensions)
            .build();
        Ok(item)
    }
}
/// Contract for a value that can be rendered as a whole RSS channel.
pub trait RssFeedTrait: Sized {
    type Item: RssFeedItemTrait;
    /// Channel description.
    fn get_description(&self) -> Cow<'_, str>;
    /// Channel title.
    fn get_title(&self) -> Cow<'_, str>;
    /// Self-link of the channel, if one can be derived.
    fn get_link(&self, ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>>;
    /// Borrowing iterator over the channel's items.
    fn items(&self) -> impl Iterator<Item = &Self::Item>;
    /// Consuming iterator over the channel's items.
    fn into_items(self) -> impl Iterator<Item = Self::Item>;
    /// Build an RSS [`Channel`], consuming `self`.
    ///
    /// Fails with `MikanRssInvalidFieldError` when the channel link cannot
    /// be derived, or when any item fails to render.
    fn into_channel(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Channel> {
        let link = match self.get_link(ctx, api_base) {
            Some(link) => link.to_string(),
            None => {
                return Err(RecorderError::MikanRssInvalidFieldError {
                    field: "link".into(),
                    source: None.into(),
                });
            }
        };
        // Capture the borrowed metadata before `into_items` consumes self.
        let title = self.get_title().to_string();
        let description = self.get_description().to_string();
        let items = self
            .into_items()
            .map(|item| item.into_item(ctx, api_base))
            .collect::<RecorderResult<Vec<_>>>()?;
        Ok(ChannelBuilder::default()
            .title(title)
            .link(link)
            .description(description)
            .items(items)
            .build())
    }
}

View File

@ -0,0 +1,114 @@
use std::borrow::Cow;
use chrono::{DateTime, Utc};
use downloader::bittorrent::BITTORRENT_MIME_TYPE;
use url::Url;
use crate::{
app::{AppContextTrait, PROJECT_NAME},
models::{
episodes,
feeds::{
self,
rss::{RssFeedItemTrait, RssFeedTrait},
},
subscriptions,
},
web::controller,
};
/// In-memory aggregate backing an RSS feed that lists the episodes of a
/// single subscription.
pub struct SubscriptionEpisodesFeed {
    /// Feed row that owns the public token.
    pub feed: feeds::Model,
    /// Subscription whose episodes are exposed.
    pub subscription: subscriptions::Model,
    /// Episodes to render as feed items.
    pub episodes: Vec<episodes::Model>,
}
impl SubscriptionEpisodesFeed {
    /// Bundle three already-loaded models; performs no queries.
    pub fn from_model(
        feed: feeds::Model,
        subscription: subscriptions::Model,
        episodes: Vec<episodes::Model>,
    ) -> Self {
        Self {
            feed,
            subscription,
            episodes,
        }
    }
}
impl RssFeedItemTrait for episodes::Model {
    /// Synthetic GUID derived from the project name and the episode row id.
    fn get_guid_value(&self) -> Cow<'_, str> {
        Cow::Owned(format!("{PROJECT_NAME}:episode:{}", self.id))
    }
    /// The episode's display name doubles as the item title.
    fn get_title(&self) -> Cow<'_, str> {
        Cow::Borrowed(self.display_name.as_str())
    }
    /// No dedicated description is stored; reuse the display name.
    fn get_description(&self) -> Cow<'_, str> {
        Cow::Borrowed(self.display_name.as_str())
    }
    /// Item link is the episode homepage, when one is known.
    fn get_link(&self, _ctx: &dyn AppContextTrait, _api_base: &Url) -> Option<Cow<'_, str>> {
        self.homepage
            .as_ref()
            .map(|homepage| Cow::Borrowed(homepage.as_str()))
    }
    /// Only torrent enclosures are supported, so the MIME type is fixed and
    /// present exactly when a torrent link is present.
    fn get_enclosure_mime(&self) -> Option<Cow<'_, str>> {
        self.enclosure_torrent_link
            .as_ref()
            .map(|_| Cow::Borrowed(BITTORRENT_MIME_TYPE))
    }
    fn get_enclosure_link(
        &self,
        _ctx: &dyn AppContextTrait,
        _api_base: &Url,
    ) -> Option<Cow<'_, str>> {
        self.enclosure_torrent_link
            .as_ref()
            .map(|link| Cow::Borrowed(link.as_str()))
    }
    fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>> {
        self.enclosure_pub_date
    }
    fn get_enclosure_content_length(&self) -> Option<i64> {
        self.enclosure_content_length
    }
}
impl RssFeedTrait for SubscriptionEpisodesFeed {
    type Item = episodes::Model;

    /// Human-readable summary naming the subscription this feed tracks.
    fn get_description(&self) -> Cow<'_, str> {
        Cow::Owned(format!(
            "{PROJECT_NAME} - episodes of subscription {}",
            self.subscription.id
        ))
    }

    fn get_title(&self) -> Cow<'_, str> {
        Cow::Owned(format!("{PROJECT_NAME} - subscription episodes"))
    }

    /// Self-link of the channel: the RSS endpoint that serves it.
    ///
    /// Fix: the feeds controller mounts the handler at
    /// `{CONTROLLER_PREFIX}/rss/{token}`, but this link previously omitted
    /// the `/rss` segment and therefore pointed at a non-existent URL.
    fn get_link(&self, _ctx: &dyn AppContextTrait, api_base: &Url) -> Option<Cow<'_, str>> {
        let api_base = api_base
            .join(&format!(
                "{}/rss/{}",
                controller::feeds::CONTROLLER_PREFIX,
                self.feed.token
            ))
            .ok()?;
        Some(Cow::Owned(api_base.to_string()))
    }

    fn items(&self) -> impl Iterator<Item = &Self::Item> {
        self.episodes.iter()
    }

    fn into_items(self) -> impl Iterator<Item = Self::Item> {
        self.episodes.into_iter()
    }
}

View File

@ -4,6 +4,7 @@ pub mod credential_3rd;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
pub mod feeds;
pub mod query;
pub mod subscriber_tasks;
pub mod subscribers;

View File

@ -3,11 +3,11 @@ use sea_orm::{ActiveValue, FromJsonQueryResult, TransactionTrait, entity::prelud
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
app::{AppContextTrait, PROJECT_NAME},
errors::app_error::{RecorderError, RecorderResult},
};
pub const SEED_SUBSCRIBER: &str = "konobangu";
pub const SEED_SUBSCRIBER: &str = PROJECT_NAME;
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscriberBangumiConfig {
@ -41,6 +41,10 @@ pub enum Relation {
Auth,
#[sea_orm(has_many = "super::credential_3rd::Entity")]
Credential3rd,
#[sea_orm(has_many = "super::feeds::Entity")]
Feed,
#[sea_orm(has_many = "super::subscriber_tasks::Entity")]
SubscriberTask,
}
impl Related<super::subscriptions::Entity> for Entity {
@ -79,6 +83,18 @@ impl Related<super::credential_3rd::Entity> for Entity {
}
}
impl Related<super::feeds::Entity> for Entity {
fn to() -> RelationDef {
Relation::Feed.def()
}
}
impl Related<super::subscriber_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriberTask.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscriptions::Entity")]
@ -91,6 +107,10 @@ pub enum RelatedEntity {
Episode,
#[sea_orm(entity = "super::credential_3rd::Entity")]
Credential3rd,
#[sea_orm(entity = "super::feeds::Entity")]
Feed,
#[sea_orm(entity = "super::subscriber_tasks::Entity")]
SubscriberTask,
}
#[derive(Debug, Deserialize, Serialize)]

View File

@ -1,311 +0,0 @@
use std::{fmt::Debug, sync::Arc};
use async_trait::async_trait;
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
extract::mikan::{
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
},
};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
#[sea_orm(string_value = "mikan_subscriber")]
MikanSubscriber,
#[sea_orm(string_value = "mikan_season")]
MikanSeason,
#[sea_orm(string_value = "mikan_bangumi")]
MikanBangumi,
#[sea_orm(string_value = "manual")]
Manual,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTimeUtc,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTimeUtc,
#[sea_orm(primary_key)]
pub id: i32,
pub display_name: String,
pub subscriber_id: i32,
pub category: SubscriptionCategory,
pub source_url: String,
pub enabled: bool,
pub credential_id: Option<i32>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
)]
Subscriber,
#[sea_orm(has_many = "super::bangumi::Entity")]
Bangumi,
#[sea_orm(has_many = "super::episodes::Entity")]
Episodes,
#[sea_orm(has_many = "super::subscription_episode::Entity")]
SubscriptionEpisode,
#[sea_orm(has_many = "super::subscription_bangumi::Entity")]
SubscriptionBangumi,
#[sea_orm(
belongs_to = "super::credential_3rd::Entity",
from = "Column::CredentialId",
to = "super::credential_3rd::Column::Id",
on_update = "Cascade",
on_delete = "SetNull"
)]
Credential3rd,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::subscription_bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriptionBangumi.def()
}
}
impl Related<super::subscription_episode::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriptionEpisode.def()
}
}
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
super::subscription_bangumi::Relation::Bangumi.def()
}
fn via() -> Option<RelationDef> {
Some(
super::subscription_bangumi::Relation::Subscription
.def()
.rev(),
)
}
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
super::subscription_episode::Relation::Episode.def()
}
fn via() -> Option<RelationDef> {
Some(
super::subscription_episode::Relation::Subscription
.def()
.rev(),
)
}
}
impl Related<super::credential_3rd::Entity> for Entity {
fn to() -> RelationDef {
Relation::Credential3rd.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::bangumi::Entity")]
Bangumi,
#[sea_orm(entity = "super::episodes::Entity")]
Episode,
#[sea_orm(entity = "super::subscription_episode::Entity")]
SubscriptionEpisode,
#[sea_orm(entity = "super::subscription_bangumi::Entity")]
SubscriptionBangumi,
#[sea_orm(entity = "super::credential_3rd::Entity")]
Credential3rd,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {}
impl Model {
pub async fn toggle_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
enabled: bool,
) -> RecorderResult<()> {
let db = ctx.db();
Entity::update_many()
.col_expr(Column::Enabled, Expr::value(enabled))
.filter(Column::Id.is_in(ids))
.exec(db)
.await?;
Ok(())
}
pub async fn delete_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
) -> RecorderResult<()> {
let db = ctx.db();
Entity::delete_many()
.filter(Column::Id.is_in(ids))
.exec(db)
.await?;
Ok(())
}
pub async fn find_by_id_and_subscriber_id(
ctx: &dyn AppContextTrait,
subscriber_id: i32,
subscription_id: i32,
) -> RecorderResult<Self> {
let db = ctx.db();
let subscription_model = Entity::find_by_id(subscription_id)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
if subscription_model.subscriber_id != subscriber_id {
Err(RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
}
Ok(subscription_model)
}
}
#[async_trait]
pub trait SubscriptionTrait: Sized + Debug {
fn get_subscriber_id(&self) -> i32;
fn get_subscription_id(&self) -> i32;
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
fn try_from_model(model: &Model) -> RecorderResult<Self>;
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "category")]
pub enum Subscription {
#[serde(rename = "mikan_subscriber")]
MikanSubscriber(MikanSubscriberSubscription),
#[serde(rename = "mikan_season")]
MikanSeason(MikanSeasonSubscription),
#[serde(rename = "mikan_bangumi")]
MikanBangumi(MikanBangumiSubscription),
#[serde(rename = "manual")]
Manual,
}
impl Subscription {
pub fn category(&self) -> SubscriptionCategory {
match self {
Self::MikanSubscriber(_) => SubscriptionCategory::MikanSubscriber,
Self::MikanSeason(_) => SubscriptionCategory::MikanSeason,
Self::MikanBangumi(_) => SubscriptionCategory::MikanBangumi,
Self::Manual => SubscriptionCategory::Manual,
}
}
}
#[async_trait]
impl SubscriptionTrait for Subscription {
fn get_subscriber_id(&self) -> i32 {
match self {
Self::MikanSubscriber(subscription) => subscription.get_subscriber_id(),
Self::MikanSeason(subscription) => subscription.get_subscriber_id(),
Self::MikanBangumi(subscription) => subscription.get_subscriber_id(),
Self::Manual => unreachable!(),
}
}
fn get_subscription_id(&self) -> i32 {
match self {
Self::MikanSubscriber(subscription) => subscription.get_subscription_id(),
Self::MikanSeason(subscription) => subscription.get_subscription_id(),
Self::MikanBangumi(subscription) => subscription.get_subscription_id(),
Self::Manual => unreachable!(),
}
}
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::Manual => Ok(()),
}
}
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_feeds_full(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_feeds_full(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_feeds_full(ctx).await,
Self::Manual => Ok(()),
}
}
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_sources(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_sources(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_sources(ctx).await,
Self::Manual => Ok(()),
}
}
fn try_from_model(model: &Model) -> RecorderResult<Self> {
match model.category {
SubscriptionCategory::MikanSubscriber => {
MikanSubscriberSubscription::try_from_model(model).map(Self::MikanSubscriber)
}
SubscriptionCategory::MikanSeason => {
MikanSeasonSubscription::try_from_model(model).map(Self::MikanSeason)
}
SubscriptionCategory::MikanBangumi => {
MikanBangumiSubscription::try_from_model(model).map(Self::MikanBangumi)
}
SubscriptionCategory::Manual => Ok(Self::Manual),
}
}
}
impl TryFrom<&Model> for Subscription {
type Error = RecorderError;
fn try_from(model: &Model) -> Result<Self, Self::Error> {
Self::try_from_model(model)
}
}

View File

@ -0,0 +1,20 @@
use std::{fmt::Debug, sync::Arc};
use async_trait::async_trait;
use crate::{app::AppContextTrait, errors::RecorderResult, models::subscriptions};
/// Common contract implemented by every concrete subscription kind.
#[async_trait]
pub trait SubscriptionTrait: Sized + Debug {
    /// Id of the owning subscriber.
    fn get_subscriber_id(&self) -> i32;
    /// Id of the subscription row backing this value.
    fn get_subscription_id(&self) -> i32;
    /// Incrementally sync feed items.
    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
    /// Fully (re-)sync feed items.
    async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
    /// Sync upstream source data for the subscription.
    async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
    /// Parse a database row into this subscription kind.
    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self>;
}

View File

@ -0,0 +1,195 @@
mod core;
mod registry;
pub use core::SubscriptionTrait;
use std::fmt::Debug;
use async_trait::async_trait;
pub use registry::{
Subscription, SubscriptionCategory, SubscriptionCategoryEnum, SubscriptionCategoryIter,
SubscriptionCategoryVariant, SubscriptionCategoryVariantIter,
};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
};
/// Database row for the `subscriptions` table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
    /// Row creation time (DB default: now).
    #[sea_orm(default_expr = "Expr::current_timestamp()")]
    pub created_at: DateTimeUtc,
    /// Last update time (DB default: now).
    #[sea_orm(default_expr = "Expr::current_timestamp()")]
    pub updated_at: DateTimeUtc,
    #[sea_orm(primary_key)]
    pub id: i32,
    /// Human-readable name of the subscription.
    pub display_name: String,
    /// Owning subscriber.
    pub subscriber_id: i32,
    /// Which concrete subscription kind this row encodes.
    pub category: SubscriptionCategory,
    /// Upstream URL the subscription was created from.
    pub source_url: String,
    /// Whether the subscription is active (see `toggle_with_ids`).
    pub enabled: bool,
    /// Optional third-party credential used for the source.
    pub credential_id: Option<i32>,
}
/// Relations of the `subscriptions` table.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    // Deleting a subscriber cascades to their subscriptions.
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Subscriber,
    #[sea_orm(has_many = "super::bangumi::Entity")]
    Bangumi,
    #[sea_orm(has_many = "super::episodes::Entity")]
    Episodes,
    #[sea_orm(has_many = "super::subscription_episode::Entity")]
    SubscriptionEpisode,
    #[sea_orm(has_many = "super::subscription_bangumi::Entity")]
    SubscriptionBangumi,
    // Deleting a credential merely nulls the reference.
    #[sea_orm(
        belongs_to = "super::credential_3rd::Entity",
        from = "Column::CredentialId",
        to = "super::credential_3rd::Column::Id",
        on_update = "Cascade",
        on_delete = "SetNull"
    )]
    Credential3rd,
    #[sea_orm(has_many = "super::feeds::Entity")]
    Feed,
}
impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}
impl Related<super::subscription_bangumi::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::SubscriptionBangumi.def()
    }
}
impl Related<super::subscription_episode::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::SubscriptionEpisode.def()
    }
}
// Many-to-many to bangumi via the subscription_bangumi join table.
impl Related<super::bangumi::Entity> for Entity {
    fn to() -> RelationDef {
        super::subscription_bangumi::Relation::Bangumi.def()
    }
    fn via() -> Option<RelationDef> {
        Some(
            super::subscription_bangumi::Relation::Subscription
                .def()
                .rev(),
        )
    }
}
impl Related<super::feeds::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Feed.def()
    }
}
// Many-to-many to episodes via the subscription_episode join table.
impl Related<super::episodes::Entity> for Entity {
    fn to() -> RelationDef {
        super::subscription_episode::Relation::Episode.def()
    }
    fn via() -> Option<RelationDef> {
        Some(
            super::subscription_episode::Relation::Subscription
                .def()
                .rev(),
        )
    }
}
impl Related<super::credential_3rd::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Credential3rd.def()
    }
}
/// Entities reachable from a subscription, for SeaORM's derived loaders.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
    #[sea_orm(entity = "super::subscribers::Entity")]
    Subscriber,
    #[sea_orm(entity = "super::bangumi::Entity")]
    Bangumi,
    #[sea_orm(entity = "super::episodes::Entity")]
    Episode,
    #[sea_orm(entity = "super::subscription_episode::Entity")]
    SubscriptionEpisode,
    #[sea_orm(entity = "super::subscription_bangumi::Entity")]
    SubscriptionBangumi,
    #[sea_orm(entity = "super::credential_3rd::Entity")]
    Credential3rd,
    #[sea_orm(entity = "super::feeds::Entity")]
    Feed,
}
// No custom save hooks for subscriptions.
#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
// Intentionally empty: no custom active-model helpers yet.
impl ActiveModel {}
impl Model {
    /// Set `enabled` on every subscription whose id is in `ids`.
    pub async fn toggle_with_ids(
        ctx: &dyn AppContextTrait,
        ids: impl Iterator<Item = i32>,
        enabled: bool,
    ) -> RecorderResult<()> {
        Entity::update_many()
            .col_expr(Column::Enabled, Expr::value(enabled))
            .filter(Column::Id.is_in(ids))
            .exec(ctx.db())
            .await?;
        Ok(())
    }

    /// Delete every subscription whose id is in `ids`.
    pub async fn delete_with_ids(
        ctx: &dyn AppContextTrait,
        ids: impl Iterator<Item = i32>,
    ) -> RecorderResult<()> {
        Entity::delete_many()
            .filter(Column::Id.is_in(ids))
            .exec(ctx.db())
            .await?;
        Ok(())
    }

    /// Fetch a subscription by id, reporting a subscriber mismatch the same
    /// way as a missing row so one subscriber cannot probe another's data.
    pub async fn find_by_id_and_subscriber_id(
        ctx: &dyn AppContextTrait,
        subscriber_id: i32,
        subscription_id: i32,
    ) -> RecorderResult<Self> {
        let subscription_model = Entity::find_by_id(subscription_id)
            .one(ctx.db())
            .await?
            .ok_or_else(|| RecorderError::ModelEntityNotFound {
                entity: "Subscription".into(),
            })?;
        if subscription_model.subscriber_id == subscriber_id {
            Ok(subscription_model)
        } else {
            Err(RecorderError::ModelEntityNotFound {
                entity: "Subscription".into(),
            })
        }
    }
}

View File

@ -0,0 +1,129 @@
use std::{fmt::Debug, sync::Arc};
use async_trait::async_trait;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter};
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
extract::mikan::{
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
},
models::subscriptions::{self, SubscriptionTrait},
};
/// Discriminant stored in `subscriptions.category`.
#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
    rs_type = "String",
    db_type = "Enum",
    enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
    #[sea_orm(string_value = "mikan_subscriber")]
    MikanSubscriber,
    #[sea_orm(string_value = "mikan_season")]
    MikanSeason,
    #[sea_orm(string_value = "mikan_bangumi")]
    MikanBangumi,
    #[sea_orm(string_value = "manual")]
    Manual,
}
/// Tagged union of concrete subscription implementations; the serde tag
/// uses the same snake_case names as `SubscriptionCategory`.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "category")]
pub enum Subscription {
    #[serde(rename = "mikan_subscriber")]
    MikanSubscriber(MikanSubscriberSubscription),
    #[serde(rename = "mikan_season")]
    MikanSeason(MikanSeasonSubscription),
    #[serde(rename = "mikan_bangumi")]
    MikanBangumi(MikanBangumiSubscription),
    #[serde(rename = "manual")]
    Manual,
}
impl Subscription {
    /// Map each variant back to its stored category discriminant.
    pub fn category(&self) -> SubscriptionCategory {
        match self {
            Self::MikanSubscriber(_) => SubscriptionCategory::MikanSubscriber,
            Self::MikanSeason(_) => SubscriptionCategory::MikanSeason,
            Self::MikanBangumi(_) => SubscriptionCategory::MikanBangumi,
            Self::Manual => SubscriptionCategory::Manual,
        }
    }
}
#[async_trait]
impl SubscriptionTrait for Subscription {
    // NOTE(review): panics for Manual — callers must not ask a manual
    // subscription for its ids. Confirm this invariant holds at call sites.
    fn get_subscriber_id(&self) -> i32 {
        match self {
            Self::MikanSubscriber(subscription) => subscription.get_subscriber_id(),
            Self::MikanSeason(subscription) => subscription.get_subscriber_id(),
            Self::MikanBangumi(subscription) => subscription.get_subscriber_id(),
            Self::Manual => unreachable!(),
        }
    }
    // NOTE(review): same panic caveat as get_subscriber_id.
    fn get_subscription_id(&self) -> i32 {
        match self {
            Self::MikanSubscriber(subscription) => subscription.get_subscription_id(),
            Self::MikanSeason(subscription) => subscription.get_subscription_id(),
            Self::MikanBangumi(subscription) => subscription.get_subscription_id(),
            Self::Manual => unreachable!(),
        }
    }
    /// Dispatch incremental feed sync; manual subscriptions are a no-op.
    async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        match self {
            Self::MikanSubscriber(subscription) => subscription.sync_feeds_incremental(ctx).await,
            Self::MikanSeason(subscription) => subscription.sync_feeds_incremental(ctx).await,
            Self::MikanBangumi(subscription) => subscription.sync_feeds_incremental(ctx).await,
            Self::Manual => Ok(()),
        }
    }
    /// Dispatch full feed sync; manual subscriptions are a no-op.
    async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        match self {
            Self::MikanSubscriber(subscription) => subscription.sync_feeds_full(ctx).await,
            Self::MikanSeason(subscription) => subscription.sync_feeds_full(ctx).await,
            Self::MikanBangumi(subscription) => subscription.sync_feeds_full(ctx).await,
            Self::Manual => Ok(()),
        }
    }
    /// Dispatch source sync; manual subscriptions are a no-op.
    async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        match self {
            Self::MikanSubscriber(subscription) => subscription.sync_sources(ctx).await,
            Self::MikanSeason(subscription) => subscription.sync_sources(ctx).await,
            Self::MikanBangumi(subscription) => subscription.sync_sources(ctx).await,
            Self::Manual => Ok(()),
        }
    }
    /// Route a database row to the parser for its category.
    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
        match model.category {
            SubscriptionCategory::MikanSubscriber => {
                MikanSubscriberSubscription::try_from_model(model).map(Self::MikanSubscriber)
            }
            SubscriptionCategory::MikanSeason => {
                MikanSeasonSubscription::try_from_model(model).map(Self::MikanSeason)
            }
            SubscriptionCategory::MikanBangumi => {
                MikanBangumiSubscription::try_from_model(model).map(Self::MikanBangumi)
            }
            SubscriptionCategory::Manual => Ok(Self::Manual),
        }
    }
}
// Convenience conversion delegating to `SubscriptionTrait::try_from_model`.
impl TryFrom<&subscriptions::Model> for Subscription {
    type Error = RecorderError;
    fn try_from(model: &subscriptions::Model) -> Result<Self, Self::Error> {
        Self::try_from_model(model)
    }
}

View File

@ -0,0 +1,42 @@
use std::sync::Arc;
use axum::{
Router,
extract::{Path, State},
response::IntoResponse,
routing::get,
};
use http::StatusCode;
use crate::{
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
extract::http::ForwardedRelatedInfo,
models::feeds,
web::controller::Controller,
};
/// URL prefix under which the feeds controller is mounted.
pub const CONTROLLER_PREFIX: &str = "/api/feeds";
/// GET `/rss/{token}`: resolve the feed behind `token` and return the
/// rendered RSS XML.
async fn rss_handler(
    State(ctx): State<Arc<dyn AppContextTrait>>,
    Path(token): Path<String>,
    forwarded_info: ForwardedRelatedInfo,
) -> RecorderResult<impl IntoResponse> {
    // Reconstruct the externally visible origin from forwarded headers so
    // links inside the generated channel point back at the right base.
    let api_base = forwarded_info
        .resolved_origin()
        .ok_or(RecorderError::MissingOriginError)?;
    let channel = feeds::Model::find_rss_feed_by_token(ctx.as_ref(), &token, &api_base).await?;
    Ok((
        StatusCode::OK,
        [("Content-Type", "application/xml; charset=utf-8")],
        channel.to_string(),
    ))
}
/// Build the feeds controller with its single RSS route.
pub async fn create(_ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
    let router = Router::<Arc<dyn AppContextTrait>>::new().route("/rss/{token}", get(rss_handler));
    Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))
}

View File

@ -1,4 +1,5 @@
pub mod core;
pub mod feeds;
pub mod graphql;
pub mod metadata;
pub mod oidc;

View File

@ -2,7 +2,7 @@ use std::sync::Arc;
use axum::{
Json, Router,
extract::{Query, Request, State},
extract::{Query, State},
routing::get,
};
use snafu::ResultExt;
@ -42,12 +42,11 @@ async fn oidc_callback(
async fn oidc_auth(
State(ctx): State<Arc<dyn AppContextTrait>>,
request: Request,
forwarded_info: ForwardedRelatedInfo,
) -> Result<Json<OidcAuthRequest>, AuthError> {
let auth_service = ctx.auth();
if let AuthService::Oidc(oidc_auth_service) = auth_service {
let (parts, _) = request.into_parts();
let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts)
let mut redirect_uri = forwarded_info
.resolved_origin()
.ok_or(url::ParseError::EmptyHost)
.context(OidcRequestRedirectUriSnafu)?;

View File

@ -0,0 +1,19 @@
import { gql } from '@apollo/client';
// Mutation: create one feed row; returns the generated id, timestamps,
// feed type and public token.
export const INSERT_FEED = gql`
mutation InsertFeed($data: FeedsInsertInput!) {
feedsCreateOne(data: $data) {
id
createdAt
updatedAt
feedType
token
}
}
`;
// Mutation: delete feeds matching the filter; returns a scalar
// (presumably the affected-row count — confirm against the schema).
export const DELETE_FEED = gql`
mutation DeleteFeed($filters: FeedsFilterInput!) {
feedsDelete(filter: $filters)
}
`;

View File

@ -95,6 +95,16 @@ query GetSubscriptionDetail ($id: Int!) {
category
sourceUrl
enabled
feed {
nodes {
id
createdAt
updatedAt
token
feedType
feedSource
}
}
credential3rd {
id
username
@ -112,7 +122,6 @@ query GetSubscriptionDetail ($id: Int!) {
mikanFansubId
rssLink
posterLink
savePath
homepage
}
}

View File

@ -20,11 +20,13 @@ type Documents = {
"\n mutation DeleteCredential3rd($filters: Credential3rdFilterInput!) {\n credential3rdDelete(filter: $filters)\n }\n": typeof types.DeleteCredential3rdDocument,
"\n query GetCredential3rdDetail($id: Int!) {\n credential3rd(filters: { id: { eq: $id } }) {\n nodes {\n id\n cookies\n username\n password\n userAgent\n createdAt\n updatedAt\n credentialType\n }\n }\n }\n": typeof types.GetCredential3rdDetailDocument,
"\n query CheckCredential3rdAvailable($id: Int!) {\n credential3rdCheckAvailable(filter: { id: $id }) {\n available\n }\n }\n": typeof types.CheckCredential3rdAvailableDocument,
"\n mutation InsertFeed($data: FeedsInsertInput!) {\n feedsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n feedType\n token\n }\n }\n": typeof types.InsertFeedDocument,
"\n mutation DeleteFeed($filters: FeedsFilterInput!) {\n feedsDelete(filter: $filters)\n }\n": typeof types.DeleteFeedDocument,
"\n query GetSubscriptions($filters: SubscriptionsFilterInput!, $orderBy: SubscriptionsOrderInput!, $pagination: PaginationInput!) {\n subscriptions(\n pagination: $pagination\n filters: $filters\n orderBy: $orderBy\n ) {\n nodes {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n credentialId\n }\n paginationInfo {\n total\n pages\n }\n }\n }\n": typeof types.GetSubscriptionsDocument,
"\n mutation InsertSubscription($data: SubscriptionsInsertInput!) {\n subscriptionsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n credentialId\n }\n }\n": typeof types.InsertSubscriptionDocument,
"\n mutation UpdateSubscriptions(\n $data: SubscriptionsUpdateInput!,\n $filters: SubscriptionsFilterInput!,\n ) {\n subscriptionsUpdate (\n data: $data\n filter: $filters\n ) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n }\n}\n": typeof types.UpdateSubscriptionsDocument,
"\n mutation DeleteSubscriptions($filters: SubscriptionsFilterInput) {\n subscriptionsDelete(filter: $filters)\n }\n": typeof types.DeleteSubscriptionsDocument,
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n": typeof types.GetSubscriptionDetailDocument,
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n feed {\n nodes {\n id\n createdAt\n updatedAt\n token\n feedType\n feedSource\n }\n }\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n homepage\n }\n }\n }\n }\n}\n": typeof types.GetSubscriptionDetailDocument,
"\n mutation SyncSubscriptionFeedsIncremental($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsIncremental(filter: $filter) {\n id\n }\n }\n": typeof types.SyncSubscriptionFeedsIncrementalDocument,
"\n mutation SyncSubscriptionFeedsFull($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsFull(filter: $filter) {\n id\n }\n }\n": typeof types.SyncSubscriptionFeedsFullDocument,
"\n mutation SyncSubscriptionSources($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneSources(filter: $filter) {\n id\n }\n }\n": typeof types.SyncSubscriptionSourcesDocument,
@ -39,11 +41,13 @@ const documents: Documents = {
"\n mutation DeleteCredential3rd($filters: Credential3rdFilterInput!) {\n credential3rdDelete(filter: $filters)\n }\n": types.DeleteCredential3rdDocument,
"\n query GetCredential3rdDetail($id: Int!) {\n credential3rd(filters: { id: { eq: $id } }) {\n nodes {\n id\n cookies\n username\n password\n userAgent\n createdAt\n updatedAt\n credentialType\n }\n }\n }\n": types.GetCredential3rdDetailDocument,
"\n query CheckCredential3rdAvailable($id: Int!) {\n credential3rdCheckAvailable(filter: { id: $id }) {\n available\n }\n }\n": types.CheckCredential3rdAvailableDocument,
"\n mutation InsertFeed($data: FeedsInsertInput!) {\n feedsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n feedType\n token\n }\n }\n": types.InsertFeedDocument,
"\n mutation DeleteFeed($filters: FeedsFilterInput!) {\n feedsDelete(filter: $filters)\n }\n": types.DeleteFeedDocument,
"\n query GetSubscriptions($filters: SubscriptionsFilterInput!, $orderBy: SubscriptionsOrderInput!, $pagination: PaginationInput!) {\n subscriptions(\n pagination: $pagination\n filters: $filters\n orderBy: $orderBy\n ) {\n nodes {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n credentialId\n }\n paginationInfo {\n total\n pages\n }\n }\n }\n": types.GetSubscriptionsDocument,
"\n mutation InsertSubscription($data: SubscriptionsInsertInput!) {\n subscriptionsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n credentialId\n }\n }\n": types.InsertSubscriptionDocument,
"\n mutation UpdateSubscriptions(\n $data: SubscriptionsUpdateInput!,\n $filters: SubscriptionsFilterInput!,\n ) {\n subscriptionsUpdate (\n data: $data\n filter: $filters\n ) {\n id\n createdAt\n updatedAt\n displayName\n category\n sourceUrl\n enabled\n }\n}\n": types.UpdateSubscriptionsDocument,
"\n mutation DeleteSubscriptions($filters: SubscriptionsFilterInput) {\n subscriptionsDelete(filter: $filters)\n }\n": types.DeleteSubscriptionsDocument,
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n": types.GetSubscriptionDetailDocument,
"\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n feed {\n nodes {\n id\n createdAt\n updatedAt\n token\n feedType\n feedSource\n }\n }\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n homepage\n }\n }\n }\n }\n}\n": types.GetSubscriptionDetailDocument,
"\n mutation SyncSubscriptionFeedsIncremental($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsIncremental(filter: $filter) {\n id\n }\n }\n": types.SyncSubscriptionFeedsIncrementalDocument,
"\n mutation SyncSubscriptionFeedsFull($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneFeedsFull(filter: $filter) {\n id\n }\n }\n": types.SyncSubscriptionFeedsFullDocument,
"\n mutation SyncSubscriptionSources($filter: SubscriptionsFilterInput!) {\n subscriptionsSyncOneSources(filter: $filter) {\n id\n }\n }\n": types.SyncSubscriptionSourcesDocument,
@ -90,6 +94,14 @@ export function gql(source: "\n query GetCredential3rdDetail($id: Int!) {\n
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\n query CheckCredential3rdAvailable($id: Int!) {\n credential3rdCheckAvailable(filter: { id: $id }) {\n available\n }\n }\n"): (typeof documents)["\n query CheckCredential3rdAvailable($id: Int!) {\n credential3rdCheckAvailable(filter: { id: $id }) {\n available\n }\n }\n"];
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\n mutation InsertFeed($data: FeedsInsertInput!) {\n feedsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n feedType\n token\n }\n }\n"): (typeof documents)["\n mutation InsertFeed($data: FeedsInsertInput!) {\n feedsCreateOne(data: $data) {\n id\n createdAt\n updatedAt\n feedType\n token\n }\n }\n"];
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\n mutation DeleteFeed($filters: FeedsFilterInput!) {\n feedsDelete(filter: $filters)\n }\n"): (typeof documents)["\n mutation DeleteFeed($filters: FeedsFilterInput!) {\n feedsDelete(filter: $filters)\n }\n"];
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
@ -109,7 +121,7 @@ export function gql(source: "\n mutation DeleteSubscriptions($filters: Subscr
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function gql(source: "\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n"): (typeof documents)["\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n savePath\n homepage\n }\n }\n }\n }\n}\n"];
export function gql(source: "\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n feed {\n nodes {\n id\n createdAt\n updatedAt\n token\n feedType\n feedSource\n }\n }\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n homepage\n }\n }\n }\n }\n}\n"): (typeof documents)["\nquery GetSubscriptionDetail ($id: Int!) {\n subscriptions(filters: { id: {\n eq: $id\n } }) {\n nodes {\n id\n displayName\n createdAt\n updatedAt\n category\n sourceUrl\n enabled\n feed {\n nodes {\n id\n createdAt\n updatedAt\n token\n feedType\n feedSource\n }\n }\n credential3rd {\n id\n username\n }\n bangumi {\n nodes {\n createdAt\n updatedAt\n id\n mikanBangumiId\n displayName\n season\n seasonRaw\n fansub\n mikanFansubId\n rssLink\n posterLink\n homepage\n }\n }\n }\n }\n}\n"];
/**
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/

View File

@ -21,6 +21,7 @@ export type Scalars = {
export type Bangumi = {
__typename?: 'Bangumi';
bangumiType: BangumiTypeEnum;
createdAt: Scalars['String']['output'];
displayName: Scalars['String']['output'];
episode: EpisodesConnection;
@ -34,7 +35,6 @@ export type Bangumi = {
originPosterLink?: Maybe<Scalars['String']['output']>;
posterLink?: Maybe<Scalars['String']['output']>;
rssLink?: Maybe<Scalars['String']['output']>;
savePath?: Maybe<Scalars['String']['output']>;
season: Scalars['Int']['output'];
seasonRaw?: Maybe<Scalars['String']['output']>;
subscriber?: Maybe<Subscribers>;
@ -67,6 +67,7 @@ export type BangumiSubscriptionBangumiArgs = {
export type BangumiBasic = {
__typename?: 'BangumiBasic';
bangumiType: BangumiTypeEnum;
createdAt: Scalars['String']['output'];
displayName: Scalars['String']['output'];
fansub?: Maybe<Scalars['String']['output']>;
@ -79,7 +80,6 @@ export type BangumiBasic = {
originPosterLink?: Maybe<Scalars['String']['output']>;
posterLink?: Maybe<Scalars['String']['output']>;
rssLink?: Maybe<Scalars['String']['output']>;
savePath?: Maybe<Scalars['String']['output']>;
season: Scalars['Int']['output'];
seasonRaw?: Maybe<Scalars['String']['output']>;
subscriberId: Scalars['Int']['output'];
@ -102,6 +102,7 @@ export type BangumiEdge = {
export type BangumiFilterInput = {
and?: InputMaybe<Array<BangumiFilterInput>>;
bangumiType?: InputMaybe<BangumiTypeEnumFilterInput>;
createdAt?: InputMaybe<TextFilterInput>;
displayName?: InputMaybe<StringFilterInput>;
fansub?: InputMaybe<StringFilterInput>;
@ -114,14 +115,14 @@ export type BangumiFilterInput = {
originPosterLink?: InputMaybe<StringFilterInput>;
posterLink?: InputMaybe<StringFilterInput>;
rssLink?: InputMaybe<StringFilterInput>;
savePath?: InputMaybe<StringFilterInput>;
season?: InputMaybe<IntegerFilterInput>;
seasonRaw?: InputMaybe<StringFilterInput>;
subscriberId?: InputMaybe<IntegerFilterInput>;
subscriberId?: InputMaybe<SubscriberIdFilterInput>;
updatedAt?: InputMaybe<TextFilterInput>;
};
export type BangumiInsertInput = {
bangumiType: BangumiTypeEnum;
createdAt?: InputMaybe<Scalars['String']['input']>;
displayName: Scalars['String']['input'];
fansub?: InputMaybe<Scalars['String']['input']>;
@ -134,14 +135,14 @@ export type BangumiInsertInput = {
originPosterLink?: InputMaybe<Scalars['String']['input']>;
posterLink?: InputMaybe<Scalars['String']['input']>;
rssLink?: InputMaybe<Scalars['String']['input']>;
savePath?: InputMaybe<Scalars['String']['input']>;
season: Scalars['Int']['input'];
seasonRaw?: InputMaybe<Scalars['String']['input']>;
subscriberId: Scalars['Int']['input'];
subscriberId?: InputMaybe<Scalars['Int']['input']>;
updatedAt?: InputMaybe<Scalars['String']['input']>;
};
export type BangumiOrderInput = {
bangumiType?: InputMaybe<OrderByEnum>;
createdAt?: InputMaybe<OrderByEnum>;
displayName?: InputMaybe<OrderByEnum>;
fansub?: InputMaybe<OrderByEnum>;
@ -154,14 +155,32 @@ export type BangumiOrderInput = {
originPosterLink?: InputMaybe<OrderByEnum>;
posterLink?: InputMaybe<OrderByEnum>;
rssLink?: InputMaybe<OrderByEnum>;
savePath?: InputMaybe<OrderByEnum>;
season?: InputMaybe<OrderByEnum>;
seasonRaw?: InputMaybe<OrderByEnum>;
subscriberId?: InputMaybe<OrderByEnum>;
updatedAt?: InputMaybe<OrderByEnum>;
};
export const BangumiTypeEnum = {
Mikan: 'mikan'
} as const;
export type BangumiTypeEnum = typeof BangumiTypeEnum[keyof typeof BangumiTypeEnum];
export type BangumiTypeEnumFilterInput = {
eq?: InputMaybe<BangumiTypeEnum>;
gt?: InputMaybe<BangumiTypeEnum>;
gte?: InputMaybe<BangumiTypeEnum>;
is_in?: InputMaybe<Array<BangumiTypeEnum>>;
is_not_in?: InputMaybe<Array<BangumiTypeEnum>>;
is_not_null?: InputMaybe<BangumiTypeEnum>;
is_null?: InputMaybe<BangumiTypeEnum>;
lt?: InputMaybe<BangumiTypeEnum>;
lte?: InputMaybe<BangumiTypeEnum>;
ne?: InputMaybe<BangumiTypeEnum>;
};
export type BangumiUpdateInput = {
bangumiType?: InputMaybe<BangumiTypeEnum>;
createdAt?: InputMaybe<Scalars['String']['input']>;
displayName?: InputMaybe<Scalars['String']['input']>;
fansub?: InputMaybe<Scalars['String']['input']>;
@ -174,10 +193,8 @@ export type BangumiUpdateInput = {
originPosterLink?: InputMaybe<Scalars['String']['input']>;
posterLink?: InputMaybe<Scalars['String']['input']>;
rssLink?: InputMaybe<Scalars['String']['input']>;
savePath?: InputMaybe<Scalars['String']['input']>;
season?: InputMaybe<Scalars['Int']['input']>;
seasonRaw?: InputMaybe<Scalars['String']['input']>;
subscriberId?: InputMaybe<Scalars['Int']['input']>;
updatedAt?: InputMaybe<Scalars['String']['input']>;
};
@ -613,6 +630,24 @@ export type DownloadsUpdateInput = {
url?: InputMaybe<Scalars['String']['input']>;
};
export const EpisodeTypeEnum = {
Mikan: 'mikan'
} as const;
export type EpisodeTypeEnum = typeof EpisodeTypeEnum[keyof typeof EpisodeTypeEnum];
export type EpisodeTypeEnumFilterInput = {
eq?: InputMaybe<EpisodeTypeEnum>;
gt?: InputMaybe<EpisodeTypeEnum>;
gte?: InputMaybe<EpisodeTypeEnum>;
is_in?: InputMaybe<Array<EpisodeTypeEnum>>;
is_not_in?: InputMaybe<Array<EpisodeTypeEnum>>;
is_not_null?: InputMaybe<EpisodeTypeEnum>;
is_null?: InputMaybe<EpisodeTypeEnum>;
lt?: InputMaybe<EpisodeTypeEnum>;
lte?: InputMaybe<EpisodeTypeEnum>;
ne?: InputMaybe<EpisodeTypeEnum>;
};
export type Episodes = {
__typename?: 'Episodes';
bangumi?: Maybe<Bangumi>;
@ -620,7 +655,12 @@ export type Episodes = {
createdAt: Scalars['String']['output'];
displayName: Scalars['String']['output'];
download: SubscriptionsConnection;
enclosureContentLength?: Maybe<Scalars['Int']['output']>;
enclosureMagnetLink?: Maybe<Scalars['String']['output']>;
enclosurePubDate?: Maybe<Scalars['String']['output']>;
enclosureTorrentLink?: Maybe<Scalars['String']['output']>;
episodeIndex: Scalars['Int']['output'];
episodeType: EpisodeTypeEnum;
fansub?: Maybe<Scalars['String']['output']>;
homepage?: Maybe<Scalars['String']['output']>;
id: Scalars['Int']['output'];
@ -629,7 +669,6 @@ export type Episodes = {
originPosterLink?: Maybe<Scalars['String']['output']>;
posterLink?: Maybe<Scalars['String']['output']>;
resolution?: Maybe<Scalars['String']['output']>;
savePath?: Maybe<Scalars['String']['output']>;
season: Scalars['Int']['output'];
seasonRaw?: Maybe<Scalars['String']['output']>;
source?: Maybe<Scalars['String']['output']>;
@ -667,7 +706,12 @@ export type EpisodesBasic = {
bangumiId: Scalars['Int']['output'];
createdAt: Scalars['String']['output'];
displayName: Scalars['String']['output'];
enclosureContentLength?: Maybe<Scalars['Int']['output']>;
enclosureMagnetLink?: Maybe<Scalars['String']['output']>;
enclosurePubDate?: Maybe<Scalars['String']['output']>;
enclosureTorrentLink?: Maybe<Scalars['String']['output']>;
episodeIndex: Scalars['Int']['output'];
episodeType: EpisodeTypeEnum;
fansub?: Maybe<Scalars['String']['output']>;
homepage?: Maybe<Scalars['String']['output']>;
id: Scalars['Int']['output'];
@ -676,7 +720,6 @@ export type EpisodesBasic = {
originPosterLink?: Maybe<Scalars['String']['output']>;
posterLink?: Maybe<Scalars['String']['output']>;
resolution?: Maybe<Scalars['String']['output']>;
savePath?: Maybe<Scalars['String']['output']>;
season: Scalars['Int']['output'];
seasonRaw?: Maybe<Scalars['String']['output']>;
source?: Maybe<Scalars['String']['output']>;
@ -704,7 +747,12 @@ export type EpisodesFilterInput = {
bangumiId?: InputMaybe<IntegerFilterInput>;
createdAt?: InputMaybe<TextFilterInput>;
displayName?: InputMaybe<StringFilterInput>;
enclosureContentLength?: InputMaybe<IntegerFilterInput>;
enclosureMagnetLink?: InputMaybe<StringFilterInput>;
enclosurePubDate?: InputMaybe<TextFilterInput>;
enclosureTorrentLink?: InputMaybe<StringFilterInput>;
episodeIndex?: InputMaybe<IntegerFilterInput>;
episodeType?: InputMaybe<EpisodeTypeEnumFilterInput>;
fansub?: InputMaybe<StringFilterInput>;
homepage?: InputMaybe<StringFilterInput>;
id?: InputMaybe<IntegerFilterInput>;
@ -714,7 +762,6 @@ export type EpisodesFilterInput = {
originPosterLink?: InputMaybe<StringFilterInput>;
posterLink?: InputMaybe<StringFilterInput>;
resolution?: InputMaybe<StringFilterInput>;
savePath?: InputMaybe<StringFilterInput>;
season?: InputMaybe<IntegerFilterInput>;
seasonRaw?: InputMaybe<StringFilterInput>;
source?: InputMaybe<StringFilterInput>;
@ -727,7 +774,12 @@ export type EpisodesInsertInput = {
bangumiId: Scalars['Int']['input'];
createdAt?: InputMaybe<Scalars['String']['input']>;
displayName: Scalars['String']['input'];
enclosureContentLength?: InputMaybe<Scalars['Int']['input']>;
enclosureMagnetLink?: InputMaybe<Scalars['String']['input']>;
enclosurePubDate?: InputMaybe<Scalars['String']['input']>;
enclosureTorrentLink?: InputMaybe<Scalars['String']['input']>;
episodeIndex: Scalars['Int']['input'];
episodeType: EpisodeTypeEnum;
fansub?: InputMaybe<Scalars['String']['input']>;
homepage?: InputMaybe<Scalars['String']['input']>;
id?: InputMaybe<Scalars['Int']['input']>;
@ -736,7 +788,6 @@ export type EpisodesInsertInput = {
originPosterLink?: InputMaybe<Scalars['String']['input']>;
posterLink?: InputMaybe<Scalars['String']['input']>;
resolution?: InputMaybe<Scalars['String']['input']>;
savePath?: InputMaybe<Scalars['String']['input']>;
season: Scalars['Int']['input'];
seasonRaw?: InputMaybe<Scalars['String']['input']>;
source?: InputMaybe<Scalars['String']['input']>;
@ -749,7 +800,12 @@ export type EpisodesOrderInput = {
bangumiId?: InputMaybe<OrderByEnum>;
createdAt?: InputMaybe<OrderByEnum>;
displayName?: InputMaybe<OrderByEnum>;
enclosureContentLength?: InputMaybe<OrderByEnum>;
enclosureMagnetLink?: InputMaybe<OrderByEnum>;
enclosurePubDate?: InputMaybe<OrderByEnum>;
enclosureTorrentLink?: InputMaybe<OrderByEnum>;
episodeIndex?: InputMaybe<OrderByEnum>;
episodeType?: InputMaybe<OrderByEnum>;
fansub?: InputMaybe<OrderByEnum>;
homepage?: InputMaybe<OrderByEnum>;
id?: InputMaybe<OrderByEnum>;
@ -758,7 +814,6 @@ export type EpisodesOrderInput = {
originPosterLink?: InputMaybe<OrderByEnum>;
posterLink?: InputMaybe<OrderByEnum>;
resolution?: InputMaybe<OrderByEnum>;
savePath?: InputMaybe<OrderByEnum>;
season?: InputMaybe<OrderByEnum>;
seasonRaw?: InputMaybe<OrderByEnum>;
source?: InputMaybe<OrderByEnum>;
@ -771,7 +826,12 @@ export type EpisodesUpdateInput = {
bangumiId?: InputMaybe<Scalars['Int']['input']>;
createdAt?: InputMaybe<Scalars['String']['input']>;
displayName?: InputMaybe<Scalars['String']['input']>;
enclosureContentLength?: InputMaybe<Scalars['Int']['input']>;
enclosureMagnetLink?: InputMaybe<Scalars['String']['input']>;
enclosurePubDate?: InputMaybe<Scalars['String']['input']>;
enclosureTorrentLink?: InputMaybe<Scalars['String']['input']>;
episodeIndex?: InputMaybe<Scalars['Int']['input']>;
episodeType?: InputMaybe<EpisodeTypeEnum>;
fansub?: InputMaybe<Scalars['String']['input']>;
homepage?: InputMaybe<Scalars['String']['input']>;
id?: InputMaybe<Scalars['Int']['input']>;
@ -780,7 +840,6 @@ export type EpisodesUpdateInput = {
originPosterLink?: InputMaybe<Scalars['String']['input']>;
posterLink?: InputMaybe<Scalars['String']['input']>;
resolution?: InputMaybe<Scalars['String']['input']>;
savePath?: InputMaybe<Scalars['String']['input']>;
season?: InputMaybe<Scalars['Int']['input']>;
seasonRaw?: InputMaybe<Scalars['String']['input']>;
source?: InputMaybe<Scalars['String']['input']>;
@ -788,6 +847,127 @@ export type EpisodesUpdateInput = {
updatedAt?: InputMaybe<Scalars['String']['input']>;
};
export const FeedSourceEnum = {
SubscriptionEpisode: 'subscription_episode'
} as const;
export type FeedSourceEnum = typeof FeedSourceEnum[keyof typeof FeedSourceEnum];
export type FeedSourceEnumFilterInput = {
eq?: InputMaybe<FeedSourceEnum>;
gt?: InputMaybe<FeedSourceEnum>;
gte?: InputMaybe<FeedSourceEnum>;
is_in?: InputMaybe<Array<FeedSourceEnum>>;
is_not_in?: InputMaybe<Array<FeedSourceEnum>>;
is_not_null?: InputMaybe<FeedSourceEnum>;
is_null?: InputMaybe<FeedSourceEnum>;
lt?: InputMaybe<FeedSourceEnum>;
lte?: InputMaybe<FeedSourceEnum>;
ne?: InputMaybe<FeedSourceEnum>;
};
export const FeedTypeEnum = {
Rss: 'rss'
} as const;
export type FeedTypeEnum = typeof FeedTypeEnum[keyof typeof FeedTypeEnum];
export type FeedTypeEnumFilterInput = {
eq?: InputMaybe<FeedTypeEnum>;
gt?: InputMaybe<FeedTypeEnum>;
gte?: InputMaybe<FeedTypeEnum>;
is_in?: InputMaybe<Array<FeedTypeEnum>>;
is_not_in?: InputMaybe<Array<FeedTypeEnum>>;
is_not_null?: InputMaybe<FeedTypeEnum>;
is_null?: InputMaybe<FeedTypeEnum>;
lt?: InputMaybe<FeedTypeEnum>;
lte?: InputMaybe<FeedTypeEnum>;
ne?: InputMaybe<FeedTypeEnum>;
};
export type Feeds = {
__typename?: 'Feeds';
createdAt: Scalars['String']['output'];
feedSource: FeedSourceEnum;
feedType: FeedTypeEnum;
id: Scalars['Int']['output'];
subscriber?: Maybe<Subscribers>;
subscriberId?: Maybe<Scalars['Int']['output']>;
subscription?: Maybe<Subscriptions>;
subscriptionId?: Maybe<Scalars['Int']['output']>;
token: Scalars['String']['output'];
updatedAt: Scalars['String']['output'];
};
export type FeedsBasic = {
__typename?: 'FeedsBasic';
createdAt: Scalars['String']['output'];
feedSource: FeedSourceEnum;
feedType: FeedTypeEnum;
id: Scalars['Int']['output'];
subscriberId?: Maybe<Scalars['Int']['output']>;
subscriptionId?: Maybe<Scalars['Int']['output']>;
token: Scalars['String']['output'];
updatedAt: Scalars['String']['output'];
};
export type FeedsConnection = {
__typename?: 'FeedsConnection';
edges: Array<FeedsEdge>;
nodes: Array<Feeds>;
pageInfo: PageInfo;
paginationInfo?: Maybe<PaginationInfo>;
};
export type FeedsEdge = {
__typename?: 'FeedsEdge';
cursor: Scalars['String']['output'];
node: Feeds;
};
export type FeedsFilterInput = {
and?: InputMaybe<Array<FeedsFilterInput>>;
createdAt?: InputMaybe<TextFilterInput>;
feedSource?: InputMaybe<FeedSourceEnumFilterInput>;
feedType?: InputMaybe<FeedTypeEnumFilterInput>;
id?: InputMaybe<IntegerFilterInput>;
or?: InputMaybe<Array<FeedsFilterInput>>;
subscriberId?: InputMaybe<SubscriberIdFilterInput>;
subscriptionId?: InputMaybe<IntegerFilterInput>;
token?: InputMaybe<StringFilterInput>;
updatedAt?: InputMaybe<TextFilterInput>;
};
export type FeedsInsertInput = {
createdAt?: InputMaybe<Scalars['String']['input']>;
feedSource: FeedSourceEnum;
feedType: FeedTypeEnum;
id?: InputMaybe<Scalars['Int']['input']>;
subscriberId?: InputMaybe<Scalars['Int']['input']>;
subscriptionId?: InputMaybe<Scalars['Int']['input']>;
token?: InputMaybe<Scalars['String']['input']>;
updatedAt?: InputMaybe<Scalars['String']['input']>;
};
export type FeedsOrderInput = {
createdAt?: InputMaybe<OrderByEnum>;
feedSource?: InputMaybe<OrderByEnum>;
feedType?: InputMaybe<OrderByEnum>;
id?: InputMaybe<OrderByEnum>;
subscriberId?: InputMaybe<OrderByEnum>;
subscriptionId?: InputMaybe<OrderByEnum>;
token?: InputMaybe<OrderByEnum>;
updatedAt?: InputMaybe<OrderByEnum>;
};
export type FeedsUpdateInput = {
createdAt?: InputMaybe<Scalars['String']['input']>;
feedSource?: InputMaybe<FeedSourceEnum>;
feedType?: InputMaybe<FeedTypeEnum>;
id?: InputMaybe<Scalars['Int']['input']>;
subscriptionId?: InputMaybe<Scalars['Int']['input']>;
token?: InputMaybe<Scalars['String']['input']>;
updatedAt?: InputMaybe<Scalars['String']['input']>;
};
export type IntegerFilterInput = {
between?: InputMaybe<Array<Scalars['Int']['input']>>;
eq?: InputMaybe<Scalars['Int']['input']>;
@ -826,6 +1006,10 @@ export type Mutation = {
episodesCreateOne: EpisodesBasic;
episodesDelete: Scalars['Int']['output'];
episodesUpdate: Array<EpisodesBasic>;
feedsCreateBatch: Array<FeedsBasic>;
feedsCreateOne: FeedsBasic;
feedsDelete: Scalars['Int']['output'];
feedsUpdate: Array<FeedsBasic>;
subscriberTasksDelete: Scalars['Int']['output'];
subscriberTasksRetryOne: SubscriberTasks;
subscriptionBangumiCreateBatch: Array<SubscriptionBangumiBasic>;
@ -951,6 +1135,27 @@ export type MutationEpisodesUpdateArgs = {
};
export type MutationFeedsCreateBatchArgs = {
data: Array<FeedsInsertInput>;
};
export type MutationFeedsCreateOneArgs = {
data: FeedsInsertInput;
};
export type MutationFeedsDeleteArgs = {
filter?: InputMaybe<FeedsFilterInput>;
};
export type MutationFeedsUpdateArgs = {
data: FeedsUpdateInput;
filter?: InputMaybe<FeedsFilterInput>;
};
export type MutationSubscriberTasksDeleteArgs = {
filter?: InputMaybe<SubscriberTasksFilterInput>;
};
@ -1085,6 +1290,7 @@ export type Query = {
downloaders: DownloadersConnection;
downloads: DownloadsConnection;
episodes: EpisodesConnection;
feeds: FeedsConnection;
subscriberTasks: SubscriberTasksConnection;
subscribers: SubscribersConnection;
subscriptionBangumi: SubscriptionBangumiConnection;
@ -1138,6 +1344,13 @@ export type QueryEpisodesArgs = {
};
export type QueryFeedsArgs = {
filters?: InputMaybe<FeedsFilterInput>;
orderBy?: InputMaybe<FeedsOrderInput>;
pagination?: InputMaybe<PaginationInput>;
};
export type QuerySubscriberTasksArgs = {
filters?: InputMaybe<SubscriberTasksFilterInput>;
orderBy?: InputMaybe<SubscriberTasksOrderInput>;
@ -1288,7 +1501,9 @@ export type Subscribers = {
displayName: Scalars['String']['output'];
downloader: DownloadersConnection;
episode: EpisodesConnection;
feed: FeedsConnection;
id: Scalars['Int']['output'];
subscriberTask: SubscriberTasksConnection;
subscription: SubscriptionsConnection;
updatedAt: Scalars['String']['output'];
};
@ -1322,6 +1537,20 @@ export type SubscribersEpisodeArgs = {
};
export type SubscribersFeedArgs = {
filters?: InputMaybe<FeedsFilterInput>;
orderBy?: InputMaybe<FeedsOrderInput>;
pagination?: InputMaybe<PaginationInput>;
};
export type SubscribersSubscriberTaskArgs = {
filters?: InputMaybe<SubscriberTasksFilterInput>;
orderBy?: InputMaybe<SubscriberTasksOrderInput>;
pagination?: InputMaybe<PaginationInput>;
};
export type SubscribersSubscriptionArgs = {
filters?: InputMaybe<SubscriptionsFilterInput>;
orderBy?: InputMaybe<SubscriptionsOrderInput>;
@ -1511,6 +1740,7 @@ export type Subscriptions = {
displayName: Scalars['String']['output'];
enabled: Scalars['Boolean']['output'];
episode: EpisodesConnection;
feed: FeedsConnection;
id: Scalars['Int']['output'];
sourceUrl: Scalars['String']['output'];
subscriber?: Maybe<Subscribers>;
@ -1535,6 +1765,13 @@ export type SubscriptionsEpisodeArgs = {
};
export type SubscriptionsFeedArgs = {
filters?: InputMaybe<FeedsFilterInput>;
orderBy?: InputMaybe<FeedsOrderInput>;
pagination?: InputMaybe<PaginationInput>;
};
export type SubscriptionsSubscriptionBangumiArgs = {
filters?: InputMaybe<SubscriptionBangumiFilterInput>;
orderBy?: InputMaybe<SubscriptionBangumiOrderInput>;
@ -1684,6 +1921,20 @@ export type CheckCredential3rdAvailableQueryVariables = Exact<{
export type CheckCredential3rdAvailableQuery = { __typename?: 'Query', credential3rdCheckAvailable: { __typename?: 'Credential3rdCheckAvailableInfo', available: boolean } };
export type InsertFeedMutationVariables = Exact<{
data: FeedsInsertInput;
}>;
export type InsertFeedMutation = { __typename?: 'Mutation', feedsCreateOne: { __typename?: 'FeedsBasic', id: number, createdAt: string, updatedAt: string, feedType: FeedTypeEnum, token: string } };
export type DeleteFeedMutationVariables = Exact<{
filters: FeedsFilterInput;
}>;
export type DeleteFeedMutation = { __typename?: 'Mutation', feedsDelete: number };
export type GetSubscriptionsQueryVariables = Exact<{
filters: SubscriptionsFilterInput;
orderBy: SubscriptionsOrderInput;
@ -1720,7 +1971,7 @@ export type GetSubscriptionDetailQueryVariables = Exact<{
}>;
export type GetSubscriptionDetailQuery = { __typename?: 'Query', subscriptions: { __typename?: 'SubscriptionsConnection', nodes: Array<{ __typename?: 'Subscriptions', id: number, displayName: string, createdAt: string, updatedAt: string, category: SubscriptionCategoryEnum, sourceUrl: string, enabled: boolean, credential3rd?: { __typename?: 'Credential3rd', id: number, username?: string | null } | null, bangumi: { __typename?: 'BangumiConnection', nodes: Array<{ __typename?: 'Bangumi', createdAt: string, updatedAt: string, id: number, mikanBangumiId?: string | null, displayName: string, season: number, seasonRaw?: string | null, fansub?: string | null, mikanFansubId?: string | null, rssLink?: string | null, posterLink?: string | null, savePath?: string | null, homepage?: string | null }> } }> } };
export type GetSubscriptionDetailQuery = { __typename?: 'Query', subscriptions: { __typename?: 'SubscriptionsConnection', nodes: Array<{ __typename?: 'Subscriptions', id: number, displayName: string, createdAt: string, updatedAt: string, category: SubscriptionCategoryEnum, sourceUrl: string, enabled: boolean, feed: { __typename?: 'FeedsConnection', nodes: Array<{ __typename?: 'Feeds', id: number, createdAt: string, updatedAt: string, token: string, feedType: FeedTypeEnum, feedSource: FeedSourceEnum }> }, credential3rd?: { __typename?: 'Credential3rd', id: number, username?: string | null } | null, bangumi: { __typename?: 'BangumiConnection', nodes: Array<{ __typename?: 'Bangumi', createdAt: string, updatedAt: string, id: number, mikanBangumiId?: string | null, displayName: string, season: number, seasonRaw?: string | null, fansub?: string | null, mikanFansubId?: string | null, rssLink?: string | null, posterLink?: string | null, homepage?: string | null }> } }> } };
export type SyncSubscriptionFeedsIncrementalMutationVariables = Exact<{
filter: SubscriptionsFilterInput;
@ -1773,11 +2024,13 @@ export const UpdateCredential3rdDocument = {"kind":"Document","definitions":[{"k
export const DeleteCredential3rdDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"DeleteCredential3rd"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Credential3rdFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"credential3rdDelete"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}]}]}}]} as unknown as DocumentNode<DeleteCredential3rdMutation, DeleteCredential3rdMutationVariables>;
export const GetCredential3rdDetailDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetCredential3rdDetail"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"credential3rd"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"eq"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"cookies"}},{"kind":"Field","name":{"kind":"Name","value":"username"}},{"kind":"Field","name":{"kind":"Name","value":"password"}},{"kind":"Field","name":{"kind":"Name","value":"userAgent"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"credentialType"}}]}}]}}]}}]} as unknown as DocumentNode<GetCredential3rdDetailQuery, GetCredential3rdDetailQueryVariables>;
// --- GraphQL typed document nodes (codegen output) ---
// Each constant is a pre-parsed GraphQL operation AST typed via
// DocumentNode<ResultType, VariablesType>. NOTE(review): these look like
// graphql-codegen output — regenerate from the source operations rather than
// hand-patching the literals, to keep AST and types in sync (confirm).
export const CheckCredential3rdAvailableDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"CheckCredential3rdAvailable"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"credential3rdCheckAvailable"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"available"}}]}}]}}]} as unknown as DocumentNode<CheckCredential3rdAvailableQuery, CheckCredential3rdAvailableQueryVariables>;
export const InsertFeedDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"InsertFeed"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"data"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"FeedsInsertInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"feedsCreateOne"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"data"},"value":{"kind":"Variable","name":{"kind":"Name","value":"data"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"feedType"}},{"kind":"Field","name":{"kind":"Name","value":"token"}}]}}]}}]} as unknown as DocumentNode<InsertFeedMutation, InsertFeedMutationVariables>;
export const DeleteFeedDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"DeleteFeed"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"FeedsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"feedsDelete"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}]}]}}]} as unknown as DocumentNode<DeleteFeedMutation, DeleteFeedMutationVariables>;
// GetSubscriptions: paginated subscription list (nodes + paginationInfo.total/pages).
export const GetSubscriptionsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetSubscriptions"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsOrderInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"pagination"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PaginationInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptions"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"pagination"},"value":{"kind":"Variable","name":{"kind":"Name","value":"pagination"}}},{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}},{"kind":"Argument","name":{"kind":"Name","value":"orderBy"},"value":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"credentialId
"}}]}},{"kind":"Field","name":{"kind":"Name","value":"paginationInfo"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"total"}},{"kind":"Field","name":{"kind":"Name","value":"pages"}}]}}]}}]}}]} as unknown as DocumentNode<GetSubscriptionsQuery, GetSubscriptionsQueryVariables>;
export const InsertSubscriptionDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"InsertSubscription"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"data"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsInsertInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsCreateOne"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"data"},"value":{"kind":"Variable","name":{"kind":"Name","value":"data"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"credentialId"}}]}}]}}]} as unknown as DocumentNode<InsertSubscriptionMutation, InsertSubscriptionMutationVariables>;
export const UpdateSubscriptionsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"UpdateSubscriptions"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"data"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsUpdateInput"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsUpdate"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"data"},"value":{"kind":"Variable","name":{"kind":"Name","value":"data"}}},{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}}]}}]}}]} as unknown as DocumentNode<UpdateSubscriptionsMutation, UpdateSubscriptionsMutationVariables>;
// NOTE(review): DeleteSubscriptions declares its `filters` variable as a nullable
// SubscriptionsFilterInput (bare NamedType, no NonNullType wrapper), unlike
// DeleteFeed above — verify the source operation intends this asymmetry.
export const DeleteSubscriptionsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"DeleteSubscriptions"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filters"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsDelete"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filters"}}}]}]}}]} as unknown as DocumentNode<DeleteSubscriptionsMutation, DeleteSubscriptionsMutationVariables>;
// NOTE(review): GetSubscriptionDetailDocument is declared twice below; the second
// declaration additionally selects `feed { nodes { id createdAt updatedAt token
// feedType feedSource } }`. Two `export const` bindings with the same name in one
// module are a TypeScript compile error — this reads like a before/after diff
// artifact; only the version selecting `feed` (used by the detail page UI) should
// remain. Confirm against the generated file.
export const GetSubscriptionDetailDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetSubscriptionDetail"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptions"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"eq"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"credential3rd"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"username"}}]}},{"kind":"Field","name":{"kind":"Name","value":"bangumi"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind"
:"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"mikanBangumiId"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"season"}},{"kind":"Field","name":{"kind":"Name","value":"seasonRaw"}},{"kind":"Field","name":{"kind":"Name","value":"fansub"}},{"kind":"Field","name":{"kind":"Name","value":"mikanFansubId"}},{"kind":"Field","name":{"kind":"Name","value":"rssLink"}},{"kind":"Field","name":{"kind":"Name","value":"posterLink"}},{"kind":"Field","name":{"kind":"Name","value":"savePath"}},{"kind":"Field","name":{"kind":"Name","value":"homepage"}}]}}]}}]}}]}}]}}]} as unknown as DocumentNode<GetSubscriptionDetailQuery, GetSubscriptionDetailQueryVariables>;
export const GetSubscriptionDetailDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetSubscriptionDetail"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptions"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filters"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"id"},"value":{"kind":"ObjectValue","fields":[{"kind":"ObjectField","name":{"kind":"Name","value":"eq"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}}]}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"category"}},{"kind":"Field","name":{"kind":"Name","value":"sourceUrl"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"feed"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"token"}},{"kind":"Field","name":{"kind":"Name","value":"feedType"}},{"kind":"Field","name":{"kind":"Name","value":"feedSource"}}]}}]}},{"kin
d":"Field","name":{"kind":"Name","value":"credential3rd"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"username"}}]}},{"kind":"Field","name":{"kind":"Name","value":"bangumi"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"nodes"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"mikanBangumiId"}},{"kind":"Field","name":{"kind":"Name","value":"displayName"}},{"kind":"Field","name":{"kind":"Name","value":"season"}},{"kind":"Field","name":{"kind":"Name","value":"seasonRaw"}},{"kind":"Field","name":{"kind":"Name","value":"fansub"}},{"kind":"Field","name":{"kind":"Name","value":"mikanFansubId"}},{"kind":"Field","name":{"kind":"Name","value":"rssLink"}},{"kind":"Field","name":{"kind":"Name","value":"posterLink"}},{"kind":"Field","name":{"kind":"Name","value":"homepage"}}]}}]}}]}}]}}]}}]} as unknown as DocumentNode<GetSubscriptionDetailQuery, GetSubscriptionDetailQueryVariables>;
// Sync mutations: trigger an incremental feed sync, a full feed sync, or a source
// sync for subscriptions matching `filter`; each returns only the affected ids.
export const SyncSubscriptionFeedsIncrementalDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionFeedsIncremental"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filter"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsSyncOneFeedsIncremental"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filter"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionFeedsIncrementalMutation, SyncSubscriptionFeedsIncrementalMutationVariables>;
export const SyncSubscriptionFeedsFullDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionFeedsFull"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filter"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsSyncOneFeedsFull"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filter"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionFeedsFullMutation, SyncSubscriptionFeedsFullMutationVariables>;
export const SyncSubscriptionSourcesDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"SyncSubscriptionSources"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"filter"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"SubscriptionsFilterInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"subscriptionsSyncOneSources"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"filter"},"value":{"kind":"Variable","name":{"kind":"Name","value":"filter"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}}]}}]}}]} as unknown as DocumentNode<SyncSubscriptionSourcesMutation, SyncSubscriptionSourcesMutationVariables>;

View File

@ -14,6 +14,7 @@ import { Img } from '@/components/ui/img';
import { Label } from '@/components/ui/label';
import { QueryErrorView } from '@/components/ui/query-error-view';
import { Separator } from '@/components/ui/separator';
import { DELETE_FEED, INSERT_FEED } from '@/domains/recorder/schema/feeds';
import { GET_SUBSCRIPTION_DETAIL } from '@/domains/recorder/schema/subscriptions';
import { SubscriptionService } from '@/domains/recorder/services/subscription.service';
import { useInject } from '@/infra/di/inject';
@ -22,10 +23,16 @@ import {
getApolloQueryError,
} from '@/infra/errors/apollo';
import {
type DeleteFeedMutation,
type DeleteFeedMutationVariables,
FeedSourceEnum,
FeedTypeEnum,
type GetSubscriptionDetailQuery,
type InsertFeedMutation,
type InsertFeedMutationVariables,
SubscriptionCategoryEnum,
} from '@/infra/graphql/gql/graphql';
import { useQuery } from '@apollo/client';
import { useMutation, useQuery } from '@apollo/client';
import {
createFileRoute,
useCanGoBack,
@ -38,7 +45,9 @@ import {
Edit,
ExternalLink,
ListIcon,
PlusIcon,
RefreshCcwIcon,
Trash2,
} from 'lucide-react';
import { useMemo } from 'react';
import { toast } from 'sonner';
@ -91,6 +100,50 @@ function SubscriptionDetailRouteComponent() {
});
};
// Mutation hook: create a feed for the current subscription (INSERT_FEED).
// On success, the subscription-detail query is refetched; the refetch error is
// pulled out of the returned result (not thrown) and surfaced via toast, otherwise
// a success toast is shown. Mutation-level failures are toasted from onError.
const [insertFeed] = useMutation<
InsertFeedMutation,
InsertFeedMutationVariables
>(INSERT_FEED, {
onCompleted: async () => {
const result = await refetch();
// Extract the query error from the refetch result rather than try/catch.
const error = getApolloQueryError(result);
if (error) {
toast.error('Failed to add feed', {
description: apolloErrorToMessage(error),
});
return;
}
toast.success('Feed added');
},
onError: (error) => {
toast.error('Failed to add feed', {
description: apolloErrorToMessage(error),
});
},
});
// Mutation hook: delete a feed by id filter (DELETE_FEED). Mirrors the insertFeed
// hook: refetch the detail query on success, toast the refetch error if the result
// carries one, success toast otherwise; mutation failures are toasted from onError.
const [deleteFeed] = useMutation<
DeleteFeedMutation,
DeleteFeedMutationVariables
>(DELETE_FEED, {
onCompleted: async () => {
const result = await refetch();
// Extract the query error from the refetch result rather than try/catch.
const error = getApolloQueryError(result);
if (error) {
toast.error('Failed to delete feed', {
description: apolloErrorToMessage(error),
});
return;
}
toast.success('Feed deleted');
},
onError: (error) => {
toast.error('Failed to delete feed', {
description: apolloErrorToMessage(error),
});
},
});
const subscription = data?.subscriptions?.nodes?.[0];
const sourceUrlMeta = useMemo(
@ -314,6 +367,85 @@ function SubscriptionDetailRouteComponent() {
</div>
</div>
<Separator />
{/* Associated Feeds section: header row with an "Add Feed" action, then a
    responsive card grid of the subscription's feeds (empty-state text when none). */}
<div className="space-y-4">
<div className="flex items-center justify-between">
<Label className="font-medium text-sm">Associated Feeds</Label>
{/* Creates an RSS feed of SubscriptionEpisode source for this subscription.
    NOTE(review): Number.parseInt(id) is called without an explicit radix —
    prefer Number.parseInt(id, 10); presumably the `id` route param is a
    decimal integer string — confirm against the route definition. */}
<Button
variant="outline"
size="sm"
onClick={() =>
insertFeed({
variables: {
data: {
subscriptionId: Number.parseInt(id),
feedType: FeedTypeEnum.Rss,
feedSource: FeedSourceEnum.SubscriptionEpisode,
},
},
})
}
>
<PlusIcon className="h-4 w-4" />
Add Feed
</Button>
</div>
<div className="grid grid-cols-1 gap-3 sm:grid-cols-2 lg:grid-cols-3">
{subscription.feed?.nodes &&
subscription.feed.nodes.length > 0 ? (
subscription.feed.nodes.map((feed) => (
<Card
key={feed.id}
className="group relative cursor-pointer p-4 transition-colors hover:bg-accent/50"
onClick={() => {
window.open(`/api/feeds/rss/${feed.token}`, '_blank');
}}
>
{/* Clicking anywhere on the card opens the feed's RSS endpoint in a new tab. */}
<div className="flex flex-col space-y-2">
<div className="flex items-center justify-between">
<Label className="whitespace-nowrap font-medium text-sm capitalize">
<span>{feed.feedType} Feed</span>
</Label>
{/* Hover-revealed delete; stopPropagation keeps the card's
    open-in-new-tab click handler from also firing. */}
<Button
variant="ghost"
size="sm"
className="h-6 w-6 p-0 opacity-0 transition-opacity group-hover:opacity-100"
onClick={(e) => {
e.stopPropagation();
deleteFeed({
variables: {
filters: {
id: {
eq: feed.id,
},
},
},
});
}}
>
<Trash2 className="h-3 w-3 text-destructive" />
</Button>
</div>
{/* Feed token shown verbatim (also the path segment of the RSS URL). */}
<code className="break-all rounded bg-muted px-2 py-1 font-mono text-xs">
{feed.token}
</code>
<div className="text-muted-foreground text-xs">
{format(new Date(feed.createdAt), 'MM-dd HH:mm')}
</div>
</div>
</Card>
))
) : (
<div className="col-span-full py-8 text-center text-muted-foreground">
No associated feeds now
</div>
)}
</div>
</div>
{subscription.bangumi?.nodes &&
subscription.bangumi.nodes.length > 0 && (
<>

View File

@ -22,7 +22,7 @@ dev-proxy:
pnpm run --parallel --filter=proxy dev
dev-recorder:
watchexec -r -e rs,toml,yaml,json,env -- cargo run -p recorder --bin recorder_cli -- --environment=development --graceful-shutdown=false
watchexec -r -e rs,toml,yaml,json,env -- cargo run -p recorder --bin recorder_cli -- --environment=development --graceful-shutdown=false
dev-recorder-migrate-down:
cargo run -p recorder --bin migrate_down -- --environment development