feat: temp save

This commit is contained in:
master 2024-03-22 21:19:07 +08:00
parent 6149710fe0
commit 4f124c9c0f
27 changed files with 798 additions and 29 deletions

Cargo.lock (generated)

@@ -191,6 +191,15 @@ dependencies = [
"zstd-safe",
]
[[package]]
name = "async-stdin"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1ff8b5d9b5ec29e0f49583ba71847b8c8888b67a8510133048a380903aa6822"
dependencies = [
"tokio",
]
[[package]]
name = "async-stream"
version = "0.3.5"
@@ -213,6 +222,17 @@ dependencies = [
"syn 2.0.50",
]
[[package]]
name = "async-throttle"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c99532de164435a0b91279e715bff4fa0d164643b409a67761907ffc210ee8f"
dependencies = [
"backoff",
"dashmap",
"tokio",
]
[[package]]
name = "async-trait"
version = "0.1.77"
@@ -387,6 +407,20 @@ dependencies = [
"url",
]
[[package]]
name = "backoff"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1"
dependencies = [
"futures-core",
"getrandom",
"instant",
"pin-project-lite",
"rand",
"tokio",
]
[[package]]
name = "backon"
version = "0.4.2"
@@ -2289,6 +2323,15 @@ dependencies = [
"yaml-rust",
]
[[package]]
name = "instant"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if",
]
[[package]]
name = "ipnet"
version = "2.9.0"
@@ -3010,6 +3053,15 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "oxilangtag"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23f3f87617a86af77fa3691e6350483e7154c2ead9f1261b75130e21ca0f8acb"
dependencies = [
"serde",
]
[[package]]
name = "parcel_selectors"
version = "0.26.4"
@@ -3583,6 +3635,7 @@ dependencies = [
"loco-rs",
"maplit",
"opendal",
"oxilangtag",
"qbit-rs",
"quirks_path",
"regex",
@@ -3599,11 +3652,13 @@ dependencies = [
"thiserror",
"tl",
"tokio",
"tokio-utils",
"tracing",
"tracing-subscriber",
"url",
"uuid",
"validator",
"weak-table",
]
[[package]]
@@ -4620,6 +4675,15 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
[[package]]
name = "shutdown-async"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2799e69bde7e68bedd86c6d94bffa783219114f1f31435ddda61f4aeba348ff"
dependencies = [
"tokio",
]
[[package]]
name = "signal-hook"
version = "0.3.17"
@@ -5416,6 +5480,18 @@ dependencies = [
"tracing",
]
[[package]]
name = "tokio-utils"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de75f75f464153a50fe48b9675360e3cf2ae1d7d81f9751363bd2ee4888f5ce8"
dependencies = [
"async-stdin",
"async-throttle",
"shutdown-async",
"tub",
]
[[package]]
name = "toml_datetime"
version = "0.6.5"
@@ -5569,6 +5645,16 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "tub"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bca43faba247bc76eb1d6c1b8b561e4a1c5bdd427cc3d7a007faabea75c683a"
dependencies = [
"crossbeam-queue",
"tokio",
]
[[package]]
name = "typed-builder"
version = "0.14.0"
@@ -5918,6 +6004,12 @@ dependencies = [
"web-sys",
]
[[package]]
name = "weak-table"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "323f4da9523e9a669e1eaf9c6e763892769b1d38c623913647bfdc1532fe4549"
[[package]]
name = "web-sys"
version = "0.3.68"


@@ -59,6 +59,9 @@ html-escape = "0.2.13"
opendal = "0.45.0"
librqbit-core = "3.5.0"
quirks_path = { path = "../quirks_path" }
tokio-utils = "0.1.2"
weak-table = "0.3.2"
oxilangtag = { version = "0.1.5", features = ["serde"] }
[dev-dependencies]
serial_test = "2.0.0"


@@ -15,7 +15,7 @@ use sea_orm::DatabaseConnection;
use crate::{
controllers, migrations::Migrator, models::entities::subscribers,
workers::subscription_worker::SubscriptionWorker,
workers::subscription::SubscriptionWorker,
};
pub struct App;


@@ -0,0 +1,105 @@
use lazy_static::lazy_static;
use oxilangtag::LanguageTag;
use serde::{Deserialize, Serialize};
use crate::parsers::errors::ParseError;
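// Ad-hoc language aliases commonly seen in fansub release titles, mapped onto BCP 47 tags.
// Matching in `LanguagePreset::parse` below is case-insensitive and substring-based, and the
// first pair that hits in this order wins.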
lazy_static! {
static ref LANGTAG_ADHOC_ALIAS_PAIRS: Vec<(&'static str, &'static str)> = {
vec![
("tc", "zh-TW"),
("zh-tw", "zh-TW"),
("cht", "zh-TW"),
("", "zh-TW"),
("sc", "zh-CN"),
("chs", "zh-CN"),
("", "zh-CN"),
("zh-cn", "zh-CN"),
("eng", "en"),
("", "en"),
("jp", "ja-JP"),
("jpn", "ja-JP"),
("", "ja"),
]
};
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum LanguagePresetName {
#[serde(rename = "zh-TW")]
ZhCN,
#[serde(rename = "zh-CN")]
ZhTW,
#[serde(rename = "zh")]
Zh,
#[serde(rename = "en")]
En,
#[serde(rename = "ja")]
Ja,
}
#[derive(Debug, Clone)]
pub struct LanguagePreset {
name: LanguagePresetName,
tag: LanguageTag<String>,
}
impl LanguagePreset {
pub fn parse<S: AsRef<str>>(s: S) -> Result<Self, ParseError> {
let s = s.as_ref();
let s_lower = s.to_lowercase();
let mut s_rc = s;
for &(alias, v) in LANGTAG_ADHOC_ALIAS_PAIRS.iter() {
if s_lower.contains(alias) {
s_rc = v;
break;
}
}
let lang_tag = LanguageTag::parse(s_rc.to_string())?;
let primary = lang_tag.primary_language();
let region = lang_tag.region();
let kind = match primary {
"zh" if region == "TW" => LanguagePresetName::ZhTW,
"zh" if region == "CN" => LanguagePresetName::ZhCN,
"zh" => LanguagePresetName::Zh,
"en" => LanguagePresetName::En,
"ja" => LanguagePresetName::Ja,
_ => Err(ParseError::UnsupportedLanguagePreset(s_rc.to_string()))?,
};
Ok(Self {
name: kind,
tag: lang_tag,
})
}
pub fn name(&self) -> &LanguagePresetName {
&self.name
}
pub fn name_str(&self) -> &'static str {
self.name.as_static_str()
}
pub fn tag(&self) -> &LanguageTag<String> {
&self.tag
}
pub fn tag_str(&self) -> &str {
self.tag.as_str()
}
}
impl LanguagePresetName {
pub fn as_static_str(&self) -> &'static str {
match self {
Self::ZhTW => "zh-TW",
Self::ZhCN => "zh-CN",
Self::Zh => "zh",
Self::En => "en",
Self::Ja => "ja",
}
}
}
impl AsRef<str> for LanguagePresetName {
fn as_ref(&self) -> &str {
self.as_static_str()
}
}
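A minimal usage sketch (illustrative only, not part of this commit) exercising LanguagePreset::parse with a few representative alias inputs; it could live as a test module at the end of this file:
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parses_adhoc_language_aliases() {
// alias matching is case-insensitive, so upper-case release tags also resolve
assert_eq!(LanguagePreset::parse("CHT").unwrap().name_str(), "zh-TW");
assert_eq!(LanguagePreset::parse("chs").unwrap().name_str(), "zh-CN");
assert_eq!(LanguagePreset::parse("ENG").unwrap().name_str(), "en");
assert_eq!(LanguagePreset::parse("jpn").unwrap().name_str(), "ja");
// a well-formed but unmapped tag falls through to UnsupportedLanguagePreset
assert!(LanguagePreset::parse("fr").is_err());
}
}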


@@ -9,6 +9,9 @@ pub mod migrations;
pub mod models;
pub mod parsers;
pub mod path;
pub mod search;
pub mod tasks;
pub mod views;
pub mod workers;
pub mod i18n;


@@ -1,6 +1,25 @@
use regex::Regex;
use sea_orm::entity::prelude::*;
pub use super::entities::bangumi::*;
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl BangumiFilter {
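/// Returns `Ok(true)` when the title matches the filter: regex filters are OR-joined into a
/// single pattern and take precedence; otherwise each plaintext filter is checked with `contains`.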
pub fn is_match(&self, title: &str) -> eyre::Result<bool> {
if let Some(regex_filters) = &self.regex_filters {
let combined_regex = Regex::new(&regex_filters.join("|"))?;
if combined_regex.is_match(title) {
return Ok(true);
}
} else if let Some(plain_filters) = &self.plaintext_filters {
for f in plain_filters {
if title.contains(f) {
return Ok(true);
}
}
}
Ok(false)
}
}


@@ -1,10 +1,46 @@
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "bangumi_distribution"
)]
#[serde(rename_all = "snake_case")]
pub enum BangumiDistribution {
// string_value mirrors the serde snake_case names used for JSON
#[sea_orm(string_value = "movie")]
Movie,
#[sea_orm(string_value = "ova")]
Ova,
#[sea_orm(string_value = "oad")]
Oad,
#[sea_orm(string_value = "sp")]
Sp,
#[sea_orm(string_value = "ex")]
Ex,
#[sea_orm(string_value = "tv")]
Tv,
#[sea_orm(string_value = "unknown")]
Unknown,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
#[serde(rename_all = "snake_case")]
pub enum BangumiRenameMethod {
Pn,
Advance,
SubtitlePn,
SubtitleAdvance,
}
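// Optional per-bangumi overrides mirroring the subscriber-level SubscriberBangumiConfig fields.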
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscriberBangumiConfigOverride {
pub leading_fansub_tag: Option<bool>,
pub complete_history_episodes: Option<bool>,
pub rename_method: Option<BangumiRenameMethod>,
pub remove_bad_torrent: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct BangumiFilter {
pub name: Option<Vec<String>>,
pub group: Option<Vec<String>>,
pub plaintext_filters: Option<Vec<String>>,
pub regex_filters: Option<Vec<String>>,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
@@ -25,6 +61,7 @@ pub struct Model {
pub poster_link: Option<String>,
pub save_path: Option<String>,
pub deleted: bool,
pub subscriber_conf_override: Option<SubscriberBangumiConfigOverride>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]


@@ -4,7 +4,11 @@ use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "downloader_type")]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "downloader_category"
)]
#[serde(rename_all = "snake_case")]
pub enum DownloaderCategory {
#[sea_orm(string_value = "qbittorrent")]


@@ -3,9 +3,25 @@
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
use super::bangumi::BangumiRenameMethod;
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscriberBangumiConfig {
pub leading_group_tag: Option<bool>,
pub leading_fansub_tag: bool,
pub complete_history_episodes: bool,
pub rename_method: BangumiRenameMethod,
pub remove_bad_torrent: bool,
}
impl Default for SubscriberBangumiConfig {
fn default() -> Self {
Self {
leading_fansub_tag: false,
complete_history_episodes: false,
rename_method: BangumiRenameMethod::Pn,
remove_bad_torrent: false,
}
}
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]


@@ -26,14 +26,6 @@ lazy_static! {
FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
];
pub static ref SUBTITLE_LANG: Vec<(&'static str, Vec<&'static str>)> = {
vec![
(LANG_ZH_TW, vec!["tc", "cht", "", "zh-tw"]),
(LANG_ZH, vec!["sc", "chs", "", "zh", "zh-cn"]),
(LANG_EN, vec!["en", "eng", ""]),
(LANG_JP, vec!["jp", "jpn", ""]),
]
};
pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {


@@ -12,4 +12,8 @@ pub enum ParseError {
expected: String,
found: String,
},
#[error("Parse language tag error: {0}")]
LanguageTagError(#[from] oxilangtag::LanguageTagParseError),
#[error("Unsupported language preset: {0}")]
UnsupportedLanguagePreset(String),
}


@@ -3,5 +3,6 @@ pub mod errors;
pub mod html;
pub mod mikan;
pub mod raw;
pub mod title_parser;
pub mod rss;
pub mod tmdb;
pub mod torrent;


@@ -0,0 +1,15 @@
use crate::{
models::entities::subscriptions,
parsers::mikan::{parse_episode_meta_from_mikan_homepage, MikanRssItem},
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RssItem {
Mikan(MikanRssItem),
}
// pub async fn parse_official_title_from_rss_item (rss: &subscriptions::Model)
// -> String { if rss.category == subscriptions::SubscriptionCategory::Mikan
// { let res = parse_episode_meta_from_mikan_homepage(rss.source_url)
// }
// }


@@ -0,0 +1,4 @@
pub mod tmdb_bgm_parser;
pub mod tmdb_client;
pub mod tmdb_dtos;
pub mod tmdb_list_parser;


@@ -0,0 +1,158 @@
use serde::{Deserialize, Serialize};
use crate::{
i18n::LanguagePreset,
models::bangumi::BangumiDistribution,
parsers::tmdb::{
tmdb_client::{TmdbApiClient, TMDB_API_ORIGIN},
tmdb_dtos::{TmdbSearchMultiItemDto, TmdbSearchMultiPageDto},
},
};
impl BangumiDistribution {
pub fn prefer_tmdb_media_type(&self) -> &str {
match self {
BangumiDistribution::Movie => "movie",
BangumiDistribution::Tv => "tv",
_ => "tv",
}
}
pub fn from_tmdb_media_type(media_type: &str) -> Self {
match media_type {
"movie" => BangumiDistribution::Movie,
_ => BangumiDistribution::Tv,
}
}
}
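// TMDB genre id 16 is the "Animation" genre shared by movie and TV results.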
const TMDB_ANIMATION_GENRE_ID: i64 = 16;
#[inline]
fn build_tmdb_search_api_url(query: &str, lang: &LanguagePreset, page: u32) -> String {
format!(
"{TMDB_API_ORIGIN}/3/search/multi?language={lang_tag}&query={query}&page={page}&\
include_adult=true",
lang_tag = lang.name_str(),
)
}
#[inline]
fn build_tmdb_info_api_url(
id: i64,
lang: &LanguagePreset,
distribution: &BangumiDistribution,
) -> String {
let tmdb_media_type = match distribution {
BangumiDistribution::Movie => "movie",
BangumiDistribution::Tv => "tv",
_ => "tv",
};
format!(
"{TMDB_API_ORIGIN}/3/{tmdb_media_type}/{id}?language={lang_tag}",
lang_tag = lang.name_str()
)
}
fn tmdb_genres_is_match_animation(genre_ids: &[i64]) -> bool {
genre_ids.contains(&TMDB_ANIMATION_GENRE_ID)
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct TmdbBangumiItem {
pub id: i64,
pub name: String,
pub origin_name: String,
pub last_season: i32,
pub year: Option<String>,
pub poster_link: Option<String>,
}
pub async fn search_tmdb_items_from_title_and_lang(
tmdb_client: &TmdbApiClient,
title: &str,
lang: &LanguagePreset,
) -> eyre::Result<Vec<TmdbSearchMultiItemDto>> {
let mut items = vec![];
let page_num = {
let search_url = build_tmdb_search_api_url(title, lang, 1);
let first_page: TmdbSearchMultiPageDto =
tmdb_client.fetch(|fetch| fetch.get(search_url)).await?;
items.extend(first_page.results);
first_page.total_pages
};
for i in 2..=page_num {
let search_url = build_tmdb_search_api_url(title, lang, i);
let page: TmdbSearchMultiPageDto = tmdb_client.fetch(|fetch| fetch.get(search_url)).await?;
items.extend(page.results);
}
Ok(items)
}
pub async fn get_tmdb_info_from_id_lang_and_distribution(
tmdb_client: &TmdbApiClient,
id: i64,
lang: &LanguagePreset,
distribution: &BangumiDistribution,
) -> eyre::Result<TmdbSearchMultiItemDto> {
let info_url = build_tmdb_info_api_url(id, lang, distribution);
let info: TmdbSearchMultiItemDto = tmdb_client.fetch(|fetch| fetch.get(info_url)).await?;
Ok(info)
}
pub async fn parse_tmdb_bangumi_from_title_and_lang(
tmdb_client: &TmdbApiClient,
title: &str,
lang: &LanguagePreset,
distribution: &BangumiDistribution,
) -> eyre::Result<Option<TmdbBangumiItem>> {
let mut search_result = search_tmdb_items_from_title_and_lang(tmdb_client, title, lang).await?;
if search_result.is_empty() {
search_result =
search_tmdb_items_from_title_and_lang(tmdb_client, &title.replace(' ', ""), lang)
.await?;
}
if search_result.is_empty() {
return Ok(None);
} else {
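// Pick the best candidate: matching the preferred TMDB media type scores 10 and carrying
// the animation genre scores 1, so media-type agreement always dominates.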
let mut target_and_priority: Option<(TmdbSearchMultiItemDto, u32)> = None;
for item in search_result {
let is_animation = tmdb_genres_is_match_animation(&item.genre_ids);
let is_prefer_media_type = item.media_type == distribution.prefer_tmdb_media_type();
let priority =
(if is_prefer_media_type { 10 } else { 0 }) + (if is_animation { 1 } else { 0 });
let last_priority = target_and_priority.as_ref().map(|(_, p)| *p);
if last_priority.map_or(true, |last| priority > last) {
target_and_priority = Some((item, priority));
}
}
if let Some((target, _)) = target_and_priority {
let info: TmdbSearchMultiItemDto = get_tmdb_info_from_id_lang_and_distribution(
tmdb_client,
target.id,
lang,
&BangumiDistribution::from_tmdb_media_type(&target.media_type),
)
.await?;
// TODO(temp save): last_season should come from the TV detail response
// (TmdbTvSeriesDetailDto::number_of_seasons); the multi-search DTO carries no season
// count, so default to a single season for now.
let last_season = 1;
Ok(Some(TmdbBangumiItem {
id: info.id,
name: info.name,
origin_name: info.original_name,
last_season,
year: info.first_air_date,
poster_link: info.poster_path,
}))
} else {
Ok(None)
}
}
}


@@ -0,0 +1,74 @@
use std::sync::{Arc, Weak};
use tokio::sync::RwLock;
use lazy_static::lazy_static;
use opendal::raw::Accessor;
use reqwest::header::{HeaderMap, HeaderValue, ACCEPT, AUTHORIZATION};
use tokio_utils::RateLimiter;
use weak_table::WeakValueHashMap;
use crate::downloaders::defs::DEFAULT_USER_AGENT;
pub(crate) const TMDB_API_ORIGIN: &str = "https://api.themoviedb.org";
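/// TMDB API client keyed by API token: instances are deduplicated through a weak-value map so
/// concurrent callers share one client per token, and every request passes the rate limiter.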
pub struct TmdbApiClient {
api_token: String,
rate_limiter: RateLimiter,
fetch_client: reqwest::Client,
headers: HeaderMap,
}
lazy_static! {
static ref TMDB_API_CLIENT_MAP: Arc<RwLock<WeakValueHashMap<String, Weak<TmdbApiClient>>>> =
{ Arc::new(RwLock::new(WeakValueHashMap::new())) };
}
impl TmdbApiClient {
pub async fn new<S: AsRef<str>>(api_token: S) -> Arc<Self> {
let api_token = api_token.as_ref();
{
let map_read = TMDB_API_CLIENT_MAP.read().await;
if let Some(client) = map_read.get(api_token) {
return client;
}
}
let client = Arc::new(TmdbApiClient {
api_token: api_token.to_string(),
rate_limiter: RateLimiter::new(std::time::Duration::from_millis(50)),
fetch_client: reqwest::Client::builder()
.user_agent(DEFAULT_USER_AGENT)
.build()
.expect("building the TMDB reqwest client should not fail"),
headers: {
let mut header_map = HeaderMap::new();
header_map.insert(ACCEPT, HeaderValue::from_static("application/json"));
header_map.insert(
AUTHORIZATION,
HeaderValue::from_str(&format!("Bearer {api_token}"))
.expect("TMDB api token should be a valid header value"),
);
header_map
},
});
{
let mut map_write = TMDB_API_CLIENT_MAP.write().await;
map_write.insert(api_token.to_string(), client.clone());
}
client.clone()
}
pub fn get_api_token(&self) -> &str {
&self.api_token
}
pub async fn fetch<R, F>(&self, f: F) -> Result<R, reqwest::Error>
where
R: serde::de::DeserializeOwned,
F: FnOnce(&reqwest::Client) -> reqwest::RequestBuilder,
{
self.rate_limiter
.throttle(|| async {
f(&self.fetch_client)
.headers(self.headers.clone())
.send()
.await?
.json::<R>()
.await
})
.await
}
}
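A small illustrative helper (a sketch, not part of this commit; fetch_json and its parameters are hypothetical) showing the intended call pattern against TmdbApiClient:
async fn fetch_json<R: serde::de::DeserializeOwned>(
client: &TmdbApiClient,
url: String,
) -> Result<R, reqwest::Error> {
// any DTO from tmdb_dtos that implements Deserialize can be requested this way
client.fetch(|http| http.get(url)).await
}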


@@ -0,0 +1,161 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct TmdbListItemDto {
pub id: i64,
pub name: String,
pub adult: bool,
pub poster_path: Option<String>,
pub backdrop_path: Option<String>,
pub media_type: String,
pub original_language: String,
pub original_name: String,
pub overview: String,
pub genre_ids: Vec<i64>,
pub popularity: f64,
pub first_air_date: String,
pub origin_country: Option<Vec<String>>,
pub vote_average: f32,
pub vote_count: i32,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct TmdbListPageDto {
pub id: i64,
pub page: u32,
pub sort_by: Option<String>,
pub total_pages: u32,
pub total_results: u32,
pub name: String,
pub results: Vec<TmdbListItemDto>,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct TmdbGenresObjDto {
pub id: i64,
pub name: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct TmdbEpisodeAirDto {
pub id: i64,
pub name: String,
pub overview: String,
pub vote_average: f32,
pub vote_count: i32,
pub air_date: String,
pub episode_number: i32,
pub episode_type: String,
pub production_code: String,
pub runtime: Option<i32>,
pub season_number: i32,
pub show_id: i64,
pub still_path: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct TmdbSeasonDto {
pub air_date: String,
pub episode_count: i32,
pub id: i64,
pub name: String,
pub overview: String,
pub poster_path: Option<String>,
pub season_number: i32,
pub vote_average: f32,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct TmdbSpokenLanguageDto {
pub iso_639_1: String,
pub name: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct TmdbTvSeriesDetailDto {
pub adult: bool,
pub id: i64,
pub name: String,
pub backdrop_path: Option<String>,
pub episode_run_time: Option<Vec<i32>>,
pub genres: Vec<TmdbGenresObjDto>,
pub first_air_date: Option<String>,
pub home_page: Option<String>,
pub in_production: bool,
pub languages: Vec<String>,
pub last_air_date: Option<String>,
pub last_episode_to_air: Option<TmdbEpisodeAirDto>,
pub next_episode_to_air: Option<TmdbEpisodeAirDto>,
pub number_of_episodes: i32,
pub number_of_seasons: i32,
pub origin_country: Vec<String>,
pub original_language: String,
pub original_name: String,
pub overview: String,
pub popularity: f32,
pub poster_path: Option<String>,
pub seasons: Vec<TmdbSeasonDto>,
pub spoken_languages: Vec<TmdbSpokenLanguageDto>,
pub status: String,
pub tagline: String,
pub vote_average: f32,
pub vote_count: i32,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct TmdbMovieDetailDto {
pub adult: bool,
pub backdrop_path: Option<String>,
pub homepage: Option<String>,
pub id: i64,
pub budget: i64,
pub imdb_id: Option<String>,
pub original_language: String,
pub original_title: String,
pub overview: String,
pub popularity: f32,
pub poster_path: Option<String>,
pub release_date: String,
pub revenue: i32,
pub runtime: Option<i32>,
pub spoken_languages: Vec<TmdbSpokenLanguageDto>,
pub status: String,
pub tagline: String,
pub title: String,
pub video: bool,
pub vote_average: f32,
pub vote_count: i32,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct TmdbSearchMultiItemDto {
pub adult: bool,
pub backdrop_path: Option<String>,
pub id: i64,
pub name: String,
pub original_language: String,
pub original_name: String,
pub overview: String,
pub poster_path: Option<String>,
pub media_type: String,
pub genre_ids: Vec<i64>,
pub popularity: f32,
pub first_air_date: Option<String>,
pub vote_average: f32,
pub vote_count: i32,
pub origin_country: Vec<String>,
}
#[derive(Debug, Clone, PartialEq)]
pub enum TmdbMediaDetailDto {
Tv(TmdbTvSeriesDetailDto),
Movie(TmdbMovieDetailDto),
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct TmdbSearchMultiPageDto {
pub total_results: u32,
pub total_pages: u32,
pub page: u32,
pub results: Vec<TmdbSearchMultiItemDto>,
}


@@ -0,0 +1,44 @@
use std::fmt::Debug;
use crate::{
i18n::LanguagePreset,
parsers::tmdb::{
tmdb_client::{TmdbApiClient, TMDB_API_ORIGIN},
tmdb_dtos::{TmdbListItemDto, TmdbListPageDto},
},
};
#[inline]
fn build_tmdb_list_api_url(list_id: i64, lang: &LanguagePreset, page: u32) -> String {
format!(
"{TMDB_API_ORIGIN}/4/list/{list_id}?language={lang_tag}&{page}",
lang_tag = lang.name_str()
)
}
pub async fn parse_tmdb_list_items_from_list_api(
list_id: i64,
lang: &LanguagePreset,
tmdb_client: &TmdbApiClient,
) -> eyre::Result<Vec<TmdbListItemDto>> {
let mut items: Vec<TmdbListItemDto> = vec![];
let page_num = {
let first_page: TmdbListPageDto = tmdb_client
.fetch(|fetch| fetch.get(build_tmdb_list_api_url(list_id, lang, 1)))
.await?;
items.extend(first_page.results);
first_page.total_pages
};
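// TMDB pages are 1-based; any remaining pages are fetched sequentially after the first.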
for i in 2..=page_num {
let page: TmdbListPageDto = tmdb_client
.fetch(|fetch| fetch.get(build_tmdb_list_api_url(list_id, lang, i)))
.await?;
items.extend(page.results);
}
Ok(items)
}


@@ -5,7 +5,7 @@ use quirks_path::Path;
use regex::Regex;
use serde::{Deserialize, Serialize};
use crate::parsers::defs::SUBTITLE_LANG;
use crate::i18n::LanguagePreset;
lazy_static! {
static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
@@ -84,17 +84,11 @@ fn get_season_and_title(season_and_title: &str) -> (String, i32) {
(title, season)
}
fn get_subtitle_lang(media_name: &str) -> Option<&str> {
let media_name_lower = media_name.to_lowercase();
for (lang, lang_aliases) in SUBTITLE_LANG.iter() {
if lang_aliases
.iter()
.any(|alias| media_name_lower.contains(alias))
{
return Some(lang);
}
}
return None;
fn get_subtitle_lang(subtitle_str: &str) -> Option<&'static str> {
let media_name_lower = subtitle_str.trim().to_lowercase();
LanguagePreset::parse(media_name_lower)
.ok()
.map(|p| p.name_str())
}
pub fn parse_episode_media_meta_from_torrent(


@@ -71,7 +71,7 @@ pub fn gen_bangumi_sub_path(data: &bangumi::Model) -> PathBuf {
}
pub fn rule_name(bgm: &bangumi::Model, conf: &subscribers::SubscriberBangumiConfig) -> String {
if let (Some(true), Some(group_name)) = (conf.leading_group_tag, &bgm.fansub) {
if let (true, Some(group_name)) = (conf.leading_fansub_tag, &bgm.fansub) {
format!("[{}] {} S{}", group_name, bgm.official_title, bgm.season)
} else {
format!("{} S{}", bgm.official_title, bgm.season)


@@ -0,0 +1,41 @@
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use tracing::info;
use crate::models::{bangumi, subscribers};
pub struct CollectHistoryEpisodesWorker {
pub ctx: AppContext,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum CollectHistoryEpisodesWorkerArgs {
CollectFromBangumiEntity(),
}
impl CollectHistoryEpisodesWorker {
pub async fn collect_history_episodes(bangumi: &bangumi::Model, only_season: bool) {
info!(
"Start collecting {} season {}...",
bangumi.official_title, bangumi.season
);
}
}
impl worker::AppWorker<CollectHistoryEpisodesWorkerArgs> for CollectHistoryEpisodesWorker {
fn build(ctx: &AppContext) -> Self {
Self { ctx: ctx.clone() }
}
}
#[async_trait]
impl worker::Worker<CollectHistoryEpisodesWorkerArgs> for CollectHistoryEpisodesWorker {
async fn perform(&self, args: CollectHistoryEpisodesWorkerArgs) -> worker::Result<()> {
println!("================================================");
let db = &self.ctx.db;
println!("================================================");
Ok(())
}
}


@@ -1 +1,4 @@
pub mod subscription_worker;
pub mod collect;
pub mod rename;
pub mod subscription;
pub mod torrent;
