feat: do a little work

@@ -46,6 +46,7 @@ uni-path = "1.51.1"
 tl = { version = "0.7.8", features = ["simd"] }
 lightningcss = "1.0.0-alpha.54"
 html-escape = "0.2.13"
+opendal = "0.45.0"

 [lib]
 name = "recorder"

@@ -1,6 +1,7 @@
 use std::path::Path;

 use async_trait::async_trait;
 use axum::Router;
 use loco_rs::{
     app::{AppContext, Hooks},
     boot::{create_app, BootResult, StartMode},
@@ -15,7 +16,7 @@ use sea_orm::DatabaseConnection;

 use crate::{
     controllers, migrations::Migrator, models::entities::subscribers,
-    workers::downloader::DownloadWorker,
+    workers::subscription_worker::SubscriptionWorker,
 };

 pub struct App;
@@ -47,7 +48,7 @@ impl Hooks for App {
     }

     fn connect_workers<'a>(p: &'a mut Processor, ctx: &'a AppContext) {
-        p.register(DownloadWorker::build(ctx));
+        p.register(SubscriptionWorker::build(ctx));
     }

     fn register_tasks(_tasks: &mut Tasks) {}

crates/recorder/src/config/dal_conf.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
+use serde::{Deserialize, Serialize};
+
+pub fn default_app_dal_fs_root() -> String {
+    String::from("data")
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct AppDalConf {
+    pub fs_root: String,
+}
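
Note: an illustrative round-trip sketch (not part of the commit) of the shape `AppDalConf` expects; serde_json is assumed available, as the sibling config module already uses it:

```rust
// Hypothetical check: AppDalConf deserializes from a plain object like the
// `dal` settings section, and the fs_root default helper returns "data".
let conf: AppDalConf = serde_json::from_value(serde_json::json!({ "fs_root": "data" }))?;
assert_eq!(conf.fs_root, default_app_dal_fs_root());
```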

crates/recorder/src/config/mod.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
+pub mod dal_conf;
+pub use dal_conf::AppDalConf;
+use eyre::OptionExt;
+use itertools::Itertools;
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
+
+pub const DAL_CONF_KEY: &str = "dal";
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct AppCustomConf {
+    pub dal: AppDalConf,
+}
+
+pub fn deserialize_key_path_from_json_value<T: DeserializeOwned>(
+    key_path: &[&str],
+    value: &serde_json::Value,
+) -> eyre::Result<T> {
+    let mut stack = vec![("", value)];
+    for key in key_path {
+        let current = stack.last().unwrap().1;
+        if let Some(v) = current.get(key) {
+            stack.push((key, v));
+        } else {
+            let failed_key_path = stack.iter().map(|s| s.0).collect_vec().join(".");
+            return Err(eyre::eyre!(
+                "can not find config key {} of settings",
+                failed_key_path
+            ));
+        }
+    }
+    let result: T = serde_json::from_value(stack.pop().unwrap().1.clone())?;
+    Ok(result)
+}
+
+pub fn deserialize_key_path_from_loco_rs_config<T: DeserializeOwned>(
+    key_path: &[&str],
+    app_config: &loco_rs::config::Config,
+) -> eyre::Result<T> {
+    let settings = app_config
+        .settings
+        .as_ref()
+        .ok_or_eyre("App config setting not set")?;
+    deserialize_key_path_from_json_value(key_path, settings)
+}
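
Note: an illustrative sketch (not part of the commit) of how the new helpers might be wired together to read the `dal` settings section:

```rust
// Hypothetical helper: pull AppDalConf out of the loco-rs `settings` value
// using the new key-path deserializer and the DAL_CONF_KEY constant ("dal").
use crate::config::{deserialize_key_path_from_loco_rs_config, AppDalConf, DAL_CONF_KEY};

fn load_dal_conf(app_config: &loco_rs::config::Config) -> eyre::Result<AppDalConf> {
    deserialize_key_path_from_loco_rs_config(&[DAL_CONF_KEY], app_config)
}
```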

crates/recorder/src/dal/mod.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
+use bytes::Bytes;
+use opendal::{layers::LoggingLayer, services, Operator};
+use serde::{Deserialize, Serialize};
+use url::Url;
+use uuid::Uuid;
+
+use crate::{
+    config::AppDalConf,
+    path::{VFSSubPath, VFSSubPathBuf},
+};
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum AppDalContentCategory {
+    Poster,
+}
+
+impl AsRef<str> for AppDalContentCategory {
+    fn as_ref(&self) -> &str {
+        match self {
+            Self::Poster => "poster",
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct AppDalContext {
+    pub config: AppDalConf,
+}
+
+pub enum DalStoredUrl {
+    RelativePath { path: String },
+    Absolute { url: Url },
+}
+
+impl AppDalContext {
+    pub fn new(app_dal_conf: AppDalConf) -> Self {
+        Self {
+            config: app_dal_conf,
+        }
+    }
+
+    pub async fn store_blob(
+        &self,
+        content_category: AppDalContentCategory,
+        extname: &str,
+        data: Bytes,
+        subscriber_pid: &str,
+    ) -> eyre::Result<DalStoredUrl> {
+        let basename = format!("{}{}", Uuid::new_v4(), extname);
+        let mut dirname = [subscriber_pid, content_category.as_ref()]
+            .into_iter()
+            .map(VFSSubPath::new)
+            .collect::<VFSSubPathBuf>();
+
+        let mut fs_builder = services::Fs::default();
+        fs_builder.root(self.config.fs_root.as_str());
+
+        let fs_op = Operator::new(fs_builder)?
+            .layer(LoggingLayer::default())
+            .finish();
+
+        fs_op.create_dir(dirname.as_str()).await?;
+
+        let fullname = {
+            dirname.push(basename);
+            dirname
+        };
+
+        fs_op.write_with(fullname.as_str(), data).await?;
+
+        Ok(DalStoredUrl::RelativePath {
+            path: fullname.to_string(),
+        })
+    }
+}
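
Note: a hedged call-site sketch for the new blob store; `poster_bytes` and the subscriber pid are stand-ins, while `store_blob` and its argument order come from the file above:

```rust
// Store a poster for subscriber "sub-123"; the file lands under
// "<fs_root>/sub-123/poster/<uuid>.jpg" and a relative URL comes back.
let dal = AppDalContext::new(AppDalConf { fs_root: "data".to_string() });
let stored = dal
    .store_blob(AppDalContentCategory::Poster, ".jpg", poster_bytes, "sub-123")
    .await?;
if let DalStoredUrl::RelativePath { path } = stored {
    println!("stored at {path}");
}
```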

@@ -6,4 +6,6 @@ pub enum DownloaderError {
     InvalidMime { expected: String, found: String },
     #[error("Invalid url format")]
     InvalidUrlFormat(#[from] url::ParseError),
+    #[error("QBit api error: {0:?}")]
+    QBitAPIError(#[from] qbit_rs::Error),
 }

@@ -26,7 +26,7 @@ pub struct QBittorrentDownloader {
 }

 impl QBittorrentDownloader {
-    pub fn from_downloader_model(model: downloaders::Model) -> Result<Self, DownloaderError> {
+    pub async fn from_downloader_model(model: downloaders::Model) -> Result<Self, DownloaderError> {
         if model.category != DownloaderCategory::QBittorrent {
             return Err(DownloaderError::InvalidMime {
                 expected: DownloaderCategory::QBittorrent.to_string(),
@@ -40,16 +40,21 @@ impl QBittorrentDownloader {
         let credential = Credential::new(model.username, model.password);
         let client = Qbit::new(endpoint_url.clone(), credential);

+        client
+            .login(false)
+            .await
+            .map_err(DownloaderError::QBitAPIError)?;
+
         Ok(Self {
             client,
             endpoint_url,
             subscriber_id: model.subscriber_id,
-            save_path: model.download_path,
+            save_path: model.save_path,
         })
     }

     async fn api_version(&self) -> eyre::Result<String> {
-        let result = self.client.get_webapi_version().await?;
+        let result = self.client.get_version().await?;
         Ok(result)
     }
 }
@@ -179,3 +184,38 @@ impl Debug for QBittorrentDownloader {
         .finish()
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    fn get_tmp_qbit_test_folder() -> &'static str {
+        if cfg!(windows) {
+            "~/AppData/Local/Temp/konobangu/qbit"
+        } else {
+            "/tmp/konobangu/qbit"
+        }
+    }
+
+    #[tokio::test]
+    async fn test_add_torrents() {
+        let downloader = QBittorrentDownloader::from_downloader_model(downloaders::Model {
+            created_at: Default::default(),
+            updated_at: Default::default(),
+            id: 0,
+            category: DownloaderCategory::QBittorrent,
+            endpoint: "http://127.0.0.1:8080".to_string(),
+            password: "".to_string(),
+            username: "".to_string(),
+            subscriber_id: 0,
+            save_path: get_tmp_qbit_test_folder().to_string(),
+        })
+        .await
+        .expect("should create downloader success");
+
+        downloader
+            .check_connection()
+            .await
+            .expect("should check connection success");
+    }
+}
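
Note: construction now performs a network login, so call sites must await it; both calls in this sketch appear in the diff, the surrounding context is assumed:

```rust
// `model` is a downloaders::Model row; login happens inside the constructor.
// The new test exercises exactly this pair of calls against a local
// qBittorrent WebUI on 127.0.0.1:8080.
let downloader = QBittorrentDownloader::from_downloader_model(model).await?;
downloader.check_connection().await?;
```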

@@ -55,15 +55,15 @@ pub trait TorrentDownloader {
         downloads: &[&downloads::Model],
         mut bangumi: bangumi::Model,
     ) -> eyre::Result<bangumi::Model> {
-        if bangumi.sub_path.is_none() {
+        if bangumi.save_path.is_none() {
             let gen_sub_path = gen_bangumi_sub_path(&bangumi);
             let mut bangumi_active = bangumi.into_active_model();
-            bangumi_active.sub_path = ActiveValue::Set(Some(gen_sub_path.to_string()));
+            bangumi_active.save_path = ActiveValue::Set(Some(gen_sub_path.to_string()));
             bangumi = bangumi_active.update(db).await?;
         }

         let sub_path = bangumi
-            .sub_path
+            .save_path
             .as_ref()
             .unwrap_or_else(|| unreachable!("must have a sub path"));
@@ -81,11 +81,13 @@ pub trait TorrentDownloader {
     }
 }

-pub fn build_torrent_downloader_from_downloader_model(
+pub async fn build_torrent_downloader_from_downloader_model(
     model: downloaders::Model,
 ) -> eyre::Result<Box<dyn TorrentDownloader>> {
     Ok(Box::new(match &model.category {
-        DownloaderCategory::QBittorrent => QBittorrentDownloader::from_downloader_model(model)?,
+        DownloaderCategory::QBittorrent => {
+            QBittorrentDownloader::from_downloader_model(model).await?
+        }
     }))
 }
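
Note: the async change propagates to the factory; a call-site sketch:

```rust
// The factory is now async because the qBittorrent variant logs in while
// being constructed.
let downloader = build_torrent_downloader_from_downloader_model(model).await?;
```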

@@ -1,12 +1,13 @@
 #![feature(async_closure)]
 pub mod app;
+pub mod config;
 pub mod controllers;
+pub mod dal;
 pub mod downloaders;
 pub mod migrations;
 pub mod models;
 pub mod parsers;
 pub mod path;
-pub mod subscriptions;
 pub mod tasks;
 pub mod views;
 pub mod workers;

@@ -1,17 +1,18 @@
+use loco_rs::app::AppContext;
 use sea_orm::{prelude::*, sea_query::OnConflict, ActiveValue, Condition, QueryOrder, QuerySelect};

 pub use crate::models::entities::downloads::*;
 use crate::{
     models::subscriptions::{self, SubscriptionCategory},
-    subscriptions::mikan::{MikanSubscriptionEngine, MikanSubscriptionItem},
+    parsers::mikan::{parse_mikan_rss_items_from_rss_link, MikanRssItem},
 };

 #[async_trait::async_trait]
 impl ActiveModelBehavior for ActiveModel {}

 impl ActiveModel {
-    pub fn from_mikan_subscription_item(m: MikanSubscriptionItem, subscription_id: i32) -> Self {
-        Self {
+    pub fn from_mikan_rss_item(m: MikanRssItem, subscription_id: i32) -> Self {
+        let _ = Self {
             origin_name: ActiveValue::Set(m.title.clone()),
             display_name: ActiveValue::Set(m.title),
             subscription_id: ActiveValue::Set(subscription_id),
@@ -22,20 +23,20 @@ impl ActiveModel {
             all_size: ActiveValue::Set(m.content_length),
             homepage: ActiveValue::Set(m.homepage),
             ..Default::default()
-        }
+        };
+        todo!()
     }
 }

 impl Model {
     pub async fn pull_subscription(
-        db: &DatabaseConnection,
+        ctx: AppContext,
         item: &subscriptions::Model,
     ) -> eyre::Result<Vec<i32>> {
+        let db = &ctx.db;
         match &item.category {
             SubscriptionCategory::Mikan => {
-                let items =
-                    MikanSubscriptionEngine::subscription_items_from_rss_url(&item.source_url)
-                        .await?;
+                let items = parse_mikan_rss_items_from_rss_link(&item.source_url).await?;
                 let all_items = items.collect::<Vec<_>>();

                 let last_old_id = {
@@ -55,7 +56,7 @@ impl Model {

                 let new_items = all_items
                     .into_iter()
-                    .map(|i| ActiveModel::from_mikan_subscription_item(i, item.id));
+                    .map(|i| ActiveModel::from_mikan_rss_item(i, item.id));

                 let insert_result = Entity::insert_many(new_items)
                     .on_conflict(OnConflict::column(Column::Url).do_nothing().to_owned())
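
Note: a hypothetical call site for the reworked signature (the `ctx` clone and the subscription row are assumptions; the return type is the commit's):

```rust
// pull_subscription now receives the whole loco-rs AppContext and takes the
// DB handle from it; it resolves to the ids of newly inserted rows.
let new_ids = downloads::Model::pull_subscription(ctx.clone(), &subscription).await?;
println!("{} new downloads", new_ids.len());
```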

@@ -19,15 +19,14 @@ pub struct Model {
     pub official_title: String,
     pub season: i32,
     pub season_raw: Option<String>,
-    pub group_name: Option<String>,
+    pub fansub: Option<String>,
     pub resolution: Option<String>,
     pub source: Option<String>,
     pub filter: Option<BangumiFilter>,
     pub subtitle: Option<String>,
     pub rss_link: Option<String>,
     pub poster_link: Option<String>,
     pub rule_name: Option<String>,
-    pub sub_path: Option<String>,
     pub save_path: Option<String>,
     pub deleted: bool,
 }

@@ -25,7 +25,7 @@ pub struct Model {
     pub password: String,
     pub username: String,
     pub subscriber_id: i32,
-    pub download_path: String,
+    pub save_path: String,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

@@ -27,10 +27,11 @@ pub struct Model {
     pub updated_at: DateTime,
     #[sea_orm(primary_key)]
     pub id: i32,
     pub raw_name: String,
     pub display_name: String,
     pub bangumi_id: i32,
     pub output_name: String,
     pub download_id: i32,
+    pub save_path: String,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

@@ -1,26 +1,58 @@
 use sea_orm::{entity::prelude::*, ActiveValue};
 use serde::{Deserialize, Serialize};

 pub use super::entities::subscriptions::{self, *};
-use crate::subscriptions::defs::RssCreateDto;
+
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub struct SubscriptionCreateFromRssDto {
+    pub rss_link: String,
+    pub display_name: String,
+    pub aggregate: bool,
+    pub enabled: Option<bool>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+#[serde(tag = "category")]
+pub enum SubscriptionCreateDto {
+    Mikan(SubscriptionCreateFromRssDto),
+}

 #[async_trait::async_trait]
 impl ActiveModelBehavior for ActiveModel {}

-impl Model {
-    pub async fn add_rss(
-        db: &DatabaseConnection,
-        create_dto: RssCreateDto,
+impl ActiveModel {
+    pub fn from_create_dto(create_dto: SubscriptionCreateDto, subscriber_id: i32) -> Self {
+        match create_dto {
+            SubscriptionCreateDto::Mikan(create_dto) => {
+                Self::from_rss_create_dto(SubscriptionCategory::Mikan, create_dto, subscriber_id)
+            }
+        }
+    }
+
+    fn from_rss_create_dto(
+        category: SubscriptionCategory,
+        create_dto: SubscriptionCreateFromRssDto,
         subscriber_id: i32,
-    ) -> eyre::Result<Self> {
-        let subscription = ActiveModel {
+    ) -> Self {
+        Self {
             display_name: ActiveValue::Set(create_dto.display_name),
             enabled: ActiveValue::Set(create_dto.enabled.unwrap_or(false)),
             aggregate: ActiveValue::Set(create_dto.aggregate),
             subscriber_id: ActiveValue::Set(subscriber_id),
-            category: ActiveValue::Set(SubscriptionCategory::Mikan),
+            category: ActiveValue::Set(category),
             source_url: ActiveValue::Set(create_dto.rss_link),
             ..Default::default()
-        };
+        }
     }
 }

+impl Model {
+    pub async fn add_subscription(
+        db: &DatabaseConnection,
+        create_dto: SubscriptionCreateDto,
+        subscriber_id: i32,
+    ) -> eyre::Result<Self> {
+        let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id);
+
+        Ok(subscription.insert(db).await?)
+    }
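
Note: a sketch of the new DTO-driven flow; the RSS link is the one used in the parser tests, while `db` and the subscriber id are stand-ins:

```rust
let create_dto = SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
    rss_link: "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370".to_string(),
    display_name: "Example Mikan feed".to_string(),
    aggregate: false,
    enabled: Some(true),
});
// ActiveModel::from_create_dto runs inside add_subscription.
let subscription = Model::add_subscription(&db, create_dto, 1).await?;
```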

@@ -1,7 +0,0 @@ (file deleted)
-use crate::parsers::errors::ParseError;
-
-pub fn parse_bangumi_season(season_str: &str) -> Result<i32, ParseError> {
-    season_str
-        .parse::<i32>()
-        .map_err(ParseError::BangumiSeasonError)
-}

@@ -6,4 +6,10 @@ pub enum ParseError {
     BangumiSeasonError(#[from] std::num::ParseIntError),
     #[error("Parse file url error: {0}")]
     FileUrlError(#[from] url::ParseError),
+    #[error("Parse {desc} with mime error, expected {expected}, but got {found}")]
+    MimeError {
+        desc: String,
+        expected: String,
+        found: String,
+    },
 }

crates/recorder/src/parsers/html/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
+pub mod html_parser_utils;
+
+pub use html_parser_utils::{get_tag_style, query_selector_first_tag};

@@ -7,18 +7,18 @@ use url::Url;

 use crate::{
     downloaders::{html::download_html, image::download_image},
-    parsers::html_parser::{get_tag_style, query_selector_first_tag},
+    parsers::html::{get_tag_style, query_selector_first_tag},
 };

 pub struct MikanEpisodeMeta {
     pub homepage: Url,
-    pub poster_src: Option<Url>,
     pub poster_data: Option<Bytes>,
+    pub origin_poster_src: Option<Url>,
     pub official_title: String,
 }

 lazy_static! {
-    pub static ref MIKAN_TITLE_SEASON: Regex = Regex::new("第.*季").unwrap();
+    static ref MIKAN_TITLE_SEASON: Regex = Regex::new("第.*季").unwrap();
 }

 pub async fn parse_episode_meta_from_mikan_homepage(
@@ -30,7 +30,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
     let parser = dom.parser();
     let poster_node = query_selector_first_tag(&dom, r"div.bangumi-poster", parser);
     let official_title_node = query_selector_first_tag(&dom, r"p.bangumi-title", parser);
-    let mut poster_src = None;
+    let mut origin_poster_src = None;
     if let Some(style) = poster_node.and_then(get_tag_style) {
         for (prop, _) in style.iter() {
             match prop {
@@ -38,7 +38,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
                     if let Some(Image::Url(path)) = images.first() {
                         if let Ok(url) = Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                         {
-                            poster_src = Some(url);
+                            origin_poster_src = Some(url);
                         }
                     }
                 }
@@ -48,7 +48,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
                         if let Ok(url) =
                             Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                         {
-                            poster_src = Some(url);
+                            origin_poster_src = Some(url);
                         }
                     }
                 }
@@ -57,12 +57,12 @@ pub async fn parse_episode_meta_from_mikan_homepage(
             }
         }
     };
-    poster_src = poster_src.map(|mut p| {
+    origin_poster_src = origin_poster_src.map(|mut p| {
         p.set_query(None);
         p
     });
-    let poster_data = if let Some(p) = poster_src.as_ref() {
-        download_image(p.as_str()).await.ok()
+    let poster_data = if let Some(p) = origin_poster_src.as_ref() {
+        download_image(p.clone()).await.ok()
     } else {
         None
     };
@@ -81,9 +81,9 @@ pub async fn parse_episode_meta_from_mikan_homepage(
         })
         .map(|title| MikanEpisodeMeta {
             homepage: url,
-            poster_src,
-            official_title: title,
             poster_data,
+            official_title: title,
+            origin_poster_src,
         });
     Ok(meta)
 }
@@ -92,7 +92,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
 mod test {
     use url::Url;

-    use crate::parsers::mikan_ep_parser::parse_episode_meta_from_mikan_homepage;
+    use super::parse_episode_meta_from_mikan_homepage;

     #[tokio::test]
     async fn test_parse_mikan() {
@@ -103,13 +103,13 @@ mod test {

         if let Some(ep_meta) = parse_episode_meta_from_mikan_homepage(url.clone()).await? {
             assert_eq!(ep_meta.homepage, url);
-            assert_eq!(ep_meta.official_title, "葬送的芙莉莲");
             assert_eq!(
-                ep_meta.poster_src,
+                ep_meta.origin_poster_src,
                 Some(Url::parse(
                     "https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
                 )?)
             );
+            assert_eq!(ep_meta.official_title, "葬送的芙莉莲");
             let u8_data = ep_meta.poster_data.expect("should have poster data");
             assert!(
                 u8_data.starts_with(&[255, 216, 255, 224]),
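
Note: a usage sketch mirroring the updated test; the episode path segment is a placeholder, not a real id:

```rust
let url = Url::parse("https://mikanani.me/Home/Episode/EPISODE_ID")?; // placeholder
if let Some(ep_meta) = parse_episode_meta_from_mikan_homepage(url.clone()).await? {
    // poster_src is gone; the query-stripped origin URL plus the downloaded
    // bytes are what remain on the meta struct.
    println!("{}: {:?}", ep_meta.official_title, ep_meta.origin_poster_src);
}
```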

@@ -1,10 +1,14 @@
 use chrono::DateTime;
+use reqwest::IntoUrl;
 use serde::{Deserialize, Serialize};

-use crate::downloaders::{bytes::download_bytes, defs::BITTORRENT_MIME_TYPE};
+use crate::{
+    downloaders::{bytes::download_bytes, defs::BITTORRENT_MIME_TYPE},
+    parsers::errors::ParseError,
+};

 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
-pub struct MikanSubscriptionItem {
+pub struct MikanRssItem {
     pub title: String,
     pub homepage: Option<String>,
     pub url: String,
@@ -13,16 +17,18 @@ pub struct MikanSubscriptionItem {
     pub pub_date: Option<i64>,
 }

-impl MikanSubscriptionItem {
-    pub fn from_rss_item(item: rss::Item) -> Option<Self> {
-        let mime_match = item
+impl TryFrom<rss::Item> for MikanRssItem {
+    type Error = ParseError;
+
+    fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
+        let mime_type = item
             .enclosure()
-            .map(|x| x.mime_type == BITTORRENT_MIME_TYPE)
+            .map(|x| x.mime_type.to_string())
             .unwrap_or_default();
-        if mime_match {
+        if mime_type == BITTORRENT_MIME_TYPE {
             let enclosure = item.enclosure.unwrap();

-            Some(MikanSubscriptionItem {
+            Ok(MikanRssItem {
                 title: item.title.unwrap_or_default(),
                 homepage: item.link,
                 url: enclosure.url,
@@ -34,36 +40,36 @@ impl MikanSubscriptionItem {
                     .map(|s| s.timestamp_millis()),
             })
         } else {
-            None
+            Err(ParseError::MimeError {
+                expected: String::from(BITTORRENT_MIME_TYPE),
+                found: mime_type,
+                desc: String::from("MikanRssItem"),
+            })
         }
     }
 }

-pub struct MikanSubscriptionEngine;
+pub async fn parse_mikan_rss_items_from_rss_link(
+    url: impl IntoUrl,
+) -> eyre::Result<impl Iterator<Item = MikanRssItem>> {
+    let bytes = download_bytes(url).await?;

-impl MikanSubscriptionEngine {
-    pub async fn subscription_items_from_rss_url(
-        url: &str,
-    ) -> eyre::Result<impl Iterator<Item = MikanSubscriptionItem>> {
-        let bytes = download_bytes(url).await?;
-
-        let channel = rss::Channel::read_from(&bytes[..])?;
-
-        Ok(channel
-            .items
-            .into_iter()
-            .flat_map(MikanSubscriptionItem::from_rss_item))
-    }
-}
+    let channel = rss::Channel::read_from(&bytes[..])?;
+
+    Ok(channel.items.into_iter().flat_map(MikanRssItem::try_from))
+}

 #[cfg(test)]
 mod tests {
     use url::Url;

+    use super::parse_mikan_rss_items_from_rss_link;
     use crate::downloaders::defs::BITTORRENT_MIME_TYPE;

     #[tokio::test]
     pub async fn test_mikan_subscription_items_from_rss_url() {
         let url = "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370";
-        let items = super::MikanSubscriptionEngine::subscription_items_from_rss_url(url)
+        let items = parse_mikan_rss_items_from_rss_link(url)
             .await
             .expect("should get subscription items from rss url")
             .collect::<Vec<_>>();
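
Note: because `Result` is itself iterable, the `flat_map(MikanRssItem::try_from)` above silently drops items whose enclosure is not a torrent; an equivalent explicit spelling:

```rust
// Same behavior as the flat_map, written out with filter_map.
let items: Vec<MikanRssItem> = channel
    .items
    .into_iter()
    .filter_map(|item| MikanRssItem::try_from(item).ok())
    .collect();
```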

crates/recorder/src/parsers/mikan/mod.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
+pub mod mikan_ep_parser;
+pub mod mikan_rss_parser;
+
+pub use mikan_ep_parser::{parse_episode_meta_from_mikan_homepage, MikanEpisodeMeta};
+pub use mikan_rss_parser::{parse_mikan_rss_items_from_rss_link, MikanRssItem};

@@ -1,8 +1,8 @@
-pub mod bangumi_parser;
 pub mod defs;
 pub mod errors;
-pub mod html_parser;
-pub mod mikan_ep_parser;
-pub mod raw_ep_parser;
+pub mod html;
+pub mod mikan;
+pub mod raw;
 pub mod title_parser;
+pub mod torrent;
 pub mod torrent_parser;

crates/recorder/src/parsers/raw/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
+pub mod raw_ep_parser;
+
+pub use raw_ep_parser::{parse_episode_meta_from_raw_name, RawEpisodeMeta};

@@ -5,7 +5,7 @@ use lazy_static::lazy_static;
 use regex::Regex;
 use serde::{Deserialize, Serialize};

-use super::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE};
+use crate::parsers::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE};

 const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";
@@ -95,7 +95,10 @@ fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> eyre::Resul
     }
     if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw) {
         if m.len() as f32 > (raw.len() as f32) * 0.5 {
-            let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1.replace(&raw, "").chars().collect_vec();
+            let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
+                .replace(&raw, "")
+                .chars()
+                .collect_vec();
             while let Some(ch) = raw1.pop() {
                 if ch == ']' {
                     break;
@@ -168,7 +171,8 @@ fn extract_name_from_title_body_name_section(
     let mut name_zh = None;
     let mut name_jp = None;
     let replaced1 = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
-    let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE.replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
+    let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE
+        .replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
     let trimmed = replaced2.trim();
     let mut split = NAME_EXTRACT_SPLIT_RE
         .split(trimmed)
@@ -256,11 +260,15 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> eyre::Result<RawEpisodeMeta>
     let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
     let fansub = extract_fansub(&raw_title_without_ch_brackets);
     let is_movie = check_is_movie(&raw_title_without_ch_brackets);
-    if let Some(title_re_match_obj) = MOVIE_TITLE_RE.captures(&raw_title_without_ch_brackets).or(TITLE_RE.captures(&raw_title_without_ch_brackets)) {
+    if let Some(title_re_match_obj) = MOVIE_TITLE_RE
+        .captures(&raw_title_without_ch_brackets)
+        .or(TITLE_RE.captures(&raw_title_without_ch_brackets))
+    {
         let mut title_body = title_re_match_obj
             .get(1)
             .map(|s| s.as_str().trim())
-            .unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups")).to_string();
+            .unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"))
+            .to_string();
         let mut title_episode = title_re_match_obj
             .get(2)
             .map(|s| s.as_str().trim())
@@ -306,18 +314,25 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> eyre::Result<RawEpisodeMeta>
 mod tests {
     use super::{parse_episode_meta_from_raw_name, RawEpisodeMeta};

-    struct TestCase {
-        source: &'static str,
-        expected: &'static str,
+    fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
+        let expected: Option<RawEpisodeMeta> = serde_json::from_str(expected).unwrap();
+        let found = parse_episode_meta_from_raw_name(raw_name).ok();
+
+        if expected != found {
+            println!(
+                "expected {} and found {} are not equal",
+                serde_json::to_string_pretty(&expected).unwrap(),
+                serde_json::to_string_pretty(&found).unwrap()
+            )
+        }
+        assert_eq!(expected, found);
     }

     #[test]
-    fn test_parse_episode_meta_from_raw_name() {
-        let test_cases = vec![
-            // all field wrapped by []
-            TestCase {
-                source: r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
-                expected: r#"{
+    fn test_parse_ep_with_all_parts_wrapped() {
+        test_raw_ep_parser_case(
+            r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
+            r#"{
     "name_zh": "我心里危险的东西",
     "name_zh_no_season": "我心里危险的东西",
     "season": 2,
@@ -328,11 +343,14 @@ mod tests {
     "fansub": "新Sub",
     "resolution": "1080P"
 }"#,
-            },
-            // title wrap with []
-            TestCase {
-                source: r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_title_wrapped_by_one_square_bracket_and_season_prefix() {
+        test_raw_ep_parser_case(
+            r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
+            r#"{
     "name_en": "Boku no Kokoro no Yabai Yatsu",
     "name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
     "name_zh": "我内心的糟糕念头",
@@ -345,11 +363,14 @@ mod tests {
     "fansub": "喵萌奶茶屋",
     "resolution": "1080p"
 }"#,
-            },
-            TestCase {
-                // ep+version case
-                source: r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
-                expected: r#"{
+        );
+    }
+
+    #[test]
+    fn test_parse_ep_with_ep_and_version() {
+        test_raw_ep_parser_case(
+            r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
+            r#"{
     "name_en": "Shin no Nakama 2nd",
     "name_en_no_season": "Shin no Nakama",
     "name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
@@ -362,11 +383,14 @@ mod tests {
     "fansub": "LoliHouse",
     "resolution": "1080p"
 }"#,
-            },
-            TestCase {
-                // pure english title case
-                source: r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_en_title_only() {
+        test_raw_ep_parser_case(
+            r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
+            r#"{
     "name_en": "THE MARGINAL SERVICE",
     "name_en_no_season": "THE MARGINAL SERVICE",
     "season": 1,
@@ -376,11 +400,14 @@ mod tests {
     "fansub": "动漫国字幕组&LoliHouse",
     "resolution": "1080p"
 }"#,
-            },
-            TestCase {
-                // two zh titles case
-                source: r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_two_zh_title() {
+        test_raw_ep_parser_case(
+            r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
+            r#"{
     "name_en": "Nozomanu Fushi no Boukensha",
     "name_en_no_season": "Nozomanu Fushi no Boukensha",
     "name_zh": "事与愿违的不死冒险者",
@@ -393,11 +420,14 @@ mod tests {
     "fansub": "LoliHouse",
     "resolution": "1080p"
 }"#,
-            },
-            TestCase {
-                // en+zh+jp case
-                source: r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_en_zh_jp_titles() {
+        test_raw_ep_parser_case(
+            r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
+            r#"{
     "name_en": "Pon no Michi",
     "name_jp": "ぽんのみち",
     "name_zh": "碰之道",
@@ -412,11 +442,14 @@ mod tests {
     "fansub": "喵萌奶茶屋&LoliHouse",
     "resolution": "1080p"
 }"#,
-            },
-            TestCase {
-                // season nth case
-                source: r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_nth_season() {
+        test_raw_ep_parser_case(
+            r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
+            r#"{
     "name_en": "Yowai Character Tomozakikun",
     "name_en_no_season": "Yowai Character Tomozakikun",
     "name_zh": "弱角友崎同学 2nd STAGE",
@@ -429,11 +462,14 @@ mod tests {
     "fansub": "ANi",
     "resolution": "1080P"
 }"#,
-            },
-            TestCase {
-                // season en + season zh case
-                source: r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_season_en_and_season_zh() {
+        test_raw_ep_parser_case(
+            r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
+            r#"{
     "name_en": "Kingdom S5",
     "name_en_no_season": "Kingdom",
     "name_zh": "王者天下 第五季",
@@ -446,11 +482,14 @@ mod tests {
     "fansub": "豌豆字幕组&LoliHouse",
     "resolution": "1080p"
 }"#,
-            },
-            // ad-hoc cases for 千夏字幕组 _sep style
-            TestCase {
-                source: r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_airota_fansub_style_case1() {
+        test_raw_ep_parser_case(
+            r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
+            r#"{
     "name_en": "Alice to Therese no Maboroshi Koujou",
     "name_en_no_season": "Alice to Therese no Maboroshi Koujou",
     "name_zh": "爱丽丝与特蕾丝的虚幻工厂",
@@ -462,11 +501,14 @@ mod tests {
     "fansub": "千夏字幕组",
     "resolution": "1080p"
 }"#,
-            },
-            // ad-hoc cases for 千夏字幕组 _sep style starting with ") "
-            TestCase {
-                source: r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_airota_fansub_style_case2() {
+        test_raw_ep_parser_case(
+            r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
+            r#"{
     "name_en": "Yuru Camp Movie",
     "name_en_no_season": "Yuru Camp Movie",
     "name_jp": null,
@@ -481,11 +523,14 @@ mod tests {
     "fansub": "千夏字幕组&喵萌奶茶屋",
     "resolution": "2160p"
 }"#,
-            },
-            // title split by ][
-            TestCase {
-                source: r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_many_square_brackets_split_title() {
+        test_raw_ep_parser_case(
+            r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
+            r#"{
     "name_en": "Yuru Camp",
     "name_en_no_season": "Yuru Camp",
     "name_zh": "剧场版-摇曳露营",
@@ -496,11 +541,14 @@ mod tests {
     "fansub": "MCE汉化组",
     "resolution": "1080P"
 }"#,
-            },
-            // single title block split by space + netflex
-            TestCase {
-                source: r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
-                expected: r#"
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_square_brackets_wrapped_and_space_split() {
+        test_raw_ep_parser_case(
+            r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
+            r#"
 {
     "name_en": "Delicious in Dungeon",
     "name_en_no_season": "Delicious in Dungeon",
@@ -514,11 +562,14 @@ mod tests {
     "resolution": "1080P"
 }
 "#,
-            },
-            // start with season like 1月新番
-            TestCase {
-                source: r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_start_with_brackets_wrapped_season_info_prefix() {
+        test_raw_ep_parser_case(
+            r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
+            r#"{
     "name_en": "Dungeon Meshi",
     "name_en_no_season": "Dungeon Meshi",
     "name_zh": "迷宫饭",
@@ -529,11 +580,14 @@ mod tests {
     "fansub": "爱恋字幕社",
     "resolution": "1080P"
 }"#,
-            },
-            // prevent [ ] pair to small, chars size in biggest [ ] in title should greater than len(title_body) * 0.5
-            TestCase {
-                source: r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
-                expected: r#"{
+        )
+    }
+
+    #[test]
+    fn test_parse_ep_with_small_no_title_extra_brackets_case() {
+        test_raw_ep_parser_case(
+            r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
+            r#"{
     "name_en": "Mahou Shoujo ni Akogarete",
     "name_en_no_season": "Mahou Shoujo ni Akogarete",
     "name_zh": "梦想成为魔法少女 [年龄限制版]",
@@ -545,11 +599,15 @@ mod tests {
     "fansub": "ANi",
     "resolution": "1080P"
 }"#,
-            },
-            // TODO: failed case, can not find capture point
-            TestCase {
-                source: r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
-                expected: r#"{
+        )
+    }
+
+    // TODO: FIXME
+    #[test]
+    fn test_bad_case() {
+        test_raw_ep_parser_case(
+            r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
+            r#"{
     "name_zh": "摇曳露营△剧场版",
     "name_zh_no_season": "摇曳露营△剧场版",
     "season": 1,
@@ -560,21 +618,6 @@ mod tests {
     "fansub": "7³ACG x 桜都字幕组",
     "resolution": "1080p"
 }"#,
-            },
-        ];
-
-        for case in test_cases {
-            let expected: Option<RawEpisodeMeta> = serde_json::from_str(case.expected).unwrap();
-            let found = parse_episode_meta_from_raw_name(case.source).ok();
-
-            if expected != found {
-                println!(
-                    "expected {} and found {} are not equal",
-                    serde_json::to_string_pretty(&expected).unwrap(),
-                    serde_json::to_string_pretty(&found).unwrap()
-                )
-            }
-            assert_eq!(expected, found);
-        }
+        )
     }
 }

@@ -1,9 +1 @@
 use crate::path::VFSPath;
-
-pub fn parse_torrent_title(
-    torrent_path: VFSPath<'_>,
-    torrent_name: Option<&str>,
-    season: Option<i32>,
-    file_type: &str,
-) {
-}

crates/recorder/src/parsers/torrent/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
+mod torrent_ep_parser;

crates/recorder/src/parsers/torrent/torrent_ep_parser.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub struct TorrentEpisodeMediaMeta {}
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub struct TorrentEpisodeSubtitleMeta {}
+
+pub fn parse_episode_media_meta_from_torrent(
+    torrent_path: &str,
+    torrent_name: Option<&str>,
+    season: Option<i32>,
+) -> eyre::Result<TorrentEpisodeMediaMeta> {
+    todo!()
+}
+
+pub fn parse_episode_subtitle_meta_from_torrent(
+    torrent_path: &str,
+    torrent_name: Option<&str>,
+    season: Option<i32>,
+) -> eyre::Result<TorrentEpisodeMediaMeta> {
+    todo!()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{
+        parse_episode_media_meta_from_torrent, parse_episode_subtitle_meta_from_torrent,
+        TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta,
+    };
+
+    pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) {
+        let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).unwrap();
+        let found = parse_episode_media_meta_from_torrent(raw_name, None, None).ok();
+
+        if expected != found {
+            println!(
+                "expected {} and found {} are not equal",
+                serde_json::to_string_pretty(&expected).unwrap(),
+                serde_json::to_string_pretty(&found).unwrap()
+            )
+        }
+        assert_eq!(expected, found);
+    }
+}

@@ -7,7 +7,7 @@ pub fn get_path_basename<'a>(path: &'a VFSPath) -> &'a str {
     path.basename()
 }

-pub fn get_group(group_and_title: &str) -> (Option<&str>, &str) {
+pub fn get_fansub(group_and_title: &str) -> (Option<&str>, &str) {
     let n = BRACKETS_REG
         .split(group_and_title)
         .map(|s| s.trim())
@@ -72,7 +72,7 @@ pub fn parse_torrent<'a>(
         .get(1)
         .unwrap_or_else(|| unreachable!("should have 1 group"))
         .as_str();
-    let (group, title) = get_group(group_and_title);
+    let (group, title) = get_fansub(group_and_title);
     let season_and_title = get_season_and_title(title);
     let season = season.unwrap_or(season_and_title.1);
     let title = season_and_title.0;

@@ -3,7 +3,7 @@ use std::collections::HashSet;
 use crate::{
     downloaders::defs::Torrent,
     models::{bangumi, subscribers},
-    parsers::{bangumi_parser::parse_bangumi_season, defs::SEASON_REGEX},
+    parsers::defs::SEASON_REGEX,
     path::{VFSPath, VFSSubPathBuf},
 };
@@ -40,13 +40,12 @@ pub fn path_to_bangumi<'a>(
     for part in save_path.components().map(|s| s.as_str()) {
         if let Some(match_result) = SEASON_REGEX.captures(part) {
             season = Some(
-                parse_bangumi_season(
-                    match_result
-                        .get(2)
-                        .unwrap_or_else(|| unreachable!("must have a season"))
-                        .as_str(),
-                )
-                .unwrap_or_else(|e| unreachable!("{}", e.to_string())),
+                match_result
+                    .get(2)
+                    .unwrap_or_else(|| unreachable!("must have a season"))
+                    .as_str()
+                    .parse::<i32>()
+                    .unwrap_or_else(|e| unreachable!("{}", e.to_string())),
             );
         } else if !downloader_parts.contains(part) {
             bangumi_name = Some(part);
@@ -71,7 +70,7 @@ pub fn gen_bangumi_sub_path(data: &bangumi::Model) -> VFSSubPathBuf {
 }

 pub fn rule_name(bgm: &bangumi::Model, conf: &subscribers::SubscriberBangumiConfig) -> String {
-    if let (Some(true), Some(group_name)) = (conf.leading_group_tag, &bgm.group_name) {
+    if let (Some(true), Some(group_name)) = (conf.leading_group_tag, &bgm.fansub) {
         format!("[{}] {} S{}", group_name, bgm.official_title, bgm.season)
     } else {
         format!("{} S{}", bgm.official_title, bgm.season)

@@ -1,6 +1,5 @@
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;

 use bytes::Buf;
 use lazy_static::lazy_static;
 pub use uni_path::{Path as VFSSubPath, PathBuf as VFSSubPathBuf};

@@ -1 +0,0 @@ (file deleted; its single line was empty)
-

@@ -1,9 +0,0 @@ (file deleted)
-use crate::models::prelude::*;
-
-pub struct RssCreateDto {
-    pub rss_link: String,
-    pub display_name: String,
-    pub aggregate: bool,
-    pub category: SubscriptionCategory,
-    pub enabled: Option<bool>,
-}

@@ -1,3 +0,0 @@ (file deleted)
-pub mod defs;
-pub mod bangumi;
-pub mod mikan;

@@ -1,43 +0,0 @@ (file deleted)
-use std::time::Duration;
-
-use loco_rs::prelude::*;
-use serde::{Deserialize, Serialize};
-use tokio::time::sleep;
-
-use crate::models::subscribers;
-
-pub struct DownloadWorker {
-    pub ctx: AppContext,
-}
-
-#[derive(Deserialize, Debug, Serialize)]
-pub struct DownloadWorkerArgs {
-    pub user_guid: String,
-}
-
-impl worker::AppWorker<DownloadWorkerArgs> for DownloadWorker {
-    fn build(ctx: &AppContext) -> Self {
-        Self { ctx: ctx.clone() }
-    }
-}
-
-#[async_trait]
-impl worker::Worker<DownloadWorkerArgs> for DownloadWorker {
-    async fn perform(&self, args: DownloadWorkerArgs) -> worker::Result<()> {
-        // TODO: Some actual work goes here...
-        println!("================================================");
-        println!("Sending payment report to user {}", args.user_guid);
-
-        sleep(Duration::from_millis(2000)).await;
-
-        let all = subscribers::Entity::find()
-            .all(&self.ctx.db)
-            .await
-            .map_err(Box::from)?;
-        for user in &all {
-            println!("user: {}", user.id);
-        }
-        println!("================================================");
-        Ok(())
-    }
-}

@@ -1 +1 @@
-pub mod downloader;
+pub mod subscription_worker;

crates/recorder/src/workers/subscription_worker.rs (new file, 32 lines)
@@ -0,0 +1,32 @@
+use loco_rs::prelude::*;
+use serde::{Deserialize, Serialize};
+
+use crate::models::subscriptions;
+
+pub struct SubscriptionWorker {
+    pub ctx: AppContext,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct SubscriptionWorkerArgs {
+    pub subscription: subscriptions::Model,
+}
+
+impl worker::AppWorker<SubscriptionWorkerArgs> for SubscriptionWorker {
+    fn build(ctx: &AppContext) -> Self {
+        Self { ctx: ctx.clone() }
+    }
+}
+
+#[async_trait]
+impl worker::Worker<SubscriptionWorkerArgs> for SubscriptionWorker {
+    async fn perform(&self, args: SubscriptionWorkerArgs) -> worker::Result<()> {
+        println!("================================================");
+
+        let db = &self.ctx.db;
+        let storage = &self.ctx.storage;
+
+        println!("================================================");
+        Ok(())
+    }
+}
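
Note: a hedged enqueue sketch; `perform_later` follows loco-rs's AppWorker docs and is an assumption here, while the args struct is the one defined above:

```rust
// Queue a background pull for one subscription row.
SubscriptionWorker::perform_later(&ctx, SubscriptionWorkerArgs { subscription }).await?;
```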