feat: add mikan and raw parsers

@@ -36,6 +36,16 @@ rss = "2.0.7"
 bytes = "1.5.0"
+futures = "0.3.30"
+itertools = "0.12.1"
 qbit-rs = "0.4.1"
 url = "2.5.0"
+fancy-regex = "0.13.0"
+regex = "1.10.3"
+lazy_static = "1.4.0"
+maplit = "1.0.2"
+uni-path = "1.51.1"
+tl = { version = "0.7.8", features = ["simd"] }
+lightningcss = "1.0.0-alpha.54"
+html-escape = "0.2.13"

 [lib]
 name = "recorder"

@@ -1,10 +1,10 @@
-#![allow(unused_imports)]
 use eyre::Context;
+#[allow(unused_imports)]
 use loco_rs::{cli::playground, prelude::*};

-async fn fetch_and_parse_rss_demo () -> eyre::Result<()> {
+async fn fetch_and_parse_rss_demo() -> eyre::Result<()> {
     let url =
-        "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
+        "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";

     let res = reqwest::get(url).await?.bytes().await?;
     let channel = rss::Channel::read_from(&res[..])?;

@@ -14,7 +14,7 @@ use loco_rs::{
 use sea_orm::DatabaseConnection;

 use crate::{
-    controllers, migrations::Migrator, models::_entities::subscribers,
+    controllers, migrations::Migrator, models::entities::subscribers,
     workers::downloader::DownloadWorker,
 };

@@ -1,6 +1,6 @@
 use loco_rs::prelude::*;

-use crate::{models::_entities::subscribers, views::subscribers::CurrentResponse};
+use crate::{models::entities::subscribers, views::subscribers::CurrentResponse};

 async fn current(State(ctx): State<AppContext>) -> Result<Json<CurrentResponse>> {
     let subscriber = subscribers::Model::find_root(&ctx.db).await?;

(deleted file)
@@ -1,6 +0,0 @@
-use bytes::Bytes;
-
-pub async fn download_bytes (url: &str) -> eyre::Result<Bytes> {
-    let bytes = reqwest::get(url).await?.bytes().await?;
-    Ok(bytes)
-}

(deleted file)
@@ -1 +0,0 @@
-pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";

(deleted file)
@@ -1,4 +0,0 @@
-pub mod aria;
-pub mod qbitorrent;
-pub mod defs;
-pub mod bytes;

crates/recorder/src/downloaders/aria.rs (new file, 67 lines)
@@ -0,0 +1,67 @@
#![allow(unused_variables)]
use super::{
    defs::{Torrent, TorrentFilter, TorrentSources},
    torrent_downloader::TorrentDownloader,
};
use crate::path::{VFSPathBuf, VFSSubPath};

#[derive(Debug)]
pub struct AriaDownloader {}

#[async_trait::async_trait]
impl TorrentDownloader for AriaDownloader {
    async fn get_torrents_info(
        &self,
        status_filter: TorrentFilter,
        category: String,
        tag: Option<String>,
    ) -> eyre::Result<Vec<Torrent>> {
        unimplemented!()
    }

    async fn add_torrents(
        &self,
        source: TorrentSources,
        save_path: String,
        category: Option<String>,
    ) -> eyre::Result<()> {
        unimplemented!()
    }

    async fn delete_torrents(&self, hashes: Vec<String>) -> eyre::Result<()> {
        unimplemented!()
    }

    async fn rename_torrent_file(
        &self,
        hash: &str,
        old_path: &str,
        new_path: &str,
    ) -> eyre::Result<()> {
        unimplemented!()
    }

    async fn move_torrents(&self, hashes: Vec<String>, new_path: &str) -> eyre::Result<()> {
        unimplemented!()
    }

    async fn get_torrent_path(&self, hashes: String) -> eyre::Result<Option<String>> {
        unimplemented!()
    }

    async fn check_connection(&self) -> eyre::Result<()> {
        unimplemented!()
    }

    async fn set_torrents_category(&self, hashes: Vec<String>, category: &str) -> eyre::Result<()> {
        unimplemented!()
    }

    async fn add_torrent_tags(&self, hashes: Vec<String>, tags: Vec<String>) -> eyre::Result<()> {
        unimplemented!()
    }

    fn get_save_path(&self, sub_path: &VFSSubPath) -> VFSPathBuf {
        unimplemented!()
    }
}

crates/recorder/src/downloaders/bytes.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
use bytes::Bytes;
use reqwest::IntoUrl;

use super::defs::DEFAULT_USER_AEGNT;

pub async fn download_bytes<T: IntoUrl>(url: T) -> eyre::Result<Bytes> {
    let request_client = reqwest::Client::builder()
        .user_agent(DEFAULT_USER_AEGNT)
        .build()?;
    let bytes = request_client.get(url).send().await?.bytes().await?;
    Ok(bytes)
}

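A minimal usage sketch, not part of the commit (assumes a tokio runtime; the URL is a placeholder):

// Sketch: fetch raw bytes with the crate's shared user-agent settings.
use recorder::downloaders::bytes::download_bytes;

#[tokio::main]
async fn main() -> eyre::Result<()> {
    // Placeholder URL; any type implementing reqwest::IntoUrl works here.
    let bytes = download_bytes("https://example.com/sample.torrent").await?;
    println!("fetched {} bytes", bytes.len());
    Ok(())
}
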
crates/recorder/src/downloaders/defs.rs (new file, 89 lines)
@@ -0,0 +1,89 @@
pub use qbit_rs::model::{
    Torrent as QbitTorrent, TorrentContent as QbitTorrentContent,
    TorrentFilter as QbitTorrentFilter, TorrentSource as QbitTorrentSource,
};
use serde::{Deserialize, Serialize};
use url::Url;

pub const BITTORRENT_MIME_TYPE: &str = "application/x-bittorrent";
pub const DEFAULT_USER_AEGNT: &str = "Wget/1.13.4 (linux-gnu)";

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum TorrentFilter {
    All,
    Downloading,
    Completed,
    Paused,
    Active,
    Inactive,
    Resumed,
    Stalled,
    StalledUploading,
    StalledDownloading,
    Errored,
}

impl From<TorrentFilter> for QbitTorrentFilter {
    fn from(val: TorrentFilter) -> Self {
        match val {
            TorrentFilter::All => QbitTorrentFilter::All,
            TorrentFilter::Downloading => QbitTorrentFilter::Downloading,
            TorrentFilter::Completed => QbitTorrentFilter::Completed,
            TorrentFilter::Paused => QbitTorrentFilter::Paused,
            TorrentFilter::Active => QbitTorrentFilter::Active,
            TorrentFilter::Inactive => QbitTorrentFilter::Inactive,
            TorrentFilter::Resumed => QbitTorrentFilter::Resumed,
            TorrentFilter::Stalled => QbitTorrentFilter::Stalled,
            TorrentFilter::StalledUploading => QbitTorrentFilter::StalledUploading,
            TorrentFilter::StalledDownloading => QbitTorrentFilter::StalledDownloading,
            TorrentFilter::Errored => QbitTorrentFilter::Errored,
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TorrentSources {
    Urls { urls: Vec<Url> },
    TorrentFiles { torrents: Vec<u8> },
}

impl From<TorrentSources> for QbitTorrentSource {
    fn from(value: TorrentSources) -> Self {
        match value {
            TorrentSources::Urls { urls } => QbitTorrentSource::Urls {
                urls: qbit_rs::model::Sep::from(urls),
            },
            TorrentSources::TorrentFiles { torrents } => {
                QbitTorrentSource::TorrentFiles { torrents }
            }
        }
    }
}

pub trait TorrentContent {
    fn get_name(&self) -> &str;
}

impl TorrentContent for QbitTorrentContent {
    fn get_name(&self) -> &str {
        self.name.as_str()
    }
}

pub enum Torrent {
    Qbit {
        torrent: QbitTorrent,
        contents: Vec<QbitTorrentContent>,
    },
}

impl Torrent {
    pub fn iter_files(&self) -> impl Iterator<Item = &dyn TorrentContent> {
        match self {
            Torrent::Qbit { contents, .. } => {
                contents.iter().map(|item| item as &dyn TorrentContent)
            }
        }
    }
}

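A sketch, not part of the commit: the `From` impls above keep qbit-rs types at the API boundary, so call sites deal only in the crate's own enums and convert once when talking to the client.

// Sketch: converting the crate-level types into their qbit-rs counterparts.
use recorder::downloaders::defs::{
    QbitTorrentFilter, QbitTorrentSource, TorrentFilter, TorrentSources,
};

fn to_qbit(filter: TorrentFilter, sources: TorrentSources) -> (QbitTorrentFilter, QbitTorrentSource) {
    (filter.into(), sources.into())
}
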
crates/recorder/src/downloaders/error.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
use thiserror::Error;

#[derive(Error, Debug)]
pub enum DownloaderError {
    #[error("Invalid mime (expected {expected:?}, got {found:?})")]
    InvalidMime { expected: String, found: String },
    #[error("Invalid url format")]
    InvalidUrlFormat(#[from] url::ParseError),
}

crates/recorder/src/downloaders/html.rs (new file, 11 lines)
@@ -0,0 +1,11 @@
use reqwest::IntoUrl;

use super::defs::DEFAULT_USER_AEGNT;

pub async fn download_html<U: IntoUrl>(url: U) -> eyre::Result<String> {
    let request_client = reqwest::Client::builder()
        .user_agent(DEFAULT_USER_AEGNT)
        .build()?;
    let content = request_client.get(url).send().await?.text().await?;
    Ok(content)
}

crates/recorder/src/downloaders/image.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
use bytes::Bytes;
use reqwest::IntoUrl;

use super::bytes::download_bytes;

pub async fn download_image<U: IntoUrl>(url: U) -> eyre::Result<Bytes> {
    download_bytes(url).await
}

crates/recorder/src/downloaders/mod.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
pub mod aria;
pub mod bytes;
pub mod defs;
pub mod error;
pub mod html;
pub mod qbitorrent;
pub mod torrent_downloader;
pub mod image;

crates/recorder/src/downloaders/qbitorrent.rs (new file, 181 lines)
@@ -0,0 +1,181 @@
use std::fmt::Debug;

use eyre::OptionExt;
use futures::future::try_join_all;
use qbit_rs::{
    model::{AddTorrentArg, Credential, GetTorrentListArg, NonEmptyStr},
    Qbit,
};
use url::Url;

use super::{
    defs::{Torrent, TorrentFilter, TorrentSources},
    error::DownloaderError,
    torrent_downloader::TorrentDownloader,
};
use crate::{
    models::{entities::downloaders, prelude::DownloaderCategory},
    path::{VFSPathBuf, VFSSubPath},
};

pub struct QBittorrentDownloader {
    pub subscriber_id: i32,
    pub endpoint_url: Url,
    pub client: Qbit,
    pub save_path: String,
}

impl QBittorrentDownloader {
    pub fn from_downloader_model(model: downloaders::Model) -> Result<Self, DownloaderError> {
        if model.category != DownloaderCategory::QBittorrent {
            return Err(DownloaderError::InvalidMime {
                expected: DownloaderCategory::QBittorrent.to_string(),
                found: model.category.to_string(),
            });
        }

        let endpoint_url = model
            .endpoint_url()
            .map_err(DownloaderError::InvalidUrlFormat)?;
        let credential = Credential::new(model.username, model.password);
        let client = Qbit::new(endpoint_url.clone(), credential);

        Ok(Self {
            client,
            endpoint_url,
            subscriber_id: model.subscriber_id,
            save_path: model.download_path,
        })
    }

    async fn api_version(&self) -> eyre::Result<String> {
        let result = self.client.get_webapi_version().await?;
        Ok(result)
    }
}

#[async_trait::async_trait]
impl TorrentDownloader for QBittorrentDownloader {
    async fn get_torrents_info(
        &self,
        status_filter: TorrentFilter,
        category: String,
        tag: Option<String>,
    ) -> eyre::Result<Vec<Torrent>> {
        let arg = GetTorrentListArg {
            filter: Some(status_filter.into()),
            category: Some(category),
            tag,
            ..Default::default()
        };
        let torrent_list = self.client.get_torrent_list(arg).await?;
        let torrent_contents = try_join_all(torrent_list.iter().map(|s| async {
            if let Some(hash) = &s.hash {
                self.client.get_torrent_contents(hash as &str, None).await
            } else {
                Ok(vec![])
            }
        }))
        .await?;
        Ok(torrent_list
            .into_iter()
            .zip(torrent_contents)
            .map(|(torrent, contents)| Torrent::Qbit { torrent, contents })
            .collect::<Vec<_>>())
    }

    async fn add_torrents(
        &self,
        source: TorrentSources,
        save_path: String,
        category: Option<String>,
    ) -> eyre::Result<()> {
        let arg = AddTorrentArg {
            source: source.into(),
            savepath: Some(save_path),
            category,
            auto_torrent_management: Some(false),
            ..Default::default()
        };
        self.client.add_torrent(arg).await?;
        Ok(())
    }

    async fn delete_torrents(&self, hashes: Vec<String>) -> eyre::Result<()> {
        self.client.delete_torrents(hashes, None).await?;
        Ok(())
    }

    async fn rename_torrent_file(
        &self,
        hash: &str,
        old_path: &str,
        new_path: &str,
    ) -> eyre::Result<()> {
        self.client.rename_file(hash, old_path, new_path).await?;
        Ok(())
    }

    async fn move_torrents(&self, hashes: Vec<String>, new_path: &str) -> eyre::Result<()> {
        self.client.set_torrent_location(hashes, new_path).await?;
        Ok(())
    }

    async fn get_torrent_path(&self, hashes: String) -> eyre::Result<Option<String>> {
        let mut torrent_list = self
            .client
            .get_torrent_list(GetTorrentListArg {
                hashes: Some(hashes),
                ..Default::default()
            })
            .await?;
        let torrent = torrent_list.first_mut().ok_or_eyre("No torrent found")?;
        Ok(torrent.save_path.take())
    }

    async fn check_connection(&self) -> eyre::Result<()> {
        self.api_version().await?;
        Ok(())
    }

    async fn set_torrents_category(&self, hashes: Vec<String>, category: &str) -> eyre::Result<()> {
        if category.is_empty() {
            return Err(eyre::anyhow!("Category cannot be empty"));
        }
        let result = self
            .client
            .set_torrent_category(hashes.clone(), category)
            .await;
        if let Err(qbit_rs::Error::ApiError(qbit_rs::ApiError::CategoryNotFound)) = result {
            self.client
                .add_category(
                    NonEmptyStr::new(category)
                        .unwrap_or_else(|| unreachable!("Category cannot be empty")),
                    self.save_path.as_str(),
                )
                .await?;
            self.client.set_torrent_category(hashes, category).await?;
        } else {
            result?;
        }
        Ok(())
    }

    async fn add_torrent_tags(&self, hashes: Vec<String>, tags: Vec<String>) -> eyre::Result<()> {
        self.client.add_torrent_tags(hashes, tags).await?;
        Ok(())
    }

    fn get_save_path(&self, sub_path: &VFSSubPath) -> VFSPathBuf {
        VFSPathBuf::new(self.save_path.clone(), sub_path.to_path_buf())
    }
}

impl Debug for QBittorrentDownloader {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("QBittorrentDownloader")
            .field("subscriber_id", &self.subscriber_id)
            .field("client", &self.endpoint_url.as_str())
            .finish()
    }
}

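A sketch, not part of the commit: wiring a `downloaders::Model` row (assumed already loaded from the database) into a live client and probing it before use.

// Sketch: from_downloader_model validates the category and endpoint URL,
// and check_connection calls get_webapi_version under the hood.
use recorder::downloaders::{
    qbitorrent::QBittorrentDownloader, torrent_downloader::TorrentDownloader,
};

async fn probe(model: recorder::models::downloaders::Model) -> eyre::Result<()> {
    let downloader = QBittorrentDownloader::from_downloader_model(model)?;
    downloader.check_connection().await?;
    Ok(())
}
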
crates/recorder/src/downloaders/torrent_downloader.rs (new file, 109 lines)
@@ -0,0 +1,109 @@
use downloaders::DownloaderCategory;
use sea_orm::{ActiveModelTrait, ActiveValue, DatabaseConnection, IntoActiveModel};
use url::Url;

use super::{
    bytes::download_bytes,
    defs::{Torrent, TorrentFilter, TorrentSources},
    qbitorrent::QBittorrentDownloader,
};
use crate::{
    models::{bangumi, downloaders, downloads},
    path::{torrent_path::gen_bangumi_sub_path, VFSPathBuf, VFSSubPath},
};

#[async_trait::async_trait]
pub trait TorrentDownloader {
    async fn get_torrents_info(
        &self,
        status_filter: TorrentFilter,
        category: String,
        tag: Option<String>,
    ) -> eyre::Result<Vec<Torrent>>;

    async fn add_torrents(
        &self,
        source: TorrentSources,
        save_path: String,
        category: Option<String>,
    ) -> eyre::Result<()>;

    async fn delete_torrents(&self, hashes: Vec<String>) -> eyre::Result<()>;

    async fn rename_torrent_file(
        &self,
        hash: &str,
        old_path: &str,
        new_path: &str,
    ) -> eyre::Result<()>;

    async fn move_torrents(&self, hashes: Vec<String>, new_path: &str) -> eyre::Result<()>;

    async fn get_torrent_path(&self, hashes: String) -> eyre::Result<Option<String>>;

    async fn check_connection(&self) -> eyre::Result<()>;

    async fn set_torrents_category(&self, hashes: Vec<String>, category: &str) -> eyre::Result<()>;

    async fn add_torrent_tags(&self, hashes: Vec<String>, tags: Vec<String>) -> eyre::Result<()>;

    fn get_save_path(&self, sub_path: &VFSSubPath) -> VFSPathBuf;

    async fn add_downlods_for_bangumi<'a, 'b>(
        &self,
        db: &'a DatabaseConnection,
        downloads: &[&downloads::Model],
        mut bangumi: bangumi::Model,
    ) -> eyre::Result<bangumi::Model> {
        if bangumi.sub_path.is_none() {
            let gen_sub_path = gen_bangumi_sub_path(&bangumi);
            let mut bangumi_active = bangumi.into_active_model();
            bangumi_active.sub_path = ActiveValue::Set(Some(gen_sub_path.to_string()));
            bangumi = bangumi_active.update(db).await?;
        }

        let sub_path = bangumi
            .sub_path
            .as_ref()
            .unwrap_or_else(|| unreachable!("must have a sub path"));

        let mut torrent_urls = vec![];
        for m in downloads.iter() {
            torrent_urls.push(Url::parse(&m.url as &str)?);
        }

        let source = build_torrent_source_from_urls(torrent_urls.into_iter()).await?;

        self.add_torrents(source, sub_path.to_string(), Some("bangumi".to_string()))
            .await?;

        Ok(bangumi)
    }
}

pub fn build_torrent_downloader_from_downloader_model(
    model: downloaders::Model,
) -> eyre::Result<Box<dyn TorrentDownloader>> {
    Ok(Box::new(match &model.category {
        DownloaderCategory::QBittorrent => QBittorrentDownloader::from_downloader_model(model)?,
    }))
}

pub async fn build_torrent_source_from_url(url: Url) -> eyre::Result<TorrentSources> {
    let source = if url.scheme() == "magnet" {
        TorrentSources::Urls { urls: vec![url] }
    } else {
        let bytes = download_bytes(url).await?;
        TorrentSources::TorrentFiles {
            torrents: bytes.into(),
        }
    };
    Ok(source)
}

pub async fn build_torrent_source_from_urls<IU: Iterator<Item = Url>>(
    urls: IU,
) -> eyre::Result<TorrentSources> {
    let urls = urls.collect::<Vec<_>>();
    Ok(TorrentSources::Urls { urls })
}

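A sketch, not part of the commit: `build_torrent_source_from_url` passes magnet links through as URLs and fetches anything else into raw torrent bytes first.

// Sketch only; the info-hash below is a placeholder.
use recorder::downloaders::torrent_downloader::build_torrent_source_from_url;
use url::Url;

async fn demo() -> eyre::Result<()> {
    let url = Url::parse("magnet:?xt=urn:btih:0000000000000000000000000000000000000000")?;
    // Magnet scheme, so this returns TorrentSources::Urls without any download.
    let _source = build_torrent_source_from_url(url).await?;
    Ok(())
}
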
@@ -1,8 +1,11 @@
+#![feature(async_closure)]
 pub mod app;
 pub mod controllers;
-pub mod downloader;
+pub mod downloaders;
 pub mod migrations;
 pub mod models;
+pub mod parsers;
+pub mod path;
 pub mod subscriptions;
 pub mod tasks;
 pub mod views;

@@ -1,9 +1,7 @@
-use std::{collections::HashSet};
-use std::fmt::Display;
+use std::{collections::HashSet, fmt::Display};

 use sea_orm::{DeriveIden, Statement};
-use sea_orm_migration::prelude::*;
-use sea_orm_migration::prelude::extension::postgres::IntoTypeRef;
+use sea_orm_migration::prelude::{extension::postgres::IntoTypeRef, *};

 use crate::migrations::extension::postgres::Type;

@@ -19,6 +17,7 @@ pub enum Subscribers {
     Id,
     Pid,
     DisplayName,
+    DownloaderId,
 }

 #[derive(DeriveIden)]

@@ -65,6 +64,18 @@ pub enum Downloads {
     Url,
 }

+#[derive(DeriveIden)]
+pub enum Downloaders {
+    Table,
+    Id,
+    Category,
+    Endpoint,
+    Password,
+    Username,
+    SubscriberId,
+    DownloadPath,
+}
+
 #[async_trait::async_trait]
 pub trait CustomSchemaManagerExt {
     async fn create_postgres_auto_update_ts_fn(&self, col_name: &str) -> Result<(), DbErr>;

@@ -98,7 +109,7 @@ pub trait CustomSchemaManagerExt {
             &table_ident.to_string(),
             &column_ident.to_string(),
         )
-            .await?;
+        .await?;
         Ok(())
     }

@@ -134,14 +145,14 @@ pub trait CustomSchemaManagerExt {
             &table_ident.to_string(),
             &column_ident.to_string(),
         )
-            .await?;
+        .await?;
         Ok(())
     }

     async fn create_postgres_enum_for_active_enum<
         E: IntoTypeRef + IntoIden + Send + Clone,
         T: Display + Send,
-        I: IntoIterator<Item=T> + Send,
+        I: IntoIterator<Item = T> + Send,
     >(
         &self,
         enum_name: E,

@@ -151,7 +162,7 @@ pub trait CustomSchemaManagerExt {
     async fn add_postgres_enum_values_for_active_enum<
         E: IntoTypeRef + IntoIden + Send + Clone,
         T: Display + Send,
-        I: IntoIterator<Item=T> + Send,
+        I: IntoIterator<Item = T> + Send,
     >(
         &self,
         enum_name: E,

@@ -229,7 +240,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
     async fn create_postgres_enum_for_active_enum<
         E: IntoTypeRef + IntoIden + Send + Clone,
         T: Display + Send,
-        I: IntoIterator<Item=T> + Send,
+        I: IntoIterator<Item = T> + Send,
     >(
         &self,
         enum_name: E,

@@ -241,12 +252,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
             .into_iter()
             .map(|v| Alias::new(v.to_string()))
             .collect::<Vec<_>>();
-        self.create_type(
-            Type::create()
-                .as_enum(enum_name)
-                .values(idents)
-                .to_owned(),
-        )
+        self.create_type(Type::create().as_enum(enum_name).values(idents).to_owned())
             .await?;
     } else {
         self.add_postgres_enum_values_for_active_enum(enum_name, values)

@@ -258,7 +264,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
     async fn add_postgres_enum_values_for_active_enum<
         E: IntoTypeRef + IntoIden + Send + Clone,
         T: Display + Send,
-        I: IntoIterator<Item=T> + Send,
+        I: IntoIterator<Item = T> + Send,
     >(
         &self,
         enum_name: E,

@@ -65,7 +65,9 @@ impl MigrationTrait for Migration {
                     ForeignKey::create()
                         .name("fk_subscription_subscriber_id")
                         .from(Subscriptions::Table, Subscriptions::SubscriberId)
-                        .to(Subscribers::Table, Subscribers::Id),
+                        .to(Subscribers::Table, Subscribers::Id)
+                        .on_update(ForeignKeyAction::Restrict)
+                        .on_delete(ForeignKeyAction::Cascade),
                 )
                 .to_owned(),
         )

@@ -88,7 +90,9 @@ impl MigrationTrait for Migration {
                     ForeignKey::create()
                         .name("fk_bangumi_subscription_id")
                         .from(Bangumi::Table, Bangumi::SubscriptionId)
-                        .to(Subscriptions::Table, Subscriptions::Id),
+                        .to(Subscriptions::Table, Subscriptions::Id)
+                        .on_update(ForeignKeyAction::Restrict)
+                        .on_delete(ForeignKeyAction::Cascade),
                 )
                 .to_owned(),
         )

@@ -109,7 +113,9 @@ impl MigrationTrait for Migration {
                     ForeignKey::create()
                         .name("fk_episode_bangumi_id")
                         .from(Episodes::Table, Episodes::BangumiId)
-                        .to(Bangumi::Table, Bangumi::Id),
+                        .to(Bangumi::Table, Bangumi::Id)
+                        .on_update(ForeignKeyAction::Restrict)
+                        .on_delete(ForeignKeyAction::Cascade),
                 )
                 .to_owned(),
         )

@@ -2,8 +2,10 @@ use loco_rs::schema::table_auto;
 use sea_orm_migration::{prelude::*, schema::*};

 use super::defs::*;
-use crate::models::prelude::{DownloadMime, DownloadStatus};
-use crate::models::prelude::downloads::{DownloadMimeEnum, DownloadStatusEnum};
+use crate::models::prelude::{
+    downloads::{DownloadMimeEnum, DownloadStatusEnum},
+    DownloadMime, DownloadStatus,
+};

 #[derive(DeriveMigrationName)]
 pub struct Migration;

@@ -53,13 +55,18 @@ impl MigrationTrait for Migration {
                     .col(big_unsigned(Downloads::CurrSize))
                     .col(text(Downloads::Url))
                     .index(
-                        Index::create().table(Downloads::Table).col(Downloads::Url).name("idx_download_url")
+                        Index::create()
+                            .table(Downloads::Table)
+                            .col(Downloads::Url)
+                            .name("idx_download_url"),
                     )
                     .foreign_key(
                         ForeignKey::create()
                             .name("fk_download_subscription_id")
                             .from(Downloads::Table, Downloads::SubscriptionId)
-                            .to(Subscriptions::Table, Subscriptions::Id),
+                            .to(Subscriptions::Table, Subscriptions::Id)
+                            .on_update(ForeignKeyAction::Restrict)
+                            .on_delete(ForeignKeyAction::Cascade),
                     )
                     .to_owned(),
             )

@@ -73,14 +80,16 @@ impl MigrationTrait for Migration {
             .alter_table(
                 Table::alter()
                     .table(Episodes::Table)
-                    .add_column_if_not_exists(integer(Episodes::DownloadId))
+                    .add_column_if_not_exists(integer_null(Episodes::DownloadId))
                     .add_foreign_key(
                         TableForeignKey::new()
                             .name("fk_episode_download_id")
                             .from_tbl(Episodes::Table)
                             .from_col(Episodes::DownloadId)
                             .to_tbl(Downloads::Table)
-                            .to_col(Downloads::Id),
+                            .to_col(Downloads::Id)
+                            .on_update(ForeignKeyAction::Restrict)
+                            .on_delete(ForeignKeyAction::SetNull),
                     )
                     .to_owned(),
             )

(new file, 102 lines)
@@ -0,0 +1,102 @@
use sea_orm_migration::{prelude::*, schema::*};

use crate::{
    migrations::defs::{CustomSchemaManagerExt, Downloaders, GeneralIds, Subscribers},
    models::{downloaders::DownloaderCategoryEnum, prelude::DownloaderCategory},
};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_postgres_enum_for_active_enum(
                DownloaderCategoryEnum,
                &[DownloaderCategory::QBittorrent],
            )
            .await?;

        manager
            .create_table(
                table_auto(Downloaders::Table)
                    .col(pk_auto(Downloaders::Id))
                    .col(text(Downloaders::Endpoint))
                    .col(string_null(Downloaders::Username))
                    .col(string_null(Downloaders::Password))
                    .col(enumeration(
                        Downloaders::Category,
                        DownloaderCategoryEnum,
                        DownloaderCategory::iden_values(),
                    ))
                    .col(text(Downloaders::DownloadPath))
                    .col(integer(Downloaders::SubscriberId))
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_downloader_subscriber_id")
                            .from_tbl(Downloaders::Table)
                            .from_col(Downloaders::SubscriberId)
                            .to_tbl(Subscribers::Table)
                            .to_col(Subscribers::Id)
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Restrict),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_postgres_auto_update_ts_trigger_for_col(
                Downloaders::Table,
                GeneralIds::UpdatedAt,
            )
            .await?;

        manager
            .alter_table(
                Table::alter()
                    .table(Subscribers::Table)
                    .add_column_if_not_exists(integer_null(Subscribers::DownloaderId))
                    .add_foreign_key(
                        TableForeignKey::new()
                            .name("fk_subscriber_downloader_id")
                            .from_tbl(Subscribers::Table)
                            .from_col(Subscribers::DownloaderId)
                            .to_tbl(Downloaders::Table)
                            .to_col(Downloaders::Id)
                            .on_delete(ForeignKeyAction::SetNull)
                            .on_update(ForeignKeyAction::Restrict),
                    )
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Subscribers::Table)
                    .drop_foreign_key(Alias::new("fk_subscriber_downloader_id"))
                    .drop_column(Subscribers::DownloaderId)
                    .to_owned(),
            )
            .await?;

        manager
            .drop_postgres_auto_update_ts_trigger_for_col(Downloaders::Table, GeneralIds::UpdatedAt)
            .await?;

        manager
            .drop_table(Table::drop().table(Downloaders::Table).to_owned())
            .await?;

        manager
            .drop_postgres_enum_for_active_enum(DownloaderCategoryEnum)
            .await?;

        Ok(())
    }
}

@@ -3,6 +3,7 @@ pub use sea_orm_migration::prelude::*;
 pub mod defs;
 pub mod m20220101_000001_init;
 pub mod m20240224_082543_add_downloads;
+pub mod m20240225_060853_subscriber_add_downloader;

 pub struct Migrator;

@@ -12,6 +13,7 @@ impl MigratorTrait for Migrator {
         vec![
             Box::new(m20220101_000001_init::Migration),
             Box::new(m20240224_082543_add_downloads::Migration),
+            Box::new(m20240225_060853_subscriber_add_downloader::Migration),
         ]
     }
 }

(deleted file)
@@ -1,12 +0,0 @@
-pub use super::{
-    bangumi,
-    bangumi::Entity as Bangumi,
-    downloads,
-    downloads::{DownloadMime, DownloadStatus, Entity as Download},
-    episodes,
-    episodes::Entity as Episode,
-    subscribers,
-    subscribers::Entity as Subscriber,
-    subscriptions,
-    subscriptions::{Entity as Subscription, SubscriptionCategory},
-};

@@ -1,6 +1,6 @@
 use sea_orm::entity::prelude::*;

-pub use super::_entities::bangumi::{self, ActiveModel, Entity, Model};
+pub use super::entities::bangumi::*;

 #[async_trait::async_trait]
 impl ActiveModelBehavior for ActiveModel {}

crates/recorder/src/models/downloaders.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
use sea_orm::prelude::*;
use url::Url;

pub use crate::models::entities::downloaders::*;

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

impl Model {
    pub fn endpoint_url(&self) -> Result<Url, url::ParseError> {
        let url = Url::parse(&self.endpoint)?;
        Ok(url)
    }
}

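A sketch, not part of the commit: `endpoint_url` surfaces `url::ParseError` instead of panicking, which is what `QBittorrentDownloader::from_downloader_model` maps into `DownloaderError::InvalidUrlFormat` via `#[from]`.

// Sketch: callers can treat a malformed endpoint as a soft failure.
fn endpoint_of(model: &recorder::models::downloaders::Model) -> Option<url::Url> {
    model.endpoint_url().ok()
}
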
@@ -1,9 +1,10 @@
-use sea_orm::{prelude::*, ActiveValue, Condition, QuerySelect, QueryOrder};
-use sea_orm::sea_query::OnConflict;
+use sea_orm::{prelude::*, sea_query::OnConflict, ActiveValue, Condition, QueryOrder, QuerySelect};

-use crate::models::_entities::downloads::*;
-use crate::models::prelude::{SubscriptionCategory, subscriptions};
-use crate::subscriptions::mikan::{MikanSubscriptionEngine, MikanSubscriptionItem};
+pub use crate::models::entities::downloads::*;
+use crate::{
+    models::subscriptions::{self, SubscriptionCategory},
+    subscriptions::mikan::{MikanSubscriptionEngine, MikanSubscriptionItem},
+};

 #[async_trait::async_trait]
 impl ActiveModelBehavior for ActiveModel {}

@@ -17,8 +18,9 @@ impl ActiveModel {
             status: ActiveValue::Set(DownloadStatus::Pending),
             mime: ActiveValue::Set(DownloadMime::BitTorrent),
             url: ActiveValue::Set(m.url),
-            all_size: ActiveValue::Set(m.content_length.unwrap_or_default()),
-            curr_size: ActiveValue::Set(0),
+            curr_size: ActiveValue::Set(m.content_length.as_ref().map(|_| 0)),
+            all_size: ActiveValue::Set(m.content_length),
             homepage: ActiveValue::Set(m.homepage),
             ..Default::default()
         }
     }

@@ -32,8 +34,8 @@ impl Model {
         match &item.category {
             SubscriptionCategory::Mikan => {
                 let items =
-                    MikanSubscriptionEngine::subscription_items_from_rss_url(&item.source_url).
-                        await?;
+                    MikanSubscriptionEngine::subscription_items_from_rss_url(&item.source_url)
+                        .await?;
                 let all_items = items.collect::<Vec<_>>();

                 let last_old_id = {

@@ -42,23 +44,21 @@ impl Model {
                         .column(Column::Id)
                         .order_by_desc(Column::Id)
                         .filter(Column::SubscriptionId.eq(item.id))
-                        .one(db).await?
-                }.map(|i| i.id);
+                        .one(db)
+                        .await?
+                }
+                .map(|i| i.id);

                 if all_items.is_empty() {
                     return Ok(vec![]);
                 }

-                let new_items = all_items.into_iter().map(|i| {
-                    ActiveModel::from_mikan_subscription_item(i, item.id)
-                });
+                let new_items = all_items
+                    .into_iter()
+                    .map(|i| ActiveModel::from_mikan_subscription_item(i, item.id));

                 let insert_result = Entity::insert_many(new_items)
-                    .on_conflict(
-                        OnConflict::column(Column::Url)
-                            .do_nothing()
-                            .to_owned()
-                    )
+                    .on_conflict(OnConflict::column(Column::Url).do_nothing().to_owned())
                    .exec(db)
                    .await?;

@@ -71,9 +71,7 @@ impl Model {
                     .add(Column::Id.lte(insert_result.last_insert_id));

                 if let Some(last_old_id) = last_old_id {
-                    cond = cond.add(
-                        Column::Id.gt(last_old_id)
-                    )
+                    cond = cond.add(Column::Id.gt(last_old_id))
                 }

                 cond

@@ -1,8 +1,12 @@
 //! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2

-use sea_orm::entity::prelude::*;
+use sea_orm::{entity::prelude::*, FromJsonQueryResult};
 use serde::{Deserialize, Serialize};

+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
+pub struct BangumiFilter {
+    pub name: Option<Vec<String>>,
+    pub group: Option<Vec<String>>,
+}
+
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "bangumi")]
 pub struct Model {

@@ -12,6 +16,19 @@ pub struct Model {
     pub id: i32,
     pub display_name: String,
     pub subscription_id: i32,
+    pub official_title: String,
+    pub season: i32,
+    pub season_raw: Option<String>,
+    pub group_name: Option<String>,
+    pub resolution: Option<String>,
+    pub source: Option<String>,
+    pub filter: Option<BangumiFilter>,
+    pub subtitle: Option<String>,
+    pub rss_link: Option<String>,
+    pub poster_link: Option<String>,
+    pub rule_name: Option<String>,
+    pub sub_path: Option<String>,
+    pub deleted: bool,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

crates/recorder/src/models/entities/downloaders.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "downloader_type")]
#[serde(rename_all = "snake_case")]
pub enum DownloaderCategory {
    #[sea_orm(string_value = "qbittorrent")]
    QBittorrent,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloaders")]
pub struct Model {
    #[sea_orm(column_type = "Timestamp")]
    pub created_at: DateTime,
    #[sea_orm(column_type = "Timestamp")]
    pub updated_at: DateTime,
    #[sea_orm(primary_key)]
    pub id: i32,
    pub category: DownloaderCategory,
    pub endpoint: String,
    pub password: String,
    pub username: String,
    pub subscriber_id: i32,
    pub download_path: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id"
    )]
    Subscriber,
}

impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}

@@ -2,7 +2,7 @@ use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};

 #[derive(
-    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
+    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
 )]
 #[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_status")]
 #[serde(rename_all = "snake_case")]

@@ -22,7 +22,7 @@ pub enum DownloadStatus {
 }

 #[derive(
-    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
+    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
 )]
 #[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_mime")]
 pub enum DownloadMime {

@@ -47,16 +47,17 @@ pub struct Model {
     pub status: DownloadStatus,
     pub mime: DownloadMime,
     pub url: String,
-    pub all_size: u64,
-    pub curr_size: u64,
+    pub all_size: Option<u64>,
+    pub curr_size: Option<u64>,
+    pub homepage: Option<String>,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 pub enum Relation {
     #[sea_orm(
-        belongs_to = "super::subscriptions::Entity",
-        from = "Column::SubscriptionId",
-        to = "super::subscriptions::Column::Id"
+        belongs_to = "super::subscriptions::Entity",
+        from = "Column::SubscriptionId",
+        to = "super::subscriptions::Column::Id"
     )]
     Subscription,
     #[sea_orm(has_many = "super::episodes::Entity")]

@@ -1,8 +1,25 @@
 //! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2

-use sea_orm::entity::prelude::*;
+use sea_orm::{entity::prelude::*, FromJsonQueryResult};
 use serde::{Deserialize, Serialize};

+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
+pub struct EpisodeFileMeta {
+    pub media_path: String,
+    pub group: Option<String>,
+    pub title: String,
+    pub season: i32,
+    pub episode_index: Option<i32>,
+    pub extension: String,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
+pub struct SubtitleFileMeta {
+    pub episode_file_meta: EpisodeFileMeta,
+    pub extension: String,
+    pub lang: Option<String>,
+}
+
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "episodes")]
 pub struct Model {

@@ -19,15 +36,15 @@ pub struct Model {
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 pub enum Relation {
     #[sea_orm(
-        belongs_to = "super::bangumi::Entity",
-        from = "Column::BangumiId",
-        to = "super::bangumi::Column::Id"
+        belongs_to = "super::bangumi::Entity",
+        from = "Column::BangumiId",
+        to = "super::bangumi::Column::Id"
     )]
     Bangumi,
     #[sea_orm(
-        belongs_to = "super::downloads::Entity",
-        from = "Column::DownloadId",
-        to = "super::downloads::Column::Id"
+        belongs_to = "super::downloads::Entity",
+        from = "Column::DownloadId",
+        to = "super::downloads::Column::Id"
     )]
     Downloads,
 }

@@ -1,9 +1,7 @@
 //! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.4
-
-pub mod prelude;
-
 pub mod bangumi;
 pub mod downloads;
 pub mod episodes;
 pub mod subscribers;
 pub mod subscriptions;
+pub mod downloaders;

@@ -1,8 +1,13 @@
 //! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2

-use sea_orm::entity::prelude::*;
+use sea_orm::{entity::prelude::*, FromJsonQueryResult};
 use serde::{Deserialize, Serialize};

+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
+pub struct SubscriberBangumiConfig {
+    pub leading_group_tag: Option<bool>,
+}
+
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "subscribers")]
 pub struct Model {

@@ -13,12 +18,20 @@ pub struct Model {
     #[sea_orm(unique)]
     pub pid: String,
     pub display_name: String,
+    pub downloader_id: Option<i32>,
+    pub bangumi_conf: SubscriberBangumiConfig,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 pub enum Relation {
     #[sea_orm(has_many = "super::subscriptions::Entity")]
     Subscription,
+    #[sea_orm(
+        belongs_to = "super::downloaders::Entity",
+        from = "Column::DownloaderId",
+        to = "super::downloaders::Column::Id"
+    )]
+    Downloader,
 }

 impl Related<super::subscriptions::Entity> for Entity {

@@ -26,3 +39,9 @@ impl Related<super::subscriptions::Entity> for Entity {
         Relation::Subscription.def()
     }
 }
+
+impl Related<super::downloaders::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Downloader.def()
+    }
+}

@@ -1,15 +1,13 @@
 //! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2

 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};

 #[derive(
-    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
+    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
 )]
 #[sea_orm(
-    rs_type = "String",
-    db_type = "Enum",
-    enum_name = "subscription_category"
+    rs_type = "String",
+    db_type = "Enum",
+    enum_name = "subscription_category"
 )]
 #[serde(rename_all = "snake_case")]
 pub enum SubscriptionCategory {

@@ -39,9 +37,9 @@ pub struct Model {
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 pub enum Relation {
     #[sea_orm(
-        belongs_to = "super::subscribers::Entity",
-        from = "Column::SubscriberId",
-        to = "super::subscribers::Column::Id"
+        belongs_to = "super::subscribers::Entity",
+        from = "Column::SubscriberId",
+        to = "super::subscribers::Column::Id"
     )]
     Subscriber,
     #[sea_orm(has_many = "super::bangumi::Entity")]

@@ -1,6 +1,6 @@
 use sea_orm::entity::prelude::*;

-pub use super::_entities::episodes::{self, ActiveModel, Entity, Model};
+pub use super::entities::episodes::*;

 #[async_trait::async_trait]
 impl ActiveModelBehavior for ActiveModel {}

@@ -1,8 +1,9 @@
-pub mod _entities;
 pub mod bangumi;
+pub mod downloaders;
 pub mod downloads;
+pub mod entities;
 pub mod episodes;
+pub mod notifications;
+pub mod prelude;
 pub mod subscribers;
 pub mod subscriptions;
-
-pub use _entities::prelude;

crates/recorder/src/models/notifications.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Notification {
    official_title: String,
    season: i32,
    episode_size: u32,
    poster_url: Option<String>,
}

crates/recorder/src/models/prelude.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
pub use super::{
    bangumi::{self, Entity as Bangumi},
    downloaders::{self, DownloaderCategory, Entity as Downloader},
    downloads::{self, DownloadMime, DownloadStatus, Entity as Download},
    episodes::{self, Entity as Episode},
    subscribers::{self, Entity as Subscriber},
    subscriptions::{self, Entity as Subscription, SubscriptionCategory},
};

@@ -2,7 +2,7 @@ use loco_rs::model::{ModelError, ModelResult};
 use sea_orm::{entity::prelude::*, ActiveValue, TransactionTrait};
 use serde::{Deserialize, Serialize};

-pub use super::_entities::subscribers::{self, ActiveModel, Entity, Model};
+pub use super::entities::subscribers::*;

 pub const ROOT_SUBSCRIBER: &str = "konobangu";

@@ -36,7 +36,7 @@ impl Model {
     pub async fn find_by_pid(db: &DatabaseConnection, pid: &str) -> ModelResult<Self> {
         let parse_uuid = Uuid::parse_str(pid).map_err(|e| ModelError::Any(e.into()))?;
         let subscriber = Entity::find()
-            .filter(subscribers::Column::Pid.eq(parse_uuid))
+            .filter(Column::Pid.eq(parse_uuid))
             .one(db)
             .await?;
         subscriber.ok_or_else(|| ModelError::EntityNotFound)

@@ -1,6 +1,6 @@
 use sea_orm::{entity::prelude::*, ActiveValue};

-pub use super::_entities::subscriptions::{self, *};
+pub use super::entities::subscriptions::{self, *};
 use crate::subscriptions::defs::RssCreateDto;

 #[async_trait::async_trait]

@@ -27,7 +27,7 @@ impl Model {

     pub async fn toggle_iters(
         db: &DatabaseConnection,
-        ids: impl Iterator<Item=i32>,
+        ids: impl Iterator<Item = i32>,
         enabled: bool,
     ) -> eyre::Result<()> {
         Entity::update_many()

@@ -40,7 +40,7 @@ impl Model {

     pub async fn delete_iters(
         db: &DatabaseConnection,
-        ids: impl Iterator<Item=i32>,
+        ids: impl Iterator<Item = i32>,
     ) -> eyre::Result<()> {
         Entity::delete_many()
             .filter(Column::Id.is_in(ids))

crates/recorder/src/parsers/bangumi_parser.rs (new file, 7 lines)
@@ -0,0 +1,7 @@
use crate::parsers::errors::ParseError;

pub fn parse_bangumi_season(season_str: &str) -> Result<i32, ParseError> {
    season_str
        .parse::<i32>()
        .map_err(ParseError::BangumiSeasonError)
}

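A sketch, not part of the commit: the `#[from]` conversion on `ParseError::BangumiSeasonError` keeps the underlying `std::num::ParseIntError` as the error source.

// Sketch: Arabic-digit season strings parse; anything else is an error.
use recorder::parsers::bangumi_parser::parse_bangumi_season;

fn demo() {
    assert_eq!(parse_bangumi_season("2").ok(), Some(2));
    assert!(parse_bangumi_season("二").is_err()); // Chinese numerals are not handled here
}
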
crates/recorder/src/parsers/defs.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
use std::collections::{BTreeMap, HashMap};

use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use maplit::{btreemap, hashmap};
use regex::Regex;

const LANG_ZH_TW: &str = "zh-tw";
const LANG_ZH: &str = "zh";
const LANG_EN: &str = "en";
const LANG_JP: &str = "jp";

lazy_static! {
    pub static ref SEASON_REGEX: Regex =
        Regex::new(r"(S\|[Ss]eason\s+)(\d+)").expect("Invalid regex");
    pub static ref TORRENT_PRASE_RULE_REGS: Vec<FancyRegex> = vec(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)"
        )
        .unwrap(),
        FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
        FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
        FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
    ];
    pub static ref SUBTITLE_LANG: BTreeMap<&'static str, Vec<&'static str>> = {
        btreemap! {
            LANG_ZH_TW => vec!["tc", "cht", "繁", "zh-tw"],
            LANG_ZH => vec!["sc", "chs", "简", "zh", "zh-cn"],
            LANG_EN => vec!["en", "eng", "英"],
            LANG_JP => vec!["jp", "jpn", "日"],
        }
    };
    pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
    pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
    pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
        hashmap! {
            "〇" => 0,
            "一" => 1,
            "二" => 2,
            "三" => 3,
            "四" => 4,
            "五" => 5,
            "六" => 6,
            "七" => 7,
            "八" => 8,
            "九" => 9,
            "十" => 10,
            "廿" => 20,
            "百" => 100,
            "千" => 1000,
            "零" => 0,
            "壹" => 1,
            "贰" => 2,
            "叁" => 3,
            "肆" => 4,
            "伍" => 5,
            "陆" => 6,
            "柒" => 7,
            "捌" => 8,
            "玖" => 9,
            "拾" => 10,
            "念" => 20,
            "佰" => 100,
            "仟" => 1000,
        }
    };
    pub static ref ZH_NUM_RE: Regex =
        Regex::new(r"[〇一二三四五六七八九十廿百千零壹贰叁肆伍陆柒捌玖拾念佰仟]").unwrap();
}

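A sketch, not part of the commit: one plausible way to consult the SUBTITLE_LANG table — the first language whose alias appears in a lowercased subtitle tag wins. The helper name is hypothetical.

// Sketch: scan alias lists in the BTreeMap's key order.
use recorder::parsers::defs::SUBTITLE_LANG;

fn guess_subtitle_lang(tag: &str) -> Option<&'static str> {
    let tag = tag.to_lowercase();
    SUBTITLE_LANG
        .iter()
        .find(|(_, aliases)| aliases.iter().any(|a| tag.contains(*a)))
        .map(|(lang, _)| *lang)
}
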
crates/recorder/src/parsers/errors.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
use thiserror::Error;

#[derive(Error, Debug)]
pub enum ParseError {
    #[error("Parse bangumi season error: {0}")]
    BangumiSeasonError(#[from] std::num::ParseIntError),
    #[error("Parse file url error: {0}")]
    FileUrlError(#[from] url::ParseError),
}

crates/recorder/src/parsers/html_parser.rs (new file, 34 lines)
@@ -0,0 +1,34 @@
use lightningcss::declaration::DeclarationBlock;

pub fn query_selector_first<'a>(
    dom: &'a tl::VDom<'a>,
    selector: &'a str,
    parser: &'a tl::Parser<'a>,
) -> Option<&'a tl::Node<'a>> {
    dom.query_selector(selector)
        .and_then(|mut s| s.next())
        .and_then(|n| n.get(parser))
}

pub fn query_selector_first_tag<'a>(
    dom: &'a tl::VDom<'a>,
    selector: &'a str,
    parser: &'a tl::Parser<'a>,
) -> Option<&'a tl::HTMLTag<'a>> {
    query_selector_first(dom, selector, parser).and_then(|n| n.as_tag())
}

pub fn parse_style_attr(style_attr: &str) -> Option<DeclarationBlock> {
    let result = DeclarationBlock::parse_string(style_attr, Default::default()).ok()?;
    Some(result)
}

pub fn get_tag_style<'a>(tag: &'a tl::HTMLTag<'a>) -> Option<DeclarationBlock<'a>> {
    let style_attr = tag
        .attributes()
        .get("style")
        .flatten()
        .and_then(|s| std::str::from_utf8(s.as_bytes()).ok());

    style_attr.and_then(parse_style_attr)
}

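A sketch, not part of the commit: chaining the helpers above to read an inline style off a node. The selector and HTML snippet are illustrative only.

// Sketch: parse a fragment, pick the first matching tag, parse its style attribute.
use recorder::parsers::html_parser::{get_tag_style, query_selector_first_tag};

fn demo() -> Option<()> {
    let html = r#"<div class="bangumi-poster" style="background-image: url('/a.jpg')"></div>"#;
    let dom = tl::parse(html, tl::ParserOptions::default()).ok()?;
    let parser = dom.parser();
    let tag = query_selector_first_tag(&dom, "div.bangumi-poster", parser)?;
    let _style = get_tag_style(tag)?;
    Some(())
}
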
crates/recorder/src/parsers/mikan_ep_parser.rs (new file, 127 lines)
@@ -0,0 +1,127 @@
use bytes::Bytes;
use html_escape::decode_html_entities;
use lazy_static::lazy_static;
use lightningcss::{properties::Property, values::image::Image};
use regex::Regex;
use url::Url;

use crate::{
    downloaders::{html::download_html, image::download_image},
    parsers::html_parser::{get_tag_style, query_selector_first_tag},
};

pub struct MikanEpisodeMeta {
    pub homepage: Url,
    pub poster_src: Option<Url>,
    pub poster_data: Option<Bytes>,
    pub official_title: String,
}

lazy_static! {
    pub static ref MIKAN_TITLE_SEASON: Regex = Regex::new("第.*季").unwrap();
}

pub async fn parse_episode_meta_from_mikan_homepage(
    url: Url,
) -> eyre::Result<Option<MikanEpisodeMeta>> {
    let url_host = url.origin().unicode_serialization();
    let content = download_html(url.as_str()).await?;
    let dom = tl::parse(&content, tl::ParserOptions::default())?;
    let parser = dom.parser();
    let poster_node = query_selector_first_tag(&dom, r"div.bangumi-poster", parser);
    let official_title_node = query_selector_first_tag(&dom, r"p.bangumi-title", parser);
    let mut poster_src = None;
    if let Some(style) = poster_node.and_then(get_tag_style) {
        for (prop, _) in style.iter() {
            match prop {
                Property::BackgroundImage(images) => {
                    if let Some(Image::Url(path)) = images.first() {
                        if let Ok(url) = Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                        {
                            poster_src = Some(url);
                        }
                    }
                }
                Property::Background(backgrounds) => {
                    for bg in backgrounds {
                        if let Image::Url(path) = &bg.image {
                            if let Ok(url) =
                                Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                            {
                                poster_src = Some(url);
                            }
                        }
                    }
                }
                _ => {}
            }
        }
    };
    poster_src = poster_src.map(|mut p| {
        p.set_query(None);
        p
    });
    let poster_data = if let Some(p) = poster_src.as_ref() {
        download_image(p.as_str()).await.ok()
    } else {
        None
    };
    let meta = official_title_node
        .map(|s| s.inner_text(parser))
        .and_then(|official_title| {
            let title = MIKAN_TITLE_SEASON
                .replace(&decode_html_entities(&official_title), "")
                .trim()
                .to_string();
            if title.is_empty() {
                None
            } else {
                Some(title)
            }
        })
        .map(|title| MikanEpisodeMeta {
            homepage: url,
            poster_src,
            official_title: title,
            poster_data,
        });
    Ok(meta)
}

#[cfg(test)]
mod test {
    use url::Url;

    use crate::parsers::mikan_ep_parser::parse_episode_meta_from_mikan_homepage;

    #[tokio::test]
    async fn test_parse_mikan() {
        let test_fn = async || -> eyre::Result<()> {
            let url_str =
                "https://mikanani.me/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a";
            let url = Url::parse(url_str)?;

            if let Some(ep_meta) = parse_episode_meta_from_mikan_homepage(url.clone()).await? {
                assert_eq!(ep_meta.homepage, url);
                assert_eq!(
                    ep_meta.poster_src,
                    Some(Url::parse(
                        "https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
                    )?)
                );
                assert_eq!(ep_meta.official_title, "葬送的芙莉莲");
                let u8_data = ep_meta.poster_data.expect("should have poster data");
                assert!(
                    u8_data.starts_with(&[255, 216, 255, 224]),
                    "should start with valid jpeg data magic number"
                );
            } else {
                panic!("can not find mikan episode title")
            }

            Ok(())
        };

        test_fn().await.expect("test parse mikan failed");
    }
}

crates/recorder/src/parsers/mod.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
pub mod bangumi_parser;
pub mod defs;
pub mod errors;
pub mod html_parser;
pub mod mikan_ep_parser;
pub mod raw_ep_parser;
pub mod title_parser;
pub mod torrent_parser;

394
crates/recorder/src/parsers/raw_ep_parser.rs
Normal file
394
crates/recorder/src/parsers/raw_ep_parser.rs
Normal file
@@ -0,0 +1,394 @@
use std::borrow::Cow;

use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};

use super::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE};

lazy_static! {
    static ref TITLE_RE: Regex = Regex::new(
        r#"(.*|\[.*])( -? \d+|\[\d+]|\[\d+.?[vV]\d]|第\d+[话話集]|\[第?\d+[话話集]]|\[\d+.?END]|[Ee][Pp]?\d+)(.*)"#
    ).unwrap();
    static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
    static ref SOURCE_RE: Regex =
        Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|Web|WebRip").unwrap();
    static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
    static ref PREFIX_RE: Regex =
        Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
    static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
    static ref MAIN_TITLE_PREFIX_PROCESS_RE1: Regex = Regex::new(r"新番|月?番").unwrap();
    static ref MAIN_TITLE_PREFIX_PROCESS_RE2: Regex = Regex::new(r"[港澳台]{1,3}地区").unwrap();
    static ref SEASON_EXTRACT_SEASON_ALL_RE: Regex =
        Regex::new(r"S\d{1,2}|Season \d{1,2}|[第].[季期]|1st|2nd|3rd|\d{1,2}th").unwrap();
    static ref SEASON_EXTRACT_SEASON_EN_PREFIX_RE: Regex = Regex::new(r"Season|S").unwrap();
    static ref SEASON_EXTRACT_SEASON_EN_NTH_RE: Regex =
        Regex::new(r"1st|2nd|3rd|\d{1,2}th").unwrap();
    static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_RE: Regex =
        Regex::new(r"[第 ].*[季期(部分)]|部分").unwrap();
    static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE: Regex = Regex::new(r"[第季期 ]").unwrap();
    static ref NAME_EXTRACT_REMOVE_RE: Regex =
        Regex::new(r"[((]仅限[港澳台]{1,3}地区[))]").unwrap();
    static ref NAME_EXTRACT_SPLIT_RE: Regex = Regex::new(r"/|\s{2}|-\s{2}").unwrap();
    static ref NAME_JP_TEST: Regex = Regex::new(r"[\p{scx=Hira}\p{scx=Kana}]{2,}").unwrap();
    static ref NAME_ZH_TEST: Regex = Regex::new(r"[\p{scx=Han}]{2,}").unwrap();
    static ref NAME_EN_TEST: Regex = Regex::new(r"[a-zA-Z]{3,}").unwrap();
    static ref TAGS_EXTRACT_SPLIT_RE: Regex = Regex::new(r"[\[\]()()]").unwrap();
    static ref CLEAR_SUB_RE: Regex = Regex::new(r"_MP4|_MKV").unwrap();
}
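// TITLE_RE splits a raw release title into three capture groups:
// (1) title body, (2) episode marker, (3) trailing tags. For an
// illustrative input "[ANi] Title - 09 [1080P][Baha]" the groups are
// roughly "[ANi] Title", " - 09", and " [1080P][Baha]".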
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct RawEpisodeMeta {
    name_en: Option<String>,
    name_en_no_season: Option<String>,
    name_jp: Option<String>,
    name_jp_no_season: Option<String>,
    name_zh: Option<String>,
    name_zh_no_season: Option<String>,
    season: i32,
    season_raw: Option<String>,
    episode_index: i32,
    sub: Option<String>,
    source: Option<String>,
    fansub: Option<String>,
    resolution: Option<String>,
}

fn extract_fansub(raw_name: &str) -> Option<&str> {
    let mut groups = EN_BRACKET_SPLIT_RE.splitn(raw_name, 3);
    groups.nth(1)
}

fn replace_ch_bracket_to_en(raw_name: &str) -> String {
    raw_name.replace('【', "[").replace('】', "]")
}
fn title_body_prefix_process(title_body: &str, fansub: Option<&str>) -> eyre::Result<String> {
    let raw_without_fansub = if let Some(fansub) = fansub {
        // escape the group name: fansub names may contain regex metacharacters
        let fan_sub_re = Regex::new(&format!(".{}.", regex::escape(fansub)))?;
        fan_sub_re.replace_all(title_body, "")
    } else {
        Cow::Borrowed(title_body)
    };
    let raw_with_prefix_replaced = PREFIX_RE.replace_all(&raw_without_fansub, "/");
    let mut arg_group = raw_with_prefix_replaced
        .split('/')
        .map(|s| s.trim())
        .collect::<Vec<_>>();

    if arg_group.len() == 1 {
        arg_group = arg_group[0].split(' ').collect();
    }
    let mut raw = raw_without_fansub.to_string();
    for arg in arg_group.iter() {
        if (arg_group.len() <= 5 && MAIN_TITLE_PREFIX_PROCESS_RE1.is_match(arg))
            || (MAIN_TITLE_PREFIX_PROCESS_RE2.is_match(arg))
        {
            let sub = Regex::new(&format!(".{}.", regex::escape(arg)))?;
            raw = sub.replace_all(&raw, "").to_string();
        }
    }
    Ok(raw)
}
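/// Extract the season number from a title body, returning the title with the
/// season marker removed, the raw marker (if any), and the parsed season,
/// defaulting to 1. Per the test cases below, "王者天下 第五季 / Kingdom S5"
/// yields season 5 with raw marker "第五季".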
fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
    let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
    let seasons = SEASON_EXTRACT_SEASON_ALL_RE
        .find(&name_and_season)
        .into_iter()
        .map(|s| s.as_str())
        .collect_vec();

    if seasons.is_empty() {
        return (title_body.to_string(), None, 1);
    }

    let mut season = 1;
    let mut season_raw = None;
    let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");

    for s in seasons {
        season_raw = Some(s);
        if SEASON_EXTRACT_SEASON_EN_PREFIX_RE.is_match(s) {
            // strip the "Season"/"S" prefix and parse the remaining digits
            if let Ok(s) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE
                .replace_all(s, "")
                .trim()
                .parse::<i32>()
            {
                season = s;
                break;
            }
        }
        if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s) {
            if let Some(s) = DIGIT_1PLUS_REG
                .find(m.as_str())
                .and_then(|s| s.as_str().parse::<i32>().ok())
            {
                season = s;
                break;
            }
        }
        if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
            if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
                .replace_all(m.as_str(), "")
                .parse::<i32>()
            {
                season = s;
                break;
            }
            if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
                season = ZH_NUM_MAP[m.as_str()];
                break;
            }
        }
    }

    (name.to_string(), season_raw.map(|s| s.to_string()), season)
}
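/// Split a name section on "/" or runs of spaces and classify each piece by
/// script: kana ⇒ name_jp, Han ideographs ⇒ name_zh, Latin letters ⇒ name_en.
/// When the section has no separator, it is re-split on single spaces so a
/// leading or trailing Chinese chunk can be pulled out of a romanized title.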
fn extract_name_from_title_body_name_section(
    title_body_name_section: &str,
) -> (Option<String>, Option<String>, Option<String>) {
    let mut name_en = None;
    let mut name_zh = None;
    let mut name_jp = None;
    let replaced = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
    let trimmed = replaced.trim();
    let mut split = NAME_EXTRACT_SPLIT_RE
        .split(trimmed)
        .map(|s| s.trim())
        .filter(|s| !s.is_empty())
        .map(|s| s.to_string())
        .collect_vec();
    if split.len() == 1 {
        let mut split_space = split[0].split(' ').collect_vec();
        // check the first and the last space-separated tokens for a Chinese chunk
        let mut search_indices = vec![0];
        if split_space.len() > 1 {
            search_indices.push(split_space.len() - 1);
        }
        for i in search_indices {
            if NAME_ZH_TEST.is_match(split_space[i]) {
                let chs = split_space[i];
                split_space.remove(i);
                split = vec![chs.to_string(), split_space.join(" ")];
                break;
            }
        }
    }
    for item in split {
        if NAME_JP_TEST.is_match(&item) && name_jp.is_none() {
            name_jp = Some(item);
        } else if NAME_ZH_TEST.is_match(&item) && name_zh.is_none() {
            name_zh = Some(item);
        } else if NAME_EN_TEST.is_match(&item) && name_en.is_none() {
            name_en = Some(item);
        }
    }
    (name_en, name_zh, name_jp)
}

fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
    DIGIT_1PLUS_REG
        .find(title_episode)?
        .as_str()
        .parse::<i32>()
        .ok()
}

fn clear_sub(sub: Option<String>) -> Option<String> {
    sub.map(|s| CLEAR_SUB_RE.replace_all(&s, "").to_string())
}

fn extract_tags_from_title_extra(
    title_extra: &str,
) -> (Option<String>, Option<String>, Option<String>) {
    let replaced = TAGS_EXTRACT_SPLIT_RE.replace_all(title_extra, " ");
    let elements = replaced
        .split(' ')
        .map(|s| s.trim())
        .filter(|s| !s.is_empty());

    let mut sub = None;
    let mut resolution = None;
    let mut source = None;
    for element in elements {
        if SUB_RE.is_match(element) {
            sub = Some(element.to_string())
        } else if RESOLUTION_RE.is_match(element) {
            resolution = Some(element.to_string())
        } else if SOURCE_RE.is_match(element) {
            source = Some(element.to_string())
        }
    }
    (clear_sub(sub), resolution, source)
}
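/// Parse a raw release filename into `RawEpisodeMeta`. The pipeline is:
/// normalize brackets, pull out the fansub group, split the title into
/// body/episode/extra with TITLE_RE, then extract season, names, episode
/// index, and tags from the respective parts.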
pub fn parse_episode_meta_from_raw_name(s: &str) -> eyre::Result<RawEpisodeMeta> {
    let raw_title = s.trim();
    let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
    let fansub = extract_fansub(&raw_title_without_ch_brackets);
    if let Some(title_re_match_obj) = TITLE_RE.captures(&raw_title_without_ch_brackets) {
        let title_body = title_re_match_obj
            .get(1)
            .map(|s| s.as_str().trim())
            .unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
        let title_episode = title_re_match_obj
            .get(2)
            .map(|s| s.as_str().trim())
            .unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
        let title_extra = title_re_match_obj
            .get(3)
            .map(|s| s.as_str().trim())
            .unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
        let title_body = title_body_prefix_process(title_body, fansub)?;
        let (name_without_season, season_raw, season) =
            extract_season_from_title_body(&title_body);
        let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
        let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
            extract_name_from_title_body_name_section(&name_without_season);
        let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(0);
        let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
        Ok(RawEpisodeMeta {
            name_en,
            name_en_no_season,
            name_jp,
            name_jp_no_season,
            name_zh,
            name_zh_no_season,
            season,
            season_raw,
            episode_index,
            sub,
            source,
            fansub: fansub.map(|s| s.to_string()),
            resolution,
        })
    } else {
        Err(eyre::eyre!("cannot parse episode meta from raw filename"))
    }
}
#[cfg(test)]
mod tests {
    use super::{parse_episode_meta_from_raw_name, RawEpisodeMeta};

    struct TestCase {
        source: &'static str,
        expected: &'static str,
    }

    #[test]
    fn test_parse_episode_meta_from_raw_name() {
        let test_cases = vec![
            TestCase {
                // ep+version case
                source: r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
                expected: r#"{
                    "name_en": "Shin no Nakama 2nd",
                    "name_en_no_season": "Shin no Nakama",
                    "name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
                    "name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
                    "season": 2,
                    "season_raw": "2nd",
                    "episode_index": 8,
                    "sub": "简繁内封字幕",
                    "source": "WebRip",
                    "fansub": "LoliHouse",
                    "resolution": "1080p"
                }"#,
            },
            TestCase {
                // pure english title case
                source: r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
                expected: r#"{
                    "name_en": "THE MARGINAL SERVICE",
                    "name_en_no_season": "THE MARGINAL SERVICE",
                    "season": 1,
                    "episode_index": 8,
                    "sub": "简繁内封字幕",
                    "source": "WebRip",
                    "fansub": "动漫国字幕组&LoliHouse",
                    "resolution": "1080p"
                }"#,
            },
            TestCase {
                // two zh titles case
                source: r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
                expected: r#"{
                    "name_en": "Nozomanu Fushi no Boukensha",
                    "name_en_no_season": "Nozomanu Fushi no Boukensha",
                    "name_zh": "事与愿违的不死冒险者",
                    "name_zh_no_season": "事与愿违的不死冒险者",
                    "season": 1,
                    "season_raw": null,
                    "episode_index": 1,
                    "sub": "简繁内封字幕",
                    "source": "WebRip",
                    "fansub": "LoliHouse",
                    "resolution": "1080p"
                }"#,
            },
            TestCase {
                // en+zh+jp case
                source: r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
                expected: r#"{
                    "name_en": "Pon no Michi",
                    "name_jp": "ぽんのみち",
                    "name_zh": "碰之道",
                    "name_en_no_season": "Pon no Michi",
                    "name_jp_no_season": "ぽんのみち",
                    "name_zh_no_season": "碰之道",
                    "season": 1,
                    "season_raw": null,
                    "episode_index": 7,
                    "sub": "简繁日内封字幕",
                    "source": "WebRip",
                    "fansub": "喵萌奶茶屋&LoliHouse",
                    "resolution": "1080p"
                }"#,
            },
            TestCase {
                // season nth case
                source: r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
                expected: r#"{
                    "name_en": "Yowai Character Tomozakikun",
                    "name_en_no_season": "Yowai Character Tomozakikun",
                    "name_zh": "弱角友崎同学 2nd STAGE",
                    "name_zh_no_season": "弱角友崎同学",
                    "season": 2,
                    "season_raw": "2nd",
                    "episode_index": 9,
                    "sub": "CHT",
                    "source": "Baha",
                    "fansub": "ANi",
                    "resolution": "1080P"
                }"#,
            },
            TestCase {
                // season en + season zh case
                source: r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
                expected: r#"{
                    "name_en": "Kingdom S5",
                    "name_en_no_season": "Kingdom",
                    "name_zh": "王者天下 第五季",
                    "name_zh_no_season": "王者天下",
                    "season": 5,
                    "season_raw": "第五季",
                    "episode_index": 7,
                    "sub": "简繁外挂字幕",
                    "source": "WebRip",
                    "fansub": "豌豆字幕组&LoliHouse",
                    "resolution": "1080p"
                }"#,
            },
        ];

        for case in test_cases {
            let expected: Option<RawEpisodeMeta> = serde_json::from_str(case.expected).unwrap();
            let found = parse_episode_meta_from_raw_name(case.source).ok();

            if expected != found {
                println!(
                    "expected {} and found {} are not equal",
                    serde_json::to_string_pretty(&expected).unwrap(),
                    serde_json::to_string_pretty(&found).unwrap()
                )
            }
            assert_eq!(expected, found);
        }
    }
}
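A minimal sketch of calling the new parser from application code — assuming `parsers::raw_ep_parser` is exposed publicly from the `recorder` lib (the title string is taken from the tests above):

    use recorder::parsers::raw_ep_parser::parse_episode_meta_from_raw_name;

    fn main() -> eyre::Result<()> {
        let meta = parse_episode_meta_from_raw_name(
            "[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]",
        )?;
        // season defaults to 1 when no marker is present; episode_index is 7 here
        println!("{meta:?}");
        Ok(())
    }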
9
crates/recorder/src/parsers/title_parser.rs
Normal file
@@ -0,0 +1,9 @@
use crate::path::VFSPath;

pub fn parse_torrent_title(
    torrent_path: VFSPath<'_>,
    torrent_name: Option<&str>,
    season: Option<i32>,
    file_type: &str,
) {
}
90
crates/recorder/src/parsers/torrent_parser.rs
Normal file
@@ -0,0 +1,90 @@
use super::defs::{
    BRACKETS_REG, DIGIT_1PLUS_REG, SEASON_REGEX, SUBTITLE_LANG, TORRENT_PRASE_RULE_REGS,
};
use crate::path::VFSPath;

pub fn get_path_basename<'a>(path: &'a VFSPath) -> &'a str {
    path.basename()
}
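/// Split a "[group] title" string on brackets: if the second bracketed chunk
/// is numeric it is an episode marker rather than a title, so no group is
/// assumed; otherwise the first chunk is taken as the fansub group.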
pub fn get_group(group_and_title: &str) -> (Option<&str>, &str) {
    let n = BRACKETS_REG
        .split(group_and_title)
        .map(|s| s.trim())
        .filter(|s| !s.is_empty())
        .collect::<Vec<_>>();

    if n.len() > 1 {
        if DIGIT_1PLUS_REG.is_match(n[1]) {
            (None, group_and_title)
        } else {
            (Some(n[0]), n[1])
        }
    } else {
        (None, n[0])
    }
}

pub fn get_season_and_title(season_and_title: &str) -> (String, i32) {
    let title = SEASON_REGEX.replace(season_and_title, "");
    let title = title.trim().to_string();

    let mut season = 1;
    if let Some(match_result) = SEASON_REGEX.captures(season_and_title) {
        let season_str = match_result
            .get(2)
            .unwrap_or_else(|| unreachable!("season regex should have 2 groups"))
            .as_str();
        season = season_str
            .parse::<i32>()
            .unwrap_or_else(|_| unreachable!("season should be a number"));
    }

    (title, season)
}
pub fn get_subtitle_lang(subtitle_name: &str) -> Option<&'static str> {
    let subtitle_name_lower = subtitle_name.to_lowercase();
    for (lang, matches) in SUBTITLE_LANG.iter() {
        for m in matches {
            if subtitle_name_lower.contains(m) {
                return Some(lang);
            }
        }
    }
    None
}

pub fn parse_torrent<'a>(
    torrent_path: &'a VFSPath<'a>,
    torrent_name: Option<&str>,
    season: Option<i32>,
    file_type: Option<&str>,
) {
    let media_name = get_path_basename(torrent_path);
    for rule in TORRENT_PRASE_RULE_REGS.iter() {
        let match_obj = if let Some(torrent_name) = torrent_name {
            rule.captures(torrent_name)
        } else {
            rule.captures(media_name)
        };

        if let Ok(Some(match_obj)) = match_obj {
            let group_and_title = match_obj
                .get(1)
                .unwrap_or_else(|| unreachable!("should have 1 group"))
                .as_str();
            let (group, title) = get_group(group_and_title);
            let season_and_title = get_season_and_title(title);
            let season = season.unwrap_or(season_and_title.1);
            let title = season_and_title.0;
            let episode = match_obj
                .get(2)
                .unwrap_or_else(|| unreachable!("should have 2 groups"))
                .as_str()
                .parse::<i32>()
                .unwrap_or_else(|_| unreachable!("episode should be a number"));

            let extension = media_name;
            // assembling the parsed result is still pending
            todo!()
        }
    }
}
4
crates/recorder/src/path/mod.rs
Normal file
@@ -0,0 +1,4 @@
pub mod torrent_path;
pub mod vfs_path;

pub use vfs_path::{VFSComponent, VFSComponents, VFSPath, VFSPathBuf, VFSSubPath, VFSSubPathBuf};
79
crates/recorder/src/path/torrent_path.rs
Normal file
@@ -0,0 +1,79 @@
use std::collections::HashSet;

use crate::{
    downloaders::defs::Torrent,
    models::{bangumi, subscribers},
    parsers::{bangumi_parser::parse_bangumi_season, defs::SEASON_REGEX},
    path::{VFSPath, VFSSubPathBuf},
};
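/// Partition a torrent's files into (media, subtitles) by file extension.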
pub fn check_files(info: &Torrent) -> (Vec<VFSSubPathBuf>, Vec<VFSSubPathBuf>) {
    let mut media_list = vec![];
    let mut subtitle_list = vec![];
    for f in info.iter_files() {
        let file_name = VFSSubPathBuf::from(f.get_name());
        let extension = file_name.extension().unwrap_or_default().to_lowercase();

        // `extension()` yields the extension without the leading dot
        match extension.as_str() {
            "mp4" | "mkv" => {
                media_list.push(file_name);
            }
            "ass" | "srt" => subtitle_list.push(file_name),
            _ => {}
        }
    }

    (media_list, subtitle_list)
}
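/// Walk a download save path and recover (bangumi_name, season): any path
/// component matching SEASON_REGEX sets the season, and the first component
/// not shared with the downloader's base path is taken as the bangumi name.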
pub fn path_to_bangumi<'a>(
    save_path: VFSPath<'a>,
    downloader_path: VFSPath<'a>,
) -> Option<(&'a str, i32)> {
    let downloader_parts = downloader_path
        .components()
        .map(|s| s.as_str())
        .collect::<HashSet<_>>();

    let mut season = None;
    let mut bangumi_name = None;
    for part in save_path.components().map(|s| s.as_str()) {
        if let Some(match_result) = SEASON_REGEX.captures(part) {
            season = Some(
                parse_bangumi_season(
                    match_result
                        .get(2)
                        .unwrap_or_else(|| unreachable!("must have a season"))
                        .as_str(),
                )
                .unwrap_or_else(|e| unreachable!("{}", e.to_string())),
            );
        } else if !downloader_parts.contains(part) {
            bangumi_name = Some(part);
        }
    }
    match (season, bangumi_name) {
        (Some(season), Some(bangumi_name)) => Some((bangumi_name, season)),
        _ => None,
    }
}

pub fn file_depth(path: &VFSPath<'_>) -> usize {
    path.components().count()
}

pub fn is_ep(path: &VFSPath<'_>) -> bool {
    file_depth(path) <= 2
}

pub fn gen_bangumi_sub_path(data: &bangumi::Model) -> VFSSubPathBuf {
    VFSSubPathBuf::from(data.official_title.to_string()).join(format!("Season {}", data.season))
}

pub fn rule_name(bgm: &bangumi::Model, conf: &subscribers::SubscriberBangumiConfig) -> String {
    if let (Some(true), Some(group_name)) = (conf.leading_group_tag, &bgm.group_name) {
        format!("[{}] {} S{}", group_name, bgm.official_title, bgm.season)
    } else {
        format!("{} S{}", bgm.official_title, bgm.season)
    }
}
113
crates/recorder/src/path/vfs_path.rs
Normal file
@@ -0,0 +1,113 @@
use std::path::PathBuf;

use lazy_static::lazy_static;
pub use uni_path::{Path as VFSSubPath, PathBuf as VFSSubPathBuf};

use crate::parsers::errors::ParseError;

const VFS_EMPTY_STR: &str = "";
lazy_static! {
    pub static ref VFS_SUB_ROOT_BUF: VFSSubPathBuf = VFSSubPathBuf::from("/");
    pub static ref VFS_SUB_ROOT: &'static VFSSubPath = &VFS_SUB_ROOT_BUF.as_path();
}

pub type VFSComponents<'a> = uni_path::Components<'a>;
pub type VFSComponent<'a> = uni_path::Component<'a>;

pub struct VFSPath<'a> {
    pub root: &'a str,
    pub sub: &'a VFSSubPath,
}

impl<'a> VFSPath<'a> {
    pub fn new(root: &'a str, sub: &'a VFSSubPath) -> VFSPath<'a> {
        Self { root, sub }
    }

    pub fn file_name(&self) -> Option<&str> {
        self.sub.file_name()
    }

    pub fn parent(&self) -> Option<VFSPath> {
        self.sub.parent().map(|p| Self::new(self.root, p))
    }

    pub fn dirname(&'a self) -> VFSPath<'a> {
        self.parent()
            .unwrap_or_else(|| Self::new(self.root, &VFS_SUB_ROOT))
    }

    pub fn basename(&self) -> &str {
        self.file_name().unwrap_or(VFS_EMPTY_STR)
    }

    pub fn components(&self) -> VFSComponents<'a> {
        self.sub.components()
    }

    pub fn join<P: AsRef<VFSSubPath>>(&self, path: P) -> VFSPathBuf {
        VFSPathBuf::new(self.root, self.sub.join(path))
    }

    pub fn extension(&self) -> Option<&str> {
        self.sub.extension()
    }

    pub fn extname(&self) -> &str {
        self.extension().unwrap_or_default()
    }

    pub fn to_std_path_buf(&self) -> PathBuf {
        PathBuf::from(self.root).join(self.sub.as_str())
    }
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct VFSPathBuf {
    pub root: String,
    pub sub: VFSSubPathBuf,
}

impl VFSPathBuf {
    pub fn new<R: Into<String>, S: Into<VFSSubPathBuf>>(root: R, sub: S) -> Self {
        Self {
            root: root.into(),
            sub: sub.into(),
        }
    }

    pub fn from_root(root: &str) -> Result<Self, ParseError> {
        Ok(Self {
            root: root.to_string(),
            sub: VFS_SUB_ROOT_BUF.clone(),
        })
    }

    pub fn as_path(&self) -> VFSPath {
        VFSPath::new(&self.root as &str, self.sub.as_path())
    }

    pub fn push<P: AsRef<VFSSubPath>>(&mut self, path: P) {
        self.sub.push(path);
    }

    pub fn pop(&mut self) -> bool {
        self.sub.pop()
    }

    pub fn set_extension<S: AsRef<str>>(&mut self, ext: S) {
        self.sub.set_extension(ext);
    }

    pub fn set_file_name<S: AsRef<str>>(&mut self, file_name: S) {
        self.sub.set_file_name(file_name);
    }
}

// `From` is preferred over `Into` (clippy::from_over_into); `Into` comes for
// free via the blanket impl.
impl From<VFSPathBuf> for PathBuf {
    fn from(value: VFSPathBuf) -> PathBuf {
        PathBuf::from(value.root).join(value.sub.as_str())
    }
}
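A small sketch of how the two path types compose — the root string and file names here are illustrative, and `recorder::path` is assumed to be the public module added above:

    use recorder::path::VFSPathBuf;

    fn demo() {
        let mut p = VFSPathBuf::new("/downloads", "葬送的芙莉莲/Season 1");
        p.push("episode-01.mkv");
        assert_eq!(p.as_path().basename(), "episode-01.mkv");
        assert_eq!(p.as_path().extname(), "mkv");
        // convert to a std path rooted at the downloader's base directory
        let _std: std::path::PathBuf = p.into();
    }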
@@ -1,31 +1,37 @@
use crate::downloader::bytes::download_bytes;
use crate::downloader::defs::BITTORRENT_MIME_TYPE;
use chrono::DateTime;
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone)]
use crate::downloaders::{bytes::download_bytes, defs::BITTORRENT_MIME_TYPE};

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriptionItem {
    pub title: String,
    pub home_page: Option<String>,
    pub homepage: Option<String>,
    pub url: String,
    pub content_length: Option<u64>,
    pub mime: String,
    pub pub_date: Option<String>,
    pub pub_date: Option<i64>,
}

impl MikanSubscriptionItem {
    pub fn from_rss_item(item: rss::Item) -> Option<Self> {
        let mime_match = item.enclosure()
        let mime_match = item
            .enclosure()
            .map(|x| x.mime_type == BITTORRENT_MIME_TYPE)
            .unwrap_or_default();
        if mime_match {
            let enclosure = item.enclosure.unwrap();
            let content_length = enclosure.length.parse().ok();

            Some(MikanSubscriptionItem {
                title: item.title.unwrap_or_default(),
                home_page: item.link,
                homepage: item.link,
                url: enclosure.url,
                content_length,
                content_length: enclosure.length.parse().ok(),
                mime: enclosure.mime_type,
                pub_date: item.pub_date,
                pub_date: item
                    .pub_date
                    .and_then(|s| DateTime::parse_from_rfc2822(&s).ok())
                    .map(|s| s.timestamp_millis()),
            })
        } else {
            None
@@ -37,12 +43,42 @@ pub struct MikanSubscriptionEngine;

impl MikanSubscriptionEngine {
    pub async fn subscription_items_from_rss_url(
        url: &str
    ) -> eyre::Result<impl Iterator<Item=MikanSubscriptionItem>> {
        url: &str,
    ) -> eyre::Result<impl Iterator<Item = MikanSubscriptionItem>> {
        let bytes = download_bytes(url).await?;

        let channel = rss::Channel::read_from(&bytes[..])?;

        Ok(channel.items.into_iter().flat_map(MikanSubscriptionItem::from_rss_item))
        Ok(channel
            .items
            .into_iter()
            .flat_map(MikanSubscriptionItem::from_rss_item))
    }
}

#[cfg(test)]
mod tests {
    use crate::downloaders::defs::BITTORRENT_MIME_TYPE;

    #[tokio::test]
    pub async fn test_mikan_subscription_items_from_rss_url() {
        let url = "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370";
        let items = super::MikanSubscriptionEngine::subscription_items_from_rss_url(url)
            .await
            .expect("should get subscription items from rss url")
            .collect::<Vec<_>>();

        let first_sub_item = items
            .first()
            .expect("mikan subscriptions should have at least one sub");

        assert_eq!(first_sub_item.mime, BITTORRENT_MIME_TYPE);
        let homepage = first_sub_item
            .homepage
            .as_ref()
            .expect("mikan subscription item should have home page");
        assert!(homepage.starts_with("https://mikanani.me/Home/Episode"));
        let name = first_sub_item.title.as_str();
        assert!(name.contains("葬送的芙莉莲"));
    }
}
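For reference, the `pub_date` change above stores an epoch-millisecond timestamp instead of the raw RFC 2822 string; a standalone sketch of the conversion (the sample date string is illustrative):

    use chrono::DateTime;

    fn rfc2822_to_millis(s: &str) -> Option<i64> {
        // e.g. "Sat, 13 Jan 2024 20:01:00 +0800" -> Some(1705147260000)
        DateTime::parse_from_rfc2822(s)
            .ok()
            .map(|dt| dt.timestamp_millis())
    }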
@@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize};

use crate::models::_entities::subscribers;
use crate::models::entities::subscribers;

#[derive(Debug, Deserialize, Serialize)]
pub struct CurrentResponse {
@@ -1,2 +1 @@
mod notes;
mod subscribers;
@@ -16,8 +16,7 @@
use std::collections::BTreeMap;

use loco_rs::{db, prelude::*};
use migration::Migrator;
use recorder::app::App;
use recorder::{app::App, migrations::Migrator};

#[allow(clippy::module_name_repetitions)]
pub struct SeedData;