feat: pull subscription

This commit is contained in:
master 2024-03-30 01:01:00 +08:00
parent 50243db5dc
commit 2f5b001bb6
52 changed files with 946 additions and 730 deletions

View File

@ -1,6 +1,6 @@
[alias] [alias]
recorder = "run -p recorder --bin recorder_cli -- --environment recorder.development" recorder = "run -p recorder --bin recorder_cli -- --environment development"
recorder-playground = "run -p recorder --example playground -- --environment recorder.development" recorder-playground = "run -p recorder --example playground -- --environment development"
[build] [build]
rustflags = ["-Zthreads=12", "-Clink-arg=-fuse-ld=lld"] rustflags = ["-Zthreads=12", "-Clink-arg=-fuse-ld=lld"]

4
.gitignore vendored
View File

@ -222,4 +222,6 @@ index.d.ts.map
/temp /temp
/rustc-ice-* /rustc-ice-*
/test.env /crates/recorder/config/test.local.env
**/*.local.yaml
**/*.local.env

1
Cargo.lock generated
View File

@ -3654,6 +3654,7 @@ version = "0.1.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"axum", "axum",
"base64 0.22.0",
"bytes", "bytes",
"chrono", "chrono",
"dateparser", "dateparser",

View File

@ -7,5 +7,5 @@ members = [
resolver = "2" resolver = "2"
[profile.dev] [profile.dev]
debug = 0 #debug = 0
codegen-backend = "cranelift" codegen-backend = "cranelift"

View File

@ -15,3 +15,5 @@ Cargo.lock
# MSVC Windows builds of rustc generate these, which store debugging information # MSVC Windows builds of rustc generate these, which store debugging information
*.pdb *.pdb
/data

View File

@ -62,6 +62,7 @@ oxilangtag = { version = "0.1.5", features = ["serde"] }
dateparser = "0.2.1" dateparser = "0.2.1"
dotenv = "0.15.0" dotenv = "0.15.0"
weak-table = "0.3.2" weak-table = "0.3.2"
base64 = "0.22.0"
[dev-dependencies] [dev-dependencies]
serial_test = "3.0.0" serial_test = "3.0.0"
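Note: base64 0.22 (the version pinned above, presumably for the new storage/DAL work in this commit) routes all encoding through the `Engine` trait rather than the old free functions. A minimal usage sketch of the pinned API:

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() {
    // 0.22 removed base64::encode/decode; everything goes through an Engine.
    let encoded = STANDARD.encode(b"konobangu");
    assert_eq!(encoded, "a29ub2Jhbmd1");
    let decoded = STANDARD.decode(&encoded).expect("valid base64");
    assert_eq!(decoded, b"konobangu");
}
```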

View File

@ -88,5 +88,6 @@ database:
# Recreating schema when the application loads. This is a dangerous operation, make sure you use this flag only in dev environments or test mode # Recreating schema when the application loads. This is a dangerous operation, make sure you use this flag only in dev environments or test mode
dangerously_recreate: false dangerously_recreate: false
tmdb: settings:
api_token: '{{ get_env(name="TMDB_API_TOKEN") }}' dal:
fs_root: "data/dal/"

View File

@ -0,0 +1 @@
TMDB_API_TOKEN=""

View File

@ -1,9 +1,7 @@
# Loco configuration file documentation
# Application logging configuration # Application logging configuration
logger: logger:
# Enable or disable logging. # Enable or disable logging.
enable: false enable: true
# Log level, options: trace, debug, info, warn or error. # Log level, options: trace, debug, info, warn or error.
level: debug level: debug
# Define the logging format. options: compact, pretty or json # Define the logging format. options: compact, pretty or json
@ -65,9 +63,9 @@ workers:
# Database Configuration # Database Configuration
database: database:
# Database connection URI # Database connection URI
uri: {{get_env(name="DATABASE_URL", default="postgres://loco:loco@localhost:5432/recorder_test")}} uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu") }}'
# When enabled, the sql query will be logged. # When enabled, the sql query will be logged.
enable_logging: false enable_logging: true
# Set the timeout duration when acquiring a connection. # Set the timeout duration when acquiring a connection.
connect_timeout: 500 connect_timeout: 500
# Set the idle duration before closing a connection. # Set the idle duration before closing a connection.
@ -81,12 +79,8 @@ database:
# Truncate the database when the application loads. This is a dangerous operation, make sure you use this flag only in dev environments or test mode # Truncate the database when the application loads. This is a dangerous operation, make sure you use this flag only in dev environments or test mode
dangerously_truncate: true dangerously_truncate: true
# Recreating schema when the application loads. This is a dangerous operation, make sure you use this flag only in dev environments or test mode # Recreating schema when the application loads. This is a dangerous operation, make sure you use this flag only in dev environments or test mode
dangerously_recreate: false dangerously_recreate: true
# Redis Configuration
redis:
# Redis connection URI
uri: {{get_env(name="REDIS_URL", default="redis://127.0.0.1")}}
# Dangerously flush all data in Redis on startup. This is a dangerous operation, make sure you use this flag only in dev environments or test mode
dangerously_flush: false
settings:
dal:
fs_root: "data/dal"
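Note: the templated `uri` falls back to a local default when `DATABASE_URL` is unset. For reference, the same lookup-with-default in plain Rust (a sketch of the semantics only, not how loco evaluates the template):

```rust
use std::env;

fn main() {
    // Mirrors get_env(name="DATABASE_URL", default=...) from the config template.
    let uri = env::var("DATABASE_URL").unwrap_or_else(|_| {
        "postgres://konobangu:konobangu@127.0.0.1:5432/konobangu".to_string()
    });
    println!("database uri: {uri}");
}
```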

View File

@ -2,6 +2,7 @@ use async_trait::async_trait;
use loco_rs::{ use loco_rs::{
app::Hooks, app::Hooks,
boot::{create_app, BootResult, StartMode}, boot::{create_app, BootResult, StartMode},
config::Config,
controller::AppRoutes, controller::AppRoutes,
db::truncate_table, db::truncate_table,
environment::Environment, environment::Environment,
@ -10,12 +11,14 @@ use loco_rs::{
worker::Processor, worker::Processor,
}; };
use sea_orm::prelude::*; use sea_orm::prelude::*;
use tracing_subscriber::EnvFilter;
use crate::{ use crate::{
controllers, controllers,
migrations::Migrator, migrations::Migrator,
models::{bangumi, downloaders, episodes, resources, subscribers, subscriptions}, models::{bangumi, downloaders, episodes, resources, subscribers, subscriptions},
storage::AppDalInitializer, storage::AppDalInitializer,
utils::cli::hack_env_to_fit_workspace,
workers::subscription::SubscriptionWorker, workers::subscription::SubscriptionWorker,
}; };
@ -38,6 +41,7 @@ impl Hooks for App {
} }
async fn boot(mode: StartMode, environment: &Environment) -> Result<BootResult> { async fn boot(mode: StartMode, environment: &Environment) -> Result<BootResult> {
hack_env_to_fit_workspace()?;
create_app::<Self, Migrator>(mode, environment).await create_app::<Self, Migrator>(mode, environment).await
} }
@ -56,7 +60,7 @@ impl Hooks for App {
async fn truncate(db: &DatabaseConnection) -> Result<()> { async fn truncate(db: &DatabaseConnection) -> Result<()> {
futures::try_join!( futures::try_join!(
subscribers::Entity::delete_many() subscribers::Entity::delete_many()
.filter(subscribers::Column::Id.ne(subscribers::ROOT_SUBSCRIBER_ID)) .filter(subscribers::Column::Pid.ne(subscribers::ROOT_SUBSCRIBER_NAME))
.exec(db), .exec(db),
truncate_table(db, subscriptions::Entity), truncate_table(db, subscriptions::Entity),
truncate_table(db, resources::Entity), truncate_table(db, resources::Entity),
@ -74,4 +78,46 @@ impl Hooks for App {
async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> { async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
Ok(vec![Box::new(AppDalInitializer)]) Ok(vec![Box::new(AppDalInitializer)])
} }
fn init_logger(app_config: &Config, _env: &Environment) -> Result<bool> {
let config = &app_config.logger;
if config.enable {
let filter = EnvFilter::try_from_default_env()
.or_else(|_| {
// RUST_LOG takes precedence when set; otherwise use the configured
// override filter, or fall back to the default whitelist (the most common case)
config.override_filter.as_ref().map_or_else(
|| {
EnvFilter::try_new(
["loco_rs", "sea_orm_migration", "tower_http", "sqlx::query"]
.iter()
.map(|m| format!("{}={}", m, config.level))
.chain(std::iter::once(format!(
"{}={}",
App::app_name(),
config.level
)))
.collect::<Vec<_>>()
.join(","),
)
},
EnvFilter::try_new,
)
})
.expect("logger initialization failed");
let builder = tracing_subscriber::FmtSubscriber::builder().with_env_filter(filter);
match serde_json::to_string(&config.format)
.expect("init logger format can serialized")
.trim_matches('"')
{
"pretty" => builder.pretty().init(),
"json" => builder.json().init(),
_ => builder.compact().init(),
};
}
Ok(true)
}
} }
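Note: the custom `init_logger` builds an `EnvFilter` from a whitelist of module targets when `RUST_LOG` is absent and no `override_filter` is configured. A standalone sketch of the directive string it assembles, with `recorder`/`debug` standing in for `App::app_name()` and `config.level`:

```rust
// Sketch of the whitelist directive string built in init_logger above.
fn whitelist_filter(app_name: &str, level: &str) -> String {
    ["loco_rs", "sea_orm_migration", "tower_http", "sqlx::query"]
        .iter()
        .map(|m| format!("{m}={level}"))
        .chain(std::iter::once(format!("{app_name}={level}")))
        .collect::<Vec<_>>()
        .join(",")
}

fn main() {
    // => loco_rs=debug,sea_orm_migration=debug,tower_http=debug,sqlx::query=debug,recorder=debug
    println!("{}", whitelist_filter("recorder", "debug"));
}
```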

View File

@ -1,8 +1,8 @@
use loco_rs::cli; use loco_rs::cli;
use recorder::migrations::Migrator; use recorder::{app::App, migrations::Migrator, utils::cli::hack_env_to_fit_workspace};
use recorder::app::App;
#[tokio::main] #[tokio::main]
async fn main() -> eyre::Result<()> { async fn main() -> eyre::Result<()> {
hack_env_to_fit_workspace()?;
cli::main::<App, Migrator>().await cli::main::<App, Migrator>().await
} }

View File

@ -1,6 +1,6 @@
use loco_rs::prelude::*; use loco_rs::prelude::*;
use crate::{models::entities::subscribers, views::subscribers::CurrentResponse}; use crate::{models::subscribers, views::subscribers::CurrentResponse};
async fn current(State(ctx): State<AppContext>) -> Result<Json<CurrentResponse>> { async fn current(State(ctx): State<AppContext>) -> Result<Json<CurrentResponse>> {
let subscriber = subscribers::Model::find_root(&ctx.db).await?; let subscriber = subscribers::Model::find_root(&ctx.db).await?;

View File

@ -23,7 +23,7 @@ use crate::{
defs::{QbitTorrent, QbitTorrentContent}, defs::{QbitTorrent, QbitTorrentContent},
torrent::{Torrent, TorrentContent, TorrentDownloader, TorrentFilter, TorrentSource}, torrent::{Torrent, TorrentContent, TorrentDownloader, TorrentFilter, TorrentSource},
}, },
models::{entities::downloaders, prelude::DownloaderCategory}, models::{downloaders, prelude::DownloaderCategory},
}; };
pub struct SyncDataCache { pub struct SyncDataCache {

View File

@ -17,3 +17,4 @@ pub mod workers;
pub mod i18n; pub mod i18n;
pub mod subscribe; pub mod subscribe;
pub mod utils;

View File

@ -1,6 +1,7 @@
use std::{collections::HashSet, fmt::Display}; use std::collections::HashSet;
use sea_orm::{DeriveIden, Statement}; use itertools::Itertools;
use sea_orm::{ActiveEnum, DeriveIden, Statement};
use sea_orm_migration::prelude::{extension::postgres::IntoTypeRef, *}; use sea_orm_migration::prelude::{extension::postgres::IntoTypeRef, *};
use crate::migrations::extension::postgres::Type; use crate::migrations::extension::postgres::Type;
@ -71,9 +72,10 @@ pub enum Episodes {
SeasonRaw, SeasonRaw,
Fansub, Fansub,
PosterLink, PosterLink,
HomePage, Homepage,
Subtitle, Subtitle,
Source, Source,
EpIndex,
} }
#[derive(DeriveIden)] #[derive(DeriveIden)]
@ -86,9 +88,10 @@ pub enum Resources {
Status, Status,
CurrSize, CurrSize,
AllSize, AllSize,
Mime, Category,
Url, Url,
HomePage, Homepage,
SavePath,
} }
#[derive(DeriveIden)] #[derive(DeriveIden)]
@ -178,7 +181,7 @@ pub trait CustomSchemaManagerExt {
async fn create_postgres_enum_for_active_enum< async fn create_postgres_enum_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone, E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send, T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
I: IntoIterator<Item = T> + Send, I: IntoIterator<Item = T> + Send,
>( >(
&self, &self,
@ -188,7 +191,7 @@ pub trait CustomSchemaManagerExt {
async fn add_postgres_enum_values_for_active_enum< async fn add_postgres_enum_values_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone, E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send, T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
I: IntoIterator<Item = T> + Send, I: IntoIterator<Item = T> + Send,
>( >(
&self, &self,
@ -210,6 +213,48 @@ pub trait CustomSchemaManagerExt {
&self, &self,
enum_name: E, enum_name: E,
) -> Result<HashSet<String>, DbErr>; ) -> Result<HashSet<String>, DbErr>;
async fn create_convention_index<
T: IntoTableRef + Send,
I: IntoIterator<Item = C> + Send,
C: IntoIndexColumn + Send,
>(
&self,
table: T,
columns: I,
) -> Result<(), DbErr>;
fn build_convention_index<
T: IntoTableRef + Send,
I: IntoIterator<Item = C> + Send,
C: IntoIndexColumn + Send,
>(
&self,
table: T,
columns: I,
) -> IndexCreateStatement {
let table = table.into_table_ref();
let table_name = match &table {
TableRef::Table(s) => s.to_string(),
_ => panic!("unsupported table ref"),
};
let columns = columns
.into_iter()
.map(|c| c.into_index_column())
.collect_vec();
let mut stmt = Index::create();
stmt.table(table);
for c in columns {
stmt.col(c);
}
let index_name = format!(
"idx_{}_{}",
table_name,
stmt.get_index_spec().get_column_names().join("-")
);
stmt.name(&index_name);
stmt
}
} }
#[async_trait::async_trait] #[async_trait::async_trait]
@ -217,7 +262,8 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
async fn create_postgres_auto_update_ts_fn(&self, col_name: &str) -> Result<(), DbErr> { async fn create_postgres_auto_update_ts_fn(&self, col_name: &str) -> Result<(), DbErr> {
let sql = format!( let sql = format!(
"CREATE OR REPLACE FUNCTION update_{col_name}_column() RETURNS TRIGGER AS $$ BEGIN \ "CREATE OR REPLACE FUNCTION update_{col_name}_column() RETURNS TRIGGER AS $$ BEGIN \
NEW.{col_name} = current_timestamp; RETURN NEW; END; $$ language 'plpgsql';" NEW.\"{col_name}\" = current_timestamp; RETURN NEW; END; $$ language 'plpgsql';",
col_name = col_name
); );
self.get_connection() self.get_connection()
@ -266,7 +312,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
async fn create_postgres_enum_for_active_enum< async fn create_postgres_enum_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone, E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send, T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
I: IntoIterator<Item = T> + Send, I: IntoIterator<Item = T> + Send,
>( >(
&self, &self,
@ -277,7 +323,10 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
if !existed { if !existed {
let idents = values let idents = values
.into_iter() .into_iter()
.map(|v| Alias::new(v.to_string())) .map(|v| {
let v = v.to_value();
Alias::new(v)
})
.collect::<Vec<_>>(); .collect::<Vec<_>>();
self.create_type(Type::create().as_enum(enum_name).values(idents).to_owned()) self.create_type(Type::create().as_enum(enum_name).values(idents).to_owned())
.await?; .await?;
@ -290,7 +339,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
async fn add_postgres_enum_values_for_active_enum< async fn add_postgres_enum_values_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone, E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send, T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
I: IntoIterator<Item = T> + Send, I: IntoIterator<Item = T> + Send,
>( >(
&self, &self,
@ -300,7 +349,8 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
let exists_values = self.get_postgres_enum_values(enum_name.clone()).await?; let exists_values = self.get_postgres_enum_values(enum_name.clone()).await?;
let to_add_values = values let to_add_values = values
.into_iter() .into_iter()
.filter(|v| !exists_values.contains(&v.to_string())) .map(|v| v.to_value())
.filter(|v| !exists_values.contains(v))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if to_add_values.is_empty() { if to_add_values.is_empty() {
@ -310,6 +360,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
let mut type_alter = Type::alter().name(enum_name); let mut type_alter = Type::alter().name(enum_name);
for v in to_add_values { for v in to_add_values {
let v: Value = v.into();
type_alter = type_alter.add_value(Alias::new(v.to_string())); type_alter = type_alter.add_value(Alias::new(v.to_string()));
} }
@ -345,8 +396,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
) -> Result<HashSet<String>, DbErr> { ) -> Result<HashSet<String>, DbErr> {
let enum_name: String = enum_name.into_iden().to_string(); let enum_name: String = enum_name.into_iden().to_string();
let sql = format!( let sql = format!(
"SELECT pg_enum.enumlabel AS enumlabel FROM pg_type JOIN pg_enum ON pg_enum.enumtypid \ r#"SELECT pg_enum.enumlabel AS enumlabel FROM pg_type JOIN pg_enum ON pg_enum.enumtypid = pg_type.oid WHERE pg_type.typname = '{enum_name}';"#
= pg_type.oid WHERE pg_type.typname = '{enum_name}';"
); );
let results = self let results = self
@ -361,4 +411,17 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
Ok(items) Ok(items)
} }
async fn create_convention_index<
T: IntoTableRef + Send,
I: IntoIterator<Item = C> + Send,
C: IntoIndexColumn + Send,
>(
&self,
table: T,
columns: I,
) -> Result<(), DbErr> {
let stmt = self.build_convention_index(table, columns);
self.create_index(stmt.to_owned()).await
}
} }
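Note: `build_convention_index` derives the index name from the table and column identifiers, so callers never hand-write names like `idx_bangumi_official_title`. A standalone sketch of the naming rule, with plain strings standing in for the sea-orm idents:

```rust
// Naming convention only; the real code reads these from the IndexCreateStatement.
fn convention_index_name(table: &str, columns: &[&str]) -> String {
    format!("idx_{}_{}", table, columns.join("-"))
}

fn main() {
    // e.g. the composite unique index created in the bangumi migration below
    assert_eq!(
        convention_index_name("bangumi", &["official_title", "fansub", "season"]),
        "idx_bangumi_official_title-fansub-season"
    );
}
```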

View File

@ -4,10 +4,7 @@ use sea_orm_migration::{prelude::*, schema::*};
use super::defs::{ use super::defs::{
Bangumi, CustomSchemaManagerExt, Episodes, GeneralIds, Subscribers, Subscriptions, Bangumi, CustomSchemaManagerExt, Episodes, GeneralIds, Subscribers, Subscriptions,
}; };
use crate::models::{ use crate::models::{subscribers::ROOT_SUBSCRIBER_NAME, subscriptions};
subscribers::{ROOT_SUBSCRIBER_ID, ROOT_SUBSCRIBER_NAME},
subscriptions,
};
#[derive(DeriveMigrationName)] #[derive(DeriveMigrationName)]
pub struct Migration; pub struct Migration;
@ -37,19 +34,15 @@ impl MigrationTrait for Migration {
let insert = Query::insert() let insert = Query::insert()
.into_table(Subscribers::Table) .into_table(Subscribers::Table)
.columns([Subscribers::Id, Subscribers::Pid, Subscribers::DisplayName]) .columns([Subscribers::Pid, Subscribers::DisplayName])
.values_panic([ .values_panic([ROOT_SUBSCRIBER_NAME.into(), ROOT_SUBSCRIBER_NAME.into()])
ROOT_SUBSCRIBER_ID.into(),
ROOT_SUBSCRIBER_NAME.into(),
ROOT_SUBSCRIBER_NAME.into(),
])
.to_owned(); .to_owned();
manager.exec_stmt(insert).await?; manager.exec_stmt(insert).await?;
manager manager
.create_postgres_enum_for_active_enum( .create_postgres_enum_for_active_enum(
subscriptions::SubscriptionCategoryEnum, subscriptions::SubscriptionCategoryEnum,
&[ [
subscriptions::SubscriptionCategory::Mikan, subscriptions::SubscriptionCategory::Mikan,
subscriptions::SubscriptionCategory::Tmdb, subscriptions::SubscriptionCategory::Tmdb,
], ],
@ -88,7 +81,6 @@ impl MigrationTrait for Migration {
GeneralIds::UpdatedAt, GeneralIds::UpdatedAt,
) )
.await?; .await?;
manager manager
.create_table( .create_table(
table_auto(Bangumi::Table) table_auto(Bangumi::Table)
@ -112,30 +104,23 @@ impl MigrationTrait for Migration {
.on_delete(ForeignKeyAction::Cascade), .on_delete(ForeignKeyAction::Cascade),
) )
.index( .index(
Index::create() manager
.name("idx_bangumi_official_title") .build_convention_index(
.table(Bangumi::Table) Bangumi::Table,
.col(Bangumi::OfficialTitle) [Bangumi::OfficialTitle, Bangumi::Fansub, Bangumi::Season],
.unique(), )
)
.index(
Index::create()
.name("idx_bangumi_fansub")
.table(Bangumi::Table)
.col(Bangumi::Fansub)
.unique(),
)
.index(
Index::create()
.name("idx_bangumi_display_name")
.table(Bangumi::Table)
.col(Bangumi::DisplayName)
.unique(), .unique(),
) )
.to_owned(), .to_owned(),
) )
.await?; .await?;
futures::try_join!(
manager.create_convention_index(Bangumi::Table, [Bangumi::Fansub]),
manager.create_convention_index(Bangumi::Table, [Bangumi::Season]),
manager.create_convention_index(Bangumi::Table, [Bangumi::OfficialTitle]),
)?;
manager manager
.create_postgres_auto_update_ts_trigger_for_col(Bangumi::Table, GeneralIds::UpdatedAt) .create_postgres_auto_update_ts_trigger_for_col(Bangumi::Table, GeneralIds::UpdatedAt)
.await?; .await?;
@ -154,16 +139,16 @@ impl MigrationTrait for Migration {
.col(text_null(Episodes::SNameJp)) .col(text_null(Episodes::SNameJp))
.col(text_null(Episodes::SNameEn)) .col(text_null(Episodes::SNameEn))
.col(integer(Episodes::BangumiId)) .col(integer(Episodes::BangumiId))
.col(integer(Episodes::ResourceId))
.col(text_null(Episodes::SavePath)) .col(text_null(Episodes::SavePath))
.col(string_null(Episodes::Resolution)) .col(string_null(Episodes::Resolution))
.col(integer(Episodes::Season)) .col(integer(Episodes::Season))
.col(string_null(Episodes::SeasonRaw)) .col(string_null(Episodes::SeasonRaw))
.col(string_null(Episodes::Fansub)) .col(string_null(Episodes::Fansub))
.col(text_null(Episodes::PosterLink)) .col(text_null(Episodes::PosterLink))
.col(text_null(Episodes::HomePage)) .col(text_null(Episodes::Homepage))
.col(jsonb_null(Episodes::Subtitle)) .col(array_null(Episodes::Subtitle, ColumnType::Text))
.col(text_null(Episodes::Source)) .col(text_null(Episodes::Source))
.col(unsigned(Episodes::EpIndex))
.foreign_key( .foreign_key(
ForeignKey::create() ForeignKey::create()
.name("fk_episode_bangumi_id") .name("fk_episode_bangumi_id")
@ -172,28 +157,17 @@ impl MigrationTrait for Migration {
.on_update(ForeignKeyAction::Restrict) .on_update(ForeignKeyAction::Restrict)
.on_delete(ForeignKeyAction::Cascade), .on_delete(ForeignKeyAction::Cascade),
) )
.index(
Index::create()
.name("idx_episode_official_title")
.table(Episodes::Table)
.col(Episodes::OfficialTitle),
)
.index(
Index::create()
.name("idx_episode_fansub")
.table(Episodes::Table)
.col(Episodes::Fansub),
)
.index(
Index::create()
.name("idx_episode_display_name")
.table(Episodes::Table)
.col(Episodes::DisplayName),
)
.to_owned(), .to_owned(),
) )
.await?; .await?;
futures::try_join!(
manager.create_convention_index(Episodes::Table, [Episodes::OfficialTitle]),
manager.create_convention_index(Episodes::Table, [Episodes::Fansub]),
manager.create_convention_index(Episodes::Table, [Episodes::Season]),
manager.create_convention_index(Episodes::Table, [Episodes::EpIndex]),
)?;
manager manager
.create_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt) .create_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt)
.await?; .await?;
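Note: one schema change worth calling out is `Episodes::Subtitle` moving from `jsonb` to a native Postgres `text[]` via `array_null`, which maps directly onto the model's `Option<Vec<String>>`. A sketch of the equivalent column definition in plain sea-query (assuming sea-query's `ColumnDef::array`; output shown approximately):

```rust
use sea_query::{ColumnDef, ColumnType, Iden, PostgresQueryBuilder, Table};

#[derive(Iden)]
enum Episodes {
    Table,
    Subtitle,
}

fn main() {
    // array_null(Episodes::Subtitle, ColumnType::Text) boils down to a
    // nullable text[] column like this one.
    let stmt = Table::create()
        .table(Episodes::Table)
        .col(ColumnDef::new(Episodes::Subtitle).array(ColumnType::Text).null())
        .to_owned();
    // Roughly: CREATE TABLE "episodes" ( "subtitle" text[] NULL )
    println!("{}", stmt.to_string(PostgresQueryBuilder));
}
```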

View File

@ -3,7 +3,7 @@ use sea_orm_migration::{prelude::*, schema::*};
use super::defs::*; use super::defs::*;
use crate::models::resources::{ use crate::models::resources::{
DownloadStatus, DownloadStatusEnum, ResourceMime, ResourceMimeEnum, DownloadStatus, DownloadStatusEnum, ResourceCategory, ResourceCategoryEnum,
}; };
#[derive(DeriveMigrationName)] #[derive(DeriveMigrationName)]
@ -14,15 +14,19 @@ impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager manager
.create_postgres_enum_for_active_enum( .create_postgres_enum_for_active_enum(
ResourceMimeEnum, ResourceCategoryEnum,
&[ResourceMime::OctetStream, ResourceMime::BitTorrent], [
ResourceCategory::BitTorrent,
ResourceCategory::OctetStream,
ResourceCategory::Poster,
],
) )
.await?; .await?;
manager manager
.create_postgres_enum_for_active_enum( .create_postgres_enum_for_active_enum(
DownloadStatusEnum, DownloadStatusEnum,
&[ [
DownloadStatus::Pending, DownloadStatus::Pending,
DownloadStatus::Downloading, DownloadStatus::Downloading,
DownloadStatus::Completed, DownloadStatus::Completed,
@ -43,17 +47,18 @@ impl MigrationTrait for Migration {
.col(enumeration( .col(enumeration(
Resources::Status, Resources::Status,
DownloadStatusEnum, DownloadStatusEnum,
ResourceMime::iden_values(), DownloadStatus::iden_values(),
)) ))
.col(enumeration( .col(enumeration(
Resources::Mime, Resources::Category,
ResourceMimeEnum, ResourceCategoryEnum,
ResourceMime::iden_values(), ResourceCategory::iden_values(),
)) ))
.col(big_unsigned_null(Resources::AllSize)) .col(big_integer_null(Resources::AllSize))
.col(big_unsigned_null(Resources::CurrSize)) .col(big_integer_null(Resources::CurrSize))
.col(text(Resources::Url)) .col(text(Resources::Url))
.col(text_null(Resources::HomePage)) .col(text_null(Resources::Homepage))
.col(text_null(Resources::SavePath))
.foreign_key( .foreign_key(
ForeignKey::create() ForeignKey::create()
.name("fk_download_subscription_id") .name("fk_download_subscription_id")
@ -63,21 +68,18 @@ impl MigrationTrait for Migration {
.on_delete(ForeignKeyAction::Cascade), .on_delete(ForeignKeyAction::Cascade),
) )
.index( .index(
Index::create() manager
.name("idx_download_url") .build_convention_index(Resources::Table, [Resources::Url])
.table(Resources::Table) .unique(),
.col(Resources::Url),
)
.index(
Index::create()
.name("idx_download_home_page")
.table(Resources::Table)
.col(Resources::HomePage),
) )
.to_owned(), .to_owned(),
) )
.await?; .await?;
futures::try_join!(
manager.create_convention_index(Resources::Table, [Resources::Homepage]),
)?;
manager manager
.create_postgres_auto_update_ts_fn_for_col(GeneralIds::UpdatedAt) .create_postgres_auto_update_ts_fn_for_col(GeneralIds::UpdatedAt)
.await?; .await?;
@ -124,7 +126,7 @@ impl MigrationTrait for Migration {
.await?; .await?;
manager manager
.drop_postgres_enum_for_active_enum(ResourceMimeEnum) .drop_postgres_enum_for_active_enum(ResourceCategoryEnum)
.await?; .await?;
manager manager
.drop_postgres_enum_for_active_enum(DownloadStatusEnum) .drop_postgres_enum_for_active_enum(DownloadStatusEnum)
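Note: with `create_postgres_enum_for_active_enum` now bounded by `ActiveEnum` instead of `Display`, the Postgres enum labels come from each variant's `string_value` via `to_value()`. A standalone sketch with a re-declared copy of the category enum (not the crate's type):

```rust
use sea_orm::entity::prelude::*;
use sea_orm::ActiveEnum;

#[derive(Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "resource_category")]
pub enum ResourceCategory {
    #[sea_orm(string_value = "octet-stream")]
    OctetStream,
    #[sea_orm(string_value = "bittorrent")]
    BitTorrent,
    #[sea_orm(string_value = "poster")]
    Poster,
}

fn main() {
    // to_value() yields the exact label stored in the Postgres enum type.
    assert_eq!(ResourceCategory::Poster.to_value(), "poster");
    assert_eq!(ResourceCategory::BitTorrent.to_value(), "bittorrent");
}
```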

View File

@ -2,7 +2,7 @@ use sea_orm_migration::{prelude::*, schema::*};
use crate::{ use crate::{
migrations::defs::{CustomSchemaManagerExt, Downloaders, GeneralIds, Subscribers}, migrations::defs::{CustomSchemaManagerExt, Downloaders, GeneralIds, Subscribers},
models::{downloaders::DownloaderCategoryEnum, prelude::DownloaderCategory}, models::downloaders::{DownloaderCategory, DownloaderCategoryEnum},
}; };
#[derive(DeriveMigrationName)] #[derive(DeriveMigrationName)]
@ -14,7 +14,7 @@ impl MigrationTrait for Migration {
manager manager
.create_postgres_enum_for_active_enum( .create_postgres_enum_for_active_enum(
DownloaderCategoryEnum, DownloaderCategoryEnum,
&[DownloaderCategory::QBittorrent], [DownloaderCategory::QBittorrent],
) )
.await?; .await?;

View File

@ -2,9 +2,107 @@ use std::collections::HashSet;
use itertools::Itertools; use itertools::Itertools;
use regex::Regex; use regex::Regex;
use sea_orm::entity::prelude::*; use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
pub use super::entities::bangumi::*; #[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "bangumi_distribution"
)]
#[serde(rename_all = "snake_case")]
pub enum BangumiDistribution {
#[sea_orm(string_value = "movie")]
Movie,
#[sea_orm(string_value = "ova")]
Ova,
#[sea_orm(string_value = "oad")]
Oad,
#[sea_orm(string_value = "sp")]
Sp,
#[sea_orm(string_value = "ex")]
Ex,
#[sea_orm(string_value = "tv")]
Tv,
#[sea_orm(string_value = "unknown")]
Unknown,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
#[serde(rename_all = "snake_case")]
pub enum BangumiRenameMethod {
Pn,
Advance,
SubtitlePn,
SubtitleAdvance,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscribeBangumiConfigOverride {
pub leading_fansub_tag: Option<bool>,
pub complete_history_episodes: Option<bool>,
pub rename_method: Option<BangumiRenameMethod>,
pub remove_bad_torrent: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct BangumiFilter {
pub plaintext_filters: Option<Vec<String>>,
pub regex_filters: Option<Vec<String>>,
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct BangumiUniqueKey {
pub official_title: String,
pub season: i32,
pub fansub: Option<String>,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "bangumi")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub subscription_id: i32,
pub display_name: String,
pub official_title: String,
pub fansub: Option<String>,
pub season: i32,
pub filter: Option<BangumiFilter>,
pub poster_link: Option<String>,
pub save_path: Option<String>,
pub last_ep: i32,
pub bangumi_conf_override: Option<SubscribeBangumiConfigOverride>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
#[async_trait::async_trait] #[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {}
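Note: `BangumiFilter` and `SubscribeBangumiConfigOverride` derive `FromJsonQueryResult`, so they are stored as JSON columns rather than separate tables. A quick serde round-trip sketch of the filter shape (plain serde_json, mirroring the fields above; the sample value is illustrative):

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct BangumiFilter {
    plaintext_filters: Option<Vec<String>>,
    regex_filters: Option<Vec<String>>,
}

fn main() -> serde_json::Result<()> {
    let filter = BangumiFilter {
        plaintext_filters: Some(vec!["CHS".into()]),
        regex_filters: None,
    };
    // This is the document that lands in the bangumi `filter` column.
    let json = serde_json::to_string(&filter)?;
    assert_eq!(json, r#"{"plaintext_filters":["CHS"],"regex_filters":null}"#);
    assert_eq!(serde_json::from_str::<BangumiFilter>(&json)?, filter);
    Ok(())
}
```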

View File

@ -1,7 +1,51 @@
use sea_orm::prelude::*; use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use url::Url; use url::Url;
pub use crate::models::entities::downloaders::*; #[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "downloader_category"
)]
#[serde(rename_all = "snake_case")]
pub enum DownloaderCategory {
#[sea_orm(string_value = "qbittorrent")]
QBittorrent,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloaders")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub category: DownloaderCategory,
pub endpoint: String,
pub password: String,
pub username: String,
pub subscriber_id: i32,
pub save_path: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
#[async_trait::async_trait] #[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {}

View File

@ -1,101 +0,0 @@
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "bangumi_distribution"
)]
#[serde(rename_all = "snake_case")]
pub enum BangumiDistribution {
#[sea_orm(string_value = "movie")]
Movie,
#[sea_orm(string_value = "ova")]
Ova,
#[sea_orm(string_value = "oad")]
Oad,
#[sea_orm(string_value = "sp")]
Sp,
#[sea_orm(string_value = "ex")]
Ex,
#[sea_orm(string_value = "tv")]
Tv,
#[sea_orm(string_value = "unknown")]
Unknown,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
#[serde(rename_all = "snake_case")]
pub enum BangumiRenameMethod {
Pn,
Advance,
SubtitlePn,
SubtitleAdvance,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscribeBangumiConfigOverride {
pub leading_fansub_tag: Option<bool>,
pub complete_history_episodes: Option<bool>,
pub rename_method: Option<BangumiRenameMethod>,
pub remove_bad_torrent: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct BangumiFilter {
pub plaintext_filters: Option<Vec<String>>,
pub regex_filters: Option<Vec<String>>,
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct BangumiUniqueKey {
pub official_title: String,
pub season: u32,
pub fansub: Option<String>,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "bangumi")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub subscription_id: i32,
pub display_name: String,
pub official_title: String,
pub fansub: Option<String>,
pub season: u32,
pub filter: Option<BangumiFilter>,
pub poster_link: Option<String>,
pub save_path: Option<String>,
pub last_ep: u32,
pub bangumi_conf_override: Option<SubscribeBangumiConfigOverride>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}

View File

@ -1,47 +0,0 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "downloader_category"
)]
#[serde(rename_all = "snake_case")]
pub enum DownloaderCategory {
#[sea_orm(string_value = "qbittorrent")]
QBittorrent,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloaders")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub category: DownloaderCategory,
pub endpoint: String,
pub password: String,
pub username: String,
pub subscriber_id: i32,
pub save_path: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}

View File

@ -1,61 +0,0 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "episodes")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub origin_title: String,
pub official_title: String,
pub display_name: String,
pub name_zh: Option<String>,
pub name_jp: Option<String>,
pub name_en: Option<String>,
pub s_name_zh: Option<String>,
pub s_name_jp: Option<String>,
pub s_name_en: Option<String>,
pub bangumi_id: i32,
pub resource_id: Option<i32>,
pub save_path: Option<String>,
pub resolution: Option<String>,
pub season: u32,
pub season_raw: Option<String>,
pub fansub: Option<String>,
pub poster_link: Option<String>,
pub home_page: Option<String>,
pub subtitle: Option<Vec<String>>,
pub source: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::bangumi::Entity",
from = "Column::BangumiId",
to = "super::bangumi::Column::Id"
)]
Bangumi,
#[sea_orm(
belongs_to = "super::resources::Entity",
from = "Column::ResourceId",
to = "super::resources::Column::Id"
)]
Resources,
}
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}
impl Related<super::resources::Entity> for Entity {
fn to() -> RelationDef {
Relation::Resources.def()
}
}

View File

@ -1,7 +0,0 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.4
pub mod bangumi;
pub mod downloaders;
pub mod episodes;
pub mod resources;
pub mod subscribers;
pub mod subscriptions;

View File

@ -1,77 +0,0 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_status")]
#[serde(rename_all = "snake_case")]
pub enum DownloadStatus {
#[sea_orm(string_value = "pending")]
Pending,
#[sea_orm(string_value = "downloading")]
Downloading,
#[sea_orm(string_value = "paused")]
Paused,
#[sea_orm(string_value = "completed")]
Completed,
#[sea_orm(string_value = "failed")]
Failed,
#[sea_orm(string_value = "deleted")]
Deleted,
}
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "resource_mime")]
pub enum ResourceMime {
#[sea_orm(string_value = "application/octet-stream")]
#[serde(rename = "application/octet-stream")]
OctetStream,
#[sea_orm(string_value = "application/x-bittorrent")]
#[serde(rename = "application/x-bittorrent")]
BitTorrent,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "resources")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub origin_title: String,
pub display_name: String,
pub subscription_id: i32,
pub status: DownloadStatus,
pub mime: ResourceMime,
pub url: String,
pub all_size: Option<u64>,
pub curr_size: Option<u64>,
pub homepage: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}

View File

@ -1,63 +0,0 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
use super::bangumi::BangumiRenameMethod;
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscribeBangumiConfig {
pub leading_fansub_tag: bool,
pub complete_history_episodes: bool,
pub rename_method: BangumiRenameMethod,
pub remove_bad_torrent: bool,
}
impl Default for SubscribeBangumiConfig {
fn default() -> Self {
Self {
leading_fansub_tag: false,
complete_history_episodes: false,
rename_method: BangumiRenameMethod::Pn,
remove_bad_torrent: false,
}
}
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscribers")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
#[sea_orm(unique)]
pub pid: String,
pub display_name: String,
pub downloader_id: Option<i32>,
pub bangumi_conf: SubscribeBangumiConfig,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(
belongs_to = "super::downloaders::Entity",
from = "Column::DownloaderId",
to = "super::downloaders::Column::Id"
)]
Downloader,
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::downloaders::Entity> for Entity {
fn to() -> RelationDef {
Relation::Downloader.def()
}
}

View File

@ -1,59 +0,0 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
#[sea_orm(string_value = "mikan")]
Mikan,
#[sea_orm(string_value = "tmdb")]
Tmdb,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
#[sea_orm(column_type = "Timestamp")]
pub created_at: DateTime,
#[sea_orm(column_type = "Timestamp")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub display_name: String,
pub subscriber_id: i32,
pub category: SubscriptionCategory,
pub source_url: String,
pub aggregate: bool,
pub enabled: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
#[sea_orm(has_many = "super::bangumi::Entity")]
Bangumi,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}

View File

@ -1,11 +1,69 @@
use sea_orm::{entity::prelude::*, ActiveValue}; use sea_orm::{entity::prelude::*, ActiveValue};
use serde::{Deserialize, Serialize};
pub use super::entities::episodes::*;
use crate::{ use crate::{
models::resources, models::resources,
parsers::{mikan::MikanEpisodeMeta, raw::RawEpisodeMeta}, parsers::{mikan::MikanEpisodeMeta, raw::RawEpisodeMeta},
}; };
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "episodes")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub origin_title: String,
pub official_title: String,
pub display_name: String,
pub name_zh: Option<String>,
pub name_jp: Option<String>,
pub name_en: Option<String>,
pub s_name_zh: Option<String>,
pub s_name_jp: Option<String>,
pub s_name_en: Option<String>,
pub bangumi_id: i32,
pub resource_id: Option<i32>,
pub save_path: Option<String>,
pub resolution: Option<String>,
pub season: i32,
pub season_raw: Option<String>,
pub fansub: Option<String>,
pub poster_link: Option<String>,
pub homepage: Option<String>,
pub subtitle: Option<Vec<String>>,
pub source: Option<String>,
pub ep_index: i32,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::bangumi::Entity",
from = "Column::BangumiId",
to = "super::bangumi::Column::Id"
)]
Bangumi,
#[sea_orm(
belongs_to = "super::resources::Entity",
from = "Column::ResourceId",
to = "super::resources::Column::Id"
)]
Resources,
}
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}
impl Related<super::resources::Entity> for Entity {
fn to() -> RelationDef {
Relation::Resources.def()
}
}
#[async_trait::async_trait] #[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {}
@ -34,9 +92,10 @@ impl ActiveModel {
season_raw: ActiveValue::Set(raw_meta.season_raw), season_raw: ActiveValue::Set(raw_meta.season_raw),
fansub: ActiveValue::Set(raw_meta.fansub), fansub: ActiveValue::Set(raw_meta.fansub),
poster_link: ActiveValue::Set(mikan_poster), poster_link: ActiveValue::Set(mikan_poster),
home_page: ActiveValue::Set(resource.homepage), homepage: ActiveValue::Set(resource.homepage),
subtitle: ActiveValue::Set(raw_meta.sub), subtitle: ActiveValue::Set(raw_meta.sub),
source: ActiveValue::Set(raw_meta.source), source: ActiveValue::Set(raw_meta.source),
ep_index: ActiveValue::Set(raw_meta.episode_index),
..Default::default() ..Default::default()
} }
} }

View File

@ -1,7 +1,5 @@
pub mod bangumi; pub mod bangumi;
pub(crate) mod db_utils;
pub mod downloaders; pub mod downloaders;
pub mod entities;
pub mod episodes; pub mod episodes;
pub mod notifications; pub mod notifications;
pub mod prelude; pub mod prelude;

View File

@ -4,6 +4,6 @@ use serde::{Deserialize, Serialize};
pub struct Notification { pub struct Notification {
official_title: String, official_title: String,
season: i32, season: i32,
episode_size: u32, episode_size: i32,
poster_url: Option<String>, poster_url: Option<String>,
} }

View File

@ -2,7 +2,7 @@ pub use super::{
bangumi::{self, Entity as Bangumi}, bangumi::{self, Entity as Bangumi},
downloaders::{self, DownloaderCategory, Entity as Downloader}, downloaders::{self, DownloaderCategory, Entity as Downloader},
episodes::{self, Entity as Episode}, episodes::{self, Entity as Episode},
resources::{self, DownloadStatus, Entity as Download, ResourceMime}, resources::{self, DownloadStatus, Entity as Download, ResourceCategory},
subscribers::{self, Entity as Subscriber}, subscribers::{self, Entity as Subscriber},
subscriptions::{self, Entity as Subscription, SubscriptionCategory}, subscriptions::{self, Entity as Subscription, SubscriptionCategory},
}; };

View File

@ -1,20 +1,108 @@
use sea_orm::{prelude::*, ActiveValue}; use std::future::Future;
pub use crate::models::entities::resources::*; use bytes::Bytes;
use crate::parsers::mikan::MikanRssItem; use loco_rs::app::AppContext;
use sea_orm::{entity::prelude::*, ActiveValue, TryIntoModel};
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
parsers::{errors::ParseError, mikan::MikanRssItem},
path::extract_extname_from_url,
storage::{AppContextDalExt, DalContentType},
};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_status")]
#[serde(rename_all = "snake_case")]
pub enum DownloadStatus {
#[sea_orm(string_value = "pending")]
Pending,
#[sea_orm(string_value = "downloading")]
Downloading,
#[sea_orm(string_value = "paused")]
Paused,
#[sea_orm(string_value = "completed")]
Completed,
#[sea_orm(string_value = "failed")]
Failed,
#[sea_orm(string_value = "deleted")]
Deleted,
}
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "resource_category")]
pub enum ResourceCategory {
#[sea_orm(string_value = "octet-stream")]
#[serde(rename = "octet-stream")]
OctetStream,
#[sea_orm(string_value = "bittorrent")]
#[serde(rename = "bittorrent")]
BitTorrent,
#[sea_orm(string_value = "poster")]
#[serde(rename = "poster")]
Poster,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "resources")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub origin_title: String,
pub display_name: String,
pub subscription_id: i32,
pub status: DownloadStatus,
pub category: ResourceCategory,
pub url: String,
pub all_size: Option<i64>,
pub curr_size: Option<i64>,
pub homepage: Option<String>,
pub save_path: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}
#[async_trait::async_trait] #[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel { impl ActiveModel {
pub fn from_mikan_rss_item(rss_item: MikanRssItem, subscription_id: i32) -> Self { pub fn from_mikan_rss_item(rss_item: MikanRssItem, subscription_id: i32) -> Self {
let download_mime = rss_item.get_download_mime(); let resource_category = rss_item.get_resource_category();
Self { Self {
origin_title: ActiveValue::Set(rss_item.title.clone()), origin_title: ActiveValue::Set(rss_item.title.clone()),
display_name: ActiveValue::Set(rss_item.title), display_name: ActiveValue::Set(rss_item.title),
subscription_id: ActiveValue::Set(subscription_id), subscription_id: ActiveValue::Set(subscription_id),
status: ActiveValue::Set(DownloadStatus::Pending), status: ActiveValue::Set(DownloadStatus::Pending),
mime: ActiveValue::Set(download_mime), category: ActiveValue::Set(resource_category),
url: ActiveValue::Set(rss_item.url), url: ActiveValue::Set(rss_item.url),
all_size: ActiveValue::Set(rss_item.content_length), all_size: ActiveValue::Set(rss_item.content_length),
curr_size: ActiveValue::Set(Some(0)), curr_size: ActiveValue::Set(Some(0)),
@ -22,4 +110,78 @@ impl ActiveModel {
..Default::default() ..Default::default()
} }
} }
pub fn from_poster_url(
subscription_id: i32,
origin_title: String,
url: Url,
save_path: Option<String>,
content_length: i64,
) -> Self {
Self {
origin_title: ActiveValue::Set(origin_title.clone()),
display_name: ActiveValue::Set(origin_title),
subscription_id: ActiveValue::Set(subscription_id),
status: ActiveValue::Set(DownloadStatus::Completed),
category: ActiveValue::Set(ResourceCategory::Poster),
url: ActiveValue::Set(url.to_string()),
all_size: ActiveValue::Set(Some(content_length)),
curr_size: ActiveValue::Set(Some(content_length)),
save_path: ActiveValue::Set(save_path),
..Default::default()
}
}
}
impl Model {
pub async fn from_poster_url<F, R, E>(
ctx: &AppContext,
subscriber_pid: &str,
subscription_id: i32,
original_title: String,
url: Url,
fetch_fn: F,
) -> eyre::Result<Self>
where
F: FnOnce(Url) -> R,
R: Future<Output = Result<Bytes, E>>,
E: Into<eyre::Report>,
{
let db = &ctx.db;
let found = Entity::find()
.filter(
Column::SubscriptionId
.eq(subscription_id)
.and(Column::Url.eq(url.as_str())),
)
.one(db)
.await?;
let resource = if let Some(found) = found {
found
} else {
let bytes = fetch_fn(url.clone()).await.map_err(|e| e.into())?;
let content_length = bytes.len() as i64;
let dal = ctx.get_dal_unwrap().await;
let extname = extract_extname_from_url(&url)
.ok_or_else(|| ParseError::ParseExtnameError(url.to_string()))?;
let stored_url = dal
.store_blob(DalContentType::Poster, &extname, bytes, subscriber_pid)
.await?;
let saved_path = Some(stored_url.to_string());
let new_resource = ActiveModel::from_poster_url(
subscription_id,
original_title,
url,
saved_path,
content_length,
);
let new_resource = new_resource.save(db).await?;
new_resource.try_into_model()?
};
Ok(resource)
}
} }
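Note: `Model::from_poster_url` takes the fetcher as a generic closure so the mikan client (or a test stub) can be injected. A minimal standalone sketch of the same `FnOnce -> Future` plumbing, with illustrative names and a stub fetcher (requires tokio):

```rust
use std::future::Future;

// Mirrors the F/R/E bounds above: a one-shot closure returning a future
// that yields bytes or an error.
async fn fetch_once<F, R, E>(url: String, fetch_fn: F) -> Result<Vec<u8>, E>
where
    F: FnOnce(String) -> R,
    R: Future<Output = Result<Vec<u8>, E>>,
{
    fetch_fn(url).await
}

#[tokio::main]
async fn main() {
    // Stub standing in for |url| mikan_client.fetch_bytes(|f| f.get(url)).
    let bytes = fetch_once("https://example.com/poster.jpg".to_string(), |u| async move {
        Ok::<_, std::convert::Infallible>(u.into_bytes())
    })
    .await
    .unwrap();
    println!("fetched {} bytes", bytes.len());
}
```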

View File

@ -1,15 +1,70 @@
use loco_rs::model::{ModelError, ModelResult}; use loco_rs::model::{ModelError, ModelResult};
use sea_orm::{entity::prelude::*, ActiveValue, TransactionTrait}; use sea_orm::{entity::prelude::*, ActiveValue, FromJsonQueryResult, TransactionTrait};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
pub use super::entities::subscribers::*; use super::bangumi::BangumiRenameMethod;
pub const ROOT_SUBSCRIBER_ID: i32 = 1;
pub const ROOT_SUBSCRIBER_NAME: &str = "konobangu"; pub const ROOT_SUBSCRIBER_NAME: &str = "konobangu";
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscribeBangumiConfig {
pub leading_fansub_tag: bool,
pub complete_history_episodes: bool,
pub rename_method: BangumiRenameMethod,
pub remove_bad_torrent: bool,
}
impl Default for SubscribeBangumiConfig {
fn default() -> Self {
Self {
leading_fansub_tag: false,
complete_history_episodes: false,
rename_method: BangumiRenameMethod::Pn,
remove_bad_torrent: false,
}
}
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscribers")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub pid: String,
pub display_name: String,
pub downloader_id: Option<i32>,
pub bangumi_conf: Option<SubscribeBangumiConfig>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(
belongs_to = "super::downloaders::Entity",
from = "Column::DownloaderId",
to = "super::downloaders::Column::Id"
)]
Downloader,
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::downloaders::Entity> for Entity {
fn to() -> RelationDef {
Relation::Downloader.def()
}
}
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
pub struct SubscriberIdParams { pub struct SubscriberIdParams {
pub id: String, pub pid: String,
} }
#[async_trait::async_trait] #[async_trait::async_trait]
@ -20,7 +75,9 @@ impl ActiveModelBehavior for ActiveModel {
{ {
if insert { if insert {
let mut this = self; let mut this = self;
this.pid = ActiveValue::Set(Uuid::new_v4().to_string()); if this.pid.is_not_set() {
this.pid = ActiveValue::Set(Uuid::new_v4().to_string());
};
Ok(this) Ok(this)
} else { } else {
Ok(self) Ok(self)
@ -29,17 +86,13 @@ impl ActiveModelBehavior for ActiveModel {
} }
impl Model { impl Model {
/// finds a user by the provided pid
///
/// # Errors
///
/// When could not find user or DB query error
pub async fn find_by_pid(db: &DatabaseConnection, pid: &str) -> ModelResult<Self> { pub async fn find_by_pid(db: &DatabaseConnection, pid: &str) -> ModelResult<Self> {
let parse_uuid = Uuid::parse_str(pid).map_err(|e| ModelError::Any(e.into()))?; let subscriber = Entity::find().filter(Column::Pid.eq(pid)).one(db).await?;
let subscriber = Entity::find() subscriber.ok_or_else(|| ModelError::EntityNotFound)
.filter(Column::Pid.eq(parse_uuid)) }
.one(db)
.await?; pub async fn find_by_id(db: &DatabaseConnection, id: i32) -> ModelResult<Self> {
let subscriber = Entity::find().filter(Column::Id.eq(id)).one(db).await?;
subscriber.ok_or_else(|| ModelError::EntityNotFound) subscriber.ok_or_else(|| ModelError::EntityNotFound)
} }
@ -47,12 +100,6 @@ impl Model {
Self::find_by_pid(db, ROOT_SUBSCRIBER_NAME).await Self::find_by_pid(db, ROOT_SUBSCRIBER_NAME).await
} }
/// Asynchronously creates a user with a password and saves it to the
/// database.
///
/// # Errors
///
/// When could not save the user into the DB
pub async fn create_root(db: &DatabaseConnection) -> ModelResult<Self> { pub async fn create_root(db: &DatabaseConnection) -> ModelResult<Self> {
let txn = db.begin().await?; let txn = db.begin().await?;
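Note: the new `is_not_set()` guard in the insert hook is what lets the migration seed the root subscriber with `pid = ROOT_SUBSCRIBER_NAME` while ordinary inserts still receive a generated UUID. A small sketch of the `ActiveValue` states involved:

```rust
use sea_orm::ActiveValue;

fn main() {
    // The migration's seed insert arrives with pid already Set(...).
    let seeded: ActiveValue<String> = ActiveValue::Set("konobangu".to_string());
    // A regular insert leaves pid NotSet, so the hook fills in a UUID.
    let fresh: ActiveValue<String> = ActiveValue::NotSet;

    assert!(!seeded.is_not_set()); // kept as-is by the guard
    assert!(fresh.is_not_set()); // replaced with Uuid::new_v4()
}
```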

View File

@ -10,9 +10,65 @@ use sea_orm::{
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::{event, instrument, Level}; use tracing::{event, instrument, Level};
pub use super::entities::subscriptions::{self, *}; #[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
#[sea_orm(string_value = "mikan")]
Mikan,
#[sea_orm(string_value = "tmdb")]
Tmdb,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
#[sea_orm(column_type = "Timestamp")]
pub created_at: DateTime,
#[sea_orm(column_type = "Timestamp")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub display_name: String,
pub subscriber_id: i32,
pub category: SubscriptionCategory,
pub source_url: String,
pub aggregate: bool,
pub enabled: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
#[sea_orm(has_many = "super::bangumi::Entity")]
Bangumi,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}
use crate::{ use crate::{
models::{bangumi, db_utils::insert_many_with_returning_all, episodes, resources}, models::{bangumi, episodes, resources, subscribers},
parsers::{ parsers::{
mikan::{ mikan::{
parse_episode_meta_from_mikan_homepage, parse_mikan_rss_items_from_rss_link, parse_episode_meta_from_mikan_homepage, parse_mikan_rss_items_from_rss_link,
@ -20,8 +76,7 @@ use crate::{
}, },
raw::{parse_episode_meta_from_raw_name, RawEpisodeMeta}, raw::{parse_episode_meta_from_raw_name, RawEpisodeMeta},
}, },
path::extract_extname_from_url, utils::db::insert_many_with_returning_all,
storage::{AppContextDalExt, DalContentType},
}; };
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@ -104,9 +159,14 @@ impl Model {
#[instrument( #[instrument(
fields(subscriber_id = "self.subscriber_id", subscription_id = "self.id"), fields(subscriber_id = "self.subscriber_id", subscription_id = "self.id"),
skip(self, db, ctx) skip(self, ctx)
)] )]
pub async fn pull_one(&self, db: &DatabaseConnection, ctx: &AppContext) -> eyre::Result<()> { pub async fn pull_one(
&self,
ctx: &AppContext,
subscriber: &subscribers::Model,
) -> eyre::Result<()> {
let db = &ctx.db;
let subscription = self; let subscription = self;
let subscription_id = subscription.id; let subscription_id = subscription.id;
match &subscription.category { match &subscription.category {
@ -152,7 +212,6 @@ impl Model {
let mut ep_metas: HashMap<bangumi::BangumiUniqueKey, Vec<MikanEpMetaBundle>> = let mut ep_metas: HashMap<bangumi::BangumiUniqueKey, Vec<MikanEpMetaBundle>> =
HashMap::new(); HashMap::new();
let dal = ctx.get_dal_unwrap().await;
{ {
for r in new_resources { for r in new_resources {
let mut mikan_meta = if let Some(homepage) = r.homepage.as_deref() { let mut mikan_meta = if let Some(homepage) = r.homepage.as_deref() {
@ -174,40 +233,34 @@ impl Model {
} else { } else {
continue; continue;
}; };
let mikan_poster_link = if let Some(poster) = mikan_meta.poster.take() { let mikan_poster_link =
if let Some(extname) = extract_extname_from_url(&poster.origin_url) { if let Some(poster_url) = mikan_meta.poster_url.take() {
let result = dal let poster_url_str = poster_url.to_string();
.store_blob( let poster_resource_result = resources::Model::from_poster_url(
DalContentType::Poster, ctx,
&extname, &subscriber.pid,
poster.data, subscription_id,
&subscriber_id.to_string(), mikan_meta.official_title.clone(),
) poster_url,
.await; |url| mikan_client.fetch_bytes(|f| f.get(url)),
match result { )
Ok(stored_url) => Some(stored_url.to_string()), .await;
match poster_resource_result {
Ok(resource) => resource.save_path,
Err(e) => { Err(e) => {
let error: &dyn std::error::Error = e.as_ref(); let error: &dyn std::error::Error = e.as_ref();
event!( event!(
Level::ERROR, Level::ERROR,
desc = "failed to store mikan meta poster", desc = "failed to fetch mikan meta poster",
origin_url = poster.origin_url.as_str(), poster_url = poster_url_str,
error = error error = error
); );
None None
} }
} }
} else { } else {
event!(
Level::ERROR,
desc = "failed to extract mikan meta poster extname",
origin_url = poster.origin_url.as_str(),
);
None None
} };
} else {
None
};
let raw_meta = match parse_episode_meta_from_raw_name(&r.origin_title) { let raw_meta = match parse_episode_meta_from_raw_name(&r.origin_title) {
Ok(raw_meta) => raw_meta, Ok(raw_meta) => raw_meta,
Err(e) => { Err(e) => {

View File

@ -18,4 +18,6 @@ pub enum ParseError {
UnsupportedLanguagePreset(String), UnsupportedLanguagePreset(String),
#[error("Parse episode meta error, get empty official title, homepage = {0}")] #[error("Parse episode meta error, get empty official title, homepage = {0}")]
MikanEpisodeMetaEmptyOfficialTitleError(String), MikanEpisodeMetaEmptyOfficialTitleError(String),
#[error("Parse extname error from source = {0}")]
ParseExtnameError(String),
} }
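
For context on the new `ParseExtnameError` variant, a hedged sketch of the kind of helper that would raise it. The name `extract_extname_from_url` appears in this commit's imports, but this body is illustrative only, not the project's implementation:

use url::Url;

// Naive extname extraction: take everything after the last '.' in the URL path.
// Returns the thiserror variant above when no extension can be found.
fn extname_or_parse_error(url: &Url) -> Result<String, ParseError> {
    url.path()
        .rsplit_once('.')
        .map(|(_, ext)| format!(".{ext}"))
        .ok_or_else(|| ParseError::ParseExtnameError(url.to_string()))
}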

View File

@ -22,7 +22,7 @@ pub struct MikanEpisodeMetaPosterBlob {
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct MikanEpisodeMeta { pub struct MikanEpisodeMeta {
pub homepage: Url, pub homepage: Url,
pub poster: Option<MikanEpisodeMetaPosterBlob>, pub poster_url: Option<Url>,
pub official_title: String, pub official_title: String,
} }
@ -73,18 +73,6 @@ pub async fn parse_episode_meta_from_mikan_homepage(
p.set_query(None); p.set_query(None);
p p
}); });
let poster = if let Some(p) = origin_poster_src {
client
.fetch_bytes(|f| f.get(p.clone()))
.await
.ok()
.map(|data| MikanEpisodeMetaPosterBlob {
data,
origin_url: p,
})
} else {
None
};
let official_title = official_title_node let official_title = official_title_node
.map(|s| s.inner_text(parser)) .map(|s| s.inner_text(parser))
.and_then(|official_title| { .and_then(|official_title| {
@ -102,7 +90,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
Ok(MikanEpisodeMeta { Ok(MikanEpisodeMeta {
homepage: url, homepage: url,
poster, poster_url: origin_poster_src,
official_title, official_title,
}) })
} }
@ -128,20 +116,11 @@ mod test {
assert_eq!(ep_meta.homepage, url); assert_eq!(ep_meta.homepage, url);
assert_eq!(ep_meta.official_title, "葬送的芙莉莲"); assert_eq!(ep_meta.official_title, "葬送的芙莉莲");
assert_eq!( assert_eq!(
ep_meta.poster.clone().map(|p| p.origin_url), ep_meta.poster_url.clone(),
Some(Url::parse( Some(Url::parse(
"https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg" "https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
)?) )?)
); );
let u8_data = ep_meta
.poster
.clone()
.map(|p| p.data)
.expect("should have poster data");
assert!(
u8_data.starts_with(&[255, 216, 255, 224]),
"should start with valid jpeg data magic number"
);
} }
Ok(()) Ok(())
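
The net effect of this refactor is that the parser no longer downloads the poster; callers receive `poster_url` and fetch on demand. A hedged sketch of the caller side, with the `fetch_bytes` signature inferred from its usage elsewhere in this diff:

// Fetch the poster lazily; returns None if the meta has no poster URL or the
// request fails. `MikanClient::fetch_bytes` shape is an assumption from usage.
async fn fetch_poster_bytes(
    client: &MikanClient,
    meta: &MikanEpisodeMeta,
) -> Option<bytes::Bytes> {
    let poster_url = meta.poster_url.clone()?;
    client.fetch_bytes(|f| f.get(poster_url)).await.ok()
}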

View File

@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize};
use crate::{ use crate::{
downloaders::defs::BITTORRENT_MIME_TYPE, downloaders::defs::BITTORRENT_MIME_TYPE,
models::prelude::ResourceMime, models::prelude::ResourceCategory,
parsers::{errors::ParseError, mikan::mikan_client::MikanClient}, parsers::{errors::ParseError, mikan::mikan_client::MikanClient},
}; };
@ -13,14 +13,14 @@ pub struct MikanRssItem {
pub title: String, pub title: String,
pub homepage: Option<String>, pub homepage: Option<String>,
pub url: String, pub url: String,
pub content_length: Option<u64>, pub content_length: Option<i64>,
pub mime: String, pub mime: String,
pub pub_date: Option<i64>, pub pub_date: Option<i64>,
} }
impl MikanRssItem { impl MikanRssItem {
pub fn get_download_mime(&self) -> ResourceMime { pub fn get_resource_category(&self) -> ResourceCategory {
ResourceMime::BitTorrent ResourceCategory::BitTorrent
} }
} }

View File

@ -17,7 +17,8 @@ lazy_static! {
static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap(); static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap(); static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap();
static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap(); static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap();
static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap(); static ref SUB_RE: Regex = Regex::new(r"[简繁日英字幕]|CH|BIG5|GB").unwrap();
static ref SUB_RE_EXCLUDE: Regex = Regex::new(r"字幕[社组]").unwrap();
static ref PREFIX_RE: Regex = static ref PREFIX_RE: Regex =
Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap(); Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap(); static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
@ -49,9 +50,9 @@ pub struct RawEpisodeMeta {
pub s_name_jp: Option<String>, pub s_name_jp: Option<String>,
pub name_zh: Option<String>, pub name_zh: Option<String>,
pub s_name_zh: Option<String>, pub s_name_zh: Option<String>,
pub season: u32, pub season: i32,
pub season_raw: Option<String>, pub season_raw: Option<String>,
pub episode_index: u32, pub episode_index: i32,
pub sub: Option<Vec<String>>, pub sub: Option<Vec<String>>,
pub source: Option<String>, pub source: Option<String>,
pub fansub: Option<String>, pub fansub: Option<String>,
@ -110,7 +111,7 @@ fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> eyre::Resul
Ok(raw.to_string()) Ok(raw.to_string())
} }
fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, u32) { fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " "); let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
let seasons = SEASON_EXTRACT_SEASON_ALL_RE let seasons = SEASON_EXTRACT_SEASON_ALL_RE
.find(&name_and_season) .find(&name_and_season)
@ -122,7 +123,7 @@ fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>,
return (title_body.to_string(), None, 1); return (title_body.to_string(), None, 1);
} }
let mut season = 1u32; let mut season = 1i32;
let mut season_raw = None; let mut season_raw = None;
let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, ""); let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");
@ -131,7 +132,7 @@ fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>,
if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s) { if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s) {
if let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE if let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
.replace_all(m.as_str(), "") .replace_all(m.as_str(), "")
.parse::<u32>() .parse::<i32>()
{ {
season = s; season = s;
break; break;
@ -140,7 +141,7 @@ fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>,
if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s) { if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s) {
if let Some(s) = DIGIT_1PLUS_REG if let Some(s) = DIGIT_1PLUS_REG
.find(m.as_str()) .find(m.as_str())
.and_then(|s| s.as_str().parse::<u32>().ok()) .and_then(|s| s.as_str().parse::<i32>().ok())
{ {
season = s; season = s;
break; break;
@ -149,13 +150,13 @@ fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>,
if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) { if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
.replace(m.as_str(), "") .replace(m.as_str(), "")
.parse::<u32>() .parse::<i32>()
{ {
season = s; season = s;
break; break;
} }
if let Some(m) = ZH_NUM_RE.find(m.as_str()) { if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
season = ZH_NUM_MAP[m.as_str()] as u32; season = ZH_NUM_MAP[m.as_str()];
break; break;
} }
} }
@ -207,11 +208,11 @@ fn extract_name_from_title_body_name_section(
(name_en, name_zh, name_jp) (name_en, name_zh, name_jp)
} }
fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<u32> { fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
DIGIT_1PLUS_REG DIGIT_1PLUS_REG
.find(title_episode)? .find(title_episode)?
.as_str() .as_str()
.parse::<u32>() .parse::<i32>()
.ok() .ok()
} }
@ -237,7 +238,7 @@ fn extract_tags_from_title_extra(
let mut resolution = None; let mut resolution = None;
let mut source = None; let mut source = None;
for element in elements.iter() { for element in elements.iter() {
if SUB_RE.is_match(element) { if SUB_RE.is_match(element) && !SUB_RE_EXCLUDE.is_match(element) {
let el = element.to_string(); let el = element.to_string();
sub = Some(match sub { sub = Some(match sub {
Some(mut res) => { Some(mut res) => {
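
The new `SUB_RE_EXCLUDE` guard exists because fansub group names also contain 字幕 ("subtitle"); a worked example of the combined check, mirroring the condition above:

// Treat an element as a subtitle tag only when it matches the subtitle regex
// and is not a group name such as "字幕社" / "字幕组" (subtitle society/group).
fn is_sub_tag(element: &str) -> bool {
    SUB_RE.is_match(element) && !SUB_RE_EXCLUDE.is_match(element)
}

// is_sub_tag("简繁内封字幕") == true   (muxed simplified/traditional subs tag)
// is_sub_tag("幻樱字幕组")   == false  (fansub group name, excluded)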
@ -297,17 +298,17 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> eyre::Result<RawEpisodeMeta>
let title_body = title_body_pre_process(&title_body, fansub)?; let title_body = title_body_pre_process(&title_body, fansub)?;
let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body); let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body); let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
let (name_en_no_season, name_zh_no_season, name_jp_no_season) = let (s_name_en, s_name_zh, s_name_jp) =
extract_name_from_title_body_name_section(&name_without_season); extract_name_from_title_body_name_section(&name_without_season);
let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1); let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1);
let (sub, resolution, source) = extract_tags_from_title_extra(title_extra); let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
Ok(RawEpisodeMeta { Ok(RawEpisodeMeta {
name_en, name_en,
s_name_en: name_en_no_season, s_name_en,
name_jp, name_jp,
s_name_jp: name_jp_no_season, s_name_jp,
name_zh, name_zh,
s_name_zh: name_zh_no_season, s_name_zh,
season, season,
season_raw, season_raw,
episode_index, episode_index,
@ -345,11 +346,11 @@ mod tests {
r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#, r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
r#"{ r#"{
"name_zh": "我心里危险的东西", "name_zh": "我心里危险的东西",
"name_zh_no_season": "我心里危险的东西", "s_name_zh": "我心里危险的东西",
"season": 2, "season": 2,
"season_raw": "第二季", "season_raw": "第二季",
"episode_index": 5, "episode_index": 5,
"sub": "简日双语", "sub": ["简日双语"],
"source": null, "source": null,
"fansub": "新Sub", "fansub": "新Sub",
"resolution": "1080P" "resolution": "1080P"
@ -363,13 +364,13 @@ mod tests {
r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#, r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
r#"{ r#"{
"name_en": "Boku no Kokoro no Yabai Yatsu", "name_en": "Boku no Kokoro no Yabai Yatsu",
"name_en_no_season": "Boku no Kokoro no Yabai Yatsu", "s_name_en": "Boku no Kokoro no Yabai Yatsu",
"name_zh": "我内心的糟糕念头", "name_zh": "我内心的糟糕念头",
"name_zh_no_season": "我内心的糟糕念头", "s_name_zh": "我内心的糟糕念头",
"season": 1, "season": 1,
"season_raw": null, "season_raw": null,
"episode_index": 18, "episode_index": 18,
"sub": "简日双语", "sub": ["简日双语"],
"source": null, "source": null,
"fansub": "喵萌奶茶屋", "fansub": "喵萌奶茶屋",
"resolution": "1080p" "resolution": "1080p"
@ -383,13 +384,13 @@ mod tests {
r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#, r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{ r#"{
"name_en": "Shin no Nakama 2nd", "name_en": "Shin no Nakama 2nd",
"name_en_no_season": "Shin no Nakama", "s_name_en": "Shin no Nakama",
"name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd", "name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
"name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生", "s_name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
"season": 2, "season": 2,
"season_raw": "2nd", "season_raw": "2nd",
"episode_index": 8, "episode_index": 8,
"sub": "简繁内封字幕", "sub": ["简繁内封字幕"],
"source": "WebRip", "source": "WebRip",
"fansub": "LoliHouse", "fansub": "LoliHouse",
"resolution": "1080p" "resolution": "1080p"
@ -403,10 +404,10 @@ mod tests {
r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]", r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
r#"{ r#"{
"name_en": "THE MARGINAL SERVICE", "name_en": "THE MARGINAL SERVICE",
"name_en_no_season": "THE MARGINAL SERVICE", "s_name_en": "THE MARGINAL SERVICE",
"season": 1, "season": 1,
"episode_index": 8, "episode_index": 8,
"sub": "简繁内封字幕", "sub": ["简繁内封字幕"],
"source": "WebRip", "source": "WebRip",
"fansub": "动漫国字幕组&LoliHouse", "fansub": "动漫国字幕组&LoliHouse",
"resolution": "1080p" "resolution": "1080p"
@ -420,13 +421,13 @@ mod tests {
r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#, r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{ r#"{
"name_en": "Nozomanu Fushi no Boukensha", "name_en": "Nozomanu Fushi no Boukensha",
"name_en_no_season": "Nozomanu Fushi no Boukensha", "s_name_en": "Nozomanu Fushi no Boukensha",
"name_zh": "事与愿违的不死冒险者", "name_zh": "事与愿违的不死冒险者",
"name_zh_no_season": "事与愿违的不死冒险者", "s_name_zh": "事与愿违的不死冒险者",
"season": 1, "season": 1,
"season_raw": null, "season_raw": null,
"episode_index": 1, "episode_index": 1,
"sub": "简繁内封字幕", "sub": ["简繁内封字幕"],
"source": "WebRip", "source": "WebRip",
"fansub": "LoliHouse", "fansub": "LoliHouse",
"resolution": "1080p" "resolution": "1080p"
@ -442,13 +443,13 @@ mod tests {
"name_en": "Pon no Michi", "name_en": "Pon no Michi",
"name_jp": "ぽんのみち", "name_jp": "ぽんのみち",
"name_zh": "碰之道", "name_zh": "碰之道",
"name_en_no_season": "Pon no Michi", "s_name_en": "Pon no Michi",
"name_jp_no_season": "ぽんのみち", "s_name_jp": "ぽんのみち",
"name_zh_no_season": "碰之道", "s_name_zh": "碰之道",
"season": 1, "season": 1,
"season_raw": null, "season_raw": null,
"episode_index": 7, "episode_index": 7,
"sub": "简繁日内封字幕", "sub": ["简繁日内封字幕"],
"source": "WebRip", "source": "WebRip",
"fansub": "喵萌奶茶屋&LoliHouse", "fansub": "喵萌奶茶屋&LoliHouse",
"resolution": "1080p" "resolution": "1080p"
@ -462,13 +463,13 @@ mod tests {
r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#, r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{ r#"{
"name_en": "Yowai Character Tomozakikun", "name_en": "Yowai Character Tomozakikun",
"name_en_no_season": "Yowai Character Tomozakikun", "s_name_en": "Yowai Character Tomozakikun",
"name_zh": "弱角友崎同学 2nd STAGE", "name_zh": "弱角友崎同学 2nd STAGE",
"name_zh_no_season": "弱角友崎同学", "s_name_zh": "弱角友崎同学",
"season": 2, "season": 2,
"season_raw": "2nd", "season_raw": "2nd",
"episode_index": 9, "episode_index": 9,
"sub": "CHT", "sub": ["CHT"],
"source": "Baha", "source": "Baha",
"fansub": "ANi", "fansub": "ANi",
"resolution": "1080P" "resolution": "1080P"
@ -482,13 +483,13 @@ mod tests {
r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#, r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
r#"{ r#"{
"name_en": "Kingdom S5", "name_en": "Kingdom S5",
"name_en_no_season": "Kingdom", "s_name_en": "Kingdom",
"name_zh": "王者天下 第五季", "name_zh": "王者天下 第五季",
"name_zh_no_season": "王者天下", "s_name_zh": "王者天下",
"season": 5, "season": 5,
"season_raw": "第五季", "season_raw": "第五季",
"episode_index": 7, "episode_index": 7,
"sub": "简繁外挂字幕", "sub": ["简繁外挂字幕"],
"source": "WebRip", "source": "WebRip",
"fansub": "豌豆字幕组&LoliHouse", "fansub": "豌豆字幕组&LoliHouse",
"resolution": "1080p" "resolution": "1080p"
@ -502,12 +503,12 @@ mod tests {
r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#, r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
r#"{ r#"{
"name_en": "Alice to Therese no Maboroshi Koujou", "name_en": "Alice to Therese no Maboroshi Koujou",
"name_en_no_season": "Alice to Therese no Maboroshi Koujou", "s_name_en": "Alice to Therese no Maboroshi Koujou",
"name_zh": "爱丽丝与特蕾丝的虚幻工厂", "name_zh": "爱丽丝与特蕾丝的虚幻工厂",
"name_zh_no_season": "爱丽丝与特蕾丝的虚幻工厂", "s_name_zh": "爱丽丝与特蕾丝的虚幻工厂",
"season": 1, "season": 1,
"episode_index": 1, "episode_index": 1,
"sub": "简繁内封", "sub": ["简繁内封"],
"source": "WebRip", "source": "WebRip",
"fansub": "千夏字幕组", "fansub": "千夏字幕组",
"resolution": "1080p" "resolution": "1080p"
@ -521,12 +522,12 @@ mod tests {
r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#, r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
r#"{ r#"{
"name_en": "Yuru Camp Movie", "name_en": "Yuru Camp Movie",
"name_en_no_season": "Yuru Camp Movie", "s_name_en": "Yuru Camp Movie",
"name_zh": "电影 轻旅轻营 (摇曳露营)", "name_zh": "电影 轻旅轻营 (摇曳露营)",
"name_zh_no_season": "电影 轻旅轻营 (摇曳露营)", "s_name_zh": "电影 轻旅轻营 (摇曳露营)",
"season": 1, "season": 1,
"episode_index": 1, "episode_index": 1,
"sub": "繁体", "sub": ["繁体"],
"source": "UHDRip", "source": "UHDRip",
"fansub": "千夏字幕组&喵萌奶茶屋", "fansub": "千夏字幕组&喵萌奶茶屋",
"resolution": "2160p" "resolution": "2160p"
@ -540,12 +541,12 @@ mod tests {
r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#, r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#,
r#"{ r#"{
"name_en": "New Doraemon", "name_en": "New Doraemon",
"name_en_no_season": "New Doraemon", "s_name_en": "New Doraemon",
"name_zh": "哆啦A梦新番", "name_zh": "哆啦A梦新番",
"name_zh_no_season": "哆啦A梦新番", "s_name_zh": "哆啦A梦新番",
"season": 1, "season": 1,
"episode_index": 747, "episode_index": 747,
"sub": "GB", "sub": ["GB"],
"fansub": "梦蓝字幕组", "fansub": "梦蓝字幕组",
"resolution": "1080P" "resolution": "1080P"
}"#, }"#,
@ -558,12 +559,12 @@ mod tests {
r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#, r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
r#"{ r#"{
"name_en": "Yuru Camp", "name_en": "Yuru Camp",
"name_en_no_season": "Yuru Camp", "s_name_en": "Yuru Camp",
"name_zh": "剧场版-摇曳露营", "name_zh": "剧场版-摇曳露营",
"name_zh_no_season": "剧场版-摇曳露营", "s_name_zh": "剧场版-摇曳露营",
"season": 1, "season": 1,
"episode_index": 1, "episode_index": 1,
"sub": "简日双语", "sub": ["简日双语"],
"fansub": "MCE汉化组", "fansub": "MCE汉化组",
"resolution": "1080P" "resolution": "1080P"
}"#, }"#,
@ -576,12 +577,12 @@ mod tests {
r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#, r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#,
r#"{ r#"{
"name_en": "NieR Automata Ver1.1a", "name_en": "NieR Automata Ver1.1a",
"name_en_no_season": "NieR Automata Ver1.1a", "s_name_en": "NieR Automata Ver1.1a",
"name_zh": "尼尔:机械纪元", "name_zh": "尼尔:机械纪元",
"name_zh_no_season": "尼尔:机械纪元", "s_name_zh": "尼尔:机械纪元",
"season": 1, "season": 1,
"episode_index": 2, "episode_index": 2,
"sub": "简日双语", "sub": ["简日双语"],
"fansub": "织梦字幕组", "fansub": "织梦字幕组",
"resolution": "1080P" "resolution": "1080P"
}"#, }"#,
@ -595,12 +596,12 @@ mod tests {
r#" r#"
{ {
"name_en": "Delicious in Dungeon", "name_en": "Delicious in Dungeon",
"name_en_no_season": "Delicious in Dungeon", "s_name_en": "Delicious in Dungeon",
"name_zh": "迷宫饭", "name_zh": "迷宫饭",
"name_zh_no_season": "迷宫饭", "s_name_zh": "迷宫饭",
"season": 1, "season": 1,
"episode_index": 3, "episode_index": 3,
"sub": "日语中字", "sub": ["日语中字"],
"source": "NETFLIX", "source": "NETFLIX",
"fansub": "天月搬运组", "fansub": "天月搬运组",
"resolution": "1080P" "resolution": "1080P"
@ -615,12 +616,12 @@ mod tests {
r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#, r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
r#"{ r#"{
"name_en": "Dungeon Meshi", "name_en": "Dungeon Meshi",
"name_en_no_season": "Dungeon Meshi", "s_name_en": "Dungeon Meshi",
"name_zh": "迷宫饭", "name_zh": "迷宫饭",
"name_zh_no_season": "迷宫饭", "s_name_zh": "迷宫饭",
"season": 1, "season": 1,
"episode_index": 1, "episode_index": 1,
"sub": "简日双语", "sub": ["简日双语"],
"fansub": "爱恋字幕社", "fansub": "爱恋字幕社",
"resolution": "1080P" "resolution": "1080P"
}"#, }"#,
@ -633,12 +634,12 @@ mod tests {
r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#, r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{ r#"{
"name_en": "Mahou Shoujo ni Akogarete", "name_en": "Mahou Shoujo ni Akogarete",
"name_en_no_season": "Mahou Shoujo ni Akogarete", "s_name_en": "Mahou Shoujo ni Akogarete",
"name_zh": "梦想成为魔法少女 [年龄限制版]", "name_zh": "梦想成为魔法少女 [年龄限制版]",
"name_zh_no_season": "梦想成为魔法少女 [年龄限制版]", "s_name_zh": "梦想成为魔法少女 [年龄限制版]",
"season": 1, "season": 1,
"episode_index": 9, "episode_index": 9,
"sub": "CHT", "sub": ["CHT"],
"source": "Baha", "source": "Baha",
"fansub": "ANi", "fansub": "ANi",
"resolution": "1080P" "resolution": "1080P"
@ -652,11 +653,11 @@ mod tests {
r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#, r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{ r#"{
"name_zh": "16bit 的感动 ANOTHER LAYER", "name_zh": "16bit 的感动 ANOTHER LAYER",
"name_zh_no_season": "16bit 的感动 ANOTHER LAYER", "s_name_zh": "16bit 的感动 ANOTHER LAYER",
"season": 1, "season": 1,
"season_raw": null, "season_raw": null,
"episode_index": 1, "episode_index": 1,
"sub": "CHT", "sub": ["CHT"],
"source": "Baha", "source": "Baha",
"fansub": "ANi", "fansub": "ANi",
"resolution": "1080P" "resolution": "1080P"
@ -670,12 +671,12 @@ mod tests {
r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#, r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#,
r#"{ r#"{
"name_en": "~ Sugar Apple Fairy Tale ~", "name_en": "~ Sugar Apple Fairy Tale ~",
"name_en_no_season": "~ Sugar Apple Fairy Tale ~", "s_name_en": "~ Sugar Apple Fairy Tale ~",
"name_zh": "银砂糖师与黑妖精", "name_zh": "银砂糖师与黑妖精",
"name_zh_no_season": "银砂糖师与黑妖精", "s_name_zh": "银砂糖师与黑妖精",
"season": 1, "season": 1,
"episode_index": 13, "episode_index": 13,
"sub": "简日双语", "sub": ["简日双语"],
"fansub": "喵萌奶茶屋", "fansub": "喵萌奶茶屋",
"resolution": "1080p" "resolution": "1080p"
}"#, }"#,
@ -688,12 +689,12 @@ mod tests {
r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4字幕社招人内详"#, r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4字幕社招人内详"#,
r#"{ r#"{
"name_en": "Tengoku Daimakyou", "name_en": "Tengoku Daimakyou",
"name_en_no_season": "Tengoku Daimakyou", "s_name_en": "Tengoku Daimakyou",
"name_zh": "天国大魔境", "name_zh": "天国大魔境",
"name_zh_no_season": "天国大魔境", "s_name_zh": "天国大魔境",
"season": 1, "season": 1,
"episode_index": 5, "episode_index": 5,
"sub": "字幕社招人内详", "sub": ["GB"],
"source": null, "source": null,
"fansub": "极影字幕社", "fansub": "极影字幕社",
"resolution": "720P" "resolution": "720P"
@ -707,9 +708,9 @@ mod tests {
r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#, r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#,
r#"{ r#"{
"name_jp": "仮面ライダーギーツ", "name_jp": "仮面ライダーギーツ",
"name_jp_no_season": "仮面ライダーギーツ", "s_name_jp": "仮面ライダーギーツ",
"name_zh": "假面骑士Geats", "name_zh": "假面骑士Geats",
"name_zh_no_season": "假面骑士Geats", "s_name_zh": "假面骑士Geats",
"season": 1, "season": 1,
"episode_index": 33, "episode_index": 33,
"source": "WEBDL", "source": "WEBDL",
@ -725,12 +726,12 @@ mod tests {
r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对☆PICO FEVER / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#, r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对☆PICO FEVER / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#,
r#"{ r#"{
"name_en": "Garupa Pico: Fever!", "name_en": "Garupa Pico: Fever!",
"name_en_no_season": "Garupa Pico: Fever!", "s_name_en": "Garupa Pico: Fever!",
"name_zh": "BanG Dream! 少女乐团派对☆PICO FEVER", "name_zh": "BanG Dream! 少女乐团派对☆PICO FEVER",
"name_zh_no_season": "BanG Dream! 少女乐团派对☆PICO FEVER", "s_name_zh": "BanG Dream! 少女乐团派对☆PICO FEVER",
"season": 1, "season": 1,
"episode_index": 26, "episode_index": 26,
"sub": "简繁内封字幕", "sub": ["简繁内封字幕"],
"source": "WebRip", "source": "WebRip",
"fansub": "百冬练习组&LoliHouse", "fansub": "百冬练习组&LoliHouse",
"resolution": "1080p" "resolution": "1080p"
@ -745,11 +746,11 @@ mod tests {
r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#, r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
r#"{ r#"{
"name_zh": "摇曳露营△剧场版", "name_zh": "摇曳露营△剧场版",
"name_zh_no_season": "摇曳露营△剧场版", "s_name_zh": "摇曳露营△剧场版",
"season": 1, "season": 1,
"season_raw": null, "season_raw": null,
"episode_index": 1, "episode_index": 1,
"sub": "简繁字幕", "sub": ["简繁字幕"],
"source": "BDrip", "source": "BDrip",
"fansub": "7³ACG x 桜都字幕组", "fansub": "7³ACG x 桜都字幕组",
"resolution": "1080p" "resolution": "1080p"
@ -760,13 +761,13 @@ mod tests {
r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#, r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#,
r#"{ r#"{
"name_en": "第二季 Komi-san wa, Komyushou Desu. S02", "name_en": "第二季 Komi-san wa, Komyushou Desu. S02",
"name_en_no_season": "Komi-san wa, Komyushou Desu.", "s_name_en": "Komi-san wa, Komyushou Desu.",
"name_zh": "古见同学有交流障碍症", "name_zh": "古见同学有交流障碍症",
"name_zh_no_season": "古见同学有交流障碍症", "s_name_zh": "古见同学有交流障碍症",
"season": 2, "season": 2,
"season_raw": "第二季", "season_raw": "第二季",
"episode_index": 22, "episode_index": 22,
"sub": "GB", "sub": ["GB"],
"fansub": "幻樱字幕组", "fansub": "幻樱字幕组",
"resolution": "1920X1080" "resolution": "1920X1080"
}"#, }"#,

View File

@ -70,10 +70,10 @@ impl Deref for TmdbApiClient {
pub(crate) mod tests { pub(crate) mod tests {
use std::{env, sync::Arc}; use std::{env, sync::Arc};
use crate::parsers::tmdb::tmdb_client::TmdbApiClient; use crate::{parsers::tmdb::tmdb_client::TmdbApiClient, utils::test::load_test_env_panic};
pub async fn prepare_tmdb_api_client() -> Arc<TmdbApiClient> { pub async fn prepare_tmdb_api_client() -> Arc<TmdbApiClient> {
dotenv::from_filename("test.env").expect("failed to load test.env"); load_test_env_panic();
let tmdb_api_token = env::var("TMDB_API_TOKEN").expect("TMDB_API_TOKEN is not set"); let tmdb_api_token = env::var("TMDB_API_TOKEN").expect("TMDB_API_TOKEN is not set");
TmdbApiClient::new(tmdb_api_token) TmdbApiClient::new(tmdb_api_token)
.await .await

View File

@ -86,7 +86,7 @@ pub struct TmdbTvSeriesDetailDto {
pub episode_run_time: Option<Vec<i32>>, pub episode_run_time: Option<Vec<i32>>,
pub genres: Vec<TmdbGenresObjDto>, pub genres: Vec<TmdbGenresObjDto>,
pub first_air_date: Option<String>, pub first_air_date: Option<String>,
pub home_page: Option<String>, pub homepage: Option<String>,
pub in_production: bool, pub in_production: bool,
pub languages: Vec<String>, pub languages: Vec<String>,
pub last_air_date: Option<String>, pub last_air_date: Option<String>,

View File

@ -4,9 +4,9 @@ use url::Url;
pub fn extract_filename_from_url(url: &Url) -> Option<&str> { pub fn extract_filename_from_url(url: &Url) -> Option<&str> {
url.path_segments().and_then(|s| s.last()).and_then(|last| { url.path_segments().and_then(|s| s.last()).and_then(|last| {
if last.is_empty() { if last.is_empty() {
Some(last)
} else {
None None
} else {
Some(last)
} }
}) })
} }
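
The swap above fixes an inverted condition: the old code returned `Some` for an empty last segment and `None` otherwise. A test sketch, not part of the commit, pinning the corrected behavior:

#[cfg(test)]
mod extract_filename_tests {
    use super::*;
    use url::Url;

    #[test]
    fn returns_last_non_empty_segment() {
        // Normal file path: the final segment is the filename.
        let file = Url::parse("https://mikanani.me/images/poster.jpg").unwrap();
        assert_eq!(extract_filename_from_url(&file), Some("poster.jpg"));

        // Trailing slash: the last segment is empty, so there is no filename.
        let dir = Url::parse("https://mikanani.me/images/").unwrap();
        assert_eq!(extract_filename_from_url(&dir), None);
    }
}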

View File

@ -28,6 +28,7 @@ pub struct DalContext {
pub config: AppDalConf, pub config: AppDalConf,
} }
#[derive(Debug, Clone)]
pub enum DalStoredUrl { pub enum DalStoredUrl {
RelativePath { path: String }, RelativePath { path: String },
Absolute { url: Url }, Absolute { url: Url },
@ -79,7 +80,8 @@ impl DalContext {
.layer(LoggingLayer::default()) .layer(LoggingLayer::default())
.finish(); .finish();
fs_op.create_dir(dirname.as_str()).await?; let dirpath = format!("{}/", dirname.as_str());
fs_op.create_dir(&dirpath).await?;
let fullname = { let fullname = {
dirname.push(basename); dirname.push(basename);
@ -93,3 +95,36 @@ impl DalContext {
}) })
} }
} }
#[cfg(test)]
mod tests {
use base64::Engine;
use crate::{
config::AppDalConf, models::subscribers::ROOT_SUBSCRIBER_NAME, storage::DalContext,
};
#[tokio::test]
async fn test_dal_context() {
let dal_context = DalContext::new(AppDalConf {
fs_root: "data/dal".to_string(),
});
let a = dal_context
.store_blob(
crate::storage::DalContentType::Poster,
".jpg",
bytes::Bytes::from(
base64::engine::general_purpose::STANDARD.decode("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8BQDwAEhQGAhKmMIQAAAABJRU5ErkJggg==").expect("should decode as vec u8")
),
ROOT_SUBSCRIBER_NAME,
)
.await
.expect("dal context should store blob");
assert!(
matches!(a, crate::storage::DalStoredUrl::RelativePath { .. }),
"dal context should store blob as relative path"
);
}
}

View File

@ -0,0 +1,8 @@
pub fn hack_env_to_fit_workspace() -> std::io::Result<()> {
if cfg!(test) || cfg!(debug_assertions) {
let package_dir = env!("CARGO_MANIFEST_DIR");
let package_dir = std::path::Path::new(package_dir);
std::env::set_current_dir(package_dir)?;
}
Ok(())
}
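
A hedged usage sketch (the function name below is hypothetical): run the helper before touching relative paths such as "data/dal/" so they resolve against crates/recorder instead of the workspace root; outside tests and debug builds it is a no-op.

fn prepare_playground() -> std::io::Result<()> {
    // Re-anchor the working directory at the crate root (debug/test only).
    hack_env_to_fit_workspace()?;
    // Relative paths now resolve under crates/recorder.
    let _dal_root = std::path::Path::new("data/dal/");
    Ok(())
}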

View File

@ -1,6 +1,6 @@
use sea_orm::{ use sea_orm::{
sea_query::{Expr, InsertStatement, IntoIden, Query, SimpleExpr}, sea_query::{Expr, InsertStatement, IntoIden, Query, SimpleExpr},
ActiveModelTrait, ActiveValue, ColumnTrait, ConnectionTrait, DynIden, EntityName, EntityTrait, ActiveModelTrait, ActiveValue, ColumnTrait, ConnectionTrait, EntityName, EntityTrait,
FromQueryResult, Iterable, SelectModel, SelectorRaw, TryGetable, FromQueryResult, Iterable, SelectModel, SelectorRaw, TryGetable,
}; };
@ -34,27 +34,34 @@ where
); );
let ent = V::Entity::default(); let ent = V::Entity::default();
let mut insert = Query::insert(); let mut insert = Query::insert();
let mut insert_statement = insert let insert_statement = insert
.into_table(ent.table_ref()) .into_table(ent.table_ref())
.returning(Query::returning().exprs(returning_columns)); .returning(Query::returning().exprs(returning_columns));
{ {
extra_config(&mut insert_statement); extra_config(insert_statement);
} }
let mut columns = vec![];
for new_item in insert_values { for new_item in insert_values {
let mut columns = vec![];
let mut values = vec![]; let mut values = vec![];
for c in <V::Entity as EntityTrait>::Column::iter() { for c in <V::Entity as EntityTrait>::Column::iter() {
if let ActiveValue::Set(value) = new_item.get(c.clone()) { let av = new_item.get(c);
columns.push(c); match av {
values.push(SimpleExpr::Value(value)); ActiveValue::Set(value) => {
values.push(c.save_as(Expr::val(value)));
columns.push(c);
}
ActiveValue::Unchanged(value) => {
values.push(c.save_as(Expr::val(value)));
columns.push(c);
}
_ => {}
} }
} }
insert_statement.columns(columns);
insert_statement.values(values)?; insert_statement.values(values)?;
} }
insert_statement.columns(columns);
let result = SelectorRaw::<SelectModel<M>>::from_statement(db_backend.build(insert_statement)) let result = SelectorRaw::<SelectModel<M>>::from_statement(db_backend.build(insert_statement))
.all(db) .all(db)

View File

@ -0,0 +1,5 @@
pub mod cli;
pub mod db;
#[cfg(test)]
pub mod test;

View File

@ -0,0 +1,20 @@
use std::path::Path;
pub fn load_test_env() -> Result<(), dotenv::Error> {
let package_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
let env_files = vec![
package_dir.join("configs/test.local.env"),
package_dir.join("configs/test.env"),
];
for env_file in env_files {
if env_file.exists() {
dotenv::from_path(env_file)?;
break;
}
}
Ok(())
}
pub fn load_test_env_panic() {
load_test_env().expect("failed to load test env")
}

View File

@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::models::entities::subscribers; use crate::models::subscribers;
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
pub struct CurrentResponse { pub struct CurrentResponse {

View File

@ -34,7 +34,7 @@ impl worker::Worker<CollectHistoryEpisodesWorkerArgs> for CollectHistoryEpisodes
async fn perform(&self, _args: CollectHistoryEpisodesWorkerArgs) -> worker::Result<()> { async fn perform(&self, _args: CollectHistoryEpisodesWorkerArgs) -> worker::Result<()> {
println!("================================================"); println!("================================================");
let db = &self.ctx.db; // let db = &self.ctx.db;
println!("================================================"); println!("================================================");
Ok(()) Ok(())

View File

@ -0,0 +1,6 @@
---
source: crates/recorder/tests/models/subscriptions.rs
assertion_line: 55
expression: a
---
1

View File

@ -1,9 +1,13 @@
use insta::assert_debug_snapshot; // use insta::assert_debug_snapshot;
use loco_rs::testing; use loco_rs::{app::Hooks, testing};
use recorder::{ use recorder::{
app::App, app::App,
models::{subscribers::ROOT_SUBSCRIBER_ID, subscriptions}, models::{
subscribers::{self},
subscriptions,
},
}; };
use sea_orm::{ActiveModelTrait, TryIntoModel};
use serial_test::serial; use serial_test::serial;
macro_rules! configure_insta { macro_rules! configure_insta {
@ -21,22 +25,38 @@ async fn can_pull_subscription() {
configure_insta!(); configure_insta!();
let boot = testing::boot_test::<App>().await.unwrap(); let boot = testing::boot_test::<App>().await.unwrap();
App::init_logger(&boot.app_context.config, &boot.app_context.environment).unwrap();
testing::seed::<App>(&boot.app_context.db).await.unwrap(); testing::seed::<App>(&boot.app_context.db).await.unwrap();
let db = &boot.app_context.db;
let create_rss = serde_json::from_str( let create_rss = serde_json::from_str(
r#"{ r#"{
"rss_link": "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370", "rss_link": "https://mikanani.me/RSS/Bangumi?bangumiId=3271&subgroupid=370",
"display_name": "Mikan Project - 葬送的芙莉莲", "display_name": "Mikan Project - 我心里危险的东西 第二季",
"aggregate": false, "aggregate": false,
"enabled": true, "enabled": true,
"category": "mikan" "category": "mikan"
}"#, }"#,
) )
.expect("should parse create rss dto from json"); .expect("should parse create rss dto from json");
let subscription = subscriptions::ActiveModel::from_create_dto(create_rss, ROOT_SUBSCRIBER_ID) let subscriber = subscribers::Model::find_by_pid(db, subscribers::ROOT_SUBSCRIBER_NAME)
.await .await
.expect("should create subscription"); .expect("should find subscriber");
assert_debug_snapshot!(existing_subscriber); let subscription = subscriptions::ActiveModel::from_create_dto(create_rss, subscriber.id);
let subscription = subscription
.save(&boot.app_context.db)
.await
.expect("should save subscription")
.try_into_model()
.expect("should convert to model");
subscription
.pull_one(&boot.app_context, &subscriber)
.await
.expect("should pull subscription");
// assert_debug_snapshot!(a);
} }

View File

@ -1 +0,0 @@
TMDB_API_TOKEN=your_token_here