feat: pull subscription

This commit is contained in:
master 2024-03-30 01:01:00 +08:00
parent 50243db5dc
commit 2f5b001bb6
52 changed files with 946 additions and 730 deletions

View File

@ -1,6 +1,6 @@
[alias]
recorder = "run -p recorder --bin recorder_cli -- --environment recorder.development"
recorder-playground = "run -p recorder --example playground -- --environment recorder.development"
recorder = "run -p recorder --bin recorder_cli -- --environment development"
recorder-playground = "run -p recorder --example playground -- --environment development"
[build]
rustflags = ["-Zthreads=12", "-Clink-arg=-fuse-ld=lld"]

4
.gitignore vendored
View File

@ -222,4 +222,6 @@ index.d.ts.map
/temp
/rustc-ice-*
/test.env
/crates/recorder/config/test.local.env
**/*.local.yaml
**/*.local.env

1
Cargo.lock generated
View File

@ -3654,6 +3654,7 @@ version = "0.1.0"
dependencies = [
"async-trait",
"axum",
"base64 0.22.0",
"bytes",
"chrono",
"dateparser",

View File

@ -7,5 +7,5 @@ members = [
resolver = "2"
[profile.dev]
debug = 0
#debug = 0
codegen-backend = "cranelift"

View File

@ -15,3 +15,5 @@ Cargo.lock
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
/data

View File

@ -62,6 +62,7 @@ oxilangtag = { version = "0.1.5", features = ["serde"] }
dateparser = "0.2.1"
dotenv = "0.15.0"
weak-table = "0.3.2"
base64 = "0.22.0"
[dev-dependencies]
serial_test = "3.0.0"

View File

@ -88,5 +88,6 @@ database:
# Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_recreate: false
tmdb:
api_token: '{{ get_env(name="TMDB_API_TOKEN") }}'
settings:
dal:
fs_root: "data/dal/"

View File

@ -0,0 +1 @@
TMDB_API_TOKEN=""

View File

@ -1,9 +1,7 @@
# Loco configuration file documentation
# Application logging configuration
logger:
# Enable or disable logging.
enable: false
enable: true
# Log level, options: trace, debug, info, warn or error.
level: debug
# Define the logging format. options: compact, pretty or Json
@ -65,9 +63,9 @@ workers:
# Database Configuration
database:
# Database connection URI
uri: {{get_env(name="DATABASE_URL", default="postgres://loco:loco@localhost:5432/recorder_test")}}
uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging: false
enable_logging: true
# Set the timeout duration when acquiring a connection.
connect_timeout: 500
# Set the idle duration before closing a connection.
@ -81,12 +79,8 @@ database:
# Truncate database when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_truncate: true
# Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_recreate: false
# Redis Configuration
redis:
# Redis connection URI
uri: {{get_env(name="REDIS_URL", default="redis://127.0.0.1")}}
# Dangerously flush all data in Redis on startup. dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_flush: false
dangerously_recreate: true
settings:
dal:
fs_root: "data/dal"

View File

@ -2,6 +2,7 @@ use async_trait::async_trait;
use loco_rs::{
app::Hooks,
boot::{create_app, BootResult, StartMode},
config::Config,
controller::AppRoutes,
db::truncate_table,
environment::Environment,
@ -10,12 +11,14 @@ use loco_rs::{
worker::Processor,
};
use sea_orm::prelude::*;
use tracing_subscriber::EnvFilter;
use crate::{
controllers,
migrations::Migrator,
models::{bangumi, downloaders, episodes, resources, subscribers, subscriptions},
storage::AppDalInitializer,
utils::cli::hack_env_to_fit_workspace,
workers::subscription::SubscriptionWorker,
};
@ -38,6 +41,7 @@ impl Hooks for App {
}
async fn boot(mode: StartMode, environment: &Environment) -> Result<BootResult> {
hack_env_to_fit_workspace()?;
create_app::<Self, Migrator>(mode, environment).await
}
@ -56,7 +60,7 @@ impl Hooks for App {
async fn truncate(db: &DatabaseConnection) -> Result<()> {
futures::try_join!(
subscribers::Entity::delete_many()
.filter(subscribers::Column::Id.ne(subscribers::ROOT_SUBSCRIBER_ID))
.filter(subscribers::Column::Pid.ne(subscribers::ROOT_SUBSCRIBER_NAME))
.exec(db),
truncate_table(db, subscriptions::Entity),
truncate_table(db, resources::Entity),
@ -74,4 +78,46 @@ impl Hooks for App {
async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
Ok(vec![Box::new(AppDalInitializer)])
}
fn init_logger(app_config: &Config, _env: &Environment) -> Result<bool> {
let config = &app_config.logger;
if config.enable {
let filter = EnvFilter::try_from_default_env()
.or_else(|_| {
// user wanted a specific filter, don't care about our internal whitelist
// or, if no override give them the default whitelisted filter (most common)
config.override_filter.as_ref().map_or_else(
|| {
EnvFilter::try_new(
["loco_rs", "sea_orm_migration", "tower_http", "sqlx::query"]
.iter()
.map(|m| format!("{}={}", m, config.level))
.chain(std::iter::once(format!(
"{}={}",
App::app_name(),
config.level
)))
.collect::<Vec<_>>()
.join(","),
)
},
EnvFilter::try_new,
)
})
.expect("logger initialization failed");
let builder = tracing_subscriber::FmtSubscriber::builder().with_env_filter(filter);
match serde_json::to_string(&config.format)
.expect("init logger format can serialized")
.trim_matches('"')
{
"pretty" => builder.pretty().init(),
"json" => builder.json().init(),
_ => builder.compact().init(),
};
}
Ok(true)
}
}

View File

@ -1,8 +1,8 @@
use loco_rs::cli;
use recorder::migrations::Migrator;
use recorder::app::App;
use recorder::{app::App, migrations::Migrator, utils::cli::hack_env_to_fit_workspace};
#[tokio::main]
async fn main() -> eyre::Result<()> {
hack_env_to_fit_workspace()?;
cli::main::<App, Migrator>().await
}

View File

@ -1,6 +1,6 @@
use loco_rs::prelude::*;
use crate::{models::entities::subscribers, views::subscribers::CurrentResponse};
use crate::{models::subscribers, views::subscribers::CurrentResponse};
async fn current(State(ctx): State<AppContext>) -> Result<Json<CurrentResponse>> {
let subscriber = subscribers::Model::find_root(&ctx.db).await?;

View File

@ -23,7 +23,7 @@ use crate::{
defs::{QbitTorrent, QbitTorrentContent},
torrent::{Torrent, TorrentContent, TorrentDownloader, TorrentFilter, TorrentSource},
},
models::{entities::downloaders, prelude::DownloaderCategory},
models::{downloaders, prelude::DownloaderCategory},
};
pub struct SyncDataCache {

View File

@ -17,3 +17,4 @@ pub mod workers;
pub mod i18n;
pub mod subscribe;
pub mod utils;

View File

@ -1,6 +1,7 @@
use std::{collections::HashSet, fmt::Display};
use std::collections::HashSet;
use sea_orm::{DeriveIden, Statement};
use itertools::Itertools;
use sea_orm::{ActiveEnum, DeriveIden, Statement};
use sea_orm_migration::prelude::{extension::postgres::IntoTypeRef, *};
use crate::migrations::extension::postgres::Type;
@ -71,9 +72,10 @@ pub enum Episodes {
SeasonRaw,
Fansub,
PosterLink,
HomePage,
Homepage,
Subtitle,
Source,
EpIndex,
}
#[derive(DeriveIden)]
@ -86,9 +88,10 @@ pub enum Resources {
Status,
CurrSize,
AllSize,
Mime,
Category,
Url,
HomePage,
Homepage,
SavePath,
}
#[derive(DeriveIden)]
@ -178,7 +181,7 @@ pub trait CustomSchemaManagerExt {
async fn create_postgres_enum_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send,
T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
I: IntoIterator<Item = T> + Send,
>(
&self,
@ -188,7 +191,7 @@ pub trait CustomSchemaManagerExt {
async fn add_postgres_enum_values_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send,
T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
I: IntoIterator<Item = T> + Send,
>(
&self,
@ -210,6 +213,48 @@ pub trait CustomSchemaManagerExt {
&self,
enum_name: E,
) -> Result<HashSet<String>, DbErr>;
async fn create_convention_index<
T: IntoTableRef + Send,
I: IntoIterator<Item = C> + Send,
C: IntoIndexColumn + Send,
>(
&self,
table: T,
columns: I,
) -> Result<(), DbErr>;
fn build_convention_index<
T: IntoTableRef + Send,
I: IntoIterator<Item = C> + Send,
C: IntoIndexColumn + Send,
>(
&self,
table: T,
columns: I,
) -> IndexCreateStatement {
let table = table.into_table_ref();
let table_name = match &table {
TableRef::Table(s) => s.to_string(),
_ => panic!("unsupported table ref"),
};
let columns = columns
.into_iter()
.map(|c| c.into_index_column())
.collect_vec();
let mut stmt = Index::create();
stmt.table(table);
for c in columns {
stmt.col(c);
}
let index_name = format!(
"idx_{}_{}",
table_name,
stmt.get_index_spec().get_column_names().join("-")
);
stmt.name(&index_name);
stmt
}
}
#[async_trait::async_trait]
@ -217,7 +262,8 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
async fn create_postgres_auto_update_ts_fn(&self, col_name: &str) -> Result<(), DbErr> {
let sql = format!(
"CREATE OR REPLACE FUNCTION update_{col_name}_column() RETURNS TRIGGER AS $$ BEGIN \
NEW.{col_name} = current_timestamp; RETURN NEW; END; $$ language 'plpgsql';"
NEW.\"{col_name}\" = current_timestamp; RETURN NEW; END; $$ language 'plpgsql';",
col_name = col_name
);
self.get_connection()
@ -266,7 +312,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
async fn create_postgres_enum_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send,
T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
I: IntoIterator<Item = T> + Send,
>(
&self,
@ -277,7 +323,10 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
if !existed {
let idents = values
.into_iter()
.map(|v| Alias::new(v.to_string()))
.map(|v| {
let v = v.to_value();
Alias::new(v)
})
.collect::<Vec<_>>();
self.create_type(Type::create().as_enum(enum_name).values(idents).to_owned())
.await?;
@ -290,7 +339,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
async fn add_postgres_enum_values_for_active_enum<
E: IntoTypeRef + IntoIden + Send + Clone,
T: Display + Send,
T: ActiveEnum<Value = String, ValueVec = Vec<String>> + Send,
I: IntoIterator<Item = T> + Send,
>(
&self,
@ -300,7 +349,8 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
let exists_values = self.get_postgres_enum_values(enum_name.clone()).await?;
let to_add_values = values
.into_iter()
.filter(|v| !exists_values.contains(&v.to_string()))
.map(|v| v.to_value())
.filter(|v| !exists_values.contains(v))
.collect::<Vec<_>>();
if to_add_values.is_empty() {
@ -310,6 +360,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
let mut type_alter = Type::alter().name(enum_name);
for v in to_add_values {
let v: Value = v.into();
type_alter = type_alter.add_value(Alias::new(v.to_string()));
}
@ -345,8 +396,7 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
) -> Result<HashSet<String>, DbErr> {
let enum_name: String = enum_name.into_iden().to_string();
let sql = format!(
"SELECT pg_enum.enumlabel AS enumlabel FROM pg_type JOIN pg_enum ON pg_enum.enumtypid \
= pg_type.oid WHERE pg_type.typname = '{enum_name}';"
r#"SELECT pg_enum.enumlabel AS enumlabel FROM pg_type JOIN pg_enum ON pg_enum.enumtypid = pg_type.oid WHERE pg_type.typname = '{enum_name}';"#
);
let results = self
@ -361,4 +411,17 @@ impl<'c> CustomSchemaManagerExt for SchemaManager<'c> {
Ok(items)
}
async fn create_convention_index<
T: IntoTableRef + Send,
I: IntoIterator<Item = C> + Send,
C: IntoIndexColumn + Send,
>(
&self,
table: T,
columns: I,
) -> Result<(), DbErr> {
let stmt = self.build_convention_index(table, columns);
self.create_index(stmt.to_owned()).await
}
}

View File

@ -4,10 +4,7 @@ use sea_orm_migration::{prelude::*, schema::*};
use super::defs::{
Bangumi, CustomSchemaManagerExt, Episodes, GeneralIds, Subscribers, Subscriptions,
};
use crate::models::{
subscribers::{ROOT_SUBSCRIBER_ID, ROOT_SUBSCRIBER_NAME},
subscriptions,
};
use crate::models::{subscribers::ROOT_SUBSCRIBER_NAME, subscriptions};
#[derive(DeriveMigrationName)]
pub struct Migration;
@ -37,19 +34,15 @@ impl MigrationTrait for Migration {
let insert = Query::insert()
.into_table(Subscribers::Table)
.columns([Subscribers::Id, Subscribers::Pid, Subscribers::DisplayName])
.values_panic([
ROOT_SUBSCRIBER_ID.into(),
ROOT_SUBSCRIBER_NAME.into(),
ROOT_SUBSCRIBER_NAME.into(),
])
.columns([Subscribers::Pid, Subscribers::DisplayName])
.values_panic([ROOT_SUBSCRIBER_NAME.into(), ROOT_SUBSCRIBER_NAME.into()])
.to_owned();
manager.exec_stmt(insert).await?;
manager
.create_postgres_enum_for_active_enum(
subscriptions::SubscriptionCategoryEnum,
&[
[
subscriptions::SubscriptionCategory::Mikan,
subscriptions::SubscriptionCategory::Tmdb,
],
@ -88,7 +81,6 @@ impl MigrationTrait for Migration {
GeneralIds::UpdatedAt,
)
.await?;
manager
.create_table(
table_auto(Bangumi::Table)
@ -112,30 +104,23 @@ impl MigrationTrait for Migration {
.on_delete(ForeignKeyAction::Cascade),
)
.index(
Index::create()
.name("idx_bangumi_official_title")
.table(Bangumi::Table)
.col(Bangumi::OfficialTitle)
.unique(),
)
.index(
Index::create()
.name("idx_bangumi_fansub")
.table(Bangumi::Table)
.col(Bangumi::Fansub)
.unique(),
)
.index(
Index::create()
.name("idx_bangumi_display_name")
.table(Bangumi::Table)
.col(Bangumi::DisplayName)
manager
.build_convention_index(
Bangumi::Table,
[Bangumi::OfficialTitle, Bangumi::Fansub, Bangumi::Season],
)
.unique(),
)
.to_owned(),
)
.await?;
futures::try_join!(
manager.create_convention_index(Bangumi::Table, [Bangumi::Fansub]),
manager.create_convention_index(Bangumi::Table, [Bangumi::Season]),
manager.create_convention_index(Bangumi::Table, [Bangumi::OfficialTitle]),
)?;
manager
.create_postgres_auto_update_ts_trigger_for_col(Bangumi::Table, GeneralIds::UpdatedAt)
.await?;
@ -154,16 +139,16 @@ impl MigrationTrait for Migration {
.col(text_null(Episodes::SNameJp))
.col(text_null(Episodes::SNameEn))
.col(integer(Episodes::BangumiId))
.col(integer(Episodes::ResourceId))
.col(text_null(Episodes::SavePath))
.col(string_null(Episodes::Resolution))
.col(integer(Episodes::Season))
.col(string_null(Episodes::SeasonRaw))
.col(string_null(Episodes::Fansub))
.col(text_null(Episodes::PosterLink))
.col(text_null(Episodes::HomePage))
.col(jsonb_null(Episodes::Subtitle))
.col(text_null(Episodes::Homepage))
.col(array_null(Episodes::Subtitle, ColumnType::Text))
.col(text_null(Episodes::Source))
.col(unsigned(Episodes::EpIndex))
.foreign_key(
ForeignKey::create()
.name("fk_episode_bangumi_id")
@ -172,28 +157,17 @@ impl MigrationTrait for Migration {
.on_update(ForeignKeyAction::Restrict)
.on_delete(ForeignKeyAction::Cascade),
)
.index(
Index::create()
.name("idx_episode_official_title")
.table(Episodes::Table)
.col(Episodes::OfficialTitle),
)
.index(
Index::create()
.name("idx_episode_fansub")
.table(Episodes::Table)
.col(Episodes::Fansub),
)
.index(
Index::create()
.name("idx_episode_display_name")
.table(Episodes::Table)
.col(Episodes::DisplayName),
)
.to_owned(),
)
.await?;
futures::try_join!(
manager.create_convention_index(Episodes::Table, [Episodes::OfficialTitle]),
manager.create_convention_index(Episodes::Table, [Episodes::Fansub]),
manager.create_convention_index(Episodes::Table, [Episodes::Season]),
manager.create_convention_index(Episodes::Table, [Episodes::EpIndex]),
)?;
manager
.create_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt)
.await?;

View File

@ -3,7 +3,7 @@ use sea_orm_migration::{prelude::*, schema::*};
use super::defs::*;
use crate::models::resources::{
DownloadStatus, DownloadStatusEnum, ResourceMime, ResourceMimeEnum,
DownloadStatus, DownloadStatusEnum, ResourceCategory, ResourceCategoryEnum,
};
#[derive(DeriveMigrationName)]
@ -14,15 +14,19 @@ impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_postgres_enum_for_active_enum(
ResourceMimeEnum,
&[ResourceMime::OctetStream, ResourceMime::BitTorrent],
ResourceCategoryEnum,
[
ResourceCategory::BitTorrent,
ResourceCategory::OctetStream,
ResourceCategory::Poster,
],
)
.await?;
manager
.create_postgres_enum_for_active_enum(
DownloadStatusEnum,
&[
[
DownloadStatus::Pending,
DownloadStatus::Downloading,
DownloadStatus::Completed,
@ -43,17 +47,18 @@ impl MigrationTrait for Migration {
.col(enumeration(
Resources::Status,
DownloadStatusEnum,
ResourceMime::iden_values(),
ResourceCategory::iden_values(),
))
.col(enumeration(
Resources::Mime,
ResourceMimeEnum,
ResourceMime::iden_values(),
Resources::Category,
ResourceCategoryEnum,
ResourceCategory::iden_values(),
))
.col(big_unsigned_null(Resources::AllSize))
.col(big_unsigned_null(Resources::CurrSize))
.col(big_integer_null(Resources::AllSize))
.col(big_integer_null(Resources::CurrSize))
.col(text(Resources::Url))
.col(text_null(Resources::HomePage))
.col(text_null(Resources::Homepage))
.col(text_null(Resources::SavePath))
.foreign_key(
ForeignKey::create()
.name("fk_download_subscription_id")
@ -63,21 +68,18 @@ impl MigrationTrait for Migration {
.on_delete(ForeignKeyAction::Cascade),
)
.index(
Index::create()
.name("idx_download_url")
.table(Resources::Table)
.col(Resources::Url),
)
.index(
Index::create()
.name("idx_download_home_page")
.table(Resources::Table)
.col(Resources::HomePage),
manager
.build_convention_index(Resources::Table, [Resources::Url])
.unique(),
)
.to_owned(),
)
.await?;
futures::try_join!(
manager.create_convention_index(Resources::Table, [Resources::Homepage]),
)?;
manager
.create_postgres_auto_update_ts_fn_for_col(GeneralIds::UpdatedAt)
.await?;
@ -124,7 +126,7 @@ impl MigrationTrait for Migration {
.await?;
manager
.drop_postgres_enum_for_active_enum(ResourceMimeEnum)
.drop_postgres_enum_for_active_enum(ResourceCategoryEnum)
.await?;
manager
.drop_postgres_enum_for_active_enum(DownloadStatusEnum)

View File

@ -2,7 +2,7 @@ use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{CustomSchemaManagerExt, Downloaders, GeneralIds, Subscribers},
models::{downloaders::DownloaderCategoryEnum, prelude::DownloaderCategory},
models::downloaders::{DownloaderCategory, DownloaderCategoryEnum},
};
#[derive(DeriveMigrationName)]
@ -14,7 +14,7 @@ impl MigrationTrait for Migration {
manager
.create_postgres_enum_for_active_enum(
DownloaderCategoryEnum,
&[DownloaderCategory::QBittorrent],
[DownloaderCategory::QBittorrent],
)
.await?;

View File

@ -2,9 +2,107 @@ use std::collections::HashSet;
use itertools::Itertools;
use regex::Regex;
use sea_orm::entity::prelude::*;
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
pub use super::entities::bangumi::*;
#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "bangumi_distribution"
)]
#[serde(rename_all = "snake_case")]
pub enum BangumiDistribution {
#[sea_orm(string_value = "movie")]
Movie,
#[sea_orm(string_value = "ova")]
Ova,
#[sea_orm(string_value = "oad")]
Oad,
#[sea_orm(string_value = "sp")]
Sp,
#[sea_orm(string_value = "ex")]
Ex,
#[sea_orm(string_value = "tv")]
Tv,
#[sea_orm(string_value = "unknown")]
Unknown,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
#[serde(rename_all = "snake_case")]
pub enum BangumiRenameMethod {
Pn,
Advance,
SubtitlePn,
SubtitleAdvance,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscribeBangumiConfigOverride {
pub leading_fansub_tag: Option<bool>,
pub complete_history_episodes: Option<bool>,
pub rename_method: Option<BangumiRenameMethod>,
pub remove_bad_torrent: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct BangumiFilter {
pub plaintext_filters: Option<Vec<String>>,
pub regex_filters: Option<Vec<String>>,
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct BangumiUniqueKey {
pub official_title: String,
pub season: i32,
pub fansub: Option<String>,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "bangumi")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub subscription_id: i32,
pub display_name: String,
pub official_title: String,
pub fansub: Option<String>,
pub season: i32,
pub filter: Option<BangumiFilter>,
pub poster_link: Option<String>,
pub save_path: Option<String>,
pub last_ep: i32,
pub bangumi_conf_override: Option<SubscribeBangumiConfigOverride>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

View File

@ -1,7 +1,51 @@
use sea_orm::prelude::*;
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use url::Url;
pub use crate::models::entities::downloaders::*;
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "downloader_category"
)]
#[serde(rename_all = "snake_case")]
pub enum DownloaderCategory {
#[sea_orm(string_value = "qbittorrent")]
QBittorrent,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloaders")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub category: DownloaderCategory,
pub endpoint: String,
pub password: String,
pub username: String,
pub subscriber_id: i32,
pub save_path: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

View File

@ -1,101 +0,0 @@
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "bangumi_distribution"
)]
#[serde(rename_all = "snake_case")]
pub enum BangumiDistribution {
#[sea_orm(string_value = "movie")]
Movie,
#[sea_orm(string_value = "ova")]
Ova,
#[sea_orm(string_value = "oad")]
Oad,
#[sea_orm(string_value = "sp")]
Sp,
#[sea_orm(string_value = "ex")]
Ex,
#[sea_orm(string_value = "tv")]
Tv,
#[sea_orm(string_value = "unknown")]
Unknown,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
#[serde(rename_all = "snake_case")]
pub enum BangumiRenameMethod {
Pn,
Advance,
SubtitlePn,
SubtitleAdvance,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscribeBangumiConfigOverride {
pub leading_fansub_tag: Option<bool>,
pub complete_history_episodes: Option<bool>,
pub rename_method: Option<BangumiRenameMethod>,
pub remove_bad_torrent: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct BangumiFilter {
pub plaintext_filters: Option<Vec<String>>,
pub regex_filters: Option<Vec<String>>,
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct BangumiUniqueKey {
pub official_title: String,
pub season: u32,
pub fansub: Option<String>,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "bangumi")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub subscription_id: i32,
pub display_name: String,
pub official_title: String,
pub fansub: Option<String>,
pub season: u32,
pub filter: Option<BangumiFilter>,
pub poster_link: Option<String>,
pub save_path: Option<String>,
pub last_ep: u32,
pub bangumi_conf_override: Option<SubscribeBangumiConfigOverride>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}

View File

@ -1,47 +0,0 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "downloader_category"
)]
#[serde(rename_all = "snake_case")]
pub enum DownloaderCategory {
#[sea_orm(string_value = "qbittorrent")]
QBittorrent,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloaders")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub category: DownloaderCategory,
pub endpoint: String,
pub password: String,
pub username: String,
pub subscriber_id: i32,
pub save_path: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}

View File

@ -1,61 +0,0 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "episodes")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub origin_title: String,
pub official_title: String,
pub display_name: String,
pub name_zh: Option<String>,
pub name_jp: Option<String>,
pub name_en: Option<String>,
pub s_name_zh: Option<String>,
pub s_name_jp: Option<String>,
pub s_name_en: Option<String>,
pub bangumi_id: i32,
pub resource_id: Option<i32>,
pub save_path: Option<String>,
pub resolution: Option<String>,
pub season: u32,
pub season_raw: Option<String>,
pub fansub: Option<String>,
pub poster_link: Option<String>,
pub home_page: Option<String>,
pub subtitle: Option<Vec<String>>,
pub source: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::bangumi::Entity",
from = "Column::BangumiId",
to = "super::bangumi::Column::Id"
)]
Bangumi,
#[sea_orm(
belongs_to = "super::resources::Entity",
from = "Column::ResourceId",
to = "super::resources::Column::Id"
)]
Resources,
}
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}
impl Related<super::resources::Entity> for Entity {
fn to() -> RelationDef {
Relation::Resources.def()
}
}

View File

@ -1,7 +0,0 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.4
pub mod bangumi;
pub mod downloaders;
pub mod episodes;
pub mod resources;
pub mod subscribers;
pub mod subscriptions;

View File

@ -1,77 +0,0 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_status")]
#[serde(rename_all = "snake_case")]
pub enum DownloadStatus {
#[sea_orm(string_value = "pending")]
Pending,
#[sea_orm(string_value = "downloading")]
Downloading,
#[sea_orm(string_value = "paused")]
Paused,
#[sea_orm(string_value = "completed")]
Completed,
#[sea_orm(string_value = "failed")]
Failed,
#[sea_orm(string_value = "deleted")]
Deleted,
}
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "resource_mime")]
pub enum ResourceMime {
#[sea_orm(string_value = "application/octet-stream")]
#[serde(rename = "application/octet-stream")]
OctetStream,
#[sea_orm(string_value = "application/x-bittorrent")]
#[serde(rename = "application/x-bittorrent")]
BitTorrent,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "resources")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub origin_title: String,
pub display_name: String,
pub subscription_id: i32,
pub status: DownloadStatus,
pub mime: ResourceMime,
pub url: String,
pub all_size: Option<u64>,
pub curr_size: Option<u64>,
pub homepage: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}

View File

@ -1,63 +0,0 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
use super::bangumi::BangumiRenameMethod;
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscribeBangumiConfig {
pub leading_fansub_tag: bool,
pub complete_history_episodes: bool,
pub rename_method: BangumiRenameMethod,
pub remove_bad_torrent: bool,
}
impl Default for SubscribeBangumiConfig {
fn default() -> Self {
Self {
leading_fansub_tag: false,
complete_history_episodes: false,
rename_method: BangumiRenameMethod::Pn,
remove_bad_torrent: false,
}
}
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscribers")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
#[sea_orm(unique)]
pub pid: String,
pub display_name: String,
pub downloader_id: Option<i32>,
pub bangumi_conf: SubscribeBangumiConfig,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(
belongs_to = "super::downloaders::Entity",
from = "Column::DownloaderId",
to = "super::downloaders::Column::Id"
)]
Downloader,
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::downloaders::Entity> for Entity {
fn to() -> RelationDef {
Relation::Downloader.def()
}
}

View File

@ -1,59 +0,0 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
#[sea_orm(string_value = "mikan")]
Mikan,
#[sea_orm(string_value = "tmdb")]
Tmdb,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
#[sea_orm(column_type = "Timestamp")]
pub created_at: DateTime,
#[sea_orm(column_type = "Timestamp")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub display_name: String,
pub subscriber_id: i32,
pub category: SubscriptionCategory,
pub source_url: String,
pub aggregate: bool,
pub enabled: bool,
}
// Relations: a subscription belongs to one subscriber and has many bangumi.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
#[sea_orm(has_many = "super::bangumi::Entity")]
Bangumi,
}
// Wires the relation definitions above into SeaORM's query builder.
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}

View File

@ -1,11 +1,69 @@
use sea_orm::{entity::prelude::*, ActiveValue};
use serde::{Deserialize, Serialize};
pub use super::entities::episodes::*;
use crate::{
models::resources,
parsers::{mikan::MikanEpisodeMeta, raw::RawEpisodeMeta},
};
// SeaORM entity for the `episodes` table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "episodes")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
// Raw release title as seen in the feed, before parsing.
pub origin_title: String,
pub official_title: String,
pub display_name: String,
// Localized titles as parsed from the raw name.
pub name_zh: Option<String>,
pub name_jp: Option<String>,
pub name_en: Option<String>,
// `s_name_*`: titles with the season marker stripped — presumably produced
// by the raw-name parser's season extraction; confirm against parsers::raw.
pub s_name_zh: Option<String>,
pub s_name_jp: Option<String>,
pub s_name_en: Option<String>,
pub bangumi_id: i32,
// Optional FK to the resource (torrent/poster) this episode came from.
pub resource_id: Option<i32>,
pub save_path: Option<String>,
pub resolution: Option<String>,
pub season: i32,
// Season text as it appeared in the title (e.g. "第二季"), if any.
pub season_raw: Option<String>,
pub fansub: Option<String>,
pub poster_link: Option<String>,
pub homepage: Option<String>,
pub subtitle: Option<Vec<String>>,
pub source: Option<String>,
pub ep_index: i32,
}
// Relations: an episode belongs to one bangumi and optionally to one resource.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::bangumi::Entity",
from = "Column::BangumiId",
to = "super::bangumi::Column::Id"
)]
Bangumi,
#[sea_orm(
belongs_to = "super::resources::Entity",
from = "Column::ResourceId",
to = "super::resources::Column::Id"
)]
Resources,
}
// Wires the relation definitions above into SeaORM's query builder.
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}
impl Related<super::resources::Entity> for Entity {
fn to() -> RelationDef {
Relation::Resources.def()
}
}
// No custom save/delete hooks for episodes.
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
@ -34,9 +92,10 @@ impl ActiveModel {
season_raw: ActiveValue::Set(raw_meta.season_raw),
fansub: ActiveValue::Set(raw_meta.fansub),
poster_link: ActiveValue::Set(mikan_poster),
home_page: ActiveValue::Set(resource.homepage),
homepage: ActiveValue::Set(resource.homepage),
subtitle: ActiveValue::Set(raw_meta.sub),
source: ActiveValue::Set(raw_meta.source),
ep_index: ActiveValue::Set(raw_meta.episode_index),
..Default::default()
}
}

View File

@ -1,7 +1,5 @@
pub mod bangumi;
pub(crate) mod db_utils;
pub mod downloaders;
pub mod entities;
pub mod episodes;
pub mod notifications;
pub mod prelude;

View File

@ -4,6 +4,6 @@ use serde::{Deserialize, Serialize};
// Payload describing a newly available episode batch for a notification.
pub struct Notification {
    official_title: String,
    season: i32,
    // i32 to match the signed season/episode counters used by the episode models
    // (the diff artifact had both a u64 and an i32 declaration; i32 is the
    // commit's target type).
    episode_size: i32,
    poster_url: Option<String>,
}

View File

@ -2,7 +2,7 @@ pub use super::{
bangumi::{self, Entity as Bangumi},
downloaders::{self, DownloaderCategory, Entity as Downloader},
episodes::{self, Entity as Episode},
resources::{self, DownloadStatus, Entity as Download, ResourceMime},
resources::{self, DownloadStatus, Entity as Download, ResourceCategory},
subscribers::{self, Entity as Subscriber},
subscriptions::{self, Entity as Subscription, SubscriptionCategory},
};

View File

@ -1,20 +1,108 @@
use sea_orm::{prelude::*, ActiveValue};
use std::future::Future;
pub use crate::models::entities::resources::*;
use crate::parsers::mikan::MikanRssItem;
use bytes::Bytes;
use loco_rs::app::AppContext;
use sea_orm::{entity::prelude::*, ActiveValue, TryIntoModel};
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
parsers::{errors::ParseError, mikan::MikanRssItem},
path::extract_extname_from_url,
storage::{AppContextDalExt, DalContentType},
};
// Lifecycle state of a resource download, stored as a string-backed DB enum.
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_status")]
#[serde(rename_all = "snake_case")]
pub enum DownloadStatus {
#[sea_orm(string_value = "pending")]
Pending,
#[sea_orm(string_value = "downloading")]
Downloading,
#[sea_orm(string_value = "paused")]
Paused,
#[sea_orm(string_value = "completed")]
Completed,
#[sea_orm(string_value = "failed")]
Failed,
#[sea_orm(string_value = "deleted")]
Deleted,
}
// Kind of stored resource; serde names mirror the DB string values.
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "resource_category")]
pub enum ResourceCategory {
#[sea_orm(string_value = "octet-stream")]
#[serde(rename = "octet-stream")]
OctetStream,
#[sea_orm(string_value = "bittorrent")]
#[serde(rename = "bittorrent")]
BitTorrent,
#[sea_orm(string_value = "poster")]
#[serde(rename = "poster")]
Poster,
}
// SeaORM entity for the `resources` table (torrents, posters, raw blobs).
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "resources")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
// Raw title as seen in the originating feed item.
pub origin_title: String,
pub display_name: String,
pub subscription_id: i32,
pub status: DownloadStatus,
pub category: ResourceCategory,
// Source URL of the resource (torrent/poster link).
pub url: String,
// Total and currently-downloaded sizes in bytes, when known.
pub all_size: Option<i64>,
pub curr_size: Option<i64>,
pub homepage: Option<String>,
// Local/DAL storage location once the blob has been persisted.
pub save_path: Option<String>,
}
// Relations: a resource belongs to one subscription and has many episodes.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id"
)]
Subscription,
#[sea_orm(has_many = "super::episodes::Entity")]
Episode,
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::episodes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Episode.def()
}
}
// No custom save/delete hooks for resources.
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {
pub fn from_mikan_rss_item(rss_item: MikanRssItem, subscription_id: i32) -> Self {
let download_mime = rss_item.get_download_mime();
let resource_category = rss_item.get_resource_category();
Self {
origin_title: ActiveValue::Set(rss_item.title.clone()),
display_name: ActiveValue::Set(rss_item.title),
subscription_id: ActiveValue::Set(subscription_id),
status: ActiveValue::Set(DownloadStatus::Pending),
mime: ActiveValue::Set(download_mime),
category: ActiveValue::Set(resource_category),
url: ActiveValue::Set(rss_item.url),
all_size: ActiveValue::Set(rss_item.content_length),
curr_size: ActiveValue::Set(Some(0)),
@ -22,4 +110,78 @@ impl ActiveModel {
..Default::default()
}
}
pub fn from_poster_url(
subscription_id: i32,
origin_title: String,
url: Url,
save_path: Option<String>,
content_length: i64,
) -> Self {
Self {
origin_title: ActiveValue::Set(origin_title.clone()),
display_name: ActiveValue::Set(origin_title),
subscription_id: ActiveValue::Set(subscription_id),
status: ActiveValue::Set(DownloadStatus::Completed),
category: ActiveValue::Set(ResourceCategory::Poster),
url: ActiveValue::Set(url.to_string()),
all_size: ActiveValue::Set(Some(content_length)),
curr_size: ActiveValue::Set(Some(content_length)),
save_path: ActiveValue::Set(save_path),
..Default::default()
}
}
}
impl Model {
/// Finds an existing resource row for (`subscription_id`, `url`) or, when
/// absent, fetches the poster bytes via `fetch_fn`, stores them through the
/// DAL, and inserts a new resource row.
///
/// # Errors
/// Fails when the lookup query fails, `fetch_fn` errors, the URL has no
/// extractable extension, or the blob store / insert / model conversion fails.
pub async fn from_poster_url<F, R, E>(
ctx: &AppContext,
subscriber_pid: &str,
subscription_id: i32,
original_title: String,
url: Url,
fetch_fn: F,
) -> eyre::Result<Self>
where
F: FnOnce(Url) -> R,
R: Future<Output = Result<Bytes, E>>,
E: Into<eyre::Report>,
{
let db = &ctx.db;
// Dedupe: a poster for the same subscription + source URL is stored once.
let found = Entity::find()
.filter(
Column::SubscriptionId
.eq(subscription_id)
.and(Column::Url.eq(url.as_str())),
)
.one(db)
.await?;
let resource = if let Some(found) = found {
found
} else {
// Fetch is only performed on a cache miss.
let bytes = fetch_fn(url.clone()).await.map_err(|e| e.into())?;
let content_length = bytes.len() as i64;
let dal = ctx.get_dal_unwrap().await;
// The stored blob keeps the source file extension (e.g. ".jpg").
let extname = extract_extname_from_url(&url)
.ok_or_else(|| ParseError::ParseExtnameError(url.to_string()))?;
let stored_url = dal
.store_blob(DalContentType::Poster, &extname, bytes, subscriber_pid)
.await?;
let saved_path = Some(stored_url.to_string());
let new_resource = ActiveModel::from_poster_url(
subscription_id,
original_title,
url,
saved_path,
content_length,
);
let new_resource = new_resource.save(db).await?;
// `save` returns an ActiveModel; convert back to a plain Model.
new_resource.try_into_model()?
};
Ok(resource)
}
}

View File

@ -1,15 +1,70 @@
use loco_rs::model::{ModelError, ModelResult};
use sea_orm::{entity::prelude::*, ActiveValue, TransactionTrait};
use sea_orm::{entity::prelude::*, ActiveValue, FromJsonQueryResult, TransactionTrait};
use serde::{Deserialize, Serialize};
pub use super::entities::subscribers::*;
use super::bangumi::BangumiRenameMethod;
pub const ROOT_SUBSCRIBER_ID: i32 = 1;
pub const ROOT_SUBSCRIBER_NAME: &str = "konobangu";
// Per-subscriber bangumi handling options, stored as a JSON DB column
// (FromJsonQueryResult).
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscribeBangumiConfig {
pub leading_fansub_tag: bool,
pub complete_history_episodes: bool,
pub rename_method: BangumiRenameMethod,
pub remove_bad_torrent: bool,
}
impl Default for SubscribeBangumiConfig {
// Manual impl pins `Pn` as the default rename method with all flags off.
fn default() -> Self {
Self {
leading_fansub_tag: false,
complete_history_episodes: false,
rename_method: BangumiRenameMethod::Pn,
remove_bad_torrent: false,
}
}
}
// SeaORM entity for the `subscribers` table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscribers")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
// Public identifier; a UUID string is generated on insert when not set
// (see this entity's ActiveModelBehavior).
pub pid: String,
pub display_name: String,
// Optional FK to `downloaders`.
pub downloader_id: Option<i32>,
// NOTE(review): None presumably means "use default config" — confirm against callers.
pub bangumi_conf: Option<SubscribeBangumiConfig>,
}
// Relations: a subscriber has many subscriptions and may reference one downloader.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::subscriptions::Entity")]
Subscription,
#[sea_orm(
belongs_to = "super::downloaders::Entity",
from = "Column::DownloaderId",
to = "super::downloaders::Column::Id"
)]
Downloader,
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
impl Related<super::downloaders::Entity> for Entity {
fn to() -> RelationDef {
Relation::Downloader.def()
}
}
// Request params identifying a subscriber.
// NOTE(review): both `id` and `pid` appear here; this looks like a diff
// artifact of an `id` -> `pid` rename — confirm the intended field set.
#[derive(Debug, Deserialize, Serialize)]
pub struct SubscriberIdParams {
pub id: String,
pub pid: String,
}
#[async_trait::async_trait]
@ -20,7 +75,9 @@ impl ActiveModelBehavior for ActiveModel {
{
if insert {
let mut this = self;
this.pid = ActiveValue::Set(Uuid::new_v4().to_string());
if this.pid.is_not_set() {
this.pid = ActiveValue::Set(Uuid::new_v4().to_string());
};
Ok(this)
} else {
Ok(self)
@ -29,17 +86,13 @@ impl ActiveModelBehavior for ActiveModel {
}
impl Model {
/// finds a user by the provided pid
///
/// # Errors
///
/// When could not find user or DB query error
pub async fn find_by_pid(db: &DatabaseConnection, pid: &str) -> ModelResult<Self> {
let parse_uuid = Uuid::parse_str(pid).map_err(|e| ModelError::Any(e.into()))?;
let subscriber = Entity::find()
.filter(Column::Pid.eq(parse_uuid))
.one(db)
.await?;
let subscriber = Entity::find().filter(Column::Pid.eq(pid)).one(db).await?;
subscriber.ok_or_else(|| ModelError::EntityNotFound)
}
pub async fn find_by_id(db: &DatabaseConnection, id: i32) -> ModelResult<Self> {
let subscriber = Entity::find().filter(Column::Id.eq(id)).one(db).await?;
subscriber.ok_or_else(|| ModelError::EntityNotFound)
}
@ -47,12 +100,6 @@ impl Model {
Self::find_by_pid(db, ROOT_SUBSCRIBER_NAME).await
}
/// Asynchronously creates a user with a password and saves it to the
/// database.
///
/// # Errors
///
/// When could not save the user into the DB
pub async fn create_root(db: &DatabaseConnection) -> ModelResult<Self> {
let txn = db.begin().await?;

View File

@ -10,9 +10,65 @@ use sea_orm::{
use serde::{Deserialize, Serialize};
use tracing::{event, instrument, Level};
pub use super::entities::subscriptions::{self, *};
// Source kind of a subscription, stored as a string-backed DB enum.
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
rs_type = "String",
db_type = "Enum",
enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
#[sea_orm(string_value = "mikan")]
Mikan,
#[sea_orm(string_value = "tmdb")]
Tmdb,
}
// SeaORM entity for the `subscriptions` table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
#[sea_orm(column_type = "Timestamp")]
pub created_at: DateTime,
#[sea_orm(column_type = "Timestamp")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub display_name: String,
// Owning subscriber; see `Relation::Subscriber`.
pub subscriber_id: i32,
pub category: SubscriptionCategory,
// Feed/source location, e.g. a Mikan RSS link.
pub source_url: String,
pub aggregate: bool,
// Disabled subscriptions are kept but not pulled.
pub enabled: bool,
}
// Relations: a subscription belongs to one subscriber and has many bangumi.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id"
)]
Subscriber,
#[sea_orm(has_many = "super::bangumi::Entity")]
Bangumi,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::bangumi::Entity> for Entity {
fn to() -> RelationDef {
Relation::Bangumi.def()
}
}
use crate::{
models::{bangumi, db_utils::insert_many_with_returning_all, episodes, resources},
models::{bangumi, episodes, resources, subscribers},
parsers::{
mikan::{
parse_episode_meta_from_mikan_homepage, parse_mikan_rss_items_from_rss_link,
@ -20,8 +76,7 @@ use crate::{
},
raw::{parse_episode_meta_from_raw_name, RawEpisodeMeta},
},
path::extract_extname_from_url,
storage::{AppContextDalExt, DalContentType},
utils::db::insert_many_with_returning_all,
};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@ -104,9 +159,14 @@ impl Model {
#[instrument(
fields(subscriber_id = "self.subscriber_id", subscription_id = "self.id"),
skip(self, db, ctx)
skip(self, ctx)
)]
pub async fn pull_one(&self, db: &DatabaseConnection, ctx: &AppContext) -> eyre::Result<()> {
pub async fn pull_one(
&self,
ctx: &AppContext,
subscriber: &subscribers::Model,
) -> eyre::Result<()> {
let db = &ctx.db;
let subscription = self;
let subscription_id = subscription.id;
match &subscription.category {
@ -152,7 +212,6 @@ impl Model {
let mut ep_metas: HashMap<bangumi::BangumiUniqueKey, Vec<MikanEpMetaBundle>> =
HashMap::new();
let dal = ctx.get_dal_unwrap().await;
{
for r in new_resources {
let mut mikan_meta = if let Some(homepage) = r.homepage.as_deref() {
@ -174,40 +233,34 @@ impl Model {
} else {
continue;
};
let mikan_poster_link = if let Some(poster) = mikan_meta.poster.take() {
if let Some(extname) = extract_extname_from_url(&poster.origin_url) {
let result = dal
.store_blob(
DalContentType::Poster,
&extname,
poster.data,
&subscriber_id.to_string(),
)
.await;
match result {
Ok(stored_url) => Some(stored_url.to_string()),
let mikan_poster_link =
if let Some(poster_url) = mikan_meta.poster_url.take() {
let poster_url_str = poster_url.to_string();
let poster_resource_result = resources::Model::from_poster_url(
ctx,
&subscriber.pid,
subscription_id,
mikan_meta.official_title.clone(),
poster_url,
|url| mikan_client.fetch_bytes(|f| f.get(url)),
)
.await;
match poster_resource_result {
Ok(resource) => resource.save_path,
Err(e) => {
let error: &dyn std::error::Error = e.as_ref();
event!(
Level::ERROR,
desc = "failed to store mikan meta poster",
origin_url = poster.origin_url.as_str(),
desc = "failed to fetch mikan meta poster",
poster_url = poster_url_str,
error = error
);
None
}
}
} else {
event!(
Level::ERROR,
desc = "failed to extract mikan meta poster extname",
origin_url = poster.origin_url.as_str(),
);
None
}
} else {
None
};
};
let raw_meta = match parse_episode_meta_from_raw_name(&r.origin_title) {
Ok(raw_meta) => raw_meta,
Err(e) => {

View File

@ -18,4 +18,6 @@ pub enum ParseError {
UnsupportedLanguagePreset(String),
#[error("Parse episode meta error, get empty official title, homepage = {0}")]
MikanEpisodeMetaEmptyOfficialTitleError(String),
#[error("Parse extname error from source = {0}")]
ParseExtnameError(String),
}

View File

@ -22,7 +22,7 @@ pub struct MikanEpisodeMetaPosterBlob {
// Episode metadata scraped from a Mikan episode homepage.
// The diff artifact carried both the removed `poster` blob field and the new
// `poster_url`; this commit keeps only the URL — the image bytes are fetched
// lazily by callers (see `resources::Model::from_poster_url`).
#[derive(Clone, Debug)]
pub struct MikanEpisodeMeta {
    pub homepage: Url,
    pub poster_url: Option<Url>,
    pub official_title: String,
}
@ -73,18 +73,6 @@ pub async fn parse_episode_meta_from_mikan_homepage(
p.set_query(None);
p
});
let poster = if let Some(p) = origin_poster_src {
client
.fetch_bytes(|f| f.get(p.clone()))
.await
.ok()
.map(|data| MikanEpisodeMetaPosterBlob {
data,
origin_url: p,
})
} else {
None
};
let official_title = official_title_node
.map(|s| s.inner_text(parser))
.and_then(|official_title| {
@ -102,7 +90,7 @@ pub async fn parse_episode_meta_from_mikan_homepage(
Ok(MikanEpisodeMeta {
homepage: url,
poster,
poster_url: origin_poster_src,
official_title,
})
}
@ -128,20 +116,11 @@ mod test {
assert_eq!(ep_meta.homepage, url);
assert_eq!(ep_meta.official_title, "葬送的芙莉莲");
assert_eq!(
ep_meta.poster.clone().map(|p| p.origin_url),
ep_meta.poster_url.clone(),
Some(Url::parse(
"https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
)?)
);
let u8_data = ep_meta
.poster
.clone()
.map(|p| p.data)
.expect("should have poster data");
assert!(
u8_data.starts_with(&[255, 216, 255, 224]),
"should start with valid jpeg data magic number"
);
}
Ok(())

View File

@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize};
use crate::{
downloaders::defs::BITTORRENT_MIME_TYPE,
models::prelude::ResourceMime,
models::prelude::ResourceCategory,
parsers::{errors::ParseError, mikan::mikan_client::MikanClient},
};
// One item parsed from a Mikan RSS feed (the enclosure plus its metadata).
pub struct MikanRssItem {
    pub title: String,
    pub homepage: Option<String>,
    pub url: String,
    // i64 to match the `all_size` column on the resources model (the diff
    // artifact had both a u64 and an i64 declaration; i64 is the target type).
    pub content_length: Option<i64>,
    pub mime: String,
    pub pub_date: Option<i64>,
}
impl MikanRssItem {
pub fn get_download_mime(&self) -> ResourceMime {
ResourceMime::BitTorrent
pub fn get_resource_category(&self) -> ResourceCategory {
ResourceCategory::BitTorrent
}
}

View File

@ -17,7 +17,8 @@ lazy_static! {
static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap();
static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap();
static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
static ref SUB_RE: Regex = Regex::new(r"[简繁日英字幕]|CH|BIG5|GB").unwrap();
static ref SUB_RE_EXCLUDE: Regex = Regex::new(r"字幕[社组]").unwrap();
static ref PREFIX_RE: Regex =
Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
@ -49,9 +50,9 @@ pub struct RawEpisodeMeta {
pub s_name_jp: Option<String>,
pub name_zh: Option<String>,
pub s_name_zh: Option<String>,
pub season: u32,
pub season: i32,
pub season_raw: Option<String>,
pub episode_index: u32,
pub episode_index: i32,
pub sub: Option<Vec<String>>,
pub source: Option<String>,
pub fansub: Option<String>,
@ -110,7 +111,7 @@ fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> eyre::Resul
Ok(raw.to_string())
}
fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, u32) {
fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
let seasons = SEASON_EXTRACT_SEASON_ALL_RE
.find(&name_and_season)
@ -122,7 +123,7 @@ fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>,
return (title_body.to_string(), None, 1);
}
let mut season = 1u32;
let mut season = 1i32;
let mut season_raw = None;
let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");
@ -131,7 +132,7 @@ fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>,
if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s) {
if let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
.replace_all(m.as_str(), "")
.parse::<u32>()
.parse::<i32>()
{
season = s;
break;
@ -140,7 +141,7 @@ fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>,
if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s) {
if let Some(s) = DIGIT_1PLUS_REG
.find(m.as_str())
.and_then(|s| s.as_str().parse::<u32>().ok())
.and_then(|s| s.as_str().parse::<i32>().ok())
{
season = s;
break;
@ -149,13 +150,13 @@ fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>,
if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
.replace(m.as_str(), "")
.parse::<u32>()
.parse::<i32>()
{
season = s;
break;
}
if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
season = ZH_NUM_MAP[m.as_str()] as u32;
season = ZH_NUM_MAP[m.as_str()];
break;
}
}
@ -207,11 +208,11 @@ fn extract_name_from_title_body_name_section(
(name_en, name_zh, name_jp)
}
fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<u32> {
fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
DIGIT_1PLUS_REG
.find(title_episode)?
.as_str()
.parse::<u32>()
.parse::<i32>()
.ok()
}
@ -237,7 +238,7 @@ fn extract_tags_from_title_extra(
let mut resolution = None;
let mut source = None;
for element in elements.iter() {
if SUB_RE.is_match(element) {
if SUB_RE.is_match(element) && !SUB_RE_EXCLUDE.is_match(element) {
let el = element.to_string();
sub = Some(match sub {
Some(mut res) => {
@ -297,17 +298,17 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> eyre::Result<RawEpisodeMeta>
let title_body = title_body_pre_process(&title_body, fansub)?;
let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
let (s_name_en, s_name_zh, s_name_jp) =
extract_name_from_title_body_name_section(&name_without_season);
let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1);
let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
Ok(RawEpisodeMeta {
name_en,
s_name_en: name_en_no_season,
s_name_en,
name_jp,
s_name_jp: name_jp_no_season,
s_name_jp,
name_zh,
s_name_zh: name_zh_no_season,
s_name_zh,
season,
season_raw,
episode_index,
@ -345,11 +346,11 @@ mod tests {
r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
r#"{
"name_zh": "我心里危险的东西",
"name_zh_no_season": "我心里危险的东西",
"s_name_zh": "我心里危险的东西",
"season": 2,
"season_raw": "第二季",
"episode_index": 5,
"sub": "简日双语",
"sub": ["简日双语"],
"source": null,
"fansub": "新Sub",
"resolution": "1080P"
@ -363,13 +364,13 @@ mod tests {
r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
r#"{
"name_en": "Boku no Kokoro no Yabai Yatsu",
"name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
"s_name_en": "Boku no Kokoro no Yabai Yatsu",
"name_zh": "我内心的糟糕念头",
"name_zh_no_season": "我内心的糟糕念头",
"s_name_zh": "我内心的糟糕念头",
"season": 1,
"season_raw": null,
"episode_index": 18,
"sub": "简日双语",
"sub": ["简日双语"],
"source": null,
"fansub": "喵萌奶茶屋",
"resolution": "1080p"
@ -383,13 +384,13 @@ mod tests {
r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{
"name_en": "Shin no Nakama 2nd",
"name_en_no_season": "Shin no Nakama",
"s_name_en": "Shin no Nakama",
"name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
"name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
"s_name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
"season": 2,
"season_raw": "2nd",
"episode_index": 8,
"sub": "简繁内封字幕",
"sub": ["简繁内封字幕"],
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
@ -403,10 +404,10 @@ mod tests {
r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
r#"{
"name_en": "THE MARGINAL SERVICE",
"name_en_no_season": "THE MARGINAL SERVICE",
"s_name_en": "THE MARGINAL SERVICE",
"season": 1,
"episode_index": 8,
"sub": "简繁内封字幕",
"sub": ["简繁内封字幕"],
"source": "WebRip",
"fansub": "动漫国字幕组&LoliHouse",
"resolution": "1080p"
@ -420,13 +421,13 @@ mod tests {
r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
r#"{
"name_en": "Nozomanu Fushi no Boukensha",
"name_en_no_season": "Nozomanu Fushi no Boukensha",
"s_name_en": "Nozomanu Fushi no Boukensha",
"name_zh": "事与愿违的不死冒险者",
"name_zh_no_season": "事与愿违的不死冒险者",
"s_name_zh": "事与愿违的不死冒险者",
"season": 1,
"season_raw": null,
"episode_index": 1,
"sub": "简繁内封字幕",
"sub": ["简繁内封字幕"],
"source": "WebRip",
"fansub": "LoliHouse",
"resolution": "1080p"
@ -442,13 +443,13 @@ mod tests {
"name_en": "Pon no Michi",
"name_jp": "ぽんのみち",
"name_zh": "碰之道",
"name_en_no_season": "Pon no Michi",
"name_jp_no_season": "ぽんのみち",
"name_zh_no_season": "碰之道",
"s_name_en": "Pon no Michi",
"s_name_jp": "ぽんのみち",
"s_name_zh": "碰之道",
"season": 1,
"season_raw": null,
"episode_index": 7,
"sub": "简繁日内封字幕",
"sub": ["简繁日内封字幕"],
"source": "WebRip",
"fansub": "喵萌奶茶屋&LoliHouse",
"resolution": "1080p"
@ -462,13 +463,13 @@ mod tests {
r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_en": "Yowai Character Tomozakikun",
"name_en_no_season": "Yowai Character Tomozakikun",
"s_name_en": "Yowai Character Tomozakikun",
"name_zh": "弱角友崎同学 2nd STAGE",
"name_zh_no_season": "弱角友崎同学",
"s_name_zh": "弱角友崎同学",
"season": 2,
"season_raw": "2nd",
"episode_index": 9,
"sub": "CHT",
"sub": ["CHT"],
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
@ -482,13 +483,13 @@ mod tests {
r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
r#"{
"name_en": "Kingdom S5",
"name_en_no_season": "Kingdom",
"s_name_en": "Kingdom",
"name_zh": "王者天下 第五季",
"name_zh_no_season": "王者天下",
"s_name_zh": "王者天下",
"season": 5,
"season_raw": "第五季",
"episode_index": 7,
"sub": "简繁外挂字幕",
"sub": ["简繁外挂字幕"],
"source": "WebRip",
"fansub": "豌豆字幕组&LoliHouse",
"resolution": "1080p"
@ -502,12 +503,12 @@ mod tests {
r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
r#"{
"name_en": "Alice to Therese no Maboroshi Koujou",
"name_en_no_season": "Alice to Therese no Maboroshi Koujou",
"s_name_en": "Alice to Therese no Maboroshi Koujou",
"name_zh": "爱丽丝与特蕾丝的虚幻工厂",
"name_zh_no_season": "爱丽丝与特蕾丝的虚幻工厂",
"s_name_zh": "爱丽丝与特蕾丝的虚幻工厂",
"season": 1,
"episode_index": 1,
"sub": "简繁内封",
"sub": ["简繁内封"],
"source": "WebRip",
"fansub": "千夏字幕组",
"resolution": "1080p"
@ -521,12 +522,12 @@ mod tests {
r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
r#"{
"name_en": "Yuru Camp Movie",
"name_en_no_season": "Yuru Camp Movie",
"s_name_en": "Yuru Camp Movie",
"name_zh": "电影 轻旅轻营 (摇曳露营)",
"name_zh_no_season": "电影 轻旅轻营 (摇曳露营)",
"s_name_zh": "电影 轻旅轻营 (摇曳露营)",
"season": 1,
"episode_index": 1,
"sub": "繁体",
"sub": ["繁体"],
"source": "UHDRip",
"fansub": "千夏字幕组&喵萌奶茶屋",
"resolution": "2160p"
@ -540,12 +541,12 @@ mod tests {
r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#,
r#"{
"name_en": "New Doraemon",
"name_en_no_season": "New Doraemon",
"s_name_en": "New Doraemon",
"name_zh": "哆啦A梦新番",
"name_zh_no_season": "哆啦A梦新番",
"s_name_zh": "哆啦A梦新番",
"season": 1,
"episode_index": 747,
"sub": "GB",
"sub": ["GB"],
"fansub": "梦蓝字幕组",
"resolution": "1080P"
}"#,
@ -558,12 +559,12 @@ mod tests {
r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
r#"{
"name_en": "Yuru Camp",
"name_en_no_season": "Yuru Camp",
"s_name_en": "Yuru Camp",
"name_zh": "剧场版-摇曳露营",
"name_zh_no_season": "剧场版-摇曳露营",
"s_name_zh": "剧场版-摇曳露营",
"season": 1,
"episode_index": 1,
"sub": "简日双语",
"sub": ["简日双语"],
"fansub": "MCE汉化组",
"resolution": "1080P"
}"#,
@ -576,12 +577,12 @@ mod tests {
r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#,
r#"{
"name_en": "NieR Automata Ver1.1a",
"name_en_no_season": "NieR Automata Ver1.1a",
"s_name_en": "NieR Automata Ver1.1a",
"name_zh": "尼尔:机械纪元",
"name_zh_no_season": "尼尔:机械纪元",
"s_name_zh": "尼尔:机械纪元",
"season": 1,
"episode_index": 2,
"sub": "简日双语",
"sub": ["简日双语"],
"fansub": "织梦字幕组",
"resolution": "1080P"
}"#,
@ -595,12 +596,12 @@ mod tests {
r#"
{
"name_en": "Delicious in Dungeon",
"name_en_no_season": "Delicious in Dungeon",
"s_name_en": "Delicious in Dungeon",
"name_zh": "迷宫饭",
"name_zh_no_season": "迷宫饭",
"s_name_zh": "迷宫饭",
"season": 1,
"episode_index": 3,
"sub": "日语中字",
"sub": ["日语中字"],
"source": "NETFLIX",
"fansub": "天月搬运组",
"resolution": "1080P"
@ -615,12 +616,12 @@ mod tests {
r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
r#"{
"name_en": "Dungeon Meshi",
"name_en_no_season": "Dungeon Meshi",
"s_name_en": "Dungeon Meshi",
"name_zh": "迷宫饭",
"name_zh_no_season": "迷宫饭",
"s_name_zh": "迷宫饭",
"season": 1,
"episode_index": 1,
"sub": "简日双语",
"sub": ["简日双语"],
"fansub": "爱恋字幕社",
"resolution": "1080P"
}"#,
@ -633,12 +634,12 @@ mod tests {
r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_en": "Mahou Shoujo ni Akogarete",
"name_en_no_season": "Mahou Shoujo ni Akogarete",
"s_name_en": "Mahou Shoujo ni Akogarete",
"name_zh": "梦想成为魔法少女 [年龄限制版]",
"name_zh_no_season": "梦想成为魔法少女 [年龄限制版]",
"s_name_zh": "梦想成为魔法少女 [年龄限制版]",
"season": 1,
"episode_index": 9,
"sub": "CHT",
"sub": ["CHT"],
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
@ -652,11 +653,11 @@ mod tests {
r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
r#"{
"name_zh": "16bit 的感动 ANOTHER LAYER",
"name_zh_no_season": "16bit 的感动 ANOTHER LAYER",
"s_name_zh": "16bit 的感动 ANOTHER LAYER",
"season": 1,
"season_raw": null,
"episode_index": 1,
"sub": "CHT",
"sub": ["CHT"],
"source": "Baha",
"fansub": "ANi",
"resolution": "1080P"
@ -670,12 +671,12 @@ mod tests {
r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#,
r#"{
"name_en": "~ Sugar Apple Fairy Tale ~",
"name_en_no_season": "~ Sugar Apple Fairy Tale ~",
"s_name_en": "~ Sugar Apple Fairy Tale ~",
"name_zh": "银砂糖师与黑妖精",
"name_zh_no_season": "银砂糖师与黑妖精",
"s_name_zh": "银砂糖师与黑妖精",
"season": 1,
"episode_index": 13,
"sub": "简日双语",
"sub": ["简日双语"],
"fansub": "喵萌奶茶屋",
"resolution": "1080p"
}"#,
@ -688,12 +689,12 @@ mod tests {
r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4字幕社招人内详"#,
r#"{
"name_en": "Tengoku Daimakyou",
"name_en_no_season": "Tengoku Daimakyou",
"s_name_en": "Tengoku Daimakyou",
"name_zh": "天国大魔境",
"name_zh_no_season": "天国大魔境",
"s_name_zh": "天国大魔境",
"season": 1,
"episode_index": 5,
"sub": "字幕社招人内详",
"sub": ["GB"],
"source": null,
"fansub": "极影字幕社",
"resolution": "720P"
@ -707,9 +708,9 @@ mod tests {
r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#,
r#"{
"name_jp": "仮面ライダーギーツ",
"name_jp_no_season": "仮面ライダーギーツ",
"s_name_jp": "仮面ライダーギーツ",
"name_zh": "假面骑士Geats",
"name_zh_no_season": "假面骑士Geats",
"s_name_zh": "假面骑士Geats",
"season": 1,
"episode_index": 33,
"source": "WEBDL",
@ -725,12 +726,12 @@ mod tests {
r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对☆PICO FEVER / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#,
r#"{
"name_en": "Garupa Pico: Fever!",
"name_en_no_season": "Garupa Pico: Fever!",
"s_name_en": "Garupa Pico: Fever!",
"name_zh": "BanG Dream! 少女乐团派对☆PICO FEVER",
"name_zh_no_season": "BanG Dream! 少女乐团派对☆PICO FEVER",
"s_name_zh": "BanG Dream! 少女乐团派对☆PICO FEVER",
"season": 1,
"episode_index": 26,
"sub": "简繁内封字幕",
"sub": ["简繁内封字幕"],
"source": "WebRip",
"fansub": "百冬练习组&LoliHouse",
"resolution": "1080p"
@ -745,11 +746,11 @@ mod tests {
r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
r#"{
"name_zh": "摇曳露营△剧场版",
"name_zh_no_season": "摇曳露营△剧场版",
"s_name_zh": "摇曳露营△剧场版",
"season": 1,
"season_raw": null,
"episode_index": 1,
"sub": "简繁字幕",
"sub": ["简繁字幕"],
"source": "BDrip",
"fansub": "7³ACG x 桜都字幕组",
"resolution": "1080p"
@ -760,13 +761,13 @@ mod tests {
r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#,
r#"{
"name_en": "第二季 Komi-san wa, Komyushou Desu. S02",
"name_en_no_season": "Komi-san wa, Komyushou Desu.",
"s_name_en": "Komi-san wa, Komyushou Desu.",
"name_zh": "古见同学有交流障碍症",
"name_zh_no_season": "古见同学有交流障碍症",
"s_name_zh": "古见同学有交流障碍症",
"season": 2,
"season_raw": "第二季",
"episode_index": 22,
"sub": "GB",
"sub": ["GB"],
"fansub": "幻樱字幕组",
"resolution": "1920X1080"
}"#,

View File

@ -70,10 +70,10 @@ impl Deref for TmdbApiClient {
pub(crate) mod tests {
use std::{env, sync::Arc};
use crate::parsers::tmdb::tmdb_client::TmdbApiClient;
use crate::{parsers::tmdb::tmdb_client::TmdbApiClient, utils::test::load_test_env_panic};
pub async fn prepare_tmdb_api_client() -> Arc<TmdbApiClient> {
dotenv::from_filename("test.env").expect("failed to load test.env");
load_test_env_panic();
let tmdb_api_token = env::var("TMDB_API_TOKEN").expect("TMDB_API_TOKEN is not set");
TmdbApiClient::new(tmdb_api_token)
.await

View File

@ -86,7 +86,7 @@ pub struct TmdbTvSeriesDetailDto {
pub episode_run_time: Option<Vec<i32>>,
pub genres: Vec<TmdbGenresObjDto>,
pub first_air_date: Option<String>,
pub home_page: Option<String>,
pub homepage: Option<String>,
pub in_production: bool,
pub languages: Vec<String>,
pub last_air_date: Option<String>,

View File

@ -4,9 +4,9 @@ use url::Url;
pub fn extract_filename_from_url(url: &Url) -> Option<&str> {
url.path_segments().and_then(|s| s.last()).and_then(|last| {
if last.is_empty() {
Some(last)
} else {
None
} else {
Some(last)
}
})
}

View File

@ -28,6 +28,7 @@ pub struct DalContext {
pub config: AppDalConf,
}
#[derive(Debug, Clone)]
pub enum DalStoredUrl {
RelativePath { path: String },
Absolute { url: Url },
@ -79,7 +80,8 @@ impl DalContext {
.layer(LoggingLayer::default())
.finish();
fs_op.create_dir(dirname.as_str()).await?;
let dirpath = format!("{}/", dirname.as_str());
fs_op.create_dir(&dirpath).await?;
let fullname = {
dirname.push(basename);
@ -93,3 +95,36 @@ impl DalContext {
})
}
}
#[cfg(test)]
mod tests {
    use base64::Engine;

    use crate::{
        config::AppDalConf, models::subscribers::ROOT_SUBSCRIBER_NAME, storage::DalContext,
    };

    // Round-trips a tiny base64-encoded PNG through the filesystem-backed DAL
    // and checks that the stored blob is reported via a relative path.
    #[tokio::test]
    async fn test_dal_context() {
        let dal_context = DalContext::new(AppDalConf {
            fs_root: "data/dal".to_string(),
        });

        // 1x1 PNG fixture, kept inline as base64 so the test needs no files.
        let png_bytes = base64::engine::general_purpose::STANDARD
            .decode("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8BQDwAEhQGAhKmMIQAAAABJRU5ErkJggg==")
            .expect("should decode as vec u8");

        let stored = dal_context
            .store_blob(
                crate::storage::DalContentType::Poster,
                ".jpg",
                bytes::Bytes::from(png_bytes),
                ROOT_SUBSCRIBER_NAME,
            )
            .await
            .expect("dal context should store blob");

        assert!(
            matches!(stored, crate::storage::DalStoredUrl::RelativePath { .. }),
            "dal context should store blob as relative path"
        );
    }
}

View File

@ -0,0 +1,8 @@
/// In test or debug builds, changes the process working directory to this
/// crate's manifest directory (`CARGO_MANIFEST_DIR`, captured at compile
/// time) so relative paths resolve the same way regardless of where the
/// workspace command was invoked from. No-op in release builds.
pub fn hack_env_to_fit_workspace() -> std::io::Result<()> {
    if !(cfg!(test) || cfg!(debug_assertions)) {
        return Ok(());
    }
    let package_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR"));
    std::env::set_current_dir(package_dir)
}

View File

@ -1,6 +1,6 @@
use sea_orm::{
sea_query::{Expr, InsertStatement, IntoIden, Query, SimpleExpr},
ActiveModelTrait, ActiveValue, ColumnTrait, ConnectionTrait, DynIden, EntityName, EntityTrait,
ActiveModelTrait, ActiveValue, ColumnTrait, ConnectionTrait, EntityName, EntityTrait,
FromQueryResult, Iterable, SelectModel, SelectorRaw, TryGetable,
};
@ -34,27 +34,34 @@ where
);
let ent = V::Entity::default();
let mut insert = Query::insert();
let mut insert_statement = insert
let insert_statement = insert
.into_table(ent.table_ref())
.returning(Query::returning().exprs(returning_columns));
{
extra_config(&mut insert_statement);
extra_config(insert_statement);
}
let mut columns = vec![];
for new_item in insert_values {
let mut columns = vec![];
let mut values = vec![];
for c in <V::Entity as EntityTrait>::Column::iter() {
if let ActiveValue::Set(value) = new_item.get(c.clone()) {
columns.push(c);
values.push(SimpleExpr::Value(value));
let av = new_item.get(c);
match av {
ActiveValue::Set(value) => {
values.push(c.save_as(Expr::val(value)));
columns.push(c);
}
ActiveValue::Unchanged(value) => {
values.push(c.save_as(Expr::val(value)));
columns.push(c);
}
_ => {}
}
}
insert_statement.columns(columns);
insert_statement.values(values)?;
}
insert_statement.columns(columns);
let result = SelectorRaw::<SelectModel<M>>::from_statement(db_backend.build(insert_statement))
.all(db)

View File

@ -0,0 +1,5 @@
pub mod cli;
pub mod db;
// Test-only utilities; compiled only for test builds, excluded from release binaries.
#[cfg(test)]
pub mod test;

View File

@ -0,0 +1,20 @@
use std::path::Path;
/// Loads environment variables for tests from the first existing env file,
/// preferring a developer-local override (`configs/test.local.env`) over the
/// checked-in default (`configs/test.env`). Paths are resolved relative to
/// this crate's manifest directory so the lookup works from any cwd.
///
/// Succeeds silently when neither file exists; returns `dotenv::Error` only
/// when a file was found but could not be loaded.
pub fn load_test_env() -> Result<(), dotenv::Error> {
    let package_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
    let candidates = [
        package_dir.join("configs/test.local.env"),
        package_dir.join("configs/test.env"),
    ];
    // First existing file wins; the rest are ignored.
    if let Some(env_file) = candidates.iter().find(|p| p.exists()) {
        dotenv::from_path(env_file)?;
    }
    Ok(())
}
/// Convenience wrapper around `load_test_env` that panics if the test env
/// file exists but fails to load, for use in test setup where a `Result`
/// return is inconvenient.
pub fn load_test_env_panic() {
    load_test_env().expect("failed to load test env")
}

View File

@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize};
use crate::models::entities::subscribers;
use crate::models::subscribers;
#[derive(Debug, Deserialize, Serialize)]
pub struct CurrentResponse {

View File

@ -34,7 +34,7 @@ impl worker::Worker<CollectHistoryEpisodesWorkerArgs> for CollectHistoryEpisodes
async fn perform(&self, _args: CollectHistoryEpisodesWorkerArgs) -> worker::Result<()> {
println!("================================================");
let db = &self.ctx.db;
// let db = &self.ctx.db;
println!("================================================");
Ok(())

View File

@ -0,0 +1,6 @@
---
source: crates/recorder/tests/models/subscriptions.rs
assertion_line: 55
expression: a
---
1

View File

@ -1,9 +1,13 @@
use insta::assert_debug_snapshot;
use loco_rs::testing;
// use insta::assert_debug_snapshot;
use loco_rs::{app::Hooks, testing};
use recorder::{
app::App,
models::{subscribers::ROOT_SUBSCRIBER_ID, subscriptions},
models::{
subscribers::{self},
subscriptions,
},
};
use sea_orm::{ActiveModelTrait, TryIntoModel};
use serial_test::serial;
macro_rules! configure_insta {
@ -21,22 +25,38 @@ async fn can_pull_subscription() {
configure_insta!();
let boot = testing::boot_test::<App>().await.unwrap();
App::init_logger(&boot.app_context.config, &boot.app_context.environment).unwrap();
testing::seed::<App>(&boot.app_context.db).await.unwrap();
let db = &boot.app_context.db;
let create_rss = serde_json::from_str(
r#"{
"rss_link": "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370",
"display_name": "Mikan Project - 葬送的芙莉莲",
"rss_link": "https://mikanani.me/RSS/Bangumi?bangumiId=3271&subgroupid=370",
"display_name": "Mikan Project - 我心里危险的东西 第二季",
"aggregate": false,
"enabled": true,
"category": "mikan"
}"#,
}"#,
)
.expect("should parse create rss dto from json");
let subscription = subscriptions::ActiveModel::from_create_dto(create_rss, ROOT_SUBSCRIBER_ID)
let subscriber = subscribers::Model::find_by_pid(db, subscribers::ROOT_SUBSCRIBER_NAME)
.await
.expect("should create subscription");
.expect("should find subscriber");
let subscription = subscriptions::ActiveModel::assert_debug_snapshot!(existing_subscriber);
let subscription = subscriptions::ActiveModel::from_create_dto(create_rss, subscriber.id);
let subscription = subscription
.save(&boot.app_context.db)
.await
.expect("should save subscription")
.try_into_model()
.expect("should convert to model");
subscription
.pull_one(&boot.app_context, &subscriber)
.await
.expect("should pull subscription");
// assert_debug_snapshot!(a);
}

View File

@ -1 +0,0 @@
TMDB_API_TOKEN=your_token_here