feat: test?

This commit is contained in:
master 2024-03-29 02:01:00 +08:00
parent 035d4e20dd
commit 50243db5dc
23 changed files with 203 additions and 110 deletions

View File

@@ -65,7 +65,7 @@ workers:
   # - BackgroundQueue - Workers operate asynchronously in the background, processing queued.
   # - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
   # - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
-  mode: BackgroundQueue
+  mode: BackgroundAsync

 # Database Configuration
 database:
@@ -88,3 +88,5 @@ database:
   # Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
   dangerously_recreate: false
+tmdb:
+  api_token: '{{ get_env(name="TMDB_API_TOKEN") }}'
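
The new tmdb block relies on config templating: `{{ get_env(name="TMDB_API_TOKEN") }}` is rendered from the environment before the YAML is deserialized. A minimal sketch of a typed view over that settings block, assuming the rendered settings are exposed as a serde-compatible value; the struct and helper names are illustrative, not part of this commit:

use serde::Deserialize;

// Hypothetical typed view over the `tmdb` settings block; only the field name
// matches the YAML key added above, everything else is an assumption.
#[derive(Debug, Deserialize)]
pub struct TmdbConfig {
    pub api_token: String,
}

fn parse_tmdb(settings: &serde_json::Value) -> Result<TmdbConfig, serde_json::Error> {
    // The `get_env` template has already been rendered to a plain string by
    // the time the config reaches this point.
    serde_json::from_value(settings["tmdb"].clone())
}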

View File

@@ -1,20 +1,21 @@
-use std::path::Path;
 use async_trait::async_trait;
 use loco_rs::{
-    app::{AppContext, Hooks, Initializer},
+    app::Hooks,
     boot::{create_app, BootResult, StartMode},
     controller::AppRoutes,
     db::truncate_table,
     environment::Environment,
+    prelude::*,
     task::Tasks,
-    worker::{AppWorker, Processor},
-    Result,
+    worker::Processor,
 };
-use sea_orm::DatabaseConnection;
+use sea_orm::prelude::*;

 use crate::{
-    controllers, migrations::Migrator, models::entities::subscribers, storage::AppDalInitializer,
+    controllers,
+    migrations::Migrator,
+    models::{bangumi, downloaders, episodes, resources, subscribers, subscriptions},
+    storage::AppDalInitializer,
     workers::subscription::SubscriptionWorker,
 };
@@ -53,11 +54,20 @@ impl Hooks for App {
     fn register_tasks(_tasks: &mut Tasks) {}

     async fn truncate(db: &DatabaseConnection) -> Result<()> {
-        truncate_table(db, subscribers::Entity).await?;
+        futures::try_join!(
+            subscribers::Entity::delete_many()
+                .filter(subscribers::Column::Id.ne(subscribers::ROOT_SUBSCRIBER_ID))
+                .exec(db),
+            truncate_table(db, subscriptions::Entity),
+            truncate_table(db, resources::Entity),
+            truncate_table(db, downloaders::Entity),
+            truncate_table(db, bangumi::Entity),
+            truncate_table(db, episodes::Entity),
+        )?;
         Ok(())
     }

-    async fn seed(_db: &DatabaseConnection, _base: &Path) -> Result<()> {
+    async fn seed(_db: &DatabaseConnection, _base: &std::path::Path) -> Result<()> {
         Ok(())
     }
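
The new truncate wipes every table except the seeded root subscriber and runs the deletes through futures::try_join!, which polls all branches together and aborts on the first error. A minimal sketch of that short-circuit behaviour with stand-in async functions (the names below are illustrative only, not the crate's code):

// Two stand-ins for the truncate_table/delete_many futures used above.
async fn wipe_resources() -> Result<(), String> {
    Ok(())
}

async fn wipe_episodes() -> Result<(), String> {
    Err("episodes table is locked".to_string())
}

async fn wipe_all() -> Result<(), String> {
    // try_join! completes only if every branch succeeds; here it returns the
    // wipe_episodes error and the Ok(()) from wipe_resources is discarded.
    futures::try_join!(wipe_resources(), wipe_episodes())?;
    Ok(())
}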

View File

@@ -20,7 +20,7 @@ use super::{
     qbitorrent::QBittorrentDownloader,
 };
 use crate::{
-    models::{bangumi, downloaders, downloaders::DownloaderCategory, downloads},
+    models::{bangumi, downloaders, downloaders::DownloaderCategory, resources},
     path::torrent_path::gen_bangumi_sub_path,
 };
@@ -296,10 +296,10 @@ pub trait TorrentDownloader {
     fn get_save_path(&self, sub_path: &Path) -> PathBuf;

-    async fn add_downloads_for_bangumi<'a, 'b>(
+    async fn add_resources_for_bangumi<'a, 'b>(
         &self,
         db: &'a DatabaseConnection,
-        downloads: &[&downloads::Model],
+        resources: &[&resources::Model],
         mut bangumi: bangumi::Model,
     ) -> eyre::Result<bangumi::Model> {
         if bangumi.save_path.is_none() {
@@ -315,12 +315,12 @@ pub trait TorrentDownloader {
             .unwrap_or_else(|| unreachable!("must have a sub path"));

         let mut torrent_urls = vec![];
-        for m in downloads.iter() {
+        for m in resources.iter() {
             torrent_urls.push(Url::parse(&m.url as &str)?);
         }

         // make sequence to prevent too fast to be banned
-        for d in downloads.iter() {
+        for d in resources.iter() {
             let source = TorrentSource::parse(&d.url).await?;
             self.add_torrents(source, sub_path.clone(), Some("bangumi"))
                 .await?;
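
A hedged call-site sketch for the renamed trait method, based on the signature shown in the hunk above; the wrapper function and variable names are illustrative and this commit does not include the real call sites:

use sea_orm::DatabaseConnection;

// Queue every pending resource of one bangumi on a downloader implementation.
async fn queue_for_bangumi<D: TorrentDownloader>(
    downloader: &D,
    db: &DatabaseConnection,
    pending: &[resources::Model],
    bangumi: bangumi::Model,
) -> eyre::Result<bangumi::Model> {
    // The trait method takes a slice of references, so borrow the owned rows.
    let refs: Vec<&resources::Model> = pending.iter().collect();
    downloader.add_resources_for_bangumi(db, &refs, bangumi).await
}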

View File

@@ -9,6 +9,7 @@ use crate::migrations::extension::postgres::Type;
 pub enum GeneralIds {
     CreatedAt,
     UpdatedAt,
+    Id,
 }

 #[derive(DeriveIden)]
@@ -63,7 +64,7 @@ pub enum Episodes {
     SNameJp,
     SNameEn,
     BangumiId,
-    DownloadId,
+    ResourceId,
     SavePath,
     Resolution,
     Season,
@@ -76,7 +77,7 @@ pub enum Episodes {
 }

 #[derive(DeriveIden)]
-pub enum Downloads {
+pub enum Resources {
     Table,
     Id,
     SubscriptionId,

View File

@@ -4,7 +4,10 @@ use sea_orm_migration::{prelude::*, schema::*};
 use super::defs::{
     Bangumi, CustomSchemaManagerExt, Episodes, GeneralIds, Subscribers, Subscriptions,
 };
-use crate::models::{subscribers::ROOT_SUBSCRIBER, subscriptions};
+use crate::models::{
+    subscribers::{ROOT_SUBSCRIBER_ID, ROOT_SUBSCRIBER_NAME},
+    subscriptions,
+};

 #[derive(DeriveMigrationName)]
 pub struct Migration;
@@ -34,8 +37,12 @@ impl MigrationTrait for Migration {
         let insert = Query::insert()
             .into_table(Subscribers::Table)
-            .columns([Subscribers::Pid, Subscribers::DisplayName])
-            .values_panic([ROOT_SUBSCRIBER.into(), ROOT_SUBSCRIBER.into()])
+            .columns([Subscribers::Id, Subscribers::Pid, Subscribers::DisplayName])
+            .values_panic([
+                ROOT_SUBSCRIBER_ID.into(),
+                ROOT_SUBSCRIBER_NAME.into(),
+                ROOT_SUBSCRIBER_NAME.into(),
+            ])
             .to_owned();

         manager.exec_stmt(insert).await?;
@@ -147,7 +154,7 @@ impl MigrationTrait for Migration {
                     .col(text_null(Episodes::SNameJp))
                     .col(text_null(Episodes::SNameEn))
                     .col(integer(Episodes::BangumiId))
-                    .col(integer(Episodes::DownloadId))
+                    .col(integer(Episodes::ResourceId))
                     .col(text_null(Episodes::SavePath))
                     .col(string_null(Episodes::Resolution))
                     .col(integer(Episodes::Season))

View File

@@ -2,9 +2,8 @@ use loco_rs::schema::table_auto;
 use sea_orm_migration::{prelude::*, schema::*};

 use super::defs::*;
-use crate::models::prelude::{
-    downloads::{DownloadMimeEnum, DownloadStatusEnum},
-    DownloadMime, DownloadStatus,
+use crate::models::resources::{
+    DownloadStatus, DownloadStatusEnum, ResourceMime, ResourceMimeEnum,
 };

 #[derive(DeriveMigrationName)]
@@ -15,8 +14,8 @@ impl MigrationTrait for Migration {
     async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
         manager
             .create_postgres_enum_for_active_enum(
-                DownloadMimeEnum,
-                &[DownloadMime::OctetStream, DownloadMime::BitTorrent],
+                ResourceMimeEnum,
+                &[ResourceMime::OctetStream, ResourceMime::BitTorrent],
             )
             .await?;
@@ -36,29 +35,29 @@ impl MigrationTrait for Migration {
         manager
             .create_table(
-                table_auto(Downloads::Table)
-                    .col(pk_auto(Downloads::Id))
-                    .col(text(Downloads::OriginTitle))
-                    .col(text(Downloads::DisplayName))
-                    .col(integer(Downloads::SubscriptionId))
+                table_auto(Resources::Table)
+                    .col(pk_auto(Resources::Id))
+                    .col(text(Resources::OriginTitle))
+                    .col(text(Resources::DisplayName))
+                    .col(integer(Resources::SubscriptionId))
                     .col(enumeration(
-                        Downloads::Status,
+                        Resources::Status,
                         DownloadStatusEnum,
-                        DownloadMime::iden_values(),
+                        ResourceMime::iden_values(),
                     ))
                     .col(enumeration(
-                        Downloads::Mime,
-                        DownloadMimeEnum,
-                        DownloadMime::iden_values(),
+                        Resources::Mime,
+                        ResourceMimeEnum,
+                        ResourceMime::iden_values(),
                     ))
-                    .col(big_unsigned_null(Downloads::AllSize))
-                    .col(big_unsigned_null(Downloads::CurrSize))
-                    .col(text(Downloads::Url))
-                    .col(text_null(Downloads::HomePage))
+                    .col(big_unsigned_null(Resources::AllSize))
+                    .col(big_unsigned_null(Resources::CurrSize))
+                    .col(text(Resources::Url))
+                    .col(text_null(Resources::HomePage))
                     .foreign_key(
                         ForeignKey::create()
                             .name("fk_download_subscription_id")
-                            .from(Downloads::Table, Downloads::SubscriptionId)
+                            .from(Resources::Table, Resources::SubscriptionId)
                             .to(Subscriptions::Table, Subscriptions::Id)
                             .on_update(ForeignKeyAction::Restrict)
                             .on_delete(ForeignKeyAction::Cascade),
@@ -66,14 +65,14 @@ impl MigrationTrait for Migration {
                     .index(
                         Index::create()
                             .name("idx_download_url")
-                            .table(Downloads::Table)
-                            .col(Downloads::Url),
+                            .table(Resources::Table)
+                            .col(Resources::Url),
                     )
                     .index(
                         Index::create()
                             .name("idx_download_home_page")
-                            .table(Downloads::Table)
-                            .col(Downloads::HomePage),
+                            .table(Resources::Table)
+                            .col(Resources::HomePage),
                     )
                     .to_owned(),
             )
@@ -87,14 +86,14 @@ impl MigrationTrait for Migration {
             .alter_table(
                 Table::alter()
                     .table(Episodes::Table)
-                    .add_column_if_not_exists(integer_null(Episodes::DownloadId))
+                    .add_column_if_not_exists(integer_null(Episodes::ResourceId))
                     .add_foreign_key(
                         TableForeignKey::new()
-                            .name("fk_episode_download_id")
+                            .name("fk_episode_resource_id")
                             .from_tbl(Episodes::Table)
-                            .from_col(Episodes::DownloadId)
-                            .to_tbl(Downloads::Table)
-                            .to_col(Downloads::Id)
+                            .from_col(Episodes::ResourceId)
+                            .to_tbl(Resources::Table)
+                            .to_col(Resources::Id)
                             .on_update(ForeignKeyAction::Restrict)
                             .on_delete(ForeignKeyAction::SetNull),
                     )
@@ -110,8 +109,8 @@ impl MigrationTrait for Migration {
             .alter_table(
                 Table::alter()
                     .table(Episodes::Table)
-                    .drop_foreign_key(Alias::new("fk_episode_download_id"))
-                    .drop_column(Episodes::DownloadId)
+                    .drop_foreign_key(Alias::new("fk_episode_resource_id"))
+                    .drop_column(Episodes::ResourceId)
                     .to_owned(),
             )
             .await?;
@@ -121,11 +120,11 @@ impl MigrationTrait for Migration {
             .await?;

         manager
-            .drop_table(Table::drop().table(Downloads::Table).to_owned())
+            .drop_table(Table::drop().table(Resources::Table).to_owned())
             .await?;

         manager
-            .drop_postgres_enum_for_active_enum(DownloadMimeEnum)
+            .drop_postgres_enum_for_active_enum(ResourceMimeEnum)
             .await?;

         manager
             .drop_postgres_enum_for_active_enum(DownloadStatusEnum)

View File

@@ -2,7 +2,7 @@ pub use sea_orm_migration::prelude::*;

 pub mod defs;
 pub mod m20220101_000001_init;
-pub mod m20240224_082543_add_downloads;
+pub mod m20240224_082543_add_resources;
 pub mod m20240225_060853_subscriber_add_downloader;

 pub struct Migrator;
@@ -12,7 +12,7 @@ impl MigratorTrait for Migrator {
     fn migrations() -> Vec<Box<dyn MigrationTrait>> {
         vec![
             Box::new(m20220101_000001_init::Migration),
-            Box::new(m20240224_082543_add_downloads::Migration),
+            Box::new(m20240224_082543_add_resources::Migration),
             Box::new(m20240225_060853_subscriber_add_downloader::Migration),
         ]
     }

View File

@@ -1,9 +1,11 @@
 use sea_orm::{
-    sea_query::{Expr, InsertStatement, Query, SimpleExpr},
-    ActiveModelTrait, ActiveValue, ColumnTrait, ConnectionTrait, EntityName, EntityTrait,
+    sea_query::{Expr, InsertStatement, IntoIden, Query, SimpleExpr},
+    ActiveModelTrait, ActiveValue, ColumnTrait, ConnectionTrait, DynIden, EntityName, EntityTrait,
     FromQueryResult, Iterable, SelectModel, SelectorRaw, TryGetable,
 };

+use crate::migrations::{defs::GeneralIds, ColumnRef};
+
 #[derive(FromQueryResult)]
 pub(crate) struct OnlyIdsModel<Id>
 where
@@ -81,3 +83,25 @@
     Ok(result)
 }
+
+pub(crate) async fn insert_many_with_returning_id<D, V, F, I>(
+    db: &D,
+    insert_values: impl IntoIterator<Item = V>,
+    extra_config: F,
+) -> eyre::Result<Vec<OnlyIdsModel<I>>>
+where
+    D: ConnectionTrait,
+    V: ActiveModelTrait,
+    F: FnOnce(&mut InsertStatement),
+    I: TryGetable,
+{
+    let result: Vec<OnlyIdsModel<I>> = insert_many_with_returning_columns(
+        db,
+        insert_values,
+        [Expr::col(ColumnRef::Column(GeneralIds::Id.into_iden()))],
+        extra_config,
+    )
+    .await?;
+
+    Ok(result)
+}
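
A hedged sketch of calling the new insert_many_with_returning_id helper from inside the crate (OnlyIdsModel is pub(crate)), reusing the same ON CONFLICT DO NOTHING guard that the subscription model applies to the resources URL column later in this commit; the wrapper function below is illustrative only:

use sea_orm::{
    sea_query::{InsertStatement, OnConflict},
    ConnectionTrait,
};

// Insert a batch of resources, skip rows whose URL already exists, and get
// back only the ids of the rows that were actually inserted.
async fn insert_resources_dedup<D: ConnectionTrait>(
    db: &D,
    models: Vec<resources::ActiveModel>,
) -> eyre::Result<Vec<OnlyIdsModel<i32>>> {
    insert_many_with_returning_id(db, models, |stmt: &mut InsertStatement| {
        stmt.on_conflict(
            OnConflict::column(resources::Column::Url)
                .do_nothing()
                .to_owned(),
        );
    })
    .await
}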

View File

@@ -20,7 +20,7 @@ pub struct Model {
     pub s_name_jp: Option<String>,
     pub s_name_en: Option<String>,
     pub bangumi_id: i32,
-    pub download_id: Option<i32>,
+    pub resource_id: Option<i32>,
     pub save_path: Option<String>,
     pub resolution: Option<String>,
     pub season: u32,
@@ -41,11 +41,11 @@ pub enum Relation {
     )]
     Bangumi,
     #[sea_orm(
-        belongs_to = "super::downloads::Entity",
-        from = "Column::DownloadId",
-        to = "super::downloads::Column::Id"
+        belongs_to = "super::resources::Entity",
+        from = "Column::ResourceId",
+        to = "super::resources::Column::Id"
     )]
-    Downloads,
+    Resources,
 }

 impl Related<super::bangumi::Entity> for Entity {
@@ -54,8 +54,8 @@ impl Related<super::bangumi::Entity> for Entity {
     }
 }

-impl Related<super::downloads::Entity> for Entity {
+impl Related<super::resources::Entity> for Entity {
     fn to() -> RelationDef {
-        Relation::Downloads.def()
+        Relation::Resources.def()
     }
 }
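
With the relation renamed, traversing from an episode to its resource still goes through SeaORM's Related machinery. A minimal sketch assuming an episodes::Model and an open connection are in scope (the helper name is illustrative):

use sea_orm::{DatabaseConnection, DbErr, ModelTrait};

// Load the resource row referenced by episodes.resource_id, if any.
async fn resource_of_episode(
    ep: &episodes::Model,
    db: &DatabaseConnection,
) -> Result<Option<resources::Model>, DbErr> {
    ep.find_related(resources::Entity).one(db).await
}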

View File

@@ -1,7 +1,7 @@
 //! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.4
 pub mod bangumi;
-pub mod downloads;
+pub mod downloaders;
 pub mod episodes;
+pub mod resources;
 pub mod subscribers;
 pub mod subscriptions;
-pub mod downloaders;

View File

@@ -24,8 +24,8 @@ pub enum DownloadStatus {
 #[derive(
     Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
 )]
-#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_mime")]
-pub enum DownloadMime {
+#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "resource_mime")]
+pub enum ResourceMime {
     #[sea_orm(string_value = "application/octet-stream")]
     #[serde(rename = "application/octet-stream")]
     OctetStream,
@@ -35,7 +35,7 @@ pub enum DownloadMime {
 }

 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
-#[sea_orm(table_name = "downloads")]
+#[sea_orm(table_name = "resources")]
 pub struct Model {
     pub created_at: DateTime,
     pub updated_at: DateTime,
@@ -45,7 +45,7 @@ pub struct Model {
     pub display_name: String,
     pub subscription_id: i32,
     pub status: DownloadStatus,
-    pub mime: DownloadMime,
+    pub mime: ResourceMime,
     pub url: String,
     pub all_size: Option<u64>,
     pub curr_size: Option<u64>,

View File

@@ -2,7 +2,7 @@ use sea_orm::{entity::prelude::*, ActiveValue};

 pub use super::entities::episodes::*;
 use crate::{
-    models::downloads,
+    models::resources,
     parsers::{mikan::MikanEpisodeMeta, raw::RawEpisodeMeta},
 };
@@ -12,13 +12,13 @@ impl ActiveModelBehavior for ActiveModel {}
 impl ActiveModel {
     pub fn from_mikan_meta(
         bangumi_id: i32,
-        dl: downloads::Model,
+        resource: resources::Model,
         raw_meta: RawEpisodeMeta,
         mikan_meta: MikanEpisodeMeta,
         mikan_poster: Option<String>,
     ) -> Self {
         Self {
-            origin_title: ActiveValue::Set(dl.origin_title),
+            origin_title: ActiveValue::Set(resource.origin_title),
             official_title: ActiveValue::Set(mikan_meta.official_title.clone()),
             display_name: ActiveValue::Set(mikan_meta.official_title),
             name_zh: ActiveValue::Set(raw_meta.name_zh),
@@ -28,13 +28,13 @@ impl ActiveModel {
             s_name_jp: ActiveValue::Set(raw_meta.s_name_jp),
             s_name_en: ActiveValue::Set(raw_meta.s_name_en),
             bangumi_id: ActiveValue::Set(bangumi_id),
-            download_id: ActiveValue::Set(Some(dl.id)),
+            resource_id: ActiveValue::Set(Some(resource.id)),
             resolution: ActiveValue::Set(raw_meta.resolution),
             season: ActiveValue::Set(raw_meta.season),
             season_raw: ActiveValue::Set(raw_meta.season_raw),
             fansub: ActiveValue::Set(raw_meta.fansub),
             poster_link: ActiveValue::Set(mikan_poster),
-            home_page: ActiveValue::Set(dl.homepage),
+            home_page: ActiveValue::Set(resource.homepage),
             subtitle: ActiveValue::Set(raw_meta.sub),
             source: ActiveValue::Set(raw_meta.source),
             ..Default::default()

View File

@@ -1,10 +1,10 @@
 pub mod bangumi;
 pub(crate) mod db_utils;
 pub mod downloaders;
-pub mod downloads;
 pub mod entities;
 pub mod episodes;
 pub mod notifications;
 pub mod prelude;
+pub mod resources;
 pub mod subscribers;
 pub mod subscriptions;

View File

@@ -1,8 +1,8 @@
 pub use super::{
     bangumi::{self, Entity as Bangumi},
     downloaders::{self, DownloaderCategory, Entity as Downloader},
-    downloads::{self, DownloadMime, DownloadStatus, Entity as Download},
     episodes::{self, Entity as Episode},
+    resources::{self, DownloadStatus, Entity as Download, ResourceMime},
     subscribers::{self, Entity as Subscriber},
     subscriptions::{self, Entity as Subscription, SubscriptionCategory},
 };

View File

@@ -1,6 +1,6 @@
 use sea_orm::{prelude::*, ActiveValue};

-pub use crate::models::entities::downloads::*;
+pub use crate::models::entities::resources::*;
 use crate::parsers::mikan::MikanRssItem;

 #[async_trait::async_trait]

View File

@@ -4,7 +4,8 @@ use serde::{Deserialize, Serialize};

 pub use super::entities::subscribers::*;

-pub const ROOT_SUBSCRIBER: &str = "konobangu";
+pub const ROOT_SUBSCRIBER_ID: i32 = 1;
+pub const ROOT_SUBSCRIBER_NAME: &str = "konobangu";

 #[derive(Debug, Deserialize, Serialize)]
 pub struct SubscriberIdParams {
@@ -43,7 +44,7 @@ impl Model {
     }

     pub async fn find_root(db: &DatabaseConnection) -> ModelResult<Self> {
-        Self::find_by_pid(db, ROOT_SUBSCRIBER).await
+        Self::find_by_pid(db, ROOT_SUBSCRIBER_NAME).await
     }

     /// Asynchronously creates a user with a password and saves it to the
@@ -56,8 +57,8 @@ impl Model {
         let txn = db.begin().await?;

         let user = ActiveModel {
-            display_name: ActiveValue::set(ROOT_SUBSCRIBER.to_string()),
-            pid: ActiveValue::set(ROOT_SUBSCRIBER.to_string()),
+            display_name: ActiveValue::set(ROOT_SUBSCRIBER_NAME.to_string()),
+            pid: ActiveValue::set(ROOT_SUBSCRIBER_NAME.to_string()),
             ..Default::default()
         }
         .insert(&txn)

View File

@@ -12,7 +12,7 @@ use tracing::{event, instrument, Level};

 pub use super::entities::subscriptions::{self, *};
 use crate::{
-    models::{bangumi, db_utils::insert_many_with_returning_all, downloads, episodes},
+    models::{bangumi, db_utils::insert_many_with_returning_all, episodes, resources},
     parsers::{
         mikan::{
             parse_episode_meta_from_mikan_homepage, parse_mikan_rss_items_from_rss_link,
@@ -33,7 +33,7 @@ pub struct SubscriptionCreateFromRssDto {
 }

 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
-#[serde(tag = "category")]
+#[serde(tag = "category", rename_all = "snake_case")]
 pub enum SubscriptionCreateDto {
     Mikan(SubscriptionCreateFromRssDto),
 }
@@ -106,7 +106,7 @@ impl Model {
         fields(subscriber_id = "self.subscriber_id", subscription_id = "self.id"),
         skip(self, db, ctx)
     )]
-    pub async fn pull_item(&self, db: &DatabaseConnection, ctx: &AppContext) -> eyre::Result<()> {
+    pub async fn pull_one(&self, db: &DatabaseConnection, ctx: &AppContext) -> eyre::Result<()> {
         let subscription = self;
         let subscription_id = subscription.id;
         match &subscription.category {
@@ -122,20 +122,20 @@ impl Model {
                     return Ok(());
                 }

-                let new_downloads = all_items
+                let new_resources = all_items
                     .into_iter()
                     .map(|rss_item| {
-                        downloads::ActiveModel::from_mikan_rss_item(rss_item, subscription.id)
+                        resources::ActiveModel::from_mikan_rss_item(rss_item, subscription.id)
                     })
                     .collect_vec();

                 // insert and filter out duplicated items
-                let new_downloads: Vec<downloads::Model> = insert_many_with_returning_all(
+                let new_resources: Vec<resources::Model> = insert_many_with_returning_all(
                     db,
-                    new_downloads,
+                    new_resources,
                     |stat: &mut InsertStatement| {
                         stat.on_conflict(
-                            OnConflict::column(downloads::Column::Url)
+                            OnConflict::column(resources::Column::Url)
                                 .do_nothing()
                                 .to_owned(),
                         );
@@ -144,7 +144,7 @@ impl Model {
                 .await?;

                 pub struct MikanEpMetaBundle {
-                    pub download: downloads::Model,
+                    pub resource: resources::Model,
                     pub mikan: MikanEpisodeMeta,
                     pub raw: RawEpisodeMeta,
                     pub poster: Option<String>,
@@ -154,8 +154,8 @@ impl Model {
                     HashMap::new();
                 let dal = ctx.get_dal_unwrap().await;
                 {
-                    for dl in new_downloads {
-                        let mut mikan_meta = if let Some(homepage) = dl.homepage.as_deref() {
+                    for r in new_resources {
+                        let mut mikan_meta = if let Some(homepage) = r.homepage.as_deref() {
                             match parse_episode_meta_from_mikan_homepage(&mikan_client, homepage)
                                 .await
                             {
@@ -208,14 +208,14 @@ impl Model {
                         } else {
                             None
                         };
-                        let raw_meta = match parse_episode_meta_from_raw_name(&dl.origin_title) {
+                        let raw_meta = match parse_episode_meta_from_raw_name(&r.origin_title) {
                             Ok(raw_meta) => raw_meta,
                             Err(e) => {
                                 let error: &dyn std::error::Error = e.as_ref();
                                 event!(
                                     Level::ERROR,
                                     desc = "failed to parse episode meta from origin name",
-                                    origin_name = &dl.origin_title,
+                                    origin_name = &r.origin_title,
                                     error = error
                                 );
                                 continue;
@@ -227,7 +227,7 @@ impl Model {
                             fansub: raw_meta.fansub.clone(),
                         };
                         let meta = MikanEpMetaBundle {
-                            download: dl,
+                            resource: r,
                             mikan: mikan_meta,
                             raw: raw_meta,
                             poster: mikan_poster_link,
@@ -239,7 +239,7 @@ impl Model {
                 for (_, eps) in ep_metas {
                     let meta = eps.first().unwrap_or_else(|| {
                         unreachable!(
-                            "subscriptions pull items bangumi must have at least one episode meta"
+                            "subscriptions pull one bangumi must have at least one episode meta"
                         )
                     });
                     let last_ep = eps.iter().fold(0, |acc, ep| acc.max(ep.raw.episode_index));
@@ -271,7 +271,7 @@ impl Model {
                     let eps = eps.into_iter().map(|ep| {
                         episodes::ActiveModel::from_mikan_meta(
                             bgm.id,
-                            ep.download,
+                            ep.resource,
                             ep.raw,
                             ep.mikan,
                             ep.poster,
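
pull_item becomes pull_one, still meaning a single refresh pass over one subscription. A hedged call-site sketch, since the SubscriptionWorker wiring is not part of this diff and the helper below is illustrative only:

use loco_rs::app::AppContext;
use sea_orm::DatabaseConnection;

// One refresh pass: fetch the RSS feed, insert the new resources, and create
// bangumi/episode rows for anything not seen before.
async fn refresh_subscription(
    subscription: &subscriptions::Model,
    db: &DatabaseConnection,
    ctx: &AppContext,
) -> eyre::Result<()> {
    subscription.pull_one(db, ctx).await
}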

View File

@@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize};

 use crate::{
     downloaders::defs::BITTORRENT_MIME_TYPE,
-    models::prelude::DownloadMime,
+    models::prelude::ResourceMime,
     parsers::{errors::ParseError, mikan::mikan_client::MikanClient},
 };
@@ -19,8 +19,8 @@ pub struct MikanRssItem {
 }

 impl MikanRssItem {
-    pub fn get_download_mime(&self) -> DownloadMime {
-        DownloadMime::BitTorrent
+    pub fn get_download_mime(&self) -> ResourceMime {
+        ResourceMime::BitTorrent
     }
 }

View File

@@ -1 +1,2 @@
 mod subscribers;
+mod subscriptions;

View File

@@ -1,7 +0,0 @@
----
-source: tests/models/subscribers.rs
-expression: non_existing_subscriber_results
----
-Err(
-    EntityNotFound,
-)

View File

@@ -0,0 +1,13 @@
+---
+source: tests/models/subscription.rs
+expression: existing_subscription
+---
+Ok(
+    Model {
+        created_at: 2023-11-12T12:34:56.789,
+        updated_at: 2023-11-12T12:34:56.789,
+        id: 1,
+        pid: "11111111-1111-1111-1111-111111111111",
+        display_name: "user1"
+    },
+)

View File

@@ -7,7 +7,7 @@ macro_rules! configure_insta {
     ($($expr:expr),*) => {
         let mut settings = insta::Settings::clone_current();
         settings.set_prepend_module_to_snapshot(false);
-        settings.set_snapshot_suffix("users");
+        settings.set_snapshot_suffix("subscribers");
         let _guard = settings.bind_to_scope();
     };
 }

View File

@@ -0,0 +1,42 @@
+use insta::assert_debug_snapshot;
+use loco_rs::testing;
+use recorder::{
+    app::App,
+    models::{subscribers::ROOT_SUBSCRIBER_ID, subscriptions},
+};
+use serial_test::serial;
+
+macro_rules! configure_insta {
+    ($($expr:expr),*) => {
+        let mut settings = insta::Settings::clone_current();
+        settings.set_prepend_module_to_snapshot(false);
+        settings.set_snapshot_suffix("subscriptions");
+        let _guard = settings.bind_to_scope();
+    };
+}
+
+#[tokio::test]
+#[serial]
+async fn can_pull_subscription() {
+    configure_insta!();
+
+    let boot = testing::boot_test::<App>().await.unwrap();
+    testing::seed::<App>(&boot.app_context.db).await.unwrap();
+
+    let create_rss = serde_json::from_str(
+        r#"{
+            "rss_link": "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370",
+            "display_name": "Mikan Project - 葬送的芙莉莲",
+            "aggregate": false,
+            "enabled": true,
+            "category": "mikan"
+        }"#,
+    )
+    .expect("should parse create rss dto from json");
+
+    let subscription = subscriptions::ActiveModel::from_create_dto(create_rss, ROOT_SUBSCRIBER_ID)
+        .await
+        .expect("should create subscription");
+
+    assert_debug_snapshot!(subscription);
+}