feat: add basic webui
apps/recorder/.devcontainer/Dockerfile (Normal file, +8)
@@ -0,0 +1,8 @@
FROM mcr.microsoft.com/vscode/devcontainers/rust:0-1

RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
    && apt-get -y install --no-install-recommends postgresql-client \
    && cargo install sea-orm-cli cargo-insta \
    && chown -R vscode /usr/local/cargo

COPY .env /.env
apps/recorder/.devcontainer/devcontainer.json (Normal file, +9)
@@ -0,0 +1,9 @@
{
  "name": "Konobangu Recorder",
  "dockerComposeFile": "docker-compose.yml",
  "service": "app",
  "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
  "forwardPorts": [
    3001
  ]
}
apps/recorder/.devcontainer/docker-compose.yml (Normal file, +40)
@@ -0,0 +1,40 @@
version: "3"

services:
  app:
    build:
      context: .
      dockerfile: Dockerfile
    command: sleep infinity
    networks:
      - db
      - redis
    volumes:
      - ../..:/workspaces:cached
    env_file:
      - .env
  db:
    image: postgres:15.3-alpine
    restart: unless-stopped
    ports:
      - 5432:5432
    networks:
      - db
    volumes:
      - postgres-data:/var/lib/postgresql/data
    env_file:
      - .env
  redis:
    image: redis:latest
    restart: unless-stopped
    ports:
      - 6379:6379
    networks:
      - redis

volumes:
  postgres-data:

networks:
  db:
  redis:
apps/recorder/.github/workflows/ci.yaml (vendored, Normal file, +107)
@@ -0,0 +1,107 @@
name: CI
on:
  push:
    branches:
      - master
      - main
  pull_request:

env:
  RUST_TOOLCHAIN: stable
  TOOLCHAIN_PROFILE: minimal

jobs:
  rustfmt:
    name: Check Style
    runs-on: ubuntu-latest

    permissions:
      contents: read

    steps:
      - name: Checkout the code
        uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          profile: ${{ env.TOOLCHAIN_PROFILE }}
          toolchain: ${{ env.RUST_TOOLCHAIN }}
          override: true
          components: rustfmt
      - name: Run cargo fmt
        uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --all -- --check

  clippy:
    name: Run Clippy
    runs-on: ubuntu-latest

    permissions:
      contents: read

    steps:
      - name: Checkout the code
        uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          profile: ${{ env.TOOLCHAIN_PROFILE }}
          toolchain: ${{ env.RUST_TOOLCHAIN }}
          override: true
      - name: Setup Rust cache
        uses: Swatinem/rust-cache@v2
      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        with:
          command: clippy
          args: --all-features -- -D warnings -W clippy::pedantic -W clippy::nursery -W rust-2018-idioms

  test:
    name: Run Tests
    runs-on: ubuntu-latest

    permissions:
      contents: read

    services:
      redis:
        image: redis
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - "6379:6379"
      postgres:
        image: postgres
        env:
          POSTGRES_DB: postgress_test
          POSTGRES_USER: postgress
          POSTGRES_PASSWORD: postgress
        ports:
          - "5432:5432"
        # Set health checks to wait until postgres has started
        options: --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - name: Checkout the code
        uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          profile: ${{ env.TOOLCHAIN_PROFILE }}
          toolchain: ${{ env.RUST_TOOLCHAIN }}
          override: true
      - name: Setup Rust cache
        uses: Swatinem/rust-cache@v2
      - name: Run cargo test
        uses: actions-rs/cargo@v1
        with:
          command: test
          args: --all-features --all
        env:
          REDIS_URL: redis://localhost:${{job.services.redis.ports[6379]}}
          DATABASE_URL: postgres://postgress:postgress@localhost:5432/postgress_test
apps/recorder/.gitignore (vendored, Normal file, +17)
@@ -0,0 +1,17 @@
**/config/local.yaml
**/config/*.local.yaml

# Generated by Cargo
# will have compiled files and executables
debug/
target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
apps/recorder/Cargo.toml (Normal file, +64)
@@ -0,0 +1,64 @@
[package]
name = "recorder"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "recorder"
path = "src/lib.rs"

[[bin]]
name = "recorder_cli"
path = "src/bin/main.rs"
required-features = []

[dependencies]
quirks_path = { path = "../../packages/quirks-path" }
torrent = { path = "../../packages/torrent" }
loco-rs = { version = "0.13" }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
eyre = "0.6"
tokio = { version = "1.42", default-features = false }
async-trait = "0.1.83"
tracing = "0.1"
chrono = "0.4"
sea-orm = { version = "1", features = [
    "sqlx-sqlite",
    "sqlx-postgres",
    "runtime-tokio-rustls",
    "macros",
    "debug-print"
] }

axum = "0.7.9"
uuid = { version = "1.6.0", features = ["v4"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
sea-orm-migration = { version = "1", features = ["runtime-tokio-rustls"] }
reqwest = "0.12.9"
thiserror = "2"
rss = "2"
bytes = "1.9"
itertools = "0.13.0"
url = "2.5"
fancy-regex = "0.14"
regex = "1.11"
lazy_static = "1.5"
maplit = "1.0.2"
lightningcss = "1.0.0-alpha.61"
html-escape = "0.2.13"
opendal = { version = "0.51.0", features = ["default", "services-fs"] }
zune-image = "0.4.15"
once_cell = "1.20.2"
reqwest-middleware = "0.4.0"
reqwest-retry = "0.7.0"
reqwest-tracing = "0.5.5"
scraper = "0.22.0"
leaky-bucket = "1.1.2"
serde_with = "3"

[dev-dependencies]
serial_test = "3"
loco-rs = { version = "0.13", features = ["testing"] }
insta = { version = "1", features = ["redactions", "yaml", "filters"] }
apps/recorder/examples/playground.rs (Normal file, +79)
@@ -0,0 +1,79 @@
#![allow(unused_imports)]
use eyre::Context;
use itertools::Itertools;
use loco_rs::{
    app::Hooks,
    boot::{BootResult, StartMode},
    environment::Environment,
    prelude::*,
};
use recorder::{
    app::App,
    extract::mikan::parse_mikan_rss_items_from_rss_link,
    migrations::Migrator,
    models::{
        subscribers::ROOT_SUBSCRIBER,
        subscriptions::{self, SubscriptionCreateFromRssDto},
    },
};
use sea_orm_migration::MigratorTrait;

async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> eyre::Result<()> {
    let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";

    // let rss_link =
    //     "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
    let subscription = if let Some(subscription) = subscriptions::Entity::find()
        .filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
        .one(&ctx.db)
        .await?
    {
        subscription
    } else {
        subscriptions::Model::add_subscription(
            ctx,
            subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
                rss_link: rss_link.to_string(),
                display_name: String::from("Mikan Project - 我的番组"),
                enabled: Some(true),
            }),
            1,
        )
        .await?
    };

    subscription.pull_subscription(ctx).await?;

    Ok(())
}

async fn init() -> eyre::Result<AppContext> {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .with_test_writer()
        .init();
    let ctx = loco_rs::cli::playground::<App>().await?;
    let BootResult {
        app_context: ctx, ..
    } = loco_rs::boot::run_app::<App>(&StartMode::ServerOnly, ctx).await?;
    Migrator::up(&ctx.db, None).await?;
    Ok(ctx)
}

#[tokio::main]
async fn main() -> eyre::Result<()> {
    let ctx = init().await?;
    pull_mikan_bangumi_rss(&ctx).await?;

    // let active_model: articles::ActiveModel = ActiveModel {
    //     title: Set(Some("how to build apps in 3 steps".to_string())),
    //     content: Set(Some("use Loco: https://loco.rs".to_string())),
    //     ..Default::default()
    // };
    // active_model.insert(&ctx.db).await.unwrap();

    // let res = articles::Entity::find().all(&ctx.db).await.unwrap();
    // println!("{:?}", res);

    Ok(())
}
apps/recorder/src/app.rs (Normal file, +97)
@@ -0,0 +1,97 @@
use std::path::Path;

use async_trait::async_trait;
use loco_rs::{
    app::{AppContext, Hooks},
    boot::{create_app, BootResult, StartMode},
    cache,
    controller::AppRoutes,
    db::truncate_table,
    environment::Environment,
    prelude::*,
    task::Tasks,
    Result,
};
use sea_orm::DatabaseConnection;

use crate::{
    controllers,
    dal::{AppDalClient, AppDalInitalizer},
    extract::mikan::{client::AppMikanClientInitializer, AppMikanClient},
    migrations::Migrator,
    models::entities::subscribers,
    workers::subscription_worker::SubscriptionWorker,
};

pub trait AppContextExt {
    fn get_dal_client(&self) -> &AppDalClient {
        AppDalClient::global()
    }

    fn get_mikan_client(&self) -> &AppMikanClient {
        AppMikanClient::global()
    }
}

impl AppContextExt for AppContext {}

pub struct App;

#[async_trait]
impl Hooks for App {
    fn app_name() -> &'static str {
        env!("CARGO_CRATE_NAME")
    }

    async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
        let initializers: Vec<Box<dyn Initializer>> = vec![
            Box::new(AppDalInitalizer),
            Box::new(AppMikanClientInitializer),
        ];

        Ok(initializers)
    }

    fn app_version() -> String {
        format!(
            "{} ({})",
            env!("CARGO_PKG_VERSION"),
            option_env!("BUILD_SHA")
                .or(option_env!("GITHUB_SHA"))
                .unwrap_or("dev")
        )
    }

    async fn boot(mode: StartMode, environment: &Environment) -> Result<BootResult> {
        create_app::<Self, Migrator>(mode, environment).await
    }

    fn routes(_ctx: &AppContext) -> AppRoutes {
        AppRoutes::with_default_routes()
            .prefix("/api")
            .add_route(controllers::subscribers::routes())
    }

    async fn connect_workers(ctx: &AppContext, queue: &Queue) -> Result<()> {
        queue.register(SubscriptionWorker::build(ctx)).await?;
        Ok(())
    }

    async fn after_context(ctx: AppContext) -> Result<AppContext> {
        Ok(AppContext {
            cache: cache::Cache::new(cache::drivers::inmem::new()).into(),
            ..ctx
        })
    }
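
    // `after_context` above swaps the default cache for loco's in-memory
    // driver, so this basic setup runs without an external cache backend.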

    fn register_tasks(_tasks: &mut Tasks) {}

    async fn truncate(db: &DatabaseConnection) -> Result<()> {
        truncate_table(db, subscribers::Entity).await?;
        Ok(())
    }

    async fn seed(_db: &DatabaseConnection, _base: &Path) -> Result<()> {
        Ok(())
    }
}
apps/recorder/src/bin/main.rs (Normal file, +8)
@@ -0,0 +1,8 @@
use loco_rs::cli;
use recorder::{app::App, migrations::Migrator};

#[tokio::main]
async fn main() -> eyre::Result<()> {
    cli::main::<App, Migrator>().await?;
    Ok(())
}
apps/recorder/src/config/mod.rs (Normal file, +53)
@@ -0,0 +1,53 @@
use serde::de::DeserializeOwned;

use crate::{
    dal::{config::AppDalConfig, DAL_CONF_KEY},
    extract::mikan::{AppMikanConfig, MIKAN_CONF_KEY},
};

pub fn deserialize_key_path_from_json_value<T: DeserializeOwned>(
    value: &serde_json::Value,
    key_path: &[&str],
) -> Result<Option<T>, loco_rs::Error> {
    let mut stack = vec![("", value)];
    for key in key_path {
        let current = stack.last().unwrap().1;
        if let Some(v) = current.get(key) {
            stack.push((key, v));
        } else {
            return Ok(None);
        }
    }
    let result: T = serde_json::from_value(stack.pop().unwrap().1.clone())?;
    Ok(Some(result))
}
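
// A minimal usage sketch (the settings shape here is hypothetical, not from
// this commit): given `{"dal": {"data_dir": "./data"}}` as the settings value,
// `deserialize_key_path_from_json_value::<AppDalConfig>(&settings, &["dal"])`
// walks the key path and deserializes the terminal value, returning `Ok(None)`
// when any key along the path is missing.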

pub fn deserialize_key_path_from_app_config<T: DeserializeOwned>(
    app_config: &loco_rs::config::Config,
    key_path: &[&str],
) -> Result<Option<T>, loco_rs::Error> {
    let settings = app_config.settings.as_ref();
    if let Some(settings) = settings {
        deserialize_key_path_from_json_value(settings, key_path)
    } else {
        Ok(None)
    }
}

pub trait AppConfigExt {
    fn get_root_conf(&self) -> &loco_rs::config::Config;

    fn get_dal_conf(&self) -> loco_rs::Result<Option<AppDalConfig>> {
        deserialize_key_path_from_app_config(self.get_root_conf(), &[DAL_CONF_KEY])
    }

    fn get_mikan_conf(&self) -> loco_rs::Result<Option<AppMikanConfig>> {
        deserialize_key_path_from_app_config(self.get_root_conf(), &[MIKAN_CONF_KEY])
    }
}

impl AppConfigExt for loco_rs::config::Config {
    fn get_root_conf(&self) -> &loco_rs::config::Config {
        self
    }
}
apps/recorder/src/controllers/mod.rs (Normal file, +1)
@@ -0,0 +1 @@
pub mod subscribers;
apps/recorder/src/controllers/subscribers.rs (Normal file, +14)
@@ -0,0 +1,14 @@
use loco_rs::prelude::*;

use crate::{models::entities::subscribers, views::subscribers::CurrentResponse};

async fn current(State(ctx): State<AppContext>) -> Result<impl IntoResponse> {
    let subscriber = subscribers::Model::find_root(&ctx).await?;
    format::json(CurrentResponse::new(&subscriber))
}

pub fn routes() -> Routes {
    Routes::new()
        .prefix("subscribers")
        .add("/current", get(current))
}
apps/recorder/src/dal/client.rs (Normal file, +201)
@@ -0,0 +1,201 @@
use std::fmt;

use bytes::Bytes;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use opendal::{layers::LoggingLayer, services::Fs, Buffer, Operator};
use quirks_path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use url::Url;
use uuid::Uuid;

use super::AppDalConfig;
use crate::config::AppConfigExt;

// TODO: wait app-context-trait to integrate
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum DalContentCategory {
    Image,
}

impl AsRef<str> for DalContentCategory {
    fn as_ref(&self) -> &str {
        match self {
            Self::Image => "image",
        }
    }
}

#[derive(Debug, Clone)]
pub struct AppDalClient {
    pub config: AppDalConfig,
}

static APP_DAL_CLIENT: OnceCell<AppDalClient> = OnceCell::new();

pub enum DalStoredUrl {
    RelativePath { path: String },
    Absolute { url: Url },
}

impl AsRef<str> for DalStoredUrl {
    fn as_ref(&self) -> &str {
        match &self {
            Self::Absolute { url } => url.as_str(),
            Self::RelativePath { path } => path,
        }
    }
}

impl fmt::Display for DalStoredUrl {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_ref())
    }
}

impl AppDalClient {
    pub fn new(config: AppDalConfig) -> Self {
        Self { config }
    }

    pub fn global() -> &'static AppDalClient {
        APP_DAL_CLIENT
            .get()
            .expect("Global app dal client is not initialized")
    }

    pub fn get_fs(&self) -> Fs {
        Fs::default().root(
            self.config
                .data_dir
                .as_ref()
                .map(|s| s as &str)
                .unwrap_or("./data"),
        )
    }

    pub fn create_filename(extname: &str) -> String {
        format!("{}{}", Uuid::new_v4(), extname)
    }

    pub async fn store_object(
        &self,
        content_category: DalContentCategory,
        subscriber_pid: &str,
        bucket: Option<&str>,
        filename: &str,
        data: Bytes,
    ) -> eyre::Result<DalStoredUrl> {
        match content_category {
            DalContentCategory::Image => {
                let fullname = [
                    subscriber_pid,
                    content_category.as_ref(),
                    bucket.unwrap_or_default(),
                    filename,
                ]
                .into_iter()
                .map(Path::new)
                .collect::<PathBuf>();

                let fs_op = Operator::new(self.get_fs())?
                    .layer(LoggingLayer::default())
                    .finish();

                if let Some(dirname) = fullname.parent() {
                    let dirname = dirname.join("/");
                    fs_op.create_dir(dirname.as_str()).await?;
                }

                fs_op.write(fullname.as_str(), data).await?;

                Ok(DalStoredUrl::RelativePath {
                    path: fullname.to_string(),
                })
            }
        }
    }
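
    // Stored objects land under
    // `{subscriber_pid}/{content_category}/{bucket}/{filename}` relative to
    // `data_dir` (default `./data`); the same path layout is rebuilt by
    // `exists_object` and `load_object` below from the same arguments.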

    pub async fn exists_object(
        &self,
        content_category: DalContentCategory,
        subscriber_pid: &str,
        bucket: Option<&str>,
        filename: &str,
    ) -> eyre::Result<Option<DalStoredUrl>> {
        match content_category {
            DalContentCategory::Image => {
                let fullname = [
                    subscriber_pid,
                    content_category.as_ref(),
                    bucket.unwrap_or_default(),
                    filename,
                ]
                .into_iter()
                .map(Path::new)
                .collect::<PathBuf>();

                let fs_op = Operator::new(self.get_fs())?
                    .layer(LoggingLayer::default())
                    .finish();

                if fs_op.exists(fullname.as_str()).await? {
                    Ok(Some(DalStoredUrl::RelativePath {
                        path: fullname.to_string(),
                    }))
                } else {
                    Ok(None)
                }
            }
        }
    }

    pub async fn load_object(
        &self,
        content_category: DalContentCategory,
        subscriber_pid: &str,
        bucket: Option<&str>,
        filename: &str,
    ) -> eyre::Result<Buffer> {
        match content_category {
            DalContentCategory::Image => {
                let fullname = [
                    subscriber_pid,
                    content_category.as_ref(),
                    bucket.unwrap_or_default(),
                    filename,
                ]
                .into_iter()
                .map(Path::new)
                .collect::<PathBuf>();

                let fs_op = Operator::new(self.get_fs())?
                    .layer(LoggingLayer::default())
                    .finish();

                let data = fs_op.read(fullname.as_str()).await?;

                Ok(data)
            }
        }
    }
}

pub struct AppDalInitalizer;

#[async_trait::async_trait]
impl Initializer for AppDalInitalizer {
    fn name(&self) -> String {
        String::from("AppDalInitalizer")
    }

    async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
        let config = &app_context.config;
        let app_dal_conf = config.get_dal_conf()?;

        APP_DAL_CLIENT.get_or_init(|| AppDalClient::new(app_dal_conf.unwrap_or_default()));

        Ok(())
    }
}
apps/recorder/src/dal/config.rs (Normal file, +8)
@@ -0,0 +1,8 @@
use serde::{Deserialize, Serialize};

pub const DAL_CONF_KEY: &str = "dal";

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct AppDalConfig {
    pub data_dir: Option<String>,
}
apps/recorder/src/dal/mod.rs (Normal file, +4)
@@ -0,0 +1,4 @@
pub mod client;
pub mod config;
pub use client::{AppDalClient, AppDalInitalizer, DalContentCategory};
pub use config::{AppDalConfig, DAL_CONF_KEY};
apps/recorder/src/extract/defs.rs (Normal file, +73)
@@ -0,0 +1,73 @@
use std::collections::HashMap;

use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use maplit::hashmap;
use regex::Regex;

const LANG_ZH_TW: &str = "zh-tw";
const LANG_ZH: &str = "zh";
const LANG_EN: &str = "en";
const LANG_JP: &str = "jp";

lazy_static! {
    pub static ref SEASON_REGEX: Regex =
        Regex::new(r"(S|[Ss]eason\s+)(\d+)").expect("Invalid regex");
    pub static ref TORRENT_PRASE_RULE_REGS: Vec<FancyRegex> = vec![
        // bracketed or space-delimited episode number, with optional `vN` and `END` markers
        FancyRegex::new(r"(.*)[\[\ ](\d{1,4}(?:\.\d{1,2})?)(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)")
            .unwrap(),
        FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
        FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
        FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
    ];
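
    // The rules above each capture (prefix, episode number, suffix) and are
    // presumably tried in order, so the bracketed forms win before the bare
    // `EP`-style fallback. The head of the first pattern was lost in the
    // source; the bracketed form shown is a reconstruction fitting the
    // surviving `(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)` tail.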
    pub static ref SUBTITLE_LANG: Vec<(&'static str, Vec<&'static str>)> = {
        vec![
            (LANG_ZH_TW, vec!["tc", "cht", "繁", "zh-tw"]),
            (LANG_ZH, vec!["sc", "chs", "简", "zh", "zh-cn"]),
            (LANG_EN, vec!["en", "eng", "英"]),
            (LANG_JP, vec!["jp", "jpn", "日"]),
        ]
    };
    pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
    pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
    pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
        hashmap! {
            "〇" => 0,
            "一" => 1,
            "二" => 2,
            "三" => 3,
            "四" => 4,
            "五" => 5,
            "六" => 6,
            "七" => 7,
            "八" => 8,
            "九" => 9,
            "十" => 10,
            "廿" => 20,
            "百" => 100,
            "千" => 1000,
            "零" => 0,
            "壹" => 1,
            "贰" => 2,
            "叁" => 3,
            "肆" => 4,
            "伍" => 5,
            "陆" => 6,
            "柒" => 7,
            "捌" => 8,
            "玖" => 9,
            "拾" => 10,
            "念" => 20,
            "佰" => 100,
            "仟" => 1000,
        }
    };
    pub static ref ZH_NUM_RE: Regex =
        Regex::new(r"[〇一二三四五六七八九十廿百千零壹贰叁肆伍陆柒捌玖拾念佰仟]").unwrap();
}
apps/recorder/src/extract/errors.rs (Normal file, +19)
@@ -0,0 +1,19 @@
use thiserror::Error;

#[derive(Error, Debug)]
pub enum ParseError {
    #[error("Parse bangumi season error: {0}")]
    BangumiSeasonError(#[from] std::num::ParseIntError),
    #[error("Parse file url error: {0}")]
    FileUrlError(#[from] url::ParseError),
    #[error("Parse {desc} with mime error, expected {expected}, but got {found}")]
    MimeError {
        desc: String,
        expected: String,
        found: String,
    },
    #[error("Parse mikan rss {url} format error")]
    MikanRssFormatError { url: String },
    #[error("Parse mikan rss item format error, {reason}")]
    MikanRssItemFormatError { reason: String },
}
apps/recorder/src/extract/html/mod.rs (Normal file, +3)
@@ -0,0 +1,3 @@
pub mod styles;

pub use styles::parse_style_attr;
apps/recorder/src/extract/html/styles.rs (Normal file, +6)
@@ -0,0 +1,6 @@
use lightningcss::declaration::DeclarationBlock;

pub fn parse_style_attr(style_attr: &str) -> Option<DeclarationBlock> {
    let result = DeclarationBlock::parse_string(style_attr, Default::default()).ok()?;
    Some(result)
}
apps/recorder/src/extract/mikan/client.rs (Normal file, +64)
@@ -0,0 +1,64 @@
use std::ops::Deref;

use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;

use super::{AppMikanConfig, MIKAN_BASE_URL};
use crate::{config::AppConfigExt, fetch::HttpClient};

static APP_MIKAN_CLIENT: OnceCell<AppMikanClient> = OnceCell::new();

pub struct AppMikanClient {
    http_client: HttpClient,
    base_url: String,
}

impl AppMikanClient {
    pub fn new(mut config: AppMikanConfig) -> loco_rs::Result<Self> {
        let http_client =
            HttpClient::new(config.http_client.take()).map_err(loco_rs::Error::wrap)?;
        let base_url = config
            .base_url
            .unwrap_or_else(|| String::from(MIKAN_BASE_URL));
        Ok(Self {
            http_client,
            base_url,
        })
    }

    pub fn global() -> &'static AppMikanClient {
        APP_MIKAN_CLIENT
            .get()
            .expect("Global mikan http client is not initialized")
    }

    pub fn base_url(&self) -> &str {
        &self.base_url
    }
}

impl Deref for AppMikanClient {
    type Target = HttpClient;

    fn deref(&self) -> &Self::Target {
        &self.http_client
    }
}
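
// Deref to the inner `HttpClient` lets callers pass `&AppMikanClient` wherever
// a plain `HttpClient` is expected (see the `client.map(|s| s.deref())` calls
// in the rss and web parsers), at the cost of a slightly implicit API.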

pub struct AppMikanClientInitializer;

#[async_trait::async_trait]
impl Initializer for AppMikanClientInitializer {
    fn name(&self) -> String {
        "AppMikanClientInitializer".to_string()
    }

    async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
        let config = &app_context.config;
        let app_mikan_conf = config.get_mikan_conf()?.unwrap_or_default();

        APP_MIKAN_CLIENT.get_or_try_init(|| AppMikanClient::new(app_mikan_conf))?;

        Ok(())
    }
}
apps/recorder/src/extract/mikan/config.rs (Normal file, +11)
@@ -0,0 +1,11 @@
use serde::{Deserialize, Serialize};

use crate::fetch::HttpClientConfig;

pub const MIKAN_CONF_KEY: &str = "mikan";

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct AppMikanConfig {
    pub http_client: Option<HttpClientConfig>,
    pub base_url: Option<String>,
}
apps/recorder/src/extract/mikan/constants.rs (Normal file, +4)
@@ -0,0 +1,4 @@
pub const MIKAN_BUCKET_KEY: &str = "mikan";
pub const MIKAN_BASE_URL: &str = "https://mikanani.me/";
pub const MIKAN_UNKNOWN_FANSUB_NAME: &str = "生肉/不明字幕";
pub const MIKAN_UNKNOWN_FANSUB_ID: &str = "202";
apps/recorder/src/extract/mikan/mod.rs (Normal file, +22)
@@ -0,0 +1,22 @@
pub mod client;
pub mod config;
pub mod constants;
pub mod rss_parser;
pub mod web_parser;

pub use client::{AppMikanClient, AppMikanClientInitializer};
pub use config::{AppMikanConfig, MIKAN_CONF_KEY};
pub use constants::{MIKAN_BASE_URL, MIKAN_BUCKET_KEY};
pub use rss_parser::{
    build_mikan_bangumi_rss_link, build_mikan_subscriber_aggregation_rss_link,
    parse_mikan_bangumi_id_from_rss_link, parse_mikan_rss_channel_from_rss_link,
    parse_mikan_rss_items_from_rss_link, parse_mikan_subscriber_aggregation_id_from_rss_link,
    MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanBangumiRssLink,
    MikanRssChannel, MikanRssItem, MikanSubscriberAggregationRssChannel,
    MikanSubscriberAggregationRssLink,
};
pub use web_parser::{
    build_mikan_bangumi_homepage, build_mikan_episode_homepage,
    parse_mikan_bangumi_meta_from_mikan_homepage, parse_mikan_episode_meta_from_mikan_homepage,
    MikanBangumiMeta, MikanEpisodeMeta,
};
apps/recorder/src/extract/mikan/rss_parser.rs (Normal file, +353)
@@ -0,0 +1,353 @@
use std::ops::Deref;

use chrono::DateTime;
use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
use torrent::core::BITTORRENT_MIME_TYPE;
use url::Url;

use super::{
    web_parser::{parse_mikan_episode_id_from_homepage, MikanEpisodeHomepage},
    AppMikanClient,
};
use crate::{extract::errors::ParseError, fetch::bytes::download_bytes_with_client};

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanRssItem {
    pub title: String,
    pub homepage: Url,
    pub url: Url,
    pub content_length: Option<u64>,
    pub mime: String,
    pub pub_date: Option<i64>,
    pub mikan_episode_id: String,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanBangumiRssChannel {
    pub name: String,
    pub url: Url,
    pub mikan_bangumi_id: String,
    pub mikan_fansub_id: String,
    pub items: Vec<MikanRssItem>,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanBangumiAggregationRssChannel {
    pub name: String,
    pub url: Url,
    pub mikan_bangumi_id: String,
    pub items: Vec<MikanRssItem>,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriberAggregationRssChannel {
    pub mikan_aggregation_id: String,
    pub url: Url,
    pub items: Vec<MikanRssItem>,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum MikanRssChannel {
    Bangumi(MikanBangumiRssChannel),
    BangumiAggregation(MikanBangumiAggregationRssChannel),
    SubscriberAggregation(MikanSubscriberAggregationRssChannel),
}

impl MikanRssChannel {
    pub fn items(&self) -> &[MikanRssItem] {
        match &self {
            Self::Bangumi(MikanBangumiRssChannel { items, .. })
            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { items, .. })
            | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { items, .. }) => {
                items
            }
        }
    }

    pub fn into_items(self) -> Vec<MikanRssItem> {
        match self {
            Self::Bangumi(MikanBangumiRssChannel { items, .. })
            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { items, .. })
            | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { items, .. }) => {
                items
            }
        }
    }

    pub fn name(&self) -> Option<&str> {
        match &self {
            Self::Bangumi(MikanBangumiRssChannel { name, .. })
            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { name, .. }) => {
                Some(name.as_str())
            }
            Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { .. }) => None,
        }
    }

    pub fn url(&self) -> &Url {
        match &self {
            Self::Bangumi(MikanBangumiRssChannel { url, .. })
            | Self::BangumiAggregation(MikanBangumiAggregationRssChannel { url, .. })
            | Self::SubscriberAggregation(MikanSubscriberAggregationRssChannel { url, .. }) => url,
        }
    }
}

impl TryFrom<rss::Item> for MikanRssItem {
    type Error = ParseError;

    fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
        let mime_type = item
            .enclosure()
            .map(|x| x.mime_type.to_string())
            .unwrap_or_default();
        if mime_type == BITTORRENT_MIME_TYPE {
            let enclosure = item.enclosure.unwrap();

            let homepage = item
                .link
                .ok_or_else(|| ParseError::MikanRssItemFormatError {
                    reason: String::from("must have a link for homepage"),
                })?;

            let homepage = Url::parse(&homepage)?;

            let enclosure_url = Url::parse(&enclosure.url)?;

            let MikanEpisodeHomepage {
                mikan_episode_id, ..
            } = parse_mikan_episode_id_from_homepage(&homepage).ok_or_else(|| {
                ParseError::MikanRssItemFormatError {
                    reason: String::from("homepage link format invalid"),
                }
            })?;

            Ok(MikanRssItem {
                title: item.title.unwrap_or_default(),
                homepage,
                url: enclosure_url,
                content_length: enclosure.length.parse().ok(),
                mime: enclosure.mime_type,
                pub_date: item
                    .pub_date
                    .and_then(|s| DateTime::parse_from_rfc2822(&s).ok())
                    .map(|s| s.timestamp_millis()),
                mikan_episode_id,
            })
        } else {
            Err(ParseError::MimeError {
                expected: String::from(BITTORRENT_MIME_TYPE),
                found: mime_type,
                desc: String::from("MikanRssItem"),
            })
        }
    }
}

#[derive(Debug, Clone)]
pub struct MikanBangumiRssLink {
    pub mikan_bangumi_id: String,
    pub mikan_fansub_id: Option<String>,
}

#[derive(Debug, Clone)]
pub struct MikanSubscriberAggregationRssLink {
    pub mikan_aggregation_id: String,
}

pub fn build_mikan_bangumi_rss_link(
    mikan_base_url: &str,
    mikan_bangumi_id: &str,
    mikan_fansub_id: Option<&str>,
) -> eyre::Result<Url> {
    let mut url = Url::parse(mikan_base_url)?;
    url.set_path("/RSS/Bangumi");
    url.query_pairs_mut()
        .append_pair("bangumiId", mikan_bangumi_id);
    if let Some(mikan_fansub_id) = mikan_fansub_id {
        url.query_pairs_mut()
            .append_pair("subgroupid", mikan_fansub_id);
    };
    Ok(url)
}
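
// For example, `build_mikan_bangumi_rss_link("https://mikanani.me/", "3141", Some("370"))`
// yields `https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370`, the
// same shape that `parse_mikan_bangumi_id_from_rss_link` below inverts.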

pub fn build_mikan_subscriber_aggregation_rss_link(
    mikan_base_url: &str,
    mikan_aggregation_id: &str,
) -> eyre::Result<Url> {
    let mut url = Url::parse(mikan_base_url)?;
    url.set_path("/RSS/MyBangumi");
    url.query_pairs_mut()
        .append_pair("token", mikan_aggregation_id);
    Ok(url)
}

pub fn parse_mikan_bangumi_id_from_rss_link(url: &Url) -> Option<MikanBangumiRssLink> {
    if url.path() == "/RSS/Bangumi" {
        url.query_pairs()
            .find(|(k, _)| k == "bangumiId")
            .map(|(_, v)| MikanBangumiRssLink {
                mikan_bangumi_id: v.to_string(),
                mikan_fansub_id: url
                    .query_pairs()
                    .find(|(k, _)| k == "subgroupid")
                    .map(|(_, v)| v.to_string()),
            })
    } else {
        None
    }
}

pub fn parse_mikan_subscriber_aggregation_id_from_rss_link(
    url: &Url,
) -> Option<MikanSubscriberAggregationRssLink> {
    if url.path() == "/RSS/MyBangumi" {
        url.query_pairs().find(|(k, _)| k == "token").map(|(_, v)| {
            MikanSubscriberAggregationRssLink {
                mikan_aggregation_id: v.to_string(),
            }
        })
    } else {
        None
    }
}

pub async fn parse_mikan_rss_items_from_rss_link(
    client: Option<&AppMikanClient>,
    url: impl IntoUrl,
) -> eyre::Result<Vec<MikanRssItem>> {
    let channel = parse_mikan_rss_channel_from_rss_link(client, url).await?;

    Ok(channel.into_items())
}

pub async fn parse_mikan_rss_channel_from_rss_link(
    client: Option<&AppMikanClient>,
    url: impl IntoUrl,
) -> eyre::Result<MikanRssChannel> {
    let http_client = client.map(|s| s.deref());
    let bytes = download_bytes_with_client(http_client, url.as_str()).await?;

    let channel = rss::Channel::read_from(&bytes[..])?;

    let channel_link = Url::parse(channel.link())?;

    if let Some(MikanBangumiRssLink {
        mikan_bangumi_id,
        mikan_fansub_id,
    }) = parse_mikan_bangumi_id_from_rss_link(&channel_link)
    {
        let channel_name = channel.title().replace("Mikan Project - ", "");

        let items = channel
            .items
            .into_iter()
            // @TODO log error
            .flat_map(MikanRssItem::try_from)
            .collect_vec();

        if let Some(mikan_fansub_id) = mikan_fansub_id {
            Ok(MikanRssChannel::Bangumi(MikanBangumiRssChannel {
                name: channel_name,
                mikan_bangumi_id,
                mikan_fansub_id,
                url: channel_link,
                items,
            }))
        } else {
            Ok(MikanRssChannel::BangumiAggregation(
                MikanBangumiAggregationRssChannel {
                    name: channel_name,
                    mikan_bangumi_id,
                    url: channel_link,
                    items,
                },
            ))
        }
    } else if let Some(MikanSubscriberAggregationRssLink {
        mikan_aggregation_id,
        ..
    }) = parse_mikan_subscriber_aggregation_id_from_rss_link(&channel_link)
    {
        let items = channel
            .items
            .into_iter()
            // @TODO log error
            .flat_map(MikanRssItem::try_from)
            .collect_vec();

        return Ok(MikanRssChannel::SubscriberAggregation(
            MikanSubscriberAggregationRssChannel {
                mikan_aggregation_id,
                items,
                url: channel_link,
            },
        ));
    } else {
        return Err(ParseError::MikanRssFormatError {
            url: url.as_str().into(),
        }
        .into());
    }
}

#[cfg(test)]
mod tests {
    use std::assert_matches::assert_matches;

    use torrent::core::BITTORRENT_MIME_TYPE;

    use crate::extract::mikan::{
        parse_mikan_rss_channel_from_rss_link, MikanBangumiAggregationRssChannel,
        MikanBangumiRssChannel, MikanRssChannel,
    };

    #[tokio::test]
    pub async fn test_parse_mikan_rss_channel_from_rss_link() {
        {
            let bangumi_url = "https://mikanani.me/RSS/Bangumi?bangumiId=3141&subgroupid=370";

            let channel = parse_mikan_rss_channel_from_rss_link(None, bangumi_url)
                .await
                .expect("should get mikan channel from rss url");

            assert_matches!(
                &channel,
                MikanRssChannel::Bangumi(MikanBangumiRssChannel { .. })
            );

            assert_matches!(&channel.name(), Some("葬送的芙莉莲"));

            let items = channel.items();
            let first_sub_item = items
                .first()
                .expect("mikan subscriptions should have at least one subs");

            assert_eq!(first_sub_item.mime, BITTORRENT_MIME_TYPE);

            assert!(&first_sub_item
                .homepage
                .as_str()
                .starts_with("https://mikanani.me/Home/Episode"));

            let name = first_sub_item.title.as_str();
            assert!(name.contains("葬送的芙莉莲"));
        }
        {
            let bangumi_url = "https://mikanani.me/RSS/Bangumi?bangumiId=3416";

            let channel = parse_mikan_rss_channel_from_rss_link(None, bangumi_url)
                .await
                .expect("should get mikan channel from rss url");

            assert_matches!(
                &channel,
                MikanRssChannel::BangumiAggregation(MikanBangumiAggregationRssChannel { .. })
            );

            assert_matches!(&channel.name(), Some("叹气的亡灵想隐退"));
        }
    }
}
apps/recorder/src/extract/mikan/web_parser.rs (Normal file, +493)
@@ -0,0 +1,493 @@
use std::ops::Deref;

use bytes::Bytes;
use eyre::ContextCompat;
use html_escape::decode_html_entities;
use itertools::Itertools;
use lazy_static::lazy_static;
use lightningcss::{properties::Property, values::image::Image as CSSImage};
use loco_rs::app::AppContext;
use regex::Regex;
use scraper::Html;
use url::Url;

use super::{
    parse_mikan_bangumi_id_from_rss_link, AppMikanClient, MikanBangumiRssLink, MIKAN_BUCKET_KEY,
};
use crate::{
    app::AppContextExt,
    dal::DalContentCategory,
    extract::html::parse_style_attr,
    fetch::{html::download_html_with_client, image::download_image_with_client},
    models::subscribers,
};

#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeMeta {
    pub homepage: Url,
    pub origin_poster_src: Option<Url>,
    pub bangumi_title: String,
    pub episode_title: String,
    pub fansub: String,
    pub mikan_bangumi_id: String,
    pub mikan_fansub_id: String,
    pub mikan_episode_id: String,
}

#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiMeta {
    pub homepage: Url,
    pub origin_poster_src: Option<Url>,
    pub bangumi_title: String,
    pub mikan_bangumi_id: String,
    pub mikan_fansub_id: Option<String>,
    pub fansub: Option<String>,
    pub mikan_fansub_candidates: Vec<(String, String)>,
}

#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiPosterMeta {
    pub origin_poster_src: Url,
    pub poster_data: Option<Bytes>,
    pub poster_src: Option<String>,
}

#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeHomepage {
    pub mikan_episode_id: String,
}

lazy_static! {
    static ref MIKAN_TITLE_SEASON: Regex = Regex::new("第.*季").unwrap();
}

pub fn build_mikan_bangumi_homepage(
    mikan_base_url: &str,
    mikan_bangumi_id: &str,
    mikan_fansub_id: Option<&str>,
) -> eyre::Result<Url> {
    let mut url = Url::parse(mikan_base_url)?;
    url.set_path(&format!("/Home/Bangumi/{mikan_bangumi_id}"));
    url.set_fragment(mikan_fansub_id);
    Ok(url)
}

pub fn build_mikan_episode_homepage(
    mikan_base_url: &str,
    mikan_episode_id: &str,
) -> eyre::Result<Url> {
    let mut url = Url::parse(mikan_base_url)?;
    url.set_path(&format!("/Home/Episode/{mikan_episode_id}"));
    Ok(url)
}
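
// e.g. `build_mikan_episode_homepage("https://mikanani.me/", "475184dce83ea2b82902592a5ac3343f6d54b36a")`
// produces `https://mikanani.me/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a`,
// the URL form that `parse_mikan_episode_id_from_homepage` below inverts.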

pub fn parse_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeHomepage> {
    if url.path().starts_with("/Home/Episode/") {
        let mikan_episode_id = url.path().replace("/Home/Episode/", "");
        Some(MikanEpisodeHomepage { mikan_episode_id })
    } else {
        None
    }
}

pub async fn parse_mikan_bangumi_poster_from_origin_poster_src(
    client: Option<&AppMikanClient>,
    origin_poster_src: Url,
) -> eyre::Result<MikanBangumiPosterMeta> {
    let http_client = client.map(|s| s.deref());
    let poster_data = download_image_with_client(http_client, origin_poster_src.clone()).await?;
    Ok(MikanBangumiPosterMeta {
        origin_poster_src,
        poster_data: Some(poster_data),
        poster_src: None,
    })
}

pub async fn parse_mikan_bangumi_poster_from_origin_poster_src_with_cache(
    ctx: &AppContext,
    origin_poster_src: Url,
    subscriber_id: i32,
) -> eyre::Result<MikanBangumiPosterMeta> {
    let dal_client = ctx.get_dal_client();
    let mikan_client = ctx.get_mikan_client();
    let subscriber_pid = &subscribers::Model::find_pid_by_id_with_cache(ctx, subscriber_id).await?;
    if let Some(poster_src) = dal_client
        .exists_object(
            DalContentCategory::Image,
            subscriber_pid,
            Some(MIKAN_BUCKET_KEY),
            &origin_poster_src.path().replace("/images/Bangumi/", ""),
        )
        .await?
    {
        return Ok(MikanBangumiPosterMeta {
            origin_poster_src,
            poster_data: None,
            poster_src: Some(poster_src.to_string()),
        });
    }

    let poster_data =
        download_image_with_client(Some(mikan_client.deref()), origin_poster_src.clone()).await?;

    let poster_str = dal_client
        .store_object(
            DalContentCategory::Image,
            subscriber_pid,
            Some(MIKAN_BUCKET_KEY),
            &origin_poster_src.path().replace("/images/Bangumi/", ""),
            poster_data.clone(),
        )
        .await?;

    Ok(MikanBangumiPosterMeta {
        origin_poster_src,
        poster_data: Some(poster_data),
        poster_src: Some(poster_str.to_string()),
    })
}

pub async fn parse_mikan_bangumi_meta_from_mikan_homepage(
    client: Option<&AppMikanClient>,
    url: Url,
) -> eyre::Result<MikanBangumiMeta> {
    let http_client = client.map(|s| s.deref());
    let url_host = url.origin().unicode_serialization();
    let content = download_html_with_client(http_client, url.as_str()).await?;
    let html = Html::parse_document(&content);

    let bangumi_fansubs = html
        .select(&scraper::Selector::parse(".subgroup-text").unwrap())
        .filter_map(|el| {
            if let (Some(fansub_id), Some(fansub_name)) = (
                el.value()
                    .attr("id")
                    .map(|s| decode_html_entities(s).trim().to_string()),
                el.select(&scraper::Selector::parse("a:nth-child(1)").unwrap())
                    .next()
                    .map(|child| {
                        let mut s = String::from(
                            child
                                .prev_sibling()
                                .and_then(|t| t.value().as_text())
                                .map(|s| s.trim())
                                .unwrap_or_default(),
                        );
                        s.extend(child.text());
                        decode_html_entities(&s).trim().to_string()
                    }),
            ) {
                Some((fansub_id, fansub_name))
            } else {
                None
            }
        })
        .collect_vec();

    let fansub_info = url.fragment().and_then(|b| {
        bangumi_fansubs
            .iter()
            .find_map(|(id, name)| if id == b { Some((id, name)) } else { None })
    });

    let bangumi_title = html
        .select(&scraper::Selector::parse(".bangumi-title").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .trim()
                .to_string()
        })
        .and_then(|title| if title.is_empty() { None } else { Some(title) })
        .wrap_err_with(|| {
            // todo: error handler
            format!("Missing mikan bangumi official title for {}", url)
        })?;

    let MikanBangumiRssLink {
        mikan_bangumi_id, ..
    } = html
        .select(&scraper::Selector::parse(".bangumi-title > .mikan-rss").unwrap())
        .next()
        .and_then(|el| el.value().attr("href"))
        .as_ref()
        .and_then(|s| url.join(s).ok())
        .and_then(|rss_link_url| parse_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .wrap_err_with(|| {
            // todo: error handler
            format!("Missing mikan bangumi rss link or error format for {}", url)
        })?;

    let origin_poster_src = html
        .select(&scraper::Selector::parse(".bangumi-poster").unwrap())
        .next()
        .and_then(|el| el.value().attr("style"))
        .as_ref()
        .and_then(|s| parse_style_attr(s))
        .and_then(|style| {
            style.iter().find_map(|(prop, _)| {
                match prop {
                    Property::BackgroundImage(images) => {
                        for img in images {
                            if let CSSImage::Url(path) = img {
                                if let Ok(url) =
                                    Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                                {
                                    return Some(url);
                                }
                            }
                        }
                    }
                    Property::Background(backgrounds) => {
                        for bg in backgrounds {
                            if let CSSImage::Url(path) = &bg.image {
                                if let Ok(url) =
                                    Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                                {
                                    return Some(url);
                                }
                            }
                        }
                    }
                    _ => {}
                }
                None
            })
        })
        .map(|mut origin_poster_src| {
            origin_poster_src.set_query(None);
            origin_poster_src
        });

    Ok(MikanBangumiMeta {
        homepage: url,
        bangumi_title,
        origin_poster_src,
        mikan_bangumi_id,
        fansub: fansub_info.map(|s| s.1.to_string()),
        mikan_fansub_id: fansub_info.map(|s| s.0.to_string()),
        mikan_fansub_candidates: bangumi_fansubs.clone(),
    })
}

pub async fn parse_mikan_episode_meta_from_mikan_homepage(
    client: Option<&AppMikanClient>,
    url: Url,
) -> eyre::Result<MikanEpisodeMeta> {
    let http_client = client.map(|s| s.deref());
    let url_host = url.origin().unicode_serialization();
    let content = download_html_with_client(http_client, url.as_str()).await?;

    let html = Html::parse_document(&content);

    let bangumi_title = html
        .select(&scraper::Selector::parse(".bangumi-title").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .trim()
                .to_string()
        })
        .and_then(|title| if title.is_empty() { None } else { Some(title) })
        .wrap_err_with(|| {
            // todo: error handler
            format!("Missing mikan bangumi official title for {}", url)
        })?;

    let episode_title = html
        .select(&scraper::Selector::parse("title").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .replace(" - Mikan Project", "")
                .trim()
                .to_string()
        })
        .and_then(|title| if title.is_empty() { None } else { Some(title) })
        .wrap_err_with(|| {
            // todo: error handler
            format!("Missing mikan episode official title for {}", url)
        })?;

    let (mikan_bangumi_id, mikan_fansub_id) = html
        .select(&scraper::Selector::parse(".bangumi-title > .mikan-rss").unwrap())
        .next()
        .and_then(|el| el.value().attr("href"))
        .as_ref()
        .and_then(|s| url.join(s).ok())
        .and_then(|rss_link_url| parse_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .and_then(
            |MikanBangumiRssLink {
                 mikan_bangumi_id,
                 mikan_fansub_id,
                 ..
             }| {
                mikan_fansub_id.map(|mikan_fansub_id| (mikan_bangumi_id, mikan_fansub_id))
            },
        )
        .wrap_err_with(|| {
            // todo: error handler
            format!("Missing mikan bangumi rss link or error format for {}", url)
        })?;

    let fansub = html
        .select(&scraper::Selector::parse(".bangumi-info>.magnet-link-wrap").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .trim()
                .to_string()
        })
        .wrap_err_with(|| {
            // todo: error handler
            format!("Missing mikan bangumi fansub name for {}", url)
        })?;

    let origin_poster_src = html
        .select(&scraper::Selector::parse(".bangumi-poster").unwrap())
        .next()
        .and_then(|el| el.value().attr("style"))
        .as_ref()
        .and_then(|s| parse_style_attr(s))
        .and_then(|style| {
            style.iter().find_map(|(prop, _)| {
                match prop {
                    Property::BackgroundImage(images) => {
                        for img in images {
                            if let CSSImage::Url(path) = img {
                                if let Ok(url) =
                                    Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                                {
                                    return Some(url);
                                }
                            }
                        }
                    }
                    Property::Background(backgrounds) => {
                        for bg in backgrounds {
                            if let CSSImage::Url(path) = &bg.image {
                                if let Ok(url) =
                                    Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                                {
                                    return Some(url);
                                }
                            }
                        }
                    }
                    _ => {}
                }
                None
            })
        })
        .map(|mut origin_poster_src| {
            origin_poster_src.set_query(None);
            origin_poster_src
        });

    let MikanEpisodeHomepage {
        mikan_episode_id, ..
    } = parse_mikan_episode_id_from_homepage(&url)
        .wrap_err_with(|| format!("Failed to extract mikan_episode_id from {}", &url))?;

    Ok(MikanEpisodeMeta {
        mikan_bangumi_id,
        mikan_fansub_id,
        bangumi_title,
        episode_title,
        homepage: url,
        origin_poster_src,
        fansub,
        mikan_episode_id,
    })
}

#[cfg(test)]
mod test {
    use std::assert_matches::assert_matches;

    use url::Url;
    use zune_image::{codecs::ImageFormat, image::Image};

    use super::{
        parse_mikan_bangumi_meta_from_mikan_homepage,
        parse_mikan_bangumi_poster_from_origin_poster_src,
        parse_mikan_episode_meta_from_mikan_homepage,
    };

    #[tokio::test]
    async fn test_parse_mikan_episode() {
        let test_fn = async || -> eyre::Result<()> {
            let url_str =
                "https://mikanani.me/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a";
            let url = Url::parse(url_str)?;

            let ep_meta = parse_mikan_episode_meta_from_mikan_homepage(None, url.clone()).await?;

            assert_eq!(ep_meta.homepage, url);
            assert_eq!(ep_meta.bangumi_title, "葬送的芙莉莲");
            assert_eq!(
                ep_meta.origin_poster_src,
                Some(Url::parse(
                    "https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
                )?)
            );
            assert_eq!(ep_meta.fansub, "LoliHouse");
            assert_eq!(ep_meta.mikan_fansub_id, "370");
            assert_eq!(ep_meta.mikan_bangumi_id, "3141");

            assert_matches!(ep_meta.origin_poster_src, Some(..));

            let bgm_poster = parse_mikan_bangumi_poster_from_origin_poster_src(
                None,
                ep_meta.origin_poster_src.unwrap(),
            )
            .await?;
            let u8_data = bgm_poster.poster_data.expect("should have poster data");
            let image = Image::read(u8_data.to_vec(), Default::default());
            assert!(
                image.is_ok_and(|img| img
                    .metadata()
                    .get_image_format()
                    .is_some_and(|fmt| matches!(fmt, ImageFormat::JPEG))),
                "should start with valid jpeg data magic number"
            );

            Ok(())
        };

        test_fn().await.expect("test parse mikan failed");
    }

    #[tokio::test]
    async fn test_parse_mikan_bangumi() {
        let test_fn = async || -> eyre::Result<()> {
            let url_str = "https://mikanani.me/Home/Bangumi/3416#370";
            let url = Url::parse(url_str)?;

            let bgm_meta = parse_mikan_bangumi_meta_from_mikan_homepage(None, url.clone()).await?;

            assert_eq!(bgm_meta.homepage, url);
            assert_eq!(bgm_meta.bangumi_title, "叹气的亡灵想隐退");
            assert_eq!(
                bgm_meta.origin_poster_src,
                Some(Url::parse(
                    "https://mikanani.me/images/Bangumi/202410/480ef127.jpg"
                )?)
            );
            assert_eq!(bgm_meta.fansub, Some(String::from("LoliHouse")));
            assert_eq!(bgm_meta.mikan_fansub_id, Some(String::from("370")));
            assert_eq!(bgm_meta.mikan_bangumi_id, "3416");

            assert_eq!(
                bgm_meta.homepage.as_str(),
                "https://mikanani.me/Home/Bangumi/3416#370"
            );

            assert_eq!(bgm_meta.mikan_fansub_candidates.len(), 6);

            Ok(())
        };

        test_fn().await.expect("test parse mikan failed");
    }
}
apps/recorder/src/extract/mod.rs (Normal file, +6)
@@ -0,0 +1,6 @@
pub mod defs;
pub mod errors;
pub mod html;
pub mod mikan;
pub mod rawname;
pub mod torrent;
apps/recorder/src/extract/rawname/mod.rs (Normal file, +5)
@@ -0,0 +1,5 @@
pub mod parser;

pub use parser::{
    extract_season_from_title_body, parse_episode_meta_from_raw_name, RawEpisodeMeta,
};
843
apps/recorder/src/extract/rawname/parser.rs
Normal file
843
apps/recorder/src/extract/rawname/parser.rs
Normal file
@@ -0,0 +1,843 @@
use std::borrow::Cow;

use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};

use crate::extract::defs::{DIGIT_1PLUS_REG, ZH_NUM_MAP, ZH_NUM_RE};

const NAME_EXTRACT_REPLACE_ADHOC1_REPLACED: &str = "$1/$2";

lazy_static! {
    static ref TITLE_RE: Regex = Regex::new(
        r#"(.*|\[.*])( -? \d+|\[\d+]|\[\d+.?[vV]\d]|第\d+[话話集]|\[第?\d+[话話集]]|\[\d+.?END]|[Ee][Pp]?\d+|\[\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*[话話集]\s*])(.*)"#
    ).unwrap();
    static ref EP_COLLECTION_RE: Regex = Regex::new(r#"\[?\s*\d+\s*[\-\~]\s*\d+\s*\p{scx=Han}*合?[话話集]\s*]?"#).unwrap();
    static ref MOVIE_TITLE_RE: Regex = Regex::new(r#"(.*|\[.*])(剧场版|[Mm]ovie|电影)(.*?)$"#).unwrap();
    static ref RESOLUTION_RE: Regex = Regex::new(r"1080|720|2160|4K|2K").unwrap();
    static ref SOURCE_L1_RE: Regex = Regex::new(r"B-Global|[Bb]aha|[Bb]ilibili|AT-X|W[Ee][Bb][Rr][Ii][Pp]|Sentai|B[Dd][Rr][Ii][Pp]|UHD[Rr][Ii][Pp]|NETFLIX").unwrap();
    static ref SOURCE_L2_RE: Regex = Regex::new(r"AMZ|CR|W[Ee][Bb]|B[Dd]").unwrap();
    static ref SUB_RE: Regex = Regex::new(r"[简繁日字幕]|CH|BIG5|GB").unwrap();
    static ref PREFIX_RE: Regex =
        Regex::new(r"[^\w\s\p{Unified_Ideograph}\p{scx=Han}\p{scx=Hira}\p{scx=Kana}-]").unwrap();
    static ref EN_BRACKET_SPLIT_RE: Regex = Regex::new(r"[\[\]]").unwrap();
    static ref MOVIE_SEASON_EXTRACT_RE: Regex = Regex::new(r"剧场版|Movie|电影").unwrap();
    static ref MAIN_TITLE_PREFIX_PROCESS_RE1: Regex = Regex::new(r"新番|月?番").unwrap();
    static ref MAIN_TITLE_PREFIX_PROCESS_RE2: Regex = Regex::new(r"[港澳台]{1,3}地区").unwrap();
    static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE: Regex = Regex::new(r"\[.+\]").unwrap();
    static ref MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1: Regex = Regex::new(r"^.*?\[").unwrap();
    static ref SEASON_EXTRACT_SEASON_ALL_RE: Regex = Regex::new(r"S\d{1,2}|Season \d{1,2}|[第].[季期]|1st|2nd|3rd|\d{1,2}th").unwrap();
    static ref SEASON_EXTRACT_SEASON_EN_PREFIX_RE: Regex = Regex::new(r"Season|S").unwrap();
    static ref SEASON_EXTRACT_SEASON_EN_NTH_RE: Regex = Regex::new(r"1st|2nd|3rd|\d{1,2}th").unwrap();
    static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_RE: Regex = Regex::new(r"[第 ].*[季期(部分)]|部分").unwrap();
    static ref SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE: Regex = Regex::new(r"[第季期 ]").unwrap();
    static ref NAME_EXTRACT_REMOVE_RE: Regex = Regex::new(r"[((]仅限[港澳台]{1,3}地区[))]").unwrap();
    static ref NAME_EXTRACT_SPLIT_RE: Regex = Regex::new(r"/|\s{2}|-\s{2}|\]\[").unwrap();
    static ref NAME_EXTRACT_REPLACE_ADHOC1_RE: Regex = Regex::new(r"([\p{scx=Han}\s\(\)]{5,})_([a-zA-Z]{2,})").unwrap();
    static ref NAME_JP_TEST: Regex = Regex::new(r"[\p{scx=Hira}\p{scx=Kana}]{2,}").unwrap();
    static ref NAME_ZH_TEST: Regex = Regex::new(r"[\p{scx=Han}]{2,}").unwrap();
    static ref NAME_EN_TEST: Regex = Regex::new(r"[a-zA-Z]{3,}").unwrap();
    static ref TAGS_EXTRACT_SPLIT_RE: Regex = Regex::new(r"[\[\]()()_]").unwrap();
    static ref CLEAR_SUB_RE: Regex = Regex::new(r"_MP4|_MKV").unwrap();
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
pub struct RawEpisodeMeta {
    pub name_en: Option<String>,
    pub name_en_no_season: Option<String>,
    pub name_jp: Option<String>,
    pub name_jp_no_season: Option<String>,
    pub name_zh: Option<String>,
    pub name_zh_no_season: Option<String>,
    pub season: i32,
    pub season_raw: Option<String>,
    pub episode_index: i32,
    pub subtitle: Option<String>,
    pub source: Option<String>,
    pub fansub: Option<String>,
    pub resolution: Option<String>,
}

fn extract_fansub(raw_name: &str) -> Option<&str> {
    let mut groups = EN_BRACKET_SPLIT_RE.splitn(raw_name, 3);
    groups.nth(1)
}

fn replace_ch_bracket_to_en(raw_name: &str) -> String {
    raw_name.replace('【', "[").replace('】', "]")
}

fn title_body_pre_process(title_body: &str, fansub: Option<&str>) -> eyre::Result<String> {
    let raw_without_fansub = if let Some(fansub) = fansub {
        let fan_sub_re = Regex::new(&format!(".{fansub}."))?;
        fan_sub_re.replace_all(title_body, "")
    } else {
        Cow::Borrowed(title_body)
    };
    let raw_with_prefix_replaced = PREFIX_RE.replace_all(&raw_without_fansub, "/");
    let mut arg_group = raw_with_prefix_replaced
        .split('/')
        .map(|s| s.trim())
        .filter(|s| !s.is_empty())
        .collect::<Vec<_>>();

    if arg_group.len() == 1 {
        arg_group = arg_group.first_mut().unwrap().split(' ').collect();
    }
    let mut raw = raw_without_fansub.to_string();
    for arg in arg_group.iter() {
        if (arg_group.len() <= 5 && MAIN_TITLE_PREFIX_PROCESS_RE1.is_match(arg))
            || (MAIN_TITLE_PREFIX_PROCESS_RE2.is_match(arg))
        {
            let sub = Regex::new(&format!(".{arg}."))?;
            raw = sub.replace_all(&raw, "").to_string();
        }
    }
    if let Some(m) = MAIN_TITLE_PRE_PROCESS_BACKETS_RE.find(&raw) {
        if m.len() as f32 > (raw.len() as f32) * 0.5 {
            let mut raw1 = MAIN_TITLE_PRE_PROCESS_BACKETS_RE_SUB1
                .replace(&raw, "")
                .chars()
                .collect_vec();
            while let Some(ch) = raw1.pop() {
                if ch == ']' {
                    break;
                }
            }
            raw = raw1.into_iter().collect();
        }
    }
    Ok(raw.to_string())
}

pub fn extract_season_from_title_body(title_body: &str) -> (String, Option<String>, i32) {
    let name_and_season = EN_BRACKET_SPLIT_RE.replace_all(title_body, " ");
    let seasons = SEASON_EXTRACT_SEASON_ALL_RE
        .find(&name_and_season)
        .into_iter()
        .map(|s| s.as_str())
        .collect_vec();

    if seasons.is_empty() {
        return (title_body.to_string(), None, 1);
    }

    let mut season = 1;
    let mut season_raw = None;
    let name = SEASON_EXTRACT_SEASON_ALL_RE.replace_all(&name_and_season, "");

    for s in seasons {
        season_raw = Some(s);
        if let Some(m) = SEASON_EXTRACT_SEASON_EN_PREFIX_RE.find(s) {
            if let Ok(s) = SEASON_EXTRACT_SEASON_ALL_RE
                .replace_all(m.as_str(), "")
                .parse::<i32>()
            {
                season = s;
                break;
            }
        }
        if let Some(m) = SEASON_EXTRACT_SEASON_EN_NTH_RE.find(s) {
            if let Some(s) = DIGIT_1PLUS_REG
                .find(m.as_str())
                .and_then(|s| s.as_str().parse::<i32>().ok())
            {
                season = s;
                break;
            }
        }
        if let Some(m) = SEASON_EXTRACT_SEASON_ZH_PREFIX_RE.find(s) {
            if let Ok(s) = SEASON_EXTRACT_SEASON_ZH_PREFIX_SUB_RE
                .replace(m.as_str(), "")
                .parse::<i32>()
            {
                season = s;
                break;
            }
            if let Some(m) = ZH_NUM_RE.find(m.as_str()) {
                season = ZH_NUM_MAP[m.as_str()];
                break;
            }
        }
    }

    (name.to_string(), season_raw.map(|s| s.to_string()), season)
}

fn extract_name_from_title_body_name_section(
    title_body_name_section: &str,
) -> (Option<String>, Option<String>, Option<String>) {
    let mut name_en = None;
    let mut name_zh = None;
    let mut name_jp = None;
    let replaced1 = NAME_EXTRACT_REMOVE_RE.replace_all(title_body_name_section, "");
    let replaced2 = NAME_EXTRACT_REPLACE_ADHOC1_RE
        .replace_all(&replaced1, NAME_EXTRACT_REPLACE_ADHOC1_REPLACED);
    let trimmed = replaced2.trim();
    let mut split = NAME_EXTRACT_SPLIT_RE
        .split(trimmed)
        .map(|s| s.trim())
        .filter(|s| !s.is_empty())
        .map(|s| s.to_string())
        .collect_vec();
    if split.len() == 1 {
        let mut split_space = split[0].split(' ').collect_vec();
        let mut search_indices = vec![0];
        if split_space.len() > 1 {
            search_indices.push(split_space.len() - 1);
        }
        for i in search_indices {
            if NAME_ZH_TEST.is_match(split_space[i]) {
                let chs = split_space[i];
                split_space.remove(i);
                split = vec![chs.to_string(), split_space.join(" ")];
                break;
            }
        }
    }
    for item in split {
        if NAME_JP_TEST.is_match(&item) && name_jp.is_none() {
            name_jp = Some(item);
        } else if NAME_ZH_TEST.is_match(&item) && name_zh.is_none() {
            name_zh = Some(item);
        } else if NAME_EN_TEST.is_match(&item) && name_en.is_none() {
            name_en = Some(item);
        }
    }
    (name_en, name_zh, name_jp)
}

fn extract_episode_index_from_title_episode(title_episode: &str) -> Option<i32> {
    DIGIT_1PLUS_REG
        .find(title_episode)?
        .as_str()
        .parse::<i32>()
        .ok()
}

fn clear_sub(sub: Option<String>) -> Option<String> {
    sub.map(|s| CLEAR_SUB_RE.replace_all(&s, "").to_string())
}

fn extract_tags_from_title_extra(
    title_extra: &str,
) -> (Option<String>, Option<String>, Option<String>) {
    let replaced = TAGS_EXTRACT_SPLIT_RE.replace_all(title_extra, " ");
    let elements = replaced
        .split(' ')
        .map(|s| s.trim())
        .filter(|s| !s.is_empty())
        .collect_vec();

    let mut sub = None;
    let mut resolution = None;
    let mut source = None;
    for element in elements.iter() {
        if SUB_RE.is_match(element) {
            sub = Some(element.to_string())
        } else if RESOLUTION_RE.is_match(element) {
            resolution = Some(element.to_string())
        } else if SOURCE_L1_RE.is_match(element) {
            source = Some(element.to_string())
        }
    }
    if source.is_none() {
        for element in elements {
            if SOURCE_L2_RE.is_match(element) {
                source = Some(element.to_string())
            }
        }
    }
    (clear_sub(sub), resolution, source)
}

pub fn check_is_movie(title: &str) -> bool {
    MOVIE_TITLE_RE.is_match(title)
}

pub fn parse_episode_meta_from_raw_name(s: &str) -> eyre::Result<RawEpisodeMeta> {
    let raw_title = s.trim();
    let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
    let fansub = extract_fansub(&raw_title_without_ch_brackets);
    let movie_capture = check_is_movie(&raw_title_without_ch_brackets);
    if let Some(title_re_match_obj) = MOVIE_TITLE_RE
        .captures(&raw_title_without_ch_brackets)
        .or(TITLE_RE.captures(&raw_title_without_ch_brackets))
    {
        let mut title_body = title_re_match_obj
            .get(1)
            .map(|s| s.as_str().trim())
            .unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"))
            .to_string();
        let mut title_episode = title_re_match_obj
            .get(2)
            .map(|s| s.as_str().trim())
            .unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));
        let title_extra = title_re_match_obj
            .get(3)
            .map(|s| s.as_str().trim())
            .unwrap_or_else(|| unreachable!("TITLE_RE has at least 3 capture groups"));

        if movie_capture {
            title_body += title_episode;
            title_episode = "";
        } else if EP_COLLECTION_RE.is_match(title_episode) {
            title_episode = "";
        }

        let title_body = title_body_pre_process(&title_body, fansub)?;
        let (name_without_season, season_raw, season) = extract_season_from_title_body(&title_body);
        let (name_en, name_zh, name_jp) = extract_name_from_title_body_name_section(&title_body);
        let (name_en_no_season, name_zh_no_season, name_jp_no_season) =
            extract_name_from_title_body_name_section(&name_without_season);
        let episode_index = extract_episode_index_from_title_episode(title_episode).unwrap_or(1);
        let (sub, resolution, source) = extract_tags_from_title_extra(title_extra);
        Ok(RawEpisodeMeta {
            name_en,
            name_en_no_season,
            name_jp,
            name_jp_no_season,
            name_zh,
            name_zh_no_season,
            season,
            season_raw,
            episode_index,
            subtitle: sub,
            source,
            fansub: fansub.map(|s| s.to_string()),
            resolution,
        })
    } else {
        Err(eyre::eyre!(
            "Can not parse episode meta from raw filename {}",
            raw_title
        ))
    }
}

#[cfg(test)]
mod tests {

    use super::{parse_episode_meta_from_raw_name, RawEpisodeMeta};

    fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
        let expected: Option<RawEpisodeMeta> = serde_json::from_str(expected).unwrap_or_default();
        let found = parse_episode_meta_from_raw_name(raw_name).ok();

        if expected != found {
            println!(
                "expected {} and found {} are not equal",
                serde_json::to_string_pretty(&expected).unwrap(),
                serde_json::to_string_pretty(&found).unwrap()
            )
        }
        assert_eq!(expected, found);
    }

    #[test]
    fn test_parse_ep_with_all_parts_wrapped() {
        test_raw_ep_parser_case(
            r#"[新Sub][1月新番][我心里危险的东西 第二季][05][HEVC][10Bit][1080P][简日双语][招募翻译]"#,
            r#"{
                "name_zh": "我心里危险的东西",
                "name_zh_no_season": "我心里危险的东西",
                "season": 2,
                "season_raw": "第二季",
                "episode_index": 5,
                "subtitle": "简日双语",
                "source": null,
                "fansub": "新Sub",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_title_wrapped_by_one_square_bracket_and_season_prefix() {
        test_raw_ep_parser_case(
            r#"【喵萌奶茶屋】★01月新番★[我内心的糟糕念头 / Boku no Kokoro no Yabai Yatsu][18][1080p][简日双语][招募翻译]"#,
            r#"{
                "name_en": "Boku no Kokoro no Yabai Yatsu",
                "name_en_no_season": "Boku no Kokoro no Yabai Yatsu",
                "name_zh": "我内心的糟糕念头",
                "name_zh_no_season": "我内心的糟糕念头",
                "season": 1,
                "season_raw": null,
                "episode_index": 18,
                "subtitle": "简日双语",
                "source": null,
                "fansub": "喵萌奶茶屋",
                "resolution": "1080p"
            }"#,
        );
    }

    #[test]
    fn test_parse_ep_with_ep_and_version() {
        test_raw_ep_parser_case(
            r#"[LoliHouse] 因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd / Shin no Nakama 2nd - 08v2 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
            r#"{
                "name_en": "Shin no Nakama 2nd",
                "name_en_no_season": "Shin no Nakama",
                "name_zh": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生 2nd",
                "name_zh_no_season": "因为不是真正的伙伴而被逐出勇者队伍,流落到边境展开慢活人生",
                "season": 2,
                "season_raw": "2nd",
                "episode_index": 8,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_en_title_only() {
        test_raw_ep_parser_case(
            r"[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
            r#"{
                "name_en": "THE MARGINAL SERVICE",
                "name_en_no_season": "THE MARGINAL SERVICE",
                "season": 1,
                "episode_index": 8,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "动漫国字幕组&LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_two_zh_title() {
        test_raw_ep_parser_case(
            r#"[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"#,
            r#"{
                "name_en": "Nozomanu Fushi no Boukensha",
                "name_en_no_season": "Nozomanu Fushi no Boukensha",
                "name_zh": "事与愿违的不死冒险者",
                "name_zh_no_season": "事与愿违的不死冒险者",
                "season": 1,
                "season_raw": null,
                "episode_index": 1,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_en_zh_jp_titles() {
        test_raw_ep_parser_case(
            r#"[喵萌奶茶屋&LoliHouse] 碰之道 / ぽんのみち / Pon no Michi - 07 [WebRip 1080p HEVC-10bit AAC][简繁日内封字幕]"#,
            r#"{
                "name_en": "Pon no Michi",
                "name_jp": "ぽんのみち",
                "name_zh": "碰之道",
                "name_en_no_season": "Pon no Michi",
                "name_jp_no_season": "ぽんのみち",
                "name_zh_no_season": "碰之道",
                "season": 1,
                "season_raw": null,
                "episode_index": 7,
                "subtitle": "简繁日内封字幕",
                "source": "WebRip",
                "fansub": "喵萌奶茶屋&LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_nth_season() {
        test_raw_ep_parser_case(
            r#"[ANi] Yowai Character Tomozakikun / 弱角友崎同学 2nd STAGE - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
            r#"{
                "name_en": "Yowai Character Tomozakikun",
                "name_en_no_season": "Yowai Character Tomozakikun",
                "name_zh": "弱角友崎同学 2nd STAGE",
                "name_zh_no_season": "弱角友崎同学",
                "season": 2,
                "season_raw": "2nd",
                "episode_index": 9,
                "subtitle": "CHT",
                "source": "Baha",
                "fansub": "ANi",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_season_en_and_season_zh() {
        test_raw_ep_parser_case(
            r#"[豌豆字幕组&LoliHouse] 王者天下 第五季 / Kingdom S5 - 07 [WebRip 1080p HEVC-10bit AAC][简繁外挂字幕]"#,
            r#"{
                "name_en": "Kingdom S5",
                "name_en_no_season": "Kingdom",
                "name_zh": "王者天下 第五季",
                "name_zh_no_season": "王者天下",
                "season": 5,
                "season_raw": "第五季",
                "episode_index": 7,
                "subtitle": "简繁外挂字幕",
                "source": "WebRip",
                "fansub": "豌豆字幕组&LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_airota_fansub_style_case1() {
        test_raw_ep_parser_case(
            r#"【千夏字幕组】【爱丽丝与特蕾丝的虚幻工厂_Alice to Therese no Maboroshi Koujou】[剧场版][WebRip_1080p_HEVC][简繁内封][招募新人]"#,
            r#"{
                "name_en": "Alice to Therese no Maboroshi Koujou",
                "name_en_no_season": "Alice to Therese no Maboroshi Koujou",
                "name_zh": "爱丽丝与特蕾丝的虚幻工厂",
                "name_zh_no_season": "爱丽丝与特蕾丝的虚幻工厂",
                "season": 1,
                "episode_index": 1,
                "subtitle": "简繁内封",
                "source": "WebRip",
                "fansub": "千夏字幕组",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_airota_fansub_style_case2() {
        test_raw_ep_parser_case(
            r#"[千夏字幕组&喵萌奶茶屋][电影 轻旅轻营 (摇曳露营) _Yuru Camp Movie][剧场版][UHDRip_2160p_HEVC][繁体][千夏15周年]"#,
            r#"{
                "name_en": "Yuru Camp Movie",
                "name_en_no_season": "Yuru Camp Movie",
                "name_zh": "电影 轻旅轻营 (摇曳露营)",
                "name_zh_no_season": "电影 轻旅轻营 (摇曳露营)",
                "season": 1,
                "episode_index": 1,
                "subtitle": "繁体",
                "source": "UHDRip",
                "fansub": "千夏字幕组&喵萌奶茶屋",
                "resolution": "2160p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_large_episode_style() {
        test_raw_ep_parser_case(
            r#"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"#,
            r#"{
                "name_en": "New Doraemon",
                "name_en_no_season": "New Doraemon",
                "name_zh": "哆啦A梦新番",
                "name_zh_no_season": "哆啦A梦新番",
                "season": 1,
                "episode_index": 747,
                "subtitle": "GB",
                "fansub": "梦蓝字幕组",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_many_square_brackets_split_title() {
        test_raw_ep_parser_case(
            r#"【MCE汉化组】[剧场版-摇曳露营][Yuru Camp][Movie][简日双语][1080P][x264 AAC]"#,
            r#"{
                "name_en": "Yuru Camp",
                "name_en_no_season": "Yuru Camp",
                "name_zh": "剧场版-摇曳露营",
                "name_zh_no_season": "剧场版-摇曳露营",
                "season": 1,
                "episode_index": 1,
                "subtitle": "简日双语",
                "fansub": "MCE汉化组",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_implicit_lang_title_sep() {
        test_raw_ep_parser_case(
            r#"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"#,
            r#"{
                "name_en": "NieR Automata Ver1.1a",
                "name_en_no_season": "NieR Automata Ver1.1a",
                "name_zh": "尼尔:机械纪元",
                "name_zh_no_season": "尼尔:机械纪元",
                "season": 1,
                "episode_index": 2,
                "subtitle": "简日双语",
                "fansub": "织梦字幕组",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_square_brackets_wrapped_and_space_split() {
        test_raw_ep_parser_case(
            r#"[天月搬运组][迷宫饭 Delicious in Dungeon][03][日语中字][MKV][1080P][NETFLIX][高画质版]"#,
            r#"{
                "name_en": "Delicious in Dungeon",
                "name_en_no_season": "Delicious in Dungeon",
                "name_zh": "迷宫饭",
                "name_zh_no_season": "迷宫饭",
                "season": 1,
                "episode_index": 3,
                "subtitle": "日语中字",
                "source": "NETFLIX",
                "fansub": "天月搬运组",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_start_with_brackets_wrapped_season_info_prefix() {
        test_raw_ep_parser_case(
            r#"[爱恋字幕社][1月新番][迷宫饭][Dungeon Meshi][01][1080P][MP4][简日双语] "#,
            r#"{
                "name_en": "Dungeon Meshi",
                "name_en_no_season": "Dungeon Meshi",
                "name_zh": "迷宫饭",
                "name_zh_no_season": "迷宫饭",
                "season": 1,
                "episode_index": 1,
                "subtitle": "简日双语",
                "fansub": "爱恋字幕社",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_small_no_title_extra_brackets_case() {
        test_raw_ep_parser_case(
            r#"[ANi] Mahou Shoujo ni Akogarete / 梦想成为魔法少女 [年龄限制版] - 09 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
            r#"{
                "name_en": "Mahou Shoujo ni Akogarete",
                "name_en_no_season": "Mahou Shoujo ni Akogarete",
                "name_zh": "梦想成为魔法少女 [年龄限制版]",
                "name_zh_no_season": "梦想成为魔法少女 [年龄限制版]",
                "season": 1,
                "episode_index": 9,
                "subtitle": "CHT",
                "source": "Baha",
                "fansub": "ANi",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_title_leading_space_style() {
        test_raw_ep_parser_case(
            r#"[ANi] 16bit 的感动 ANOTHER LAYER - 01 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"#,
            r#"{
                "name_zh": "16bit 的感动 ANOTHER LAYER",
                "name_zh_no_season": "16bit 的感动 ANOTHER LAYER",
                "season": 1,
                "season_raw": null,
                "episode_index": 1,
                "subtitle": "CHT",
                "source": "Baha",
                "fansub": "ANi",
                "resolution": "1080P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_title_leading_month_and_wrapped_brackets_style() {
        test_raw_ep_parser_case(
            r#"【喵萌奶茶屋】★07月新番★[银砂糖师与黑妖精 ~ Sugar Apple Fairy Tale ~][13][1080p][简日双语][招募翻译]"#,
            r#"{
                "name_en": "~ Sugar Apple Fairy Tale ~",
                "name_en_no_season": "~ Sugar Apple Fairy Tale ~",
                "name_zh": "银砂糖师与黑妖精",
                "name_zh_no_season": "银砂糖师与黑妖精",
                "season": 1,
                "episode_index": 13,
                "subtitle": "简日双语",
                "fansub": "喵萌奶茶屋",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_title_leading_month_style() {
        test_raw_ep_parser_case(
            r#"【极影字幕社】★4月新番 天国大魔境 Tengoku Daimakyou 第05话 GB 720P MP4(字幕社招人内详)"#,
            r#"{
                "name_en": "Tengoku Daimakyou",
                "name_en_no_season": "Tengoku Daimakyou",
                "name_zh": "天国大魔境",
                "name_zh_no_season": "天国大魔境",
                "season": 1,
                "episode_index": 5,
                "subtitle": "字幕社招人内详",
                "source": null,
                "fansub": "极影字幕社",
                "resolution": "720P"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_tokusatsu_style() {
        test_raw_ep_parser_case(
            r#"[MagicStar] 假面骑士Geats / 仮面ライダーギーツ EP33 [WEBDL] [1080p] [TTFC]【生】"#,
            r#"{
                "name_jp": "仮面ライダーギーツ",
                "name_jp_no_season": "仮面ライダーギーツ",
                "name_zh": "假面骑士Geats",
                "name_zh_no_season": "假面骑士Geats",
                "season": 1,
                "episode_index": 33,
                "source": "WEBDL",
                "fansub": "MagicStar",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_parse_ep_with_multi_lang_zh_title() {
        test_raw_ep_parser_case(
            r#"[百冬练习组&LoliHouse] BanG Dream! 少女乐团派对!☆PICO FEVER! / Garupa Pico: Fever! - 26 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕][END] [101.69 MB]"#,
            r#"{
                "name_en": "Garupa Pico: Fever!",
                "name_en_no_season": "Garupa Pico: Fever!",
                "name_zh": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
                "name_zh_no_season": "BanG Dream! 少女乐团派对!☆PICO FEVER!",
                "season": 1,
                "episode_index": 26,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "百冬练习组&LoliHouse",
                "resolution": "1080p"
            }"#,
        )
    }

    #[test]
    fn test_ep_collections() {
        test_raw_ep_parser_case(
            r#"[奶²&LoliHouse] 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简日内封字幕]"#,
            r#"{
                "name_en": "Kinokoinu: Mushroom Pup",
                "name_en_no_season": "Kinokoinu: Mushroom Pup",
                "name_zh": "蘑菇狗",
                "name_zh_no_season": "蘑菇狗",
                "season": 1,
                "episode_index": 1,
                "subtitle": "简日内封字幕",
                "source": "WebRip",
                "fansub": "奶²&LoliHouse",
                "resolution": "1080p",
                "name": " 蘑菇狗 / Kinokoinu: Mushroom Pup [01-12 精校合集]"
            }"#,
        );

        test_raw_ep_parser_case(
            r#"[LoliHouse] 叹气的亡灵想隐退 / Nageki no Bourei wa Intai shitai [01-13 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
            r#"{
                "name_en": "Nageki no Bourei wa Intai shitai",
                "name_en_no_season": "Nageki no Bourei wa Intai shitai",
                "name_jp": null,
                "name_jp_no_season": null,
                "name_zh": "叹气的亡灵想隐退",
                "name_zh_no_season": "叹气的亡灵想隐退",
                "season": 1,
                "season_raw": null,
                "episode_index": 1,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "LoliHouse",
                "resolution": "1080p"
            }"#,
        );

        test_raw_ep_parser_case(
            r#"[LoliHouse] 精灵幻想记 第二季 / Seirei Gensouki S2 [01-12 合集][WebRip 1080p HEVC-10bit AAC][简繁内封字幕][Fin]"#,
            r#"{
                "name_en": "Seirei Gensouki S2",
                "name_en_no_season": "Seirei Gensouki",
                "name_zh": "精灵幻想记 第二季",
                "name_zh_no_season": "精灵幻想记",
                "season": 2,
                "season_raw": "第二季",
                "episode_index": 1,
                "subtitle": "简繁内封字幕",
                "source": "WebRip",
                "fansub": "LoliHouse",
                "resolution": "1080p"
            }"#,
        );

        test_raw_ep_parser_case(
            r#"[喵萌奶茶屋&LoliHouse] 超自然武装当哒当 / 胆大党 / Dandadan [01-12 精校合集][WebRip 1080p HEVC-10bit AAC][简繁日内封字幕][Fin]"#,
            r#"{
                "name_en": "Dandadan",
                "name_en_no_season": "Dandadan",
                "name_zh": "超自然武装当哒当",
                "name_zh_no_season": "超自然武装当哒当",
                "season": 1,
                "episode_index": 1,
                "subtitle": "简繁日内封字幕",
                "source": "WebRip",
                "fansub": "喵萌奶茶屋&LoliHouse",
                "resolution": "1080p"
            }"#,
        );
    }

    // TODO: FIXME
    #[test]
    fn test_bad_cases() {
        test_raw_ep_parser_case(
            r#"[7³ACG x 桜都字幕组] 摇曳露营△ 剧场版/映画 ゆるキャン△/Eiga Yuru Camp△ [简繁字幕] BDrip 1080p x265 FLAC 2.0"#,
            r#"{
                "name_zh": "摇曳露营△剧场版",
                "name_zh_no_season": "摇曳露营△剧场版",
                "season": 1,
                "season_raw": null,
                "episode_index": 1,
                "subtitle": "简繁字幕",
                "source": "BDrip",
                "fansub": "7³ACG x 桜都字幕组",
                "resolution": "1080p"
            }"#,
        );

        test_raw_ep_parser_case(
            r#"【幻樱字幕组】【4月新番】【古见同学有交流障碍症 第二季 Komi-san wa, Komyushou Desu. S02】【22】【GB_MP4】【1920X1080】"#,
            r#"{
                "name_en": "第二季 Komi-san wa, Komyushou Desu. S02",
                "name_en_no_season": "Komi-san wa, Komyushou Desu.",
                "name_zh": "古见同学有交流障碍症",
                "name_zh_no_season": "古见同学有交流障碍症",
                "season": 2,
                "season_raw": "第二季",
                "episode_index": 22,
                "subtitle": "GB",
                "fansub": "幻樱字幕组",
                "resolution": "1920X1080"
            }"#,
        );
    }
}
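A sketch of how parse_episode_meta_from_raw_name is meant to be called from the rest of the app; the crate path assumes the library crate is named recorder, and the release name is one of the test fixtures above:

fn demo() -> eyre::Result<()> {
    use recorder::extract::rawname::parse_episode_meta_from_raw_name;

    let meta = parse_episode_meta_from_raw_name(
        "[LoliHouse] 事与愿违的不死冒险者 / 非自愿的不死冒险者 / Nozomanu Fushi no Boukensha - 01 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]",
    )?;
    // per the matching test above: season 1, episode 1, fansub "LoliHouse", resolution "1080p"
    println!("{meta:?}");
    Ok(())
}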
3
apps/recorder/src/extract/torrent/mod.rs
Normal file
3
apps/recorder/src/extract/torrent/mod.rs
Normal file
@@ -0,0 +1,3 @@
mod parser;

pub use parser::*;
316
apps/recorder/src/extract/torrent/parser.rs
Normal file
316
apps/recorder/src/extract/torrent/parser.rs
Normal file
@@ -0,0 +1,316 @@
use eyre::OptionExt;
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use quirks_path::Path;
use regex::Regex;
use serde::{Deserialize, Serialize};

use crate::extract::defs::SUBTITLE_LANG;

lazy_static! {
    static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
        vec![
            // NOTE: the first two patterns are reconstructed (this hunk is garbled in the
            // rendered diff; only the tail of the second pattern survives). They follow the
            // AutoBangumi-style episode rules: "Title - 01 [...]" and "Title [01] ...".
            FancyRegex::new(
                r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)",
            )
            .unwrap(),
            FancyRegex::new(
                r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)",
            )
            .unwrap(),
            FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
            FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
            FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
        ]
    };
    static ref GET_FANSUB_SPLIT_RE: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
    static ref GET_FANSUB_FULL_MATCH_RE: Regex = Regex::new(r"^\d+$").unwrap();
    static ref GET_SEASON_AND_TITLE_SUB_RE: Regex = Regex::new(r"([Ss]|Season )\d{1,3}").unwrap();
    static ref GET_SEASON_AND_TITLE_FIND_RE: Regex =
        Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeMediaMeta {
    pub fansub: Option<String>,
    pub title: String,
    pub season: i32,
    pub episode_index: i32,
    pub extname: String,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeSubtitleMeta {
    pub media: TorrentEpisodeMediaMeta,
    pub lang: Option<String>,
}

fn get_fansub(group_and_title: &str) -> (Option<&str>, &str) {
    let n = GET_FANSUB_SPLIT_RE
        .split(group_and_title)
        .map(|s| s.trim())
        .filter(|s| !s.is_empty())
        .collect::<Vec<_>>();

    match (n.first(), n.get(1)) {
        (None, None) => (None, ""),
        (Some(n0), None) => (None, *n0),
        (Some(n0), Some(n1)) => {
            if GET_FANSUB_FULL_MATCH_RE.is_match(n1) {
                (None, group_and_title)
            } else {
                (Some(*n0), *n1)
            }
        }
        _ => unreachable!("a vec that contains n1 must also contain n0"),
    }
}

fn get_season_and_title(season_and_title: &str) -> (String, i32) {
    let replaced_title = GET_SEASON_AND_TITLE_SUB_RE.replace_all(season_and_title, "");
    let title = replaced_title.trim().to_string();

    let season = GET_SEASON_AND_TITLE_FIND_RE
        .captures(season_and_title)
        .map(|m| {
            m.get(2)
                .unwrap_or_else(|| unreachable!("season regex should have 2 groups"))
                .as_str()
                .parse::<i32>()
                .unwrap_or_else(|_| unreachable!("season should be a number"))
        })
        .unwrap_or(1);

    (title, season)
}

fn get_subtitle_lang(media_name: &str) -> Option<&str> {
    let media_name_lower = media_name.to_lowercase();
    for (lang, lang_aliases) in SUBTITLE_LANG.iter() {
        if lang_aliases
            .iter()
            .any(|alias| media_name_lower.contains(alias))
        {
            return Some(lang);
        }
    }
    None
}

pub fn parse_episode_media_meta_from_torrent(
    torrent_path: &Path,
    torrent_name: Option<&str>,
    season: Option<i32>,
) -> eyre::Result<TorrentEpisodeMediaMeta> {
    let media_name = torrent_path
        .file_name()
        .ok_or_else(|| eyre::eyre!("failed to get file name of {}", torrent_path))?;
    let mut match_obj = None;
    for rule in TORRENT_EP_PARSE_RULES.iter() {
        match_obj = if let Some(torrent_name) = torrent_name.as_ref() {
            rule.captures(torrent_name)?
        } else {
            rule.captures(media_name)?
        };
        if match_obj.is_some() {
            break;
        }
    }
    if let Some(match_obj) = match_obj {
        let group_season_and_title = match_obj
            .get(1)
            .ok_or_else(|| eyre::eyre!("should have 1 group"))?
            .as_str();
        let (fansub, season_and_title) = get_fansub(group_season_and_title);
        let (title, season) = if let Some(season) = season {
            let (title, _) = get_season_and_title(season_and_title);
            (title, season)
        } else {
            get_season_and_title(season_and_title)
        };
        let episode_index = match_obj
            .get(2)
            .ok_or_eyre("should have 2 group")?
            .as_str()
            .parse::<i32>()
            .unwrap_or(1);
        let extname = torrent_path
            .extension()
            .map(|e| format!(".{}", e))
            .unwrap_or_default();
        Ok(TorrentEpisodeMediaMeta {
            fansub: fansub.map(|s| s.to_string()),
            title,
            season,
            episode_index,
            extname,
        })
    } else {
        Err(eyre::eyre!(
            "failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'",
            torrent_path,
            torrent_name
        ))
    }
}

pub fn parse_episode_subtitle_meta_from_torrent(
    torrent_path: &Path,
    torrent_name: Option<&str>,
    season: Option<i32>,
) -> eyre::Result<TorrentEpisodeSubtitleMeta> {
    let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
    let media_name = torrent_path
        .file_name()
        .ok_or_else(|| eyre::eyre!("failed to get file name of {}", torrent_path))?;

    let lang = get_subtitle_lang(media_name);

    Ok(TorrentEpisodeSubtitleMeta {
        media: media_meta,
        lang: lang.map(|s| s.to_string()),
    })
}

#[cfg(test)]
mod tests {
    use quirks_path::Path;

    use super::{
        parse_episode_media_meta_from_torrent, parse_episode_subtitle_meta_from_torrent,
        TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta,
    };

    #[test]
    fn test_lilith_raws_media() {
        test_torrent_ep_parser(
            r#"[Lilith-Raws] Boku no Kokoro no Yabai Yatsu - 01 [Baha][WEB-DL][1080p][AVC AAC][CHT][MP4].mp4"#,
            r#"{"fansub": "Lilith-Raws", "title": "Boku no Kokoro no Yabai Yatsu", "season": 1, "episode_index": 1, "extname": ".mp4"}"#,
        );
    }

    #[test]
    fn test_sakurato_media() {
        test_torrent_ep_parser(
            r#"[Sakurato] Tonikaku Kawaii S2 [03][AVC-8bit 1080p AAC][CHS].mp4"#,
            r#"{"fansub": "Sakurato", "title": "Tonikaku Kawaii", "season": 2, "episode_index": 3, "extname": ".mp4"}"#,
        )
    }

    #[test]
    fn test_lolihouse_media() {
        test_torrent_ep_parser(
            r#"[SweetSub&LoliHouse] Heavenly Delusion - 08 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"#,
            r#"{"fansub": "SweetSub&LoliHouse", "title": "Heavenly Delusion", "season": 1, "episode_index": 8, "extname": ".mkv"}"#,
        )
    }

    #[test]
    fn test_sbsub_media() {
        test_torrent_ep_parser(
            r#"[SBSUB][CONAN][1082][V2][1080P][AVC_AAC][CHS_JP](C1E4E331).mp4"#,
            r#"{"fansub": "SBSUB", "title": "CONAN", "season": 1, "episode_index": 1082, "extname": ".mp4"}"#,
        )
    }

    #[test]
    fn test_non_fansub_media() {
        test_torrent_ep_parser(
            r#"海盗战记 (2019) S04E11.mp4"#,
            r#"{"title": "海盗战记 (2019)", "season": 4, "episode_index": 11, "extname": ".mp4"}"#,
        )
    }

    #[test]
    fn test_non_fansub_media_with_dirname() {
        test_torrent_ep_parser(
            r#"海盗战记/海盗战记 S01E01.mp4"#,
            r#"{"title": "海盗战记", "season": 1, "episode_index": 1, "extname": ".mp4"}"#,
        );
    }

    #[test]
    fn test_non_fansub_tc_subtitle() {
        test_torrent_ep_parser(
            r#"海盗战记 S01E08.zh-tw.ass"#,
            r#"{"media": { "title": "海盗战记", "season": 1, "episode_index": 8, "extname": ".ass" }, "lang": "zh-tw"}"#,
        );
    }

    #[test]
    fn test_non_fansub_sc_subtitle() {
        test_torrent_ep_parser(
            r#"海盗战记 S01E01.SC.srt"#,
            r#"{ "media": { "title": "海盗战记", "season": 1, "episode_index": 1, "extname": ".srt" }, "lang": "zh" }"#,
        )
    }

    #[test]
    fn test_non_fansub_media_with_season_zero() {
        test_torrent_ep_parser(
            r#"水星的魔女(2022) S00E19.mp4"#,
            r#"{"fansub": null,"title": "水星的魔女(2022)","season": 0,"episode_index": 19,"extname": ".mp4"}"#,
        )
    }

    #[test]
    fn test_shimian_fansub_media() {
        test_torrent_ep_parser(
            r#"【失眠搬运组】放学后失眠的你-Kimi wa Houkago Insomnia - 06 [bilibili - 1080p AVC1 CHS-JP].mp4"#,
            r#"{"fansub": "失眠搬运组","title": "放学后失眠的你-Kimi wa Houkago Insomnia","season": 1,"episode_index": 6,"extname": ".mp4"}"#,
        )
    }

    pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) {
        let extname = Path::new(raw_name)
            .extension()
            .map(|e| format!(".{}", e))
            .unwrap_or_default()
            .to_lowercase();

        if extname == ".srt" || extname == ".ass" {
            let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
            let found_raw =
                parse_episode_subtitle_meta_from_torrent(Path::new(raw_name), None, None);
            let found = found_raw.as_ref().ok().cloned();

            if expected != found {
                if found_raw.is_ok() {
                    println!(
                        "expected {} and found {} are not equal",
                        serde_json::to_string_pretty(&expected).unwrap(),
                        serde_json::to_string_pretty(&found).unwrap()
                    )
                } else {
                    println!(
                        "expected {} and found {:#?} are not equal",
                        serde_json::to_string_pretty(&expected).unwrap(),
                        found_raw
                    )
                }
            }
            assert_eq!(expected, found);
        } else {
            let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
            let found_raw = parse_episode_media_meta_from_torrent(Path::new(raw_name), None, None);
            let found = found_raw.as_ref().ok().cloned();

            if expected != found {
                if found_raw.is_ok() {
                    println!(
                        "expected {} and found {} are not equal",
                        serde_json::to_string_pretty(&expected).unwrap(),
                        serde_json::to_string_pretty(&found).unwrap()
                    )
                } else {
                    println!(
                        "expected {} and found {:#?} are not equal",
                        serde_json::to_string_pretty(&expected).unwrap(),
                        found_raw
                    )
                }
            }
            assert_eq!(expected, found);
        }
    }
}
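A sketch of driving the torrent-side parser with an explicit season override (crate path assumed to be recorder, as above; the file name is the Sakurato test fixture):

fn demo() -> eyre::Result<()> {
    use quirks_path::Path;
    use recorder::extract::torrent::parse_episode_media_meta_from_torrent;

    let meta = parse_episode_media_meta_from_torrent(
        Path::new("[Sakurato] Tonikaku Kawaii S2 [03][AVC-8bit 1080p AAC][CHS].mp4"),
        None,    // no separate torrent display name
        Some(2), // pin the season instead of inferring it from "S2"
    )?;
    // per the test above: title "Tonikaku Kawaii", episode 3, extname ".mp4"
    println!("{meta:?}");
    Ok(())
}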
24
apps/recorder/src/fetch/bytes.rs
Normal file
24
apps/recorder/src/fetch/bytes.rs
Normal file
@@ -0,0 +1,24 @@
use bytes::Bytes;
use reqwest::IntoUrl;

use super::{core::DEFAULT_HTTP_CLIENT_USER_AGENT, HttpClient};

pub async fn download_bytes<T: IntoUrl>(url: T) -> eyre::Result<Bytes> {
    let request_client = reqwest::Client::builder()
        .user_agent(DEFAULT_HTTP_CLIENT_USER_AGENT)
        .build()?;
    let bytes = request_client.get(url).send().await?.bytes().await?;
    Ok(bytes)
}

pub async fn download_bytes_with_client<T: IntoUrl>(
    client: Option<&HttpClient>,
    url: T,
) -> eyre::Result<Bytes> {
    if let Some(client) = client {
        let bytes = client.get(url).send().await?.bytes().await?;
        Ok(bytes)
    } else {
        download_bytes(url).await
    }
}
96
apps/recorder/src/fetch/client.rs
Normal file
96
apps/recorder/src/fetch/client.rs
Normal file
@@ -0,0 +1,96 @@
use std::{ops::Deref, time::Duration};

use axum::http::Extensions;
use leaky_bucket::RateLimiter;
use reqwest::{ClientBuilder, Request, Response};
use reqwest_middleware::{
    ClientBuilder as ClientWithMiddlewareBuilder, ClientWithMiddleware, Next,
};
use reqwest_retry::{policies::ExponentialBackoff, RetryTransientMiddleware};
use reqwest_tracing::TracingMiddleware;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;

use super::DEFAULT_HTTP_CLIENT_USER_AGENT;

#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct HttpClientConfig {
    pub exponential_backoff_max_retries: Option<u32>,
    pub leaky_bucket_max_tokens: Option<u32>,
    pub leaky_bucket_initial_tokens: Option<u32>,
    pub leaky_bucket_refill_tokens: Option<u32>,
    #[serde_as(as = "Option<serde_with::DurationMilliSeconds>")]
    pub leaky_bucket_refill_interval: Option<Duration>,
    pub user_agent: Option<String>,
}

pub struct HttpClient {
    client: ClientWithMiddleware,
}

impl Deref for HttpClient {
    type Target = ClientWithMiddleware;

    fn deref(&self) -> &Self::Target {
        &self.client
    }
}

pub struct RateLimiterMiddleware {
    rate_limiter: RateLimiter,
}

#[async_trait::async_trait]
impl reqwest_middleware::Middleware for RateLimiterMiddleware {
    async fn handle(
        &self,
        req: Request,
        extensions: &'_ mut Extensions,
        next: Next<'_>,
    ) -> reqwest_middleware::Result<Response> {
        self.rate_limiter.acquire_one().await;
        next.run(req, extensions).await
    }
}

impl HttpClient {
    pub fn new(config: Option<HttpClientConfig>) -> reqwest::Result<Self> {
        let mut config = config.unwrap_or_default();
        let retry_policy = ExponentialBackoff::builder()
            .build_with_max_retries(config.exponential_backoff_max_retries.take().unwrap_or(3));
        let rate_limiter = RateLimiter::builder()
            .max(config.leaky_bucket_max_tokens.take().unwrap_or(3) as usize)
            .initial(
                config
                    .leaky_bucket_initial_tokens
                    .take()
                    .unwrap_or_default() as usize,
            )
            .refill(config.leaky_bucket_refill_tokens.take().unwrap_or(1) as usize)
            .interval(
                config
                    .leaky_bucket_refill_interval
                    .take()
                    .unwrap_or_else(|| Duration::from_millis(500)),
            )
            .build();

        let client = ClientBuilder::new()
            .user_agent(
                config
                    .user_agent
                    .take()
                    .unwrap_or_else(|| DEFAULT_HTTP_CLIENT_USER_AGENT.to_owned()),
            )
            .build()?;

        Ok(Self {
            client: ClientWithMiddlewareBuilder::new(client)
                .with(TracingMiddleware::default())
                .with(RateLimiterMiddleware { rate_limiter })
                .with(RetryTransientMiddleware::new_with_policy(retry_policy))
                .build(),
        })
    }
}
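Because HttpClient derefs to ClientWithMiddleware, a configured client is used exactly like a plain reqwest client, with tracing, rate limiting, and retry applied transparently. A sketch (crate path and field values are illustrative; unset fields fall back to the defaults in HttpClient::new — 3 retries, a 3-token bucket refilled every 500 ms, and the wget user agent):

async fn demo() -> eyre::Result<()> {
    use std::time::Duration;
    use recorder::fetch::{HttpClient, HttpClientConfig};

    let client = HttpClient::new(Some(HttpClientConfig {
        exponential_backoff_max_retries: Some(5),
        leaky_bucket_max_tokens: Some(2),
        leaky_bucket_refill_tokens: Some(1),
        leaky_bucket_refill_interval: Some(Duration::from_secs(1)),
        ..Default::default()
    }))?;
    // the Deref impl lets the middleware stack be driven through the reqwest API
    let body = client.get("https://mikanani.me").send().await?.text().await?;
    println!("{} bytes", body.len());
    Ok(())
}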
1
apps/recorder/src/fetch/core.rs
Normal file
1
apps/recorder/src/fetch/core.rs
Normal file
@@ -0,0 +1 @@
pub const DEFAULT_HTTP_CLIENT_USER_AGENT: &str = "Wget/1.13.4 (linux-gnu)";
23
apps/recorder/src/fetch/html.rs
Normal file
23
apps/recorder/src/fetch/html.rs
Normal file
@@ -0,0 +1,23 @@
use reqwest::IntoUrl;

use super::{core::DEFAULT_HTTP_CLIENT_USER_AGENT, HttpClient};

pub async fn download_html<U: IntoUrl>(url: U) -> eyre::Result<String> {
    let request_client = reqwest::Client::builder()
        .user_agent(DEFAULT_HTTP_CLIENT_USER_AGENT)
        .build()?;
    let content = request_client.get(url).send().await?.text().await?;
    Ok(content)
}

pub async fn download_html_with_client<T: IntoUrl>(
    client: Option<&HttpClient>,
    url: T,
) -> eyre::Result<String> {
    if let Some(client) = client {
        let content = client.get(url).send().await?.text().await?;
        Ok(content)
    } else {
        download_html(url).await
    }
}
18
apps/recorder/src/fetch/image.rs
Normal file
18
apps/recorder/src/fetch/image.rs
Normal file
@@ -0,0 +1,18 @@
use bytes::Bytes;
use reqwest::IntoUrl;

use super::{
    bytes::{download_bytes, download_bytes_with_client},
    HttpClient,
};

pub async fn download_image<U: IntoUrl>(url: U) -> eyre::Result<Bytes> {
    download_bytes(url).await
}

pub async fn download_image_with_client<T: IntoUrl>(
    client: Option<&HttpClient>,
    url: T,
) -> eyre::Result<Bytes> {
    download_bytes_with_client(client, url).await
}
11
apps/recorder/src/fetch/mod.rs
Normal file
11
apps/recorder/src/fetch/mod.rs
Normal file
@@ -0,0 +1,11 @@
pub mod bytes;
pub mod client;
pub mod core;
pub mod html;
pub mod image;

pub use core::DEFAULT_HTTP_CLIENT_USER_AGENT;

pub use bytes::download_bytes;
pub use client::{HttpClient, HttpClientConfig};
pub use image::download_image;
13
apps/recorder/src/lib.rs
Normal file
13
apps/recorder/src/lib.rs
Normal file
@@ -0,0 +1,13 @@
#![feature(duration_constructors, assert_matches, unboxed_closures)]

pub mod app;
pub mod config;
pub mod controllers;
pub mod dal;
pub mod extract;
pub mod fetch;
pub mod migrations;
pub mod models;
pub mod tasks;
pub mod views;
pub mod workers;
373
apps/recorder/src/migrations/defs.rs
Normal file
373
apps/recorder/src/migrations/defs.rs
Normal file
@@ -0,0 +1,373 @@
use std::collections::HashSet;

use sea_orm::{DeriveIden, Statement};
use sea_orm_migration::prelude::{extension::postgres::IntoTypeRef, *};

use crate::migrations::extension::postgres::Type;

#[derive(DeriveIden)]
pub enum GeneralIds {
    CreatedAt,
    UpdatedAt,
}

#[derive(DeriveIden)]
pub enum Subscribers {
    Table,
    Id,
    Pid,
    DisplayName,
    DownloaderId,
    BangumiConf,
}

#[derive(DeriveIden)]
pub enum Subscriptions {
    Table,
    Id,
    DisplayName,
    SubscriberId,
    Category,
    SourceUrl,
    Enabled,
}

#[derive(DeriveIden)]
pub enum Bangumi {
    Table,
    Id,
    MikanBangumiId,
    DisplayName,
    SubscriptionId,
    SubscriberId,
    RawName,
    Season,
    SeasonRaw,
    Fansub,
    MikanFansubId,
    Filter,
    RssLink,
    PosterLink,
    SavePath,
    Deleted,
    Homepage,
    Extra,
}

#[derive(DeriveIden)]
pub enum Episodes {
    Table,
    Id,
    MikanEpisodeId,
    RawName,
    DisplayName,
    BangumiId,
    SubscriptionId,
    SubscriberId,
    DownloadId,
    SavePath,
    Resolution,
    Season,
    SeasonRaw,
    Fansub,
    PosterLink,
    EpisodeIndex,
    Homepage,
    Subtitle,
    Deleted,
    Source,
    Extra,
}

#[derive(DeriveIden)]
pub enum Downloads {
    Table,
    Id,
    OriginalName,
    DisplayName,
    SubscriptionId,
    Status,
    CurrSize,
    AllSize,
    Mime,
    Url,
    Homepage,
    SavePath,
}

#[derive(DeriveIden)]
pub enum Downloaders {
    Table,
    Id,
    Category,
    Endpoint,
    Password,
    Username,
    SubscriberId,
    SavePath,
}

macro_rules! create_postgres_enum_for_active_enum {
    ($manager: expr, $active_enum: expr, $($enum_value:expr),+) => {
        {
            use sea_orm::ActiveEnum;
            let values = [$($enum_value,)+].map(|v| ActiveEnum::to_value(&v));
            ($manager).create_postgres_enum_for_active_enum($active_enum, values)
        }
    };
}

#[async_trait::async_trait]
pub trait CustomSchemaManagerExt {
    async fn create_postgres_auto_update_ts_fn(&self, col_name: &str) -> Result<(), DbErr>;
    async fn create_postgres_auto_update_ts_fn_for_col<C: IntoIden + 'static + Send>(
        &self,
        col: C,
    ) -> Result<(), DbErr> {
        let column_ident = col.into_iden();
        self.create_postgres_auto_update_ts_fn(&column_ident.to_string())
            .await?;
        Ok(())
    }

    async fn create_postgres_auto_update_ts_trigger(
        &self,
        tab_name: &str,
        col_name: &str,
    ) -> Result<(), DbErr>;

    async fn create_postgres_auto_update_ts_trigger_for_col<
        T: IntoIden + 'static + Send,
        C: IntoIden + 'static + Send,
    >(
        &self,
        tab: T,
        col: C,
    ) -> Result<(), DbErr> {
        let column_ident = col.into_iden();
        let table_ident = tab.into_iden();
        self.create_postgres_auto_update_ts_trigger(
            &table_ident.to_string(),
            &column_ident.to_string(),
        )
        .await?;
        Ok(())
    }

    async fn drop_postgres_auto_update_ts_fn(&self, col_name: &str) -> Result<(), DbErr>;

    async fn drop_postgres_auto_update_ts_fn_for_col<C: IntoIden + Send>(
        &self,
        col: C,
    ) -> Result<(), DbErr> {
        let column_ident = col.into_iden();
        self.drop_postgres_auto_update_ts_fn(&column_ident.to_string())
            .await?;
        Ok(())
    }

    async fn drop_postgres_auto_update_ts_trigger(
        &self,
        tab_name: &str,
        col_name: &str,
    ) -> Result<(), DbErr>;

    async fn drop_postgres_auto_update_ts_trigger_for_col<
        T: IntoIden + 'static + Send,
        C: IntoIden + 'static + Send,
    >(
        &self,
        tab: T,
        col: C,
    ) -> Result<(), DbErr> {
        let column_ident = col.into_iden();
        let table_ident = tab.into_iden();
        self.drop_postgres_auto_update_ts_trigger(
            &table_ident.to_string(),
            &column_ident.to_string(),
        )
        .await?;
        Ok(())
    }

    async fn create_postgres_enum_for_active_enum<
        E: IntoTypeRef + IntoIden + Send + Clone,
        I: IntoIterator<Item = String> + Send,
    >(
        &self,
        enum_name: E,
        values: I,
    ) -> Result<(), DbErr>;

    async fn add_postgres_enum_values_for_active_enum<
        E: IntoTypeRef + IntoIden + Send + Clone,
        I: IntoIterator<Item = String> + Send,
    >(
        &self,
        enum_name: E,
        values: I,
    ) -> Result<(), DbErr>;

    async fn drop_postgres_enum_for_active_enum<E: IntoTypeRef + IntoIden + Send + Clone>(
        &self,
        enum_name: E,
    ) -> Result<(), DbErr>;

    async fn if_postgres_enum_exists<E: IntoTypeRef + IntoIden + Send + Clone>(
        &self,
        enum_name: E,
    ) -> Result<bool, DbErr>;

    async fn get_postgres_enum_values<E: IntoTypeRef + IntoIden + Send + Clone>(
        &self,
        enum_name: E,
    ) -> Result<HashSet<String>, DbErr>;
}

#[async_trait::async_trait]
impl CustomSchemaManagerExt for SchemaManager<'_> {
    async fn create_postgres_auto_update_ts_fn(&self, col_name: &str) -> Result<(), DbErr> {
        let sql = format!(
            "CREATE OR REPLACE FUNCTION update_{col_name}_column() RETURNS TRIGGER AS $$ BEGIN \
             NEW.{col_name} = current_timestamp; RETURN NEW; END; $$ language 'plpgsql';"
        );

        self.get_connection()
            .execute(Statement::from_string(self.get_database_backend(), sql))
            .await?;

        Ok(())
    }

    async fn create_postgres_auto_update_ts_trigger(
        &self,
        tab_name: &str,
        col_name: &str,
    ) -> Result<(), DbErr> {
        let sql = format!(
            "CREATE OR REPLACE TRIGGER update_{tab_name}_{col_name}_column_trigger BEFORE UPDATE \
             ON {tab_name} FOR EACH ROW EXECUTE PROCEDURE update_{col_name}_column();"
        );
        self.get_connection()
            .execute(Statement::from_string(self.get_database_backend(), sql))
            .await?;
        Ok(())
    }

    async fn drop_postgres_auto_update_ts_fn(&self, col_name: &str) -> Result<(), DbErr> {
        let sql = format!("DROP FUNCTION IF EXISTS update_{col_name}_column();");
        self.get_connection()
            .execute(Statement::from_string(self.get_database_backend(), sql))
            .await?;
        Ok(())
    }

    async fn drop_postgres_auto_update_ts_trigger(
        &self,
        tab_name: &str,
        col_name: &str,
    ) -> Result<(), DbErr> {
        let sql = format!(
            "DROP TRIGGER IF EXISTS update_{tab_name}_{col_name}_column_trigger ON {tab_name};"
        );
        self.get_connection()
            .execute(Statement::from_string(self.get_database_backend(), sql))
            .await?;
        Ok(())
    }

    async fn create_postgres_enum_for_active_enum<
        E: IntoTypeRef + IntoIden + Send + Clone,
        I: IntoIterator<Item = String> + Send,
    >(
        &self,
        enum_name: E,
        values: I,
    ) -> Result<(), DbErr> {
        let existed = self.if_postgres_enum_exists(enum_name.clone()).await?;
        if !existed {
            let idents = values.into_iter().map(Alias::new).collect::<Vec<_>>();
            self.create_type(Type::create().as_enum(enum_name).values(idents).to_owned())
                .await?;
        } else {
            self.add_postgres_enum_values_for_active_enum(enum_name, values)
                .await?;
        }
        Ok(())
    }

    async fn add_postgres_enum_values_for_active_enum<
        E: IntoTypeRef + IntoIden + Send + Clone,
        I: IntoIterator<Item = String> + Send,
    >(
        &self,
        enum_name: E,
        values: I,
    ) -> Result<(), DbErr> {
        let exists_values = self.get_postgres_enum_values(enum_name.clone()).await?;
        let to_add_values = values
            .into_iter()
            .filter(|v| !exists_values.contains(v as &str))
            .collect::<Vec<_>>();

        if to_add_values.is_empty() {
            return Ok(());
        }

        let mut type_alter = Type::alter().name(enum_name);

        for v in to_add_values {
            type_alter = type_alter.add_value(Alias::new(v));
        }

        self.alter_type(type_alter.to_owned()).await?;
        Ok(())
    }

    async fn drop_postgres_enum_for_active_enum<E: IntoTypeRef + IntoIden + Send + Clone>(
        &self,
        enum_name: E,
    ) -> Result<(), DbErr> {
        if self.if_postgres_enum_exists(enum_name.clone()).await? {
            self.drop_type(Type::drop().name(enum_name).to_owned())
                .await?;
        }
        Ok(())
    }

    async fn if_postgres_enum_exists<E: IntoTypeRef + IntoIden + Send + Clone>(
        &self,
        enum_name: E,
    ) -> Result<bool, DbErr> {
        let enum_name: String = enum_name.into_iden().to_string();
        let sql = format!("SELECT 1 FROM pg_type WHERE typname = '{enum_name}'");
        let result = self
            .get_connection()
            .query_one(Statement::from_string(self.get_database_backend(), sql))
            .await?;
        Ok(result.is_some())
    }

    async fn get_postgres_enum_values<E: IntoTypeRef + IntoIden + Send + Clone>(
        &self,
        enum_name: E,
    ) -> Result<HashSet<String>, DbErr> {
        let enum_name: String = enum_name.into_iden().to_string();
        let sql = format!(
            "SELECT pg_enum.enumlabel AS enumlabel FROM pg_type JOIN pg_enum ON pg_enum.enumtypid \
             = pg_type.oid WHERE pg_type.typname = '{enum_name}';"
        );

        let results = self
            .get_connection()
            .query_all(Statement::from_string(self.get_database_backend(), sql))
            .await?;

        let mut items = HashSet::new();
        for r in results {
            items.insert(r.try_get::<String>("", "enumlabel")?);
        }

        Ok(items)
    }
}
284
apps/recorder/src/migrations/m20220101_000001_init.rs
Normal file
284
apps/recorder/src/migrations/m20220101_000001_init.rs
Normal file
@@ -0,0 +1,284 @@
|
||||
use loco_rs::schema::{jsonb_null, table_auto};
use sea_orm_migration::{prelude::*, schema::*};

use super::defs::{
    Bangumi, CustomSchemaManagerExt, Episodes, GeneralIds, Subscribers, Subscriptions,
};
use crate::models::{subscribers::ROOT_SUBSCRIBER, subscriptions};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_postgres_auto_update_ts_fn_for_col(GeneralIds::UpdatedAt)
            .await?;

        manager
            .create_table(
                table_auto(Subscribers::Table)
                    .col(pk_auto(Subscribers::Id))
                    .col(string_len_uniq(Subscribers::Pid, 64))
                    .col(string(Subscribers::DisplayName))
                    .col(jsonb_null(Subscribers::BangumiConf))
                    .to_owned(),
            )
            .await?;

        manager
            .create_postgres_auto_update_ts_trigger_for_col(
                Subscribers::Table,
                GeneralIds::UpdatedAt,
            )
            .await?;

        let insert = Query::insert()
            .into_table(Subscribers::Table)
            .columns([Subscribers::Pid, Subscribers::DisplayName])
            .values_panic([ROOT_SUBSCRIBER.into(), ROOT_SUBSCRIBER.into()])
            .to_owned();
        manager.exec_stmt(insert).await?;

        create_postgres_enum_for_active_enum!(
            manager,
            subscriptions::SubscriptionCategoryEnum,
            subscriptions::SubscriptionCategory::Mikan,
            subscriptions::SubscriptionCategory::Manual
        )
        .await?;

        manager
            .create_table(
                table_auto(Subscriptions::Table)
                    .col(pk_auto(Subscriptions::Id))
                    .col(string(Subscriptions::DisplayName))
                    .col(integer(Subscriptions::SubscriberId))
                    .col(text(Subscriptions::SourceUrl))
                    .col(boolean(Subscriptions::Enabled))
                    .col(enumeration(
                        Subscriptions::Category,
                        subscriptions::SubscriptionCategoryEnum,
                        subscriptions::SubscriptionCategory::iden_values(),
                    ))
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_subscriptions_subscriber_id")
                            .from(Subscriptions::Table, Subscriptions::SubscriberId)
                            .to(Subscribers::Table, Subscribers::Id)
                            .on_update(ForeignKeyAction::Restrict)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_postgres_auto_update_ts_trigger_for_col(
                Subscriptions::Table,
                GeneralIds::UpdatedAt,
            )
            .await?;

        manager
            .create_table(
                table_auto(Bangumi::Table)
                    .col(pk_auto(Bangumi::Id))
                    .col(text_null(Bangumi::MikanBangumiId))
                    .col(integer(Bangumi::SubscriptionId))
                    .col(integer(Bangumi::SubscriberId))
                    .col(text(Bangumi::DisplayName))
                    .col(text(Bangumi::RawName))
                    .col(integer(Bangumi::Season))
                    .col(text_null(Bangumi::SeasonRaw))
                    .col(text_null(Bangumi::Fansub))
                    .col(text_null(Bangumi::MikanFansubId))
                    .col(jsonb_null(Bangumi::Filter))
                    .col(text_null(Bangumi::RssLink))
                    .col(text_null(Bangumi::PosterLink))
                    .col(text_null(Bangumi::SavePath))
                    .col(boolean(Bangumi::Deleted).default(false))
                    .col(text_null(Bangumi::Homepage))
                    .col(jsonb_null(Bangumi::Extra))
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_bangumi_subscription_id")
                            .from(Bangumi::Table, Bangumi::SubscriptionId)
                            .to(Subscriptions::Table, Subscriptions::Id)
                            .on_update(ForeignKeyAction::Restrict)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_bangumi_subscriber_id")
                            .from(Bangumi::Table, Bangumi::SubscriberId)
                            .to(Subscribers::Table, Subscribers::Id)
                            .on_update(ForeignKeyAction::Restrict)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_bangumi_mikan_bangumi_id")
                    .table(Bangumi::Table)
                    .col(Bangumi::MikanBangumiId)
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_bangumi_mikan_fansub_id")
                    .table(Bangumi::Table)
                    .col(Bangumi::MikanFansubId)
                    .to_owned(),
            )
            .await?;

        manager
            .create_postgres_auto_update_ts_trigger_for_col(Bangumi::Table, GeneralIds::UpdatedAt)
            .await?;

        manager
            .create_table(
                table_auto(Episodes::Table)
                    .col(pk_auto(Episodes::Id))
                    .col(text_null(Episodes::MikanEpisodeId))
                    .col(text(Episodes::RawName))
                    .col(text(Episodes::DisplayName))
                    .col(integer(Episodes::BangumiId))
                    .col(integer(Episodes::SubscriptionId))
                    .col(integer(Episodes::SubscriberId))
                    .col(text_null(Episodes::SavePath))
                    .col(text_null(Episodes::Resolution))
                    .col(integer(Episodes::Season))
                    .col(text_null(Episodes::SeasonRaw))
                    .col(text_null(Episodes::Fansub))
                    .col(text_null(Episodes::PosterLink))
                    .col(integer(Episodes::EpisodeIndex))
                    .col(text_null(Episodes::Homepage))
                    .col(text_null(Episodes::Subtitle))
                    .col(boolean(Episodes::Deleted).default(false))
                    .col(text_null(Episodes::Source))
                    .col(jsonb_null(Episodes::Extra))
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_episodes_subscription_id")
                            .from(Episodes::Table, Episodes::SubscriptionId)
                            .to(Subscriptions::Table, Subscriptions::Id)
                            .on_update(ForeignKeyAction::Restrict)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_episodes_bangumi_id")
                            .from(Episodes::Table, Episodes::BangumiId)
                            .to(Bangumi::Table, Bangumi::Id)
                            .on_update(ForeignKeyAction::Restrict)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_episodes_subscriber_id")
                            .from(Episodes::Table, Episodes::SubscriberId)
                            .to(Subscribers::Table, Subscribers::Id)
                            .on_update(ForeignKeyAction::Restrict)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_episodes_mikan_episode_id")
                    .table(Episodes::Table)
                    .col(Episodes::MikanEpisodeId)
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_episodes_bangumi_id_mikan_episode_id")
                    .table(Episodes::Table)
                    .col(Episodes::BangumiId)
                    .col(Episodes::MikanEpisodeId)
                    .unique()
                    .to_owned(),
            )
            .await?;

        manager
            .create_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt)
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Drop each auto-update trigger before its table: `DROP TRIGGER ... ON
        // <table>` fails in Postgres once the table itself is gone.
        manager
            .drop_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt)
            .await?;

        manager
            .drop_table(Table::drop().table(Episodes::Table).to_owned())
            .await?;

        manager
            .drop_postgres_auto_update_ts_trigger_for_col(Bangumi::Table, GeneralIds::UpdatedAt)
            .await?;

        manager
            .drop_table(Table::drop().table(Bangumi::Table).to_owned())
            .await?;

        manager
            .drop_postgres_auto_update_ts_trigger_for_col(
                Subscriptions::Table,
                GeneralIds::UpdatedAt,
            )
            .await?;

        manager
            .drop_table(Table::drop().table(Subscriptions::Table).to_owned())
            .await?;

        manager
            .drop_postgres_auto_update_ts_trigger_for_col(Subscribers::Table, GeneralIds::UpdatedAt)
            .await?;

        manager
            .drop_table(Table::drop().table(Subscribers::Table).to_owned())
            .await?;

        manager
            .drop_postgres_enum_for_active_enum(subscriptions::SubscriptionCategoryEnum)
            .await?;

        manager
            .drop_postgres_auto_update_ts_fn_for_col(GeneralIds::UpdatedAt)
            .await?;

        Ok(())
    }
}
128
apps/recorder/src/migrations/m20240224_082543_add_downloads.rs
Normal file
@@ -0,0 +1,128 @@
use loco_rs::schema::table_auto;
use sea_orm_migration::{prelude::*, schema::*};

use super::defs::*;
use crate::models::prelude::{
    downloads::{DownloadMimeEnum, DownloadStatusEnum},
    DownloadMime, DownloadStatus,
};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        create_postgres_enum_for_active_enum!(
            manager,
            DownloadMimeEnum,
            DownloadMime::BitTorrent,
            DownloadMime::OctetStream
        )
        .await?;

        create_postgres_enum_for_active_enum!(
            manager,
            DownloadStatusEnum,
            DownloadStatus::Downloading,
            DownloadStatus::Paused,
            DownloadStatus::Pending,
            DownloadStatus::Completed,
            DownloadStatus::Failed,
            DownloadStatus::Deleted
        )
        .await?;

        manager
            .create_table(
                table_auto(Downloads::Table)
                    .col(pk_auto(Downloads::Id))
                    .col(string(Downloads::OriginalName))
                    .col(string(Downloads::DisplayName))
                    .col(integer(Downloads::SubscriptionId))
                    .col(enumeration(
                        Downloads::Status,
                        DownloadStatusEnum,
                        // The status column takes its variants from
                        // `DownloadStatus`, not `DownloadMime`.
                        DownloadStatus::iden_values(),
                    ))
                    .col(enumeration(
                        Downloads::Mime,
                        DownloadMimeEnum,
                        DownloadMime::iden_values(),
                    ))
                    .col(big_unsigned(Downloads::AllSize))
                    .col(big_unsigned(Downloads::CurrSize))
                    .col(text(Downloads::Url))
                    .col(text_null(Downloads::Homepage))
                    .col(text_null(Downloads::SavePath))
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_downloads_subscription_id")
                            .from(Downloads::Table, Downloads::SubscriptionId)
                            .to(Subscriptions::Table, Subscriptions::Id)
                            .on_update(ForeignKeyAction::Restrict)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_downloads_url")
                    .table(Downloads::Table)
                    .col(Downloads::Url)
                    .to_owned(),
            )
            .await?;

        manager
            .alter_table(
                Table::alter()
                    .table(Episodes::Table)
                    .add_column_if_not_exists(integer_null(Episodes::DownloadId))
                    .add_foreign_key(
                        TableForeignKey::new()
                            .name("fk_episodes_download_id")
                            .from_tbl(Episodes::Table)
                            .from_col(Episodes::DownloadId)
                            .to_tbl(Downloads::Table)
                            .to_col(Downloads::Id)
                            .on_update(ForeignKeyAction::Restrict)
                            .on_delete(ForeignKeyAction::SetNull),
                    )
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Episodes::Table)
                    .drop_foreign_key(Alias::new("fk_episodes_download_id"))
                    .drop_column(Episodes::DownloadId)
                    .to_owned(),
            )
            .await?;

        manager
            .drop_table(Table::drop().table(Downloads::Table).to_owned())
            .await?;

        manager
            .drop_postgres_enum_for_active_enum(DownloadMimeEnum)
            .await?;

        manager
            .drop_postgres_enum_for_active_enum(DownloadStatusEnum)
            .await?;

        Ok(())
    }
}
102
apps/recorder/src/migrations/m20240225_060853_subscriber_add_downloader.rs
Normal file
@@ -0,0 +1,102 @@
use loco_rs::schema::table_auto;
use sea_orm_migration::{prelude::*, schema::*};

use crate::{
    migrations::defs::{CustomSchemaManagerExt, Downloaders, GeneralIds, Subscribers},
    models::{downloaders::DownloaderCategoryEnum, prelude::DownloaderCategory},
};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        create_postgres_enum_for_active_enum!(
            manager,
            DownloaderCategoryEnum,
            DownloaderCategory::QBittorrent
        )
        .await?;

        manager
            .create_table(
                table_auto(Downloaders::Table)
                    .col(pk_auto(Downloaders::Id))
                    .col(text(Downloaders::Endpoint))
                    .col(string_null(Downloaders::Username))
                    .col(string_null(Downloaders::Password))
                    .col(enumeration(
                        Downloaders::Category,
                        DownloaderCategoryEnum,
                        DownloaderCategory::iden_values(),
                    ))
                    .col(text(Downloaders::SavePath))
                    .col(integer(Downloaders::SubscriberId))
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_downloader_subscriber_id")
                            .from_tbl(Downloaders::Table)
                            .from_col(Downloaders::SubscriberId)
                            .to_tbl(Subscribers::Table)
                            .to_col(Subscribers::Id)
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Restrict),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_postgres_auto_update_ts_trigger_for_col(
                Downloaders::Table,
                GeneralIds::UpdatedAt,
            )
            .await?;

        manager
            .alter_table(
                Table::alter()
                    .table(Subscribers::Table)
                    .add_column_if_not_exists(integer_null(Subscribers::DownloaderId))
                    .add_foreign_key(
                        TableForeignKey::new()
                            .name("fk_subscribers_downloader_id")
                            .from_tbl(Subscribers::Table)
                            .from_col(Subscribers::DownloaderId)
                            .to_tbl(Downloaders::Table)
                            .to_col(Downloaders::Id)
                            .on_delete(ForeignKeyAction::SetNull)
                            .on_update(ForeignKeyAction::Restrict),
                    )
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Subscribers::Table)
                    .drop_foreign_key(Alias::new("fk_subscribers_downloader_id"))
                    .drop_column(Subscribers::DownloaderId)
                    .to_owned(),
            )
            .await?;

        manager
            .drop_postgres_auto_update_ts_trigger_for_col(Downloaders::Table, GeneralIds::UpdatedAt)
            .await?;

        manager
            .drop_table(Table::drop().table(Downloaders::Table).to_owned())
            .await?;

        manager
            .drop_postgres_enum_for_active_enum(DownloaderCategoryEnum)
            .await?;

        Ok(())
    }
}
20
apps/recorder/src/migrations/mod.rs
Normal file
@@ -0,0 +1,20 @@
pub use sea_orm_migration::prelude::*;

#[macro_use]
pub mod defs;
pub mod m20220101_000001_init;
pub mod m20240224_082543_add_downloads;
pub mod m20240225_060853_subscriber_add_downloader;

pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![
            Box::new(m20220101_000001_init::Migration),
            Box::new(m20240224_082543_add_downloads::Migration),
            Box::new(m20240225_060853_subscriber_add_downloader::Migration),
        ]
    }
}
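// A minimal sketch of applying these migrations outside the loco runtime,
// assuming a placeholder connection URL; `MigratorTrait` comes from the
// sea-orm-migration prelude re-exported above:
//
//     let db = sea_orm::Database::connect("postgres://localhost/konobangu").await?;
//     Migrator::up(&db, None).await?;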
45
apps/recorder/src/models/bangumi.rs
Normal file
@@ -0,0 +1,45 @@
use loco_rs::app::AppContext;
use sea_orm::{entity::prelude::*, ActiveValue, TryIntoModel};

pub use super::entities::bangumi::*;

impl Model {
    pub async fn get_or_insert_from_mikan<F>(
        ctx: &AppContext,
        subscriber_id: i32,
        subscription_id: i32,
        mikan_bangumi_id: String,
        mikan_fansub_id: String,
        f: F,
    ) -> eyre::Result<Model>
    where
        F: AsyncFnOnce(&mut ActiveModel) -> eyre::Result<()>,
    {
        let db = &ctx.db;
        if let Some(existed) = Entity::find()
            .filter(
                Column::MikanBangumiId
                    .eq(Some(mikan_bangumi_id.clone()))
                    .and(Column::MikanFansubId.eq(Some(mikan_fansub_id.clone()))),
            )
            .one(db)
            .await?
        {
            Ok(existed)
        } else {
            let mut bgm = ActiveModel {
                mikan_bangumi_id: ActiveValue::Set(Some(mikan_bangumi_id)),
                mikan_fansub_id: ActiveValue::Set(Some(mikan_fansub_id)),
                subscription_id: ActiveValue::Set(subscription_id),
                subscriber_id: ActiveValue::Set(subscriber_id),
                ..Default::default()
            };
            f(&mut bgm).await?;
            let bgm: Model = bgm.save(db).await?.try_into_model()?;
            Ok(bgm)
        }
    }
}

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
17
apps/recorder/src/models/downloaders.rs
Normal file
@@ -0,0 +1,17 @@
use sea_orm::prelude::*;
use url::Url;

pub use crate::models::entities::downloaders::*;

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

impl Model {
    pub fn get_endpoint(&self) -> String {
        self.endpoint.clone()
    }

    pub fn endpoint_url(&self) -> Result<Url, url::ParseError> {
        Url::parse(&self.endpoint)
    }
}
27
apps/recorder/src/models/downloads.rs
Normal file
@@ -0,0 +1,27 @@
use sea_orm::{prelude::*, ActiveValue};

use crate::extract::mikan::MikanRssItem;
pub use crate::models::entities::downloads::*;

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

impl ActiveModel {
    pub fn from_mikan_rss_item(m: MikanRssItem, subscription_id: i32) -> Self {
        Self {
            original_name: ActiveValue::Set(m.title.clone()),
            display_name: ActiveValue::Set(m.title),
            subscription_id: ActiveValue::Set(subscription_id),
            status: ActiveValue::Set(DownloadStatus::Pending),
            mime: ActiveValue::Set(DownloadMime::BitTorrent),
            url: ActiveValue::Set(m.url.to_string()),
            // A fresh download starts at zero bytes whenever a total size is known.
            curr_size: ActiveValue::Set(m.content_length.as_ref().map(|_| 0)),
            all_size: ActiveValue::Set(m.content_length),
            homepage: ActiveValue::Set(Some(m.homepage.to_string())),
            ..Default::default()
        }
    }
}

impl Model {}
80
apps/recorder/src/models/entities/bangumi.rs
Normal file
@@ -0,0 +1,80 @@
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct BangumiFilter {
    pub name: Option<Vec<String>>,
    pub group: Option<Vec<String>>,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct BangumiExtra {
    pub name_zh: Option<String>,
    pub s_name_zh: Option<String>,
    pub name_en: Option<String>,
    pub s_name_en: Option<String>,
    pub name_jp: Option<String>,
    pub s_name_jp: Option<String>,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "bangumi")]
pub struct Model {
    pub created_at: DateTime,
    pub updated_at: DateTime,
    #[sea_orm(primary_key)]
    pub id: i32,
    pub mikan_bangumi_id: Option<String>,
    pub subscription_id: i32,
    pub subscriber_id: i32,
    pub display_name: String,
    pub raw_name: String,
    pub season: i32,
    pub season_raw: Option<String>,
    pub fansub: Option<String>,
    pub mikan_fansub_id: Option<String>,
    pub filter: Option<BangumiFilter>,
    pub rss_link: Option<String>,
    pub poster_link: Option<String>,
    pub save_path: Option<String>,
    #[sea_orm(default = "false")]
    pub deleted: bool,
    pub homepage: Option<String>,
    pub extra: Option<BangumiExtra>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::subscriptions::Entity",
        from = "Column::SubscriptionId",
        to = "super::subscriptions::Column::Id"
    )]
    Subscription,
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id"
    )]
    Subscriber,
    #[sea_orm(has_many = "super::episodes::Entity")]
    Episode,
}

impl Related<super::episodes::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Episode.def()
    }
}

impl Related<super::subscriptions::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscription.def()
    }
}

impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}
45
apps/recorder/src/models/entities/downloaders.rs
Normal file
@@ -0,0 +1,45 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "downloader_type")]
#[serde(rename_all = "snake_case")]
pub enum DownloaderCategory {
    #[sea_orm(string_value = "qbittorrent")]
    QBittorrent,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloaders")]
pub struct Model {
    #[sea_orm(column_type = "Timestamp")]
    pub created_at: DateTime,
    #[sea_orm(column_type = "Timestamp")]
    pub updated_at: DateTime,
    #[sea_orm(primary_key)]
    pub id: i32,
    pub category: DownloaderCategory,
    pub endpoint: String,
    // Nullable to match the `string_null` columns created by the migration.
    pub password: Option<String>,
    pub username: Option<String>,
    pub subscriber_id: i32,
    pub save_path: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id"
    )]
    Subscriber,
}

impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}
78
apps/recorder/src/models/entities/downloads.rs
Normal file
@@ -0,0 +1,78 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_status")]
#[serde(rename_all = "snake_case")]
pub enum DownloadStatus {
    #[sea_orm(string_value = "pending")]
    Pending,
    #[sea_orm(string_value = "downloading")]
    Downloading,
    #[sea_orm(string_value = "paused")]
    Paused,
    #[sea_orm(string_value = "completed")]
    Completed,
    #[sea_orm(string_value = "failed")]
    Failed,
    #[sea_orm(string_value = "deleted")]
    Deleted,
}

#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "download_mime")]
pub enum DownloadMime {
    #[sea_orm(string_value = "application/octet-stream")]
    #[serde(rename = "application/octet-stream")]
    OctetStream,
    #[sea_orm(string_value = "application/x-bittorrent")]
    #[serde(rename = "application/x-bittorrent")]
    BitTorrent,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloads")]
pub struct Model {
    pub created_at: DateTime,
    pub updated_at: DateTime,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Named to match the `original_name` column created by the add-downloads migration.
    pub original_name: String,
    pub display_name: String,
    pub subscription_id: i32,
    pub status: DownloadStatus,
    pub mime: DownloadMime,
    pub url: String,
    pub all_size: Option<u64>,
    pub curr_size: Option<u64>,
    pub homepage: Option<String>,
    pub save_path: Option<String>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::subscriptions::Entity",
        from = "Column::SubscriptionId",
        to = "super::subscriptions::Column::Id"
    )]
    Subscription,
    #[sea_orm(has_many = "super::episodes::Entity")]
    Episode,
}

impl Related<super::subscriptions::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscription.def()
    }
}

impl Related<super::episodes::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Episode.def()
    }
}
95
apps/recorder/src/models/entities/episodes.rs
Normal file
@@ -0,0 +1,95 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, Default)]
pub struct EpisodeExtra {
    pub name_zh: Option<String>,
    pub s_name_zh: Option<String>,
    pub name_en: Option<String>,
    pub s_name_en: Option<String>,
    pub name_jp: Option<String>,
    pub s_name_jp: Option<String>,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "episodes")]
pub struct Model {
    pub created_at: DateTime,
    pub updated_at: DateTime,
    #[sea_orm(primary_key)]
    pub id: i32,
    #[sea_orm(indexed)]
    pub mikan_episode_id: Option<String>,
    pub raw_name: String,
    pub display_name: String,
    pub bangumi_id: i32,
    pub subscription_id: i32,
    pub subscriber_id: i32,
    pub download_id: Option<i32>,
    pub save_path: Option<String>,
    pub resolution: Option<String>,
    pub season: i32,
    pub season_raw: Option<String>,
    pub fansub: Option<String>,
    pub poster_link: Option<String>,
    pub episode_index: i32,
    pub homepage: Option<String>,
    pub subtitle: Option<Vec<String>>,
    #[sea_orm(default = "false")]
    pub deleted: bool,
    pub source: Option<String>,
    pub extra: EpisodeExtra,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::bangumi::Entity",
        from = "Column::BangumiId",
        to = "super::bangumi::Column::Id"
    )]
    Bangumi,
    #[sea_orm(
        belongs_to = "super::downloads::Entity",
        from = "Column::DownloadId",
        to = "super::downloads::Column::Id"
    )]
    Downloads,
    #[sea_orm(
        belongs_to = "super::subscriptions::Entity",
        from = "Column::SubscriptionId",
        to = "super::subscriptions::Column::Id"
    )]
    Subscriptions,
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id"
    )]
    Subscriber,
}

impl Related<super::bangumi::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Bangumi.def()
    }
}

impl Related<super::downloads::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Downloads.def()
    }
}

impl Related<super::subscriptions::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriptions.def()
    }
}

impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}
7
apps/recorder/src/models/entities/mod.rs
Normal file
@@ -0,0 +1,7 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.4
pub mod bangumi;
pub mod downloaders;
pub mod downloads;
pub mod episodes;
pub mod subscribers;
pub mod subscriptions;
63
apps/recorder/src/models/entities/subscribers.rs
Normal file
@@ -0,0 +1,63 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.2

use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct SubscriberBangumiConfig {
    pub leading_group_tag: Option<bool>,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscribers")]
pub struct Model {
    pub created_at: DateTime,
    pub updated_at: DateTime,
    #[sea_orm(primary_key)]
    pub id: i32,
    #[sea_orm(unique)]
    pub pid: String,
    pub display_name: String,
    pub downloader_id: Option<i32>,
    pub bangumi_conf: Option<SubscriberBangumiConfig>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::subscriptions::Entity")]
    Subscription,
    #[sea_orm(
        belongs_to = "super::downloaders::Entity",
        from = "Column::DownloaderId",
        to = "super::downloaders::Column::Id"
    )]
    Downloader,
    #[sea_orm(has_many = "super::bangumi::Entity")]
    Bangumi,
    #[sea_orm(has_many = "super::episodes::Entity")]
    Episode,
}

impl Related<super::subscriptions::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscription.def()
    }
}

impl Related<super::downloaders::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Downloader.def()
    }
}

impl Related<super::bangumi::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Bangumi.def()
    }
}

impl Related<super::episodes::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Episode.def()
    }
}
66
apps/recorder/src/models/entities/subscriptions.rs
Normal file
@@ -0,0 +1,66 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(
    Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay,
)]
#[sea_orm(
    rs_type = "String",
    db_type = "Enum",
    enum_name = "subscription_category"
)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionCategory {
    #[sea_orm(string_value = "mikan")]
    Mikan,
    #[sea_orm(string_value = "manual")]
    Manual,
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
    #[sea_orm(column_type = "Timestamp")]
    pub created_at: DateTime,
    #[sea_orm(column_type = "Timestamp")]
    pub updated_at: DateTime,
    #[sea_orm(primary_key)]
    pub id: i32,
    pub display_name: String,
    pub subscriber_id: i32,
    pub category: SubscriptionCategory,
    pub source_url: String,
    pub enabled: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::subscribers::Entity",
        from = "Column::SubscriberId",
        to = "super::subscribers::Column::Id"
    )]
    Subscriber,
    #[sea_orm(has_many = "super::bangumi::Entity")]
    Bangumi,
    #[sea_orm(has_many = "super::episodes::Entity")]
    Episodes,
}

impl Related<super::subscribers::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Subscriber.def()
    }
}

impl Related<super::bangumi::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Bangumi.def()
    }
}

impl Related<super::episodes::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Episodes.def()
    }
}
104
apps/recorder/src/models/episodes.rs
Normal file
@@ -0,0 +1,104 @@
use std::sync::Arc;

use loco_rs::app::AppContext;
use sea_orm::{entity::prelude::*, sea_query::OnConflict, ActiveValue};

use super::bangumi;
pub use super::entities::episodes::*;
use crate::{
    app::AppContextExt,
    extract::{
        mikan::{build_mikan_episode_homepage, MikanEpisodeMeta},
        rawname::parse_episode_meta_from_raw_name,
    },
};

#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpsiodeCreation {
    pub episode: MikanEpisodeMeta,
    pub bangumi: Arc<bangumi::Model>,
}

impl Model {
    pub async fn add_episodes(
        ctx: &AppContext,
        creations: impl IntoIterator<Item = MikanEpsiodeCreation>,
    ) -> eyre::Result<()> {
        let db = &ctx.db;
        let new_episode_active_models = creations
            .into_iter()
            .map(|cr| ActiveModel::from_mikan_episode_meta(ctx, cr))
            .inspect(|result| {
                if let Err(e) = result {
                    tracing::warn!("Failed to create episode: {:?}", e);
                }
            })
            .flatten();

        Entity::insert_many(new_episode_active_models)
            .on_conflict(
                OnConflict::columns([Column::BangumiId, Column::MikanEpisodeId])
                    .do_nothing()
                    .to_owned(),
            )
            .on_empty_do_nothing()
            .exec(db)
            .await?;

        Ok(())
    }
}

impl ActiveModel {
    pub fn from_mikan_episode_meta(
        ctx: &AppContext,
        creation: MikanEpsiodeCreation,
    ) -> eyre::Result<Self> {
        let item = creation.episode;
        let bgm = creation.bangumi;
        let raw_meta = parse_episode_meta_from_raw_name(&item.episode_title)
            .inspect_err(|e| {
                tracing::warn!("Failed to parse episode meta: {:?}", e);
            })
            .ok()
            .unwrap_or_default();
        let homepage = build_mikan_episode_homepage(
            ctx.get_mikan_client().base_url(),
            &item.mikan_episode_id,
        )?;

        Ok(Self {
            mikan_episode_id: ActiveValue::Set(Some(item.mikan_episode_id)),
            raw_name: ActiveValue::Set(item.episode_title.clone()),
            display_name: ActiveValue::Set(item.episode_title.clone()),
            bangumi_id: ActiveValue::Set(bgm.id),
            subscription_id: ActiveValue::Set(bgm.subscription_id),
            subscriber_id: ActiveValue::Set(bgm.subscriber_id),
            resolution: ActiveValue::Set(raw_meta.resolution),
            // Prefer the season parsed from the raw episode name; fall back to
            // the bangumi-level season when parsing yields nothing.
            season: ActiveValue::Set(if raw_meta.season > 0 {
                raw_meta.season
            } else {
                bgm.season
            }),
            season_raw: ActiveValue::Set(raw_meta.season_raw.or_else(|| bgm.season_raw.clone())),
            fansub: ActiveValue::Set(raw_meta.fansub.or_else(|| bgm.fansub.clone())),
            poster_link: ActiveValue::Set(bgm.poster_link.clone()),
            episode_index: ActiveValue::Set(raw_meta.episode_index),
            homepage: ActiveValue::Set(Some(homepage.to_string())),
            subtitle: ActiveValue::Set(raw_meta.subtitle.map(|s| vec![s])),
            source: ActiveValue::Set(raw_meta.source),
            extra: ActiveValue::Set(EpisodeExtra {
                name_zh: raw_meta.name_zh,
                name_en: raw_meta.name_en,
                name_jp: raw_meta.name_jp,
                s_name_en: raw_meta.name_en_no_season,
                s_name_jp: raw_meta.name_jp_no_season,
                s_name_zh: raw_meta.name_zh_no_season,
            }),
            ..Default::default()
        })
    }
}

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
10
apps/recorder/src/models/mod.rs
Normal file
@@ -0,0 +1,10 @@
pub mod bangumi;
pub mod downloaders;
pub mod downloads;
pub mod entities;
pub mod episodes;
pub mod notifications;
pub mod prelude;
pub mod query;
pub mod subscribers;
pub mod subscriptions;
8
apps/recorder/src/models/notifications.rs
Normal file
@@ -0,0 +1,8 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Notification {
    season: i32,
    episode_size: u32,
    poster_url: Option<String>,
}
8
apps/recorder/src/models/prelude.rs
Normal file
@@ -0,0 +1,8 @@
pub use super::{
    bangumi::{self, Entity as Bangumi},
    downloaders::{self, DownloaderCategory, Entity as Downloader},
    downloads::{self, DownloadMime, DownloadStatus, Entity as Download},
    episodes::{self, Entity as Episode},
    subscribers::{self, Entity as Subscriber},
    subscriptions::{self, Entity as Subscription, SubscriptionCategory},
};
26
apps/recorder/src/models/query/mod.rs
Normal file
@@ -0,0 +1,26 @@
use sea_orm::{
    prelude::Expr,
    sea_query::{Alias, IntoColumnRef, IntoTableRef, Query, SelectStatement},
    Value,
};

/// Builds a `SELECT` that joins a `VALUES` list (aliased as `t.column1`)
/// against `tbl_ref.col_ref`, returning only the values already present in
/// the table.
pub fn filter_values_in<
    I: IntoIterator<Item = T>,
    T: Into<Value>,
    R: IntoTableRef,
    C: IntoColumnRef + Copy,
>(
    tbl_ref: R,
    col_ref: C,
    values: I,
) -> SelectStatement {
    Query::select()
        .expr(Expr::col((Alias::new("t"), Alias::new("column1"))))
        .from_values(values, Alias::new("t"))
        .left_join(
            tbl_ref,
            Expr::col((Alias::new("t"), Alias::new("column1"))).equals(col_ref),
        )
        .and_where(Expr::col(col_ref).is_not_null())
        .to_owned()
}
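// Usage sketch mirroring the call in `models/subscriptions.rs`; the episode
// ids here are made-up placeholders:
//
//     let stmt = filter_values_in(
//         episodes::Entity,
//         episodes::Column::MikanEpisodeId,
//         ["mikan-ep-1", "mikan-ep-2"].map(|s| Value::from(s.to_string())),
//     );
//     let rows = db.query_all(db.get_database_backend().build(&stmt)).await?;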
97
apps/recorder/src/models/subscribers.rs
Normal file
@@ -0,0 +1,97 @@
use loco_rs::{
    app::AppContext,
    model::{ModelError, ModelResult},
};
use sea_orm::{entity::prelude::*, ActiveValue, TransactionTrait};
use serde::{Deserialize, Serialize};

pub use super::entities::subscribers::*;

pub const ROOT_SUBSCRIBER: &str = "konobangu";

#[derive(Debug, Deserialize, Serialize)]
pub struct SubscriberIdParams {
    pub id: String,
}

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {
    async fn before_save<C>(self, _db: &C, insert: bool) -> Result<Self, DbErr>
    where
        C: ConnectionTrait,
    {
        if insert {
            let mut this = self;
            // Only generate a pid when one was not supplied: the root
            // subscriber is created with the fixed pid "konobangu".
            if this.pid.is_not_set() {
                this.pid = ActiveValue::Set(Uuid::new_v4().to_string());
            }
            Ok(this)
        } else {
            Ok(self)
        }
    }
}

impl Model {
    /// Finds a subscriber by the provided pid.
    ///
    /// # Errors
    ///
    /// When the subscriber could not be found or a DB query error occurs.
    pub async fn find_by_pid(ctx: &AppContext, pid: &str) -> ModelResult<Self> {
        let db = &ctx.db;
        // Compare the pid as a plain string: the column is text, and the root
        // subscriber's pid ("konobangu") is not a UUID.
        let subscriber = Entity::find().filter(Column::Pid.eq(pid)).one(db).await?;
        subscriber.ok_or_else(|| ModelError::EntityNotFound)
    }

    pub async fn find_by_id(ctx: &AppContext, id: i32) -> ModelResult<Self> {
        let db = &ctx.db;

        let subscriber = Entity::find_by_id(id).one(db).await?;
        subscriber.ok_or_else(|| ModelError::EntityNotFound)
    }

    pub async fn find_pid_by_id_with_cache(ctx: &AppContext, id: i32) -> eyre::Result<String> {
        let db = &ctx.db;
        let cache = &ctx.cache;
        let pid = cache
            .get_or_insert(&format!("subscriber-id2pid::{}", id), async {
                let subscriber = Entity::find_by_id(id)
                    .one(db)
                    .await?
                    .ok_or_else(|| loco_rs::Error::string(&format!("No such pid for id {}", id)))?;
                Ok(subscriber.pid)
            })
            .await?;
        Ok(pid)
    }

    pub async fn find_root(ctx: &AppContext) -> ModelResult<Self> {
        Self::find_by_pid(ctx, ROOT_SUBSCRIBER).await
    }

    /// Creates the root subscriber and saves it to the database.
    ///
    /// # Errors
    ///
    /// When the subscriber could not be saved into the DB.
    pub async fn create_root(ctx: &AppContext) -> ModelResult<Self> {
        let db = &ctx.db;
        let txn = db.begin().await?;

        let user = ActiveModel {
            display_name: ActiveValue::set(ROOT_SUBSCRIBER.to_string()),
            pid: ActiveValue::set(ROOT_SUBSCRIBER.to_string()),
            ..Default::default()
        }
        .insert(&txn)
        .await?;

        txn.commit().await?;

        Ok(user)
    }
}
228
apps/recorder/src/models/subscriptions.rs
Normal file
@@ -0,0 +1,228 @@
use std::{collections::HashSet, sync::Arc};

use itertools::Itertools;
use loco_rs::app::AppContext;
use sea_orm::{entity::prelude::*, ActiveValue};
use serde::{Deserialize, Serialize};

pub use super::entities::subscriptions::{self, *};
use super::{bangumi, episodes, query::filter_values_in};
use crate::{
    app::AppContextExt,
    extract::{
        mikan::{
            build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link,
            parse_mikan_bangumi_meta_from_mikan_homepage,
            parse_mikan_episode_meta_from_mikan_homepage, parse_mikan_rss_channel_from_rss_link,
            web_parser::{
                parse_mikan_bangumi_poster_from_origin_poster_src_with_cache,
                MikanBangumiPosterMeta,
            },
        },
        rawname::extract_season_from_title_body,
    },
    models::episodes::MikanEpsiodeCreation,
};

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct SubscriptionCreateFromRssDto {
    pub rss_link: String,
    pub display_name: String,
    pub enabled: Option<bool>,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(tag = "category")]
pub enum SubscriptionCreateDto {
    Mikan(SubscriptionCreateFromRssDto),
}

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

impl ActiveModel {
    pub fn from_create_dto(create_dto: SubscriptionCreateDto, subscriber_id: i32) -> Self {
        match create_dto {
            SubscriptionCreateDto::Mikan(create_dto) => {
                Self::from_rss_create_dto(SubscriptionCategory::Mikan, create_dto, subscriber_id)
            }
        }
    }

    fn from_rss_create_dto(
        category: SubscriptionCategory,
        create_dto: SubscriptionCreateFromRssDto,
        subscriber_id: i32,
    ) -> Self {
        Self {
            display_name: ActiveValue::Set(create_dto.display_name),
            enabled: ActiveValue::Set(create_dto.enabled.unwrap_or(false)),
            subscriber_id: ActiveValue::Set(subscriber_id),
            category: ActiveValue::Set(category),
            source_url: ActiveValue::Set(create_dto.rss_link),
            ..Default::default()
        }
    }
}

impl Model {
    pub async fn add_subscription(
        ctx: &AppContext,
        create_dto: SubscriptionCreateDto,
        subscriber_id: i32,
    ) -> eyre::Result<Self> {
        let db = &ctx.db;
        let subscription = ActiveModel::from_create_dto(create_dto, subscriber_id);

        Ok(subscription.insert(db).await?)
    }

    pub async fn toggle_iters(
        ctx: &AppContext,
        ids: impl Iterator<Item = i32>,
        enabled: bool,
    ) -> eyre::Result<()> {
        let db = &ctx.db;
        Entity::update_many()
            .col_expr(Column::Enabled, Expr::value(enabled))
            .filter(Column::Id.is_in(ids))
            .exec(db)
            .await?;
        Ok(())
    }

    pub async fn delete_iters(
        ctx: &AppContext,
        ids: impl Iterator<Item = i32>,
    ) -> eyre::Result<()> {
        let db = &ctx.db;
        Entity::delete_many()
            .filter(Column::Id.is_in(ids))
            .exec(db)
            .await?;
        Ok(())
    }

    pub async fn pull_subscription(&self, ctx: &AppContext) -> eyre::Result<()> {
        match &self.category {
            SubscriptionCategory::Mikan => {
                let mikan_client = ctx.get_mikan_client();
                let channel =
                    parse_mikan_rss_channel_from_rss_link(Some(mikan_client), &self.source_url)
                        .await?;

                let items = channel.into_items();

                let db = &ctx.db;
                let items = items.into_iter().collect_vec();

                // Query which of the fetched episode ids this subscriber
                // already has, so only new items are processed below.
                let mut stmt = filter_values_in(
                    episodes::Entity,
                    episodes::Column::MikanEpisodeId,
                    items
                        .iter()
                        .map(|s| Value::from(s.mikan_episode_id.clone())),
                );
                stmt.and_where(Expr::col(episodes::Column::SubscriberId).eq(self.subscriber_id));

                let builder = &db.get_database_backend();

                let old_rss_item_mikan_episode_ids_set = db
                    .query_all(builder.build(&stmt))
                    .await?
                    .into_iter()
                    .flat_map(|qs| qs.try_get_by_index(0))
                    .collect::<HashSet<String>>();

                let new_rss_items = items
                    .into_iter()
                    .filter(|item| {
                        !old_rss_item_mikan_episode_ids_set.contains(&item.mikan_episode_id)
                    })
                    .collect_vec();

                let mut new_metas = vec![];
                for new_rss_item in new_rss_items.iter() {
                    new_metas.push(
                        parse_mikan_episode_meta_from_mikan_homepage(
                            Some(mikan_client),
                            new_rss_item.homepage.clone(),
                        )
                        .await?,
                    );
                }

                let new_mikan_bangumi_groups = new_metas
                    .into_iter()
                    .into_group_map_by(|s| (s.mikan_bangumi_id.clone(), s.mikan_fansub_id.clone()));

                for ((mikan_bangumi_id, mikan_fansub_id), new_ep_metas) in new_mikan_bangumi_groups
                {
                    let mikan_base_url = ctx.get_mikan_client().base_url();
                    let bgm_homepage = build_mikan_bangumi_homepage(
                        mikan_base_url,
                        &mikan_bangumi_id,
                        Some(&mikan_fansub_id),
                    )?;
                    let bgm_rss_link = build_mikan_bangumi_rss_link(
                        mikan_base_url,
                        &mikan_bangumi_id,
                        Some(&mikan_fansub_id),
                    )?;
                    let bgm = Arc::new(
                        bangumi::Model::get_or_insert_from_mikan(
                            ctx,
                            self.subscriber_id,
                            self.id,
                            mikan_bangumi_id.to_string(),
                            mikan_fansub_id.to_string(),
                            async |am| -> eyre::Result<()> {
                                let bgm_meta = parse_mikan_bangumi_meta_from_mikan_homepage(
                                    Some(mikan_client),
                                    bgm_homepage.clone(),
                                )
                                .await?;
                                let bgm_name = bgm_meta.bangumi_title;
                                let (_, bgm_season_raw, bgm_season) =
                                    extract_season_from_title_body(&bgm_name);
                                am.raw_name = ActiveValue::Set(bgm_name.clone());
                                am.display_name = ActiveValue::Set(bgm_name);
                                am.season = ActiveValue::Set(bgm_season);
                                am.season_raw = ActiveValue::Set(bgm_season_raw);
                                am.rss_link = ActiveValue::Set(Some(bgm_rss_link.to_string()));
                                am.homepage = ActiveValue::Set(Some(bgm_homepage.to_string()));
                                am.fansub = ActiveValue::Set(bgm_meta.fansub);
                                if let Some(origin_poster_src) = bgm_meta.origin_poster_src {
                                    if let MikanBangumiPosterMeta {
                                        poster_src: Some(poster_src),
                                        ..
                                    } = parse_mikan_bangumi_poster_from_origin_poster_src_with_cache(
                                        ctx,
                                        origin_poster_src,
                                        self.subscriber_id,
                                    )
                                    .await?
                                    {
                                        am.poster_link = ActiveValue::Set(Some(poster_src))
                                    }
                                }
                                Ok(())
                            },
                        )
                        .await?,
                    );
                    episodes::Model::add_episodes(
                        ctx,
                        new_ep_metas.into_iter().map(|item| MikanEpsiodeCreation {
                            episode: item,
                            bangumi: bgm.clone(),
                        }),
                    )
                    .await?;
                }
                Ok(())
            }
            _ => todo!(),
        }
    }
}
1
apps/recorder/src/tasks/mod.rs
Normal file
@@ -0,0 +1 @@

1
apps/recorder/src/views/mod.rs
Normal file
@@ -0,0 +1 @@
pub mod subscribers;
19
apps/recorder/src/views/subscribers.rs
Normal file
@@ -0,0 +1,19 @@
use serde::{Deserialize, Serialize};

use crate::models::entities::subscribers;

#[derive(Debug, Deserialize, Serialize)]
pub struct CurrentResponse {
    pub pid: String,
    pub display_name: String,
}

impl CurrentResponse {
    #[must_use]
    pub fn new(user: &subscribers::Model) -> Self {
        Self {
            pid: user.pid.to_string(),
            display_name: user.display_name.to_string(),
        }
    }
}
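// Controller-side usage sketch; the handler wiring is an assumption (not part
// of this commit), and `format::json` is loco's JSON response helper:
//
//     let subscriber = subscribers::Model::find_root(&ctx).await?;
//     format::json(CurrentResponse::new(&subscriber))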
1
apps/recorder/src/workers/mod.rs
Normal file
@@ -0,0 +1 @@
pub mod subscription_worker;
31
apps/recorder/src/workers/subscription_worker.rs
Normal file
@@ -0,0 +1,31 @@
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};

use crate::models::subscriptions;

pub struct SubscriptionWorker {
    pub ctx: AppContext,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct SubscriptionWorkerArgs {
    pub subscription: subscriptions::Model,
}

#[async_trait]
impl BackgroundWorker<SubscriptionWorkerArgs> for SubscriptionWorker {
    fn build(ctx: &AppContext) -> Self {
        Self { ctx: ctx.clone() }
    }

    async fn perform(&self, _args: SubscriptionWorkerArgs) -> Result<()> {
        println!("================================================");

        let _db = &self.ctx.db;
        let _storage = &self.ctx.storage;

        println!("================================================");
        Ok(())
    }
}
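// A sketch of enqueueing this worker from a handler, assuming this loco
// version exposes `perform_later` on the `BackgroundWorker` trait (unverified
// here):
//
//     SubscriptionWorker::perform_later(
//         &ctx,
//         SubscriptionWorkerArgs { subscription: subscription_model },
//     )
//     .await?;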
3
apps/recorder/tests/mod.rs
Normal file
@@ -0,0 +1,3 @@
mod models;
mod requests;
mod tasks;
1
apps/recorder/tests/models/mod.rs
Normal file
@@ -0,0 +1 @@
mod subscribers;
@@ -0,0 +1,7 @@
---
source: tests/models/subscribers.rs
expression: non_existing_subscriber_results
---
Err(
    EntityNotFound,
)
@@ -0,0 +1,13 @@
---
source: tests/models/subscribers.rs
expression: existing_subscriber
---
Ok(
    Model {
        created_at: 2023-11-12T12:34:56.789,
        updated_at: 2023-11-12T12:34:56.789,
        id: 1,
        pid: "11111111-1111-1111-1111-111111111111",
        display_name: "user1"
    },
)
27
apps/recorder/tests/models/subscribers.rs
Normal file
@@ -0,0 +1,27 @@
use insta::assert_debug_snapshot;
use loco_rs::testing;
use recorder::{app::App, models::subscribers::Model};
use serial_test::serial;

macro_rules! configure_insta {
    ($($expr:expr),*) => {
        let mut settings = insta::Settings::clone_current();
        settings.set_prepend_module_to_snapshot(false);
        settings.set_snapshot_suffix("users");
        let _guard = settings.bind_to_scope();
    };
}

#[tokio::test]
#[serial]
async fn can_find_by_pid() {
    // configure_insta!();
    //
    // let boot = testing::boot_test::<App>().await.unwrap();
    // testing::seed::<App>(&boot.app_context.db).await.unwrap();
    //
    // let existing_subscriber =
    //     Model::find_by_pid(&boot.app_context, "11111111-1111-1111-1111-111111111111").await;
    //
    // assert_debug_snapshot!(existing_subscriber);
}
1
apps/recorder/tests/requests/mod.rs
Normal file
@@ -0,0 +1 @@
mod subscribers;
33
apps/recorder/tests/requests/subscribers.rs
Normal file
@@ -0,0 +1,33 @@
#![allow(unused_imports)]
use insta::{assert_debug_snapshot, with_settings};
use loco_rs::testing;
use recorder::app::App;
use serial_test::serial;

// TODO: see how to dedup / extract this to app-local test utils
// not to framework, because that would require a runtime dep on insta
// macro_rules! configure_insta {
//     ($($expr:expr),*) => {
//         let mut settings = insta::Settings::clone_current();
//         settings.set_prepend_module_to_snapshot(false);
//         settings.set_snapshot_suffix("user_request");
//         let _guard = settings.bind_to_scope();
//     };
// }

#[tokio::test]
#[serial]
async fn can_get_current_user() {
    // configure_insta!();
    //
    // testing::request::<App, _, _>(|request, _ctx| async move {
    //     let response = request.get("/api/user/current").await;
    //
    //     with_settings!({
    //         filters => testing::cleanup_user_model()
    //     }, {
    //         assert_debug_snapshot!((response.status_code(), response.text()));
    //     });
    // })
    // .await;
}
1
apps/recorder/tests/tasks/mod.rs
Normal file
@@ -0,0 +1 @@
pub mod seed;
42
apps/recorder/tests/tasks/seed.rs
Normal file
@@ -0,0 +1,42 @@
//! This task implements data seeding functionality for initializing new
//! development/demo environments.
//!
//! # Example
//!
//! Run the task with the following command:
//! ```sh
//! cargo run task seed_data
//! ```
//!
//! To override existing data and reset the data structure, use the following
//! command with the `refresh:true` argument:
//! ```sh
//! cargo run task seed_data refresh:true
//! ```
#![allow(unused_imports)]
use loco_rs::{db, prelude::*};
use recorder::{app::App, migrations::Migrator};

#[allow(clippy::module_name_repetitions)]
pub struct SeedData;

#[async_trait]
impl Task for SeedData {
    fn task(&self) -> TaskInfo {
        TaskInfo {
            name: "seed_data".to_string(),
            detail: "Task for seeding data".to_string(),
        }
    }

    async fn run(&self, _app_context: &AppContext, _vars: &task::Vars) -> Result<()> {
        // let refresh = vars.cli.get("refresh").is_some_and(|refresh| refresh == "true");
        //
        // if refresh {
        //     db::reset::<Migrator>(&app_context.db).await?;
        // }
        // let path = std::path::Path::new("src/fixtures");
        // db::run_app_seed::<App>(&app_context.db, path).await?;
        Ok(())
    }
}