Compare commits

No commits in common. "97b7bfb7fb346d4972965379c3699040eda5d0e9" and "2ed2b864b24daac72f56f25c65286e81c09cc12e" have entirely different histories.

26 changed files with 4072 additions and 630 deletions

.vscode/launch.json (vendored, 26 changed lines)

@@ -4,6 +4,25 @@
 // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
 "version": "0.2.0",
 "configurations": [
+{
+"type": "lldb",
+"request": "launch",
+"name": "debug quirks_path lib",
+"cargo": {
+"args": [
+"test",
+"--no-run",
+"--lib",
+"--package=quirks_path"
+],
+"filter": {
+"name": "quirks_path",
+"kind": "lib"
+}
+},
+"args": [],
+"cwd": "${workspaceFolder}"
+},
 {
 "type": "lldb",
 "request": "launch",
@@ -20,9 +39,8 @@
 }
 },
 "args": [
-"start",
 "--environment",
-"development"
+"recorder/development"
 ],
 "cwd": "${workspaceFolder}"
 },
@@ -43,14 +61,14 @@
 },
 "args": [
 "--environment",
-"development"
+"recorder/development"
 ],
 "cwd": "${workspaceFolder}"
 },
 {
 "type": "lldb",
 "request": "launch",
-"name": "debug recorder lib",
+"name": "debug record lib",
 "cargo": {
 "args": [
 "test",

Cargo.lock (generated, 914 changed lines)

File diff suppressed because it is too large.


@@ -4,11 +4,6 @@ resolver = "2"
 [patch.crates-io]
 testcontainers = { git = "https://github.com/testcontainers/testcontainers-rs.git", rev = "af21727" }
-# loco-rs = { git = "https://github.com/lonelyhentxi/loco.git", rev = "beb890e" }
-# loco-rs = { git = "https://github.com/loco-rs/loco.git" }
-async-graphql = { git = "https://github.com/aumetra/async-graphql.git", rev = "690ece7" }
-async-graphql-axum = { git = "https://github.com/aumetra/async-graphql.git", rev = "690ece7" }
-jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
 # [patch."https://github.com/lonelyhentxi/qbit.git"]
 # qbit-rs = { path = "./patches/qbit-rs" }


@@ -1,2 +1,2 @@
-^https://konobangu.com/*** http://127.0.0.1:5000/$1 excludeFilter://^https://konobangu.com/api/***
-^wss://konobangu.com/*** ws://127.0.0.1:5000/$1 ^excludeFilter://^wss://konobangu.com/api/***
+^https://konobangu.com/*** http://127.0.0.1:5000/$1
+^wss://konobangu.com/*** ws://127.0.0.1:5000/$1


@@ -1 +1 @@
-^https://konobangu.com/api/*** http://127.0.0.1:5001/api/$1
+^https://recorder.konobangu.com/*** http://127.0.0.1:7600/$1


@@ -22,7 +22,7 @@ testcontainers = [
 ]
 [dependencies]
-loco-rs = { version = "0.14" }
+loco-rs = { version = "0.13" }
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"
 tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
@@ -38,17 +38,11 @@ sea-orm = { version = "1", features = [
 ] }
 figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
-axum = "0.8"
+axum = "0.7.9"
 uuid = { version = "1.6.0", features = ["v4"] }
 tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
 sea-orm-migration = { version = "1", features = ["runtime-tokio-rustls"] }
-reqwest = { version = "0.12", features = [
-"charset",
-"http2",
-"json",
-"macos-system-configuration",
-"rustls-tls",
-] }
+reqwest = { version = "0.12.9" }
 thiserror = "2"
 rss = "2"
 bytes = "1.9"
@@ -70,6 +64,7 @@ scraper = "0.22.0"
 leaky-bucket = "1.1.2"
 serde_with = "3"
 jwt-authorizer = "0.15.0"
+axum-auth = "0.7.0"
 futures = "0.3.31"
 librqbit-core = "4"
 qbit-rs = { git = "https://github.com/lonelyhentxi/qbit.git", rev = "72d53138ebe", features = [
@@ -96,11 +91,8 @@ async-graphql-axum = "7.0.13"
 fastrand = "2.3.0"
 seaography = "1.1.2"
 quirks_path = "0.1.0"
-base64 = "0.22.1"
-tower = "0.5.2"
-axum-extra = "0.10.0"
 [dev-dependencies]
 serial_test = "3"
-loco-rs = { version = "0.14", features = ["testing"] }
+loco-rs = { version = "0.13", features = ["testing"] }
 insta = { version = "1", features = ["redactions", "yaml", "filters"] }


@@ -19,6 +19,7 @@ use recorder::{
 use sea_orm_migration::MigratorTrait;
 async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> color_eyre::eyre::Result<()> {
+color_eyre::install()?;
 let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
 // let rss_link =
@@ -48,7 +49,10 @@ async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> color_eyre::eyre::Result<()
 }
 async fn init() -> color_eyre::eyre::Result<AppContext> {
-color_eyre::install()?;
+tracing_subscriber::fmt()
+.with_max_level(tracing::Level::INFO)
+.with_test_writer()
+.init();
 let ctx = loco_rs::cli::playground::<App>().await?;
 let BootResult {
 app_context: ctx, ..
@@ -62,5 +66,15 @@ async fn main() -> color_eyre::eyre::Result<()> {
 let ctx = init().await?;
 pull_mikan_bangumi_rss(&ctx).await?;
+// let active_model: articles::ActiveModel = ActiveModel {
+// title: Set(Some("how to build apps in 3 steps".to_string())),
+// content: Set(Some("use Loco: https://loco.rs".to_string())),
+// ..Default::default()
+// };
+// active_model.insert(&ctx.db).await.unwrap();
+// let res = articles::Entity::find().all(&ctx.db).await.unwrap();
+// println!("{:?}", res);
 Ok(())
 }


@@ -5,7 +5,6 @@ use loco_rs::{
 app::{AppContext, Hooks},
 boot::{create_app, BootResult, StartMode},
 cache,
-config::Config,
 controller::AppRoutes,
 db::truncate_table,
 environment::Environment,
@@ -13,9 +12,10 @@ use loco_rs::{
 task::Tasks,
 Result,
 };
+use sea_orm::DatabaseConnection;
 use crate::{
-auth::service::{AppAuthService, AppAuthServiceInitializer},
+auth::service::AppAuthService,
 controllers::{self},
 dal::{AppDalClient, AppDalInitalizer},
 extract::mikan::{client::AppMikanClientInitializer, AppMikanClient},
@@ -25,8 +25,6 @@ use crate::{
 workers::subscription_worker::SubscriptionWorker,
 };
-pub const CONFIG_FOLDER: &str = "LOCO_CONFIG_FOLDER";
 pub trait AppContextExt {
 fn get_dal_client(&self) -> &AppDalClient {
 AppDalClient::app_instance()
@@ -51,20 +49,6 @@ pub struct App;
 #[async_trait]
 impl Hooks for App {
-async fn load_config(env: &Environment) -> Result<Config> {
-std::env::var(CONFIG_FOLDER).map_or_else(
-|_| {
-let monorepo_project_config_dir = Path::new("./apps/recorder/config");
-if monorepo_project_config_dir.exists() && monorepo_project_config_dir.is_dir() {
-return env.load_from_folder(monorepo_project_config_dir);
-}
-let current_config_dir = Path::new("./config");
-env.load_from_folder(current_config_dir)
-},
-|config_folder| env.load_from_folder(Path::new(&config_folder)),
-)
-}
 fn app_name() -> &'static str {
 env!("CARGO_CRATE_NAME")
 }
@@ -74,7 +58,6 @@ impl Hooks for App {
 Box::new(AppDalInitalizer),
 Box::new(AppMikanClientInitializer),
 Box::new(AppGraphQLServiceInitializer),
-Box::new(AppAuthServiceInitializer),
 ];
 Ok(initializers)
@@ -90,19 +73,15 @@ impl Hooks for App {
 )
 }
-async fn boot(
-mode: StartMode,
-environment: &Environment,
-config: Config,
-) -> Result<BootResult> {
-create_app::<Self, Migrator>(mode, environment, config).await
+async fn boot(mode: StartMode, environment: &Environment) -> Result<BootResult> {
+create_app::<Self, Migrator>(mode, environment).await
 }
 fn routes(ctx: &AppContext) -> AppRoutes {
 AppRoutes::with_default_routes()
 .prefix("/api")
-.add_route(controllers::auth::routes())
-.add_route(controllers::graphql::routes(ctx.clone()))
+.add_route(controllers::subscribers::routes())
+.add_route(controllers::graphql::routes(ctx.get_graphql_service()))
 }
 async fn connect_workers(ctx: &AppContext, queue: &Queue) -> Result<()> {
@@ -119,12 +98,12 @@ impl Hooks for App {
 fn register_tasks(_tasks: &mut Tasks) {}
-async fn truncate(ctx: &AppContext) -> Result<()> {
-truncate_table(&ctx.db, subscribers::Entity).await?;
+async fn truncate(db: &DatabaseConnection) -> Result<()> {
+truncate_table(db, subscribers::Entity).await?;
 Ok(())
 }
-async fn seed(_ctx: &AppContext, _base: &Path) -> Result<()> {
+async fn seed(_db: &DatabaseConnection, _base: &Path) -> Result<()> {
 Ok(())
 }
 }

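The hunks above keep the AppContextExt trait, whose default methods hand out shared clients through *::app_instance() calls. A minimal sketch of that extension-trait shape, with hypothetical names and an assumed process-wide singleton behind app_instance() (the real initializers are not shown in this diff):

use std::sync::OnceLock;

// Hypothetical client type standing in for AppDalClient / AppMikanClient.
struct MetricsClient;

impl MetricsClient {
    // Assumed singleton accessor, mirroring the `app_instance()` calls above.
    fn app_instance() -> &'static MetricsClient {
        static INSTANCE: OnceLock<MetricsClient> = OnceLock::new();
        INSTANCE.get_or_init(|| MetricsClient)
    }
}

trait ContextExt {
    // Default method: the context type gains the accessor without storing the client.
    fn get_metrics_client(&self) -> &'static MetricsClient {
        MetricsClient::app_instance()
    }
}

// Stand-in for loco's AppContext.
struct Ctx;
impl ContextExt for Ctx {}

fn main() {
    let _client = Ctx.get_metrics_client();
}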

@@ -1,7 +1,6 @@
 use async_trait::async_trait;
-use axum::http::request::Parts;
-use base64::{self, Engine};
-use reqwest::header::AUTHORIZATION;
+use axum::{http::request::Parts, RequestPartsExt};
+use axum_auth::AuthBasic;
 use super::{
 config::BasicAuthConfig,
@@ -10,48 +9,6 @@
 };
 use crate::models::{auth::AuthType, subscribers::SEED_SUBSCRIBER};
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct AuthBasic {
-pub user: String,
-pub password: Option<String>,
-}
-impl AuthBasic {
-fn decode_request_parts(req: &mut Parts) -> Result<Self, AuthError> {
-let authorization = req
-.headers
-.get(AUTHORIZATION)
-.and_then(|s| s.to_str().ok())
-.ok_or_else(|| AuthError::BasicInvalidCredentials)?;
-let split = authorization.split_once(' ');
-match split {
-Some((name, contents)) if name == "Basic" => {
-let decoded = base64::engine::general_purpose::STANDARD
-.decode(contents)
-.map_err(|_| AuthError::BasicInvalidCredentials)?;
-let decoded =
-String::from_utf8(decoded).map_err(|_| AuthError::BasicInvalidCredentials)?;
-Ok(if let Some((user, password)) = decoded.split_once(':') {
-Self {
-user: String::from(user),
-password: Some(String::from(password)),
-}
-} else {
-Self {
-user: decoded,
-password: None,
-}
-})
-}
-_ => Err(AuthError::BasicInvalidCredentials),
-}
-}
-}
 #[derive(Debug)]
 pub struct BasicAuthService {
 pub config: BasicAuthConfig,
@@ -60,11 +17,7 @@ pub struct BasicAuthService {
 #[async_trait]
 impl AuthService for BasicAuthService {
 async fn extract_user_info(&self, request: &mut Parts) -> Result<AuthUserInfo, AuthError> {
-if let Ok(AuthBasic {
-user: found_user,
-password: found_password,
-}) = AuthBasic::decode_request_parts(request)
-{
+if let Ok(AuthBasic((found_user, found_password))) = request.extract().await {
 if self.config.user == found_user
 && self.config.password == found_password.unwrap_or_default()
 {

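The - side hand-parsed the Authorization: Basic header with the base64 crate, while the + side delegates to axum_auth::AuthBasic, which yields the same user and optional password pair. A minimal round-trip sketch of the header format both variants consume (the credentials are made up):

use base64::{engine::general_purpose::STANDARD, Engine as _};

// Build the header value: "Basic " + base64("user:password"), per RFC 7617.
fn basic_auth_header(user: &str, password: &str) -> String {
    format!("Basic {}", STANDARD.encode(format!("{user}:{password}")))
}

// Split it again the way the removed hand-rolled parser did.
fn split_basic_auth(header: &str) -> Option<(String, Option<String>)> {
    let (scheme, payload) = header.split_once(' ')?;
    if scheme != "Basic" {
        return None;
    }
    let decoded = String::from_utf8(STANDARD.decode(payload).ok()?).ok()?;
    Some(match decoded.split_once(':') {
        Some((user, password)) => (user.to_string(), Some(password.to_string())),
        None => (decoded, None),
    })
}

fn main() {
    let header = basic_auth_header("konobangu", "insecure");
    assert_eq!(
        split_basic_auth(&header),
        Some(("konobangu".to_string(), Some("insecure".to_string())))
    );
}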

@@ -1,7 +1,6 @@
 use axum::{
 http::StatusCode,
 response::{IntoResponse, Response},
-Json,
 };
 use thiserror::Error;
@@ -31,6 +30,6 @@ pub enum AuthError {
 impl IntoResponse for AuthError {
 fn into_response(self) -> Response {
-(StatusCode::UNAUTHORIZED, Json(self.to_string())).into_response()
+(StatusCode::UNAUTHORIZED, self.to_string()).into_response()
 }
 }

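This change also alters the 401 body: wrapping the message in Json produced a JSON-encoded string with an application/json content type, while the bare String responds as plain text. A small sketch of the two axum response shapes, with a hypothetical error message:

use axum::{http::StatusCode, response::IntoResponse, Json};

// `-` side: body is the JSON string "\"invalid credentials\"",
// content-type: application/json.
fn unauthorized_json() -> impl IntoResponse {
    (StatusCode::UNAUTHORIZED, Json("invalid credentials".to_string()))
}

// `+` side: body is the raw text `invalid credentials`,
// content-type: text/plain; charset=utf-8.
fn unauthorized_text() -> impl IntoResponse {
    (StatusCode::UNAUTHORIZED, "invalid credentials".to_string())
}

fn main() {
    // Both tuple forms implement IntoResponse; axum handlers would return them directly.
    let _ = unauthorized_json().into_response();
    let _ = unauthorized_text().into_response();
}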

@@ -3,6 +3,7 @@ use axum::{
 extract::FromRequestParts,
 http::request::Parts,
 response::{IntoResponse as _, Response},
+Extension,
 };
 use jwt_authorizer::{JwtAuthorizer, Validation};
 use loco_rs::app::{AppContext, Initializer};
@@ -21,17 +22,22 @@ pub struct AuthUserInfo {
 pub auth_type: AuthType,
 }
-impl FromRequestParts<AppContext> for AuthUserInfo {
+#[async_trait]
+impl<S> FromRequestParts<S> for AuthUserInfo
+where
+S: Send + Sync,
+{
 type Rejection = Response;
-async fn from_request_parts(
-parts: &mut Parts,
-state: &AppContext,
-) -> Result<Self, Self::Rejection> {
-let auth_service = state.get_auth_service();
+async fn from_request_parts(req: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
+let Extension(ctx) = Extension::<AppContext>::from_request_parts(req, state)
+.await
+.expect("AppContext should be present");
+let auth_service = ctx.get_auth_service();
 auth_service
-.extract_user_info(parts)
+.extract_user_info(req)
 .await
 .map_err(|err| err.into_response())
 }

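This hunk trades the axum 0.8 form, where the extractor is implemented directly against the AppContext router state with native async trait methods, for the axum 0.7 form: generic over any state S, annotated with #[async_trait], and recovering the AppContext from a request Extension. A minimal sketch of the 0.7-style signature, using a hypothetical header-based extractor rather than the crate's auth service:

use async_trait::async_trait;
use axum::{
    extract::FromRequestParts,
    http::{request::Parts, StatusCode},
};

// Hypothetical extractor, used only to illustrate the shape of the impl.
struct RequestId(String);

#[async_trait]
impl<S> FromRequestParts<S> for RequestId
where
    S: Send + Sync,
{
    type Rejection = (StatusCode, &'static str);

    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        parts
            .headers
            .get("x-request-id")
            .and_then(|value| value.to_str().ok())
            .map(|value| RequestId(value.to_owned()))
            .ok_or((StatusCode::BAD_REQUEST, "missing x-request-id header"))
    }
}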

@@ -1,40 +1,19 @@
 use async_graphql::http::{playground_source, GraphQLPlaygroundConfig};
-use async_graphql_axum::{GraphQLRequest, GraphQLResponse};
-use axum::{
-extract::State,
-middleware::from_extractor_with_state,
-response::{Html, IntoResponse},
-routing::{get, post},
-};
-use loco_rs::{app::AppContext, prelude::Routes};
-use crate::{app::AppContextExt, auth::AuthUserInfo};
+use async_graphql_axum::GraphQL;
+use axum::response::Html;
+use loco_rs::prelude::*;
+use crate::graphql::service::AppGraphQLService;
-async fn graphql_playground() -> impl IntoResponse {
+pub async fn graphql_playground() -> impl IntoResponse {
 Html(playground_source(GraphQLPlaygroundConfig::new(
 "/api/graphql",
 )))
 }
-async fn graphql_handler(
-State(ctx): State<AppContext>,
-auth_user_info: AuthUserInfo,
-req: GraphQLRequest,
-) -> GraphQLResponse {
-let graphql_service = ctx.get_graphql_service();
-let mut req = req.into_inner();
-req = req.data(auth_user_info);
-graphql_service.schema.execute(req).await.into()
-}
-pub fn routes(state: AppContext) -> Routes {
-Routes::new()
-.prefix("/graphql")
-.add("/playground", get(graphql_playground))
-.add(
-"/",
-post(graphql_handler)
-.layer(from_extractor_with_state::<AuthUserInfo, AppContext>(state)),
-)
+pub fn routes(graphql_service: &AppGraphQLService) -> Routes {
+Routes::new().prefix("/graphql").add(
+"/",
+get(graphql_playground).post_service(GraphQL::new(graphql_service.schema.clone())),
+)
 }

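On the - side the handler attaches the authenticated user to the GraphQL request via req.data(auth_user_info), and resolvers can read it back with Context::data. A minimal, self-contained sketch of that request-data flow, with a stand-in struct in place of the crate's AuthUserInfo:

use async_graphql::{Context, EmptyMutation, EmptySubscription, Object, Request, Schema};

// Stand-in for the crate's AuthUserInfo, just to keep the sketch self-contained.
struct AuthUserInfo {
    auth_type: String,
}

struct Query;

#[Object]
impl Query {
    // Reads the request-scoped value attached by the handler.
    async fn auth_type(&self, ctx: &Context<'_>) -> async_graphql::Result<String> {
        Ok(ctx.data::<AuthUserInfo>()?.auth_type.clone())
    }
}

#[tokio::main]
async fn main() {
    let schema = Schema::build(Query, EmptyMutation, EmptySubscription).finish();
    let request = Request::new("{ authType }").data(AuthUserInfo {
        auth_type: "basic".into(),
    });
    let response = schema.execute(request).await;
    // The resolved data contains authType: "basic".
    println!("{}", response.data);
}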

@@ -1,2 +1,2 @@
-pub mod auth;
 pub mod graphql;
+pub mod subscribers;


@@ -1,5 +1,5 @@
-pub mod config;
 pub mod query_root;
 pub mod service;
+pub mod config;
 pub use query_root::schema;


@@ -2,11 +2,7 @@ use async_graphql::dynamic::*;
 use sea_orm::DatabaseConnection;
 use seaography::{Builder, BuilderContext};
-lazy_static::lazy_static! {
-static ref CONTEXT: BuilderContext = {
-BuilderContext {
-..Default::default()
-}
-};
-}
+lazy_static::lazy_static! {
+static ref CONTEXT: BuilderContext = BuilderContext::default();
+}
 pub fn schema(
 database: DatabaseConnection,
@@ -19,6 +15,7 @@ pub fn schema(
 seaography::register_entities!(
 builder,
 [
+auth,
 bangumi,
 downloaders,
 downloads,
@@ -31,6 +28,7 @@
 );
 {
+builder.register_enumeration::<auth::AuthType>();
 builder.register_enumeration::<downloads::DownloadStatus>();
 builder.register_enumeration::<subscriptions::SubscriptionCategory>();
 builder.register_enumeration::<downloaders::DownloaderCategory>();
@@ -38,7 +36,6 @@
 }
 let schema = builder.schema_builder();
 let schema = if let Some(depth) = depth {
 schema.limit_depth(depth)
 } else {


@@ -1,4 +1,5 @@
 use async_trait::async_trait;
+use loco_rs::schema::jsonb_null;
 use sea_orm_migration::{prelude::*, schema::*};
 use super::defs::{
@@ -26,7 +27,7 @@ impl MigrationTrait for Migration {
 .col(pk_auto(Subscribers::Id))
 .col(string_len_uniq(Subscribers::Pid, 64))
 .col(string(Subscribers::DisplayName))
-.col(json_binary_null(Subscribers::BangumiConf))
+.col(jsonb_null(Subscribers::BangumiConf))
 .to_owned(),
 )
 .await?;
@@ -100,13 +101,13 @@ impl MigrationTrait for Migration {
 .col(text_null(Bangumi::SeasonRaw))
 .col(text_null(Bangumi::Fansub))
 .col(text_null(Bangumi::MikanFansubId))
-.col(json_binary_null(Bangumi::Filter))
+.col(jsonb_null(Bangumi::Filter))
 .col(text_null(Bangumi::RssLink))
 .col(text_null(Bangumi::PosterLink))
 .col(text_null(Bangumi::SavePath))
 .col(boolean(Bangumi::Deleted).default(false))
 .col(text_null(Bangumi::Homepage))
-.col(json_binary_null(Bangumi::Extra))
+.col(jsonb_null(Bangumi::Extra))
 .foreign_key(
 ForeignKey::create()
 .name("fk_bangumi_subscriber_id")
@@ -213,7 +214,7 @@ impl MigrationTrait for Migration {
 .col(text_null(Episodes::Subtitle))
 .col(boolean(Episodes::Deleted).default(false))
 .col(text_null(Episodes::Source))
-.col(json_binary_null(Episodes::Extra))
+.col(jsonb_null(Episodes::Extra))
 .foreign_key(
 ForeignKey::create()
 .name("fk_episodes_bangumi_id")

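Both helpers declare a nullable JSON column: json_binary_null comes from sea_orm_migration::schema and maps to jsonb on Postgres, and the + side swaps in loco's jsonb_null helper for the same purpose. A minimal sketch of a table built with these column helpers, using made-up table and column names:

use sea_orm_migration::{prelude::*, schema::*};

// Made-up table, purely for illustration.
#[derive(DeriveIden)]
enum Notes {
    Table,
    Id,
    Extra,
}

fn create_notes_table() -> TableCreateStatement {
    Table::create()
        .table(Notes::Table)
        .if_not_exists()
        .col(pk_auto(Notes::Id))
        // Nullable JSON column; `jsonb` on Postgres.
        .col(json_binary_null(Notes::Extra))
        .to_owned()
}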

@@ -45,5 +45,11 @@ impl Related<super::subscribers::Entity> for Entity {
 }
 }
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
+pub enum RelatedEntity {
+#[sea_orm(entity = "super::subscribers::Entity")]
+Subscriber,
+}
 #[async_trait]
 impl ActiveModelBehavior for ActiveModel {}


@@ -1,4 +1,3 @@
-use async_graphql::SimpleObject;
 use async_trait::async_trait;
 use loco_rs::app::AppContext;
 use sea_orm::{entity::prelude::*, sea_query::OnConflict, ActiveValue, FromJsonQueryResult};
@@ -6,19 +5,13 @@ use serde::{Deserialize, Serialize};
 use super::subscription_bangumi;
-#[derive(
-Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
-)]
-#[graphql(name = "BangumiFilter")]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
 pub struct BangumiFilter {
 pub name: Option<Vec<String>>,
 pub group: Option<Vec<String>>,
 }
-#[derive(
-Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
-)]
-#[graphql(name = "BangumiExtra")]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
 pub struct BangumiExtra {
 pub name_zh: Option<String>,
 pub s_name_zh: Option<String>,
@@ -28,16 +21,14 @@ pub struct BangumiExtra {
 pub s_name_jp: Option<String>,
 }
-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "bangumi")]
-#[graphql(name = "Bangumi")]
 pub struct Model {
 pub created_at: DateTime,
 pub updated_at: DateTime,
 #[sea_orm(primary_key)]
 pub id: i32,
 pub mikan_bangumi_id: Option<String>,
-#[graphql(default_with = "default_subscriber_id")]
 pub subscriber_id: i32,
 pub display_name: String,
 pub raw_name: String,


@@ -45,7 +45,7 @@ pub struct Model {
 pub poster_link: Option<String>,
 pub episode_index: i32,
 pub homepage: Option<String>,
-pub subtitle: Option<String>,
+pub subtitle: Option<Vec<String>>,
 #[sea_orm(default = "false")]
 pub deleted: bool,
 pub source: Option<String>,
@@ -218,7 +218,7 @@ impl ActiveModel {
 poster_link: ActiveValue::Set(bgm.poster_link.clone()),
 episode_index: ActiveValue::Set(raw_meta.episode_index),
 homepage: ActiveValue::Set(Some(homepage.to_string())),
-subtitle: ActiveValue::Set(raw_meta.subtitle),
+subtitle: ActiveValue::Set(raw_meta.subtitle.map(|s| vec![s])),
 source: ActiveValue::Set(raw_meta.source),
 extra: ActiveValue::Set(EpisodeExtra {
 name_zh: raw_meta.name_zh,


@@ -1,4 +1,3 @@
-use async_graphql::SimpleObject;
 use async_trait::async_trait;
 use loco_rs::{
 app::AppContext,
@@ -9,9 +8,7 @@ use serde::{Deserialize, Serialize};
 pub const SEED_SUBSCRIBER: &str = "konobangu";
-#[derive(
-Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
-)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
 pub struct SubscriberBangumiConfig {
 pub leading_group_tag: Option<bool>,
 }
@@ -83,6 +80,8 @@ pub enum RelatedEntity {
 Bangumi,
 #[sea_orm(entity = "super::episodes::Entity")]
 Episode,
+#[sea_orm(entity = "super::auth::Entity")]
+Auth,
 }
 #[derive(Debug, Deserialize, Serialize)]
@@ -107,6 +106,11 @@ impl ActiveModelBehavior for ActiveModel {
 }
 impl Model {
+/// finds a user by the provided pid
+///
+/// # Errors
+///
+/// When could not find user or DB query error
 pub async fn find_by_pid(ctx: &AppContext, pid: &str) -> ModelResult<Self> {
 let db = &ctx.db;
 let parse_uuid = Uuid::parse_str(pid).map_err(|e| ModelError::Any(e.into()))?;
@@ -146,6 +150,12 @@ impl Model {
 Self::find_by_pid(ctx, SEED_SUBSCRIBER).await
 }
+/// Asynchronously creates a user with a password and saves it to the
+/// database.
+///
+/// # Errors
+///
+/// When could not save the user into the DB
 pub async fn create_root(ctx: &AppContext) -> ModelResult<Self> {
 let db = &ctx.db;
 let txn = db.begin().await?;

apps/recorder/user_agents (Normal file, 3487 changed lines)

File diff suppressed because it is too large.


@@ -58,8 +58,6 @@ server:
 # - POST
 # Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
 # max_age: 3600
-fallback:
-enable: false
 # Worker Configuration
 workers:


@@ -9,11 +9,8 @@ prepare-dev-recorder:
 dev-webui:
 pnpm run dev
-dev-proxy:
-pnpm run --filter=proxy dev
 dev-recorder:
-cargo watch -w apps/recorder -x 'recorder start'
+cargo watch -w apps/recorder -w config -x 'recorder start'
 down-recorder:
 cargo run -p recorder --bin recorder_cli -- db down 999 --environment development