Compare commits

..

13 Commits

135 changed files with 4156 additions and 1721 deletions

.vscode/settings.json vendored
View File

@@ -40,13 +40,9 @@
       }
     ],
     "rust-analyzer.cargo.features": "all",
-    "rust-analyzer.testExplorer": true
+    "rust-analyzer.testExplorer": true,
     // https://github.com/rust-lang/rust/issues/141540
-    // "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
-    // "rust-analyzer.check.extraEnv": {
-    //     "CARGO_TARGET_DIR": "target/rust-analyzer"
-    // },
-    // "rust-analyzer.cargo.extraEnv": {
-    //     "CARGO_TARGET_DIR": "target/analyzer"
-    // }
+    "rust-analyzer.runnables.extraEnv": {
+        "CARGO_INCREMENTAL": "0",
+    }
 }

Cargo.lock generated
View File

@@ -552,6 +552,7 @@ dependencies = [
  "diligent-date-parser",
  "never",
  "quick-xml",
+ "serde",
 ]

 [[package]]
@@ -1579,6 +1580,15 @@ dependencies = [
  "cfg-if",
 ]

+[[package]]
+name = "croner"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c344b0690c1ad1c7176fe18eb173e0c927008fdaaa256e40dfd43ddd149c0843"
+dependencies = [
+ "chrono",
+]
+
 [[package]]
 name = "crossbeam-channel"
 version = "0.5.15"
@@ -6749,6 +6759,7 @@ dependencies = [
  "cocoon",
  "color-eyre",
  "convert_case 0.8.0",
+ "croner",
  "ctor",
  "dotenvy",
  "downloader",
@@ -6782,8 +6793,10 @@ dependencies = [
  "once_cell",
  "opendal",
  "openidconnect",
+ "paste",
  "percent-encoding",
  "polars",
+ "quick-xml",
  "quirks_path",
  "rand 0.9.1",
  "regex",
@@ -6795,6 +6808,7 @@ dependencies = [
  "sea-orm",
  "sea-orm-migration",
  "seaography",
+ "secrecy",
  "serde",
  "serde_json",
  "serde_variant",
@@ -6812,6 +6826,7 @@ dependencies = [
  "tracing-appender",
  "tracing-subscriber",
  "tracing-tree",
+ "ts-rs",
  "typed-builder 0.21.0",
  "url",
  "util",
@@ -7216,6 +7231,7 @@ dependencies = [
  "derive_builder",
  "never",
  "quick-xml",
+ "serde",
 ]

 [[package]]
@@ -7629,16 +7645,16 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
 [[package]]
 name = "seaography"
 version = "1.1.4"
-source = "git+https://github.com/dumtruck/seaography.git?rev=a787c3a#a787c3ab83cf1f8275894e1bc1ca3c766b54674b"
+source = "git+https://github.com/dumtruck/seaography.git?rev=9f7fc7c#9f7fc7cf05234abe35fd9144c895321dd2b5db62"
 dependencies = [
  "async-graphql",
  "fnv",
- "heck 0.4.1",
- "itertools 0.12.1",
+ "heck 0.5.0",
+ "itertools 0.14.0",
  "lazy_static",
  "sea-orm",
  "serde_json",
- "thiserror 1.0.69",
+ "thiserror 2.0.12",
 ]

 [[package]]
@@ -7655,6 +7671,16 @@ dependencies = [
  "zeroize",
 ]

+[[package]]
+name = "secrecy"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a"
+dependencies = [
+ "serde",
+ "zeroize",
+]
+
 [[package]]
 name = "security-framework"
 version = "2.11.1"
@@ -8677,6 +8703,15 @@ dependencies = [
  "unic-segment",
 ]

+[[package]]
+name = "termcolor"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
+dependencies = [
+ "winapi-util",
+]
+
 [[package]]
 name = "testcontainers"
 version = "0.24.0"
@@ -9201,6 +9236,28 @@ version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"

+[[package]]
+name = "ts-rs"
+version = "11.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ef1b7a6d914a34127ed8e1fa927eb7088903787bcded4fa3eef8f85ee1568be"
+dependencies = [
+ "thiserror 2.0.12",
+ "ts-rs-macros",
+]
+
+[[package]]
+name = "ts-rs-macros"
+version = "11.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e9d4ed7b4c18cc150a6a0a1e9ea1ecfa688791220781af6e119f9599a8502a0a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.104",
+ "termcolor",
+]
+
 [[package]]
 name = "tungstenite"
 version = "0.26.2"

View File

@@ -15,7 +15,7 @@ resolver = "2"
 [profile.dev]
 debug = 0
 # https://github.com/rust-lang/rust/issues/141540
-incremental = false
+incremental = true # Then only change rust-analyzer incremental

 # [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
 # codegen-backend = "cranelift"
@@ -87,4 +87,4 @@ nanoid = "0.4.0"
 webp = "0.3.0"

 [patch.crates-io]
-seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }
+seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "9f7fc7c" }

View File

@@ -1,17 +0,0 @@
-HOST="konobangu.com"
-DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
-STORAGE_DATA_DIR = "./data"
-AUTH_TYPE = "basic" # or oidc
-BASIC_USER = "konobangu"
-BASIC_PASSWORD = "konobangu"
-# OIDC_ISSUER="https://auth.logto.io/oidc"
-# OIDC_AUDIENCE = "https://konobangu.com/api"
-# OIDC_CLIENT_ID = "client_id"
-# OIDC_CLIENT_SECRET = "client_secret" # optional
-# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
-# OIDC_EXTRA_CLAIM_KEY = ""
-# OIDC_EXTRA_CLAIM_VALUE = ""
-# MIKAN_PROXY = ""
-# MIKAN_PROXY_AUTH_HEADER = ""
-# MIKAN_NO_PROXY = ""
-# MIKAN_PROXY_ACCEPT_INVALID_CERTS = "true"

View File

@@ -1,17 +0,0 @@
-HOST="konobangu.com"
-DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
-STORAGE_DATA_DIR = "./data"
-AUTH_TYPE = "basic" # or oidc
-BASIC_USER = "konobangu"
-BASIC_PASSWORD = "konobangu"
-# OIDC_ISSUER="https://auth.logto.io/oidc"
-# OIDC_AUDIENCE = "https://konobangu.com/api"
-# OIDC_CLIENT_ID = "client_id"
-# OIDC_CLIENT_SECRET = "client_secret" # optional
-# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
-# OIDC_EXTRA_CLAIM_KEY = ""
-# OIDC_EXTRA_CLAIM_VALUE = ""
-MIKAN_PROXY = "http://127.0.0.1:8899"
-# MIKAN_PROXY_AUTH_HEADER = ""
-# MIKAN_NO_PROXY = ""
-MIKAN_PROXY_ACCEPT_INVALID_CERTS = true

View File

@@ -0,0 +1,18 @@
+LOGGER__LEVEL = "debug"
+
+DATABASE__URI = "postgres://konobangu:konobangu@localhost:5432/konobangu"
+
+AUTH__AUTH_TYPE = "basic"
+AUTH__BASIC_USER = "konobangu"
+AUTH__BASIC_PASSWORD = "konobangu"
+
+# AUTH__OIDC_ISSUER = "https://auth.logto.io/oidc"
+# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
+# AUTH__OIDC_CLIENT_ID = "client_id"
+# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
+# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
+# AUTH__OIDC_EXTRA_CLAIM_KEY = ""
+# AUTH__OIDC_EXTRA_CLAIM_VALUE = ""
+
+MIKAN__HTTP_CLIENT__PROXY__ACCEPT_INVALID_CERTS = true
+MIKAN__HTTP_CLIENT__PROXY__SERVER = "http://127.0.0.1:8899"
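Editor's note: the new dotenv keys above use double underscores as nesting separators; the config.rs change further down in this compare merges them via figment's Env provider. A minimal standalone sketch of that mapping, with illustrative struct names (the real config types live in the recorder crate):

use figment::{Figment, providers::Env};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Proxy {
    server: Option<String>,
    accept_invalid_certs: Option<bool>,
}

#[derive(Debug, Deserialize)]
struct HttpClient {
    proxy: Proxy,
}

#[derive(Debug, Deserialize)]
struct Mikan {
    http_client: HttpClient,
}

#[derive(Debug, Deserialize)]
struct Conf {
    mikan: Mikan,
}

fn main() {
    figment::Jail::expect_with(|jail| {
        jail.set_env("MIKAN__HTTP_CLIENT__PROXY__SERVER", "http://127.0.0.1:8899");
        jail.set_env("MIKAN__HTTP_CLIENT__PROXY__ACCEPT_INVALID_CERTS", "true");
        // `split("__")` turns each double underscore into one level of nesting;
        // `lowercase(true)` normalizes env names to the lowercase config keys.
        let conf: Conf = Figment::new()
            .merge(Env::prefixed("").split("__").lowercase(true))
            .extract()?;
        assert_eq!(
            conf.mikan.http_client.proxy.server.as_deref(),
            Some("http://127.0.0.1:8899")
        );
        assert_eq!(conf.mikan.http_client.proxy.accept_invalid_certs, Some(true));
        Ok(())
    });
}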

View File

@@ -0,0 +1,15 @@
+HOST="konobangu.com"
+
+DATABASE__URI = "postgres://konobangu:konobangu@localhost:5432/konobangu"
+
+AUTH__AUTH_TYPE = "basic" # or oidc
+AUTH__BASIC_USER = "konobangu"
+AUTH__BASIC_PASSWORD = "konobangu"
+
+# AUTH__OIDC_ISSUER="https://auth.logto.io/oidc"
+# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
+# AUTH__OIDC_CLIENT_ID = "client_id"
+# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
+# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
+# AUTH__OIDC_EXTRA_CLAIM_KEY = ""
+# AUTH__OIDC_EXTRA_CLAIM_VALUE = ""

View File

@@ -28,4 +28,6 @@ dist/
 temp/*
 !temp/.gitkeep
 tests/resources/mikan/classic_episodes/*/*
 !tests/resources/mikan/classic_episodes/parquet/tiny.parquet
+webui/
+data/

View File

@@ -108,7 +108,7 @@ sea-orm = { version = "1.1", features = [
 ] }
 figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
 sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
-rss = "2"
+rss = { version = "2", features = ["builders", "with-serde"] }
 fancy-regex = "0.14"
 lightningcss = "1.0.0-alpha.66"
 html-escape = "0.2.13"
@@ -125,8 +125,9 @@ seaography = { version = "1.1", features = [
   "with-bigdecimal",
   "with-postgres-array",
   "with-json-as-scalar",
+  "with-custom-as-json",
 ] }
-tower = "0.5.2"
+tower = { version = "0.5.2", features = ["util"] }
 tower-http = { version = "0.6", features = [
   "trace",
   "catch-panic",
@@ -159,6 +160,15 @@ polars = { version = "0.49.1", features = [
   "lazy",
   "diagonal_concat",
 ], optional = true }
+quick-xml = { version = "0.37.5", features = [
+  "serialize",
+  "serde-types",
+  "serde",
+] }
+croner = "2.2.0"
+ts-rs = "11.0.1"
+secrecy = { version = "0.10.3", features = ["serde"] }
+paste = "1.0.15"

 [dev-dependencies]
 inquire = { workspace = true }
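Editor's note: croner is the new cron-expression engine (the cron-capable task types show up in the ts-rs bindings below, and a transparent CronError variant is added to RecorderError later in this compare). A quick sketch of the croner 2.x API; the pattern and usage here are illustrative, not taken from this repo:

use chrono::Utc;
use croner::Cron;

fn main() -> Result<(), croner::errors::CronError> {
    // Parse a five-field cron pattern once, then query it.
    let cron = Cron::new("0 */6 * * *").parse()?; // every six hours
    let now = Utc::now();
    let next = cron.find_next_occurrence(&now, false)?;
    println!("matches now: {}, next run: {next}", cron.is_time_matching(&now)?);
    Ok(())
}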

View File

@@ -0,0 +1,6 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+import type { SyncOneSubscriptionFeedsFullTaskInput } from "./SyncOneSubscriptionFeedsFullTaskInput";
+import type { SyncOneSubscriptionFeedsIncrementalTaskInput } from "./SyncOneSubscriptionFeedsIncrementalTaskInput";
+import type { SyncOneSubscriptionSourcesTaskInput } from "./SyncOneSubscriptionSourcesTaskInput";
+export type SubscriberTaskInput = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTaskInput | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTaskInput | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTaskInput;

View File

@@ -0,0 +1,6 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+import type { SyncOneSubscriptionFeedsFullTask } from "./SyncOneSubscriptionFeedsFullTask";
+import type { SyncOneSubscriptionFeedsIncrementalTask } from "./SyncOneSubscriptionFeedsIncrementalTask";
+import type { SyncOneSubscriptionSourcesTask } from "./SyncOneSubscriptionSourcesTask";
+export type SubscriberTaskType = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTask | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTask | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTask;

View File

@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionFeedsFullTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionFeedsFullTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionFeedsIncrementalTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionFeedsIncrementalTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionSourcesTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };

View File

@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type SyncOneSubscriptionSourcesTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };
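Editor's note: the generated bindings above come from ts-rs derives on the Rust task types. A hypothetical sketch of the shape of derive that produces this kind of output — the real definitions live in the recorder crate and may differ:

use serde::{Deserialize, Serialize};
use ts_rs::TS;

#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SyncOneSubscriptionSourcesTask {
    pub subscription_id: i32,
    pub subscriber_id: i32,
    // `optional = nullable` renders as `cronId?: number | null` in the emitted .ts file.
    #[ts(optional = nullable)]
    pub cron_id: Option<i32>,
}

// An internally tagged serde enum becomes the `{ "taskType": ... } & T` union seen above.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(tag = "taskType", rename_all = "snake_case")]
#[ts(export)]
pub enum SubscriberTaskType {
    SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTask),
}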

View File

@@ -1,4 +1,4 @@
-use std::time::Duration;
+use std::{str::FromStr, time::Duration};

 use color_eyre::{Result, eyre::OptionExt};
 use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
@@ -6,7 +6,8 @@ use inquire::{Password, Text, validator::Validation};
 use recorder::{
     crypto::UserPassCredential,
     extract::mikan::{
-        MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
+        MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
+        build_mikan_bangumi_expand_subscribed_url,
         extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
         extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
     },
@@ -190,10 +191,10 @@ async fn main() -> Result<()> {
             );
             String::from_utf8(bangumi_rss_doppel_path.read()?)?
         };
-        let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
+        let rss_items = MikanRssRoot::from_str(&bangumi_rss_data)?.channel.items;
         rss_items
             .into_iter()
-            .map(MikanRssEpisodeItem::try_from)
+            .map(MikanRssItemMeta::try_from)
             .collect::<Result<Vec<_>, _>>()
     }?;
     for rss_item in rss_items {

View File

@@ -1,10 +1,10 @@
-use std::time::Duration;
+use std::{str::FromStr, time::Duration};

 use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
 use recorder::{
     errors::RecorderResult,
     extract::mikan::{
-        MikanClient, MikanConfig, MikanRssEpisodeItem,
+        MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
         extract_mikan_episode_meta_from_episode_homepage_html,
     },
     test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
@@ -41,12 +41,12 @@ async fn main() -> RecorderResult<()> {
     let mikan_base_url = mikan_scrape_client.base_url().clone();
     tracing::info!("Scraping subscriber subscription...");
     let subscriber_subscription =
-        fs::read("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
-    let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
-    let rss_items: Vec<MikanRssEpisodeItem> = channel
+        fs::read_to_string("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
+    let channel = MikanRssRoot::from_str(&subscriber_subscription)?.channel;
+    let rss_items: Vec<MikanRssItemMeta> = channel
         .items
         .into_iter()
-        .map(MikanRssEpisodeItem::try_from)
+        .map(MikanRssItemMeta::try_from)
         .collect::<Result<Vec<_>, _>>()?;
     for rss_item in rss_items {
         let episode_homepage_meta = {
@@ -150,11 +150,11 @@ async fn main() -> RecorderResult<()> {
             String::from_utf8(bangumi_rss_doppel_path.read()?)?
         };
-        let channel = rss::Channel::read_from(bangumi_rss_data.as_bytes())?;
-        let rss_items: Vec<MikanRssEpisodeItem> = channel
+        let rss_items: Vec<MikanRssItemMeta> = MikanRssRoot::from_str(&bangumi_rss_data)?
+            .channel
             .items
             .into_iter()
-            .map(MikanRssEpisodeItem::try_from)
+            .map(MikanRssItemMeta::try_from)
             .collect::<Result<Vec<_>, _>>()?;
         for rss_item in rss_items {
             {

View File

@@ -0,0 +1,6 @@
+{
+  "name": "recorder",
+  "version": "0.0.1",
+  "private": true,
+  "type": "module"
+}

View File

@@ -4,8 +4,8 @@
 enable = true
 # Enable pretty backtrace (sets RUST_BACKTRACE=1)
 pretty_backtrace = true
-level = "info"
 # Log level, options: trace, debug, info, warn or error.
+level = "debug"
 # Define the logging format. options: compact, pretty or Json
 format = "compact"
 # By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries
@@ -77,7 +77,7 @@ max_connections = 10
 auto_migrate = true

 [storage]
-data_dir = '{{ get_env(name="STORAGE_DATA_DIR", default="./data") }}'
+data_dir = './data'

 [mikan]
 base_url = "https://mikanani.me/"
@@ -89,26 +89,6 @@ leaky_bucket_initial_tokens = 1
 leaky_bucket_refill_tokens = 1
 leaky_bucket_refill_interval = 500

-[mikan.http_client.proxy]
-server = '{{ get_env(name="MIKAN_PROXY", default = "") }}'
-auth_header = '{{ get_env(name="MIKAN_PROXY_AUTH_HEADER", default = "") }}'
-no_proxy = '{{ get_env(name="MIKAN_NO_PROXY", default = "") }}'
-accept_invalid_certs = '{{ get_env(name="MIKAN_PROXY_ACCEPT_INVALID_CERTS", default = "false") }}'
-
-[auth]
-auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
-basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
-basic_password = '{{ get_env(name="BASIC_PASSWORD", default = "konobangu") }}'
-oidc_issuer = '{{ get_env(name="OIDC_ISSUER", default = "") }}'
-oidc_audience = '{{ get_env(name="OIDC_AUDIENCE", default = "") }}'
-oidc_client_id = '{{ get_env(name="OIDC_CLIENT_ID", default = "") }}'
-oidc_client_secret = '{{ get_env(name="OIDC_CLIENT_SECRET", default = "") }}'
-oidc_extra_scopes = '{{ get_env(name="OIDC_EXTRA_SCOPES", default = "") }}'
-oidc_extra_claim_key = '{{ get_env(name="OIDC_EXTRA_CLAIM_KEY", default = "") }}'
-oidc_extra_claim_value = '{{ get_env(name="OIDC_EXTRA_CLAIM_VALUE", default = "") }}'
-
 [graphql]
 # depth_limit = inf
 # complexity_limit = inf

View File

@@ -72,6 +72,11 @@ impl AppBuilder {
     }

     pub async fn build(self) -> RecorderResult<App> {
+        if self.working_dir != "." {
+            std::env::set_current_dir(&self.working_dir)?;
+            println!("set current dir to working dir: {}", self.working_dir);
+        }
+
         self.load_env().await?;

         let config = self.load_config().await?;
@@ -86,22 +91,12 @@ impl AppBuilder {
     }

     pub async fn load_env(&self) -> RecorderResult<()> {
-        AppConfig::load_dotenv(
-            &self.environment,
-            &self.working_dir,
-            self.dotenv_file.as_deref(),
-        )
-        .await?;
+        AppConfig::load_dotenv(&self.environment, self.dotenv_file.as_deref()).await?;
         Ok(())
     }

     pub async fn load_config(&self) -> RecorderResult<AppConfig> {
-        let config = AppConfig::load_config(
-            &self.environment,
-            &self.working_dir,
-            self.config_file.as_deref(),
-        )
-        .await?;
+        let config = AppConfig::load_config(&self.environment, self.config_file.as_deref()).await?;
         Ok(config)
     }
@@ -136,7 +131,7 @@ impl AppBuilder {
     }

     pub fn working_dir_from_manifest_dir(self) -> Self {
-        let manifest_dir = if cfg!(debug_assertions) || cfg!(test) {
+        let manifest_dir = if cfg!(debug_assertions) || cfg!(test) || cfg!(feature = "playground") {
             env!("CARGO_MANIFEST_DIR")
         } else {
             "./apps/recorder"

View File

@@ -1,8 +1,13 @@
-use std::{fs, path::Path, str};
+use std::{
+    collections::HashMap,
+    fs,
+    path::Path,
+    str::{self, FromStr},
+};

 use figment::{
     Figment, Provider,
-    providers::{Format, Json, Toml, Yaml},
+    providers::{Env, Format, Json, Toml, Yaml},
 };
 use itertools::Itertools;
 use serde::{Deserialize, Serialize};
@@ -55,8 +60,8 @@ impl AppConfig {
             format!(".{}.local", environment.full_name()),
             format!(".{}.local", environment.short_name()),
             String::from(".local"),
-            environment.full_name().to_string(),
-            environment.short_name().to_string(),
+            format!(".{}", environment.full_name()),
+            format!(".{}", environment.short_name()),
             String::from(""),
         ]
     }
@@ -65,6 +70,102 @@ impl AppConfig {
         Toml::string(DEFAULT_CONFIG_MIXIN)
     }

+    fn build_enhanced_tera_engine() -> tera::Tera {
+        let mut tera = tera::Tera::default();
+        tera.register_filter(
+            "cast_to",
+            |value: &tera::Value,
+             args: &HashMap<String, tera::Value>|
+             -> tera::Result<tera::Value> {
+                let target_type = args
+                    .get("type")
+                    .and_then(|v| v.as_str())
+                    .ok_or_else(|| tera::Error::msg("invalid target type: should be string"))?;
+                let target_type = TeraCastToFilterType::from_str(target_type)
+                    .map_err(|e| tera::Error::msg(format!("invalid target type: {e}")))?;
+                let input_str = value.as_str().unwrap_or("");
+                match target_type {
+                    TeraCastToFilterType::Boolean => {
+                        let is_true = matches!(input_str.to_lowercase().as_str(), "true" | "1");
+                        let is_false = matches!(input_str.to_lowercase().as_str(), "false" | "0");
+                        if is_true {
+                            Ok(tera::Value::Bool(true))
+                        } else if is_false {
+                            Ok(tera::Value::Bool(false))
+                        } else {
+                            Err(tera::Error::msg(
+                                "target type is bool but value is not a boolean like true, false, \
+                                 1, 0",
+                            ))
+                        }
+                    }
+                    TeraCastToFilterType::Integer => {
+                        let parsed = input_str.parse::<i64>().map_err(|e| {
+                            tera::Error::call_filter("invalid integer".to_string(), e)
+                        })?;
+                        Ok(tera::Value::Number(serde_json::Number::from(parsed)))
+                    }
+                    TeraCastToFilterType::Unsigned => {
+                        let parsed = input_str.parse::<u64>().map_err(|e| {
+                            tera::Error::call_filter("invalid unsigned integer".to_string(), e)
+                        })?;
+                        Ok(tera::Value::Number(serde_json::Number::from(parsed)))
+                    }
+                    TeraCastToFilterType::Float => {
+                        let parsed = input_str.parse::<f64>().map_err(|e| {
+                            tera::Error::call_filter("invalid float".to_string(), e)
+                        })?;
+                        Ok(tera::Value::Number(
+                            serde_json::Number::from_f64(parsed).ok_or_else(|| {
+                                tera::Error::msg("failed to convert f64 to serde_json::Number")
+                            })?,
+                        ))
+                    }
+                    TeraCastToFilterType::String => Ok(tera::Value::String(input_str.to_string())),
+                    TeraCastToFilterType::Null => Ok(tera::Value::Null),
+                }
+            },
+        );
+        tera.register_filter(
+            "try_auto_cast",
+            |value: &tera::Value,
+             _args: &HashMap<String, tera::Value>|
+             -> tera::Result<tera::Value> {
+                let input_str = value.as_str().unwrap_or("");
+                if input_str == "null" {
+                    return Ok(tera::Value::Null);
+                }
+                if matches!(input_str, "true" | "false") {
+                    return Ok(tera::Value::Bool(input_str == "true"));
+                }
+                if let Ok(parsed) = input_str.parse::<i64>() {
+                    return Ok(tera::Value::Number(serde_json::Number::from(parsed)));
+                }
+                if let Ok(parsed) = input_str.parse::<u64>() {
+                    return Ok(tera::Value::Number(serde_json::Number::from(parsed)));
+                }
+                if let Ok(parsed) = input_str.parse::<f64>() {
+                    return Ok(tera::Value::Number(
+                        serde_json::Number::from_f64(parsed).ok_or_else(|| {
+                            tera::Error::msg("failed to convert f64 to serde_json::Number")
+                        })?,
+                    ));
+                }
+                Ok(tera::Value::String(input_str.to_string()))
+            },
+        );
+        tera
+    }
+
     pub fn merge_provider_from_file(
         fig: Figment,
         filepath: impl AsRef<Path>,
@@ -72,11 +173,9 @@ impl AppConfig {
     ) -> RecorderResult<Figment> {
         let content = fs::read_to_string(filepath)?;

-        let rendered = tera::Tera::one_off(
-            &content,
-            &tera::Context::from_value(serde_json::json!({}))?,
-            false,
-        )?;
+        let mut tera_engine = AppConfig::build_enhanced_tera_engine();
+        let rendered =
+            tera_engine.render_str(&content, &tera::Context::from_value(serde_json::json!({}))?)?;

         Ok(match ext {
             ".toml" => fig.merge(Toml::string(&rendered)),
@@ -88,13 +187,12 @@ impl AppConfig {

     pub async fn load_dotenv(
         environment: &Environment,
-        working_dir: &str,
         dotenv_file: Option<&str>,
     ) -> RecorderResult<()> {
         let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
             vec![dotenv_file]
         } else {
-            vec![Some(working_dir)]
+            vec![Some(".")]
         };

         let priority_suffix = &AppConfig::priority_suffix(environment);
@@ -111,11 +209,16 @@ impl AppConfig {
                 for f in try_filenames.iter() {
                     let p = try_dotenv_file_or_dir_path.join(f);
                     if p.exists() && p.is_file() {
+                        println!("Loading dotenv file: {}", p.display());
                         dotenvy::from_path(p)?;
                         break;
                     }
                 }
             } else if try_dotenv_file_or_dir_path.is_file() {
+                println!(
+                    "Loading dotenv file: {}",
+                    try_dotenv_file_or_dir_path.display()
+                );
                 dotenvy::from_path(try_dotenv_file_or_dir_path)?;
                 break;
             }
@@ -127,13 +230,12 @@ impl AppConfig {

     pub async fn load_config(
         environment: &Environment,
-        working_dir: &str,
         config_file: Option<&str>,
     ) -> RecorderResult<AppConfig> {
         let try_config_file_or_dirs = if config_file.is_some() {
             vec![config_file]
         } else {
-            vec![Some(working_dir)]
+            vec![Some(".")]
         };

         let allowed_extensions = &AppConfig::allowed_extension();
@@ -159,6 +261,7 @@ impl AppConfig {
                     let p = try_config_file_or_dir_path.join(f);
                     if p.exists() && p.is_file() {
                         fig = AppConfig::merge_provider_from_file(fig, &p, ext)?;
+                        println!("Loaded config file: {}", p.display());
                         break;
                     }
                 }
@@ -169,13 +272,52 @@ impl AppConfig {
                 {
                     fig =
                         AppConfig::merge_provider_from_file(fig, try_config_file_or_dir_path, ext)?;
+                    println!(
+                        "Loaded config file: {}",
+                        try_config_file_or_dir_path.display()
+                    );
                     break;
                 }
             }
         }

+        fig = fig.merge(Env::prefixed("").split("__").lowercase(true));
+
         let app_config: AppConfig = fig.extract()?;

         Ok(app_config)
     }
 }
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+enum TeraCastToFilterType {
+    #[serde(alias = "str")]
+    String,
+    #[serde(alias = "bool")]
+    Boolean,
+    #[serde(alias = "int")]
+    Integer,
+    #[serde(alias = "uint")]
+    Unsigned,
+    #[serde(alias = "float")]
+    Float,
+    #[serde(alias = "null")]
+    Null,
+}
+
+impl FromStr for TeraCastToFilterType {
+    type Err = String;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s {
+            "string" | "str" => Ok(TeraCastToFilterType::String),
+            "boolean" | "bool" => Ok(TeraCastToFilterType::Boolean),
+            "integer" | "int" => Ok(TeraCastToFilterType::Integer),
+            "unsigned" | "uint" => Ok(TeraCastToFilterType::Unsigned),
+            "float" => Ok(TeraCastToFilterType::Float),
+            "null" => Ok(TeraCastToFilterType::Null),
+            _ => Err(format!("invalid target type: {s}")),
+        }
    }
 }
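Editor's note: the `cast_to` filter registered above lets config templates turn env-sourced strings into typed TOML values at render time. A trimmed standalone reproduction, assuming tera 1.x; the filter body is simplified from the diff for illustration:

use std::collections::HashMap;
use tera::{Context, Tera, Value};

fn main() -> tera::Result<()> {
    let mut tera = Tera::default();
    // Minimal stand-in for the `cast_to` filter built in build_enhanced_tera_engine.
    tera.register_filter(
        "cast_to",
        |value: &Value, args: &HashMap<String, Value>| -> tera::Result<Value> {
            let input = value.as_str().unwrap_or("");
            match args.get("type").and_then(Value::as_str) {
                Some("bool") => Ok(Value::Bool(matches!(input, "true" | "1"))),
                Some("int") => Ok(Value::Number(
                    input.parse::<i64>().map_err(tera::Error::msg)?.into(),
                )),
                _ => Ok(Value::String(input.to_string())),
            }
        },
    );
    // The string "1" becomes a real TOML boolean in the rendered config.
    let rendered = tera.render_str(
        r#"accept_invalid_certs = {{ "1" | cast_to(type="bool") }}"#,
        &Context::new(),
    )?;
    assert_eq!(rendered, "accept_invalid_certs = true");
    Ok(())
}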

View File

@@ -1,11 +1,13 @@
 use std::{net::SocketAddr, sync::Arc};

-use axum::Router;
+use axum::{Router, middleware::from_fn_with_state};
 use tokio::{net::TcpSocket, signal};
+use tower_http::services::{ServeDir, ServeFile};
 use tracing::instrument;

 use super::{builder::AppBuilder, context::AppContextTrait};
 use crate::{
+    auth::webui_auth_middleware,
     errors::{RecorderError, RecorderResult},
     web::{
         controller::{self, core::ControllerTrait},
@@ -58,13 +60,19 @@ impl App {
             controller::oidc::create(context.clone()),
             controller::metadata::create(context.clone()),
             controller::r#static::create(context.clone()),
-            controller::feeds::create(context.clone()),
+            controller::feeds::create(context.clone())
         )?;

         for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
             router = c.apply_to(router);
         }

+        router = router
+            .fallback_service(
+                ServeDir::new("webui").not_found_service(ServeFile::new("webui/index.html")),
+            )
+            .layer(from_fn_with_state(context.clone(), webui_auth_middleware));
+
         let middlewares = default_middleware_stack(context.clone());
         for mid in middlewares {
             if mid.is_enabled() {
@@ -99,26 +107,12 @@ impl App {
                 Ok::<(), RecorderError>(())
             },
             async {
-                {
-                    let monitor = task.setup_monitor().await?;
-                    if graceful_shutdown {
-                        monitor
-                            .run_with_signal(async move {
-                                Self::shutdown_signal().await;
-                                tracing::info!("apalis shutting down...");
-                                Ok(())
-                            })
-                            .await?;
-                    } else {
-                        monitor.run().await?;
-                    }
-                }
-                Ok::<(), RecorderError>(())
-            },
-            async {
-                let listener = task.setup_listener().await?;
-                listener.listen().await?;
+                task.run(if graceful_shutdown {
+                    Some(Self::shutdown_signal)
+                } else {
+                    None
+                })
+                .await?;
                 Ok::<(), RecorderError>(())
             }

View File

@@ -7,7 +7,10 @@ use axum::{
     response::{IntoResponse, Response},
 };

-use crate::{app::AppContextTrait, auth::AuthServiceTrait};
+use crate::{
+    app::AppContextTrait,
+    auth::{AuthService, AuthServiceTrait},
+};

 pub async fn auth_middleware(
     State(ctx): State<Arc<dyn AppContextTrait>>,
@@ -38,3 +41,37 @@ pub async fn auth_middleware(
     response
 }
+
+pub async fn webui_auth_middleware(
+    State(ctx): State<Arc<dyn AppContextTrait>>,
+    request: Request,
+    next: Next,
+) -> Response {
+    if (!request.uri().path().starts_with("/api"))
+        && let AuthService::Basic(auth_service) = ctx.auth()
+    {
+        let (mut parts, body) = request.into_parts();
+        let mut response = match auth_service
+            .extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
+            .await
+        {
+            Ok(auth_user_info) => {
+                let mut request = Request::from_parts(parts, body);
+                request.extensions_mut().insert(auth_user_info);
+                next.run(request).await
+            }
+            Err(auth_error) => auth_error.into_response(),
+        };
+
+        if let Some(header_value) = auth_service.www_authenticate_header_value() {
+            response
+                .headers_mut()
+                .insert(header::WWW_AUTHENTICATE, header_value);
+        };
+
+        response
+    } else {
+        next.run(request).await
+    }
+}

View File

@@ -7,5 +7,5 @@ pub mod service;

 pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig};
 pub use errors::AuthError;
-pub use middleware::auth_middleware;
+pub use middleware::{auth_middleware, webui_auth_middleware};
 pub use service::{AuthService, AuthServiceTrait, AuthUserInfo};

View File

@@ -21,7 +21,6 @@ use openidconnect::{
     OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse,
     core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
 };
-use sea_orm::DbErr;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use snafu::ResultExt;
@@ -338,9 +337,9 @@ impl AuthServiceTrait for OidcAuthService {
             }
         }
         let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
-            Err(RecorderError::DbError {
-                source: DbErr::RecordNotFound(..),
-            }) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
+            Err(RecorderError::ModelEntityNotFound { .. }) => {
+                crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
+            }
             r => r,
         }
         .map_err(|e| {

View File

@@ -18,6 +18,10 @@ use crate::{
 #[derive(Snafu, Debug)]
 #[snafu(visibility(pub(crate)))]
 pub enum RecorderError {
+    #[snafu(transparent)]
+    SeaographyError { source: seaography::SeaographyError },
+    #[snafu(transparent)]
+    CronError { source: croner::errors::CronError },
     #[snafu(display(
         "HTTP {status} {reason}, source = {source:?}",
         status = status,
@@ -49,6 +53,8 @@ pub enum RecorderError {
     InvalidMethodError,
     #[snafu(display("Invalid header value"))]
     InvalidHeaderValueError,
+    #[snafu(transparent)]
+    QuickXmlDeserializeError { source: quick_xml::DeError },
     #[snafu(display("Invalid header name"))]
     InvalidHeaderNameError,
     #[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
@@ -118,8 +124,13 @@ pub enum RecorderError {
         #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
         source: OptDynErr,
     },
-    #[snafu(display("Model Entity {entity} not found or not belong to subscriber"))]
-    ModelEntityNotFound { entity: Cow<'static, str> },
+    #[snafu(display("Model Entity {entity} not found or not belong to subscriber{}", (
+        detail.as_ref().map(|detail| format!(" : {detail}"))).unwrap_or_default()
+    ))]
+    ModelEntityNotFound {
+        entity: Cow<'static, str>,
+        detail: Option<String>,
+    },
     #[snafu(transparent)]
     FetchError { source: FetchError },
     #[snafu(display("Credential3rdError: {message}, source = {source}"))]
@@ -183,9 +194,17 @@ impl RecorderError {
         }
     }

-    pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
-        Self::DbError {
-            source: sea_orm::DbErr::RecordNotFound(detail.to_string()),
+    pub fn from_entity_not_found<E: sea_orm::EntityTrait>() -> Self {
+        Self::ModelEntityNotFound {
+            entity: std::any::type_name::<E::Model>().into(),
+            detail: None,
+        }
+    }
+
+    pub fn from_entity_not_found_detail<E: sea_orm::EntityTrait, T: ToString>(detail: T) -> Self {
+        Self::ModelEntityNotFound {
+            entity: std::any::type_name::<E::Model>().into(),
+            detail: Some(detail.to_string()),
         }
     }
 }
@@ -250,9 +269,9 @@ impl IntoResponse for RecorderError {
                 )
                     .into_response()
             }
-            Self::ModelEntityNotFound { entity } => (
+            merr @ Self::ModelEntityNotFound { .. } => (
                 StatusCode::NOT_FOUND,
-                Json::<StandardErrorResponse>(StandardErrorResponse::from(entity.to_string())),
+                Json::<StandardErrorResponse>(StandardErrorResponse::from(merr.to_string())),
             )
                 .into_response(),
             err => (
@@ -294,4 +313,10 @@ impl From<http::method::InvalidMethod> for RecorderError {
     }
 }

+impl From<async_graphql::Error> for RecorderError {
+    fn from(error: async_graphql::Error) -> Self {
+        seaography::SeaographyError::AsyncGraphQLError(error).into()
+    }
+}
+
 pub type RecorderResult<T> = Result<T, RecorderError>;
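Editor's note: the not-found variant now carries an optional detail string and derives the entity name from the model type instead of wrapping a DbErr. A standalone sketch of the display behavior, mirroring the snafu attribute from the diff with illustrative values:

use std::borrow::Cow;
use snafu::Snafu;

#[derive(Debug, Snafu)]
enum DemoError {
    #[snafu(display(
        "Model Entity {entity} not found or not belong to subscriber{}",
        detail.as_ref().map(|d| format!(" : {d}")).unwrap_or_default()
    ))]
    ModelEntityNotFound {
        entity: Cow<'static, str>,
        detail: Option<String>,
    },
}

fn main() {
    let err = DemoError::ModelEntityNotFound {
        entity: "credential_3rd::Model".into(),
        detail: Some("credential id 42 not found".into()),
    };
    // Prints: Model Entity credential_3rd::Model not found or not belong to
    // subscriber : credential id 42 not found
    println!("{err}");
}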

View File

@@ -167,6 +167,7 @@ impl ForwardedRelatedInfo {
             .as_ref()
             .and_then(|s| s.host.as_deref())
             .or(self.x_forwarded_host.as_deref())
+            .or(self.host.as_deref())
             .or(self.uri.host())
     }

View File

@@ -4,7 +4,7 @@ use fetch::{HttpClient, HttpClientTrait};
 use maplit::hashmap;
 use scraper::{Html, Selector};
 use sea_orm::{
-    ActiveModelTrait, ActiveValue::Set, ColumnTrait, DbErr, EntityTrait, QueryFilter, TryIntoModel,
+    ActiveModelTrait, ActiveValue::Set, ColumnTrait, EntityTrait, QueryFilter, TryIntoModel,
 };
 use url::Url;
 use util::OptDynErr;
@@ -227,9 +227,12 @@ impl MikanClient {
             self.fork_with_userpass_credential(userpass_credential)
                 .await
         } else {
-            Err(RecorderError::from_db_record_not_found(
-                DbErr::RecordNotFound(format!("credential={credential_id} not found")),
-            ))
+            Err(RecorderError::from_entity_not_found_detail::<
+                credential_3rd::Entity,
+                _,
+            >(format!(
+                "credential id {credential_id} not found"
+            )))
         }
     }

View File

@@ -2,6 +2,7 @@ mod client;
 mod config;
 mod constants;
 mod credential;
+mod rss;
 mod subscription;
 mod web;

@@ -18,17 +19,19 @@ pub use constants::{
     MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
 };
 pub use credential::MikanCredentialForm;
+pub use rss::{
+    MikanRssChannel, MikanRssItem, MikanRssItemMeta, MikanRssItemTorrentExtension, MikanRssRoot,
+    build_mikan_bangumi_subscription_rss_url, build_mikan_subscriber_subscription_rss_url,
+};
 pub use subscription::{
     MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
 };
 pub use web::{
     MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
     MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanFansubHash,
-    MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
-    MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_expand_subscribed_url,
-    build_mikan_bangumi_homepage_url, build_mikan_bangumi_subscription_rss_url,
-    build_mikan_episode_homepage_url, build_mikan_season_flow_url,
-    build_mikan_subscriber_subscription_rss_url,
+    MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionUrlMeta,
+    build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url,
+    build_mikan_episode_homepage_url, build_mikan_season_flow_url,
     extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
     extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
     extract_mikan_episode_meta_from_episode_homepage_html,

View File

@@ -0,0 +1,215 @@
+use std::{borrow::Cow, str::FromStr};
+
+use chrono::{DateTime, Utc};
+use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
+use serde::{Deserialize, Serialize};
+use url::Url;
+
+use crate::{
+    errors::{RecorderResult, app_error::RecorderError},
+    extract::{
+        bittorrent::EpisodeEnclosureMeta,
+        mikan::{
+            MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_RSS_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
+            MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
+            MikanEpisodeHash, build_mikan_episode_homepage_url,
+        },
+    },
+};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct MikanRssItemEnclosure {
+    #[serde(rename = "@type")]
+    pub r#type: String,
+    #[serde(rename = "@length")]
+    pub length: i64,
+    #[serde(rename = "@url")]
+    pub url: String,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+pub struct MikanRssItemTorrentExtension {
+    pub pub_date: String,
+    pub content_length: i64,
+    pub link: String,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct MikanRssItem {
+    pub torrent: MikanRssItemTorrentExtension,
+    pub link: String,
+    pub title: String,
+    pub enclosure: MikanRssItemEnclosure,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct MikanRssChannel {
+    #[serde(rename = "item", default)]
+    pub items: Vec<MikanRssItem>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct MikanRssRoot {
+    pub channel: MikanRssChannel,
+}
+
+impl FromStr for MikanRssRoot {
+    type Err = RecorderError;
+
+    fn from_str(source: &str) -> RecorderResult<Self> {
+        let me = quick_xml::de::from_str(source)?;
+        Ok(me)
+    }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub struct MikanRssItemMeta {
+    pub title: String,
+    pub torrent_link: Url,
+    pub content_length: i64,
+    pub mime: String,
+    pub pub_date: Option<DateTime<Utc>>,
+    pub mikan_episode_id: String,
+    pub magnet_link: Option<String>,
+}
+
+impl MikanRssItemMeta {
+    pub fn build_homepage_url(&self, mikan_base_url: Url) -> Url {
+        build_mikan_episode_homepage_url(mikan_base_url, &self.mikan_episode_id)
+    }
+
+    pub fn parse_pub_date(pub_date: &str) -> chrono::ParseResult<DateTime<Utc>> {
+        DateTime::parse_from_rfc2822(pub_date)
+            .or_else(|_| DateTime::parse_from_rfc3339(pub_date))
+            .or_else(|_| DateTime::parse_from_rfc3339(&format!("{pub_date}+08:00")))
+            .map(|s| s.with_timezone(&Utc))
+    }
+}
+
+impl TryFrom<MikanRssItem> for MikanRssItemMeta {
+    type Error = RecorderError;
+
+    fn try_from(item: MikanRssItem) -> Result<Self, Self::Error> {
+        let torrent = item.torrent;
+        let enclosure = item.enclosure;
+
+        let mime_type = enclosure.r#type;
+        if mime_type != BITTORRENT_MIME_TYPE {
+            return Err(RecorderError::MimeError {
+                expected: String::from(BITTORRENT_MIME_TYPE),
+                found: mime_type.to_string(),
+                desc: String::from("MikanRssItem"),
+            });
+        }
+
+        let title = item.title;
+
+        let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
+            RecorderError::from_mikan_rss_invalid_field_and_source(
+                "enclosure_url:enclosure.link".into(),
+                err,
+            )
+        })?;
+
+        let homepage = Url::parse(&item.link).map_err(|err| {
+            RecorderError::from_mikan_rss_invalid_field_and_source(
+                "enclosure_url:enclosure.link".into(),
+                err,
+            )
+        })?;
+
+        let MikanEpisodeHash {
+            mikan_episode_id, ..
+        } = MikanEpisodeHash::from_homepage_url(&homepage).ok_or_else(|| {
+            RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
+        })?;
+
+        Ok(MikanRssItemMeta {
+            title,
+            torrent_link: enclosure_url,
+            content_length: enclosure.length,
+            mime: mime_type,
+            pub_date: Self::parse_pub_date(&torrent.pub_date).ok(),
+            mikan_episode_id,
+            magnet_link: None,
+        })
+    }
+}
+
+impl From<MikanRssItemMeta> for EpisodeEnclosureMeta {
+    fn from(item: MikanRssItemMeta) -> Self {
+        Self {
+            magnet_link: item.magnet_link,
+            torrent_link: Some(item.torrent_link.to_string()),
+            pub_date: item.pub_date,
+            content_length: Some(item.content_length),
+        }
+    }
+}
+
+pub fn build_mikan_subscriber_subscription_rss_url(
+    mikan_base_url: Url,
+    mikan_subscription_token: &str,
+) -> Url {
+    let mut url = mikan_base_url;
+    url.set_path(MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH);
+    url.query_pairs_mut().append_pair(
+        MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
+        mikan_subscription_token,
+    );
+    url
+}
+
+pub fn build_mikan_bangumi_subscription_rss_url(
+    mikan_base_url: Url,
+    mikan_bangumi_id: &str,
+    mikan_fansub_id: Option<&str>,
+) -> Url {
+    let mut url = mikan_base_url;
+    url.set_path(MIKAN_BANGUMI_RSS_PATH);
+    url.query_pairs_mut()
+        .append_pair(MIKAN_BANGUMI_ID_QUERY_KEY, mikan_bangumi_id);
+    if let Some(mikan_fansub_id) = mikan_fansub_id {
+        url.query_pairs_mut()
+            .append_pair(MIKAN_FANSUB_ID_QUERY_KEY, mikan_fansub_id);
+    };
+    url
+}
+
+#[cfg(test)]
+mod test {
+    #![allow(unused_variables)]
+    use std::fs;
+
+    use rstest::{fixture, rstest};
+    use tracing::Level;
+
+    use super::*;
+    use crate::{errors::RecorderResult, test_utils::tracing::try_init_testing_tracing};
+
+    #[fixture]
+    fn before_each() {
+        try_init_testing_tracing(Level::DEBUG);
+    }
+
+    #[rstest]
+    #[test]
+    fn test_mikan_rss_episode_item_try_from_rss_item(before_each: ()) -> RecorderResult<()> {
+        let rss_str = fs::read_to_string(
+            "tests/resources/mikan/doppel/RSS/Bangumi-bangumiId%3D3288%26subgroupid%3D370.html",
+        )?;
+
+        let mut channel = MikanRssRoot::from_str(&rss_str)?.channel;
+
+        assert!(!channel.items.is_empty());
+
+        let item = channel.items.pop().unwrap();
+
+        let episode_item = MikanRssItemMeta::try_from(item.clone())?;
+
+        assert!(episode_item.pub_date.is_some());
+
+        Ok(())
+    }
+}
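Editor's note: the `@`-prefixed serde renames in the new rss.rs are quick-xml's convention for XML attributes (hence the "serialize"/"serde" features added to Cargo.toml earlier). A self-contained sketch with a made-up enclosure element:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Enclosure {
    // A leading `@` in the rename maps the field to an XML attribute
    // rather than a child element.
    #[serde(rename = "@type")]
    r#type: String,
    #[serde(rename = "@length")]
    length: i64,
    #[serde(rename = "@url")]
    url: String,
}

fn main() -> Result<(), quick_xml::DeError> {
    let xml = r#"<enclosure type="application/x-bittorrent" length="1024" url="https://mikanani.me/example.torrent"/>"#;
    let enclosure: Enclosure = quick_xml::de::from_str(xml)?;
    assert_eq!(enclosure.length, 1024);
    println!("{enclosure:?}");
    Ok(())
}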

View File

@@ -1,12 +1,13 @@
 use std::{
     collections::{HashMap, HashSet},
     fmt::Debug,
+    str::FromStr,
     sync::Arc,
 };

 use async_graphql::{InputObject, SimpleObject};
 use async_stream::try_stream;
-use fetch::fetch_bytes;
+use fetch::fetch_html;
 use futures::{Stream, TryStreamExt, pin_mut, try_join};
 use maplit::hashmap;
 use sea_orm::{
@@ -24,8 +25,8 @@ use crate::{
     bittorrent::EpisodeEnclosureMeta,
     mikan::{
         MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
-        MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
-        MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
+        MikanRssItemMeta, MikanRssRoot, MikanSeasonFlowUrlMeta, MikanSeasonStr,
+        MikanSubscriberSubscriptionUrlMeta, build_mikan_bangumi_subscription_rss_url,
         build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
         scrape_mikan_episode_meta_from_episode_homepage_url,
     },
@@ -39,7 +40,7 @@ use crate::{
 #[tracing::instrument(err, skip(ctx, rss_item_list))]
 async fn sync_mikan_feeds_from_rss_item_list(
     ctx: &dyn AppContextTrait,
-    rss_item_list: Vec<MikanRssEpisodeItem>,
+    rss_item_list: Vec<MikanRssItemMeta>,
     subscriber_id: i32,
     subscription_id: i32,
 ) -> RecorderResult<()> {
@@ -202,7 +203,7 @@ impl SubscriptionTrait for MikanSubscriberSubscription {
     fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
         let source_url = Url::parse(&model.source_url)?;

-        let meta = MikanSubscriberSubscriptionRssUrlMeta::from_rss_url(&source_url)
+        let meta = MikanSubscriberSubscriptionUrlMeta::from_rss_url(&source_url)
             .with_whatever_context::<_, String, RecorderError>(|| {
                 format!(
                     "MikanSubscriberSubscription should extract mikan_subscription_token from \
@@ -224,19 +225,19 @@ impl MikanSubscriberSubscription {
     async fn get_rss_item_list_from_source_url(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
+    ) -> RecorderResult<Vec<MikanRssItemMeta>> {
         let mikan_base_url = ctx.mikan().base_url().clone();
         let rss_url = build_mikan_subscriber_subscription_rss_url(
             mikan_base_url.clone(),
             &self.mikan_subscription_token,
         );
-        let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
+        let html = fetch_html(ctx.mikan(), rss_url).await?;

-        let channel = rss::Channel::read_from(&bytes[..])?;
+        let channel = MikanRssRoot::from_str(&html)?.channel;

         let mut result = vec![];

         for (idx, item) in channel.items.into_iter().enumerate() {
-            let item = MikanRssEpisodeItem::try_from(item)
+            let item = MikanRssItemMeta::try_from(item)
                 .with_whatever_context::<_, String, RecorderError>(|_| {
                     format!("failed to extract rss item at idx {idx}")
                 })?;
@@ -249,7 +250,7 @@ impl MikanSubscriberSubscription {
     async fn get_rss_item_list_from_subsribed_url_rss_link(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
+    ) -> RecorderResult<Vec<MikanRssItemMeta>> {
         let subscribed_bangumi_list =
             bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.subscription_id)
                 .await?;
@@ -264,12 +265,12 @@ impl MikanSubscriberSubscription {
                         self.subscription_id, subscribed_bangumi.display_name
                     )
                 })?;
-            let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
+            let html = fetch_html(ctx.mikan(), rss_url).await?;

-            let channel = rss::Channel::read_from(&bytes[..])?;
+            let channel = MikanRssRoot::from_str(&html)?.channel;

             for (idx, item) in channel.items.into_iter().enumerate() {
-                let item = MikanRssEpisodeItem::try_from(item)
+                let item = MikanRssItemMeta::try_from(item)
                     .with_whatever_context::<_, String, RecorderError>(|_| {
                         format!("failed to extract rss item at idx {idx}")
                     })?;
@@ -406,7 +407,7 @@ impl MikanSeasonSubscription {
     fn get_rss_item_stream_from_subsribed_url_rss_link(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> impl Stream<Item = RecorderResult<Vec<MikanRssEpisodeItem>>> {
+    ) -> impl Stream<Item = RecorderResult<Vec<MikanRssItemMeta>>> {
         try_stream! {
             let db = ctx.db();
@@ -433,14 +434,14 @@ impl MikanSeasonSubscription {
                         self.subscription_id, subscribed_bangumi.display_name
                     )
                 })?;
-            let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
+            let html = fetch_html(ctx.mikan(), rss_url).await?;

-            let channel = rss::Channel::read_from(&bytes[..])?;
+            let channel = MikanRssRoot::from_str(&html)?.channel;

             let mut rss_item_list = vec![];

             for (idx, item) in channel.items.into_iter().enumerate() {
-                let item = MikanRssEpisodeItem::try_from(item)
+                let item = MikanRssItemMeta::try_from(item)
                     .with_whatever_context::<_, String, RecorderError>(|_| {
                         format!("failed to extract rss item at idx {idx}")
                     })?;
@@ -519,20 +520,20 @@ impl MikanBangumiSubscription {
     async fn get_rss_item_list_from_source_url(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
+    ) -> RecorderResult<Vec<MikanRssItemMeta>> {
         let mikan_base_url = ctx.mikan().base_url().clone();
         let rss_url = build_mikan_bangumi_subscription_rss_url(
             mikan_base_url.clone(),
             &self.mikan_bangumi_id,
             Some(&self.mikan_fansub_id),
         );
-        let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
+        let html = fetch_html(ctx.mikan(), rss_url).await?;

-        let channel = rss::Channel::read_from(&bytes[..])?;
+        let channel = MikanRssRoot::from_str(&html)?.channel;

         let mut result = vec![];

         for (idx, item) in channel.items.into_iter().enumerate() {
-            let item = MikanRssEpisodeItem::try_from(item)
+            let item = MikanRssItemMeta::try_from(item)
                 .with_whatever_context::<_, String, RecorderError>(|_| {
                     format!("failed to extract rss item at idx {idx}")
                 })?;
@@ -556,7 +557,7 @@ mod tests {
         errors::RecorderResult,
         extract::mikan::{
             MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
-            MikanSubscriberSubscriptionRssUrlMeta,
+            MikanSubscriberSubscriptionUrlMeta,
         },
         models::{
             bangumi, episodes,
@@ -677,7 +678,7 @@ mod tests {
             subscriber_id: ActiveValue::Set(subscriber_id),
             category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSubscriber),
             source_url: ActiveValue::Set(
-                MikanSubscriberSubscriptionRssUrlMeta {
+                MikanSubscriberSubscriptionUrlMeta {
                     mikan_subscription_token: "test".into(),
                 }
                 .build_rss_url(mikan_server.base_url().clone())

View File

@@ -26,7 +26,8 @@ use crate::{
        MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
        MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
        MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
-       MIKAN_YEAR_QUERY_KEY, MikanClient,
+       MIKAN_YEAR_QUERY_KEY, MikanClient, build_mikan_bangumi_subscription_rss_url,
+       build_mikan_subscriber_subscription_rss_url,
    },
},
media::{
@@ -139,16 +140,16 @@ impl From<MikanRssEpisodeItem> for EpisodeEnclosureMeta {
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
-pub struct MikanSubscriberSubscriptionRssUrlMeta {
+pub struct MikanSubscriberSubscriptionUrlMeta {
    pub mikan_subscription_token: String,
}

-impl MikanSubscriberSubscriptionRssUrlMeta {
+impl MikanSubscriberSubscriptionUrlMeta {
    pub fn from_rss_url(url: &Url) -> Option<Self> {
        if url.path() == MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH {
            url.query_pairs()
                .find(|(k, _)| k == MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY)
-               .map(|(_, v)| MikanSubscriberSubscriptionRssUrlMeta {
+               .map(|(_, v)| MikanSubscriberSubscriptionUrlMeta {
                    mikan_subscription_token: v.to_string(),
                })
        } else {
@@ -161,19 +162,6 @@ impl MikanSubscriberSubscriptionRssUrlMeta {
    }
}

-pub fn build_mikan_subscriber_subscription_rss_url(
-    mikan_base_url: Url,
-    mikan_subscription_token: &str,
-) -> Url {
-    let mut url = mikan_base_url;
-    url.set_path(MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH);
-    url.query_pairs_mut().append_pair(
-        MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
-        mikan_subscription_token,
-    );
-    url
-}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Eq)]
pub struct MikanBangumiIndexMeta {
    pub homepage: Url,
@@ -289,22 +277,6 @@ pub struct MikanBangumiPosterMeta {
    pub poster_src: Option<String>,
}

-pub fn build_mikan_bangumi_subscription_rss_url(
-    mikan_base_url: Url,
-    mikan_bangumi_id: &str,
-    mikan_fansub_id: Option<&str>,
-) -> Url {
-    let mut url = mikan_base_url;
-    url.set_path(MIKAN_BANGUMI_RSS_PATH);
-    url.query_pairs_mut()
-        .append_pair(MIKAN_BANGUMI_ID_QUERY_KEY, mikan_bangumi_id);
-    if let Some(mikan_fansub_id) = mikan_fansub_id {
-        url.query_pairs_mut()
-            .append_pair(MIKAN_FANSUB_ID_QUERY_KEY, mikan_fansub_id);
-    };
-    url
-}

#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiIndexHash {
    pub mikan_bangumi_id: String,
@@ -829,11 +801,6 @@ pub async fn scrape_mikan_poster_meta_from_image_url(
        .write(storage_path.clone(), poster_data)
        .await?;

-   tracing::warn!(
-       poster_str = poster_str.to_string(),
-       "mikan poster meta extracted"
-   );

    MikanBangumiPosterMeta {
        origin_poster_src: origin_poster_src_url,
        poster_src: Some(poster_str.to_string()),
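The two removed builders now live behind the imports added at the top of this file; a hedged sketch of the invariants they guarantee, with hypothetical ids (the concrete path and query-key values are defined by the constants imported above):

// hedged sketch; ids are placeholders, not real entries
let url = build_mikan_bangumi_subscription_rss_url(
    Url::parse("https://mikanani.me/").unwrap(), // assumed base url
    "3141",                                      // hypothetical bangumi id
    Some("370"),                                 // hypothetical fansub id
);
assert_eq!(url.path(), MIKAN_BANGUMI_RSS_PATH);
assert!(url.query_pairs().any(|(k, _)| k == MIKAN_BANGUMI_ID_QUERY_KEY));
assert!(url.query_pairs().any(|(k, _)| k == MIKAN_FANSUB_ID_QUERY_KEY));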

View File

@@ -47,8 +47,27 @@ impl<'a> EpisodeComp<'a> {
        Ok((input, f32::round(num) as i32))
    }

+   fn parse_ep_special_num(input: &'a str) -> IResult<&'a str, i32> {
+       terminated(
+           alt((
+               value(0, tag_no_case("ova")),
+               value(0, tag_no_case("oad")),
+               value(0, tag_no_case("sp")),
+               value(0, tag_no_case("ex")),
+           )),
+           (space0, opt(parse_int::<i32>)),
+       )
+       .parse(input)
+   }

    fn parse_ep_num(input: &'a str) -> IResult<&'a str, i32> {
-       alt((parse_int::<i32>, Self::parse_ep_round_num, ZhNum::parse_int)).parse(input)
+       alt((
+           parse_int::<i32>,
+           Self::parse_ep_round_num,
+           ZhNum::parse_int,
+           Self::parse_ep_special_num,
+       ))
+       .parse(input)
    }

    fn parse_ep_nums_core(input: &'a str) -> IResult<&'a str, (i32, Option<i32>)> {
@@ -175,8 +194,13 @@ impl<'a> std::fmt::Debug for MoiveComp<'a> {
impl<'a> OriginCompTrait<'a> for MoiveComp<'a> {
    #[cfg_attr(debug_assertions, instrument(level = Level::TRACE, ret, err(level=Level::TRACE), "MoiveComp::parse_comp"))]
    fn parse_comp(input: &'a str) -> IResult<&'a str, Self> {
-       let (input, source) =
-           alt((tag("剧场版"), tag("电影"), tag_no_case("movie"))).parse(input)?;
+       let (input, source) = alt((
+           tag("剧场版"),
+           tag("电影"),
+           tag_no_case("movie"),
+           tag_no_case("film"),
+       ))
+       .parse(input)?;
        Ok((
            input,
            Self {
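The new `parse_ep_special_num` branch normalizes special releases ("OVA", "OAD", "SP", "EX", optionally followed by an index) to episode 0. A self-contained sketch of the same idea, assuming nom 8-style combinators; `digit0` stands in for the crate's own `parse_int`:

// hedged standalone sketch of the special-episode parser
use nom::{
    IResult, Parser,
    branch::alt,
    bytes::complete::tag_no_case,
    character::complete::{digit0, space0},
    combinator::value,
    sequence::terminated,
};

fn parse_special(input: &str) -> IResult<&str, i32> {
    terminated(
        alt((
            value(0, tag_no_case("ova")),
            value(0, tag_no_case("oad")),
            value(0, tag_no_case("sp")),
            value(0, tag_no_case("ex")),
        )),
        (space0, digit0), // swallow an optional trailing index like "SP 2"
    )
    .parse(input)
}

// parse_special("SP 2") == Ok(("", 0)); parse_special("OVA") == Ok(("", 0))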

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

-use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::bangumi};
+use crate::{
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::custom::register_entity_default_writable,
+    },
+    models::bangumi,
+};

pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<bangumi::Entity>(context, &bangumi::Column::SubscriberId);
@@ -8,7 +14,6 @@ pub fn register_bangumi_to_schema_context(context: &mut BuilderContext) {
pub fn register_bangumi_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<bangumi::BangumiType>();
-   seaography::register_entity!(builder, bangumi);
-   builder
+   register_entity_default_writable!(builder, bangumi, false)
}

View File

@@ -1,50 +1,28 @@
use std::sync::Arc;

-use async_graphql::dynamic::{
-    Field, FieldFuture, FieldValue, InputObject, InputValue, Object, TypeRef,
-};
+use async_graphql::dynamic::{Field, FieldFuture, FieldValue, Object, TypeRef};
+use sea_orm::{EntityTrait, QueryFilter};
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use serde::{Deserialize, Serialize};
use util_derive::DynamicGraphql;

use crate::{
    app::AppContextTrait,
-   auth::AuthUserInfo,
    errors::RecorderError,
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
-       infra::crypto::{
-           register_crypto_column_input_conversion_to_schema_context,
-           register_crypto_column_output_conversion_to_schema_context,
+       infra::{
+           crypto::{
+               register_crypto_column_input_conversion_to_schema_context,
+               register_crypto_column_output_conversion_to_schema_context,
+           },
+           custom::{generate_entity_filtered_mutation_field, register_entity_default_writable},
+           name::get_entity_custom_mutation_field_name,
        },
    },
    models::credential_3rd,
};

-#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
-struct Credential3rdCheckAvailableInput {
-    pub id: i32,
-}
-
-impl Credential3rdCheckAvailableInput {
-    fn input_type_name() -> &'static str {
-        "Credential3rdCheckAvailableInput"
-    }
-
-    fn arg_name() -> &'static str {
-        "filter"
-    }
-
-    fn generate_input_object() -> InputObject {
-        InputObject::new(Self::input_type_name())
-            .description("The input of the credential3rdCheckAvailable query")
-            .field(InputValue::new(
-                Credential3rdCheckAvailableInputFieldEnum::Id.as_str(),
-                TypeRef::named_nn(TypeRef::INT),
-            ))
-    }
-}
-
#[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
pub struct Credential3rdCheckAvailableInfo {
    pub available: bool,
@@ -117,52 +95,43 @@
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.register_enumeration::<credential_3rd::Credential3rdType>();
-   seaography::register_entity!(builder, credential_3rd);
-   builder.schema = builder
-       .schema
-       .register(Credential3rdCheckAvailableInput::generate_input_object());
+   builder = register_entity_default_writable!(builder, credential_3rd, false);
    builder.schema = builder
        .schema
        .register(Credential3rdCheckAvailableInfo::generate_output_object());
-   builder.queries.push(
-       Field::new(
-           "credential3rdCheckAvailable",
-           TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
-           move |ctx| {
-               FieldFuture::new(async move {
-                   let auth_user_info = ctx.data::<AuthUserInfo>()?;
-                   let input: Credential3rdCheckAvailableInput = ctx
-                       .args
-                       .get(Credential3rdCheckAvailableInput::arg_name())
-                       .unwrap()
-                       .deserialize()?;
-                   let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
-                   let credential_model = credential_3rd::Model::find_by_id_and_subscriber_id(
-                       app_ctx.as_ref(),
-                       input.id,
-                       auth_user_info.subscriber_auth.subscriber_id,
-                   )
-                   .await?
-                   .ok_or_else(|| RecorderError::Credential3rdError {
-                       message: format!("credential = {} not found", input.id),
-                       source: None.into(),
-                   })?;
+   let builder_context = &builder.context;
+   {
+       let check_available_mutation_name = get_entity_custom_mutation_field_name::<
+           credential_3rd::Entity,
+       >(builder_context, "CheckAvailable");
+       let check_available_mutation =
+           generate_entity_filtered_mutation_field::<credential_3rd::Entity, _, _>(
+               builder_context,
+               check_available_mutation_name,
+               TypeRef::named_nn(Credential3rdCheckAvailableInfo::object_type_name()),
+               Arc::new(|_resolver_ctx, app_ctx, filters| {
+                   Box::pin(async move {
+                       let db = app_ctx.db();
+                       let credential_model = credential_3rd::Entity::find()
+                           .filter(filters)
+                           .one(db)
+                           .await?
+                           .ok_or_else(|| {
+                               RecorderError::from_entity_not_found::<credential_3rd::Entity>()
+                           })?;

                        let available = credential_model.check_available(app_ctx.as_ref()).await?;
                        Ok(Some(FieldValue::owned_any(
                            Credential3rdCheckAvailableInfo { available },
                        )))
                    })
-           },
-       )
-       .argument(InputValue::new(
-           Credential3rdCheckAvailableInput::arg_name(),
-           TypeRef::named_nn(Credential3rdCheckAvailableInput::input_type_name()),
-       )),
-   );
+               }),
+           );
+       builder.mutations.push(check_available_mutation);
+   }
    builder
}
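With the bespoke input object gone, the mutation rides on the entity's generated filter input. A hedged sketch of the resulting call shape; the exact field and filter names are produced by the naming helpers and may differ in the real schema:

// hypothetical GraphQL operation against the rebuilt schema
let _query = r#"
    mutation {
        credential3rdCheckAvailable(filter: { id: { eq: 1 } }) {
            available
        }
    }
"#;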

View File

@@ -0,0 +1,56 @@
use sea_orm::Iterable;
use seaography::{Builder as SeaographyBuilder, BuilderContext};
use crate::{
graphql::{
domains::{
subscriber_tasks::restrict_subscriber_tasks_for_entity,
subscribers::restrict_subscriber_for_entity,
},
infra::{custom::register_entity_default_writable, name::get_entity_and_column_name},
},
models::cron,
};
fn skip_columns_for_entity_input(context: &mut BuilderContext) {
for column in cron::Column::iter() {
if matches!(
column,
cron::Column::SubscriberTask
| cron::Column::CronExpr
| cron::Column::Enabled
| cron::Column::TimeoutMs
| cron::Column::MaxAttempts
) {
continue;
}
let entity_column_key = get_entity_and_column_name::<cron::Entity>(context, &column);
context.entity_input.insert_skips.push(entity_column_key);
}
for column in cron::Column::iter() {
        if matches!(
            column,
            cron::Column::CronExpr
                | cron::Column::Enabled
                | cron::Column::TimeoutMs
                | cron::Column::Priority
                | cron::Column::MaxAttempts
        ) {
continue;
}
let entity_column_key = get_entity_and_column_name::<cron::Entity>(context, &column);
context.entity_input.update_skips.push(entity_column_key);
}
}
pub fn register_cron_to_schema_context(context: &mut BuilderContext) {
restrict_subscriber_for_entity::<cron::Entity>(context, &cron::Column::SubscriberId);
restrict_subscriber_tasks_for_entity::<cron::Entity>(context, &cron::Column::SubscriberTask);
skip_columns_for_entity_input(context);
}
pub fn register_cron_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
builder.register_enumeration::<cron::CronStatus>();
builder = register_entity_default_writable!(builder, cron, true);
builder
}
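Net effect of the two skip loops above, spelled out as plain sets (a hedged sketch; the actual GraphQL field names are derived from these column names by the builder's casing rules):

// hedged sketch of which cron columns stay writable
use std::collections::BTreeSet;

fn main() {
    let insertable: BTreeSet<&str> =
        ["subscriber_task", "cron_expr", "enabled", "timeout_ms", "max_attempts"].into();
    let updatable: BTreeSet<&str> =
        ["cron_expr", "enabled", "timeout_ms", "priority", "max_attempts"].into();
    // every other cron column is skipped in the generated insert/update inputs,
    // and the task payload itself is immutable after creation
    assert!(insertable.contains("subscriber_task") && !updatable.contains("subscriber_task"));
}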

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

-use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloaders};
+use crate::{
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::custom::register_entity_default_writable,
+    },
+    models::downloaders,
+};

pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<downloaders::Entity>(
@@ -11,7 +17,7 @@ pub fn register_downloaders_to_schema_context(context: &mut BuilderContext) {
pub fn register_downloaders_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<downloaders::DownloaderCategory>();
-   seaography::register_entity!(builder, downloaders);
+   builder = register_entity_default_writable!(builder, downloaders, false);
    builder
}

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

-use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::downloads};
+use crate::{
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::custom::register_entity_default_writable,
+    },
+    models::downloads,
+};

pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<downloads::Entity>(context, &downloads::Column::SubscriberId);
@@ -9,7 +15,7 @@ pub fn register_downloads_to_schema_context(context: &mut BuilderContext) {
pub fn register_downloads_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<downloads::DownloadStatus>();
    builder.register_enumeration::<downloads::DownloadMime>();
-   seaography::register_entity!(builder, downloads);
+   builder = register_entity_default_writable!(builder, downloads, false);
    builder
}

View File

@@ -1,6 +1,12 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

-use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::episodes};
+use crate::{
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::custom::register_entity_default_writable,
+    },
+    models::episodes,
+};

pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<episodes::Entity>(context, &episodes::Column::SubscriberId);
@@ -8,7 +14,7 @@ pub fn register_episodes_to_schema_context(context: &mut BuilderContext) {
pub fn register_episodes_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<episodes::EpisodeType>();
-   seaography::register_entity!(builder, episodes);
+   builder = register_entity_default_writable!(builder, episodes, false);
    builder
}

View File

@@ -7,7 +7,13 @@ use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};
use crate::{
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
-       infra::util::{get_entity_column_key, get_entity_key},
+       infra::{
+           custom::register_entity_default_writable,
+           name::{
+               get_entity_and_column_name, get_entity_create_batch_mutation_field_name,
+               get_entity_create_one_mutation_field_name,
+           },
+       },
    },
    models::feeds,
};
@@ -15,22 +21,14 @@ use crate::{
pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<feeds::Entity>(context, &feeds::Column::SubscriberId);
    {
-       let entity_column_key =
-           get_entity_column_key::<feeds::Entity>(context, &feeds::Column::Token);
-       let entity_key = get_entity_key::<feeds::Entity>(context);
-       let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
-       let entity_create_one_mutation_field_name = Arc::new(format!(
-           "{}{}",
-           entity_name, context.entity_create_one_mutation.mutation_suffix
-       ));
-       let entity_create_batch_mutation_field_name = Arc::new(format!(
-           "{}{}",
-           entity_name,
-           context.entity_create_batch_mutation.mutation_suffix.clone()
-       ));
+       let entity_create_one_mutation_field_name = Arc::new(
+           get_entity_create_one_mutation_field_name::<feeds::Entity>(context),
+       );
+       let entity_create_batch_mutation_field_name =
+           Arc::new(get_entity_create_batch_mutation_field_name::<feeds::Entity>(context));
        context.types.input_none_conversions.insert(
-           entity_column_key,
+           get_entity_and_column_name::<feeds::Entity>(context, &feeds::Column::Token),
            Box::new(
                move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
                    let field_name = context.field().name();
@@ -50,7 +48,8 @@ pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    builder.register_enumeration::<feeds::FeedType>();
    builder.register_enumeration::<feeds::FeedSource>();
-   seaography::register_entity!(builder, feeds);
+   builder = register_entity_default_writable!(builder, feeds, false);
    builder
}
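The `Token` none-conversion keeps NULL out of the column when a create mutation omits the field; a hedged sketch of the idea with a hypothetical generator (the real conversion body is not shown in this hunk):

// hedged sketch; the token generator below is illustrative only
use sea_orm::Value as SeaValue;

fn default_token_when_missing(provided: Option<String>) -> SeaValue {
    let token = provided.unwrap_or_else(|| {
        // hypothetical generator; the real code may differ
        format!("feed-{}", rand::random::<u64>())
    });
    SeaValue::String(Some(Box::new(token)))
}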

View File

@@ -10,3 +10,4 @@ pub mod subscribers;
pub mod subscription_bangumi;
pub mod subscription_episode;
pub mod subscriptions;
pub mod cron;

View File

@@ -1,51 +1,142 @@
use std::{ops::Deref, sync::Arc};

-use async_graphql::dynamic::{FieldValue, TypeRef};
+use async_graphql::dynamic::{FieldValue, Scalar, TypeRef};
+use convert_case::Case;
use sea_orm::{
-   ColumnTrait, ConnectionTrait, EntityTrait, QueryFilter, QuerySelect, QueryTrait, prelude::Expr,
-   sea_query::Query,
+   ActiveModelBehavior, ColumnTrait, ConnectionTrait, EntityTrait, Iterable, QueryFilter,
+   QuerySelect, QueryTrait, prelude::Expr, sea_query::Query,
};
use seaography::{
-   Builder as SeaographyBuilder, BuilderContext, EntityDeleteMutationBuilder, EntityObjectBuilder,
-   EntityQueryFieldBuilder, get_filter_conditions,
+   Builder as SeaographyBuilder, BuilderContext, SeaographyError, prepare_active_model,
};
+use ts_rs::TS;

use crate::{
+   auth::AuthUserInfo,
    errors::RecorderError,
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
        infra::{
-           custom::generate_entity_filter_mutation_field,
-           json::{convert_jsonb_output_case_for_entity, restrict_jsonb_filter_input_for_entity},
+           custom::{
+               generate_entity_create_one_mutation_field,
+               generate_entity_default_basic_entity_object,
+               generate_entity_default_insert_input_object, generate_entity_delete_mutation_field,
+               generate_entity_filtered_mutation_field, register_entity_default_readonly,
+           },
+           json::{convert_jsonb_output_for_entity, restrict_jsonb_filter_input_for_entity},
+           name::{
+               get_entity_and_column_name, get_entity_basic_type_name,
+               get_entity_custom_mutation_field_name,
+           },
        },
    },
    models::subscriber_tasks,
-   task::{ApalisJobs, ApalisSchema},
+   task::{ApalisJobs, ApalisSchema, SubscriberTaskTrait},
};

-pub fn register_subscriber_tasks_entity_mutations(
+fn skip_columns_for_entity_input(context: &mut BuilderContext) {
+   for column in subscriber_tasks::Column::iter() {
+       if matches!(
+           column,
+           subscriber_tasks::Column::Job | subscriber_tasks::Column::SubscriberId
+       ) {
+           continue;
+       }
+       let entity_column_key =
+           get_entity_and_column_name::<subscriber_tasks::Entity>(context, &column);
+       context.entity_input.insert_skips.push(entity_column_key);
+   }
+}
+
+pub fn restrict_subscriber_tasks_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
+where
+   T: EntityTrait,
+   <T as EntityTrait>::Model: Sync,
+{
+   let entity_and_column = get_entity_and_column_name::<T>(context, column);
+   restrict_jsonb_filter_input_for_entity::<T>(context, column);
+   convert_jsonb_output_for_entity::<T>(context, column, Some(Case::Camel));
+   let entity_column_name = get_entity_and_column_name::<T>(context, column);
+   context.types.input_type_overwrites.insert(
+       entity_column_name.clone(),
+       TypeRef::Named(subscriber_tasks::SubscriberTask::ident().into()),
+   );
+   context.types.output_type_overwrites.insert(
+       entity_column_name.clone(),
+       TypeRef::Named(subscriber_tasks::SubscriberTask::ident().into()),
+   );
+   context.types.input_conversions.insert(
+       entity_column_name.clone(),
+       Box::new(move |resolve_context, value_accessor| {
+           let task: subscriber_tasks::SubscriberTaskInput = value_accessor.deserialize()?;
+           let subscriber_id = resolve_context
+               .data::<AuthUserInfo>()?
+               .subscriber_auth
+               .subscriber_id;
+           let task = subscriber_tasks::SubscriberTask::from_input(task, subscriber_id);
+           let json_value = serde_json::to_value(task).map_err(|err| {
+               SeaographyError::TypeConversionError(
+                   err.to_string(),
+                   format!("Json - {entity_column_name}"),
+               )
+           })?;
+           Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
+       }),
+   );
+   context.entity_input.update_skips.push(entity_and_column);
+}
+
+pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
+   restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
+       context,
+       &subscriber_tasks::Column::SubscriberId,
+   );
+   restrict_subscriber_tasks_for_entity::<subscriber_tasks::Entity>(
+       context,
+       &subscriber_tasks::Column::Job,
+   );
+   skip_columns_for_entity_input(context);
+}
+
+pub fn register_subscriber_tasks_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
-   let context = builder.context;
+   builder.schema = builder.schema.register(
+       Scalar::new(subscriber_tasks::SubscriberTask::ident())
+           .description(subscriber_tasks::SubscriberTask::decl()),
+   );
+   builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
+   builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
+   builder = register_entity_default_readonly!(builder, subscriber_tasks);
+   let builder_context = builder.context;
    {
-       let entitity_delete_mutation_builder = EntityDeleteMutationBuilder { context };
-       let delete_mutation = generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
-           context,
-           entitity_delete_mutation_builder.type_name::<subscriber_tasks::Entity>(),
-           TypeRef::named_nn(TypeRef::INT),
-           Arc::new(|resolver_ctx, app_ctx, filters| {
-               let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
-                   resolver_ctx,
-                   context,
-                   filters,
-               );
+       builder
+           .outputs
+           .push(generate_entity_default_basic_entity_object::<
+               subscriber_tasks::Entity,
+           >(builder_context));
+   }
+   {
+       let delete_mutation = generate_entity_delete_mutation_field::<subscriber_tasks::Entity>(
+           builder_context,
+           Arc::new(|_resolver_ctx, app_ctx, filters| {
                Box::pin(async move {
                    let db = app_ctx.db();
                    let select_subquery = subscriber_tasks::Entity::find()
                        .select_only()
                        .column(subscriber_tasks::Column::Id)
-                       .filter(filters_condition);
+                       .filter(filters);

                    let delete_query = Query::delete()
                        .from_table((ApalisSchema::Schema, ApalisJobs::Table))
@@ -59,42 +150,36 @@ pub fn register_subscriber_tasks_entity_mutations(
                    let result = db.execute(delete_statement).await?;

-                   Ok::<_, RecorderError>(Some(FieldValue::value(result.rows_affected() as i32)))
+                   Ok::<_, RecorderError>(result.rows_affected())
                })
            }),
        );
        builder.mutations.push(delete_mutation);
    }
    {
-       let entity_object_builder = EntityObjectBuilder { context };
-       let entity_query_field = EntityQueryFieldBuilder { context };
-       let entity_retry_one_mutation_name = format!(
-           "{}RetryOne",
-           entity_query_field.type_name::<subscriber_tasks::Entity>()
-       );
+       let entity_retry_one_mutation_name = get_entity_custom_mutation_field_name::<
+           subscriber_tasks::Entity,
+       >(builder_context, "RetryOne");
        let retry_one_mutation =
-           generate_entity_filter_mutation_field::<subscriber_tasks::Entity, _, _>(
-               context,
+           generate_entity_filtered_mutation_field::<subscriber_tasks::Entity, _, _>(
+               builder_context,
                entity_retry_one_mutation_name,
-               TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
-               Arc::new(|resolver_ctx, app_ctx, filters| {
-                   let filters_condition = get_filter_conditions::<subscriber_tasks::Entity>(
-                       resolver_ctx,
-                       context,
-                       filters,
-                   );
+               TypeRef::named_nn(get_entity_basic_type_name::<subscriber_tasks::Entity>(
+                   builder_context,
+               )),
+               Arc::new(|_resolver_ctx, app_ctx, filters| {
                    Box::pin(async move {
                        let db = app_ctx.db();
                        let job_id = subscriber_tasks::Entity::find()
-                           .filter(filters_condition)
+                           .filter(filters)
                            .select_only()
                            .column(subscriber_tasks::Column::Id)
                            .into_tuple::<String>()
                            .one(db)
                            .await?
-                           .ok_or_else(|| RecorderError::ModelEntityNotFound {
-                               entity: "SubscriberTask".into(),
+                           .ok_or_else(|| {
+                               RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
                            })?;

                        let task = app_ctx.task();
@@ -104,8 +189,8 @@ pub fn register_subscriber_tasks_entity_mutations(
                            .filter(subscriber_tasks::Column::Id.eq(&job_id))
                            .one(db)
                            .await?
-                           .ok_or_else(|| RecorderError::ModelEntityNotFound {
-                               entity: "SubscriberTask".into(),
+                           .ok_or_else(|| {
+                               RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
                            })?;

                        Ok::<_, RecorderError>(Some(FieldValue::owned_any(task_model)))
@@ -114,38 +199,54 @@ pub fn register_subscriber_tasks_entity_mutations(
        );
        builder.mutations.push(retry_one_mutation);
    }
-
-   builder
-}
-
-pub fn register_subscriber_tasks_to_schema_context(context: &mut BuilderContext) {
-   restrict_subscriber_for_entity::<subscriber_tasks::Entity>(
-       context,
-       &subscriber_tasks::Column::SubscriberId,
-   );
-   restrict_jsonb_filter_input_for_entity::<subscriber_tasks::Entity>(
-       context,
-       &subscriber_tasks::Column::Job,
-   );
-   convert_jsonb_output_case_for_entity::<subscriber_tasks::Entity>(
-       context,
-       &subscriber_tasks::Column::Job,
-   );
-}
-
-pub fn register_subscriber_tasks_to_schema_builder(
-   mut builder: SeaographyBuilder,
-) -> SeaographyBuilder {
-   builder.register_entity::<subscriber_tasks::Entity>(
-       <subscriber_tasks::RelatedEntity as sea_orm::Iterable>::iter()
-           .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
-           .collect(),
-   );
-   builder = builder.register_entity_dataloader_one_to_one(subscriber_tasks::Entity, tokio::spawn);
-   builder =
-       builder.register_entity_dataloader_one_to_many(subscriber_tasks::Entity, tokio::spawn);
-   builder = register_subscriber_tasks_entity_mutations(builder);
-   builder.register_enumeration::<subscriber_tasks::SubscriberTaskType>();
-   builder.register_enumeration::<subscriber_tasks::SubscriberTaskStatus>();
+   {
+       builder
+           .inputs
+           .push(generate_entity_default_insert_input_object::<
+               subscriber_tasks::Entity,
+           >(builder_context));
+       let create_one_mutation =
+           generate_entity_create_one_mutation_field::<subscriber_tasks::Entity>(
+               builder_context,
+               Arc::new(move |resolver_ctx, app_ctx, input_object| {
+                   Box::pin(async move {
+                       let active_model: Result<subscriber_tasks::ActiveModel, _> =
+                           prepare_active_model(builder_context, &input_object, resolver_ctx);
+                       let task_service = app_ctx.task();
+                       let active_model = active_model?;
+                       let db = app_ctx.db();
+                       let active_model = active_model.before_save(db, true).await?;
+                       let task = active_model.job.unwrap();
+                       let subscriber_id = active_model.subscriber_id.unwrap();
+                       if task.get_subscriber_id() != subscriber_id {
+                           Err(async_graphql::Error::new(
+                               "subscriber_id does not match with job.subscriber_id",
+                           ))?;
+                       }
+                       let task_id = task_service.add_subscriber_task(task).await?.to_string();
+                       let db = app_ctx.db();
+                       let task = subscriber_tasks::Entity::find()
+                           .filter(subscriber_tasks::Column::Id.eq(&task_id))
+                           .one(db)
+                           .await?
+                           .ok_or_else(|| {
+                               RecorderError::from_entity_not_found::<subscriber_tasks::Entity>()
+                           })?;
+                       Ok::<_, RecorderError>(task)
+                   })
+               }),
+           );
+       builder.mutations.push(create_one_mutation);
+   }
    builder
}
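`SubscriberTask::ident()`/`decl()` come from ts-rs: the exported TypeScript declaration doubles as the GraphQL scalar's name and description. A minimal standalone illustration of that pairing (type and fields are hypothetical):

// hedged ts-rs sketch
use ts_rs::TS;

#[derive(TS)]
#[ts(export)]
struct DemoTask {
    id: String,
    priority: i32,
}

fn main() {
    // ident() -> "DemoTask"
    // decl() -> roughly: type DemoTask = { id: string, priority: number, }
    println!("{} = {}", DemoTask::ident(), DemoTask::decl());
}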

View File

@@ -7,12 +7,22 @@ use sea_orm::{ColumnTrait, Condition, EntityTrait, Iterable, Value as SeaValue};
use seaography::{
    Builder as SeaographyBuilder, BuilderContext, FilterInfo,
    FilterOperation as SeaographqlFilterOperation, FilterType, FilterTypesMapHelper,
-   FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult, SeaographyError,
+   FnFilterCondition, FnGuard, FnInputTypeNoneConversion, GuardAction, SeaResult,
};

use crate::{
    auth::{AuthError, AuthUserInfo},
-   graphql::infra::util::{get_column_key, get_entity_column_key, get_entity_key},
+   graphql::infra::{
+       custom::register_entity_default_readonly,
+       name::{
+           get_column_name, get_entity_and_column_name,
+           get_entity_create_batch_mutation_data_field_name,
+           get_entity_create_batch_mutation_field_name,
+           get_entity_create_one_mutation_data_field_name,
+           get_entity_create_one_mutation_field_name, get_entity_name,
+           get_entity_update_mutation_data_field_name, get_entity_update_mutation_field_name,
+       },
+   },
    models::subscribers,
};
@@ -82,32 +92,19 @@ where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-   let entity_key = get_entity_key::<T>(context);
-   let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
-   let column_key = get_column_key::<T>(context, column);
-   let column_name = Arc::new(context.entity_object.column_name.as_ref()(
-       &entity_key,
-       &column_key,
-   ));
-   let entity_create_one_mutation_field_name = Arc::new(format!(
-       "{}{}",
-       entity_name, context.entity_create_one_mutation.mutation_suffix
-   ));
+   let column_name = Arc::new(get_column_name::<T>(context, column));
+   let entity_create_one_mutation_field_name =
+       Arc::new(get_entity_create_one_mutation_field_name::<T>(context));
    let entity_create_one_mutation_data_field_name =
-       Arc::new(context.entity_create_one_mutation.data_field.clone());
+       Arc::new(get_entity_create_one_mutation_data_field_name(context).to_string());
-   let entity_create_batch_mutation_field_name = Arc::new(format!(
-       "{}{}",
-       entity_name,
-       context.entity_create_batch_mutation.mutation_suffix.clone()
-   ));
+   let entity_create_batch_mutation_field_name =
+       Arc::new(get_entity_create_batch_mutation_field_name::<T>(context));
    let entity_create_batch_mutation_data_field_name =
-       Arc::new(context.entity_create_batch_mutation.data_field.clone());
+       Arc::new(get_entity_create_batch_mutation_data_field_name(context).to_string());
-   let entity_update_mutation_field_name = Arc::new(format!(
-       "{}{}",
-       entity_name, context.entity_update_mutation.mutation_suffix
-   ));
+   let entity_update_mutation_field_name =
+       Arc::new(get_entity_update_mutation_field_name::<T>(context));
    let entity_update_mutation_data_field_name =
-       Arc::new(context.entity_update_mutation.data_field.clone());
+       Arc::new(get_entity_update_mutation_data_field_name(context).to_string());

    Box::new(move |context: &ResolverContext| -> GuardAction {
        match context.ctx.data::<AuthUserInfo>() {
@@ -222,11 +219,10 @@ where
            if let Some(value) = filter.get("eq") {
                let value: i32 = value.i64()?.try_into()?;
                if value != subscriber_id {
-                   return Err(SeaographyError::AsyncGraphQLError(
-                       async_graphql::Error::new(
-                           "subscriber_id and auth_info does not match",
-                       ),
-                   ));
+                   return Err(async_graphql::Error::new(
+                       "subscriber_id and auth_info does not match",
+                   )
+                   .into());
                }
            }
        }
@@ -253,17 +249,10 @@ where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-   let entity_key = get_entity_key::<T>(context);
-   let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
-   let entity_create_one_mutation_field_name = Arc::new(format!(
-       "{}{}",
-       entity_name, context.entity_create_one_mutation.mutation_suffix
-   ));
-   let entity_create_batch_mutation_field_name = Arc::new(format!(
-       "{}{}",
-       entity_name,
-       context.entity_create_batch_mutation.mutation_suffix.clone()
-   ));
+   let entity_create_one_mutation_field_name =
+       Arc::new(get_entity_create_one_mutation_field_name::<T>(context));
+   let entity_create_batch_mutation_field_name =
+       Arc::new(get_entity_create_batch_mutation_field_name::<T>(context));

    Box::new(
        move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
            let field_name = context.field().name();
@@ -289,40 +278,39 @@ where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-   let entity_key = get_entity_key::<T>(context);
-   let entity_column_key = get_entity_column_key::<T>(context, column);
+   let entity_and_column = get_entity_and_column_name::<T>(context, column);

    context.guards.entity_guards.insert(
-       entity_key.clone(),
+       get_entity_name::<T>(context),
        guard_entity_with_subscriber_id::<T>(context, column),
    );
    context.guards.field_guards.insert(
-       entity_column_key.clone(),
+       get_entity_and_column_name::<T>(context, column),
        guard_field_with_subscriber_id::<T>(context, column),
    );
    context.filter_types.overwrites.insert(
-       entity_column_key.clone(),
+       get_entity_and_column_name::<T>(context, column),
        Some(FilterType::Custom(
            SUBSCRIBER_ID_FILTER_INFO.type_name.clone(),
        )),
    );
    context.filter_types.condition_functions.insert(
-       entity_column_key.clone(),
+       entity_and_column.clone(),
        generate_subscriber_id_filter_condition::<T>(context, column),
    );
    context.types.input_none_conversions.insert(
-       entity_column_key.clone(),
+       entity_and_column.clone(),
        generate_default_subscriber_id_input_conversion::<T>(context, column),
    );
-   context.entity_input.update_skips.push(entity_column_key);
+   context.entity_input.update_skips.push(entity_and_column);
}

pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
    restrict_subscriber_for_entity::<subscribers::Entity>(context, &subscribers::Column::Id);
    for column in subscribers::Column::iter() {
        if !matches!(column, subscribers::Column::Id) {
-           let key = get_entity_column_key::<subscribers::Entity>(context, &column);
+           let key = get_entity_and_column_name::<subscribers::Entity>(context, &column);
            context.filter_types.overwrites.insert(key, None);
        }
    }
@@ -330,24 +318,14 @@ pub fn register_subscribers_to_schema_context(context: &mut BuilderContext) {
pub fn register_subscribers_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    {
-       let filter_types_map_helper = FilterTypesMapHelper {
-           context: builder.context,
-       };
        builder.schema = builder
            .schema
-           .register(filter_types_map_helper.generate_filter_input(&SUBSCRIBER_ID_FILTER_INFO));
+           .register(FilterTypesMapHelper::generate_filter_input(
+               &SUBSCRIBER_ID_FILTER_INFO,
+           ));
    }
-   {
-       builder.register_entity::<subscribers::Entity>(
-           <subscribers::RelatedEntity as sea_orm::Iterable>::iter()
-               .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
-               .collect(),
-       );
-       builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
-       builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
-   }
+   builder = register_entity_default_readonly!(builder, subscribers);
    builder
}
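Reduced to plain logic, the guard and none-conversion above enforce one contract; a hedged sketch:

// hedged sketch, plain-logic version of the subscriber_id guard
fn guard_subscriber_id(
    auth_subscriber_id: i32,
    payload_subscriber_id: Option<i32>,
) -> Result<(), String> {
    match payload_subscriber_id {
        // omitted -> the input none-conversion fills it from auth info
        None => Ok(()),
        Some(id) if id == auth_subscriber_id => Ok(()),
        Some(_) => Err("subscriber_id and auth_info does not match".into()),
    }
}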

View File

@@ -1,7 +1,11 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
-   graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_bangumi,
+   graphql::{
+       domains::subscribers::restrict_subscriber_for_entity,
+       infra::custom::register_entity_default_writable,
+   },
+   models::subscription_bangumi,
};

pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderContext) {
@@ -14,7 +18,7 @@ pub fn register_subscription_bangumi_to_schema_context(context: &mut BuilderCont
pub fn register_subscription_bangumi_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
-   seaography::register_entity!(builder, subscription_bangumi);
+   builder = register_entity_default_writable!(builder, subscription_bangumi, false);
    builder
}

View File

@@ -1,7 +1,11 @@
use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
-   graphql::domains::subscribers::restrict_subscriber_for_entity, models::subscription_episode,
+   graphql::{
+       domains::subscribers::restrict_subscriber_for_entity,
+       infra::custom::register_entity_default_writable,
+   },
+   models::subscription_episode,
};

pub fn register_subscription_episode_to_schema_context(context: &mut BuilderContext) {
@@ -14,7 +18,7 @@ pub fn register_subscription_episode_to_schema_context(context: &mut BuilderCont
pub fn register_subscription_episode_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
-   seaography::register_entity!(builder, subscription_episode);
+   builder = register_entity_default_writable!(builder, subscription_episode, false);
    builder
}

View File

@@ -1,23 +1,11 @@
-use std::sync::Arc;
-
-use async_graphql::dynamic::{FieldValue, TypeRef};
-use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
-use seaography::{
-   Builder as SeaographyBuilder, BuilderContext, EntityObjectBuilder, EntityQueryFieldBuilder,
-   get_filter_conditions,
-};
+use seaography::{Builder as SeaographyBuilder, BuilderContext};

use crate::{
-   errors::RecorderError,
    graphql::{
        domains::subscribers::restrict_subscriber_for_entity,
-       infra::custom::generate_entity_filter_mutation_field,
+       infra::custom::register_entity_default_writable,
    },
-   models::{
-       subscriber_tasks,
-       subscriptions::{self, SubscriptionTrait},
-   },
-   task::SubscriberTask,
+   models::subscriptions,
};

pub fn register_subscriptions_to_schema_context(context: &mut BuilderContext) {
@@ -31,184 +19,6 @@ pub fn register_subscriptions_to_schema_builder(
    mut builder: SeaographyBuilder,
) -> SeaographyBuilder {
    builder.register_enumeration::<subscriptions::SubscriptionCategory>();
-   seaography::register_entity!(builder, subscriptions);
+   builder = register_entity_default_writable!(builder, subscriptions, false);
let context = builder.context;
let entity_object_builder = EntityObjectBuilder { context };
let entity_query_field = EntityQueryFieldBuilder { context };
{
let sync_one_feeds_incremental_mutation_name = format!(
"{}SyncOneFeedsIncremental",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_feeds_incremental_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_incremental_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsIncremental(
subscription.into(),
),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_feeds_incremental_mutation);
}
{
let sync_one_feeds_full_mutation_name = format!(
"{}SyncOneFeedsFull",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_feeds_full_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_feeds_full_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionFeedsFull(subscription.into()),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_feeds_full_mutation);
}
{
let sync_one_sources_mutation_name = format!(
"{}SyncOneSources",
entity_query_field.type_name::<subscriptions::Entity>()
);
let sync_one_sources_mutation = generate_entity_filter_mutation_field::<
subscriptions::Entity,
_,
_,
>(
builder.context,
sync_one_sources_mutation_name,
TypeRef::named_nn(entity_object_builder.type_name::<subscriber_tasks::Entity>()),
Arc::new(|resolver_ctx, app_ctx, filters| {
let filters_condition =
get_filter_conditions::<subscriptions::Entity>(resolver_ctx, context, filters);
Box::pin(async move {
let db = app_ctx.db();
let subscription_model = subscriptions::Entity::find()
.filter(filters_condition)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
let subscription =
subscriptions::Subscription::try_from_model(&subscription_model)?;
let task_service = app_ctx.task();
let task_id = task_service
.add_subscriber_task(
subscription_model.subscriber_id,
SubscriberTask::SyncOneSubscriptionSources(subscription.into()),
)
.await?;
let task_model = subscriber_tasks::Entity::find()
.filter(subscriber_tasks::Column::Id.eq(task_id.to_string()))
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "SubscriberTask".into(),
})?;
Ok(Some(FieldValue::owned_any(task_model)))
})
}),
);
builder.mutations.push(sync_one_sources_mutation);
}
    builder
}
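The three removed `syncOne*` mutations differed only in the `SubscriberTask` variant they enqueued; a hedged sketch of the shared shape they all followed (two-argument `add_subscriber_task`, as in the removed code; the replacement path instead packs the subscriber id into the generic task-create mutation shown earlier):

// hedged sketch distilled from the removed mutation bodies
async fn enqueue_sync(
    app_ctx: &dyn AppContextTrait,
    subscription_model: &subscriptions::Model,
) -> RecorderResult<String> {
    let subscription = subscriptions::Subscription::try_from_model(subscription_model)?;
    let task_id = app_ctx
        .task()
        .add_subscriber_task(
            subscription_model.subscriber_id,
            SubscriberTask::SyncOneSubscriptionFeedsIncremental(subscription.into()),
        )
        .await?;
    Ok(task_id.to_string())
}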

View File

@@ -4,10 +4,7 @@ use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::{EntityTrait, Value as SeaValue};
use seaography::{BuilderContext, SeaResult};

-use crate::{
-   app::AppContextTrait,
-   graphql::infra::util::{get_column_key, get_entity_key},
-};
+use crate::{app::AppContextTrait, graphql::infra::name::get_entity_and_column_name};

pub fn register_crypto_column_input_conversion_to_schema_context<T>(
    context: &mut BuilderContext,
@@ -17,13 +14,8 @@ pub fn register_crypto_column_input_conversion_to_schema_context<T>(
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-   let entity_key = get_entity_key::<T>(context);
-   let column_name = get_column_key::<T>(context, column);
-   let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
-   let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
    context.types.input_conversions.insert(
-       format!("{entity_name}.{column_name}"),
+       get_entity_and_column_name::<T>(context, column),
        Box::new(
            move |_resolve_context: &ResolverContext<'_>,
                  value: &ValueAccessor|
@@ -44,13 +36,8 @@ pub fn register_crypto_column_output_conversion_to_schema_context<T>(
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
-   let entity_key = get_entity_key::<T>(context);
-   let column_name = get_column_key::<T>(context, column);
-   let entity_name = context.entity_object.type_name.as_ref()(&entity_key);
-   let column_name = context.entity_object.column_name.as_ref()(&entity_key, &column_name);
    context.types.output_conversions.insert(
-       format!("{entity_name}.{column_name}"),
+       get_entity_and_column_name::<T>(context, column),
        Box::new(
            move |value: &sea_orm::Value| -> SeaResult<async_graphql::Value> {
                if let SeaValue::String(s) = value {
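`get_entity_and_column_name` is assumed to produce the same `{EntityName}.{columnName}` key the removed `format!` built by hand, with both names resolved through the builder context's casing rules; in sketch form:

// hedged sketch of the assumed key shape
fn entity_and_column_name_shape(entity_name: &str, column_name: &str) -> String {
    format!("{entity_name}.{column_name}")
}

// e.g. entity_and_column_name_shape("Credential3rd", "password") == "Credential3rd.password"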

View File

@@ -1,53 +1,140 @@
-use std::{pin::Pin, sync::Arc};
+use std::{iter::FusedIterator, pin::Pin, sync::Arc};

use async_graphql::dynamic::{
-   Field, FieldFuture, FieldValue, InputValue, ResolverContext, TypeRef, ValueAccessor,
+   Field, FieldFuture, FieldValue, InputObject, InputValue, Object, ObjectAccessor,
+   ResolverContext, TypeRef,
};
-use sea_orm::EntityTrait;
-use seaography::{BuilderContext, EntityObjectBuilder, FilterInputBuilder, GuardAction};
+use sea_orm::{ActiveModelTrait, Condition, EntityTrait, IntoActiveModel};
+use seaography::{
+   Builder as SeaographyBuilder, BuilderContext, EntityCreateBatchMutationBuilder,
+   EntityCreateOneMutationBuilder, EntityDeleteMutationBuilder, EntityInputBuilder,
+   EntityObjectBuilder, EntityUpdateMutationBuilder, GuardAction, RelationBuilder,
+   get_filter_conditions,
+};

-use crate::{app::AppContextTrait, errors::RecorderResult};
+use crate::{
+   app::AppContextTrait,
+   errors::RecorderResult,
+   graphql::infra::name::{
+       get_entity_filter_input_type_name, get_entity_name,
+       get_entity_renormalized_filter_field_name,
+   },
+};

pub type FilterMutationFn = Arc<
    dyn for<'a> Fn(
        &ResolverContext<'a>,
        Arc<dyn AppContextTrait>,
-       Option<ValueAccessor<'_>>,
+       Condition,
    ) -> Pin<
        Box<dyn Future<Output = RecorderResult<Option<FieldValue<'a>>>> + Send + 'a>,
    > + Send
    + Sync,
>;

-pub fn generate_entity_filter_mutation_field<T, N, R>(
+pub type CreateOneMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
ObjectAccessor<'a>,
) -> Pin<Box<dyn Future<Output = RecorderResult<M>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type CreateBatchMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Vec<ObjectAccessor<'a>>,
) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type UpdateMutationFn<M> = Arc<
dyn for<'a> Fn(
&'a ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Condition,
ObjectAccessor<'a>,
) -> Pin<Box<dyn Future<Output = RecorderResult<Vec<M>>> + Send + 'a>>
+ Send
+ Sync,
>;
pub type DeleteMutationFn = Arc<
dyn for<'a> Fn(
&ResolverContext<'a>,
Arc<dyn AppContextTrait>,
Condition,
) -> Pin<Box<dyn Future<Output = RecorderResult<u64>> + Send + 'a>>
+ Send
+ Sync,
>;
pub fn generate_entity_default_insert_input_object<T>(context: &BuilderContext) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
EntityInputBuilder::insert_input_object::<T>(context)
}
pub fn generate_entity_default_update_input_object<T>(context: &BuilderContext) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
EntityInputBuilder::update_input_object::<T>(context)
}
pub fn generate_entity_default_basic_entity_object<T>(context: &'static BuilderContext) -> Object
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_object_builder = EntityObjectBuilder { context };
entity_object_builder.basic_to_object::<T>()
}
pub fn generate_entity_input_object<T>(
context: &'static BuilderContext,
is_insert: bool,
) -> InputObject
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
if is_insert {
EntityInputBuilder::insert_input_object::<T>(context)
} else {
EntityInputBuilder::update_input_object::<T>(context)
}
}
+pub fn generate_entity_filtered_mutation_field<E, N, R>(
    builder_context: &'static BuilderContext,
    field_name: N,
    type_ref: R,
    mutation_fn: FilterMutationFn,
) -> Field
where
-   T: EntityTrait,
-   <T as EntityTrait>::Model: Sync,
+   E: EntityTrait,
+   <E as EntityTrait>::Model: Sync,
    N: Into<String>,
    R: Into<TypeRef>,
{
-   let entity_filter_input_builder = FilterInputBuilder {
-       context: builder_context,
-   };
-   let entity_object_builder = EntityObjectBuilder {
-       context: builder_context,
-   };
-   let object_name: String = entity_object_builder.type_name::<T>();
-   let context = builder_context;
+   let object_name: String = get_entity_name::<E>(builder_context);
    let guard = builder_context.guards.entity_guards.get(&object_name);

-   Field::new(field_name, type_ref, move |ctx| {
+   Field::new(field_name, type_ref, move |resolve_context| {
        let mutation_fn = mutation_fn.clone();
        FieldFuture::new(async move {
            let guard_flag = if let Some(guard) = guard {
-               (*guard)(&ctx)
+               (*guard)(&resolve_context)
            } else {
                GuardAction::Allow
            };
@@ -58,19 +145,297 @@ where
                ));
            }

-           let app_ctx = ctx.data::<Arc<dyn AppContextTrait>>()?;
-           let filters = ctx.args.get(&context.entity_delete_mutation.filter_field);
-           let result = mutation_fn(&ctx, app_ctx.clone(), filters)
-               .await
-               .map_err(async_graphql::Error::new_with_source)?;
+           let filters = resolve_context
+               .args
+               .get(get_entity_renormalized_filter_field_name());
+           let filters = get_filter_conditions::<E>(&resolve_context, builder_context, filters);
+           let app_ctx = resolve_context.data::<Arc<dyn AppContextTrait>>()?;
+           let result = mutation_fn(&resolve_context, app_ctx.clone(), filters).await?;
            Ok(result)
        })
    })
    .argument(InputValue::new(
-       &context.entity_delete_mutation.filter_field,
-       TypeRef::named(entity_filter_input_builder.type_name(&object_name)),
+       get_entity_renormalized_filter_field_name(),
+       TypeRef::named(get_entity_filter_input_type_name::<E>(builder_context)),
    ))
}
pub fn generate_entity_create_one_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: CreateOneMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_create_one_mutation_builder = EntityCreateOneMutationBuilder {
context: builder_context,
};
entity_create_one_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, input_object| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(resolver_ctx, app_ctx.clone(), input_object).await?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_create_one_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_create_one_mutation_builder = EntityCreateOneMutationBuilder {
context: builder_context,
};
entity_create_one_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_create_batch_mutation_field<E, ID>(
builder_context: &'static BuilderContext,
mutation_fn: CreateBatchMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_create_batch_mutation_builder = EntityCreateBatchMutationBuilder {
context: builder_context,
};
entity_create_batch_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, input_objects| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(resolver_ctx, app_ctx.clone(), input_objects).await?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_create_batch_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_create_batch_mutation_builder = EntityCreateBatchMutationBuilder {
context: builder_context,
};
entity_create_batch_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_update_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: UpdateMutationFn<E::Model>,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_update_mutation_builder = EntityUpdateMutationBuilder {
context: builder_context,
};
entity_update_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, filters, input_object| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(
resolver_ctx,
app_ctx.clone(),
get_filter_conditions::<E>(resolver_ctx, builder_context, filters),
input_object,
)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_update_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
<E as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_update_mutation_builder = EntityUpdateMutationBuilder {
context: builder_context,
};
entity_update_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn generate_entity_delete_mutation_field<E>(
builder_context: &'static BuilderContext,
mutation_fn: DeleteMutationFn,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync,
{
let entity_delete_mutation_builder = EntityDeleteMutationBuilder {
context: builder_context,
};
entity_delete_mutation_builder.to_field_with_mutation_fn::<E>(Arc::new(
move |resolver_ctx, filters| {
let mutation_fn = mutation_fn.clone();
Box::pin(async move {
let app_ctx = resolver_ctx.data::<Arc<dyn AppContextTrait>>()?;
let result = mutation_fn(
resolver_ctx,
app_ctx.clone(),
get_filter_conditions::<E>(resolver_ctx, builder_context, filters),
)
.await
.map_err(async_graphql::Error::new_with_source)?;
Ok(result)
})
},
))
}
pub fn generate_entity_default_delete_mutation_field<E, A>(
builder_context: &'static BuilderContext,
active_model_hooks: bool,
) -> Field
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let entity_delete_mutation_builder = EntityDeleteMutationBuilder {
context: builder_context,
};
entity_delete_mutation_builder.to_field::<E, A>(active_model_hooks)
}
pub fn register_entity_default_mutations<E, A>(
mut builder: SeaographyBuilder,
active_model_hooks: bool,
) -> SeaographyBuilder
where
E: EntityTrait,
<E as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = E> + sea_orm::ActiveModelBehavior + std::marker::Send,
{
let builder_context = builder.context;
builder
.outputs
.push(generate_entity_default_basic_entity_object::<E>(
builder_context,
));
builder.inputs.extend([
generate_entity_default_insert_input_object::<E>(builder_context),
generate_entity_default_update_input_object::<E>(builder_context),
]);
builder.mutations.extend([
generate_entity_default_create_one_mutation_field::<E, A>(
builder_context,
active_model_hooks,
),
generate_entity_default_create_batch_mutation_field::<E, A>(
builder_context,
active_model_hooks,
),
generate_entity_default_update_mutation_field::<E, A>(builder_context, active_model_hooks),
generate_entity_default_delete_mutation_field::<E, A>(builder_context, active_model_hooks),
]);
builder
}
pub(crate) fn register_entity_default_readonly_impl<T, RE, I>(
mut builder: SeaographyBuilder,
entity: T,
) -> SeaographyBuilder
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
RE: sea_orm::Iterable<Iterator = I> + RelationBuilder,
I: Iterator<Item = RE> + Clone + DoubleEndedIterator + ExactSizeIterator + FusedIterator,
{
builder.register_entity::<T>(
<RE as sea_orm::Iterable>::iter()
.map(|rel| RelationBuilder::get_relation(&rel, builder.context))
.collect(),
);
builder = builder.register_entity_dataloader_one_to_one(entity, tokio::spawn);
builder = builder.register_entity_dataloader_one_to_many(entity, tokio::spawn);
builder
}
pub(crate) fn register_entity_default_writable_impl<T, RE, A, I>(
mut builder: SeaographyBuilder,
entity: T,
active_model_hooks: bool,
) -> SeaographyBuilder
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync + IntoActiveModel<A>,
A: ActiveModelTrait<Entity = T> + sea_orm::ActiveModelBehavior + std::marker::Send,
RE: sea_orm::Iterable<Iterator = I> + RelationBuilder,
I: Iterator<Item = RE> + Clone + DoubleEndedIterator + ExactSizeIterator + FusedIterator,
{
builder = register_entity_default_readonly_impl::<T, RE, I>(builder, entity);
builder = register_entity_default_mutations::<T, A>(builder, active_model_hooks);
builder
}
macro_rules! register_entity_default_readonly {
($builder:expr, $module_path:ident) => {
$crate::graphql::infra::custom::register_entity_default_readonly_impl::<
$module_path::Entity,
$module_path::RelatedEntity,
_,
>($builder, $module_path::Entity)
};
}
macro_rules! register_entity_default_writable {
($builder:expr, $module_path:ident, $active_model_hooks:expr) => {
$crate::graphql::infra::custom::register_entity_default_writable_impl::<
$module_path::Entity,
$module_path::RelatedEntity,
$module_path::ActiveModel,
_,
>($builder, $module_path::Entity, $active_model_hooks)
};
}
pub(crate) use register_entity_default_readonly;
pub(crate) use register_entity_default_writable;
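
For orientation, a minimal usage sketch of the two macros above (not part of this diff; `my_entity` is a hypothetical module exposing `Entity`, `RelatedEntity` and `ActiveModel`):

fn register_my_entity_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
    // Read-only registration wires up queries and relation dataloaders only:
    // builder = register_entity_default_readonly!(builder, my_entity);
    // Writable registration additionally adds create one/batch, update and
    // delete mutations; `true` opts in to ActiveModelBehavior hooks
    // (e.g. before_save) during those mutations.
    builder = register_entity_default_writable!(builder, my_entity, true);
    builder
}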

View File

@@ -17,7 +17,7 @@ use serde::{Serialize, de::DeserializeOwned};
use serde_json::Value as JsonValue; use serde_json::Value as JsonValue;
use crate::{ use crate::{
errors::RecorderResult, graphql::infra::util::get_entity_column_key, errors::RecorderResult, graphql::infra::name::get_entity_and_column_name,
utils::json::convert_json_keys, utils::json::convert_json_keys,
}; };
@@ -911,18 +911,15 @@ where
Box::new( Box::new(
move |_resolve_context: &ResolverContext<'_>, condition, filter| { move |_resolve_context: &ResolverContext<'_>, condition, filter| {
if let Some(filter) = filter { if let Some(filter) = filter {
let filter_value = to_value(filter.as_index_map()).map_err(|e| { let filter_value =
SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e)) to_value(filter.as_index_map()).map_err(GraphqlError::new_with_source)?;
})?;
let filter_json: JsonValue = filter_value.into_json().map_err(|e| { let filter_json: JsonValue = filter_value
SeaographyError::AsyncGraphQLError(GraphqlError::new(format!("{e:?}"))) .into_json()
})?; .map_err(GraphqlError::new_with_source)?;
let cond_where = prepare_jsonb_filter_input(&Expr::col(column), filter_json) let cond_where = prepare_jsonb_filter_input(&Expr::col(column), filter_json)
.map_err(|e| { .map_err(GraphqlError::new_with_source)?;
SeaographyError::AsyncGraphQLError(GraphqlError::new_with_source(e))
})?;
let condition = condition.add(cond_where); let condition = condition.add(cond_where);
Ok(condition) Ok(condition)
@@ -946,65 +943,76 @@ where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
{ {
let entity_column_key = get_entity_column_key::<T>(context, column); let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.filter_types.overwrites.insert( context.filter_types.overwrites.insert(
entity_column_key.clone(), get_entity_and_column_name::<T>(context, column),
Some(FilterType::Custom(JSONB_FILTER_NAME.to_string())), Some(FilterType::Custom(JSONB_FILTER_NAME.to_string())),
); );
context.filter_types.condition_functions.insert(
entity_column_name.clone(),
generate_jsonb_filter_condition_function::<T>(context, column),
);
} }
pub fn validate_jsonb_input_for_entity<T, S>(context: &mut BuilderContext, column: &T::Column) pub fn try_convert_jsonb_input_for_entity<T, S>(
where context: &mut BuilderContext,
column: &T::Column,
case: Option<Case<'static>>,
) where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
S: DeserializeOwned + Serialize, S: DeserializeOwned + Serialize,
{ {
let entity_column_key = get_entity_column_key::<T>(context, column); let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.types.input_conversions.insert( context.types.input_conversions.insert(
entity_column_key.clone(), entity_column_name.clone(),
Box::new(move |_resolve_context, accessor| { Box::new(move |_resolve_context, accessor| {
let deserialized = accessor.deserialize::<S>().map_err(|err| { let mut json_value: serde_json::Value = accessor.deserialize()?;
SeaographyError::TypeConversionError(
err.message, if let Some(case) = case {
format!("Json - {entity_column_key}"), json_value = convert_json_keys(json_value, case);
) }
})?;
let json_value = serde_json::to_value(deserialized).map_err(|err| { serde_json::from_value::<S>(json_value.clone()).map_err(|err| {
SeaographyError::TypeConversionError( SeaographyError::TypeConversionError(
err.to_string(), err.to_string(),
format!("Json - {entity_column_key}"), format!("Json - {entity_column_name}"),
) )
})?; })?;
Ok(sea_orm::Value::Json(Some(Box::new(json_value)))) Ok(sea_orm::Value::Json(Some(Box::new(json_value))))
}), }),
); );
} }
pub fn convert_jsonb_output_case_for_entity<T>(context: &mut BuilderContext, column: &T::Column) pub fn convert_jsonb_output_for_entity<T>(
where context: &mut BuilderContext,
column: &T::Column,
case: Option<Case<'static>>,
) where
T: EntityTrait, T: EntityTrait,
<T as EntityTrait>::Model: Sync, <T as EntityTrait>::Model: Sync,
{ {
let entity_column_key = get_entity_column_key::<T>(context, column); let entity_column_name = get_entity_and_column_name::<T>(context, column);
context.types.output_conversions.insert( context.types.output_conversions.insert(
entity_column_key.clone(), entity_column_name.clone(),
Box::new(move |value| { Box::new(move |value| {
if let sea_orm::Value::Json(Some(json)) = value { if let sea_orm::Value::Json(Some(json)) = value {
let result = async_graphql::Value::from_json(convert_json_keys( let mut json_value = json.as_ref().clone();
json.as_ref().clone(), if let Some(case) = case {
Case::Camel, json_value = convert_json_keys(json_value, case);
)) }
.map_err(|err| { let result = async_graphql::Value::from_json(json_value).map_err(|err| {
SeaographyError::TypeConversionError( SeaographyError::TypeConversionError(
err.to_string(), err.to_string(),
format!("Json - {entity_column_key}"), format!("Json - {entity_column_name}"),
) )
})?; })?;
Ok(result) Ok(result)
} else { } else {
Err(SeaographyError::TypeConversionError( Err(SeaographyError::TypeConversionError(
"value should be json".to_string(), "value should be json".to_string(),
format!("Json - {entity_column_key}"), format!("Json - {entity_column_name}"),
)) ))
} }
}), }),
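
A hedged sketch of how the jsonb helpers above are typically combined when registering a column (entity and function names here are illustrative, not from this diff):

fn register_my_jsonb_column_to_schema_context(context: &mut BuilderContext) {
    // Assumed: `my_entity::Column::Payload` is a jsonb column whose content
    // deserializes as `MyPayload: Serialize + DeserializeOwned`.
    try_convert_jsonb_input_for_entity::<my_entity::Entity, MyPayload>(
        context,
        &my_entity::Column::Payload,
        Some(Case::Snake), // normalize camelCase GraphQL input keys to snake_case JSON
    );
    convert_jsonb_output_for_entity::<my_entity::Entity>(
        context,
        &my_entity::Column::Payload,
        Some(Case::Camel), // expose stored snake_case keys as camelCase to clients
    );
}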

View File

@@ -1,4 +1,4 @@
pub mod crypto; pub mod crypto;
pub mod custom; pub mod custom;
pub mod json; pub mod json;
pub mod util; pub mod name;

View File

@@ -0,0 +1,203 @@
use std::fmt::Display;
use sea_orm::{EntityName, EntityTrait, IdenStatic};
use seaography::BuilderContext;
pub fn get_entity_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let t = T::default();
let name = <T as EntityName>::table_name(&t);
context.entity_object.type_name.as_ref()(name)
}
pub fn get_column_name<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.entity_object.column_name.as_ref()(&entity_name, column.as_str())
}
pub fn get_entity_and_column_name<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
let column_name = get_column_name::<T>(context, column);
format!("{entity_name}.{column_name}")
}
pub fn get_entity_and_column_name_from_column_str<T>(
context: &BuilderContext,
column_str: &str,
) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}.{column_str}")
}
pub fn get_entity_basic_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let t = T::default();
let name = <T as EntityName>::table_name(&t);
format!(
"{}{}",
context.entity_object.type_name.as_ref()(name),
context.entity_object.basic_type_suffix
)
}
pub fn get_entity_query_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.entity_query_field.type_name.as_ref()(&entity_name)
}
pub fn get_entity_filter_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
context.filter_input.type_name.as_ref()(&entity_name)
}
pub fn get_entity_insert_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}{}", context.entity_input.insert_suffix)
}
pub fn get_entity_update_input_type_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_name::<T>(context);
format!("{entity_name}{}", context.entity_input.update_suffix)
}
pub fn get_entity_create_one_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_create_one_mutation.mutation_suffix
)
}
pub fn get_entity_create_batch_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_create_batch_mutation.mutation_suffix
)
}
pub fn get_entity_delete_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_delete_mutation.mutation_suffix
)
}
pub fn get_entity_update_mutation_field_name<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!(
"{}{}",
query_field_name, context.entity_update_mutation.mutation_suffix
)
}
pub fn get_entity_custom_mutation_field_name<T>(
context: &BuilderContext,
mutation_suffix: impl Display,
) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let query_field_name = get_entity_query_field_name::<T>(context);
format!("{query_field_name}{mutation_suffix}")
}
pub fn get_entity_renormalized_filter_field_name() -> &'static str {
"filter"
}
pub fn get_entity_query_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_query_field.filters
}
pub fn get_entity_update_mutation_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_update_mutation.filter_field
}
pub fn get_entity_delete_mutation_filter_field_name(context: &BuilderContext) -> &str {
&context.entity_delete_mutation.filter_field
}
pub fn renormalize_filter_field_names_to_schema_context(context: &mut BuilderContext) {
let renormalized_filter_field_name = get_entity_renormalized_filter_field_name();
context.entity_query_field.filters = renormalized_filter_field_name.to_string();
context.entity_update_mutation.filter_field = renormalized_filter_field_name.to_string();
context.entity_delete_mutation.filter_field = renormalized_filter_field_name.to_string();
}
pub fn get_entity_renormalized_data_field_name() -> &'static str {
"data"
}
pub fn get_entity_create_one_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_create_one_mutation.data_field
}
pub fn get_entity_create_batch_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_create_batch_mutation.data_field
}
pub fn get_entity_update_mutation_data_field_name(context: &BuilderContext) -> &str {
&context.entity_update_mutation.data_field
}
pub fn renormalize_data_field_names_to_schema_context(context: &mut BuilderContext) {
let renormalized_data_field_name = get_entity_renormalized_data_field_name();
context.entity_create_one_mutation.data_field = renormalized_data_field_name.to_string();
context.entity_create_batch_mutation.data_field = renormalized_data_field_name.to_string();
context.entity_update_mutation.data_field = renormalized_data_field_name.to_string();
}
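
The two renormalize_* helpers above pin every generated filter/data argument to the same literal names. A small sketch of the observable effect (not part of this diff):

#[cfg(test)]
mod renormalize_sketch {
    use super::*;

    #[test]
    fn filter_and_data_names_are_unified() {
        let mut context = seaography::BuilderContext::default();
        renormalize_filter_field_names_to_schema_context(&mut context);
        renormalize_data_field_names_to_schema_context(&mut context);
        // Query, update and delete fields now all take a `filter` argument,
        // and all create/update mutations take a `data` argument.
        assert_eq!(get_entity_query_filter_field_name(&context), "filter");
        assert_eq!(get_entity_delete_mutation_filter_field_name(&context), "filter");
        assert_eq!(get_entity_create_batch_mutation_data_field_name(&context), "data");
    }
}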

View File

@@ -1,30 +0,0 @@
use sea_orm::{EntityName, EntityTrait, IdenStatic};
use seaography::BuilderContext;
pub fn get_entity_key<T>(context: &BuilderContext) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
context.entity_object.type_name.as_ref()(<T as EntityName>::table_name(&T::default()))
}
pub fn get_column_key<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_key::<T>(context);
context.entity_object.column_name.as_ref()(&entity_name, column.as_str())
}
pub fn get_entity_column_key<T>(context: &BuilderContext, column: &T::Column) -> String
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_name = get_entity_key::<T>(context);
let column_name = get_column_key::<T>(context, column);
format!("{}.{}", &entity_name, &column_name)
}

View File

@@ -12,6 +12,7 @@ use crate::{
credential_3rd::{ credential_3rd::{
register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context, register_credential3rd_to_schema_builder, register_credential3rd_to_schema_context,
}, },
cron::{register_cron_to_schema_builder, register_cron_to_schema_context},
downloaders::{ downloaders::{
register_downloaders_to_schema_builder, register_downloaders_to_schema_context, register_downloaders_to_schema_builder, register_downloaders_to_schema_context,
}, },
@@ -39,7 +40,13 @@ use crate::{
register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context, register_subscriptions_to_schema_builder, register_subscriptions_to_schema_context,
}, },
}, },
infra::json::register_jsonb_input_filter_to_schema_builder, infra::{
json::register_jsonb_input_filter_to_schema_builder,
name::{
renormalize_data_field_names_to_schema_context,
renormalize_filter_field_names_to_schema_context,
},
},
}, },
}; };
@@ -55,6 +62,9 @@ pub fn build_schema(
let context = CONTEXT.get_or_init(|| { let context = CONTEXT.get_or_init(|| {
let mut context = BuilderContext::default(); let mut context = BuilderContext::default();
renormalize_filter_field_names_to_schema_context(&mut context);
renormalize_data_field_names_to_schema_context(&mut context);
{ {
// domains // domains
register_feeds_to_schema_context(&mut context); register_feeds_to_schema_context(&mut context);
@@ -68,6 +78,7 @@ pub fn build_schema(
register_subscription_bangumi_to_schema_context(&mut context); register_subscription_bangumi_to_schema_context(&mut context);
register_subscription_episode_to_schema_context(&mut context); register_subscription_episode_to_schema_context(&mut context);
register_bangumi_to_schema_context(&mut context); register_bangumi_to_schema_context(&mut context);
register_cron_to_schema_context(&mut context);
} }
context context
}); });
@@ -91,6 +102,7 @@ pub fn build_schema(
builder = register_credential3rd_to_schema_builder(builder); builder = register_credential3rd_to_schema_builder(builder);
builder = register_subscriber_tasks_to_schema_builder(builder); builder = register_subscriber_tasks_to_schema_builder(builder);
builder = register_bangumi_to_schema_builder(builder); builder = register_bangumi_to_schema_builder(builder);
builder = register_cron_to_schema_builder(builder);
} }
let schema = builder.schema_builder(); let schema = builder.schema_builder();

View File

@@ -7,11 +7,11 @@
async_fn_traits, async_fn_traits,
error_generic_member_access, error_generic_member_access,
associated_type_defaults, associated_type_defaults,
let_chains let_chains,
impl_trait_in_fn_trait_return
)] )]
#![allow(clippy::enum_variant_names)] #![allow(clippy::enum_variant_names)]
pub use downloader; pub use downloader;
pub mod app; pub mod app;
pub mod auth; pub mod auth;
pub mod cache; pub mod cache;

View File

@@ -171,6 +171,27 @@ pub enum Feeds {
SubscriptionId, SubscriptionId,
} }
#[derive(DeriveIden)]
pub enum Cron {
Table,
Id,
SubscriberId,
SubscriptionId,
CronExpr,
NextRun,
LastRun,
LastError,
Enabled,
LockedBy,
LockedAt,
TimeoutMs,
Attempts,
MaxAttempts,
Priority,
Status,
SubscriberTask,
}
macro_rules! create_postgres_enum_for_active_enum { macro_rules! create_postgres_enum_for_active_enum {
($manager: expr, $active_enum: expr, $($enum_value:expr),+) => { ($manager: expr, $active_enum: expr, $($enum_value:expr),+) => {
{ {

View File

@@ -52,8 +52,7 @@ impl MigrationTrait for Migration {
subscriptions::SubscriptionCategoryEnum, subscriptions::SubscriptionCategoryEnum,
subscriptions::SubscriptionCategory::MikanSubscriber, subscriptions::SubscriptionCategory::MikanSubscriber,
subscriptions::SubscriptionCategory::MikanBangumi, subscriptions::SubscriptionCategory::MikanBangumi,
subscriptions::SubscriptionCategory::MikanSeason, subscriptions::SubscriptionCategory::MikanSeason
subscriptions::SubscriptionCategory::Manual
) )
.await?; .await?;

View File

@@ -0,0 +1,62 @@
use async_trait::async_trait;
use sea_orm_migration::prelude::*;
use crate::task::SUBSCRIBER_TASK_APALIS_NAME;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE VIEW subscriber_tasks AS
SELECT
job,
job_type,
status,
(job ->> 'subscriber_id')::integer AS subscriber_id,
job ->> 'task_type' AS task_type,
id,
attempts,
max_attempts,
run_at,
last_error,
lock_at,
lock_by,
done_at,
priority,
(job ->> 'subscription_id')::integer AS subscription_id
FROM apalis.jobs
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscription_id
ON apalis.jobs (((job -> 'subscription_id')::integer))
WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
AND jsonb_path_exists(job, '$.subscription_id ? (@.type() == "number")')
AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(
r#"DROP INDEX IF EXISTS idx_apalis_jobs_subscription_id
ON apalis.jobs"#,
)
.await?;
Ok(())
}
}
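
Because the view (backed by the partial index above) flattens subscriber_id and subscription_id out of the jsonb payload, callers can filter on them like ordinary columns. A hedged sketch, not part of this diff, using sea_orm's raw statement API:

async fn count_tasks_for_subscription(
    db: &sea_orm::DatabaseConnection,
    subscription_id: i32,
) -> Result<i64, sea_orm::DbErr> {
    use sea_orm::{ConnectionTrait, DbBackend, Statement};
    let row = db
        .query_one(Statement::from_sql_and_values(
            DbBackend::Postgres,
            "SELECT COUNT(*) AS cnt FROM subscriber_tasks WHERE subscription_id = $1",
            [subscription_id.into()],
        ))
        .await?
        .expect("COUNT(*) always yields one row");
    row.try_get("", "cnt")
}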

View File

@@ -0,0 +1,248 @@
use async_trait::async_trait;
use sea_orm::ActiveEnum;
use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{
Cron, CustomSchemaManagerExt, GeneralIds, Subscribers, Subscriptions, table_auto_z,
},
models::cron::{
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_EVENT, CronStatus, CronStatusEnum,
NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME, NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME,
},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
create_postgres_enum_for_active_enum!(
manager,
CronStatusEnum,
CronStatus::Pending,
CronStatus::Running,
CronStatus::Completed,
CronStatus::Failed
)
.await?;
manager
.create_table(
table_auto_z(Cron::Table)
.col(pk_auto(Cron::Id))
.col(string(Cron::CronExpr))
.col(integer_null(Cron::SubscriberId))
.col(integer_null(Cron::SubscriptionId))
.col(timestamp_with_time_zone_null(Cron::NextRun))
.col(timestamp_with_time_zone_null(Cron::LastRun))
.col(string_null(Cron::LastError))
.col(boolean(Cron::Enabled).default(true))
.col(string_null(Cron::LockedBy))
.col(timestamp_with_time_zone_null(Cron::LockedAt))
.col(integer_null(Cron::TimeoutMs))
.col(integer(Cron::Attempts))
.col(integer(Cron::MaxAttempts))
.col(integer(Cron::Priority))
.col(enumeration(
Cron::Status,
CronStatusEnum,
CronStatus::iden_values(),
))
.col(json_binary_null(Cron::SubscriberTask))
.foreign_key(
ForeignKey::create()
.name("fk_cron_subscriber_id")
.from(Cron::Table, Cron::SubscriberId)
.to(Subscribers::Table, Subscribers::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.foreign_key(
ForeignKey::create()
.name("fk_cron_subscription_id")
.from(Cron::Table, Cron::SubscriptionId)
.to(Subscriptions::Table, Subscriptions::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Restrict),
)
.to_owned(),
)
.await?;
manager
.create_postgres_auto_update_ts_trigger_for_col(Cron::Table, GeneralIds::UpdatedAt)
.await?;
manager
.create_index(
IndexCreateStatement::new()
.if_not_exists()
.name("idx_cron_next_run")
.table(Cron::Table)
.col(Cron::NextRun)
.to_owned(),
)
.await?;
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}() RETURNS trigger AS $$
BEGIN
IF jsonb_path_exists(NEW.{subscriber_task}, '$.subscriber_id ? (@.type() == "number")') THEN
NEW.{subscriber_id} = (NEW.{subscriber_task} ->> 'subscriber_id')::integer;
END IF;
IF jsonb_path_exists(NEW.{subscriber_task}, '$.subscription_id ? (@.type() == "number")') THEN
NEW.{subscription_id} = (NEW.{subscriber_task} ->> 'subscription_id')::integer;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
subscriber_task = &Cron::SubscriberTask.to_string(),
subscriber_id = &Cron::SubscriberId.to_string(),
subscription_id = &Cron::SubscriptionId.to_string(),
)).await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME}
BEFORE INSERT OR UPDATE ON {table}
FOR EACH ROW
EXECUTE FUNCTION {SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME}();"#,
table = &Cron::Table.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME}() RETURNS trigger AS $$
BEGIN
-- Check if the cron is due to run
IF NEW.{next_run} IS NOT NULL
AND NEW.{next_run} <= CURRENT_TIMESTAMP
AND NEW.{enabled} = true
AND NEW.{status} = '{pending}'
AND NEW.{attempts} < NEW.{max_attempts}
            -- Check that the row is not locked, or that the lock has timed out
AND (
NEW.{locked_at} IS NULL
OR (
NEW.{timeout_ms} IS NOT NULL
AND (NEW.{locked_at} + NEW.{timeout_ms} * INTERVAL '1 millisecond') <= CURRENT_TIMESTAMP
)
)
            -- Make sure the cron is a new due event, not a repeat event
            -- (guard on TG_OP first: OLD is not assigned for INSERT row triggers)
            AND (
                TG_OP = 'INSERT'
                OR OLD.{next_run} IS NULL
                OR OLD.{next_run} > CURRENT_TIMESTAMP
                OR OLD.{enabled} = false
                OR OLD.{status} != '{pending}'
                OR OLD.{attempts} != NEW.{attempts}
            )
THEN
PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(NEW)::text);
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;"#,
next_run = &Cron::NextRun.to_string(),
enabled = &Cron::Enabled.to_string(),
locked_at = &Cron::LockedAt.to_string(),
timeout_ms = &Cron::TimeoutMs.to_string(),
status = &Cron::Status.to_string(),
pending = &CronStatus::Pending.to_value(),
attempts = &Cron::Attempts.to_string(),
max_attempts = &Cron::MaxAttempts.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE TRIGGER {NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME}
AFTER INSERT OR UPDATE ON {table}
FOR EACH ROW
EXECUTE FUNCTION {NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME}();"#,
table = &Cron::Table.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"CREATE OR REPLACE FUNCTION {CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME}() RETURNS INTEGER AS $$
DECLARE
cron_record RECORD;
notification_count INTEGER := 0;
BEGIN
FOR cron_record IN
SELECT * FROM {table}
WHERE {next_run} IS NOT NULL
AND {next_run} <= CURRENT_TIMESTAMP
AND {enabled} = true
AND {status} = '{pending}'
AND {attempts} < {max_attempts}
AND (
{locked_at} IS NULL
OR (
{timeout_ms} IS NOT NULL
AND {locked_at} + {timeout_ms} * INTERVAL '1 millisecond' <= CURRENT_TIMESTAMP
)
)
ORDER BY {priority} ASC, {next_run} ASC
FOR UPDATE SKIP LOCKED
LOOP
PERFORM pg_notify('{CRON_DUE_EVENT}', row_to_json(cron_record)::text);
notification_count := notification_count + 1;
END LOOP;
RETURN notification_count;
END;
$$ LANGUAGE plpgsql;"#,
table = &Cron::Table.to_string(),
next_run = &Cron::NextRun.to_string(),
enabled = &Cron::Enabled.to_string(),
status = &Cron::Status.to_string(),
pending = &CronStatus::Pending.to_value(),
locked_at = &Cron::LockedAt.to_string(),
timeout_ms = &Cron::TimeoutMs.to_string(),
priority = &Cron::Priority.to_string(),
attempts = &Cron::Attempts.to_string(),
max_attempts = &Cron::MaxAttempts.to_string(),
))
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let db = manager.get_connection();
db.execute_unprepared(&format!(
r#"DROP TRIGGER IF EXISTS {NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME} ON {table};"#,
table = &Cron::Table.to_string(),
))
.await?;
db.execute_unprepared(&format!(
r#"DROP FUNCTION IF EXISTS {NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME}();"#,
))
.await?;
db.execute_unprepared(&format!(
r#"DROP FUNCTION IF EXISTS {CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME}();"#,
))
.await?;
manager
.drop_table(
TableDropStatement::new()
.if_exists()
.table(Cron::Table)
.to_owned(),
)
.await?;
manager
.drop_postgres_enum_for_active_enum(CronStatusEnum)
.await?;
Ok(())
}
}
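
A hedged sketch (not in this diff) of the worker side of this design: subscribing to the channel that both trigger functions notify on. It assumes the sqlx re-export that sea_orm ships with the postgres backend:

async fn listen_for_due_crons(database_url: &str) -> Result<(), sea_orm::sqlx::Error> {
    use sea_orm::sqlx::postgres::PgListener;
    let mut listener = PgListener::connect(database_url).await?;
    listener.listen("cron_due").await?; // the CRON_DUE_EVENT channel
    loop {
        let notification = listener.recv().await?;
        // The payload is row_to_json(cron_row)::text, i.e. the full cron row
        // as JSON; the model layer deserializes it in handle_cron_notification.
        tracing::debug!("due cron payload: {}", notification.payload());
    }
}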

View File

@@ -10,6 +10,8 @@ pub mod m20250501_021523_credential_3rd;
pub mod m20250520_021135_subscriber_tasks; pub mod m20250520_021135_subscriber_tasks;
pub mod m20250622_015618_feeds; pub mod m20250622_015618_feeds;
pub mod m20250622_020819_bangumi_and_episode_type; pub mod m20250622_020819_bangumi_and_episode_type;
pub mod m20250625_060701_add_subscription_id_to_subscriber_tasks;
pub mod m20250629_065628_add_cron;
pub struct Migrator; pub struct Migrator;
@@ -24,6 +26,8 @@ impl MigratorTrait for Migrator {
Box::new(m20250520_021135_subscriber_tasks::Migration), Box::new(m20250520_021135_subscriber_tasks::Migration),
Box::new(m20250622_015618_feeds::Migration), Box::new(m20250622_015618_feeds::Migration),
Box::new(m20250622_020819_bangumi_and_episode_type::Migration), Box::new(m20250622_020819_bangumi_and_episode_type::Migration),
Box::new(m20250625_060701_add_subscription_id_to_subscriber_tasks::Migration),
Box::new(m20250629_065628_add_cron::Migration),
] ]
} }
} }

View File

@@ -63,7 +63,11 @@ impl Model {
.filter(Column::Pid.eq(pid)) .filter(Column::Pid.eq(pid))
.one(db) .one(db)
.await? .await?
.ok_or_else(|| RecorderError::from_db_record_not_found("auth::find_by_pid"))?; .ok_or_else(|| {
RecorderError::from_entity_not_found_detail::<Entity, _>(format!(
"pid {pid} not found"
))
})?;
Ok(subscriber_auth) Ok(subscriber_auth)
} }

View File

@@ -0,0 +1,10 @@
pub const CRON_DUE_EVENT: &str = "cron_due";
pub const CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME: &str = "check_and_trigger_due_crons";
pub const NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME: &str = "notify_due_cron_when_mutating";
pub const NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME: &str =
"notify_due_cron_when_mutating_trigger";
pub const SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME: &str = "setup_cron_extra_foreign_keys";
pub const SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME: &str =
"setup_cron_extra_foreign_keys_trigger";

View File

@@ -0,0 +1,361 @@
mod core;
mod registry;
pub use core::{
CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME, CRON_DUE_EVENT,
NOTIFY_DUE_CRON_WHEN_MUTATING_FUNCTION_NAME, NOTIFY_DUE_CRON_WHEN_MUTATING_TRIGGER_NAME,
SETUP_CRON_EXTRA_FOREIGN_KEYS_FUNCTION_NAME, SETUP_CRON_EXTRA_FOREIGN_KEYS_TRIGGER_NAME,
};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use croner::Cron;
use sea_orm::{
ActiveValue::{self, Set},
Condition, DeriveActiveEnum, DeriveDisplay, DeriveEntityModel, EnumIter, QuerySelect,
Statement, TransactionTrait,
entity::prelude::*,
sea_query::{ExprTrait, LockBehavior, LockType},
sqlx::postgres::PgNotification,
};
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait, errors::RecorderResult, models::subscriber_tasks,
task::SubscriberTaskTrait,
};
#[derive(
Debug, Clone, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay, Serialize, Deserialize,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "cron_status")]
#[serde(rename_all = "snake_case")]
pub enum CronStatus {
#[sea_orm(string_value = "pending")]
Pending,
#[sea_orm(string_value = "running")]
Running,
#[sea_orm(string_value = "completed")]
Completed,
#[sea_orm(string_value = "failed")]
Failed,
}
#[derive(Debug, Clone, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "cron")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTimeUtc,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTimeUtc,
#[sea_orm(primary_key)]
pub id: i32,
pub subscriber_id: Option<i32>,
pub subscription_id: Option<i32>,
pub cron_expr: String,
pub next_run: Option<DateTimeUtc>,
pub last_run: Option<DateTimeUtc>,
pub last_error: Option<String>,
pub locked_by: Option<String>,
pub locked_at: Option<DateTimeUtc>,
#[sea_orm(default_expr = "5000")]
pub timeout_ms: i32,
#[sea_orm(default_expr = "0")]
pub attempts: i32,
#[sea_orm(default_expr = "1")]
pub max_attempts: i32,
#[sea_orm(default_expr = "0")]
pub priority: i32,
pub status: CronStatus,
#[sea_orm(default_expr = "true")]
pub enabled: bool,
pub subscriber_task: Option<subscriber_tasks::SubscriberTask>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::subscribers::Entity",
from = "Column::SubscriberId",
to = "super::subscribers::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
)]
Subscriber,
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id",
on_update = "Cascade",
on_delete = "Cascade"
)]
Subscription,
}
impl Related<super::subscribers::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscriber.def()
}
}
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")]
Subscriber,
#[sea_orm(entity = "super::subscriptions::Entity")]
Subscription,
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, _insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
if let ActiveValue::Set(ref cron_expr) = self.cron_expr
&& matches!(
self.next_run,
ActiveValue::NotSet | ActiveValue::Unchanged(_)
)
{
let next_run =
Model::calculate_next_run(cron_expr).map_err(|e| DbErr::Custom(e.to_string()))?;
self.next_run = Set(Some(next_run));
}
if let ActiveValue::Set(Some(subscriber_id)) = self.subscriber_id
&& let ActiveValue::Set(Some(ref subscriber_task)) = self.subscriber_task
&& subscriber_task.get_subscriber_id() != subscriber_id
{
return Err(DbErr::Custom(
"Cron subscriber_id does not match subscriber_task.subscriber_id".to_string(),
));
}
Ok(self)
}
}
impl Model {
pub async fn handle_cron_notification(
ctx: &dyn AppContextTrait,
notification: PgNotification,
worker_id: &str,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
let payload: Self = serde_json::from_str(notification.payload())?;
let cron_id = payload.id;
tracing::debug!("Cron notification received for cron {cron_id} and worker {worker_id}");
match Self::try_acquire_lock_with_cron_id(ctx, cron_id, worker_id).await? {
Some(cron) => match cron.exec_cron(ctx).await {
Ok(()) => {
tracing::debug!("Cron {cron_id} executed successfully");
cron.mark_cron_completed(ctx).await?;
}
Err(e) => {
tracing::error!("Error executing cron {cron_id}: {e}");
cron.mark_cron_failed(ctx, &e.to_string(), retry_duration)
.await?;
}
},
None => {
tracing::debug!(
"Cron lock not acquired for cron {cron_id} and worker {worker_id}, skipping..."
);
}
}
Ok(())
}
async fn try_acquire_lock_with_cron_id(
ctx: &dyn AppContextTrait,
cron_id: i32,
worker_id: &str,
) -> RecorderResult<Option<Self>> {
let db = ctx.db();
let txn = db.begin().await?;
let cron = Entity::find_by_id(cron_id)
.lock_with_behavior(LockType::Update, LockBehavior::SkipLocked)
.one(&txn)
.await?;
if let Some(cron) = cron {
if cron.enabled
&& cron.attempts < cron.max_attempts
&& cron.status == CronStatus::Pending
&& (cron.locked_at.is_none_or(|locked_at| {
locked_at + chrono::Duration::milliseconds(cron.timeout_ms as i64) <= Utc::now()
}))
&& cron.next_run.is_some_and(|next_run| next_run <= Utc::now())
{
let cron_active_model = ActiveModel {
id: Set(cron.id),
locked_by: Set(Some(worker_id.to_string())),
locked_at: Set(Some(Utc::now())),
status: Set(CronStatus::Running),
attempts: Set(cron.attempts + 1),
..Default::default()
};
let cron_model = cron_active_model.update(&txn).await?;
txn.commit().await?;
return Ok(Some(cron_model));
}
            // Lock not acquired: the cron is not actually due, or another
            // worker owns it, so report None instead of handing it back for
            // execution without the lock.
            txn.commit().await?;
            return Ok(None);
}
txn.rollback().await?;
Ok(None)
}
async fn exec_cron(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
if let Some(subscriber_task) = self.subscriber_task.as_ref() {
let task_service = ctx.task();
task_service
.add_subscriber_task(subscriber_task.clone())
.await?;
} else {
unimplemented!("Cron without subscriber task is not supported now");
}
Ok(())
}
async fn mark_cron_completed(&self, ctx: &dyn AppContextTrait) -> RecorderResult<()> {
let db = ctx.db();
let next_run = Self::calculate_next_run(&self.cron_expr)?;
ActiveModel {
id: Set(self.id),
next_run: Set(Some(next_run)),
last_run: Set(Some(Utc::now())),
status: Set(CronStatus::Pending),
locked_by: Set(None),
locked_at: Set(None),
attempts: Set(0),
last_error: Set(None),
..Default::default()
}
.update(db)
.await?;
Ok(())
}
async fn mark_cron_failed(
&self,
ctx: &dyn AppContextTrait,
error: &str,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
let db = ctx.db();
let should_retry = self.attempts < self.max_attempts;
let status = if should_retry {
CronStatus::Pending
} else {
CronStatus::Failed
};
let next_run = if should_retry {
Some(Utc::now() + retry_duration)
} else {
Some(Self::calculate_next_run(&self.cron_expr)?)
};
ActiveModel {
id: Set(self.id),
next_run: Set(next_run),
status: Set(status),
locked_by: Set(None),
locked_at: Set(None),
last_run: Set(Some(Utc::now())),
last_error: Set(Some(error.to_string())),
attempts: Set(if should_retry { self.attempts + 1 } else { 0 }),
..Default::default()
}
.update(db)
.await?;
Ok(())
}
pub async fn check_and_trigger_due_crons(ctx: &dyn AppContextTrait) -> RecorderResult<()> {
let db = ctx.db();
db.execute(Statement::from_string(
db.get_database_backend(),
format!("SELECT {CHECK_AND_TRIGGER_DUE_CRONS_FUNCTION_NAME}()"),
))
.await?;
Ok(())
}
pub async fn check_and_cleanup_expired_cron_locks(
ctx: &dyn AppContextTrait,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
let db = ctx.db();
        // Expired-lock detection keys off locked_at + timeout_ms, matching the
        // lock checks in the SQL trigger functions (last_run is only written
        // after a run finishes, so it cannot signal a stuck lock).
        let condition = Condition::all()
            .add(Column::Status.eq(CronStatus::Running))
            .add(Column::LockedAt.is_not_null())
            .add(Column::TimeoutMs.is_not_null())
            .add(
                Expr::col(Column::LockedAt)
                    .add(Expr::col(Column::TimeoutMs).mul(Expr::cust("INTERVAL '1 millisecond'")))
                    .lte(Expr::current_timestamp()),
            );
let cron_ids = Entity::find()
.select_only()
.column(Column::Id)
.filter(condition.clone())
.lock_with_behavior(LockType::Update, LockBehavior::SkipLocked)
.into_tuple::<i32>()
.all(db)
.await?;
for cron_id in cron_ids {
let txn = db.begin().await?;
let locked_cron = Entity::find_by_id(cron_id)
.filter(condition.clone())
.lock_with_behavior(LockType::Update, LockBehavior::SkipLocked)
.one(&txn)
.await?;
if let Some(locked_cron) = locked_cron {
locked_cron
.mark_cron_failed(
ctx,
format!("Cron timeout of {}ms", locked_cron.timeout_ms).as_str(),
retry_duration,
)
.await?;
txn.commit().await?;
} else {
txn.rollback().await?;
}
}
Ok(())
}
pub fn calculate_next_run(cron_expr: &str) -> RecorderResult<DateTime<Utc>> {
let cron_expr = Cron::new(cron_expr).parse()?;
let next = cron_expr.find_next_occurrence(&Utc::now(), false)?;
Ok(next)
}
}
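
A hedged usage sketch (not in this diff): inserting an hourly cron that fans out a subscriber task. before_save derives next_run from cron_expr and checks that the task's subscriber id matches; the counters are set explicitly because the migration declares them NOT NULL without database-side defaults:

async fn schedule_hourly_task(
    db: &sea_orm::DatabaseConnection,
    subscriber_id: i32,
    task: subscriber_tasks::SubscriberTask,
) -> RecorderResult<Model> {
    let cron = ActiveModel {
        cron_expr: Set("0 * * * *".to_string()), // top of every hour, 5-field expression
        subscriber_id: Set(Some(subscriber_id)),
        subscriber_task: Set(Some(task)),
        status: Set(CronStatus::Pending),
        enabled: Set(true),
        timeout_ms: Set(5_000),
        attempts: Set(0),
        max_attempts: Set(3),
        priority: Set(0),
        ..Default::default()
    }
    .insert(db)
    .await?;
    Ok(cron)
}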

View File

@@ -0,0 +1 @@

View File

@@ -129,7 +129,7 @@ pub enum RelatedEntity {
} }
impl ActiveModel { impl ActiveModel {
#[tracing::instrument(err, skip(ctx), fields(bangumi_id = ?bangumi.id, mikan_episode_id = ?episode.mikan_episode_id))] #[tracing::instrument(err, skip_all, fields(bangumi_id = ?bangumi.id, mikan_episode_id = ?episode.mikan_episode_id))]
pub fn from_mikan_bangumi_and_episode_meta( pub fn from_mikan_bangumi_and_episode_meta(
ctx: &dyn AppContextTrait, ctx: &dyn AppContextTrait,
bangumi: &bangumi::Model, bangumi: &bangumi::Model,

View File

@@ -122,9 +122,7 @@ impl Model {
.filter(Column::FeedType.eq(FeedType::Rss)) .filter(Column::FeedType.eq(FeedType::Rss))
.one(db) .one(db)
.await? .await?
.ok_or(RecorderError::ModelEntityNotFound { .ok_or(RecorderError::from_entity_not_found::<Entity>())?;
entity: "Feed".into(),
})?;
let feed = Feed::from_model(ctx, feed_model).await?; let feed = Feed::from_model(ctx, feed_model).await?;

View File

@@ -1,5 +1,7 @@
use rss::Channel; use rss::Channel;
use sea_orm::{ColumnTrait, EntityTrait, JoinType, QueryFilter, QuerySelect, RelationTrait}; use sea_orm::{
ColumnTrait, EntityTrait, JoinType, Order, QueryFilter, QueryOrder, QuerySelect, RelationTrait,
};
use url::Url; use url::Url;
use crate::{ use crate::{
@@ -37,13 +39,12 @@ impl Feed {
subscription_episode::Relation::Subscription.def(), subscription_episode::Relation::Subscription.def(),
) )
.filter(subscriptions::Column::Id.eq(subscription_id)) .filter(subscriptions::Column::Id.eq(subscription_id))
.order_by(episodes::Column::EnclosurePubDate, Order::Desc)
.all(db) .all(db)
.await?; .await?;
(subscription, episodes) (subscription, episodes)
} else { } else {
return Err(RecorderError::ModelEntityNotFound { return Err(RecorderError::from_entity_not_found::<subscriptions::Entity>());
entity: "Subscription".into(),
});
}; };
Ok(Feed::SubscritpionEpisodes( Ok(Feed::SubscritpionEpisodes(

View File

@@ -24,6 +24,7 @@ pub trait RssFeedItemTrait: Sized {
-> Option<Cow<'_, str>>; -> Option<Cow<'_, str>>;
fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>>; fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>>;
fn get_enclosure_content_length(&self) -> Option<i64>; fn get_enclosure_content_length(&self) -> Option<i64>;
fn get_xmlns(&self) -> Cow<'_, str>;
fn into_item(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Item> { fn into_item(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Item> {
let enclosure_mime_type = let enclosure_mime_type =
self.get_enclosure_mime() self.get_enclosure_mime()
@@ -53,32 +54,49 @@ pub trait RssFeedItemTrait: Sized {
let mut extensions = ExtensionMap::default(); let mut extensions = ExtensionMap::default();
if enclosure_mime_type == BITTORRENT_MIME_TYPE { if enclosure_mime_type == BITTORRENT_MIME_TYPE {
extensions.insert("torrent".to_string(), { let xmlns = self.get_xmlns();
let mut map = btreemap! {
"link".to_string() => vec![ let torrent_extension = ExtensionBuilder::default()
ExtensionBuilder::default().name( .name("torrent")
"link" .attrs(btreemap! {
).value(enclosure_link.to_string()).build() "xmlns".to_string() => xmlns.to_string()
], })
"contentLength".to_string() => vec![ .children({
ExtensionBuilder::default().name( let mut m = btreemap! {
"contentLength" "link".to_string() => vec![
).value(enclosure_content_length.to_string()).build()
],
};
if let Some(pub_date) = enclosure_pub_date {
map.insert(
"pubDate".to_string(),
vec![
ExtensionBuilder::default() ExtensionBuilder::default()
.name("pubDate") .name("link")
.value(pub_date.to_rfc3339()) .value(link.to_string())
.build(), .build()
], ],
); "contentLength".to_string() => vec![
} ExtensionBuilder::default()
map .name("contentLength")
}); .value(enclosure_content_length.to_string())
.build()
]
};
if let Some(pub_date) = enclosure_pub_date {
m.insert(
"pubDate".to_string(),
vec![
ExtensionBuilder::default()
.name("pubDate")
.value(pub_date.to_rfc3339())
.build(),
],
);
};
m
})
.build();
extensions.insert(
"".to_string(),
btreemap! {
"torrent".to_string() => vec![torrent_extension]
},
);
}; };
let enclosure = EnclosureBuilder::default() let enclosure = EnclosureBuilder::default()

View File

@@ -42,6 +42,12 @@ impl RssFeedItemTrait for episodes::Model {
Cow::Owned(format!("{PROJECT_NAME}:episode:{}", self.id)) Cow::Owned(format!("{PROJECT_NAME}:episode:{}", self.id))
} }
fn get_xmlns(&self) -> Cow<'_, str> {
match self.episode_type {
episodes::EpisodeType::Mikan => Cow::Borrowed("https://mikanani.me/0.1/"),
}
}
fn get_title(&self) -> Cow<'_, str> { fn get_title(&self) -> Cow<'_, str> {
Cow::Borrowed(&self.display_name) Cow::Borrowed(&self.display_name)
} }

View File

@@ -11,3 +11,4 @@ pub mod subscribers;
pub mod subscription_bangumi; pub mod subscription_bangumi;
pub mod subscription_episode; pub mod subscription_episode;
pub mod subscriptions; pub mod subscriptions;
pub mod cron;

View File

@@ -1,7 +1,7 @@
use async_trait::async_trait; use async_trait::async_trait;
use sea_orm::{ use sea_orm::{
ActiveModelTrait, ColumnTrait, ConnectionTrait, DbErr, EntityTrait, Insert, IntoActiveModel, ActiveModelTrait, ColumnTrait, ConnectionTrait, DbErr, EntityTrait, Insert, IntoActiveModel,
Iterable, QueryResult, QueryTrait, SelectModel, SelectorRaw, sea_query::Query, QueryResult, QueryTrait, sea_query::Query,
}; };
#[async_trait] #[async_trait]
@@ -10,13 +10,6 @@ where
<A::Entity as EntityTrait>::Model: IntoActiveModel<A>, <A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait, A: ActiveModelTrait,
{ {
fn exec_with_returning_models<C>(
self,
db: &C,
) -> SelectorRaw<SelectModel<<A::Entity as EntityTrait>::Model>>
where
C: ConnectionTrait;
async fn exec_with_returning_columns<C, I>( async fn exec_with_returning_columns<C, I>(
self, self,
db: &C, db: &C,
@@ -33,26 +26,6 @@ where
<A::Entity as EntityTrait>::Model: IntoActiveModel<A>, <A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait + Send, A: ActiveModelTrait + Send,
{ {
fn exec_with_returning_models<C>(
self,
db: &C,
) -> SelectorRaw<SelectModel<<A::Entity as EntityTrait>::Model>>
where
C: ConnectionTrait,
{
let mut insert_statement = self.into_query();
let db_backend = db.get_database_backend();
let returning = Query::returning().exprs(
<A::Entity as EntityTrait>::Column::iter()
.map(|c| c.select_as(c.into_returning_expr(db_backend))),
);
insert_statement.returning(returning);
let insert_statement = db_backend.build(&insert_statement);
SelectorRaw::<SelectModel<<A::Entity as EntityTrait>::Model>>::from_statement(
insert_statement,
)
}
async fn exec_with_returning_columns<C, I>( async fn exec_with_returning_columns<C, I>(
self, self,
db: &C, db: &C,

View File

@@ -1,9 +1,10 @@
use async_trait::async_trait; use async_trait::async_trait;
use sea_orm::entity::prelude::*; use sea_orm::{ActiveValue, entity::prelude::*};
use crate::task::SubscriberTaskTrait;
pub use crate::task::{ pub use crate::task::{
SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant, SubscriberTask, SubscriberTaskInput, SubscriberTaskType, SubscriberTaskTypeEnum,
SubscriberTaskTypeVariantIter, SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter,
}; };
#[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)] #[derive(Clone, Debug, PartialEq, Eq, DeriveActiveEnum, EnumIter, DeriveDisplay)]
@@ -29,6 +30,7 @@ pub struct Model {
#[sea_orm(primary_key)] #[sea_orm(primary_key)]
pub id: String, pub id: String,
pub subscriber_id: i32, pub subscriber_id: i32,
pub subscription_id: Option<i32>,
pub job: SubscriberTask, pub job: SubscriberTask,
pub task_type: SubscriberTaskType, pub task_type: SubscriberTaskType,
pub status: SubscriberTaskStatus, pub status: SubscriberTaskStatus,
@@ -52,6 +54,14 @@ pub enum Relation {
on_delete = "Cascade" on_delete = "Cascade"
)] )]
Subscriber, Subscriber,
#[sea_orm(
belongs_to = "super::subscriptions::Entity",
from = "Column::SubscriptionId",
to = "super::subscriptions::Column::Id",
on_update = "NoAction",
on_delete = "NoAction"
)]
Subscription,
} }
impl Related<super::subscribers::Entity> for Entity { impl Related<super::subscribers::Entity> for Entity {
@@ -60,11 +70,34 @@ impl Related<super::subscribers::Entity> for Entity {
} }
} }
impl Related<super::subscriptions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Subscription.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)] #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity { pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")] #[sea_orm(entity = "super::subscribers::Entity")]
Subscriber, Subscriber,
#[sea_orm(entity = "super::subscriptions::Entity")]
Subscription,
} }
#[async_trait] #[async_trait]
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(mut self, _db: &C, _insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
if let ActiveValue::Set(subscriber_id) = self.subscriber_id
&& let ActiveValue::Set(ref job) = self.job
&& job.get_subscriber_id() != subscriber_id
{
return Err(DbErr::Custom(
"SubscriberTask subscriber_id does not match job.subscriber_id".to_string(),
));
}
Ok(self)
}
}

View File

@@ -130,10 +130,9 @@ impl Model {
pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RecorderResult<Self> { pub async fn find_by_id(ctx: &dyn AppContextTrait, id: i32) -> RecorderResult<Self> {
let db = ctx.db(); let db = ctx.db();
let subscriber = Entity::find_by_id(id) let subscriber = Entity::find_by_id(id).one(db).await?.ok_or_else(|| {
.one(db) RecorderError::from_entity_not_found_detail::<Entity, _>(format!("id {id} not found"))
.await? })?;
.ok_or_else(|| RecorderError::from_db_record_not_found("subscribers::find_by_id"))?;
Ok(subscriber) Ok(subscriber)
} }

View File

@@ -11,10 +11,7 @@ pub use registry::{
use sea_orm::entity::prelude::*; use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{ use crate::{app::AppContextTrait, errors::RecorderResult};
app::AppContextTrait,
errors::{RecorderError, RecorderResult},
};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")] #[sea_orm(table_name = "subscriptions")]
@@ -61,6 +58,8 @@ pub enum Relation {
Credential3rd, Credential3rd,
#[sea_orm(has_many = "super::feeds::Entity")] #[sea_orm(has_many = "super::feeds::Entity")]
Feed, Feed,
#[sea_orm(has_many = "super::subscriber_tasks::Entity")]
SubscriberTask,
} }
impl Related<super::subscribers::Entity> for Entity { impl Related<super::subscribers::Entity> for Entity {
@@ -121,6 +120,12 @@ impl Related<super::credential_3rd::Entity> for Entity {
} }
} }
impl Related<super::subscriber_tasks::Entity> for Entity {
fn to() -> RelationDef {
Relation::SubscriberTask.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)] #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity { pub enum RelatedEntity {
#[sea_orm(entity = "super::subscribers::Entity")] #[sea_orm(entity = "super::subscribers::Entity")]
@@ -137,6 +142,8 @@ pub enum RelatedEntity {
Credential3rd, Credential3rd,
#[sea_orm(entity = "super::feeds::Entity")] #[sea_orm(entity = "super::feeds::Entity")]
Feed, Feed,
#[sea_orm(entity = "super::subscriber_tasks::Entity")]
SubscriberTask,
} }
#[async_trait] #[async_trait]
@@ -145,51 +152,7 @@ impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {} impl ActiveModel {}
impl Model { impl Model {
pub async fn toggle_with_ids( pub async fn exec_cron(&self, _ctx: &dyn AppContextTrait) -> RecorderResult<()> {
ctx: &dyn AppContextTrait, todo!()
ids: impl Iterator<Item = i32>,
enabled: bool,
) -> RecorderResult<()> {
let db = ctx.db();
Entity::update_many()
.col_expr(Column::Enabled, Expr::value(enabled))
.filter(Column::Id.is_in(ids))
.exec(db)
.await?;
Ok(())
}
pub async fn delete_with_ids(
ctx: &dyn AppContextTrait,
ids: impl Iterator<Item = i32>,
) -> RecorderResult<()> {
let db = ctx.db();
Entity::delete_many()
.filter(Column::Id.is_in(ids))
.exec(db)
.await?;
Ok(())
}
pub async fn find_by_id_and_subscriber_id(
ctx: &dyn AppContextTrait,
subscriber_id: i32,
subscription_id: i32,
) -> RecorderResult<Self> {
let db = ctx.db();
let subscription_model = Entity::find_by_id(subscription_id)
.one(db)
.await?
.ok_or_else(|| RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
if subscription_model.subscriber_id != subscriber_id {
Err(RecorderError::ModelEntityNotFound {
entity: "Subscription".into(),
})?;
}
Ok(subscription_model)
} }
} }

View File

@@ -1,129 +1,147 @@
use std::{fmt::Debug, sync::Arc}; use std::{fmt::Debug, sync::Arc};
use async_trait::async_trait;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter}; use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{ use crate::{
app::AppContextTrait, errors::RecorderResult,
errors::{RecorderError, RecorderResult},
extract::mikan::{ extract::mikan::{
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription, MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
}, },
models::subscriptions::{self, SubscriptionTrait}, models::subscriptions::{self, SubscriptionTrait},
}; };
#[derive( macro_rules! register_subscription_type {
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize, DeriveDisplay, (
)] subscription_category_enum: {
#[sea_orm( $(#[$subscription_category_enum_meta:meta])*
rs_type = "String", pub enum $type_enum_name:ident {
db_type = "Enum", $(
enum_name = "subscription_category" $(#[$variant_meta:meta])*
)] $variant:ident => $string_value:literal
#[serde(rename_all = "snake_case")] ),* $(,)?
pub enum SubscriptionCategory {
#[sea_orm(string_value = "mikan_subscriber")]
MikanSubscriber,
#[sea_orm(string_value = "mikan_season")]
MikanSeason,
#[sea_orm(string_value = "mikan_bangumi")]
MikanBangumi,
#[sea_orm(string_value = "manual")]
Manual,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "category")]
pub enum Subscription {
#[serde(rename = "mikan_subscriber")]
MikanSubscriber(MikanSubscriberSubscription),
#[serde(rename = "mikan_season")]
MikanSeason(MikanSeasonSubscription),
#[serde(rename = "mikan_bangumi")]
MikanBangumi(MikanBangumiSubscription),
#[serde(rename = "manual")]
Manual,
}
impl Subscription {
pub fn category(&self) -> SubscriptionCategory {
match self {
Self::MikanSubscriber(_) => SubscriptionCategory::MikanSubscriber,
Self::MikanSeason(_) => SubscriptionCategory::MikanSeason,
Self::MikanBangumi(_) => SubscriptionCategory::MikanBangumi,
Self::Manual => SubscriptionCategory::Manual,
}
}
}
#[async_trait]
impl SubscriptionTrait for Subscription {
fn get_subscriber_id(&self) -> i32 {
match self {
Self::MikanSubscriber(subscription) => subscription.get_subscriber_id(),
Self::MikanSeason(subscription) => subscription.get_subscriber_id(),
Self::MikanBangumi(subscription) => subscription.get_subscriber_id(),
Self::Manual => unreachable!(),
}
}
fn get_subscription_id(&self) -> i32 {
match self {
Self::MikanSubscriber(subscription) => subscription.get_subscription_id(),
Self::MikanSeason(subscription) => subscription.get_subscription_id(),
Self::MikanBangumi(subscription) => subscription.get_subscription_id(),
Self::Manual => unreachable!(),
}
}
async fn sync_feeds_incremental(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_feeds_incremental(ctx).await,
Self::Manual => Ok(()),
}
}
async fn sync_feeds_full(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_feeds_full(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_feeds_full(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_feeds_full(ctx).await,
Self::Manual => Ok(()),
}
}
async fn sync_sources(&self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::MikanSubscriber(subscription) => subscription.sync_sources(ctx).await,
Self::MikanSeason(subscription) => subscription.sync_sources(ctx).await,
Self::MikanBangumi(subscription) => subscription.sync_sources(ctx).await,
Self::Manual => Ok(()),
}
}
    fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
        match model.category {
            SubscriptionCategory::MikanSubscriber => {
                MikanSubscriberSubscription::try_from_model(model).map(Self::MikanSubscriber)
            }
            SubscriptionCategory::MikanSeason => {
                MikanSeasonSubscription::try_from_model(model).map(Self::MikanSeason)
            }
            SubscriptionCategory::MikanBangumi => {
                MikanBangumiSubscription::try_from_model(model).map(Self::MikanBangumi)
            }
            SubscriptionCategory::Manual => Ok(Self::Manual),
        }
    }
}

macro_rules! register_subscription_type {
    (
        subscription_category_enum: {
            $(#[$subscription_category_enum_meta:meta])*
            pub enum $type_enum_name:ident {
                $(
                    $(#[$variant_meta:meta])*
                    $variant:ident => $string_value:literal
                ),* $(,)?
            }
        },
        subscription_enum: {
            $(#[$subscription_enum_meta:meta])*
            pub enum $subscription_enum_name:ident {
                $(
                    $subscription_variant:ident($subscription_type:ty)
                ),* $(,)?
            }
        }
    ) => {
        $(#[$subscription_category_enum_meta])*
        #[sea_orm(
            rs_type = "String",
            db_type = "Enum",
            enum_name = "subscription_category"
        )]
        pub enum $type_enum_name {
            $(
                $(#[$variant_meta])*
                #[serde(rename = $string_value)]
                #[sea_orm(string_value = $string_value)]
                $variant,
            )*
        }

        $(#[$subscription_enum_meta])*
        #[serde(tag = "category")]
        pub enum $subscription_enum_name {
            $(
                #[serde(rename = $string_value)]
                $subscription_variant($subscription_type),
            )*
        }

        impl $subscription_enum_name {
            pub fn category(&self) -> $type_enum_name {
                match self {
                    $(Self::$subscription_variant(_) => $type_enum_name::$variant,)*
                }
            }
        }
#[async_trait::async_trait]
impl $crate::models::subscriptions::SubscriptionTrait for $subscription_enum_name {
fn get_subscriber_id(&self) -> i32 {
match self {
$(Self::$subscription_variant(subscription) => subscription.get_subscriber_id(),)*
}
}
fn get_subscription_id(&self) -> i32 {
match self {
$(Self::$subscription_variant(subscription) => subscription.get_subscription_id(),)*
}
}
async fn sync_feeds_incremental(&self, ctx: Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$subscription_variant(subscription) => subscription.sync_feeds_incremental(ctx).await,)*
}
}
async fn sync_feeds_full(&self, ctx: Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$subscription_variant(subscription) => subscription.sync_feeds_full(ctx).await,)*
}
}
async fn sync_sources(&self, ctx: Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$subscription_variant(subscription) => subscription.sync_sources(ctx).await,)*
}
}
fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
match model.category {
$($type_enum_name::$variant => {
<$subscription_type as $crate::models::subscriptions::SubscriptionTrait>::try_from_model(model).map(Self::$subscription_variant)
})*
}
}
}
impl TryFrom<&$crate::models::subscriptions::Model> for $subscription_enum_name {
type Error = $crate::errors::RecorderError;
fn try_from(model: &$crate::models::subscriptions::Model) -> Result<Self, Self::Error> {
Self::try_from_model(model)
}
}
};
}
register_subscription_type! {
subscription_category_enum: {
#[derive(
Clone,
Debug,
Serialize,
Deserialize,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
pub enum SubscriptionCategory {
MikanSubscriber => "mikan_subscriber",
MikanSeason => "mikan_season",
MikanBangumi => "mikan_bangumi",
}
    },
subscription_enum: {
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum Subscription {
MikanSubscriber(MikanSubscriberSubscription),
MikanSeason(MikanSeasonSubscription),
MikanBangumi(MikanBangumiSubscription)
        }
    }
}
impl TryFrom<&subscriptions::Model> for Subscription {
type Error = RecorderError;
fn try_from(model: &subscriptions::Model) -> Result<Self, Self::Error> {
Self::try_from_model(model)
}
}
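
For reference, the macro-generated pair is consumed just like the handwritten version it replaces. A minimal sketch, assuming `model` is a `subscriptions::Model` row loaded elsewhere via sea-orm:

```rust
// Sketch only: `model` is assumed to be a sea-orm `subscriptions::Model`.
let subscription = Subscription::try_from(&model)?;
match subscription.category() {
    SubscriptionCategory::MikanSubscriber => { /* whole-subscriber feed */ }
    SubscriptionCategory::MikanSeason => { /* one season */ }
    SubscriptionCategory::MikanBangumi => { /* one bangumi */ }
}
```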

View File

@@ -209,7 +209,7 @@ impl StorageService {
        lister.try_collect().await
    }

-    #[instrument(skip_all, err, fields(storage_path = %storage_path.as_ref(), range = ?range, accept = ?accept))]
+    #[instrument(skip_all, err, fields(storage_path = %storage_path.as_ref(), range = ?range, accept = accept.to_string()))]
    pub async fn serve_optimized_image(
        &self,
        storage_path: impl AsRef<Path>,

View File

@@ -8,10 +8,12 @@ pub struct TaskConfig {
    pub subscriber_task_concurrency: u32,
    #[serde(default = "default_system_task_workers")]
    pub system_task_concurrency: u32,
-    #[serde(default = "default_subscriber_task_timeout")]
-    pub subscriber_task_timeout: Duration,
-    #[serde(default = "default_system_task_timeout")]
-    pub system_task_timeout: Duration,
+    #[serde(default = "default_subscriber_task_reenqueue_orphaned_after")]
+    pub subscriber_task_reenqueue_orphaned_after: Duration,
+    #[serde(default = "default_system_task_reenqueue_orphaned_after")]
+    pub system_task_reenqueue_orphaned_after: Duration,
+    #[serde(default = "default_cron_retry_duration")]
+    pub cron_retry_duration: Duration,
}

impl Default for TaskConfig {
@@ -19,8 +21,10 @@ impl Default for TaskConfig {
        Self {
            subscriber_task_concurrency: default_subscriber_task_workers(),
            system_task_concurrency: default_system_task_workers(),
-            subscriber_task_timeout: default_subscriber_task_timeout(),
-            system_task_timeout: default_system_task_timeout(),
+            subscriber_task_reenqueue_orphaned_after:
+                default_subscriber_task_reenqueue_orphaned_after(),
+            system_task_reenqueue_orphaned_after: default_system_task_reenqueue_orphaned_after(),
+            cron_retry_duration: default_cron_retry_duration(),
        }
    }
}
@@ -41,10 +45,14 @@ pub fn default_system_task_workers() -> u32 {
    }
}

-pub fn default_subscriber_task_timeout() -> Duration {
+pub fn default_subscriber_task_reenqueue_orphaned_after() -> Duration {
    Duration::from_secs(3600)
}

-pub fn default_system_task_timeout() -> Duration {
+pub fn default_system_task_reenqueue_orphaned_after() -> Duration {
    Duration::from_secs(3600)
}
+
+pub fn default_cron_retry_duration() -> Duration {
+    Duration::from_secs(5)
+}
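
The rename is not cosmetic: these durations now feed apalis' `Config::set_reenqueue_orphaned_after` (see the service.rs hunk further down), i.e. how long an orphaned job may sit before another worker re-enqueues it. A quick sanity check against only the defaults declared above:

```rust
use std::time::Duration;

// Exercises only the code shown in this hunk.
let cfg = TaskConfig::default();
assert_eq!(cfg.subscriber_task_reenqueue_orphaned_after, Duration::from_secs(3600));
assert_eq!(cfg.system_task_reenqueue_orphaned_after, Duration::from_secs(3600));
assert_eq!(cfg.cron_retry_duration, Duration::from_secs(5));
```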

View File

@@ -1,34 +1,60 @@
use std::sync::Arc;

-use futures::Stream;
-use serde::{Serialize, de::DeserializeOwned};
+use async_trait::async_trait;
+use futures::{Stream, StreamExt, pin_mut};
+use serde::{Deserialize, Serialize, de::DeserializeOwned};

use crate::{app::AppContextTrait, errors::RecorderResult};

pub const SYSTEM_TASK_APALIS_NAME: &str = "system_task";
pub const SUBSCRIBER_TASK_APALIS_NAME: &str = "subscriber_task";

-#[async_trait::async_trait]
+#[async_trait]
pub trait AsyncTaskTrait: Serialize + DeserializeOwned + Sized {
    async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;
-
-    async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        self.run_async(ctx).await?;
-        Ok(())
-    }
}

-#[async_trait::async_trait]
-pub trait StreamTaskTrait: Serialize + DeserializeOwned + Sized {
+pub trait StreamTaskTrait {
    type Yield: Serialize + DeserializeOwned + Send;

    fn run_stream(
        self,
        ctx: Arc<dyn AppContextTrait>,
    ) -> impl Stream<Item = RecorderResult<Self::Yield>> + Send;
-
-    async fn run(self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
-        unimplemented!()
-    }
}

+#[async_trait]
+impl<T> AsyncTaskTrait for T
+where
+    T: StreamTaskTrait + Serialize + DeserializeOwned + Sized + Send,
+{
+    async fn run_async(self, _ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
+        let s = self.run_stream(_ctx);
+        pin_mut!(s);
+        while let Some(item) = s.next().await {
+            item?;
+        }
+        Ok(())
+    }
+}
pub trait SubscriberTaskTrait: AsyncTaskTrait {
type InputType: Serialize + DeserializeOwned + Sized + Send;
fn get_subscriber_id(&self) -> i32;
fn get_cron_id(&self) -> Option<i32>;
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self;
}
pub trait SystemTaskTrait: AsyncTaskTrait {}
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)]
pub struct SubscriberTaskBase {
pub subscriber_id: i32,
pub cron_id: Option<i32>,
}
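
With the blanket impl above, a task only has to describe its stream and it gets `run_async` for free. A minimal sketch; `CountToTask` and its field are hypothetical:

```rust
use std::sync::Arc;

use futures::Stream;
use serde::{Deserialize, Serialize};

use crate::{app::AppContextTrait, errors::RecorderResult, task::StreamTaskTrait};

// Hypothetical task that yields the numbers 0..up_to as progress items.
#[derive(Serialize, Deserialize)]
pub struct CountToTask {
    pub up_to: u32,
}

impl StreamTaskTrait for CountToTask {
    type Yield = u32;

    fn run_stream(
        self,
        _ctx: Arc<dyn AppContextTrait>,
    ) -> impl Stream<Item = RecorderResult<Self::Yield>> + Send {
        // A toy stream with no failure paths; real tasks surface errors per item.
        futures::stream::iter((0..self.up_to).map(Ok))
    }
}

// Via the blanket impl, `CountToTask { up_to: 3 }.run_async(ctx).await`
// drains the stream and stops at the first Err item.
```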

View File

@@ -6,14 +6,16 @@ mod service;
pub use core::{
    AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, StreamTaskTrait,
+    SubscriberTaskBase, SubscriberTaskTrait, SystemTaskTrait,
};
pub use config::TaskConfig;
pub use r#extern::{ApalisJobs, ApalisSchema};
pub use registry::{
-    OptimizeImageTask, SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum,
-    SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
-    SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask, SystemTask,
-    SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant, SystemTaskTypeVariantIter,
+    OptimizeImageTask, SubscriberTask, SubscriberTaskInput, SubscriberTaskType,
+    SubscriberTaskTypeEnum, SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter,
+    SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
+    SyncOneSubscriptionSourcesTask, SystemTask, SystemTaskType, SystemTaskTypeEnum,
+    SystemTaskTypeVariant, SystemTaskTypeVariantIter,
};
pub use service::TaskService;

View File

@@ -1,134 +1,12 @@
-mod media;
-mod subscription;
+mod subscriber;
+mod system;

+pub use subscriber::{
+    SubscriberTask, SubscriberTaskInput, SubscriberTaskType, SubscriberTaskTypeEnum,
+    SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
+    SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask,
+};
+pub use system::{
+    OptimizeImageTask, SystemTask, SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant,
+    SystemTaskTypeVariantIter,
+};

-use std::sync::Arc;
-
-pub use media::OptimizeImageTask;
-use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
-use serde::{Deserialize, Serialize};
-pub use subscription::{
-    SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
-    SyncOneSubscriptionSourcesTask,
-};
-
-use crate::{
-    app::AppContextTrait,
-    errors::{RecorderError, RecorderResult},
-    models::subscriptions::SubscriptionTrait,
-    task::AsyncTaskTrait,
-};
#[derive(
Clone,
Debug,
Serialize,
Deserialize,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum SubscriberTaskType {
#[serde(rename = "sync_one_subscription_feeds_incremental")]
#[sea_orm(string_value = "sync_one_subscription_feeds_incremental")]
SyncOneSubscriptionFeedsIncremental,
#[serde(rename = "sync_one_subscription_feeds_full")]
#[sea_orm(string_value = "sync_one_subscription_feeds_full")]
SyncOneSubscriptionFeedsFull,
#[serde(rename = "sync_one_subscription_sources")]
#[sea_orm(string_value = "sync_one_subscription_sources")]
SyncOneSubscriptionSources,
}
impl TryFrom<&SubscriberTask> for serde_json::Value {
type Error = RecorderError;
fn try_from(value: &SubscriberTask) -> Result<Self, Self::Error> {
let json_value = serde_json::to_value(value)?;
Ok(match json_value {
serde_json::Value::Object(mut map) => {
map.remove("task_type");
serde_json::Value::Object(map)
}
_ => {
unreachable!("subscriber task must be an json object");
}
})
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, FromJsonQueryResult)]
#[serde(tag = "task_type")]
pub enum SubscriberTask {
#[serde(rename = "sync_one_subscription_feeds_incremental")]
SyncOneSubscriptionFeedsIncremental(SyncOneSubscriptionFeedsIncrementalTask),
#[serde(rename = "sync_one_subscription_feeds_full")]
SyncOneSubscriptionFeedsFull(SyncOneSubscriptionFeedsFullTask),
#[serde(rename = "sync_one_subscription_sources")]
SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTask),
}
impl SubscriberTask {
pub fn get_subscriber_id(&self) -> i32 {
match self {
Self::SyncOneSubscriptionFeedsIncremental(task) => task.0.get_subscriber_id(),
Self::SyncOneSubscriptionFeedsFull(task) => task.0.get_subscriber_id(),
Self::SyncOneSubscriptionSources(task) => task.0.get_subscriber_id(),
}
}
pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::SyncOneSubscriptionFeedsIncremental(task) => task.run(ctx).await,
Self::SyncOneSubscriptionFeedsFull(task) => task.run(ctx).await,
Self::SyncOneSubscriptionSources(task) => task.run(ctx).await,
}
}
pub fn task_type(&self) -> SubscriberTaskType {
match self {
Self::SyncOneSubscriptionFeedsIncremental(_) => {
SubscriberTaskType::SyncOneSubscriptionFeedsIncremental
}
Self::SyncOneSubscriptionFeedsFull(_) => {
SubscriberTaskType::SyncOneSubscriptionFeedsFull
}
Self::SyncOneSubscriptionSources(_) => SubscriberTaskType::SyncOneSubscriptionSources,
}
}
}
#[derive(
Clone,
Debug,
Serialize,
Deserialize,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum SystemTaskType {
#[serde(rename = "optimize_image")]
#[sea_orm(string_value = "optimize_image")]
OptimizeImage,
}
#[derive(Clone, Debug, Serialize, Deserialize, FromJsonQueryResult)]
pub enum SystemTask {
#[serde(rename = "optimize_image")]
OptimizeImage(OptimizeImageTask),
}
impl SystemTask {
pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
match self {
Self::OptimizeImage(task) => task.run(ctx).await,
}
}
}

View File

@@ -0,0 +1,58 @@
macro_rules! register_subscriber_task_type {
(
$(#[$type_meta:meta])*
$task_vis:vis struct $task_name:ident {
$($(#[$field_meta:meta])* pub $field_name:ident: $field_type:ty),* $(,)?
}
) => {
$(#[$type_meta])*
#[derive(typed_builder::TypedBuilder, ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[ts(export, rename_all = "camelCase")]
$task_vis struct $task_name {
$($(#[$field_meta])* pub $field_name: $field_type,)*
pub subscriber_id: i32,
#[builder(default = None)]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>,
}
paste::paste! {
$(#[$type_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
#[ts(export, rename_all = "camelCase")]
$task_vis struct [<$task_name Input>] {
$($(#[$field_meta])* pub $field_name: $field_type,)*
#[serde(default, skip_serializing_if = "Option::is_none")]
pub subscriber_id: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cron_id: Option<i32>,
}
}
impl $crate::task::SubscriberTaskTrait for $task_name {
paste::paste! {
type InputType = [<$task_name Input>];
}
fn get_subscriber_id(&self) -> i32 {
self.subscriber_id
}
fn get_cron_id(&self) -> Option<i32> {
self.cron_id
}
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self {
Self {
$($field_name: input.$field_name,)*
cron_id: input.cron_id,
subscriber_id: input.subscriber_id.unwrap_or(subscriber_id),
}
}
}
}
}
pub(crate) use register_subscriber_task_type;
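
To make the shape concrete, here is roughly what the macro emits for a hypothetical `ExampleTask` with one declared field (expansion sketch, not literal generated output):

```rust
// register_subscriber_task_type! { pub struct ExampleTask { pub subscription_id: i32 } }
// expands to approximately:
pub struct ExampleTask {
    pub subscription_id: i32,
    pub subscriber_id: i32,   // always appended
    pub cron_id: Option<i32>, // always appended, builder default None
}

pub struct ExampleTaskInput {
    pub subscription_id: i32,
    pub subscriber_id: Option<i32>, // optional in the API input
    pub cron_id: Option<i32>,
}

// from_input uses the id pinned in the input if present, otherwise it falls
// back to the authenticated id passed in:
// ExampleTask::from_input(input, auth_subscriber_id)
```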

View File

@@ -0,0 +1,169 @@
mod base;
mod subscription;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
pub use subscription::{
SyncOneSubscriptionFeedsFullTask, SyncOneSubscriptionFeedsIncrementalTask,
SyncOneSubscriptionSourcesTask,
};
macro_rules! register_subscriber_task_types {
(
task_type_enum: {
$(#[$type_enum_meta:meta])*
$type_vis:vis enum $type_enum_name:ident {
$(
$(#[$variant_meta:meta])*
$variant:ident => $string_value:literal
),* $(,)?
}
},
task_enum: {
$(#[$task_enum_meta:meta])*
$task_vis:vis enum $task_enum_name:ident {
$(
$(#[$task_variant_meta:meta])*
$task_variant:ident($task_type:ty)
),* $(,)?
}
}
) => {
$(#[$type_enum_meta])*
#[derive(serde::Serialize, serde::Deserialize)]
#[sea_orm(rs_type = "String", db_type = "Text")]
$type_vis enum $type_enum_name {
$(
$(#[$variant_meta])*
#[serde(rename = $string_value)]
#[sea_orm(string_value = $string_value)]
$variant,
)*
}
$(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(tag = "task_type")]
#[ts(export, rename = "SubscriberTaskType", rename_all = "camelCase", tag = "taskType")]
$task_vis enum $task_enum_name {
$(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant($task_type),
)*
}
paste::paste! {
$(#[$task_enum_meta])*
#[derive(ts_rs::TS, serde::Serialize, serde::Deserialize)]
#[serde(tag = "taskType", rename_all = "camelCase")]
#[ts(export, rename_all = "camelCase", tag = "taskType")]
$task_vis enum [<$task_enum_name Input>] {
$(
$(#[$task_variant_meta])*
#[serde(rename = $string_value)]
$task_variant(<$task_type as $crate::task::SubscriberTaskTrait>::InputType),
)*
}
}
impl TryFrom<$task_enum_name> for serde_json::Value {
type Error = $crate::errors::RecorderError;
fn try_from(value: $task_enum_name) -> Result<Self, Self::Error> {
let json_value = serde_json::to_value(value)?;
Ok(match json_value {
serde_json::Value::Object(mut map) => {
map.remove("task_type");
serde_json::Value::Object(map)
}
_ => {
unreachable!("subscriber task must be an json object");
}
})
}
}
impl $task_enum_name {
pub fn task_type(&self) -> $type_enum_name {
match self {
$(Self::$task_variant(_) => $type_enum_name::$variant,)*
}
}
}
#[async_trait::async_trait]
impl $crate::task::AsyncTaskTrait for $task_enum_name {
async fn run_async(self, ctx: std::sync::Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::AsyncTaskTrait>::run_async(t, ctx).await,)*
}
}
}
impl $crate::task::SubscriberTaskTrait for $task_enum_name {
paste::paste! {
type InputType = [<$task_enum_name Input>];
}
fn get_subscriber_id(&self) -> i32 {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::SubscriberTaskTrait>::get_subscriber_id(t),)*
}
}
fn get_cron_id(&self) -> Option<i32> {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::SubscriberTaskTrait>::get_cron_id(t),)*
}
}
fn from_input(input: Self::InputType, subscriber_id: i32) -> Self {
    // Enum variants cannot be pattern-matched through an associated type path
    // (`Self::InputType::Variant`), so rebuild the concrete input enum name.
    paste::paste! {
        match input {
            $([<$task_enum_name Input>]::$task_variant(t) =>
                Self::$task_variant(<$task_type as $crate::task::SubscriberTaskTrait>::from_input(t, subscriber_id)),)*
        }
    }
}
}
$(
impl From<$task_type> for $task_enum_name {
fn from(task: $task_type) -> Self {
Self::$task_variant(task)
}
}
)*
};
}
register_subscriber_task_types!(
task_type_enum: {
#[derive(
Clone,
Debug,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
pub enum SubscriberTaskType {
SyncOneSubscriptionFeedsIncremental => "sync_one_subscription_feeds_incremental",
SyncOneSubscriptionFeedsFull => "sync_one_subscription_feeds_full",
SyncOneSubscriptionSources => "sync_one_subscription_sources"
}
},
task_enum: {
#[derive(Clone, Debug, PartialEq, Eq, FromJsonQueryResult)]
pub enum SubscriberTask {
SyncOneSubscriptionFeedsIncremental(SyncOneSubscriptionFeedsIncrementalTask),
SyncOneSubscriptionFeedsFull(SyncOneSubscriptionFeedsFullTask),
SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTask),
}
}
);
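
End to end, an input tagged with `taskType` deserializes into `SubscriberTaskInput` and is bound to the authenticated subscriber via `from_input`. A hedged sketch, using only the serde attributes declared above:

```rust
use crate::task::{SubscriberTask, SubscriberTaskInput, SubscriberTaskTrait};

// Sketch only: tag and field names follow the serde attributes above.
let input: SubscriberTaskInput = serde_json::from_value(serde_json::json!({
    "taskType": "sync_one_subscription_feeds_incremental",
    "subscriptionId": 42
}))?;
let task = SubscriberTask::from_input(input, /* authenticated subscriber */ 7);
assert_eq!(task.get_subscriber_id(), 7);
```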

View File

@@ -0,0 +1,66 @@
use sea_orm::prelude::*;
use super::base::register_subscriber_task_type;
use crate::{errors::RecorderResult, models::subscriptions::SubscriptionTrait};
macro_rules! register_subscription_task_type {
(
$(#[$type_meta:meta])* pub struct $task_name:ident {
$($(#[$field_meta:meta])* pub $field_name:ident: $field_type:ty),* $(,)?
} => async |$subscription_param:ident, $ctx_param:ident| -> $task_return_type:ty $method_body:block
) => {
register_subscriber_task_type! {
$(#[$type_meta])*
pub struct $task_name {
$($(#[$field_meta])* pub $field_name: $field_type,)*
pub subscription_id: i32,
}
}
#[async_trait::async_trait]
impl $crate::task::AsyncTaskTrait for $task_name {
async fn run_async(self, ctx: std::sync::Arc<dyn $crate::app::AppContextTrait>) -> $task_return_type {
use $crate::models::subscriptions::{
Entity, Column, Subscription,
};
let subscription_model = Entity::find()
.filter(Column::Id.eq(self.subscription_id))
.filter(Column::SubscriberId.eq(self.subscriber_id))
.one(ctx.db())
.await?
.ok_or_else(|| $crate::errors::RecorderError::from_entity_not_found::<Entity>())?;
let $subscription_param = Subscription::try_from_model(&subscription_model)?;
let $ctx_param = ctx;
$method_body
}
}
}
}
register_subscription_task_type! {
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SyncOneSubscriptionFeedsIncrementalTask {
} => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_feeds_incremental(ctx).await?;
Ok(())
}
}
register_subscription_task_type! {
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SyncOneSubscriptionFeedsFullTask {
} => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_feeds_full(ctx).await?;
Ok(())
}
}
register_subscription_task_type! {
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SyncOneSubscriptionSourcesTask {
} => async |subscription, ctx| -> RecorderResult<()> {
subscription.sync_sources(ctx).await?;
Ok(())
}
}
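
Each generated task also carries a TypedBuilder, so enqueueing one via the `TaskService` shown later in this diff might look like the following sketch (`task_service` is assumed):

```rust
// Sketch only: `task_service` is assumed to be a TaskService instance.
let task = SyncOneSubscriptionFeedsIncrementalTask::builder()
    .subscription_id(42)
    .subscriber_id(7)
    .build(); // cron_id defaults to None via #[builder(default = None)]

let task_id = task_service.add_subscriber_task(task.into()).await?;
```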

View File

@@ -1,62 +0,0 @@
use std::sync::Arc;
use sea_orm::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
errors::RecorderResult,
models::subscriptions::{self, SubscriptionTrait},
task::AsyncTaskTrait,
};
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionFeedsIncrementalTask(pub subscriptions::Subscription);
impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsIncrementalTask {
fn from(subscription: subscriptions::Subscription) -> Self {
Self(subscription)
}
}
#[async_trait::async_trait]
impl AsyncTaskTrait for SyncOneSubscriptionFeedsIncrementalTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_feeds_incremental(ctx).await?;
Ok(())
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionFeedsFullTask(pub subscriptions::Subscription);
impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsFullTask {
fn from(subscription: subscriptions::Subscription) -> Self {
Self(subscription)
}
}
#[async_trait::async_trait]
impl AsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_feeds_full(ctx).await?;
Ok(())
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SyncOneSubscriptionSourcesTask(pub subscriptions::Subscription);
#[async_trait::async_trait]
impl AsyncTaskTrait for SyncOneSubscriptionSourcesTask {
async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
self.0.sync_sources(ctx).await?;
Ok(())
}
}
impl From<subscriptions::Subscription> for SyncOneSubscriptionSourcesTask {
fn from(subscription: subscriptions::Subscription) -> Self {
Self(subscription)
}
}

View File

@@ -0,0 +1,108 @@
mod media;
pub use media::OptimizeImageTask;
use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
macro_rules! register_system_task_types {
(
task_type_enum: {
$(#[$type_enum_meta:meta])*
pub enum $type_enum_name:ident {
$(
$(#[$variant_meta:meta])*
$variant:ident => $string_value:literal
),* $(,)?
}
},
task_enum: {
$(#[$task_enum_meta:meta])*
pub enum $task_enum_name:ident {
$(
$task_variant:ident($task_type:ty)
),* $(,)?
}
}
) => {
$(#[$type_enum_meta])*
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum $type_enum_name {
$(
$(#[$variant_meta])*
#[serde(rename = $string_value)]
#[sea_orm(string_value = $string_value)]
$variant,
)*
}
$(#[$task_enum_meta])*
#[serde(tag = "task_type")]
pub enum $task_enum_name {
$(
$task_variant($task_type),
)*
}
impl TryFrom<$task_enum_name> for serde_json::Value {
type Error = $crate::errors::RecorderError;
fn try_from(value: $task_enum_name) -> Result<Self, Self::Error> {
let json_value = serde_json::to_value(value)?;
Ok(match json_value {
serde_json::Value::Object(mut map) => {
map.remove("task_type");
serde_json::Value::Object(map)
}
_ => {
unreachable!("subscriber task must be an json object");
}
})
}
}
impl $task_enum_name {
pub fn task_type(&self) -> $type_enum_name {
match self {
$(Self::$task_variant(_) => $type_enum_name::$variant,)*
}
}
}
#[async_trait::async_trait]
impl $crate::task::AsyncTaskTrait for $task_enum_name {
async fn run_async(self, ctx: std::sync::Arc<dyn $crate::app::AppContextTrait>) -> $crate::errors::RecorderResult<()> {
match self {
$(Self::$task_variant(t) =>
<$task_type as $crate::task::AsyncTaskTrait>::run_async(t, ctx).await,)*
}
}
}
};
}
register_system_task_types! {
task_type_enum: {
#[derive(
Clone,
Debug,
Serialize,
Deserialize,
PartialEq,
Eq,
Copy,
DeriveActiveEnum,
DeriveDisplay,
EnumIter,
)]
pub enum SystemTaskType {
OptimizeImage => "optimize_image"
}
},
task_enum: {
#[derive(Clone, Debug, Serialize, Deserialize, FromJsonQueryResult)]
pub enum SystemTask {
OptimizeImage(OptimizeImageTask),
}
}
}

View File

@@ -1,18 +1,21 @@
-use std::{ops::Deref, str::FromStr, sync::Arc};
+use std::{future::Future, ops::Deref, str::FromStr, sync::Arc};

use apalis::prelude::*;
use apalis_sql::{
    Config,
    context::SqlContext,
-    postgres::{PgListen, PostgresStorage},
+    postgres::{PgListen as ApalisPgListen, PostgresStorage as ApalisPostgresStorage},
};
+use sea_orm::sqlx::postgres::PgListener;
use tokio::sync::RwLock;

use crate::{
    app::AppContextTrait,
    errors::{RecorderError, RecorderResult},
+    models::cron::{self, CRON_DUE_EVENT},
    task::{
-        SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, SubscriberTask, TaskConfig,
+        AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, SubscriberTask,
+        TaskConfig,
        config::{default_subscriber_task_workers, default_system_task_workers},
        registry::SystemTask,
    },
@@ -21,8 +24,9 @@ use crate::{
pub struct TaskService {
    pub config: TaskConfig,
    ctx: Arc<dyn AppContextTrait>,
-    subscriber_task_storage: Arc<RwLock<PostgresStorage<SubscriberTask>>>,
-    system_task_storage: Arc<RwLock<PostgresStorage<SystemTask>>>,
+    subscriber_task_storage: Arc<RwLock<ApalisPostgresStorage<SubscriberTask>>>,
+    system_task_storage: Arc<RwLock<ApalisPostgresStorage<SystemTask>>>,
+    cron_worker_id: String,
}

impl TaskService {
@@ -38,17 +42,18 @@ impl TaskService {
        };
        let pool = ctx.db().get_postgres_connection_pool().clone();

-        let subscriber_task_storage_config =
-            Config::new(SUBSCRIBER_TASK_APALIS_NAME).set_keep_alive(config.subscriber_task_timeout);
-        let system_task_storage_config =
-            Config::new(SYSTEM_TASK_APALIS_NAME).set_keep_alive(config.system_task_timeout);
+        let subscriber_task_storage_config = Config::new(SUBSCRIBER_TASK_APALIS_NAME)
+            .set_reenqueue_orphaned_after(config.subscriber_task_reenqueue_orphaned_after);
+        let system_task_storage_config = Config::new(SYSTEM_TASK_APALIS_NAME)
+            .set_reenqueue_orphaned_after(config.system_task_reenqueue_orphaned_after);
        let subscriber_task_storage =
-            PostgresStorage::new_with_config(pool.clone(), subscriber_task_storage_config);
+            ApalisPostgresStorage::new_with_config(pool.clone(), subscriber_task_storage_config);
        let system_task_storage =
-            PostgresStorage::new_with_config(pool, system_task_storage_config);
+            ApalisPostgresStorage::new_with_config(pool, system_task_storage_config);

        Ok(Self {
            config,
+            cron_worker_id: nanoid::nanoid!(),
            ctx,
            subscriber_task_storage: Arc::new(RwLock::new(subscriber_task_storage)),
            system_task_storage: Arc::new(RwLock::new(system_task_storage)),
@@ -61,7 +66,7 @@ impl TaskService {
    ) -> RecorderResult<()> {
        let ctx = data.deref().clone();
-        job.run(ctx).await
+        job.run_async(ctx).await
    }

    async fn run_system_task(
@@ -69,7 +74,7 @@ impl TaskService {
        data: Data<Arc<dyn AppContextTrait>>,
    ) -> RecorderResult<()> {
        let ctx = data.deref().clone();
-        job.run(ctx).await
+        job.run_async(ctx).await
    }

    pub async fn retry_subscriber_task(&self, job_id: String) -> RecorderResult<()> {
@@ -100,7 +105,6 @@ impl TaskService {
    pub async fn add_subscriber_task(
        &self,
-        _subscriber_id: i32,
        subscriber_task: SubscriberTask,
    ) -> RecorderResult<TaskId> {
        let task_id = {
@@ -132,8 +136,88 @@ impl TaskService {
        Ok(task_id)
    }

-    pub async fn setup_monitor(&self) -> RecorderResult<Monitor> {
-        let mut monitor = Monitor::new();
+    pub async fn run<F, Fut>(&self, shutdown_signal: Option<F>) -> RecorderResult<()>
+    where
F: Fn() -> Fut + Send + 'static,
Fut: Future<Output = ()> + Send,
{
tokio::try_join!(
async {
let monitor = self.setup_apalis_monitor().await?;
if let Some(shutdown_signal) = shutdown_signal {
monitor
.run_with_signal(async move {
shutdown_signal().await;
tracing::info!("apalis shutting down...");
Ok(())
})
.await?;
} else {
monitor.run().await?;
}
Ok::<_, RecorderError>(())
},
async {
let listener = self.setup_apalis_listener().await?;
tokio::task::spawn(async move {
if let Err(e) = listener.listen().await {
tracing::error!("Error listening to apalis: {e}");
}
});
Ok::<_, RecorderError>(())
},
async {
let listener = self.setup_cron_due_listening().await?;
let ctx = self.ctx.clone();
let cron_worker_id = self.cron_worker_id.clone();
let retry_duration = chrono::Duration::milliseconds(
self.config.cron_retry_duration.as_millis() as i64,
);
tokio::task::spawn(async move {
if let Err(e) =
Self::listen_cron_due(listener, ctx, &cron_worker_id, retry_duration).await
{
tracing::error!("Error listening to cron due: {e}");
}
});
Ok::<_, RecorderError>(())
},
async {
let ctx = self.ctx.clone();
let retry_duration = chrono::Duration::milliseconds(
self.config.cron_retry_duration.as_millis() as i64,
);
tokio::task::spawn(async move {
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(60));
loop {
interval.tick().await;
if let Err(e) = cron::Model::check_and_cleanup_expired_cron_locks(
ctx.as_ref(),
retry_duration,
)
.await
{
tracing::error!(
"Error checking and cleaning up expired cron locks: {e}"
);
}
if let Err(e) = cron::Model::check_and_trigger_due_crons(ctx.as_ref()).await
{
tracing::error!("Error checking and triggering due crons: {e}");
}
}
});
Ok::<_, RecorderError>(())
}
)?;
Ok(())
}
async fn setup_apalis_monitor(&self) -> RecorderResult<Monitor> {
let mut apalis_monitor = Monitor::new();
        {
            let subscriber_task_worker = WorkerBuilder::new(SUBSCRIBER_TASK_APALIS_NAME)
@@ -155,28 +239,58 @@ impl TaskService {
                .backend(self.system_task_storage.read().await.clone())
                .build_fn(Self::run_system_task);

-            monitor = monitor
+            apalis_monitor = apalis_monitor
                .register(subscriber_task_worker)
                .register(system_task_worker);
        }

-        Ok(monitor)
+        Ok(apalis_monitor)
    }

-    pub async fn setup_listener(&self) -> RecorderResult<PgListen> {
+    async fn setup_apalis_listener(&self) -> RecorderResult<ApalisPgListen> {
        let pool = self.ctx.db().get_postgres_connection_pool().clone();
-        let mut task_listener = PgListen::new(pool).await?;
+        let mut apalis_pg_listener = ApalisPgListen::new(pool).await?;
        {
            let mut subscriber_task_storage = self.subscriber_task_storage.write().await;
-            task_listener.subscribe_with(&mut subscriber_task_storage);
+            apalis_pg_listener.subscribe_with(&mut subscriber_task_storage);
        }
        {
            let mut system_task_storage = self.system_task_storage.write().await;
-            task_listener.subscribe_with(&mut system_task_storage);
+            apalis_pg_listener.subscribe_with(&mut system_task_storage);
        }
-        Ok(task_listener)
+        Ok(apalis_pg_listener)
}
async fn setup_cron_due_listening(&self) -> RecorderResult<PgListener> {
let pool = self.ctx.db().get_postgres_connection_pool().clone();
let listener = PgListener::connect_with(&pool).await?;
Ok(listener)
}
async fn listen_cron_due(
mut listener: PgListener,
ctx: Arc<dyn AppContextTrait>,
worker_id: &str,
retry_duration: chrono::Duration,
) -> RecorderResult<()> {
listener.listen(CRON_DUE_EVENT).await?;
loop {
let notification = listener.recv().await?;
if let Err(e) = cron::Model::handle_cron_notification(
ctx.as_ref(),
notification,
worker_id,
retry_duration,
)
.await
{
tracing::error!("Error handling cron notification: {e}");
}
}
} }
}
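
A possible call site for the new `run` entry point, with a ctrl-c shutdown closure (sketch only; the surrounding binary wiring is not part of this diff):

```rust
// Sketch only: `task_service` is a TaskService built from the app context.
task_service
    .run(Some(|| async {
        let _ = tokio::signal::ctrl_c().await;
    }))
    .await?;
```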

View File

@@ -9,12 +9,12 @@ pub trait ControllerTrait: Sized {
        -> Router<Arc<dyn AppContextTrait>>;
}

-pub struct PrefixController {
+pub struct NestRouterController {
    prefix: Cow<'static, str>,
    router: Router<Arc<dyn AppContextTrait>>,
}

-impl PrefixController {
+impl NestRouterController {
    pub fn new(
        prefix: impl Into<Cow<'static, str>>,
        router: Router<Arc<dyn AppContextTrait>>,
@@ -26,7 +26,7 @@ impl PrefixController {
    }
}

-impl ControllerTrait for PrefixController {
+impl ControllerTrait for NestRouterController {
    fn apply_to(
        self,
        router: Router<Arc<dyn AppContextTrait>>,
@@ -36,15 +36,15 @@ impl ControllerTrait for PrefixController {
}

pub enum Controller {
-    Prefix(PrefixController),
+    NestRouter(NestRouterController),
}

impl Controller {
-    pub fn from_prefix(
+    pub fn from_nest_router(
        prefix: impl Into<Cow<'static, str>>,
        router: Router<Arc<dyn AppContextTrait>>,
    ) -> Self {
-        Self::Prefix(PrefixController::new(prefix, router))
+        Self::NestRouter(NestRouterController::new(prefix, router))
    }
}
@@ -54,7 +54,7 @@ impl ControllerTrait for Controller {
        router: Router<Arc<dyn AppContextTrait>>,
    ) -> Router<Arc<dyn AppContextTrait>> {
        match self {
-            Self::Prefix(p) => p.apply_to(router),
+            Self::NestRouter(p) => p.apply_to(router),
        }
    }
}

View File

@@ -38,5 +38,5 @@ async fn rss_handler(
pub async fn create(_ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
    let router = Router::<Arc<dyn AppContextTrait>>::new().route("/rss/{token}", get(rss_handler));

-    Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))
+    Ok(Controller::from_nest_router(CONTROLLER_PREFIX, router))
}

View File

@@ -71,5 +71,5 @@ pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller>
            post(graphql_handler).layer(from_fn_with_state(ctx, auth_middleware)),
        )
        .route("/introspection", introspection_handler);

-    Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))
+    Ok(Controller::from_nest_router(CONTROLLER_PREFIX, router))
}

View File

@@ -38,5 +38,5 @@ pub async fn create(_context: Arc<dyn AppContextTrait>) -> RecorderResult<Contro
.route("/health", get(health)) .route("/health", get(health))
.route("/ping", get(ping)); .route("/ping", get(ping));
Ok(Controller::from_prefix(CONTROLLER_PREFIX, router)) Ok(Controller::from_nest_router(CONTROLLER_PREFIX, router))
} }

View File

@@ -5,4 +5,4 @@ pub mod metadata;
pub mod oidc;
pub mod r#static;

-pub use core::{Controller, ControllerTrait, PrefixController};
+pub use core::{Controller, ControllerTrait, NestRouterController};

View File

@@ -77,5 +77,5 @@ pub async fn create(_context: Arc<dyn AppContextTrait>) -> RecorderResult<Contro
.route("/auth", get(oidc_auth)) .route("/auth", get(oidc_auth))
.route("/callback", get(oidc_callback)); .route("/callback", get(oidc_callback));
Ok(Controller::from_prefix(CONTROLLER_PREFIX, router)) Ok(Controller::from_nest_router(CONTROLLER_PREFIX, router))
} }

View File

@@ -99,5 +99,5 @@ pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller>
        )
        .route("/public/{*path}", get(serve_public_static));

-    Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))
+    Ok(Controller::from_nest_router(CONTROLLER_PREFIX, router))
}

View File

@@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"rootDir": ".",
"composite": true,
"module": "ESNext",
"moduleResolution": "bundler"
},
"include": ["bindings"]
}

View File

@@ -1,8 +0,0 @@
AUTH_TYPE = "basic" # or oidc
BASIC_USER = "konobangu"
BASIC_PASSWORD = "konobangu"
# OIDC_ISSUER="https://auth.logto.io/oidc"
# OIDC_AUDIENCE = "https://konobangu.com/api"
# OIDC_CLIENT_ID = "client_id"
# OIDC_CLIENT_SECRET = "client_secret" # optional
# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"

View File

@@ -0,0 +1,8 @@
AUTH__AUTH_TYPE = "basic" # or oidc
AUTH__BASIC_USER = "konobangu"
AUTH__BASIC_PASSWORD = "konobangu"
# AUTH__OIDC_ISSUER="https://auth.logto.io/oidc"
# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
# AUTH__OIDC_CLIENT_ID = "client_id"
# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
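
The double underscore follows the usual nested-environment convention: `AUTH__BASIC_USER` maps to the nested key `auth.basic_user`. A minimal sketch of that mapping, assuming a loader like the `config` crate (the project's actual loader is not shown in this diff):

```rust
// Assumption: the `config` crate with "__" as the nesting separator.
let settings = config::Config::builder()
    .add_source(config::Environment::default().separator("__"))
    .build()?;
let user: String = settings.get_string("auth.basic_user")?;
```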

View File

@@ -0,0 +1,6 @@
AUTH__AUTH_TYPE = "basic" # or oidc
# AUTH__OIDC_ISSUER="https://auth.logto.io/oidc"
# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
# AUTH__OIDC_CLIENT_ID = "client_id"
# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"

View File

@@ -12,6 +12,12 @@ const config: CodegenConfig = {
    },
    config: {
      enumsAsConst: true,
scalars: {
SubscriberTaskType: {
input: 'recorder/bindings/SubscriberTaskInput#SubscriberTaskInput',
output: 'recorder/bindings/SubscriberTaskType#SubscriberTaskType',
},
},
      },
    },
  },

Some files were not shown because too many files have changed in this diff.