feat: support optimizing images

This commit is contained in:
master 2025-06-20 01:56:34 +08:00
parent 324427513c
commit 02c16a2972
37 changed files with 1781 additions and 698 deletions

View File

@@ -40,6 +40,7 @@
     }
   ],
   "rust-analyzer.cargo.features": "all",
+  "rust-analyzer.testExplorer": true
   // https://github.com/rust-lang/rust/issues/141540
   // "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
   // "rust-analyzer.check.extraEnv": {

Cargo.lock (generated, 1203 changes)

File diff suppressed because it is too large.

View File

@@ -66,6 +66,25 @@ mockito = { version = "1.6.1" }
 convert_case = "0.8"
 color-eyre = "0.6.5"
 inquire = "0.7.5"
+image = "0.25.6"
+uuid = { version = "1.6.0", features = ["v4"] }
+maplit = "1.0.2"
+once_cell = "1.20.2"
+rand = "0.9.1"
+rust_decimal = "1.37.2"
+base64 = "0.22.1"
+nom = "8.0.0"
+percent-encoding = "2.3.1"
+num-traits = "0.2.19"
+http = "1.2.0"
+async-stream = "0.3.6"
+serde_variant = "0.1.3"
+tracing-appender = "0.2.3"
+clap = "4.5.40"
+ipnetwork = "0.21.1"
+typed-builder = "0.21.0"
+nanoid = "0.4.0"
+webp = "0.3.0"

 [patch.crates-io]
 seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }

View File

@@ -14,7 +14,7 @@ path = "src/bin/main.rs"
 required-features = []

 [features]
-default = []
+default = ["jxl"]
 playground = ["dep:inquire", "dep:color-eyre"]
 testcontainers = [
@@ -23,6 +23,7 @@ testcontainers = [
   "downloader/testcontainers",
   "testcontainers-modules/postgres",
 ]
+jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]

 [dependencies]
 downloader = { workspace = true }
@@ -58,6 +59,25 @@ mockito = { workspace = true }
 color-eyre = { workspace = true, optional = true }
 inquire = { workspace = true, optional = true }
 convert_case = { workspace = true }
+image = { workspace = true }
+uuid = { workspace = true }
+maplit = { workspace = true }
+once_cell = { workspace = true }
+rand = { workspace = true }
+rust_decimal = { workspace = true }
+base64 = { workspace = true }
+nom = { workspace = true }
+percent-encoding = { workspace = true }
+num-traits = { workspace = true }
+http = { workspace = true }
+async-stream = { workspace = true }
+serde_variant = { workspace = true }
+tracing-appender = { workspace = true }
+clap = { workspace = true }
+ipnetwork = { workspace = true }
+typed-builder = { workspace = true }
+nanoid = { workspace = true }
+webp = { workspace = true }

 sea-orm = { version = "1.1", features = [
   "sqlx-sqlite",
@@ -67,19 +87,13 @@ sea-orm = { version = "1.1", features = [
   "debug-print",
 ] }
 figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
-uuid = { version = "1.6.0", features = ["v4"] }
 sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
 rss = "2"
 fancy-regex = "0.14"
-maplit = "1.0.2"
 lightningcss = "1.0.0-alpha.66"
 html-escape = "0.2.13"
 opendal = { version = "0.53", features = ["default", "services-fs"] }
-zune-image = "0.4.15"
-once_cell = "1.20.2"
 scraper = "0.23"
-log = "0.4"
 async-graphql = { version = "7", features = ["dynamic-schema"] }
 async-graphql-axum = "7"
 seaography = { version = "1.1", features = [
@@ -92,7 +106,6 @@ seaography = { version = "1.1", features = [
   "with-postgres-array",
   "with-json-as-scalar",
 ] }
-base64 = "0.22.1"
 tower = "0.5.2"
 tower-http = { version = "0.6", features = [
   "trace",
@@ -107,39 +120,26 @@ tower-http = { version = "0.6", features = [
 tera = "1.20.0"
 openidconnect = { version = "4" }
 dotenvy = "0.15.7"
-http = "1.2.0"
-async-stream = "0.3.6"
-serde_variant = "0.1.3"
-tracing-appender = "0.2.3"
-clap = "4.5.31"
-ipnetwork = "0.21.1"
-typed-builder = "0.21.0"
-apalis = { version = "0.7", features = [
-    "limit",
-    "tracing",
-    "catch-panic",
-    "retry",
-] }
+jpegxl-rs = { version = "0.11.2", optional = true }
+jpegxl-sys = { version = "0.11.2", optional = true }
+
+apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
 apalis-sql = { version = "0.7", features = ["postgres"] }
 cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
-rand = "0.9.1"
-rust_decimal = "1.37.1"
 reqwest_cookie_store = "0.8.0"
-nanoid = "0.4.0"
 jwtk = "0.4.0"
-percent-encoding = "2.3.1"
 mime_guess = "2.0.5"
-nom = "8.0.0"
 icu_properties = "2.0.1"
 icu = "2.0.0"
-num-traits = "0.2.19"
 tracing-tree = "0.4.0"
+num_cpus = "1.17.0"
+headers-accept = "0.1.4"

 [dev-dependencies]
+inquire = { workspace = true }
+color-eyre = { workspace = true }
 serial_test = "3"
 insta = { version = "1", features = ["redactions", "toml", "filters"] }
 rstest = "0.25"
 ctor = "0.4.0"
-inquire = { workspace = true }
-color-eyre = { workspace = true }

View File

@@ -1,45 +0,0 @@
#!/usr/bin/env zx
import { glob } from 'node:fs/promises';
import os from 'node:os';
import path from 'node:path';
import { chunk } from 'es-toolkit/array';

const dataDir = path.join(import.meta.dirname, '../../../data')

/**
 * @type {string[]}
 */
const images = [];

for await (const image of glob('**/*.{jpg,jpeg,png,gif,svg}', {
  cwd: dataDir,
})) {
  images.push(image)
}

const cpus = os.cpus().length - 1;
const chunkSize = Math.ceil(images.length / cpus);
const chunks = chunk(images, chunkSize);

/**
 * @param {string[]} images
 */
async function convertImages(images) {
  for await (const image of images) {
    const imagePath = path.resolve(dataDir, image)
    const webp = imagePath.replace(path.extname(imagePath), '.webp')
    const avif = imagePath.replace(path.extname(imagePath), '.avif')
    console.log(`Converting ${imagePath} to ${webp}...`);
    await $`ffmpeg -i "${imagePath}" -c:v libwebp -lossless 1 "${webp}"`;
    console.log(`Converting ${imagePath} to ${avif}...`);
    await $`ffmpeg -i "${imagePath}" -c:v libaom-av1 -still-picture 1 -pix_fmt yuv420p10le -crf 0 -strict experimental "${avif}"`;
  }
}

await Promise.all(
  chunks.map(convertImages)
)

View File

@@ -21,6 +21,9 @@ pub struct MainCliArgs {
     /// Explicit environment
     #[arg(short, long)]
     environment: Option<Environment>,
+
+    #[arg(long)]
+    graceful_shutdown: Option<bool>,
 }

 pub struct AppBuilder {
@@ -28,6 +31,7 @@ pub struct AppBuilder {
     config_file: Option<String>,
     working_dir: String,
     environment: Environment,
+    pub graceful_shutdown: bool,
 }

 impl AppBuilder {
@@ -61,7 +65,8 @@ impl AppBuilder {
         builder = builder
             .config_file(args.config_file)
             .dotenv_file(args.dotenv_file)
-            .environment(environment);
+            .environment(environment)
+            .graceful_shutdown(args.graceful_shutdown.unwrap_or(true));

         Ok(builder)
     }
@@ -118,6 +123,12 @@
         ret
     }

+    pub fn graceful_shutdown(self, graceful_shutdown: bool) -> Self {
+        let mut ret = self;
+        ret.graceful_shutdown = graceful_shutdown;
+        ret
+    }
+
     pub fn dotenv_file(self, dotenv_file: Option<String>) -> Self {
         let mut ret = self;
         ret.dotenv_file = dotenv_file;
@@ -141,6 +152,7 @@ impl Default for AppBuilder {
             dotenv_file: None,
             config_file: None,
             working_dir: String::from("."),
+            graceful_shutdown: true,
         }
     }
 }
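Note on the new flag: with clap's derive API, an `Option<bool>` field declared with `#[arg(long)]` takes an explicit value rather than acting as a bare switch. A minimal sketch (the `Demo` struct is hypothetical, not part of this commit) of how the flag is parsed and defaulted:

    use clap::Parser;

    #[derive(Parser)]
    struct Demo {
        #[arg(long)]
        graceful_shutdown: Option<bool>,
    }

    fn main() {
        // `--graceful-shutdown false` disables signal handling; omitting the flag
        // yields `None`, which AppBuilder maps to `true` via `unwrap_or(true)`.
        let demo = Demo::parse_from(["demo", "--graceful-shutdown", "false"]);
        assert_eq!(demo.graceful_shutdown, Some(false));
    }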

View File

@@ -11,6 +11,7 @@ leaky_bucket_initial_tokens = 0
 leaky_bucket_refill_tokens = 1
 leaky_bucket_refill_interval = 500
+
 [mikan.http_client.proxy]

 [mikan.http_client.proxy.headers]
@@ -26,3 +27,5 @@ complexity_limit = inf
 [task]

 [message]
+
+[media]

View File

@@ -11,8 +11,8 @@ use super::env::Environment;
 use crate::{
     auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
     errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
-    logger::LoggerConfig, message::MessageConfig, storage::StorageConfig, task::TaskConfig,
-    web::WebServerConfig,
+    logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
+    task::TaskConfig, web::WebServerConfig,
 };

 const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
@@ -27,6 +27,7 @@ pub struct AppConfig {
     pub mikan: MikanConfig,
     pub crypto: CryptoConfig,
     pub graphql: GraphQLConfig,
+    pub media: MediaConfig,
     pub logger: LoggerConfig,
     pub database: DatabaseConfig,
     pub task: TaskConfig,

View File

@@ -6,7 +6,8 @@ use super::{Environment, config::AppConfig};
 use crate::{
     auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
     errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
-    logger::LoggerService, message::MessageService, storage::StorageService, task::TaskService,
+    logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
+    task::TaskService,
 };

 pub trait AppContextTrait: Send + Sync + Debug {
@@ -23,6 +24,7 @@ pub trait AppContextTrait: Send + Sync + Debug {
     fn crypto(&self) -> &CryptoService;
     fn task(&self) -> &TaskService;
     fn message(&self) -> &MessageService;
+    fn media(&self) -> &MediaService;
 }

 pub struct AppContext {
@@ -37,6 +39,7 @@ pub struct AppContext {
     working_dir: String,
     environment: Environment,
     message: MessageService,
+    media: MediaService,
     task: OnceCell<TaskService>,
     graphql: OnceCell<GraphQLService>,
 }
@@ -57,6 +60,7 @@ impl AppContext {
         let auth = AuthService::from_conf(config.auth).await?;
         let mikan = MikanClient::from_config(config.mikan).await?;
         let crypto = CryptoService::from_config(config.crypto).await?;
+        let media = MediaService::from_config(config.media).await?;

         let ctx = Arc::new(AppContext {
             config: config_cloned,
@@ -70,6 +74,7 @@
             working_dir: working_dir.to_string(),
             crypto,
             message,
+            media,
             task: OnceCell::new(),
             graphql: OnceCell::new(),
         });
@@ -136,4 +141,7 @@ impl AppContextTrait for AppContext {
     fn message(&self) -> &MessageService {
         &self.message
     }
+
+    fn media(&self) -> &MediaService {
+        &self.media
+    }
 }

View File

@@ -6,7 +6,6 @@ use tracing::instrument;
 use super::{builder::AppBuilder, context::AppContextTrait};
 use crate::{
-    app::Environment,
     errors::{RecorderError, RecorderResult},
     web::{
         controller::{self, core::ControllerTrait},
@@ -76,22 +75,30 @@ impl App {
             .into_make_service_with_connect_info::<SocketAddr>();

         let task = context.task();

+        let graceful_shutdown = self.builder.graceful_shutdown;
+
         tokio::try_join!(
             async {
-                axum::serve(listener, router)
-                    .with_graceful_shutdown(async move {
-                        Self::shutdown_signal().await;
-                        tracing::info!("axum shutting down...");
-                    })
-                    .await?;
+                let axum_serve = axum::serve(listener, router);
+
+                if graceful_shutdown {
+                    axum_serve
+                        .with_graceful_shutdown(async move {
+                            Self::shutdown_signal().await;
+                            tracing::info!("axum shutting down...");
+                        })
+                        .await?;
+                } else {
+                    axum_serve.await?;
+                }

                 Ok::<(), RecorderError>(())
             },
             async {
                 {
                     let monitor = task.setup_monitor().await?;

-                    if matches!(context.environment(), Environment::Development) {
-                        monitor.run().await?;
-                    } else {
+                    if graceful_shutdown {
                         monitor
                             .run_with_signal(async move {
                                 Self::shutdown_signal().await;
@@ -99,6 +106,8 @@ impl App {
                                 Ok(())
                             })
                             .await?;
+                    } else {
+                        monitor.run().await?;
                     }
                 }

View File

@@ -29,6 +29,11 @@ pub enum RecorderError {
         #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
         source: OptDynErr,
     },
+    #[snafu(transparent)]
+    ImageError { source: image::ImageError },
+    #[cfg(feature = "jxl")]
+    #[snafu(transparent)]
+    JxlEncodeError { source: jpegxl_rs::EncodeError },
     #[snafu(transparent, context(false))]
     HttpError { source: http::Error },
     #[snafu(transparent, context(false))]

View File

@@ -1,8 +1,5 @@
-use std::collections::HashMap;
-
 use fancy_regex::Regex as FancyRegex;
 use lazy_static::lazy_static;
-use maplit::hashmap;
 use regex::Regex;

 const LANG_ZH_TW: &str = "zh-tw";
@@ -34,40 +31,4 @@
         (LANG_JP, vec!["jp", "jpn", ""]),
         ]
     };
-    pub static ref BRACKETS_REG: Regex = Regex::new(r"[\[\]()【】()]").unwrap();
-    pub static ref DIGIT_1PLUS_REG: Regex = Regex::new(r"\d+").unwrap();
-    pub static ref ZH_NUM_MAP: HashMap<&'static str, i32> = {
-        hashmap! {
-            "〇" => 0,
-            "一" => 1,
-            "二" => 2,
-            "三" => 3,
-            "四" => 4,
-            "五" => 5,
-            "六" => 6,
-            "七" => 7,
-            "八" => 8,
-            "九" => 9,
-            "十" => 10,
-            "廿" => 20,
-            "百" => 100,
-            "千" => 1000,
-            "零" => 0,
-            "壹" => 1,
-            "贰" => 2,
-            "叁" => 3,
-            "肆" => 4,
-            "伍" => 5,
-            "陆" => 6,
-            "柒" => 7,
-            "捌" => 8,
-            "玖" => 9,
-            "拾" => 10,
-            "念" => 20,
-            "佰" => 100,
-            "仟" => 1000,
-        }
-    };
-    pub static ref ZH_NUM_RE: Regex =
-        Regex::new(r"[〇一二三四五六七八九十廿百千零壹贰叁肆伍陆柒捌玖拾念佰仟]").unwrap();
 }

View File

@@ -2,10 +2,6 @@ use url::Url;

 pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
     let mut image_url = base_url.join(image_src).ok()?;
-    if let Some((_, value)) = image_url.query_pairs().find(|(key, _)| key == "webp") {
-        image_url.set_query(Some(&format!("webp={value}")));
-    } else {
-        image_url.set_query(None);
-    }
+    image_url.set_query(None);
     Some(image_url)
 }
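The simplified behavior: any query string on the joined URL is now dropped unconditionally (the old code special-cased a `webp` query pair). A hedged example, with a Mikan-style URL borrowed from elsewhere in this commit:

    use url::Url;

    fn main() {
        let base = Url::parse("https://mikanani.me/Home/Bangumi/3416").unwrap();
        // A sized-thumbnail query like `?width=400` is stripped, so every variant
        // of one poster collapses to a single canonical URL.
        let src = extract_image_src_from_str("/images/Bangumi/202309/5ce9fed1.jpg?width=400", &base);
        assert_eq!(
            src.unwrap().as_str(),
            "https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
        );
    }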

View File

@@ -556,13 +556,8 @@
             subscriptions::{self, SubscriptionTrait},
         },
         test_utils::{
-            app::TestingAppContext,
-            crypto::build_testing_crypto_service,
-            database::build_testing_database_service,
-            mikan::{
-                MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential_form,
-            },
-            storage::build_testing_storage_service,
+            app::{TestingAppContext, TestingAppContextPreset},
+            mikan::{MikanMockServer, build_testing_mikan_credential_form},
             tracing::try_init_testing_tracing,
         },
     };
@@ -577,20 +572,11 @@
         let mikan_base_url = mikan_server.base_url().clone();

-        let app_ctx = {
-            let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
-            let db_service = build_testing_database_service(Default::default()).await?;
-            let crypto_service = build_testing_crypto_service().await?;
-            let storage_service = build_testing_storage_service().await?;
-            let app_ctx = TestingAppContext::builder()
-                .mikan(mikan_client)
-                .db(db_service)
-                .crypto(crypto_service)
-                .storage(storage_service)
-                .build();
-
-            Arc::new(app_ctx)
-        };
+        let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
+            mikan_base_url: mikan_base_url.to_string(),
+            database_config: None,
+        })
+        .await?;

         Ok(TestingResources {
             app_ctx,
View File

@@ -28,7 +28,12 @@ use crate::{
             MIKAN_YEAR_QUERY_KEY, MikanClient,
         },
     },
-    storage::{StorageContentCategory, StorageService},
+    media::{
+        AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
+        EncodeWebpOptions,
+    },
+    storage::StorageContentCategory,
+    task::{OptimizeImageTask, SystemTask},
 };

 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@@ -738,49 +743,92 @@
 #[instrument(skip_all, fields(origin_poster_src_url = origin_poster_src_url.as_str()))]
 pub async fn scrape_mikan_poster_meta_from_image_url(
-    mikan_client: &MikanClient,
-    storage_service: &StorageService,
+    ctx: &dyn AppContextTrait,
     origin_poster_src_url: Url,
 ) -> RecorderResult<MikanBangumiPosterMeta> {
-    if let Some(poster_src) = storage_service
-        .exists(
-            storage_service.build_public_object_path(
-                StorageContentCategory::Image,
-                MIKAN_POSTER_BUCKET_KEY,
-                &origin_poster_src_url
-                    .path()
-                    .replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
-            ),
-        )
-        .await?
-    {
-        return Ok(MikanBangumiPosterMeta {
+    let storage_service = ctx.storage();
+    let media_service = ctx.media();
+    let mikan_client = ctx.mikan();
+    let task_service = ctx.task();
+
+    let storage_path = storage_service.build_public_object_path(
+        StorageContentCategory::Image,
+        MIKAN_POSTER_BUCKET_KEY,
+        &origin_poster_src_url
+            .path()
+            .replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
+    );
+
+    let meta = if let Some(poster_src) = storage_service.exists(&storage_path).await? {
+        MikanBangumiPosterMeta {
             origin_poster_src: origin_poster_src_url,
             poster_src: Some(poster_src.to_string()),
-        });
-    }
+        }
+    } else {
+        let poster_data =
+            scrape_mikan_poster_data_from_image_url(mikan_client, origin_poster_src_url.clone())
+                .await?;

-    let poster_data =
-        scrape_mikan_poster_data_from_image_url(mikan_client, origin_poster_src_url.clone())
-            .await?;
+        let poster_str = storage_service
+            .write(storage_path.clone(), poster_data)
+            .await?;

-    let poster_str = storage_service
-        .write(
-            storage_service.build_public_object_path(
-                StorageContentCategory::Image,
-                MIKAN_POSTER_BUCKET_KEY,
-                &origin_poster_src_url
-                    .path()
-                    .replace(&format!("{MIKAN_BANGUMI_POSTER_PATH}/"), ""),
-            ),
-            poster_data,
-        )
-        .await?;
+        tracing::warn!(
+            poster_str = poster_str.to_string(),
+            "mikan poster meta extracted"
+        );

-    Ok(MikanBangumiPosterMeta {
-        origin_poster_src: origin_poster_src_url,
-        poster_src: Some(poster_str.to_string()),
-    })
+        MikanBangumiPosterMeta {
+            origin_poster_src: origin_poster_src_url,
+            poster_src: Some(poster_str.to_string()),
+        }
+    };
+
+    if meta.poster_src.is_some()
+        && storage_path
+            .extension()
+            .is_some_and(|ext| media_service.is_legacy_image_format(ext))
+    {
+        let auto_optimize_formats = &media_service.config.auto_optimize_formats;
+
+        if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Webp) {
+            let webp_storage_path = storage_path.with_extension("webp");
+            if storage_service.exists(&webp_storage_path).await?.is_none() {
+                task_service
+                    .add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
+                        source_path: storage_path.clone().to_string(),
+                        target_path: webp_storage_path.to_string(),
+                        format_options: EncodeImageOptions::Webp(EncodeWebpOptions::default()),
+                    }))
+                    .await?;
+            }
+        }
+        if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Avif) {
+            let avif_storage_path = storage_path.with_extension("avif");
+            if storage_service.exists(&avif_storage_path).await?.is_none() {
+                task_service
+                    .add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
+                        source_path: storage_path.clone().to_string(),
+                        target_path: avif_storage_path.to_string(),
+                        format_options: EncodeImageOptions::Avif(EncodeAvifOptions::default()),
+                    }))
+                    .await?;
+            }
+        }
+        if auto_optimize_formats.contains(&AutoOptimizeImageFormat::Jxl) {
+            let jxl_storage_path = storage_path.with_extension("jxl");
+            if storage_service.exists(&jxl_storage_path).await?.is_none() {
+                task_service
+                    .add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
+                        source_path: storage_path.clone().to_string(),
+                        target_path: jxl_storage_path.to_string(),
+                        format_options: EncodeImageOptions::Jxl(EncodeJxlOptions::default()),
+                    }))
+                    .await?;
+            }
+        }
+    }
+
+    Ok(meta)
 }

 pub fn extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(
@@ -1007,24 +1055,23 @@
 #[cfg(test)]
 mod test {
     #![allow(unused_variables)]
-    use std::{fs, sync::Arc};
+    use std::{fs, io::Cursor, sync::Arc};

     use futures::StreamExt;
+    use image::{ImageFormat, ImageReader};
     use rstest::{fixture, rstest};
     use tracing::Level;
     use url::Url;
-    use zune_image::{codecs::ImageFormat, image::Image};

     use super::*;
     use crate::test_utils::{
-        app::TestingAppContext,
+        app::{TestingAppContext, TestingAppContextPreset},
         crypto::build_testing_crypto_service,
         database::build_testing_database_service,
         mikan::{
             MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential,
             build_testing_mikan_credential_form,
         },
-        storage::build_testing_storage_service,
         tracing::try_init_testing_tracing,
     };
@@ -1049,12 +1096,14 @@
             scrape_mikan_poster_data_from_image_url(&mikan_client, bangumi_poster_url).await?;
         resources_mock.shared_resource_mock.expect(1);

-        let image = Image::read(bgm_poster_data.to_vec(), Default::default());
+        let image = {
+            let c = Cursor::new(bgm_poster_data);
+            ImageReader::new(c)
+        };
+        let image_format = image.with_guessed_format().ok().and_then(|i| i.format());
         assert!(
-            image.is_ok_and(|img| img
-                .metadata()
-                .get_image_format()
-                .is_some_and(|fmt| matches!(fmt, ImageFormat::JPEG))),
+            image_format.is_some_and(|fmt| matches!(fmt, ImageFormat::Jpeg)),
             "should start with valid jpeg data magic number"
         );
@@ -1068,37 +1117,47 @@
         let mikan_base_url = mikan_server.base_url().clone();

+        let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
+            mikan_base_url: mikan_base_url.to_string(),
+            database_config: None,
+        })
+        .await?;
+
         let resources_mock = mikan_server.mock_resources_with_doppel();

-        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
-        let storage_service = build_testing_storage_service().await?;
-        let storage_operator = storage_service.get_operator()?;
-
         let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;
-        let bgm_poster = scrape_mikan_poster_meta_from_image_url(
-            &mikan_client,
-            &storage_service,
-            bangumi_poster_url,
-        )
-        .await?;
+        let bgm_poster =
+            scrape_mikan_poster_meta_from_image_url(app_ctx.as_ref(), bangumi_poster_url).await?;

         resources_mock.shared_resource_mock.expect(1);

+        let storage_service = app_ctx.storage();
         let storage_fullname = storage_service.build_public_object_path(
             StorageContentCategory::Image,
             MIKAN_POSTER_BUCKET_KEY,
             "202309/5ce9fed1.jpg",
         );
-        let storage_fullename_str = storage_fullname.as_str();

-        assert!(storage_operator.exists(storage_fullename_str).await?);
+        assert!(
+            storage_service.exists(&storage_fullname).await?.is_some(),
+            "storage_fullename_str = {}, list public = {:?}",
+            &storage_fullname,
+            storage_service.list_public().await?
+        );

-        let expected_data =
-            fs::read("tests/resources/mikan/doppel/images/Bangumi/202309/5ce9fed1.jpg")?;
-        let found_data = storage_operator.read(storage_fullename_str).await?.to_vec();
-        assert_eq!(expected_data, found_data);
+        let bgm_poster_data = storage_service.read(&storage_fullname).await?;
+
+        let image = {
+            let c = Cursor::new(bgm_poster_data.to_vec());
+            ImageReader::new(c)
+        };
+        let image_format = image.with_guessed_format().ok().and_then(|i| i.format());
+        assert!(
+            image_format.is_some_and(|fmt| matches!(fmt, ImageFormat::Jpeg)),
+            "should start with valid jpeg data magic number"
+        );

         Ok(())
     }
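Design note: the three per-format blocks in scrape_mikan_poster_meta_from_image_url differ only in the target extension and the options variant. A behavior-equivalent loop (a sketch, not part of the commit) that would keep the enqueue logic in one place:

    for format in &media_service.config.auto_optimize_formats {
        let (ext, options) = match format {
            AutoOptimizeImageFormat::Webp => ("webp", EncodeImageOptions::Webp(Default::default())),
            AutoOptimizeImageFormat::Avif => ("avif", EncodeImageOptions::Avif(Default::default())),
            AutoOptimizeImageFormat::Jxl => ("jxl", EncodeImageOptions::Jxl(Default::default())),
        };
        let target_path = storage_path.with_extension(ext);
        // Skip formats that already have a generated sibling in storage.
        if storage_service.exists(&target_path).await?.is_none() {
            task_service
                .add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
                    source_path: storage_path.to_string(),
                    target_path: target_path.to_string(),
                    format_options: options,
                }))
                .await?;
        }
    }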

View File

@@ -21,6 +21,7 @@ pub mod errors;
 pub mod extract;
 pub mod graphql;
 pub mod logger;
+pub mod media;
 pub mod message;
 pub mod migrations;
 pub mod models;

View File

@@ -0,0 +1,105 @@
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum AutoOptimizeImageFormat {
    #[serde(rename = "image/webp")]
    Webp,
    #[serde(rename = "image/avif")]
    Avif,
    #[serde(rename = "image/jxl")]
    Jxl,
}

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeWebpOptions {
    pub quality: Option<f32>,
}

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeAvifOptions {
    pub quality: Option<u8>,
    pub speed: Option<u8>,
    pub threads: Option<u8>,
}

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct EncodeJxlOptions {
    pub quality: Option<f32>,
    pub speed: Option<u8>,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(tag = "mime_type")]
pub enum EncodeImageOptions {
    #[serde(rename = "image/webp")]
    Webp(EncodeWebpOptions),
    #[serde(rename = "image/avif")]
    Avif(EncodeAvifOptions),
    #[serde(rename = "image/jxl")]
    Jxl(EncodeJxlOptions),
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MediaConfig {
    #[serde(default = "default_webp_quality")]
    pub webp_quality: f32,
    #[serde(default = "default_avif_quality")]
    pub avif_quality: u8,
    #[serde(default = "default_avif_speed")]
    pub avif_speed: u8,
    #[serde(default = "default_avif_threads")]
    pub avif_threads: u8,
    #[serde(default = "default_jxl_quality")]
    pub jxl_quality: f32,
    #[serde(default = "default_jxl_speed")]
    pub jxl_speed: u8,
    #[serde(default = "default_auto_optimize_formats")]
    pub auto_optimize_formats: Vec<AutoOptimizeImageFormat>,
}

impl Default for MediaConfig {
    fn default() -> Self {
        Self {
            webp_quality: default_webp_quality(),
            avif_quality: default_avif_quality(),
            avif_speed: default_avif_speed(),
            avif_threads: default_avif_threads(),
            jxl_quality: default_jxl_quality(),
            jxl_speed: default_jxl_speed(),
            auto_optimize_formats: default_auto_optimize_formats(),
        }
    }
}

fn default_webp_quality() -> f32 {
    80.0
}

fn default_avif_quality() -> u8 {
    80
}

fn default_avif_speed() -> u8 {
    6
}

fn default_avif_threads() -> u8 {
    1
}

fn default_jxl_quality() -> f32 {
    80.0
}

fn default_jxl_speed() -> u8 {
    7
}

fn default_auto_optimize_formats() -> Vec<AutoOptimizeImageFormat> {
    vec![
        AutoOptimizeImageFormat::Webp,
        // AutoOptimizeImageFormat::Avif, // TOO SLOW
        #[cfg(feature = "jxl")]
        AutoOptimizeImageFormat::Jxl,
    ]
}
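Because every field carries a field-level `#[serde(default = ...)]`, an empty `[media]` table deserializes to the same values as `MediaConfig::default()`. A hedged check (assumes the `toml` crate for brevity; the application itself loads config through figment):

    let cfg: MediaConfig = toml::from_str("").unwrap();
    assert_eq!(cfg.webp_quality, 80.0);
    assert_eq!(cfg.avif_speed, 6);
    assert!(cfg.auto_optimize_formats.contains(&AutoOptimizeImageFormat::Webp));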

View File

@ -0,0 +1,8 @@
mod config;
mod service;
pub use config::{
AutoOptimizeImageFormat, EncodeAvifOptions, EncodeImageOptions, EncodeJxlOptions,
EncodeWebpOptions, MediaConfig,
};
pub use service::MediaService;

View File

@@ -0,0 +1,199 @@
use std::io::Cursor;
use bytes::Bytes;
use image::{GenericImageView, ImageEncoder, ImageReader, codecs::avif::AvifEncoder};
use quirks_path::Path;
use snafu::ResultExt;
use crate::{
errors::{RecorderError, RecorderResult},
media::{EncodeAvifOptions, EncodeJxlOptions, EncodeWebpOptions, MediaConfig},
};
#[derive(Debug)]
pub struct MediaService {
pub config: MediaConfig,
}
impl MediaService {
pub async fn from_config(config: MediaConfig) -> RecorderResult<Self> {
Ok(Self { config })
}
pub fn is_legacy_image_format(&self, ext: &str) -> bool {
matches!(ext, "jpeg" | "jpg" | "png")
}
pub async fn optimize_image_to_webp(
&self,
path: impl AsRef<Path>,
data: impl Into<Bytes>,
options: Option<EncodeWebpOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.and_then(|o| o.quality)
.unwrap_or(self.config.webp_quality);
let data = data.into();
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let img = image_reader.decode()?;
let (width, height) = (img.width(), img.height());
let color = img.color();
let webp_data = if color.has_alpha() {
let rgba_image = img.into_rgba8();
let encoder = webp::Encoder::from_rgba(&rgba_image, width, height);
encoder.encode(quality)
} else {
let rgb_image = img.into_rgb8();
let encoder = webp::Encoder::from_rgb(&rgb_image, width, height);
encoder.encode(quality)
};
Ok(Bytes::from(webp_data.to_vec()))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to webp: {}",
path.as_ref().display()
)
})?
}
pub async fn optimize_image_to_avif(
&self,
path: impl AsRef<Path>,
data: Bytes,
options: Option<EncodeAvifOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.as_ref()
.and_then(|o| o.quality)
.unwrap_or(self.config.avif_quality);
let speed = options
.as_ref()
.and_then(|o| o.speed)
.unwrap_or(self.config.avif_speed);
let threads = options
.as_ref()
.and_then(|o| o.threads)
.unwrap_or(self.config.avif_threads);
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
let mut buf = vec![];
{
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let img = image_reader.decode()?;
let (width, height) = img.dimensions();
let color_type = img.color();
let encoder = AvifEncoder::new_with_speed_quality(&mut buf, speed, quality)
.with_num_threads(Some(threads as usize));
encoder.write_image(img.as_bytes(), width, height, color_type.into())?;
}
Ok(Bytes::from(buf))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to avif: {}",
path.as_ref().display()
)
})?
}
#[cfg(feature = "jxl")]
pub async fn optimize_image_to_jxl(
&self,
path: impl AsRef<Path>,
data: Bytes,
options: Option<EncodeJxlOptions>,
) -> RecorderResult<Bytes> {
let quality = options
.as_ref()
.and_then(|o| o.quality)
.unwrap_or(self.config.jxl_quality);
let speed = options
.as_ref()
.and_then(|o| o.speed)
.unwrap_or(self.config.jxl_speed);
tokio::task::spawn_blocking(move || -> RecorderResult<Bytes> {
use jpegxl_rs::encode::{ColorEncoding, EncoderResult, EncoderSpeed};
let cursor = Cursor::new(data);
let image_reader = ImageReader::new(cursor).with_guessed_format()?;
let image = image_reader.decode()?;
let (width, height) = image.dimensions();
let color = image.color();
let has_alpha = color.has_alpha();
let libjxl_speed = {
match speed {
0 | 1 => EncoderSpeed::Lightning,
2 => EncoderSpeed::Thunder,
3 => EncoderSpeed::Falcon,
4 => EncoderSpeed::Cheetah,
5 => EncoderSpeed::Hare,
6 => EncoderSpeed::Wombat,
7 => EncoderSpeed::Squirrel,
8 => EncoderSpeed::Kitten,
_ => EncoderSpeed::Tortoise,
}
};
let mut encoder_builder = jpegxl_rs::encoder_builder()
.lossless(false)
.has_alpha(has_alpha)
.color_encoding(ColorEncoding::Srgb)
.speed(libjxl_speed)
.jpeg_quality(quality)
.build()?;
let buffer: EncoderResult<u8> = if color.has_alpha() {
let sample = image.into_rgba8();
encoder_builder.encode(&sample, width, height)?
} else {
let sample = image.into_rgb8();
encoder_builder.encode(&sample, width, height)?
};
Ok(Bytes::from(buffer.data))
})
.await
.with_whatever_context::<_, String, RecorderError>(|_| {
format!(
"failed to spawn blocking task to optimize legacy image to avif: {}",
path.as_ref().display()
)
})?
}
#[cfg(not(feature = "jxl"))]
pub async fn optimize_image_to_jxl(
&self,
_path: impl AsRef<Path>,
_data: Bytes,
_options: Option<EncodeJxlOptions>,
) -> RecorderResult<Bytes> {
Err(RecorderError::Whatever {
message: "jxl feature is not enabled".to_string(),
source: None.into(),
})
}
}
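A hedged usage sketch of the webp path (the file name is hypothetical; `path` is only used for error context, and passing `None` falls back to the configured default quality of 80.0):

    let media = MediaService::from_config(MediaConfig::default()).await?;
    let jpeg_bytes = std::fs::read("poster.jpg")?; // hypothetical local file
    let webp_bytes = media
        .optimize_image_to_webp("poster.jpg", jpeg_bytes, None)
        .await?;
    // Webp output is a RIFF container ("RIFF....WEBP").
    assert_eq!(&webp_bytes[0..4], b"RIFF");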

View File

@@ -121,7 +121,6 @@ impl ActiveModel {
         _subscription_id: i32,
     ) -> RecorderResult<Self> {
         let mikan_client = ctx.mikan();
-        let storage_service = ctx.storage();
         let mikan_base_url = mikan_client.base_url();
         let season_comp = SeasonComp::parse_comp(&meta.bangumi_title)
             .ok()
@@ -136,12 +135,8 @@
         );

         let poster_link = if let Some(origin_poster_src) = meta.origin_poster_src.clone() {
-            let poster_meta = scrape_mikan_poster_meta_from_image_url(
-                mikan_client,
-                storage_service,
-                origin_poster_src,
-            )
-            .await?;
+            let poster_meta =
+                scrape_mikan_poster_meta_from_image_url(ctx, origin_poster_src).await?;
             poster_meta.poster_src
         } else {
             None

View File

@@ -5,6 +5,7 @@ use axum::{body::Body, response::Response};
 use axum_extra::{TypedHeader, headers::Range};
 use bytes::Bytes;
 use futures::{Stream, StreamExt};
+use headers_accept::Accept;
 use http::{HeaderValue, StatusCode, header};
 use opendal::{Buffer, Metadata, Operator, Reader, Writer, layers::LoggingLayer};
 use quirks_path::{Path, PathBuf};
@@ -56,22 +57,24 @@
 #[derive(Debug, Clone)]
 pub struct StorageService {
     pub data_dir: String,
+    pub operator: Operator,
 }

 impl StorageService {
     pub async fn from_config(config: StorageConfig) -> RecorderResult<Self> {
         Ok(Self {
             data_dir: config.data_dir.to_string(),
+            operator: Self::get_operator(&config.data_dir)?,
         })
     }

-    pub fn get_operator(&self) -> Result<Operator, opendal::Error> {
+    pub fn get_operator(data_dir: &str) -> Result<Operator, opendal::Error> {
         let op = if cfg!(test) {
             Operator::new(opendal::services::Memory::default())?
                 .layer(LoggingLayer::default())
                 .finish()
         } else {
-            Operator::new(opendal::services::Fs::default().root(&self.data_dir))?
+            Operator::new(opendal::services::Fs::default().root(data_dir))?
                 .layer(LoggingLayer::default())
                 .finish()
         };
@@ -125,7 +128,7 @@
         path: P,
         data: Bytes,
     ) -> Result<StorageStoredUrl, opendal::Error> {
-        let operator = self.get_operator()?;
+        let operator = &self.operator;

         let path = path.into();
@@ -145,7 +148,7 @@
         &self,
         path: P,
     ) -> Result<Option<StorageStoredUrl>, opendal::Error> {
-        let operator = self.get_operator()?;
+        let operator = &self.operator;

         let path = path.to_string();
@@ -157,7 +160,7 @@
     }

     pub async fn read(&self, path: impl AsRef<str>) -> Result<Buffer, opendal::Error> {
-        let operator = self.get_operator()?;
+        let operator = &self.operator;

         let data = operator.read(path.as_ref()).await?;
@@ -165,7 +168,7 @@
     }

     pub async fn reader(&self, path: impl AsRef<str>) -> Result<Reader, opendal::Error> {
-        let operator = self.get_operator()?;
+        let operator = &self.operator;

         let reader = operator.reader(path.as_ref()).await?;
@@ -173,7 +176,7 @@
     }

     pub async fn writer(&self, path: impl AsRef<str>) -> Result<Writer, opendal::Error> {
-        let operator = self.get_operator()?;
+        let operator = &self.operator;

         let writer = operator.writer(path.as_ref()).await?;
@@ -181,13 +184,57 @@
     }

     pub async fn stat(&self, path: impl AsRef<str>) -> Result<Metadata, opendal::Error> {
-        let operator = self.get_operator()?;
+        let operator = &self.operator;

         let metadata = operator.stat(path.as_ref()).await?;

         Ok(metadata)
     }

+    #[cfg(test)]
+    pub async fn list_public(&self) -> Result<Vec<opendal::Entry>, opendal::Error> {
+        use futures::TryStreamExt;
+
+        let lister = self.operator.lister_with("public/").recursive(true).await?;
+        lister.try_collect().await
+    }
+
+    #[cfg(test)]
+    pub async fn list_subscribers(&self) -> Result<Vec<opendal::Entry>, opendal::Error> {
+        use futures::TryStreamExt;
+
+        let lister = self
+            .operator
+            .lister_with("subscribers/")
+            .recursive(true)
+            .await?;
+        lister.try_collect().await
+    }
+
+    #[instrument(skip_all, err, fields(storage_path = %storage_path.as_ref(), range = ?range, accept = ?accept))]
+    pub async fn serve_optimized_image(
+        &self,
+        storage_path: impl AsRef<Path>,
+        range: Option<TypedHeader<Range>>,
+        accept: Accept,
+    ) -> RecorderResult<Response> {
+        let storage_path = Path::new(storage_path.as_ref());
+        for mime_type in accept.media_types() {
+            let acceptable_path = match mime_type.subty().as_str() {
+                "webp" => Some(storage_path.with_extension("webp")),
+                "avif" => Some(storage_path.with_extension("avif")),
+                "jxl" => Some(storage_path.with_extension("jxl")),
+                _ => None,
+            };
+            if let Some(acceptable_path) = acceptable_path
+                && self.exists(&acceptable_path).await?.is_some()
+                && self.stat(&acceptable_path).await?.is_file()
+            {
+                return self.serve_file(acceptable_path, range).await;
+            }
+        }
+        self.serve_file(storage_path, range).await
+    }
+
     #[instrument(skip_all, err, fields(storage_path = %storage_path.as_ref(), range = ?range))]
     pub async fn serve_file(
         &self,
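A hypothetical caller (not shown in this diff) wiring `serve_optimized_image` into a handler: `headers_accept::Accept` implements `headers::Header`, so it can be extracted with `TypedHeader`, and a client sending `Accept: image/avif,image/webp,image/*` is served a pre-optimized sibling when one exists:

    use axum::response::Response;
    use axum_extra::{TypedHeader, headers::Range};
    use headers_accept::Accept;

    async fn serve_poster(
        storage: &StorageService,
        accept: Accept,
        range: Option<TypedHeader<Range>>,
    ) -> RecorderResult<Response> {
        storage
            .serve_optimized_image(
                "public/images/mikan/poster/202309/5ce9fed1.jpg", // hypothetical stored path
                range,
                accept,
            )
            .await
    }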

View File

@@ -1,4 +1,50 @@
+use std::time::Duration;
+
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct TaskConfig {}
+pub struct TaskConfig {
+    #[serde(default = "default_subscriber_task_workers")]
+    pub subscriber_task_concurrency: u32,
+    #[serde(default = "default_system_task_workers")]
+    pub system_task_concurrency: u32,
+    #[serde(default = "default_subscriber_task_timeout")]
+    pub subscriber_task_timeout: Duration,
+    #[serde(default = "default_system_task_timeout")]
+    pub system_task_timeout: Duration,
+}
+
+impl Default for TaskConfig {
+    fn default() -> Self {
+        Self {
+            subscriber_task_concurrency: default_subscriber_task_workers(),
+            system_task_concurrency: default_system_task_workers(),
+            subscriber_task_timeout: default_subscriber_task_timeout(),
+            system_task_timeout: default_system_task_timeout(),
+        }
+    }
+}
+
+pub fn default_subscriber_task_workers() -> u32 {
+    if cfg!(test) {
+        1
+    } else {
+        ((num_cpus::get_physical() as f32 / 2.0).floor() as u32).max(1)
+    }
+}
+
+pub fn default_system_task_workers() -> u32 {
+    if cfg!(test) {
+        1
+    } else {
+        ((num_cpus::get_physical() as f32 / 2.0).floor() as u32).max(1)
+    }
+}
+
+pub fn default_subscriber_task_timeout() -> Duration {
+    Duration::from_secs(3600)
+}
+
+pub fn default_system_task_timeout() -> Duration {
+    Duration::from_secs(3600)
+}
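One subtlety worth noting: `std::time::Duration` goes through serde as a `{ secs, nanos }` struct, so an explicit timeout override in TOML must spell out both fields. A hedged sketch (assumes the `toml` crate for brevity; the application loads config through figment):

    let cfg: TaskConfig = toml::from_str(
        r#"
    subscriber_task_concurrency = 4
    subscriber_task_timeout = { secs = 1800, nanos = 0 }
    "#,
    )
    .unwrap();
    assert_eq!(cfg.subscriber_task_concurrency, 4);
    assert_eq!(cfg.subscriber_task_timeout, std::time::Duration::from_secs(1800));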

View File

@@ -5,10 +5,11 @@ use serde::{Serialize, de::DeserializeOwned};
 use crate::{app::AppContextTrait, errors::RecorderResult};

+pub const SYSTEM_TASK_APALIS_NAME: &str = "system_task";
 pub const SUBSCRIBER_TASK_APALIS_NAME: &str = "subscriber_task";

 #[async_trait::async_trait]
-pub trait SubscriberAsyncTaskTrait: Serialize + DeserializeOwned + Sized {
+pub trait AsyncTaskTrait: Serialize + DeserializeOwned + Sized {
     async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()>;

     async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
@@ -19,7 +20,7 @@ pub trait SubscriberAsyncTaskTrait: Serialize + DeserializeOwned + Sized {
 }

 #[async_trait::async_trait]
-pub trait SubscriberStreamTaskTrait: Serialize + DeserializeOwned + Sized {
+pub trait StreamTaskTrait: Serialize + DeserializeOwned + Sized {
     type Yield: Serialize + DeserializeOwned + Send;

     fn run_stream(

View File

@@ -4,13 +4,16 @@ mod r#extern;
 mod registry;
 mod service;

-pub use core::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberAsyncTaskTrait, SubscriberStreamTaskTrait};
+pub use core::{
+    AsyncTaskTrait, SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, StreamTaskTrait,
+};

 pub use config::TaskConfig;
 pub use r#extern::{ApalisJobs, ApalisSchema};
 pub use registry::{
-    SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum, SubscriberTaskTypeVariant,
-    SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
-    SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask,
+    OptimizeImageTask, SubscriberTask, SubscriberTaskType, SubscriberTaskTypeEnum,
+    SubscriberTaskTypeVariant, SubscriberTaskTypeVariantIter, SyncOneSubscriptionFeedsFullTask,
+    SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask, SystemTask,
+    SystemTaskType, SystemTaskTypeEnum, SystemTaskTypeVariant, SystemTaskTypeVariantIter,
 };
 pub use service::TaskService;

View File

@@ -0,0 +1,53 @@
use std::sync::Arc;

use quirks_path::Path;
use serde::{Deserialize, Serialize};
use tracing::instrument;

use crate::{
    app::AppContextTrait, errors::RecorderResult, media::EncodeImageOptions, task::AsyncTaskTrait,
};

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OptimizeImageTask {
    pub source_path: String,
    pub target_path: String,
    pub format_options: EncodeImageOptions,
}

#[async_trait::async_trait]
impl AsyncTaskTrait for OptimizeImageTask {
    #[instrument(err, skip(ctx))]
    async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
        let storage = ctx.storage();
        let source_path = Path::new(&self.source_path);
        let media_service = ctx.media();

        let image_data = storage.read(source_path).await?;

        match self.format_options {
            EncodeImageOptions::Webp(options) => {
                let data = media_service
                    .optimize_image_to_webp(source_path, image_data.to_bytes(), Some(options))
                    .await?;
                storage.write(self.target_path, data).await?;
            }
            EncodeImageOptions::Avif(options) => {
                let data = media_service
                    .optimize_image_to_avif(source_path, image_data.to_bytes(), Some(options))
                    .await?;
                storage.write(self.target_path, data).await?;
            }
            EncodeImageOptions::Jxl(options) => {
                let data = media_service
                    .optimize_image_to_jxl(source_path, image_data.to_bytes(), Some(options))
                    .await?;
                storage.write(self.target_path, data).await?;
            }
        };

        Ok(())
    }
}
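A hedged enqueue sketch mirroring the call sites added elsewhere in this commit (paths are illustrative); `add_system_task` pushes the request with `max_attempts = 1`, so a failed encode is not retried:

    task_service
        .add_system_task(SystemTask::OptimizeImage(OptimizeImageTask {
            source_path: "public/images/mikan/poster/202309/5ce9fed1.jpg".to_string(),
            target_path: "public/images/mikan/poster/202309/5ce9fed1.webp".to_string(),
            format_options: EncodeImageOptions::Webp(EncodeWebpOptions {
                quality: Some(75.0),
            }),
        }))
        .await?;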

View File

@@ -1,6 +1,8 @@
+mod media;
 mod subscription;

 use std::sync::Arc;

+pub use media::OptimizeImageTask;
 use sea_orm::{DeriveActiveEnum, DeriveDisplay, EnumIter, FromJsonQueryResult};
 use serde::{Deserialize, Serialize};
 pub use subscription::{
@@ -8,11 +10,11 @@ pub use subscription::{
     SyncOneSubscriptionSourcesTask,
 };

-use super::SubscriberAsyncTaskTrait;
 use crate::{
     app::AppContextTrait,
     errors::{RecorderError, RecorderResult},
     models::subscriptions::SubscriptionTrait,
+    task::AsyncTaskTrait,
 };

 #[derive(
@@ -97,3 +99,36 @@ impl SubscriberTask {
         }
     }
 }
+
+#[derive(
+    Clone,
+    Debug,
+    Serialize,
+    Deserialize,
+    PartialEq,
+    Eq,
+    Copy,
+    DeriveActiveEnum,
+    DeriveDisplay,
+    EnumIter,
+)]
+#[sea_orm(rs_type = "String", db_type = "Text")]
+pub enum SystemTaskType {
+    #[serde(rename = "optimize_image")]
+    #[sea_orm(string_value = "optimize_image")]
+    OptimizeImage,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, FromJsonQueryResult)]
+pub enum SystemTask {
+    #[serde(rename = "optimize_image")]
+    OptimizeImage(OptimizeImageTask),
+}
+
+impl SystemTask {
+    pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
+        match self {
+            Self::OptimizeImage(task) => task.run(ctx).await,
+        }
+    }
+}
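The serde attributes pin down the queued payload's wire format: `SystemTask` is externally tagged (variant name as the top-level key) while `EncodeImageOptions` is internally tagged by `mime_type`. A hedged sketch of the resulting JSON (assumes `serde_json` is available):

    let task = SystemTask::OptimizeImage(OptimizeImageTask {
        source_path: "a.jpg".to_string(),
        target_path: "a.webp".to_string(),
        format_options: EncodeImageOptions::Webp(EncodeWebpOptions::default()),
    });
    assert_eq!(
        serde_json::to_value(&task).unwrap(),
        serde_json::json!({
            "optimize_image": {
                "source_path": "a.jpg",
                "target_path": "a.webp",
                "format_options": { "mime_type": "image/webp", "quality": null }
            }
        })
    );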

View File

@@ -7,7 +7,7 @@ use crate::{
     app::AppContextTrait,
     errors::RecorderResult,
     models::subscriptions::{self, SubscriptionTrait},
-    task::SubscriberAsyncTaskTrait,
+    task::AsyncTaskTrait,
 };

 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
@@ -20,7 +20,7 @@ impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsIncrementalTa
 }

 #[async_trait::async_trait]
-impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsIncrementalTask {
+impl AsyncTaskTrait for SyncOneSubscriptionFeedsIncrementalTask {
     async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
         self.0.sync_feeds_incremental(ctx).await?;
         Ok(())
@@ -37,7 +37,7 @@ impl From<subscriptions::Subscription> for SyncOneSubscriptionFeedsFullTask {
 }

 #[async_trait::async_trait]
-impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
+impl AsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
     async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
         self.0.sync_feeds_full(ctx).await?;
         Ok(())
@@ -48,7 +48,7 @@ impl SubscriberAsyncTaskTrait for SyncOneSubscriptionFeedsFullTask {
 pub struct SyncOneSubscriptionSourcesTask(pub subscriptions::Subscription);

 #[async_trait::async_trait]
-impl SubscriberAsyncTaskTrait for SyncOneSubscriptionSourcesTask {
+impl AsyncTaskTrait for SyncOneSubscriptionSourcesTask {
     async fn run_async(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
         self.0.sync_sources(ctx).await?;
         Ok(())

View File

@@ -11,28 +11,47 @@ use tokio::sync::RwLock;
 use crate::{
     app::AppContextTrait,
     errors::{RecorderError, RecorderResult},
-    task::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberTask, TaskConfig},
+    task::{
+        SUBSCRIBER_TASK_APALIS_NAME, SYSTEM_TASK_APALIS_NAME, SubscriberTask, TaskConfig,
+        config::{default_subscriber_task_workers, default_system_task_workers},
+        registry::SystemTask,
+    },
 };

 pub struct TaskService {
     pub config: TaskConfig,
     ctx: Arc<dyn AppContextTrait>,
     subscriber_task_storage: Arc<RwLock<PostgresStorage<SubscriberTask>>>,
+    system_task_storage: Arc<RwLock<PostgresStorage<SystemTask>>>,
 }

 impl TaskService {
     pub async fn from_config_and_ctx(
-        config: TaskConfig,
+        mut config: TaskConfig,
         ctx: Arc<dyn AppContextTrait>,
     ) -> RecorderResult<Self> {
+        if config.subscriber_task_concurrency == 0 {
+            config.subscriber_task_concurrency = default_subscriber_task_workers();
+        };
+        if config.system_task_concurrency == 0 {
+            config.system_task_concurrency = default_system_task_workers();
+        };
+
         let pool = ctx.db().get_postgres_connection_pool().clone();
-        let storage_config = Config::new(SUBSCRIBER_TASK_APALIS_NAME);
-        let subscriber_task_storage = PostgresStorage::new_with_config(pool, storage_config);
+        let subscriber_task_storage_config =
+            Config::new(SUBSCRIBER_TASK_APALIS_NAME).set_keep_alive(config.subscriber_task_timeout);
+        let system_task_storage_config =
+            Config::new(SYSTEM_TASK_APALIS_NAME).set_keep_alive(config.system_task_timeout);
+        let subscriber_task_storage =
+            PostgresStorage::new_with_config(pool.clone(), subscriber_task_storage_config);
+        let system_task_storage =
+            PostgresStorage::new_with_config(pool, system_task_storage_config);

         Ok(Self {
             config,
             ctx,
             subscriber_task_storage: Arc::new(RwLock::new(subscriber_task_storage)),
+            system_task_storage: Arc::new(RwLock::new(system_task_storage)),
         })
     }
@@ -45,6 +64,14 @@
         job.run(ctx).await
     }

+    async fn run_system_task(
+        job: SystemTask,
+        data: Data<Arc<dyn AppContextTrait>>,
+    ) -> RecorderResult<()> {
+        let ctx = data.deref().clone();
+        job.run(ctx).await
+    }
+
     pub async fn retry_subscriber_task(&self, job_id: String) -> RecorderResult<()> {
         {
             let mut storage = self.subscriber_task_storage.write().await;
@@ -58,6 +85,19 @@
         Ok(())
     }

+    pub async fn retry_system_task(&self, job_id: String) -> RecorderResult<()> {
+        {
+            let mut storage = self.system_task_storage.write().await;
+            let task_id =
+                TaskId::from_str(&job_id).map_err(|err| RecorderError::InvalidTaskId {
+                    message: err.to_string(),
+                })?;
+            let worker_id = WorkerId::new(SYSTEM_TASK_APALIS_NAME);
+            storage.retry(&worker_id, &task_id).await?;
+        }
+        Ok(())
+    }
+
     pub async fn add_subscriber_task(
         &self,
         _subscriber_id: i32,
@@ -77,11 +117,27 @@
         Ok(task_id)
     }

+    pub async fn add_system_task(&self, system_task: SystemTask) -> RecorderResult<TaskId> {
+        let task_id = {
+            let mut storage = self.system_task_storage.write().await;
+            let sql_context = {
+                let mut c = SqlContext::default();
+                c.set_max_attempts(1);
+                c
+            };
+            let request = Request::new_with_ctx(system_task, sql_context);
+            storage.push_request(request).await?.task_id
+        };
+
+        Ok(task_id)
+    }
+
     pub async fn setup_monitor(&self) -> RecorderResult<Monitor> {
         let mut monitor = Monitor::new();

         {
             let subscriber_task_worker = WorkerBuilder::new(SUBSCRIBER_TASK_APALIS_NAME)
+                .concurrency(self.config.subscriber_task_concurrency as usize)
                 .catch_panic()
                 .enable_tracing()
                 .data(self.ctx.clone())
@@ -91,7 +147,17 @@
                 })
                 .build_fn(Self::run_subscriber_task);

-            monitor = monitor.register(subscriber_task_worker);
+            let system_task_worker = WorkerBuilder::new(SYSTEM_TASK_APALIS_NAME)
+                .concurrency(self.config.system_task_concurrency as usize)
+                .catch_panic()
+                .enable_tracing()
+                .data(self.ctx.clone())
+                .backend(self.system_task_storage.read().await.clone())
+                .build_fn(Self::run_system_task);
+
+            monitor = monitor
+                .register(subscriber_task_worker)
+                .register(system_task_worker);
         }

         Ok(monitor)
@@ -99,13 +165,18 @@
     pub async fn setup_listener(&self) -> RecorderResult<PgListen> {
         let pool = self.ctx.db().get_postgres_connection_pool().clone();

-        let mut subscriber_task_listener = PgListen::new(pool).await?;
+        let mut task_listener = PgListen::new(pool).await?;

         {
             let mut subscriber_task_storage = self.subscriber_task_storage.write().await;
-            subscriber_task_listener.subscribe_with(&mut subscriber_task_storage);
+            task_listener.subscribe_with(&mut subscriber_task_storage);
         }

-        Ok(subscriber_task_listener)
+        {
+            let mut system_task_storage = self.system_task_storage.write().await;
+            task_listener.subscribe_with(&mut system_task_storage);
+        }
+
+        Ok(task_listener)
     }
 }

View File

@ -3,7 +3,17 @@ use std::{fmt::Debug, sync::Arc};
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use typed_builder::TypedBuilder; use typed_builder::TypedBuilder;
use crate::app::AppContextTrait; use crate::{
app::AppContextTrait,
test_utils::{
crypto::build_testing_crypto_service,
database::{TestingDatabaseServiceConfig, build_testing_database_service},
media::build_testing_media_service,
mikan::build_testing_mikan_client,
storage::build_testing_storage_service,
task::build_testing_task_service,
},
};
#[derive(TypedBuilder)] #[derive(TypedBuilder)]
#[builder(field_defaults(default, setter(strip_option)))] #[builder(field_defaults(default, setter(strip_option)))]
@ -17,6 +27,7 @@ pub struct TestingAppContext {
graphql: Option<crate::graphql::GraphQLService>,
storage: Option<crate::storage::StorageService>,
crypto: Option<crate::crypto::CryptoService>,
media: Option<crate::media::MediaService>,
#[builder(default = Arc::new(OnceCell::new()), setter(!strip_option))]
task: Arc<OnceCell<crate::task::TaskService>>,
message: Option<crate::message::MessageService>,
@ -30,6 +41,32 @@ impl TestingAppContext {
pub fn set_task(&self, task: crate::task::TaskService) {
self.task.get_or_init(|| task);
}
pub async fn from_preset(
preset: TestingAppContextPreset,
) -> crate::errors::RecorderResult<Arc<Self>> {
let mikan_client = build_testing_mikan_client(preset.mikan_base_url.clone()).await?;
let db_service =
build_testing_database_service(preset.database_config.unwrap_or_default()).await?;
let crypto_service = build_testing_crypto_service().await?;
let storage_service = build_testing_storage_service().await?;
let media_service = build_testing_media_service().await?;
let app_ctx = Arc::new(
TestingAppContext::builder()
.mikan(mikan_client)
.db(db_service)
.crypto(crypto_service)
.storage(storage_service)
.media(media_service)
.build(),
);
let task_service = build_testing_task_service(app_ctx.clone()).await?;
app_ctx.set_task(task_service);
Ok(app_ctx)
}
}
impl Debug for TestingAppContext {
@ -90,4 +127,13 @@ impl AppContextTrait for TestingAppContext {
fn message(&self) -> &crate::message::MessageService {
self.message.as_ref().expect("should set message")
}
fn media(&self) -> &crate::media::MediaService {
self.media.as_ref().expect("should set media")
}
}
pub struct TestingAppContextPreset {
pub mikan_base_url: String,
pub database_config: Option<TestingDatabaseServiceConfig>,
}
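A sketch of what a test built on the preset might look like; mikan_server here is a stand-in for whatever mock server (e.g. mockito) the test starts, not an API from this commit:
let app_ctx = TestingAppContext::from_preset(TestingAppContextPreset {
mikan_base_url: mikan_server.url(), // hypothetical mock server handle
database_config: None, // falls back to TestingDatabaseServiceConfig::default()
})
.await?;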

View File

@ -17,6 +17,10 @@ impl Default for TestingDatabaseServiceConfig {
pub async fn build_testing_database_service(
config: TestingDatabaseServiceConfig,
) -> RecorderResult<DatabaseService> {
tracing::info!(
"testcontainers feature enabled, building testing database service in a testcontainer..."
);
use testcontainers::{ImageExt, runners::AsyncRunner};
use testcontainers_ext::{ImageDefaultLogConsumerExt, ImagePruneExistedLabelExt};
use testcontainers_modules::postgres::Postgres;
@ -38,6 +42,11 @@ pub async fn build_testing_database_service(
let connection_string =
format!("postgres://konobangu:konobangu@{host_ip}:{host_port}/konobangu");
tracing::debug!(
"testing database service connection string: {}",
connection_string
);
let mut db_service = DatabaseService::from_config(DatabaseConfig {
uri: connection_string,
enable_logging: true,

View File

@ -0,0 +1,8 @@
use crate::{
errors::RecorderResult,
media::{MediaConfig, MediaService},
};
pub async fn build_testing_media_service() -> RecorderResult<MediaService> {
MediaService::from_config(MediaConfig::default()).await
}
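This default-config service is what the static controller below consults via is_legacy_image_format. A hypothetical assertion against it; the exact signature of that helper is assumed here, not taken from the commit:
let media = build_testing_media_service().await?;
assert!(media.is_legacy_image_format("jpg"));
assert!(!media.is_legacy_image_format("webp"));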

View File

@ -1,6 +1,7 @@
pub mod app;
pub mod crypto;
pub mod database;
pub mod media;
pub mod mikan;
pub mod storage;
pub mod task;

View File

@ -9,7 +9,7 @@ use crate::{
pub async fn build_testing_task_service(
ctx: Arc<dyn AppContextTrait>,
) -> RecorderResult<TaskService> {
let config = TaskConfig::default();
let task_service = TaskService::from_config_and_ctx(config, ctx).await?;
Ok(task_service)
}

View File

@ -9,7 +9,7 @@ fn build_testing_tracing_filter(level: Level) -> EnvFilter {
let level = level.as_str().to_lowercase();
let mut filter = EnvFilter::new(format!("{crate_name}[]={level}"));
let mut modules = vec!["mockito", "testcontainers"];
modules.extend(MODULE_WHITELIST.iter());
for module in modules {
filter = filter.add_directive(format!("{module}[]={level}").parse().unwrap());

View File

@ -2,12 +2,14 @@ use std::sync::Arc;
use axum::{
Extension, Router,
extract::{Path, Query, State},
middleware::from_fn_with_state,
response::Response,
routing::get,
};
use axum_extra::{TypedHeader, headers::Range};
use headers_accept::Accept;
use serde::{Deserialize, Serialize};
use crate::{
app::AppContextTrait,
@ -18,33 +20,75 @@ use crate::{
pub const CONTROLLER_PREFIX: &str = "/api/static";
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub enum OptimizeType {
#[serde(rename = "accept")]
AcceptHeader,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct StaticQuery {
optimize: Option<OptimizeType>,
}
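Thanks to the serde rename, the wire format is ?optimize=accept. A quick round-trip sketch using serde_urlencoded (the deserializer behind axum's Query extractor):
let q: StaticQuery = serde_urlencoded::from_str("optimize=accept").unwrap();
assert_eq!(q.optimize, Some(OptimizeType::AcceptHeader));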
async fn serve_subscriber_static(
State(ctx): State<Arc<dyn AppContextTrait>>,
Path((subscriber_id, path)): Path<(i32, String)>,
Extension(auth_user_info): Extension<AuthUserInfo>,
Query(query): Query<StaticQuery>,
range: Option<TypedHeader<Range>>,
accept: Option<TypedHeader<Accept>>,
) -> RecorderResult<Response> {
if subscriber_id != auth_user_info.subscriber_auth.id {
Err(AuthError::PermissionError)?;
}
let storage = ctx.storage();
let media = ctx.media();
let storage_path = storage.build_subscriber_path(subscriber_id, &path);
if query
.optimize
.is_some_and(|optimize| optimize == OptimizeType::AcceptHeader)
&& storage_path
.extension()
.is_some_and(|ext| media.is_legacy_image_format(ext))
&& let Some(TypedHeader(accept)) = accept
{
storage
.serve_optimized_image(storage_path, range, accept)
.await
} else {
storage.serve_file(storage_path, range).await
}
}
async fn serve_public_static(
State(ctx): State<Arc<dyn AppContextTrait>>,
Path(path): Path<String>,
Query(query): Query<StaticQuery>,
range: Option<TypedHeader<Range>>,
accept: Option<TypedHeader<Accept>>,
) -> RecorderResult<Response> {
let storage = ctx.storage();
let media = ctx.media();
let storage_path = storage.build_public_path(&path);
if query
.optimize
.is_some_and(|optimize| optimize == OptimizeType::AcceptHeader)
&& storage_path
.extension()
.is_some_and(|ext| media.is_legacy_image_format(ext))
&& let Some(TypedHeader(accept)) = accept
{
storage
.serve_optimized_image(storage_path, range, accept)
.await
} else {
storage.serve_file(storage_path, range).await
}
}
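Both handlers only hand off to serve_optimized_image when the client explicitly opted in via the query string, the stored file has a legacy image extension, and an Accept header is present; otherwise they fall through to plain file serving. Given the webp and jpegxl crates added to the workspace, the negotiation inside serve_optimized_image presumably boils down to something like this sketch (shown against a raw header string; the real code works with headers_accept::Accept, and this function is not part of the commit):
// Minimal sketch, not the actual implementation: prefer JPEG XL, then WebP.
fn pick_target_format(accept_header: &str) -> Option<&'static str> {
if accept_header.contains("image/jxl") {
Some("jxl") // would require the crate's jxl feature (jpegxl-rs)
} else if accept_header.contains("image/webp") {
Some("webp")
} else {
None // fall back to serving the original bytes
}
}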
pub async fn create(ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {

View File

@ -1,38 +1,45 @@
import { useInject } from "@/infra/di/inject";
import { DOCUMENT } from "@/infra/platform/injection";
import { type ComponentProps, useMemo } from "react";
const URL_PARSE_REGEX = /^([^?#]*)(\?[^#]*)?(#.*)?$/;
function parseURL(url: string) {
const match = url.match(URL_PARSE_REGEX);
if (!match) {
return { other: url, search: "", hash: "" };
}
return {
other: match[1] || "",
search: match[2] || "",
hash: match[3] || "",
};
}
export type ImgProps = Omit<ComponentProps<"img">, "alt"> &
Required<Pick<ComponentProps<"img">, "alt">> & {
optimize?: "accept";
};
export const Img = ({
src: propsSrc,
optimize = "accept",
...props
}: ImgProps) => {
const document = useInject(DOCUMENT);
const src = useMemo(() => {
const baseURI = document?.baseURI;
if (!propsSrc || !baseURI) {
return propsSrc;
}
const { other, search, hash } = parseURL(propsSrc);
const searchParams = new URLSearchParams(search);
searchParams.set("optimize", optimize);
return `${other}?${searchParams.toString()}${hash}`;
}, [propsSrc, optimize, document?.baseURI]);
// biome-ignore lint/nursery/noImgElement: <explanation>
return <img {...props} alt={props.alt} src={src} />;
};
// Removed by this commit: the previous <picture>-based fallback, which rewrote
// legacy extensions to .webp/.avif srcSet candidates client-side (optimize was
// a boolean prop back then).
const LEGACY_IMAGE_REGEX = /\.(jpg|jpeg|png|gif|svg)$/;
export const Img = (props: ImgProps) => {
const src = props.src;
const isLegacy = useMemo(() => src?.match(LEGACY_IMAGE_REGEX), [src]);
const [isError, setIsError] = useState(false);
if (!src) {
// biome-ignore lint/nursery/noImgElement: <explanation>
return <img {...props} alt={props.alt} />;
}
return (
<picture {...props}>
{isLegacy && !isError && (
<>
<source
srcSet={src.replace(LEGACY_IMAGE_REGEX, ".webp")}
type="image/webp"
/>
<source
srcSet={src.replace(LEGACY_IMAGE_REGEX, ".avif")}
type="image/avif"
/>
</>
)}
<img {...props} alt={props.alt} onError={() => setIsError(true)} />
</picture>
);
};

View File

@ -4,7 +4,7 @@ set dotenv-load := true
prepare-dev:
cargo install cargo-binstall
cargo binstall sea-orm-cli cargo-llvm-cov cargo-nextest
# <package-manager> install watchexec just zellij nasm libjxl
prepare-dev-testcontainers:
docker pull linuxserver/qbittorrent:latest
@ -22,7 +22,7 @@ dev-proxy:
pnpm run --parallel --filter=proxy dev
dev-recorder:
watchexec -r -e rs,toml,yaml,json,env -- cargo run -p recorder --bin recorder_cli -- --environment=development --graceful-shutdown=false
dev-recorder-migrate-down:
cargo run -p recorder --bin migrate_down -- --environment development