fix: fix subscriptions api
parent d2aab7369d
commit 8144986a48
.vscode/settings.json (vendored, 4 lines changed)
@@ -39,7 +39,5 @@
       "username": "konobangu"
     }
   ],
-  "rust-analyzer.cargo.extraArgs": [
-    "--all-features"
-  ]
+  "rust-analyzer.cargo.features": "all"
 }
Cargo.lock (generated, 10 lines changed)
@@ -4930,6 +4930,16 @@ dependencies = [
  "yansi",
 ]

+[[package]]
+name = "proxy"
+version = "0.1.0"
+dependencies = [
+ "recorder",
+ "tokio",
+ "tracing",
+ "tracing-subscriber",
+]
+
 [[package]]
 name = "psl-types"
 version = "2.0.11"
Cargo.toml
@@ -8,6 +8,7 @@ members = [
   "packages/fetch",
   "packages/downloader",
   "apps/recorder",
+  "apps/proxy",
 ]
 resolver = "2"

@@ -22,6 +23,8 @@ util = { path = "./packages/util" }
 util-derive = { path = "./packages/util-derive" }
 fetch = { path = "./packages/fetch" }
 downloader = { path = "./packages/downloader" }
+recorder = { path = "./apps/recorder" }
+proxy = { path = "./apps/proxy" }

 reqwest = { version = "0.12", features = [
   "charset",
apps/proxy/.whistle/rules/files/1.mikan_doppel (new file, 1 line)
@@ -0,0 +1 @@
+^https://mikanani.me/*** http://127.0.0.1:5010/$1
@@ -1 +1 @@
-{"filesOrder":["konobangu"],"selectedList":["konobangu"],"disabledDefalutRules":true,"defalutRules":""}
+{"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""}
apps/proxy/Cargo.toml (new file, 19 lines)
@@ -0,0 +1,19 @@
+[package]
+name = "proxy"
+version = "0.1.0"
+edition = "2024"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+[lib]
+name = "proxy"
+path = "src/lib.rs"
+
+[[bin]]
+name = "mikan_doppel"
+path = "src/bin/mikan_doppel.rs"
+
+[dependencies]
+recorder = { workspace = true }
+tokio = { workspace = true }
+tracing-subscriber = { workspace = true }
+tracing = { workspace = true }
@@ -3,8 +3,9 @@
   "version": "0.1.0",
   "private": true,
   "scripts": {
-    "start": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
-    "dev": "pnpm run start"
+    "whistle": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
+    "mikan_doppel": "cargo run -p proxy --bin mikan_doppel",
+    "dev": "npm-run-all -p mikan_doppel whistle"
   },
   "keywords": [],
   "license": "MIT",
apps/proxy/src/bin/mikan_doppel.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
+use std::time::Duration;
+
+use recorder::{errors::RecorderResult, test_utils::mikan::MikanMockServer};
+use tracing::Level;
+
+#[allow(unused_variables)]
+#[tokio::main]
+async fn main() -> RecorderResult<()> {
+    tracing_subscriber::fmt()
+        .with_max_level(Level::DEBUG)
+        .init();
+
+    let mut mikan_server = MikanMockServer::new_with_port(5010).await.unwrap();
+
+    let resources_mock = mikan_server.mock_resources_with_doppel();
+
+    let login_mock = mikan_server.mock_get_login_page();
+
+    loop {
+        tokio::time::sleep(Duration::from_secs(1)).await;
+    }
+}
apps/proxy/src/lib.rs (new file, empty)
apps/recorder/Cargo.toml
@@ -15,7 +15,7 @@ required-features = []

 [features]
 default = []
-playground = ["dep:mockito", "dep:inquire", "dep:color-eyre"]
+playground = ["dep:inquire", "dep:color-eyre"]
 testcontainers = [
   "dep:testcontainers",
   "dep:testcontainers-modules",
@@ -54,7 +54,7 @@ serde_with = { workspace = true }
 moka = { workspace = true }
 chrono = { workspace = true }
 tracing-subscriber = { workspace = true }
-mockito = { workspace = true, optional = true }
+mockito = { workspace = true }

 sea-orm = { version = "1.1", features = [
   "sqlx-sqlite",
@@ -122,11 +122,11 @@ color-eyre = { workspace = true, optional = true }
 inquire = { workspace = true, optional = true }
 percent-encoding = "2.3.1"


 [dev-dependencies]
 serial_test = "3"
 insta = { version = "1", features = ["redactions", "toml", "filters"] }
 rstest = "0.25"
 ctor = "0.4.0"
-mockito = { workspace = true }
 inquire = { workspace = true }
 color-eyre = { workspace = true }
@@ -6,7 +6,7 @@ use inquire::{Password, Text, validator::Validation};
 use recorder::{
     crypto::UserPassCredential,
     extract::mikan::{
-        MikanClient, MikanConfig, MikanRssItem, build_mikan_bangumi_expand_subscribed_url,
+        MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
         extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
         extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
     },
@@ -193,12 +193,12 @@ async fn main() -> Result<()> {
         let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
         rss_items
             .into_iter()
-            .map(MikanRssItem::try_from)
+            .map(MikanRssEpisodeItem::try_from)
             .collect::<Result<Vec<_>, _>>()
     }?;
     for rss_item in rss_items {
         {
-            let episode_homepage_url = rss_item.homepage;
+            let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
             let episode_homepage_doppel_path =
                 MikanDoppelPath::new(episode_homepage_url.clone());
             tracing::info!(title = rss_item.title, "Scraping episode...");
@@ -4,7 +4,7 @@ use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image,
 use recorder::{
     errors::RecorderResult,
     extract::mikan::{
-        MikanClient, MikanConfig, MikanRssItem,
+        MikanClient, MikanConfig, MikanRssEpisodeItem,
         extract_mikan_episode_meta_from_episode_homepage_html,
     },
     test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
@@ -43,15 +43,15 @@ async fn main() -> RecorderResult<()> {
     let subscriber_subscription =
         fs::read("tests/resources/mikan/MyBangumi-2025-spring.rss").await?;
     let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
-    let rss_items: Vec<MikanRssItem> = channel
+    let rss_items: Vec<MikanRssEpisodeItem> = channel
         .items
         .into_iter()
-        .map(MikanRssItem::try_from)
+        .map(MikanRssEpisodeItem::try_from)
         .collect::<Result<Vec<_>, _>>()?;
     for rss_item in rss_items {
         let episode_homepage_meta = {
             tracing::info!(title = rss_item.title, "Scraping episode homepage...");
-            let episode_homepage_url = rss_item.homepage;
+            let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
             let episode_homepage_doppel_path = MikanDoppelPath::new(episode_homepage_url.clone());
             let episode_homepage_data = if !episode_homepage_doppel_path.exists_any() {
                 let episode_homepage_data =
@@ -6,7 +6,7 @@ use tracing::instrument;

 use super::{builder::AppBuilder, context::AppContextTrait};
 use crate::{
-    errors::RecorderResult,
+    errors::{RecorderError, RecorderResult},
     web::{
         controller::{self, core::ControllerTrait},
         middleware::default_middleware_stack,
@@ -71,12 +71,38 @@ impl App {
             .with_state(context.clone())
             .into_make_service_with_connect_info::<SocketAddr>();

+        let task = context.task();
+
+        tokio::try_join!(
+            async {
                 axum::serve(listener, router)
                     .with_graceful_shutdown(async move {
                         Self::shutdown_signal().await;
-                        tracing::info!("shutting down...");
+                        tracing::info!("axum shutting down...");
                     })
                     .await?;
+                Ok::<(), RecorderError>(())
+            },
+            async {
+                let monitor = task.setup_monitor().await?;
+
+                monitor
+                    .run_with_signal(async move {
+                        Self::shutdown_signal().await;
+                        tracing::info!("apalis shutting down...");
+                        Ok(())
+                    })
+                    .await?;
+
+                Ok::<(), RecorderError>(())
+            },
+            async {
+                let listener = task.setup_listener().await?;
+                listener.listen().await?;
+
+                Ok::<(), RecorderError>(())
+            }
+        )?;
+
         Ok(())
     }
@@ -11,14 +11,16 @@ use super::DatabaseConfig;
 use crate::{errors::RecorderResult, migrations::Migrator};

 pub struct DatabaseService {
+    pub config: DatabaseConfig,
     connection: DatabaseConnection,
-    #[cfg(all(any(test, feature = "playground"), feature = "testcontainers"))]
+    #[cfg(feature = "testcontainers")]
     pub container:
         Option<testcontainers::ContainerAsync<testcontainers_modules::postgres::Postgres>>,
 }

 impl DatabaseService {
     pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> {
+        let db_config = config.clone();
         let mut opt = ConnectOptions::new(&config.uri);
         opt.max_connections(config.max_connections)
             .min_connections(config.min_connections)
@@ -50,8 +52,9 @@ impl DatabaseService {

         let me = Self {
             connection: db,
-            #[cfg(all(any(test, feature = "playground"), feature = "testcontainers"))]
+            #[cfg(feature = "testcontainers")]
             container: None,
+            config: db_config,
         };

         if config.auto_migrate {
@@ -78,7 +78,7 @@ pub enum RecorderError {
     },
     #[snafu(transparent)]
     HttpClientError { source: HttpClientError },
-    #[cfg(all(any(test, feature = "playground"), feature = "testcontainers"))]
+    #[cfg(feature = "testcontainers")]
     #[snafu(transparent)]
     TestcontainersError {
         source: testcontainers::TestcontainersError,
@@ -22,7 +22,7 @@ pub use subscription::{
 };
 pub use web::{
     MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
-    MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssItem,
+    MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssEpisodeItem,
     MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionRssUrlMeta,
     build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url,
     build_mikan_bangumi_subscription_rss_url, build_mikan_episode_homepage_url,
@@ -20,10 +20,10 @@ use crate::{
     app::AppContextTrait,
     errors::{RecorderError, RecorderResult},
     extract::mikan::{
-        MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanRssItem,
-        MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionRssUrlMeta,
-        build_mikan_bangumi_subscription_rss_url, build_mikan_season_flow_url,
-        build_mikan_subscriber_subscription_rss_url,
+        MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
+        MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
+        MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
+        build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
         scrape_mikan_episode_meta_from_episode_homepage_url,
     },
     models::{
@@ -35,10 +35,11 @@ use crate::{
 #[tracing::instrument(err, skip(ctx, rss_item_list))]
 async fn sync_mikan_feeds_from_rss_item_list(
     ctx: &dyn AppContextTrait,
-    rss_item_list: Vec<MikanRssItem>,
+    rss_item_list: Vec<MikanRssEpisodeItem>,
     subscriber_id: i32,
     subscription_id: i32,
 ) -> RecorderResult<()> {
+    let mikan_base_url = ctx.mikan().base_url().clone();
     let (new_episode_meta_list, existed_episode_hash2id_map) = {
         let existed_episode_hash2id_map = episodes::Model::get_existed_mikan_episode_list(
             ctx,
@@ -60,7 +61,7 @@ async fn sync_mikan_feeds_from_rss_item_list(
     }) {
         let episode_meta = scrape_mikan_episode_meta_from_episode_homepage_url(
             mikan_client,
-            to_insert_rss_item.homepage,
+            to_insert_rss_item.build_homepage_url(mikan_base_url.clone()),
         )
         .await?;
         new_episode_meta_list.push(episode_meta);
@@ -215,7 +216,7 @@ impl MikanSubscriberSubscription {
     async fn get_rss_item_list_from_source_url(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssItem>> {
+    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
         let mikan_base_url = ctx.mikan().base_url().clone();
         let rss_url = build_mikan_subscriber_subscription_rss_url(
             mikan_base_url.clone(),
@@ -227,7 +228,7 @@ impl MikanSubscriberSubscription {

         let mut result = vec![];
         for (idx, item) in channel.items.into_iter().enumerate() {
-            let item = MikanRssItem::try_from(item)
+            let item = MikanRssEpisodeItem::try_from(item)
                 .with_whatever_context::<_, String, RecorderError>(|_| {
                     format!("failed to extract rss item at idx {idx}")
                 })?;
@@ -240,7 +241,7 @@ impl MikanSubscriberSubscription {
     async fn get_rss_item_list_from_subsribed_url_rss_link(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssItem>> {
+    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
         let subscribed_bangumi_list =
             bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.id).await?;

@@ -259,7 +260,7 @@ impl MikanSubscriberSubscription {
         let channel = rss::Channel::read_from(&bytes[..])?;

         for (idx, item) in channel.items.into_iter().enumerate() {
-            let item = MikanRssItem::try_from(item)
+            let item = MikanRssEpisodeItem::try_from(item)
                 .with_whatever_context::<_, String, RecorderError>(|_| {
                     format!("failed to extract rss item at idx {idx}")
                 })?;
@@ -395,7 +396,7 @@ impl MikanSeasonSubscription {
     async fn get_rss_item_list_from_subsribed_url_rss_link(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssItem>> {
+    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
         let db = ctx.db();

         let subscribed_bangumi_list = bangumi::Entity::find()
@@ -422,7 +423,7 @@ impl MikanSeasonSubscription {
         let channel = rss::Channel::read_from(&bytes[..])?;

         for (idx, item) in channel.items.into_iter().enumerate() {
-            let item = MikanRssItem::try_from(item)
+            let item = MikanRssEpisodeItem::try_from(item)
                 .with_whatever_context::<_, String, RecorderError>(|_| {
                     format!("failed to extract rss item at idx {idx}")
                 })?;
@@ -499,7 +500,7 @@ impl MikanBangumiSubscription {
     async fn get_rss_item_list_from_source_url(
         &self,
         ctx: &dyn AppContextTrait,
-    ) -> RecorderResult<Vec<MikanRssItem>> {
+    ) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
         let mikan_base_url = ctx.mikan().base_url().clone();
         let rss_url = build_mikan_bangumi_subscription_rss_url(
             mikan_base_url.clone(),
@@ -512,7 +513,7 @@ impl MikanBangumiSubscription {

         let mut result = vec![];
         for (idx, item) in channel.items.into_iter().enumerate() {
-            let item = MikanRssItem::try_from(item)
+            let item = MikanRssEpisodeItem::try_from(item)
                 .with_whatever_context::<_, String, RecorderError>(|_| {
                     format!("failed to extract rss item at idx {idx}")
                 })?;
@@ -522,106 +523,216 @@ impl MikanBangumiSubscription {
     }
 }

-// #[cfg(test)]
-// mod tests {
-//     use std::assert_matches::assert_matches;
-//     use downloader::bittorrent::BITTORRENT_MIME_TYPE;
-//     use rstest::rstest;
-//     use url::Url;
-//     use crate::{
-//         errors::RecorderResult,
-//         extract::mikan::{
-//             MikanBangumiIndexRssChannel, MikanBangumiRssChannel,
-//             MikanRssChannel, build_mikan_bangumi_subscription_rss_url,
-//             extract_mikan_rss_channel_from_rss_link, },
-//         test_utils::mikan::build_testing_mikan_client,
-//     };
-//     #[rstest]
-//     #[tokio::test]
-//     async fn test_parse_mikan_rss_channel_from_rss_link() ->
-// RecorderResult<()> { let mut mikan_server =
-// mockito::Server::new_async().await;
-//         let mikan_base_url = Url::parse(&mikan_server.url())?;
-//         let mikan_client =
-// build_testing_mikan_client(mikan_base_url.clone()).await?;
-//         {
-//             let bangumi_rss_url = build_mikan_bangumi_subscription_rss_url(
-//                 mikan_base_url.clone(),
-//                 "3141",
-//                 Some("370"),
-//             );
-//             let bangumi_rss_mock = mikan_server
-//                 .mock("GET", bangumi_rss_url.path())
-//
-// .with_body_from_file("tests/resources/mikan/Bangumi-3141-370.rss")
-//                 .match_query(mockito::Matcher::Any)
-//                 .create_async()
-//                 .await;
-//             let channel =
-// scrape_mikan_rss_channel_from_rss_link(&mikan_client, bangumi_rss_url)
-//                 .await
-//                 .expect("should get mikan channel from rss url");
-//             assert_matches!(
-//                 &channel,
-//                 MikanRssChannel::Bangumi(MikanBangumiRssChannel { .. })
-//             );
-//             assert_matches!(&channel.name(), Some("葬送的芙莉莲"));
-//             let items = channel.items();
-//             let first_sub_item = items
-//                 .first()
-//                 .expect("mikan subscriptions should have at least one subs");
-//             assert_eq!(first_sub_item.mime, BITTORRENT_MIME_TYPE);
-//             assert!(
-//                 &first_sub_item
-//                     .homepage
-//                     .as_str()
-//                     .starts_with("https://mikanani.me/Home/Episode")
-//             );
-//             let name = first_sub_item.title.as_str();
-//             assert!(name.contains("葬送的芙莉莲"));
-//             bangumi_rss_mock.expect(1);
-//         }
-//         {
-//             let bangumi_rss_url =
-// mikan_base_url.join("/RSS/Bangumi?bangumiId=3416")?;
-//             let bangumi_rss_mock = mikan_server
-//                 .mock("GET", bangumi_rss_url.path())
-//                 .match_query(mockito::Matcher::Any)
-//
-// .with_body_from_file("tests/resources/mikan/Bangumi-3416.rss")
-//                 .create_async()
-//                 .await;
-//             let channel =
-// scrape_mikan_rss_channel_from_rss_link(&mikan_client, bangumi_rss_url)
-//                 .await
-//                 .expect("should get mikan channel from rss url");
-//             assert_matches!(
-//                 &channel,
-//                 MikanRssChannel::BangumiIndex(MikanBangumiIndexRssChannel {
-// .. })         );
-//             assert_matches!(&channel.name(), Some("叹气的亡灵想隐退"));
-//             bangumi_rss_mock.expect(1);
-//         }
-//         Ok(())
-//     }
-// }
+#[cfg(test)]
+#[allow(unused_variables)]
+mod tests {
+    use std::sync::Arc;
+
+    use rstest::{fixture, rstest};
+    use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait};
+    use tracing::Level;
+
+    use crate::{
+        app::AppContextTrait,
+        errors::RecorderResult,
+        extract::mikan::{
+            MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
+            MikanSubscriberSubscriptionRssUrlMeta,
+        },
+        models::{
+            bangumi,
+            subscriptions::{self, SubscriptionTrait},
+        },
+        test_utils::{
+            app::TestingAppContext,
+            crypto::build_testing_crypto_service,
+            database::build_testing_database_service,
+            mikan::{
+                MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential_form,
+            },
+            storage::build_testing_storage_service,
+            tracing::try_init_testing_tracing,
+        },
+    };
+
+    struct TestingResources {
+        pub app_ctx: Arc<dyn AppContextTrait>,
+        pub mikan_server: MikanMockServer,
+    }
+
+    async fn build_testing_app_context() -> RecorderResult<TestingResources> {
+        let mikan_server = MikanMockServer::new().await?;
+
+        let mikan_base_url = mikan_server.base_url().clone();
+
+        let app_ctx = {
+            let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
+            let db_service = build_testing_database_service(Default::default()).await?;
+            let crypto_service = build_testing_crypto_service().await?;
+            let storage_service = build_testing_storage_service().await?;
+            let app_ctx = TestingAppContext::builder()
+                .mikan(mikan_client)
+                .db(db_service)
+                .crypto(crypto_service)
+                .storage(storage_service)
+                .build();
+
+            Arc::new(app_ctx)
+        };
+
+        Ok(TestingResources {
+            app_ctx,
+            mikan_server,
+        })
+    }
+
+    #[fixture]
+    fn before_each() {
+        try_init_testing_tracing(Level::DEBUG);
+    }
+
+    #[rstest]
+    #[tokio::test]
+    async fn test_mikan_season_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
+        let TestingResources {
+            app_ctx,
+            mut mikan_server,
+        } = build_testing_app_context().await?;
+
+        let _resources_mock = mikan_server.mock_resources_with_doppel();
+
+        let _login_mock = mikan_server.mock_get_login_page();
+
+        let mikan_client = app_ctx.mikan();
+
+        let subscriber_id = 1;
+
+        let credential = mikan_client
+            .submit_credential_form(
+                app_ctx.as_ref(),
+                subscriber_id,
+                build_testing_mikan_credential_form(),
+            )
+            .await?;
+
+        let subscription_am = subscriptions::ActiveModel {
+            display_name: ActiveValue::Set("test subscription".to_string()),
+            subscriber_id: ActiveValue::Set(subscriber_id),
+            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSeason),
+            source_url: ActiveValue::Set(
+                MikanSeasonFlowUrlMeta {
+                    year: 2025,
+                    season_str: MikanSeasonStr::Spring,
+                }
+                .build_season_flow_url(mikan_server.base_url().clone())
+                .to_string(),
+            ),
+            enabled: ActiveValue::Set(true),
+            credential_id: ActiveValue::Set(Some(credential.id)),
+            ..Default::default()
+        };
+
+        let subscription_model = subscription_am.insert(app_ctx.db()).await?;
+
+        let subscription = subscriptions::Subscription::try_from_model(&subscription_model)?;
+
+        {
+            subscription.sync_feeds_incremental(app_ctx.clone()).await?;
+            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
+
+            assert!(bangumi_list.is_empty());
+        }
+
+        {
+            subscription.sync_feeds_full(app_ctx.clone()).await?;
+            let bangumi_list = bangumi::Entity::find().all(app_ctx.db()).await?;
+
+            assert!(!bangumi_list.is_empty());
+        }
+
+        Ok(())
+    }
+
+    #[rstest]
+    #[tokio::test]
+    async fn test_mikan_subscriber_subscription_sync_feeds_incremental(
+        before_each: (),
+    ) -> RecorderResult<()> {
+        let TestingResources {
+            app_ctx,
+            mut mikan_server,
+        } = build_testing_app_context().await?;
+
+        let _resources_mock = mikan_server.mock_resources_with_doppel();
+
+        let _login_mock = mikan_server.mock_get_login_page();
+
+        let subscriber_id = 1;
+
+        let subscription_am = subscriptions::ActiveModel {
+            display_name: ActiveValue::Set("test subscription".to_string()),
+            subscriber_id: ActiveValue::Set(subscriber_id),
+            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSubscriber),
+            source_url: ActiveValue::Set(
+                MikanSubscriberSubscriptionRssUrlMeta {
+                    mikan_subscription_token: "123".into(),
+                }
+                .build_rss_url(mikan_server.base_url().clone())
+                .to_string(),
+            ),
+            enabled: ActiveValue::Set(true),
+            ..Default::default()
+        };
+
+        let subscription_model = subscription_am.insert(app_ctx.db()).await?;
+
+        let subscription_task = subscriptions::Subscription::try_from_model(&subscription_model)?;
+
+        subscription_task
+            .sync_feeds_incremental(app_ctx.clone())
+            .await?;
+
+        Ok(())
+    }
+
+    #[rstest]
+    #[tokio::test]
+    async fn test_mikan_bangumi_subscription_sync_feeds(before_each: ()) -> RecorderResult<()> {
+        let TestingResources {
+            app_ctx,
+            mut mikan_server,
+        } = build_testing_app_context().await?;
+
+        let _resources_mock = mikan_server.mock_resources_with_doppel();
+
+        let _login_mock = mikan_server.mock_get_login_page();
+
+        let subscriber_id = 1;
+
+        let subscription_am = subscriptions::ActiveModel {
+            display_name: ActiveValue::Set("test subscription".to_string()),
+            subscriber_id: ActiveValue::Set(subscriber_id),
+            category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanBangumi),
+            source_url: ActiveValue::Set(
+                MikanBangumiHash {
+                    mikan_bangumi_id: "3600".into(),
+                    mikan_fansub_id: "370".into(),
+                }
+                .build_rss_url(mikan_server.base_url().clone())
+                .to_string(),
+            ),
+            enabled: ActiveValue::Set(true),
+            ..Default::default()
+        };
+
+        let subscription_model = subscription_am.insert(app_ctx.db()).await?;
+
+        let subscription_task = subscriptions::Subscription::try_from_model(&subscription_model)?;
+
+        subscription_task
+            .sync_feeds_incremental(app_ctx.clone())
+            .await?;
+
+        Ok(())
+    }
+}
@@ -32,9 +32,8 @@ use crate::{
 };

 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
-pub struct MikanRssItem {
+pub struct MikanRssEpisodeItem {
     pub title: String,
-    pub homepage: Url,
     pub url: Url,
     pub content_length: Option<u64>,
     pub mime: String,
@@ -42,7 +41,13 @@ pub struct MikanRssItem {
     pub mikan_episode_id: String,
 }

-impl TryFrom<rss::Item> for MikanRssItem {
+impl MikanRssEpisodeItem {
+    pub fn build_homepage_url(&self, mikan_base_url: Url) -> Url {
+        build_mikan_episode_homepage_url(mikan_base_url, &self.mikan_episode_id)
+    }
+}
+
+impl TryFrom<rss::Item> for MikanRssEpisodeItem {
     type Error = RecorderError;

     fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
@@ -83,9 +88,8 @@ impl TryFrom<rss::Item> for MikanRssItem {
             RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
         })?;

-        Ok(MikanRssItem {
+        Ok(MikanRssEpisodeItem {
             title,
-            homepage,
             url: enclosure_url,
             content_length: enclosure.length.parse().ok(),
             mime: mime_type,
@@ -436,6 +440,10 @@ impl MikanSeasonFlowUrlMeta {
             None
         }
     }
+
+    pub fn build_season_flow_url(self, mikan_base_url: Url) -> Url {
+        build_mikan_season_flow_url(mikan_base_url, self.year, self.season_str)
+    }
 }

 pub fn build_mikan_bangumi_homepage_url(
     mikan_base_url: Url,
@@ -511,6 +519,7 @@ pub fn extract_mikan_episode_meta_from_episode_homepage_html(
         .select(&Selector::parse("title").unwrap())
         .next()
         .map(extract_inner_text_from_element_ref)
+        .map(|s| s.replace(" - Mikan Project", ""))
         .ok_or_else(|| {
             RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title"))
         })?;
@@ -543,7 +552,7 @@ pub fn extract_mikan_episode_meta_from_episode_homepage_html(
         })
     });

-    tracing::trace!(
+    tracing::debug!(
         bangumi_title,
         mikan_bangumi_id,
         episode_title,
@@ -566,7 +575,7 @@ pub fn extract_mikan_episode_meta_from_episode_homepage_html(
     })
 }

-#[instrument(skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))]
+#[instrument(err, skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))]
 pub async fn scrape_mikan_episode_meta_from_episode_homepage_url(
     http_client: &MikanClient,
     mikan_episode_homepage_url: Url,
@@ -1,5 +1,5 @@
 pub mod parser;

 pub use parser::{
-    extract_season_from_title_body, parse_episode_meta_from_raw_name, RawEpisodeMeta,
+    RawEpisodeMeta, extract_episode_meta_from_raw_name, extract_season_from_title_body,
 };
@@ -261,7 +261,7 @@ pub fn check_is_movie(title: &str) -> bool {
     MOVIE_TITLE_RE.is_match(title)
 }

-pub fn parse_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
+pub fn extract_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMeta> {
     let raw_title = s.trim();
     let raw_title_without_ch_brackets = replace_ch_bracket_to_en(raw_title);
     let fansub = extract_fansub(&raw_title_without_ch_brackets);
@@ -321,11 +321,11 @@ pub fn parse_episode_meta_from_raw_name(s: &str) -> RecorderResult<RawEpisodeMet
 #[cfg(test)]
 mod tests {

-    use super::{RawEpisodeMeta, parse_episode_meta_from_raw_name};
+    use super::{RawEpisodeMeta, extract_episode_meta_from_raw_name};

     fn test_raw_ep_parser_case(raw_name: &str, expected: &str) {
         let expected: Option<RawEpisodeMeta> = serde_json::from_str(expected).unwrap_or_default();
-        let found = parse_episode_meta_from_raw_name(raw_name).ok();
+        let found = extract_episode_meta_from_raw_name(raw_name).ok();

         if expected != found {
             println!(
@@ -11,7 +11,7 @@ use crate::{
     app::AppContextTrait,
     auth::AuthUserInfo,
     models::subscriptions::{self, SubscriptionTrait},
-    task::SubscriberTaskPayload,
+    task::SubscriberTask,
 };

 #[derive(DynamicGraphql, Serialize, Deserialize, Clone, Debug)]
@@ -106,7 +106,7 @@ pub fn register_subscriptions_to_schema(mut builder: SeaographyBuilder) -> Seaog
                 let task_id = task_service
                     .add_subscriber_task(
                         auth_user_info.subscriber_auth.subscriber_id,
-                        SubscriberTaskPayload::SyncOneSubscriptionFeedsIncremental(
+                        SubscriberTask::SyncOneSubscriptionFeedsIncremental(
                             subscription.into(),
                         ),
                     )
@@ -156,9 +156,7 @@ pub fn register_subscriptions_to_schema(mut builder: SeaographyBuilder) -> Seaog
                 let task_id = task_service
                     .add_subscriber_task(
                         auth_user_info.subscriber_auth.subscriber_id,
-                        SubscriberTaskPayload::SyncOneSubscriptionFeedsFull(
-                            subscription.into(),
-                        ),
+                        SubscriberTask::SyncOneSubscriptionFeedsFull(subscription.into()),
                     )
                     .await?;

@@ -206,7 +204,7 @@ pub fn register_subscriptions_to_schema(mut builder: SeaographyBuilder) -> Seaog
                 let task_id = task_service
                     .add_subscriber_task(
                         auth_user_info.subscriber_auth.subscriber_id,
-                        SubscriberTaskPayload::SyncOneSubscriptionSources(subscription.into()),
+                        SubscriberTask::SyncOneSubscriptionSources(subscription.into()),
                     )
                     .await?;

@@ -26,6 +26,5 @@ pub mod migrations;
 pub mod models;
 pub mod storage;
 pub mod task;
-#[cfg(any(test, feature = "playground"))]
 pub mod test_utils;
 pub mod web;
@@ -5,4 +5,4 @@ pub mod service;
 pub use core::{LogFormat, LogLevel, LogRotation};

 pub use config::{LoggerConfig, LoggerFileAppender};
-pub use service::LoggerService;
+pub use service::{LoggerService, MODULE_WHITELIST};
@@ -13,7 +13,7 @@ use super::{LogFormat, LogLevel, LogRotation, LoggerConfig};
 use crate::errors::RecorderResult;

 // Function to initialize the logger based on the provided configuration
-const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sqlx::query", "sidekiq"];
+pub const MODULE_WHITELIST: &[&str] = &["sea_orm_migration", "tower_http", "sea_orm", "sea_query"];

 // Keep nonblocking file appender work guard
 static NONBLOCKING_WORK_GUARD_KEEP: OnceLock<WorkerGuard> = OnceLock::new();
@@ -53,7 +53,6 @@ pub enum Bangumi {
     PosterLink,
     SavePath,
     Homepage,
-    Extra,
 }

 #[derive(DeriveIden)]
@@ -106,7 +106,6 @@ impl MigrationTrait for Migration {
                     .col(text_null(Bangumi::PosterLink))
                     .col(text_null(Bangumi::SavePath))
                     .col(text_null(Bangumi::Homepage))
-                    .col(json_binary_null(Bangumi::Extra))
                     .foreign_key(
                         ForeignKey::create()
                             .name("fk_bangumi_subscriber_id")
@@ -209,7 +208,7 @@ impl MigrationTrait for Migration {
             .create_index(
                 Index::create()
                     .if_not_exists()
-                    .name("index_subscription_bangumi_subscriber_id")
+                    .name("idx_subscription_bangumi_subscriber_id")
                     .table(SubscriptionBangumi::Table)
                     .col(SubscriptionBangumi::SubscriberId)
                     .to_owned(),
@@ -235,7 +234,6 @@ impl MigrationTrait for Migration {
                     .col(text_null(Episodes::Homepage))
                     .col(text_null(Episodes::Subtitle))
                     .col(text_null(Episodes::Source))
-                    .col(json_binary_null(Episodes::Extra))
                     .foreign_key(
                         ForeignKey::create()
                             .name("fk_episodes_bangumi_id")
@@ -252,6 +250,15 @@ impl MigrationTrait for Migration {
                             .on_update(ForeignKeyAction::Cascade)
                             .on_delete(ForeignKeyAction::Cascade),
                     )
+                    .index(
+                        Index::create()
+                            .if_not_exists()
+                            .name("idx_episodes_mikan_episode_id_subscriber_id")
+                            .table(Episodes::Table)
+                            .col(Episodes::MikanEpisodeId)
+                            .col(Episodes::SubscriberId)
+                            .unique(),
+                    )
                     .to_owned(),
             )
             .await?;
@@ -267,19 +274,6 @@ impl MigrationTrait for Migration {
             )
             .await?;

-        manager
-            .create_index(
-                Index::create()
-                    .if_not_exists()
-                    .name("idx_episodes_bangumi_id_mikan_episode_id")
-                    .table(Episodes::Table)
-                    .col(Episodes::BangumiId)
-                    .col(Episodes::MikanEpisodeId)
-                    .unique()
-                    .to_owned(),
-            )
-            .await?;
-
         manager
             .create_postgres_auto_update_ts_trigger_for_col(Episodes::Table, GeneralIds::UpdatedAt)
             .await?;
@@ -338,7 +332,7 @@ impl MigrationTrait for Migration {
             .create_index(
                 Index::create()
                     .if_not_exists()
-                    .name("index_subscription_episode_subscriber_id")
+                    .name("idx_subscription_episode_subscriber_id")
                     .table(SubscriptionEpisode::Table)
                     .col(SubscriptionEpisode::SubscriberId)
                     .to_owned(),
@@ -353,7 +347,7 @@ impl MigrationTrait for Migration {
             .drop_index(
                 Index::drop()
                     .if_exists()
-                    .name("index_subscription_episode_subscriber_id")
+                    .name("idx_subscription_episode_subscriber_id")
                     .table(SubscriptionBangumi::Table)
                     .to_owned(),
             )
@@ -380,7 +374,7 @@ impl MigrationTrait for Migration {
             .drop_index(
                 Index::drop()
                     .if_exists()
-                    .name("index_subscription_bangumi_subscriber_id")
+                    .name("idx_subscription_bangumi_subscriber_id")
                     .table(SubscriptionBangumi::Table)
                     .to_owned(),
             )
@@ -35,14 +35,14 @@ AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#,
         ))
         .await?;

-        db.execute_unprepared(&format!(
-            r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscriber_id
-ON apalis.jobs ((job -> 'subscriber_id'))
-WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
-AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
-AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
-        ))
-        .await?;
+        // db.execute_unprepared(&format!(
+        //     r#"CREATE INDEX IF NOT EXISTS idx_apalis_jobs_subscriber_id
+        // ON apalis.jobs (((job -> 'subscriber_id')::integer))
+        // WHERE job_type = '{SUBSCRIBER_TASK_APALIS_NAME}'
+        // AND jsonb_path_exists(job, '$.subscriber_id ? (@.type() == "number")')
+        // AND jsonb_path_exists(job, '$.task_type ? (@.type() == "string")')"#
+        // ))
+        // .await?;

         Ok(())
     }
@@ -17,7 +17,7 @@ use crate::{
             MikanBangumiHash, MikanBangumiMeta, build_mikan_bangumi_subscription_rss_url,
             scrape_mikan_poster_meta_from_image_url,
         },
-        rawname::parse_episode_meta_from_raw_name,
+        rawname::extract_season_from_title_body,
     },
 };

@@ -29,18 +29,6 @@ pub struct BangumiFilter {
     pub group: Option<Vec<String>>,
 }

-#[derive(
-    Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
-)]
-pub struct BangumiExtra {
-    pub name_zh: Option<String>,
-    pub s_name_zh: Option<String>,
-    pub name_en: Option<String>,
-    pub s_name_en: Option<String>,
-    pub name_jp: Option<String>,
-    pub s_name_jp: Option<String>,
-}
-
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
 #[sea_orm(table_name = "bangumi")]
 pub struct Model {
@@ -63,7 +51,6 @@ pub struct Model {
     pub poster_link: Option<String>,
     pub save_path: Option<String>,
     pub homepage: Option<String>,
-    pub extra: Option<BangumiExtra>,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -135,8 +122,7 @@ impl ActiveModel {
         let mikan_client = ctx.mikan();
         let storage_service = ctx.storage();
         let mikan_base_url = mikan_client.base_url();
-        let rawname_meta = parse_episode_meta_from_raw_name(&meta.bangumi_title)?;
+        let (_, season_raw, season_index) = extract_season_from_title_body(&meta.bangumi_title);

         let rss_url = build_mikan_bangumi_subscription_rss_url(
             mikan_base_url.clone(),
@@ -163,20 +149,12 @@ impl ActiveModel {
             subscriber_id: ActiveValue::Set(subscriber_id),
             display_name: ActiveValue::Set(meta.bangumi_title.clone()),
             raw_name: ActiveValue::Set(meta.bangumi_title),
-            season: ActiveValue::Set(rawname_meta.season),
-            season_raw: ActiveValue::Set(rawname_meta.season_raw),
+            season: ActiveValue::Set(season_index),
+            season_raw: ActiveValue::Set(season_raw),
             fansub: ActiveValue::Set(Some(meta.fansub)),
             poster_link: ActiveValue::Set(poster_link),
             homepage: ActiveValue::Set(Some(meta.homepage.to_string())),
             rss_link: ActiveValue::Set(Some(rss_url.to_string())),
-            extra: ActiveValue::Set(Some(BangumiExtra {
-                name_zh: rawname_meta.name_zh,
-                name_en: rawname_meta.name_en,
-                name_jp: rawname_meta.name_jp,
-                s_name_en: rawname_meta.name_en_no_season,
-                s_name_jp: rawname_meta.name_jp_no_season,
-                s_name_zh: rawname_meta.name_zh_no_season,
-            })),
             ..Default::default()
         })
     }
@@ -218,15 +196,16 @@ impl Model {
                 Expr::col((
                     subscription_bangumi_alias.clone(),
                     subscription_bangumi::Column::SubscriptionId,
-                )),
+                ))
+                .is_not_null(),
                 "is_subscribed",
             )
             .join_as_rev(
                 JoinType::LeftJoin,
                 subscription_bangumi::Relation::Bangumi
                     .def()
-                    .on_condition(move |_left, right| {
-                        Expr::col((right, subscription_bangumi::Column::SubscriptionId))
+                    .on_condition(move |left, _right| {
+                        Expr::col((left, subscription_bangumi::Column::SubscriptionId))
                             .eq(subscription_id)
                             .into_condition()
                     }),
@ -1,7 +1,6 @@
|
|||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use sea_orm::{
|
use sea_orm::{
|
||||||
ActiveValue, FromJsonQueryResult, IntoSimpleExpr, QuerySelect, entity::prelude::*,
|
ActiveValue, IntoSimpleExpr, QuerySelect, entity::prelude::*, sea_query::OnConflict,
|
||||||
sea_query::OnConflict,
|
|
||||||
};
|
};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
@ -11,20 +10,10 @@ use crate::{
|
|||||||
errors::RecorderResult,
|
errors::RecorderResult,
|
||||||
extract::{
|
extract::{
|
||||||
mikan::{MikanEpisodeHash, MikanEpisodeMeta, build_mikan_episode_homepage_url},
|
mikan::{MikanEpisodeHash, MikanEpisodeMeta, build_mikan_episode_homepage_url},
|
||||||
rawname::parse_episode_meta_from_raw_name,
|
rawname::extract_episode_meta_from_raw_name,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, Default)]
|
|
||||||
pub struct EpisodeExtra {
|
|
||||||
pub name_zh: Option<String>,
|
|
||||||
pub s_name_zh: Option<String>,
|
|
||||||
pub name_en: Option<String>,
|
|
||||||
pub s_name_en: Option<String>,
|
|
||||||
pub name_jp: Option<String>,
|
|
||||||
pub s_name_jp: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
#[sea_orm(table_name = "episodes")]
|
#[sea_orm(table_name = "episodes")]
|
||||||
pub struct Model {
|
pub struct Model {
|
||||||
@ -50,7 +39,6 @@ pub struct Model {
|
|||||||
pub homepage: Option<String>,
|
pub homepage: Option<String>,
|
||||||
pub subtitle: Option<String>,
|
pub subtitle: Option<String>,
|
||||||
pub source: Option<String>,
|
pub source: Option<String>,
|
||||||
pub extra: EpisodeExtra,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
@@ -135,42 +123,51 @@ impl ActiveModel {
         episode: MikanEpisodeMeta,
     ) -> RecorderResult<Self> {
         let mikan_base_url = ctx.mikan().base_url().clone();
-        let rawname_meta = parse_episode_meta_from_raw_name(&episode.episode_title)?;
+        let episode_extention_meta = extract_episode_meta_from_raw_name(&episode.episode_title)
+            .inspect_err(|err| {
+                tracing::error!(
+                    err = ?err,
+                    episode_title = ?episode.episode_title,
+                    "Failed to parse episode extension meta from episode title, skip"
+                );
+            })
+            .ok();
         let homepage = build_mikan_episode_homepage_url(mikan_base_url, &episode.mikan_episode_id);
 
-        Ok(Self {
+        let mut episode_active_model = Self {
             mikan_episode_id: ActiveValue::Set(Some(episode.mikan_episode_id)),
             raw_name: ActiveValue::Set(episode.episode_title.clone()),
             display_name: ActiveValue::Set(episode.episode_title.clone()),
             bangumi_id: ActiveValue::Set(bangumi.id),
             subscriber_id: ActiveValue::Set(bangumi.subscriber_id),
-            resolution: ActiveValue::Set(rawname_meta.resolution),
-            season: ActiveValue::Set(if rawname_meta.season > 0 {
-                rawname_meta.season
-            } else {
-                bangumi.season
-            }),
-            season_raw: ActiveValue::Set(
-                rawname_meta
-                    .season_raw
-                    .or_else(|| bangumi.season_raw.clone()),
-            ),
-            fansub: ActiveValue::Set(rawname_meta.fansub.or_else(|| bangumi.fansub.clone())),
-            poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
-            episode_index: ActiveValue::Set(rawname_meta.episode_index),
             homepage: ActiveValue::Set(Some(homepage.to_string())),
-            subtitle: ActiveValue::Set(rawname_meta.subtitle),
-            source: ActiveValue::Set(rawname_meta.source),
-            extra: ActiveValue::Set(EpisodeExtra {
-                name_zh: rawname_meta.name_zh,
-                name_en: rawname_meta.name_en,
-                name_jp: rawname_meta.name_jp,
-                s_name_en: rawname_meta.name_en_no_season,
-                s_name_jp: rawname_meta.name_jp_no_season,
-                s_name_zh: rawname_meta.name_zh_no_season,
-            }),
+            season_raw: ActiveValue::Set(bangumi.season_raw.clone()),
+            season: ActiveValue::Set(bangumi.season),
+            fansub: ActiveValue::Set(bangumi.fansub.clone()),
+            poster_link: ActiveValue::Set(bangumi.poster_link.clone()),
+            episode_index: ActiveValue::Set(0),
             ..Default::default()
-        })
+        };
 
+        if let Some(episode_extention_meta) = episode_extention_meta {
+            episode_active_model.episode_index =
+                ActiveValue::Set(episode_extention_meta.episode_index);
+            episode_active_model.subtitle = ActiveValue::Set(episode_extention_meta.subtitle);
+            episode_active_model.source = ActiveValue::Set(episode_extention_meta.source);
+            episode_active_model.resolution = ActiveValue::Set(episode_extention_meta.resolution);
+            if episode_extention_meta.season > 0 {
+                episode_active_model.season = ActiveValue::Set(episode_extention_meta.season);
+            }
+            if episode_extention_meta.season_raw.is_some() {
+                episode_active_model.season_raw =
+                    ActiveValue::Set(episode_extention_meta.season_raw);
+            }
+            if episode_extention_meta.fansub.is_some() {
+                episode_active_model.fansub = ActiveValue::Set(episode_extention_meta.fansub);
+            }
+        }
+
+        Ok(episode_active_model)
     }
 }
 
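For context on the error-handling change in this hunk: a raw-name parse failure no longer aborts episode creation; it is logged via `inspect_err` and converted to an `Option` with `ok()`, so the model falls back to the bangumi-level metadata. Below is a minimal standalone sketch of that pattern; `parse_number` is a hypothetical stand-in for `extract_episode_meta_from_raw_name`, not part of the codebase.

```rust
// Log-and-continue sketch: `inspect_err` logs the failure as a side effect,
// `ok()` turns the Result into an Option so the caller can fall back.
fn parse_number(raw: &str) -> Result<i32, std::num::ParseIntError> {
    raw.trim().parse::<i32>()
}

fn main() {
    let parsed = parse_number("not-a-number")
        .inspect_err(|err| eprintln!("failed to parse, falling back: {err}"))
        .ok();

    // The fallback value plays the role of the bangumi-level metadata.
    let episode_index = parsed.unwrap_or(0);
    assert_eq!(episode_index, 0);
}
```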
@@ -7,7 +7,6 @@ pub use core::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberAsyncTaskTrait, Subscriber
 pub use config::TaskConfig;
 pub use registry::{
-    SubscriberTask, SubscriberTaskPayload, SyncOneSubscriptionFeedsIncrementalTask,
-    SyncOneSubscriptionSourcesTask,
+    SubscriberTask, SyncOneSubscriptionFeedsIncrementalTask, SyncOneSubscriptionSourcesTask,
 };
 pub use service::TaskService;
@@ -12,6 +12,7 @@ use super::SubscriberAsyncTaskTrait;
 use crate::{
     app::AppContextTrait,
     errors::{RecorderError, RecorderResult},
+    models::subscriptions::SubscriptionTrait,
 };
 
 #[derive(async_graphql::Enum, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Copy)]
@@ -27,9 +28,26 @@ pub enum SubscriberTaskType {
     SyncOneSubscriptionSources,
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+impl TryFrom<&SubscriberTask> for serde_json::Value {
+    type Error = RecorderError;
+
+    fn try_from(value: &SubscriberTask) -> Result<Self, Self::Error> {
+        let json_value = serde_json::to_value(value)?;
+        Ok(match json_value {
+            serde_json::Value::Object(mut map) => {
+                map.remove("task_type");
+                serde_json::Value::Object(map)
+            }
+            _ => {
+                unreachable!("subscriber task must be an json object");
+            }
+        })
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, FromJsonQueryResult)]
 #[serde(tag = "task_type")]
-pub enum SubscriberTaskPayload {
+pub enum SubscriberTask {
     #[serde(rename = "sync_one_subscription_feeds_incremental")]
     SyncOneSubscriptionFeedsIncremental(SyncOneSubscriptionFeedsIncrementalTask),
     #[serde(rename = "sync_one_subscription_feeds_full")]
@@ -38,7 +56,15 @@ pub enum SubscriberTaskPayload {
     SyncOneSubscriptionSources(SyncOneSubscriptionSourcesTask),
 }
 
-impl SubscriberTaskPayload {
+impl SubscriberTask {
+    pub fn get_subscriber_id(&self) -> i32 {
+        match self {
+            Self::SyncOneSubscriptionFeedsIncremental(task) => task.0.get_subscriber_id(),
+            Self::SyncOneSubscriptionFeedsFull(task) => task.0.get_subscriber_id(),
+            Self::SyncOneSubscriptionSources(task) => task.0.get_subscriber_id(),
+        }
+    }
+
     pub async fn run(self, ctx: Arc<dyn AppContextTrait>) -> RecorderResult<()> {
         match self {
             Self::SyncOneSubscriptionFeedsIncremental(task) => task.run(ctx).await,
@@ -59,27 +85,3 @@ impl SubscriberTaskPayload {
         }
     }
 }
-
-impl TryFrom<&SubscriberTaskPayload> for serde_json::Value {
-    type Error = RecorderError;
-
-    fn try_from(value: &SubscriberTaskPayload) -> Result<Self, Self::Error> {
-        let json_value = serde_json::to_value(value)?;
-        Ok(match json_value {
-            serde_json::Value::Object(mut map) => {
-                map.remove("task_type");
-                serde_json::Value::Object(map)
-            }
-            _ => {
-                unreachable!("subscriber task payload must be an json object");
-            }
-        })
-    }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, FromJsonQueryResult)]
-pub struct SubscriberTask {
-    pub subscriber_id: i32,
-    #[serde(flatten)]
-    pub payload: SubscriberTaskPayload,
-}
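The old `SubscriberTask { subscriber_id, payload }` wrapper is gone; the enum itself is now the persisted task and carries the `task_type` tag directly in its JSON representation, which is why the `TryFrom` impl only has to strip that one key. A rough sketch of that serialization behavior, using a hypothetical two-variant enum (the real variants wrap task structs defined elsewhere in the crate):

```rust
use serde::{Deserialize, Serialize};

// Internally tagged enum, mirroring #[serde(tag = "task_type")] above.
#[derive(Serialize, Deserialize)]
#[serde(tag = "task_type")]
enum DemoTask {
    #[serde(rename = "sync_one_subscription_feeds_incremental")]
    SyncIncremental { subscription_id: i32, subscriber_id: i32 },
}

fn main() -> serde_json::Result<()> {
    let task = DemoTask::SyncIncremental { subscription_id: 1, subscriber_id: 42 };

    // The tag is flattened into the same JSON object as the payload fields.
    let mut value = serde_json::to_value(&task)?;
    assert_eq!(value["task_type"], "sync_one_subscription_feeds_incremental");

    // The TryFrom impl in the diff removes the tag, leaving only the payload.
    if let serde_json::Value::Object(map) = &mut value {
        map.remove("task_type");
    }
    assert_eq!(
        value,
        serde_json::json!({ "subscription_id": 1, "subscriber_id": 42 })
    );
    Ok(())
}
```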
@@ -1,19 +1,22 @@
 use std::{ops::Deref, sync::Arc};
 
 use apalis::prelude::*;
-use apalis_sql::{Config, postgres::PostgresStorage};
+use apalis_sql::{
+    Config,
+    postgres::{PgListen, PostgresStorage},
+};
 use tokio::sync::RwLock;
 
 use crate::{
     app::AppContextTrait,
     errors::RecorderResult,
-    task::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberTask, SubscriberTaskPayload, TaskConfig},
+    task::{SUBSCRIBER_TASK_APALIS_NAME, SubscriberTask, TaskConfig},
 };
 
 pub struct TaskService {
     pub config: TaskConfig,
     ctx: Arc<dyn AppContextTrait>,
-    pub subscriber_task_storage: Arc<RwLock<PostgresStorage<SubscriberTask>>>,
+    subscriber_task_storage: Arc<RwLock<PostgresStorage<SubscriberTask>>>,
 }
 
 impl TaskService {
@@ -23,15 +26,12 @@ impl TaskService {
     ) -> RecorderResult<Self> {
         let pool = ctx.db().get_postgres_connection_pool().clone();
         let storage_config = Config::new(SUBSCRIBER_TASK_APALIS_NAME);
-        let subscriber_task_storage = Arc::new(RwLock::new(PostgresStorage::new_with_config(
-            pool,
-            storage_config,
-        )));
+        let subscriber_task_storage = PostgresStorage::new_with_config(pool, storage_config);
 
         Ok(Self {
             config,
             ctx,
-            subscriber_task_storage,
+            subscriber_task_storage: Arc::new(RwLock::new(subscriber_task_storage)),
         })
     }
 
@@ -41,19 +41,14 @@ impl TaskService {
     ) -> RecorderResult<()> {
         let ctx = data.deref().clone();
 
-        job.payload.run(ctx).await
+        job.run(ctx).await
     }
 
     pub async fn add_subscriber_task(
         &self,
-        subscriber_id: i32,
-        task_payload: SubscriberTaskPayload,
+        _subscriber_id: i32,
+        subscriber_task: SubscriberTask,
     ) -> RecorderResult<TaskId> {
-        let subscriber_task = SubscriberTask {
-            subscriber_id,
-            payload: task_payload,
-        };
-
         let task_id = {
             let mut storage = self.subscriber_task_storage.write().await;
             storage.push(subscriber_task).await?.task_id
@@ -62,22 +57,27 @@ impl TaskService {
         Ok(task_id)
     }
 
-    pub async fn setup(&self) -> RecorderResult<()> {
+    pub async fn setup_monitor(&self) -> RecorderResult<Monitor> {
         let monitor = Monitor::new();
         let worker = WorkerBuilder::new(SUBSCRIBER_TASK_APALIS_NAME)
             .catch_panic()
             .enable_tracing()
             .data(self.ctx.clone())
-            .backend({
-                let storage = self.subscriber_task_storage.read().await;
-                storage.clone()
-            })
+            .backend(self.subscriber_task_storage.read().await.clone())
             .build_fn(Self::run_subscriber_task);
 
-        let monitor = monitor.register(worker);
-
-        monitor.run().await?;
-
-        Ok(())
+        Ok(monitor.register(worker))
+    }
+
+    pub async fn setup_listener(&self) -> RecorderResult<PgListen> {
+        let pool = self.ctx.db().get_postgres_connection_pool().clone();
+        let mut subscriber_task_listener = PgListen::new(pool).await?;
+
+        {
+            let mut subscriber_task_storage = self.subscriber_task_storage.write().await;
+            subscriber_task_listener.subscribe_with(&mut subscriber_task_storage);
+        }
+
+        Ok(subscriber_task_listener)
     }
 }
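With `setup()` split in two, the caller is now responsible for driving both halves. The sketch below is one possible wiring at startup; it assumes apalis's `Monitor::run` and apalis-sql's `PgListen::listen` are used to drive them, and `task_service` stands in for an initialized `TaskService`. This is not taken from the commit itself.

```rust
// Rough wiring sketch (inside the recorder crate, not from this commit).
async fn run_tasks(task_service: &TaskService) -> RecorderResult<()> {
    let monitor = task_service.setup_monitor().await?;
    let listener = task_service.setup_listener().await?;

    // The listener forwards Postgres NOTIFY events to the storage backend,
    // while the monitor polls and executes queued subscriber tasks.
    tokio::spawn(async move {
        if let Err(err) = listener.listen().await {
            tracing::error!(?err, "subscriber task listener exited");
        }
    });

    monitor.run().await?;
    Ok(())
}
```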
@@ -3,7 +3,7 @@ use std::{fmt::Debug, sync::Arc};
 use once_cell::sync::OnceCell;
 use typed_builder::TypedBuilder;
 
-use crate::app::AppContextTrait;
+use crate::{app::AppContextTrait, test_utils::storage::TestingStorageService};
 
 #[derive(TypedBuilder)]
 #[builder(field_defaults(default, setter(strip_option)))]
@@ -15,7 +15,7 @@ pub struct TestingAppContext {
     mikan: Option<crate::extract::mikan::MikanClient>,
     auth: Option<crate::auth::AuthService>,
     graphql: Option<crate::graphql::GraphQLService>,
-    storage: Option<crate::storage::StorageService>,
+    storage: Option<TestingStorageService>,
     crypto: Option<crate::crypto::CryptoService>,
     #[builder(default = Arc::new(OnceCell::new()), setter(!strip_option))]
     task: Arc<OnceCell<crate::task::TaskService>>,
@@ -1,10 +1,12 @@
 use std::{
     collections::HashMap,
-    path::{self, Path},
+    ops::{Deref, DerefMut},
+    path::{self, PathBuf},
 };
 
 use chrono::{Duration, Utc};
 use fetch::{FetchError, HttpClientConfig, IntoUrl, get_random_ua};
+use lazy_static::lazy_static;
 use percent_encoding::{AsciiSet, CONTROLS, percent_decode, utf8_percent_encode};
 use serde::{Deserialize, Serialize};
 use url::Url;
@@ -43,9 +45,7 @@ pub async fn build_testing_mikan_client(
     base_mikan_url: impl IntoUrl,
 ) -> RecorderResult<MikanClient> {
     let mikan_client = MikanClient::from_config(MikanConfig {
-        http_client: HttpClientConfig {
-            ..Default::default()
-        },
+        http_client: HttpClientConfig::default(),
         base_url: base_mikan_url.into_url().map_err(FetchError::from)?,
     })
     .await?;
@@ -147,10 +147,19 @@ impl AsRef<path::Path> for MikanDoppelPath {
     }
 }
 
+lazy_static! {
+    static ref TEST_RESOURCES_DIR: String =
+        if cfg!(any(test, debug_assertions, feature = "playground")) {
+            format!("{}/tests/resources", env!("CARGO_MANIFEST_DIR"))
+        } else {
+            "tests/resources".to_string()
+        };
+}
+
 impl From<Url> for MikanDoppelPath {
     fn from(value: Url) -> Self {
-        let base_path =
-            Path::new("tests/resources/mikan/doppel").join(value.path().trim_matches('/'));
+        let doppel_path = PathBuf::from(format!("{}/mikan/doppel", TEST_RESOURCES_DIR.as_str()));
+        let base_path = doppel_path.join(value.path().trim_matches('/'));
         let dirname = base_path.parent();
         let stem = base_path.file_stem();
         debug_assert!(dirname.is_some() && stem.is_some());
@@ -187,17 +196,60 @@ pub struct MikanMockServerResourcesMock {
     pub season_flow_noauth_mock: mockito::Mock,
 }
 
+pub enum MikanMockServerInner {
+    Server(mockito::Server),
+    ServerGuard(mockito::ServerGuard),
+}
+
+impl Deref for MikanMockServerInner {
+    type Target = mockito::Server;
+
+    fn deref(&self) -> &Self::Target {
+        match self {
+            MikanMockServerInner::Server(server) => server,
+            MikanMockServerInner::ServerGuard(server) => server,
+        }
+    }
+}
+
+impl DerefMut for MikanMockServerInner {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        match self {
+            MikanMockServerInner::Server(server) => server,
+            MikanMockServerInner::ServerGuard(server) => server,
+        }
+    }
+}
+
 pub struct MikanMockServer {
-    pub server: mockito::ServerGuard,
+    pub server: MikanMockServerInner,
     base_url: Url,
 }
 
 impl MikanMockServer {
+    pub async fn new_with_port(port: u16) -> RecorderResult<Self> {
+        let server = mockito::Server::new_with_opts_async(mockito::ServerOpts {
+            host: "0.0.0.0",
+            port,
+            ..Default::default()
+        })
+        .await;
+        let base_url = Url::parse(&server.url())?;
+
+        Ok(Self {
+            server: MikanMockServerInner::Server(server),
+            base_url,
+        })
+    }
+
     pub async fn new() -> RecorderResult<Self> {
         let server = mockito::Server::new_async().await;
         let base_url = Url::parse(&server.url())?;
 
-        Ok(Self { server, base_url })
+        Ok(Self {
+            server: MikanMockServerInner::ServerGuard(server),
+            base_url,
+        })
     }
 
     pub fn base_url(&self) -> &Url {
@@ -230,7 +282,10 @@ impl MikanMockServer {
                     SameSite=Strict; Path=/"
                 ),
             )
-            .with_body_from_file("tests/resources/mikan/LoginPage.html")
+            .with_body_from_file(format!(
+                "{}/mikan/LoginPage.html",
+                TEST_RESOURCES_DIR.as_str()
+            ))
             .create();
 
         let test_identity_expires = (Utc::now() + Duration::days(30)).to_rfc2822();
@@ -284,7 +339,10 @@ impl MikanMockServer {
             .match_query(mockito::Matcher::Any)
             .match_request(move |req| !match_post_login_body(req))
             .with_status(200)
-            .with_body_from_file("tests/resources/mikan/LoginError.html")
+            .with_body_from_file(format!(
+                "{}/mikan/LoginError.html",
+                TEST_RESOURCES_DIR.as_str()
+            ))
             .create();
 
         let account_get_success_mock = self
@@ -428,7 +486,10 @@ impl MikanMockServer {
                     .starts_with(MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH)
             })
             .with_status(200)
-            .with_body_from_file("tests/resources/mikan/ExpandBangumi-noauth.html")
+            .with_body_from_file(format!(
+                "{}/mikan/ExpandBangumi-noauth.html",
+                TEST_RESOURCES_DIR.as_str()
+            ))
             .create();
 
         let season_flow_noauth_mock = self
@@ -439,7 +500,10 @@ impl MikanMockServer {
                     && req.path().starts_with(MIKAN_SEASON_FLOW_PAGE_PATH)
             })
            .with_status(200)
-            .with_body_from_file("tests/resources/mikan/BangumiCoverFlow-noauth.html")
+            .with_body_from_file(format!(
+                "{}/mikan/BangumiCoverFlow-noauth.html",
+                TEST_RESOURCES_DIR.as_str()
+            ))
            .create();
 
         MikanMockServerResourcesMock {
@@ -1,11 +1,19 @@
 use tracing::Level;
 use tracing_subscriber::EnvFilter;
 
+use crate::logger::MODULE_WHITELIST;
+
 pub fn try_init_testing_tracing(level: Level) {
     let crate_name = env!("CARGO_PKG_NAME");
     let level = level.as_str().to_lowercase();
-    let filter = EnvFilter::new(format!("{crate_name}[]={level}"))
-        .add_directive(format!("mockito[]={level}").parse().unwrap())
-        .add_directive(format!("sqlx[]={level}").parse().unwrap());
+    let mut filter = EnvFilter::new(format!("{crate_name}[]={level}"));
+
+    let mut modules = vec![];
+    modules.extend(MODULE_WHITELIST.iter());
+    modules.push("mockito");
+    for module in modules {
+        filter = filter.add_directive(format!("{module}[]={level}").parse().unwrap());
+    }
+
     let _ = tracing_subscriber::fmt().with_env_filter(filter).try_init();
 }
@@ -1,9 +1,9 @@
-'use client';
+"use client";
 
-import type { Row } from '@tanstack/react-table';
-import { MoreHorizontal } from 'lucide-react';
+import type { Row } from "@tanstack/react-table";
+import { MoreHorizontal } from "lucide-react";
 
-import { Button } from '@/components/ui/button';
+import { Button } from "@/components/ui/button";
 import {
   DropdownMenu,
   DropdownMenuContent,
@@ -11,9 +11,9 @@ import {
   DropdownMenuSeparator,
   DropdownMenuShortcut,
   DropdownMenuTrigger,
-} from '@/components/ui/dropdown-menu';
+} from "@/components/ui/dropdown-menu";
 
-import { useMemo } from 'react';
+import { PropsWithChildren, useMemo } from "react";
 
 interface DataTableRowActionsProps<DataView, Id> {
   row: Row<DataView>;
@@ -24,6 +24,7 @@ interface DataTableRowActionsProps<DataView, Id> {
   onDetail?: (id: Id) => void;
   onDelete?: (id: Id) => void;
   onEdit?: (id: Id) => void;
+  modal?: boolean;
 }
 
 export function DataTableRowActions<DataView, Id>({
@@ -35,10 +36,12 @@ export function DataTableRowActions<DataView, Id>({
   onDetail,
   onDelete,
   onEdit,
-}: DataTableRowActionsProps<DataView, Id>) {
+  children,
+  modal,
+}: PropsWithChildren<DataTableRowActionsProps<DataView, Id>>) {
   const id = useMemo(() => getId(row), [getId, row]);
   return (
-    <DropdownMenu>
+    <DropdownMenu modal={modal}>
       <DropdownMenuTrigger asChild>
         <Button
           variant="ghost"
@@ -49,6 +52,7 @@ export function DataTableRowActions<DataView, Id>({
         </Button>
       </DropdownMenuTrigger>
       <DropdownMenuContent align="end" className="w-[160px]">
+        {children}
         {showDetail && (
           <DropdownMenuItem onClick={() => onDetail?.(id)}>
             Detail
@@ -3,6 +3,8 @@ import { Button } from '@/components/ui/button';
 import { DataTablePagination } from '@/components/ui/data-table-pagination';
 import { DataTableRowActions } from '@/components/ui/data-table-row-actions';
 import { DataTableViewOptions } from '@/components/ui/data-table-view-options';
+import { DialogTrigger } from '@/components/ui/dialog';
+import { DropdownMenuItem } from '@/components/ui/dropdown-menu';
 import { QueryErrorView } from '@/components/ui/query-error-view';
 import { Skeleton } from '@/components/ui/skeleton';
 import {
@@ -28,6 +30,7 @@ import { useDebouncedSkeleton } from '@/presentation/hooks/use-debounded-skeleto
 import { useEvent } from '@/presentation/hooks/use-event';
 import { cn } from '@/presentation/utils';
 import { useMutation, useQuery } from '@apollo/client';
+import { Dialog } from '@radix-ui/react-dialog';
 import { createFileRoute, useNavigate } from '@tanstack/react-router';
 import {
   type ColumnDef,
@@ -44,6 +47,7 @@ import { format } from 'date-fns';
 import { Eye, EyeOff, Plus } from 'lucide-react';
 import { useMemo, useState } from 'react';
 import { toast } from 'sonner';
+import { Credential3rdCheckAvailableViewDialogContent } from './-check-available';
 
 export const Route = createFileRoute('/_app/credential3rd/manage')({
   component: CredentialManageRouteComponent,
@@ -246,7 +250,18 @@ function CredentialManageRouteComponent() {
             });
           }}
           onDelete={handleDeleteRecord(row)}
+        >
+          <Dialog>
+            <DialogTrigger asChild>
+              <DropdownMenuItem onSelect={(e) => e.preventDefault()}>
+                Check Available
+              </DropdownMenuItem>
+            </DialogTrigger>
+            <Credential3rdCheckAvailableViewDialogContent
+              id={row.original.id}
             />
+          </Dialog>
+        </DataTableRowActions>
         ),
       },
     ];
@@ -126,7 +126,7 @@ export const SubscriptionSyncView = memo(
 
 export interface SubscriptionSyncDialogContentProps {
   id: number;
-  onCancel: VoidFunction;
+  onCancel?: VoidFunction;
 }
 
 export const SubscriptionSyncDialogContent = memo(
@@ -2,6 +2,8 @@ import { Button } from '@/components/ui/button';
 import { DataTablePagination } from '@/components/ui/data-table-pagination';
 import { DataTableRowActions } from '@/components/ui/data-table-row-actions';
 import { DataTableViewOptions } from '@/components/ui/data-table-view-options';
+import { Dialog, DialogTrigger } from '@/components/ui/dialog';
+import { DropdownMenuItem } from '@/components/ui/dropdown-menu';
 import { QueryErrorView } from '@/components/ui/query-error-view';
 import { Skeleton } from '@/components/ui/skeleton';
 import { Switch } from '@/components/ui/switch';
@@ -49,6 +51,7 @@ import { format } from 'date-fns';
 import { Plus } from 'lucide-react';
 import { useMemo, useState } from 'react';
 import { toast } from 'sonner';
+import { SubscriptionSyncDialogContent } from './-sync';
 
 export const Route = createFileRoute('/_app/subscriptions/manage')({
   component: SubscriptionManageRouteComponent,
@@ -240,7 +243,16 @@ function SubscriptionManageRouteComponent() {
             });
           }}
           onDelete={handleDeleteRecord(row)}
-        />
+        >
+          <Dialog>
+            <DialogTrigger asChild>
+              <DropdownMenuItem onSelect={(e) => e.preventDefault()}>
+                Sync
+              </DropdownMenuItem>
+            </DialogTrigger>
+            <SubscriptionSyncDialogContent id={row.original.id} />
+          </Dialog>
+        </DataTableRowActions>
         ),
       },
     ];
justfile (4)

@@ -14,8 +14,8 @@ dev-webui:
     pnpm run --filter=webui dev
 
 dev-proxy:
-    npx kill-port 8899
-    pnpm run --filter=proxy dev
+    npx --yes kill-port --port 8899,5010
+    pnpm run --parallel --filter=proxy dev
 
 dev-recorder:
     watchexec -r -e rs,toml,yaml,json,env -- cargo run -p recorder --bin recorder_cli -- --environment development
package.json (12)

@@ -3,7 +3,10 @@
   "version": "0.0.0",
   "description": "Kono bangumi?",
   "license": "MIT",
-  "workspaces": ["packages/*", "apps/*"],
+  "workspaces": [
+    "packages/*",
+    "apps/*"
+  ],
   "type": "module",
   "repository": {
     "type": "git",
@@ -30,11 +33,12 @@
     "@auto-it/first-time-contributor": "^11.3.0",
     "@biomejs/biome": "1.9.4",
     "@types/node": "^24.0.0",
+    "cross-env": "^7.0.3",
+    "kill-port": "^2.0.1",
+    "npm-run-all": "^4.1.5",
     "tsx": "^4.19.4",
     "turbo": "^2.5.4",
     "typescript": "^5.8.3",
-    "ultracite": "^4.2.8",
-    "kill-port": "^2.0.1",
-    "cross-env": "^7.0.3"
+    "ultracite": "^4.2.8"
   }
 }
|
209
pnpm-lock.yaml
generated
209
pnpm-lock.yaml
generated
@ -26,6 +26,9 @@ importers:
|
|||||||
kill-port:
|
kill-port:
|
||||||
specifier: ^2.0.1
|
specifier: ^2.0.1
|
||||||
version: 2.0.1
|
version: 2.0.1
|
||||||
|
npm-run-all:
|
||||||
|
specifier: ^4.1.5
|
||||||
|
version: 4.1.5
|
||||||
tsx:
|
tsx:
|
||||||
specifier: ^4.19.4
|
specifier: ^4.19.4
|
||||||
version: 4.19.4
|
version: 4.19.4
|
||||||
@ -63,6 +66,8 @@ importers:
|
|||||||
specifier: ^2.9.93
|
specifier: ^2.9.93
|
||||||
version: 2.9.99
|
version: 2.9.99
|
||||||
|
|
||||||
|
apps/recorder: {}
|
||||||
|
|
||||||
apps/webui:
|
apps/webui:
|
||||||
dependencies:
|
dependencies:
|
||||||
'@abraham/reflection':
|
'@abraham/reflection':
|
||||||
@ -3874,6 +3879,9 @@ packages:
|
|||||||
bottleneck@2.19.5:
|
bottleneck@2.19.5:
|
||||||
resolution: {integrity: sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==}
|
resolution: {integrity: sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==}
|
||||||
|
|
||||||
|
brace-expansion@1.1.11:
|
||||||
|
resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==}
|
||||||
|
|
||||||
brace-expansion@2.0.1:
|
brace-expansion@2.0.1:
|
||||||
resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==}
|
resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==}
|
||||||
|
|
||||||
@ -4124,6 +4132,9 @@ packages:
|
|||||||
resolution: {integrity: sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==}
|
resolution: {integrity: sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==}
|
||||||
engines: {node: '>=4.0.0'}
|
engines: {node: '>=4.0.0'}
|
||||||
|
|
||||||
|
concat-map@0.0.1:
|
||||||
|
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
|
||||||
|
|
||||||
concat-stream@1.6.2:
|
concat-stream@1.6.2:
|
||||||
resolution: {integrity: sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==}
|
resolution: {integrity: sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==}
|
||||||
engines: {'0': node >= 0.8}
|
engines: {'0': node >= 0.8}
|
||||||
@ -4199,6 +4210,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-Pcw1JTvZLSJH83iiGWt6fRcT+BjZlCDRVwYLbUcHzv/CRpB7r0MlSrGbIyQvVSNyGnbt7G4AXuyCiDR3POvZ1A==}
|
resolution: {integrity: sha512-Pcw1JTvZLSJH83iiGWt6fRcT+BjZlCDRVwYLbUcHzv/CRpB7r0MlSrGbIyQvVSNyGnbt7G4AXuyCiDR3POvZ1A==}
|
||||||
engines: {node: '>=16.0.0'}
|
engines: {node: '>=16.0.0'}
|
||||||
|
|
||||||
|
cross-spawn@6.0.6:
|
||||||
|
resolution: {integrity: sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==}
|
||||||
|
engines: {node: '>=4.8'}
|
||||||
|
|
||||||
cross-spawn@7.0.6:
|
cross-spawn@7.0.6:
|
||||||
resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==}
|
resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==}
|
||||||
engines: {node: '>= 8'}
|
engines: {node: '>= 8'}
|
||||||
@ -4955,6 +4970,9 @@ packages:
|
|||||||
hoist-non-react-statics@3.3.2:
|
hoist-non-react-statics@3.3.2:
|
||||||
resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==}
|
resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==}
|
||||||
|
|
||||||
|
hosted-git-info@2.8.9:
|
||||||
|
resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==}
|
||||||
|
|
||||||
hparser@0.5.0:
|
hparser@0.5.0:
|
||||||
resolution: {integrity: sha512-8s54Cqc7KFS9jigRPy2EDc+WWFyc1JSKsN2HgFbGe/NGj7rchtER957bxp8rbjypo68IYLoLb6CuYNHQCYjh5g==}
|
resolution: {integrity: sha512-8s54Cqc7KFS9jigRPy2EDc+WWFyc1JSKsN2HgFbGe/NGj7rchtER957bxp8rbjypo68IYLoLb6CuYNHQCYjh5g==}
|
||||||
engines: {node: '>= 0.10.0'}
|
engines: {node: '>= 0.10.0'}
|
||||||
@ -5109,6 +5127,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==}
|
resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==}
|
||||||
engines: {node: '>= 0.4'}
|
engines: {node: '>= 0.4'}
|
||||||
|
|
||||||
|
is-core-module@2.16.1:
|
||||||
|
resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==}
|
||||||
|
engines: {node: '>= 0.4'}
|
||||||
|
|
||||||
is-data-view@1.0.2:
|
is-data-view@1.0.2:
|
||||||
resolution: {integrity: sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==}
|
resolution: {integrity: sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==}
|
||||||
engines: {node: '>= 0.4'}
|
engines: {node: '>= 0.4'}
|
||||||
@ -5539,6 +5561,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==}
|
resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==}
|
||||||
engines: {node: '>= 0.6'}
|
engines: {node: '>= 0.6'}
|
||||||
|
|
||||||
|
memorystream@0.3.1:
|
||||||
|
resolution: {integrity: sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==}
|
||||||
|
engines: {node: '>= 0.10.0'}
|
||||||
|
|
||||||
merge-descriptors@1.0.3:
|
merge-descriptors@1.0.3:
|
||||||
resolution: {integrity: sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==}
|
resolution: {integrity: sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==}
|
||||||
|
|
||||||
@ -5583,6 +5609,9 @@ packages:
|
|||||||
resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==}
|
resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==}
|
||||||
engines: {node: '>=6'}
|
engines: {node: '>=6'}
|
||||||
|
|
||||||
|
minimatch@3.1.2:
|
||||||
|
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
|
||||||
|
|
||||||
minimatch@9.0.5:
|
minimatch@9.0.5:
|
||||||
resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==}
|
resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==}
|
||||||
engines: {node: '>=16 || 14 >=14.17'}
|
engines: {node: '>=16 || 14 >=14.17'}
|
||||||
@ -5686,6 +5715,9 @@ packages:
|
|||||||
sass:
|
sass:
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
nice-try@1.0.5:
|
||||||
|
resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==}
|
||||||
|
|
||||||
no-case@3.0.4:
|
no-case@3.0.4:
|
||||||
resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==}
|
resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==}
|
||||||
|
|
||||||
@ -5737,6 +5769,9 @@ packages:
|
|||||||
node-releases@2.0.19:
|
node-releases@2.0.19:
|
||||||
resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==}
|
resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==}
|
||||||
|
|
||||||
|
normalize-package-data@2.5.0:
|
||||||
|
resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==}
|
||||||
|
|
||||||
normalize-path@2.1.1:
|
normalize-path@2.1.1:
|
||||||
resolution: {integrity: sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==}
|
resolution: {integrity: sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==}
|
||||||
engines: {node: '>=0.10.0'}
|
engines: {node: '>=0.10.0'}
|
||||||
@ -5745,6 +5780,11 @@ packages:
|
|||||||
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
|
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
|
||||||
engines: {node: '>=0.10.0'}
|
engines: {node: '>=0.10.0'}
|
||||||
|
|
||||||
|
npm-run-all@4.1.5:
|
||||||
|
resolution: {integrity: sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==}
|
||||||
|
engines: {node: '>= 4'}
|
||||||
|
hasBin: true
|
||||||
|
|
||||||
npm-run-path@4.0.1:
|
npm-run-path@4.0.1:
|
||||||
resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==}
|
resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==}
|
||||||
engines: {node: '>=8'}
|
engines: {node: '>=8'}
|
||||||
@ -5914,6 +5954,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==}
|
resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==}
|
||||||
engines: {node: '>=0.10.0'}
|
engines: {node: '>=0.10.0'}
|
||||||
|
|
||||||
|
path-key@2.0.1:
|
||||||
|
resolution: {integrity: sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==}
|
||||||
|
engines: {node: '>=4'}
|
||||||
|
|
||||||
path-key@3.1.1:
|
path-key@3.1.1:
|
||||||
resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
|
resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
|
||||||
engines: {node: '>=8'}
|
engines: {node: '>=8'}
|
||||||
@ -5936,6 +5980,10 @@ packages:
|
|||||||
path-to-regexp@0.1.12:
|
path-to-regexp@0.1.12:
|
||||||
resolution: {integrity: sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==}
|
resolution: {integrity: sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==}
|
||||||
|
|
||||||
|
path-type@3.0.0:
|
||||||
|
resolution: {integrity: sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==}
|
||||||
|
engines: {node: '>=4'}
|
||||||
|
|
||||||
path-type@4.0.0:
|
path-type@4.0.0:
|
||||||
resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==}
|
resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==}
|
||||||
engines: {node: '>=8'}
|
engines: {node: '>=8'}
|
||||||
@ -5966,6 +6014,11 @@ packages:
|
|||||||
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
|
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
|
||||||
engines: {node: '>=8.6'}
|
engines: {node: '>=8.6'}
|
||||||
|
|
||||||
|
pidtree@0.3.1:
|
||||||
|
resolution: {integrity: sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA==}
|
||||||
|
engines: {node: '>=0.10'}
|
||||||
|
hasBin: true
|
||||||
|
|
||||||
pify@3.0.0:
|
pify@3.0.0:
|
||||||
resolution: {integrity: sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==}
|
resolution: {integrity: sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==}
|
||||||
engines: {node: '>=4'}
|
engines: {node: '>=4'}
|
||||||
@ -6160,6 +6213,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg==}
|
resolution: {integrity: sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg==}
|
||||||
engines: {node: '>=0.10.0'}
|
engines: {node: '>=0.10.0'}
|
||||||
|
|
||||||
|
read-pkg@3.0.0:
|
||||||
|
resolution: {integrity: sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==}
|
||||||
|
engines: {node: '>=4'}
|
||||||
|
|
||||||
readable-stream@1.1.14:
|
readable-stream@1.1.14:
|
||||||
resolution: {integrity: sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==}
|
resolution: {integrity: sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==}
|
||||||
|
|
||||||
@ -6245,6 +6302,11 @@ packages:
|
|||||||
resolve-pkg-maps@1.0.0:
|
resolve-pkg-maps@1.0.0:
|
||||||
resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
|
resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
|
||||||
|
|
||||||
|
resolve@1.22.10:
|
||||||
|
resolution: {integrity: sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==}
|
||||||
|
engines: {node: '>= 0.4'}
|
||||||
|
hasBin: true
|
||||||
|
|
||||||
resolve@1.7.1:
|
resolve@1.7.1:
|
||||||
resolution: {integrity: sha512-c7rwLofp8g1U+h1KNyHL/jicrKg1Ek4q+Lr33AL65uZTinUZHe30D5HlyN5V9NW0JX1D5dXQ4jqW5l7Sy/kGfw==}
|
resolution: {integrity: sha512-c7rwLofp8g1U+h1KNyHL/jicrKg1Ek4q+Lr33AL65uZTinUZHe30D5HlyN5V9NW0JX1D5dXQ4jqW5l7Sy/kGfw==}
|
||||||
|
|
||||||
@ -6343,6 +6405,10 @@ packages:
|
|||||||
selderee@0.11.0:
|
selderee@0.11.0:
|
||||||
resolution: {integrity: sha512-5TF+l7p4+OsnP8BCCvSyZiSPc4x4//p5uPwK8TCnVPJYRmU2aYKMpOXvw8zM5a5JvuuCGN1jmsMwuU2W02ukfA==}
|
resolution: {integrity: sha512-5TF+l7p4+OsnP8BCCvSyZiSPc4x4//p5uPwK8TCnVPJYRmU2aYKMpOXvw8zM5a5JvuuCGN1jmsMwuU2W02ukfA==}
|
||||||
|
|
||||||
|
semver@5.7.2:
|
||||||
|
resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==}
|
||||||
|
hasBin: true
|
||||||
|
|
||||||
semver@6.3.1:
|
semver@6.3.1:
|
||||||
resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==}
|
resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
@ -6427,10 +6493,18 @@ packages:
|
|||||||
resolution: {integrity: sha512-1j0w61+eVxu7DawFJtnfYcvSv6qPFvfTaqzTQ2BLknVhHTwGS8sc63ZBF4rzkWMBVKybo4S5OBtDdZahh2A1xg==}
|
resolution: {integrity: sha512-1j0w61+eVxu7DawFJtnfYcvSv6qPFvfTaqzTQ2BLknVhHTwGS8sc63ZBF4rzkWMBVKybo4S5OBtDdZahh2A1xg==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
|
||||||
|
shebang-command@1.2.0:
|
||||||
|
resolution: {integrity: sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==}
|
||||||
|
engines: {node: '>=0.10.0'}
|
||||||
|
|
||||||
shebang-command@2.0.0:
|
shebang-command@2.0.0:
|
||||||
resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
|
resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
|
||||||
engines: {node: '>=8'}
|
engines: {node: '>=8'}
|
||||||
|
|
||||||
|
shebang-regex@1.0.0:
|
||||||
|
resolution: {integrity: sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==}
|
||||||
|
engines: {node: '>=0.10.0'}
|
||||||
|
|
||||||
shebang-regex@3.0.0:
|
shebang-regex@3.0.0:
|
||||||
resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
|
resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
|
||||||
engines: {node: '>=8'}
|
engines: {node: '>=8'}
|
||||||
@ -6556,6 +6630,18 @@ packages:
|
|||||||
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
|
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
|
||||||
engines: {node: '>=0.10.0'}
|
engines: {node: '>=0.10.0'}
|
||||||
|
|
||||||
|
spdx-correct@3.2.0:
|
||||||
|
resolution: {integrity: sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==}
|
||||||
|
|
||||||
|
spdx-exceptions@2.5.0:
|
||||||
|
resolution: {integrity: sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==}
|
||||||
|
|
||||||
|
spdx-expression-parse@3.0.1:
|
||||||
|
resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==}
|
||||||
|
|
||||||
|
spdx-license-ids@3.0.21:
|
||||||
|
resolution: {integrity: sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg==}
|
||||||
|
|
||||||
sponge-case@1.0.1:
|
sponge-case@1.0.1:
|
||||||
resolution: {integrity: sha512-dblb9Et4DAtiZ5YSUZHLl4XhH4uK80GhAZrVXdN4O2P4gQ40Wa5UIOPUHlA/nFd2PLblBZWUioLMMAVrgpoYcA==}
|
resolution: {integrity: sha512-dblb9Et4DAtiZ5YSUZHLl4XhH4uK80GhAZrVXdN4O2P4gQ40Wa5UIOPUHlA/nFd2PLblBZWUioLMMAVrgpoYcA==}
|
||||||
|
|
||||||
@ -6599,6 +6685,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==}
|
resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==}
|
||||||
engines: {node: '>=12'}
|
engines: {node: '>=12'}
|
||||||
|
|
||||||
|
string.prototype.padend@3.1.6:
|
||||||
|
resolution: {integrity: sha512-XZpspuSB7vJWhvJc9DLSlrXl1mcA2BdoY5jjnS135ydXqLoqhs96JjDtCkjJEQHvfqZIp9hBuBMgI589peyx9Q==}
|
||||||
|
engines: {node: '>= 0.4'}
|
||||||
|
|
||||||
string.prototype.trim@1.2.10:
|
string.prototype.trim@1.2.10:
|
||||||
resolution: {integrity: sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==}
|
resolution: {integrity: sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==}
|
||||||
engines: {node: '>= 0.4'}
|
engines: {node: '>= 0.4'}
|
||||||
@ -6672,6 +6762,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==}
|
resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==}
|
||||||
engines: {node: '>=8'}
|
engines: {node: '>=8'}
|
||||||
|
|
||||||
|
supports-preserve-symlinks-flag@1.0.0:
|
||||||
|
resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
|
||||||
|
engines: {node: '>= 0.4'}
|
||||||
|
|
||||||
swap-case@2.0.2:
|
swap-case@2.0.2:
|
||||||
resolution: {integrity: sha512-kc6S2YS/2yXbtkSMunBtKdah4VFETZ8Oh6ONSmSd9bRxhqTrtARUCBUiWXH3xVPpvR7tz2CSnkuXVE42EcGnMw==}
|
resolution: {integrity: sha512-kc6S2YS/2yXbtkSMunBtKdah4VFETZ8Oh6ONSmSd9bRxhqTrtARUCBUiWXH3xVPpvR7tz2CSnkuXVE42EcGnMw==}
|
||||||
|
|
||||||
@ -7033,6 +7127,9 @@ packages:
|
|||||||
v8-compile-cache-lib@3.0.1:
|
v8-compile-cache-lib@3.0.1:
|
||||||
resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==}
|
resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==}
|
||||||
|
|
||||||
|
validate-npm-package-license@3.0.4:
|
||||||
|
resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==}
|
||||||
|
|
||||||
vary@1.1.2:
|
vary@1.1.2:
|
||||||
resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==}
|
resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==}
|
||||||
engines: {node: '>= 0.8'}
|
engines: {node: '>= 0.8'}
|
||||||
@ -7189,6 +7286,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==}
|
resolution: {integrity: sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==}
|
||||||
engines: {node: '>= 0.4'}
|
engines: {node: '>= 0.4'}
|
||||||
|
|
||||||
|
which@1.3.1:
|
||||||
|
resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==}
|
||||||
|
hasBin: true
|
||||||
|
|
||||||
which@2.0.2:
|
which@2.0.2:
|
||||||
resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
|
resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
|
||||||
engines: {node: '>= 8'}
|
engines: {node: '>= 8'}
|
||||||
@ -11142,6 +11243,11 @@ snapshots:
|
|||||||
|
|
||||||
bottleneck@2.19.5: {}
|
bottleneck@2.19.5: {}
|
||||||
|
|
||||||
|
brace-expansion@1.1.11:
|
||||||
|
dependencies:
|
||||||
|
balanced-match: 1.0.2
|
||||||
|
concat-map: 0.0.1
|
||||||
|
|
||||||
brace-expansion@2.0.1:
|
brace-expansion@2.0.1:
|
||||||
dependencies:
|
dependencies:
|
||||||
balanced-match: 1.0.2
|
balanced-match: 1.0.2
|
||||||
@ -11428,6 +11534,8 @@ snapshots:
|
|||||||
|
|
||||||
common-tags@1.8.2: {}
|
common-tags@1.8.2: {}
|
||||||
|
|
||||||
|
concat-map@0.0.1: {}
|
||||||
|
|
||||||
concat-stream@1.6.2:
|
concat-stream@1.6.2:
|
||||||
dependencies:
|
dependencies:
|
||||||
buffer-from: 1.1.2
|
buffer-from: 1.1.2
|
||||||
@ -11505,6 +11613,14 @@ snapshots:
|
|||||||
dependencies:
|
dependencies:
|
||||||
tslib: 2.8.1
|
tslib: 2.8.1
|
||||||
|
|
||||||
|
cross-spawn@6.0.6:
|
||||||
|
dependencies:
|
||||||
|
nice-try: 1.0.5
|
||||||
|
path-key: 2.0.1
|
||||||
|
semver: 5.7.2
|
||||||
|
shebang-command: 1.2.0
|
||||||
|
which: 1.3.1
|
||||||
|
|
||||||
cross-spawn@7.0.6:
|
cross-spawn@7.0.6:
|
||||||
dependencies:
|
dependencies:
|
||||||
path-key: 3.1.1
|
path-key: 3.1.1
|
||||||
@ -12411,6 +12527,8 @@ snapshots:
|
|||||||
dependencies:
|
dependencies:
|
||||||
react-is: 16.13.1
|
react-is: 16.13.1
|
||||||
|
|
||||||
|
hosted-git-info@2.8.9: {}
|
||||||
|
|
||||||
hparser@0.5.0: {}
|
hparser@0.5.0: {}
|
||||||
|
|
||||||
html-encoding-sniffer@4.0.0:
|
html-encoding-sniffer@4.0.0:
|
||||||
@ -12601,6 +12719,10 @@ snapshots:
|
|||||||
|
|
||||||
is-callable@1.2.7: {}
|
is-callable@1.2.7: {}
|
||||||
|
|
||||||
|
is-core-module@2.16.1:
|
||||||
|
dependencies:
|
||||||
|
hasown: 2.0.2
|
||||||
|
|
||||||
is-data-view@1.0.2:
|
is-data-view@1.0.2:
|
||||||
dependencies:
|
dependencies:
|
||||||
call-bound: 1.0.4
|
call-bound: 1.0.4
|
||||||
@ -13001,6 +13123,8 @@ snapshots:
|
|||||||
|
|
||||||
media-typer@0.3.0: {}
|
media-typer@0.3.0: {}
|
||||||
|
|
||||||
|
memorystream@0.3.1: {}
|
||||||
|
|
||||||
merge-descriptors@1.0.3: {}
|
merge-descriptors@1.0.3: {}
|
||||||
|
|
||||||
merge-stream@2.0.0: {}
|
merge-stream@2.0.0: {}
|
||||||
@ -13028,6 +13152,10 @@ snapshots:
|
|||||||
|
|
||||||
mimic-fn@2.1.0: {}
|
mimic-fn@2.1.0: {}
|
||||||
|
|
||||||
|
minimatch@3.1.2:
|
||||||
|
dependencies:
|
||||||
|
brace-expansion: 1.1.11
|
||||||
|
|
||||||
minimatch@9.0.5:
|
minimatch@9.0.5:
|
||||||
dependencies:
|
dependencies:
|
||||||
brace-expansion: 2.0.1
|
brace-expansion: 2.0.1
|
||||||
@ -13133,6 +13261,8 @@ snapshots:
|
|||||||
- '@babel/core'
|
- '@babel/core'
|
||||||
- babel-plugin-macros
|
- babel-plugin-macros
|
||||||
|
|
||||||
|
nice-try@1.0.5: {}
|
||||||
|
|
||||||
no-case@3.0.4:
|
no-case@3.0.4:
|
||||||
dependencies:
|
dependencies:
|
||||||
lower-case: 2.0.2
|
lower-case: 2.0.2
|
||||||
@ -13173,12 +13303,31 @@ snapshots:

   node-releases@2.0.19: {}

+  normalize-package-data@2.5.0:
+    dependencies:
+      hosted-git-info: 2.8.9
+      resolve: 1.22.10
+      semver: 5.7.2
+      validate-npm-package-license: 3.0.4
+
   normalize-path@2.1.1:
     dependencies:
       remove-trailing-separator: 1.1.0

   normalize-path@3.0.0: {}

+  npm-run-all@4.1.5:
+    dependencies:
+      ansi-styles: 3.2.1
+      chalk: 2.4.2
+      cross-spawn: 6.0.6
+      memorystream: 0.3.1
+      minimatch: 3.1.2
+      pidtree: 0.3.1
+      read-pkg: 3.0.0
+      shell-quote: 1.8.2
+      string.prototype.padend: 3.1.6
+
   npm-run-path@4.0.1:
     dependencies:
       path-key: 3.1.1
@ -13349,6 +13498,8 @@ snapshots:

   path-is-absolute@1.0.1: {}

+  path-key@2.0.1: {}
+
   path-key@3.1.1: {}

   path-parse@1.0.7: {}
@ -13366,6 +13517,10 @@ snapshots:

   path-to-regexp@0.1.12: {}

+  path-type@3.0.0:
+    dependencies:
+      pify: 3.0.0
+
   path-type@4.0.0: {}

   pathe@1.1.2: {}
@ -13382,6 +13537,8 @@ snapshots:

   picomatch@2.3.1: {}

+  pidtree@0.3.1: {}
+
   pify@3.0.0: {}

   pify@5.0.0: {}
@ -13588,6 +13745,12 @@ snapshots:

   react@19.1.0: {}

+  read-pkg@3.0.0:
+    dependencies:
+      load-json-file: 4.0.0
+      normalize-package-data: 2.5.0
+      path-type: 3.0.0
+
   readable-stream@1.1.14:
     dependencies:
       core-util-is: 1.0.3
@ -13692,6 +13855,12 @@ snapshots:

   resolve-pkg-maps@1.0.0: {}

+  resolve@1.22.10:
+    dependencies:
+      is-core-module: 2.16.1
+      path-parse: 1.0.7
+      supports-preserve-symlinks-flag: 1.0.0
+
   resolve@1.7.1:
     dependencies:
       path-parse: 1.0.7
@ -13818,6 +13987,8 @@ snapshots:
     dependencies:
       parseley: 0.12.1

+  semver@5.7.2: {}
+
   semver@6.3.1: {}

   semver@7.6.3:
@ -13970,10 +14141,16 @@ snapshots:
       '@img/sharp-win32-x64': 0.34.1
     optional: true

+  shebang-command@1.2.0:
+    dependencies:
+      shebang-regex: 1.0.0
+
   shebang-command@2.0.0:
     dependencies:
       shebang-regex: 3.0.0

+  shebang-regex@1.0.0: {}
+
   shebang-regex@3.0.0: {}

   shell-exec@1.0.2: {}
@ -14127,6 +14304,20 @@ snapshots:

   source-map@0.6.1: {}

+  spdx-correct@3.2.0:
+    dependencies:
+      spdx-expression-parse: 3.0.1
+      spdx-license-ids: 3.0.21
+
+  spdx-exceptions@2.5.0: {}
+
+  spdx-expression-parse@3.0.1:
+    dependencies:
+      spdx-exceptions: 2.5.0
+      spdx-license-ids: 3.0.21
+
+  spdx-license-ids@3.0.21: {}
+
   sponge-case@1.0.1:
     dependencies:
       tslib: 2.8.1
@ -14167,6 +14358,13 @@ snapshots:
       emoji-regex: 9.2.2
       strip-ansi: 7.1.0

+  string.prototype.padend@3.1.6:
+    dependencies:
+      call-bind: 1.0.8
+      define-properties: 1.2.1
+      es-abstract: 1.24.0
+      es-object-atoms: 1.1.1
+
   string.prototype.trim@1.2.10:
     dependencies:
       call-bind: 1.0.8
@ -14241,6 +14439,8 @@ snapshots:
       has-flag: 4.0.0
       supports-color: 7.2.0

+  supports-preserve-symlinks-flag@1.0.0: {}
+
   swap-case@2.0.2:
     dependencies:
       tslib: 2.8.1
@ -14590,6 +14790,11 @@ snapshots:

   v8-compile-cache-lib@3.0.1: {}

+  validate-npm-package-license@3.0.4:
+    dependencies:
+      spdx-correct: 3.2.0
+      spdx-expression-parse: 3.0.1
+
   vary@1.1.2: {}

   vaul@1.1.2(@types/react-dom@19.1.6(@types/react@19.1.6))(@types/react@19.1.6)(react-dom@19.1.0(react@19.1.0))(react@19.1.0):
@ -14816,6 +15021,10 @@ snapshots:
       gopd: 1.2.0
       has-tostringtag: 1.0.2

+  which@1.3.1:
+    dependencies:
+      isexe: 2.0.0
+
   which@2.0.2:
     dependencies:
       isexe: 2.0.0