fix: refactor config

@@ -1,9 +1,7 @@
 {
     "name": "Konobangu Recorder",
     "dockerComposeFile": "docker-compose.yml",
     "service": "app",
     "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
-    "forwardPorts": [
-        3001
-    ]
+    "forwardPorts": [5001]
 }

@@ -15,7 +15,7 @@ required-features = []
 
 [dependencies]
 quirks_path = { path = "../../packages/quirks-path" }
-torrent = { path = "../../packages/torrent" }
+dlsignal = { path = "../../packages/dlsignal" }
 loco-rs = { version = "0.13" }
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"

@@ -29,8 +29,9 @@ sea-orm = { version = "1", features = [
     "sqlx-postgres",
     "runtime-tokio-rustls",
     "macros",
-    "debug-print"
+    "debug-print",
 ] }
 figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
 
 axum = "0.7.9"
+uuid = { version = "1.6.0", features = ["v4"] }

@@ -34,7 +34,7 @@ pub trait AppContextExt {
     }
 
     fn get_auth_service(&self) -> &AppAuthService {
-        &AppAuthService::app_instance()
+        AppAuthService::app_instance()
     }
 }
 

@@ -93,7 +93,7 @@ impl AuthService for OidcAuthService {
 
         let token_data = self.authorizer.check_auth(&token).await?;
         let claims = token_data.claims;
-        if !claims.sub.as_deref().is_some_and(|s| !s.trim().is_empty()) {
+        if claims.sub.as_deref().is_none_or(|s| s.trim().is_empty()) {
             return Err(AuthError::OidcSubMissingError);
         }
         if !claims.contains_audience(&config.audience) {

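Note: the rewritten guard leans on Option::is_none_or, stabilized in Rust 1.82; it reads as "missing, or blank once trimmed" instead of the doubly negated is_some_and form. A quick equivalence check over a few illustrative subject values (not taken from the project):

fn main() {
    for sub in [None, Some("   ".to_string()), Some("user-123".to_string())] {
        let old = !sub.as_deref().is_some_and(|s| !s.trim().is_empty());
        let new = sub.as_deref().is_none_or(|s| s.trim().is_empty());
        // Both forms reject a missing or whitespace-only subject and accept the rest.
        assert_eq!(old, new);
    }
}
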
@@ -103,7 +103,7 @@ impl AuthService for OidcAuthService {
         let found_scopes = claims.scopes().collect::<HashSet<_>>();
         if !expected_scopes
             .iter()
-            .all(|es| found_scopes.contains(&es as &str))
+            .all(|es| found_scopes.contains(es as &str))
         {
             return Err(AuthError::OidcExtraScopesMatchError {
                 expected: expected_scopes.iter().join(","),

@@ -107,7 +107,7 @@ impl Initializer for AppAuthServiceInitializer {
 
         let service = AppAuthService::from_conf(auth_conf)
             .await
-            .map_err(|e| loco_rs::Error::wrap(e))?;
+            .map_err(loco_rs::Error::wrap)?;
 
         APP_AUTH_SERVICE.get_or_init(|| service);
 

@@ -1,12 +1,18 @@
+use figment::{
+    providers::{Format, Json, Yaml},
+    Figment,
+};
 use serde::{de::DeserializeOwned, Deserialize, Serialize};
 
 use crate::{auth::AppAuthConfig, dal::config::AppDalConfig, extract::mikan::AppMikanConfig};
 
+const DEFAULT_APP_SETTINGS_MIXIN: &str = include_str!("./settings_mixin.yaml");
+
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
 pub struct AppConfig {
     pub auth: AppAuthConfig,
-    pub dal: Option<AppDalConfig>,
-    pub mikan: Option<AppMikanConfig>,
+    pub dal: AppDalConfig,
+    pub mikan: AppMikanConfig,
 }
 
 pub fn deserialize_key_path_from_json_value<T: DeserializeOwned>(

@@ -42,10 +48,19 @@ pub trait AppConfigExt {
     fn get_root_conf(&self) -> &loco_rs::config::Config;
 
     fn get_app_conf(&self) -> loco_rs::Result<AppConfig> {
-        Ok(
-            deserialize_key_path_from_app_config(self.get_root_conf(), &[])?
-                .expect("app config must be present"),
-        )
+        let settings_str = self
+            .get_root_conf()
+            .settings
+            .as_ref()
+            .map(serde_json::to_string)
+            .unwrap_or_else(|| Ok(String::new()))?;
+
+        let app_config = Figment::from(Json::string(&settings_str))
+            .merge(Yaml::string(DEFAULT_APP_SETTINGS_MIXIN))
+            .extract()
+            .map_err(loco_rs::Error::wrap)?;
+
+        Ok(app_config)
     }
 }
 

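Note: in Figment, merge() resolves key conflicts in favor of the provider merged in last, while join() keeps values that are already present; get_app_conf therefore layers the bundled YAML mixin against the JSON-encoded loco settings block. A minimal, self-contained sketch of that layering with hypothetical keys:

use figment::{
    providers::{Format, Json, Yaml},
    Figment,
};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Sketch {
    data_dir: String,
    base_url: String,
}

fn main() -> Result<(), figment::Error> {
    // Stand-ins for the loco `settings` block and the settings mixin.
    let settings_json = r#"{ "data_dir": "/srv/data" }"#;
    let mixin_yaml = "data_dir: ./data\nbase_url: https://mikanani.me/\n";

    let sketch: Sketch = Figment::from(Json::string(settings_json))
        .merge(Yaml::string(mixin_yaml))
        .extract()?;

    // merge() lets the YAML mixin win the `data_dir` conflict here;
    // swapping in join() would keep "/srv/data" from the JSON settings.
    println!("{sketch:?}");
    Ok(())
}
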
apps/recorder/src/config/settings_mixin.yaml (new file)
@@ -0,0 +1,12 @@
+dal:
+  data_dir: ./data
+
+mikan:
+  http_client:
+    exponential_backoff_max_retries: 3
+    leaky_bucket_max_tokens: 3
+    leaky_bucket_initial_tokens: 0
+    leaky_bucket_refill_tokens: 1
+    leaky_bucket_refill_interval: 500
+    user_agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0"
+  base_url: "https://mikanani.me/"

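Note: the leaky_bucket_* defaults translate directly into a leaky_bucket::RateLimiter: a bucket of at most 3 tokens that starts empty and refills 1 token every 500 ms, i.e. roughly two Mikan requests per second once the burst allowance is used up. A standalone sketch of that limiter (a tokio runtime is assumed):

use std::time::Duration;

use leaky_bucket::RateLimiter;

#[tokio::main]
async fn main() {
    let limiter = RateLimiter::builder()
        .max(3) // leaky_bucket_max_tokens
        .initial(0) // leaky_bucket_initial_tokens
        .refill(1) // leaky_bucket_refill_tokens
        .interval(Duration::from_millis(500)) // leaky_bucket_refill_interval
        .build();

    for i in 0..3 {
        // Each request waits for a token before it is allowed through.
        limiter.acquire(1).await;
        println!("request {i} allowed");
    }
}
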
@@ -194,7 +194,7 @@ impl Initializer for AppDalInitalizer {
         let config = &app_context.config;
         let app_dal_conf = config.get_app_conf()?.dal;
 
-        APP_DAL_CLIENT.get_or_init(|| AppDalClient::new(app_dal_conf.unwrap_or_default()));
+        APP_DAL_CLIENT.get_or_init(|| AppDalClient::new(app_dal_conf));
 
         Ok(())
     }

@@ -3,7 +3,7 @@ use std::ops::Deref;
 use loco_rs::app::{AppContext, Initializer};
 use once_cell::sync::OnceCell;
 
-use super::{AppMikanConfig, MIKAN_BASE_URL};
+use super::AppMikanConfig;
 use crate::{config::AppConfigExt, fetch::HttpClient};
 
 static APP_MIKAN_CLIENT: OnceCell<AppMikanClient> = OnceCell::new();

@@ -14,12 +14,10 @@ pub struct AppMikanClient {
 }
 
 impl AppMikanClient {
-    pub fn new(mut config: AppMikanConfig) -> loco_rs::Result<Self> {
+    pub fn new(config: AppMikanConfig) -> loco_rs::Result<Self> {
         let http_client =
-            HttpClient::new(config.http_client.take()).map_err(loco_rs::Error::wrap)?;
-        let base_url = config
-            .base_url
-            .unwrap_or_else(|| String::from(MIKAN_BASE_URL));
+            HttpClient::from_config(config.http_client).map_err(loco_rs::Error::wrap)?;
+        let base_url = config.base_url;
         Ok(Self {
             http_client,
             base_url,

@@ -55,7 +53,7 @@ impl Initializer for AppMikanClientInitializer {
 
     async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
         let config = &app_context.config;
-        let app_mikan_conf = config.get_app_conf()?.mikan.unwrap_or_default();
+        let app_mikan_conf = config.get_app_conf()?.mikan;
 
         APP_MIKAN_CLIENT.get_or_try_init(|| AppMikanClient::new(app_mikan_conf))?;
 

@@ -2,8 +2,8 @@ use serde::{Deserialize, Serialize};
 
 use crate::fetch::HttpClientConfig;
 
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct AppMikanConfig {
-    pub http_client: Option<HttpClientConfig>,
-    pub base_url: Option<String>,
+    pub http_client: HttpClientConfig,
+    pub base_url: String,
 }

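Note: with Default gone and the Option wrappers removed, both fields must be present when the merged config is deserialized; a missing key is now a hard error instead of silently becoming None. A reduced sketch of that behaviour, using serde_json and hypothetical struct names:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct MikanSketch {
    http_client: HttpClientSketch,
    base_url: String,
}

#[derive(Debug, Deserialize)]
struct HttpClientSketch {
    user_agent: Option<String>,
}

fn main() {
    // Without `base_url`, deserialization fails up front.
    let err = serde_json::from_str::<MikanSketch>(r#"{ "http_client": {} }"#).unwrap_err();
    println!("{err}");

    // With both keys supplied (as the settings mixin now guarantees), it succeeds;
    // the Option field inside still defaults to None when omitted.
    let ok: MikanSketch =
        serde_json::from_str(r#"{ "http_client": {}, "base_url": "https://mikanani.me/" }"#)
            .unwrap();
    println!("{ok:?}");
}
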
@@ -1,17 +1,17 @@
 use std::ops::Deref;
 
 use chrono::DateTime;
+use dlsignal::core::BITTORRENT_MIME_TYPE;
 use itertools::Itertools;
 use reqwest::IntoUrl;
 use serde::{Deserialize, Serialize};
-use torrent::core::BITTORRENT_MIME_TYPE;
 use url::Url;
 
 use super::{
     web_parser::{parse_mikan_episode_id_from_homepage, MikanEpisodeHomepage},
     AppMikanClient,
 };
-use crate::{extract::errors::ParseError, fetch::bytes::download_bytes_with_client};
+use crate::{extract::errors::ParseError, fetch::bytes::fetch_bytes};
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub struct MikanRssItem {

@@ -228,7 +228,7 @@ pub async fn parse_mikan_rss_channel_from_rss_link(
     url: impl IntoUrl,
 ) -> eyre::Result<MikanRssChannel> {
     let http_client = client.map(|s| s.deref());
-    let bytes = download_bytes_with_client(http_client, url.as_str()).await?;
+    let bytes = fetch_bytes(http_client, url.as_str()).await?;
 
     let channel = rss::Channel::read_from(&bytes[..])?;
 

@@ -297,7 +297,7 @@ pub async fn parse_mikan_rss_channel_from_rss_link(
 mod tests {
     use std::assert_matches::assert_matches;
 
-    use torrent::core::BITTORRENT_MIME_TYPE;
+    use dlsignal::core::BITTORRENT_MIME_TYPE;
 
     use crate::extract::mikan::{
         parse_mikan_rss_channel_from_rss_link, MikanBangumiAggregationRssChannel,

@@ -18,7 +18,7 @@ use crate::{
     app::AppContextExt,
     dal::DalContentCategory,
     extract::html::parse_style_attr,
-    fetch::{html::download_html_with_client, image::download_image_with_client},
+    fetch::{html::fetch_html, image::fetch_image},
     models::subscribers,
 };
 

@@ -95,7 +95,7 @@ pub async fn parse_mikan_bangumi_poster_from_origin_poster_src(
     origin_poster_src: Url,
 ) -> eyre::Result<MikanBangumiPosterMeta> {
     let http_client = client.map(|s| s.deref());
-    let poster_data = download_image_with_client(http_client, origin_poster_src.clone()).await?;
+    let poster_data = fetch_image(http_client, origin_poster_src.clone()).await?;
     Ok(MikanBangumiPosterMeta {
         origin_poster_src,
         poster_data: Some(poster_data),

@@ -127,8 +127,7 @@ pub async fn parse_mikan_bangumi_poster_from_origin_poster_src_with_cache(
         });
     }
 
-    let poster_data =
-        download_image_with_client(Some(mikan_client.deref()), origin_poster_src.clone()).await?;
+    let poster_data = fetch_image(Some(mikan_client.deref()), origin_poster_src.clone()).await?;
 
     let poster_str = dal_client
         .store_object(

@@ -153,7 +152,7 @@ pub async fn parse_mikan_bangumi_meta_from_mikan_homepage(
 ) -> eyre::Result<MikanBangumiMeta> {
     let http_client = client.map(|s| s.deref());
     let url_host = url.origin().unicode_serialization();
-    let content = download_html_with_client(http_client, url.as_str()).await?;
+    let content = fetch_html(http_client, url.as_str()).await?;
     let html = Html::parse_document(&content);
 
     let bangumi_fansubs = html

@@ -276,7 +275,7 @@ pub async fn parse_mikan_episode_meta_from_mikan_homepage(
 ) -> eyre::Result<MikanEpisodeMeta> {
     let http_client = client.map(|s| s.deref());
     let url_host = url.origin().unicode_serialization();
-    let content = download_html_with_client(http_client, url.as_str()).await?;
+    let content = fetch_html(http_client, url.as_str()).await?;
 
     let html = Html::parse_document(&content);
 

@@ -401,6 +400,8 @@ pub async fn parse_mikan_episode_meta_from_mikan_homepage(
     })
 }
 
+pub async fn parse_mikan_bangumis_from_user_home(_client: Option<&AppMikanClient>, _url: Url) {}
+
 #[cfg(test)]
 mod test {
     use std::assert_matches::assert_matches;

@@ -1,24 +1,11 @@
 use bytes::Bytes;
 use reqwest::IntoUrl;
 
-use super::{core::DEFAULT_HTTP_CLIENT_USER_AGENT, HttpClient};
+use super::HttpClient;
 
-pub async fn download_bytes<T: IntoUrl>(url: T) -> eyre::Result<Bytes> {
-    let request_client = reqwest::Client::builder()
-        .user_agent(DEFAULT_HTTP_CLIENT_USER_AGENT)
-        .build()?;
-    let bytes = request_client.get(url).send().await?.bytes().await?;
+pub async fn fetch_bytes<T: IntoUrl>(client: Option<&HttpClient>, url: T) -> eyre::Result<Bytes> {
+    let client = client.unwrap_or_default();
+
+    let bytes = client.get(url).send().await?.bytes().await?;
     Ok(bytes)
 }
-
-pub async fn download_bytes_with_client<T: IntoUrl>(
-    client: Option<&HttpClient>,
-    url: T,
-) -> eyre::Result<Bytes> {
-    if let Some(client) = client {
-        let bytes = client.get(url).send().await?.bytes().await?;
-        Ok(bytes)
-    } else {
-        download_bytes(url).await
-    }
-}

@@ -2,6 +2,7 @@ use std::{ops::Deref, time::Duration};
 
 use axum::http::Extensions;
 use leaky_bucket::RateLimiter;
+use once_cell::sync::OnceCell;
 use reqwest::{ClientBuilder, Request, Response};
 use reqwest_middleware::{
     ClientBuilder as ClientWithMiddlewareBuilder, ClientWithMiddleware, Next,

@@ -11,7 +12,7 @@ use reqwest_tracing::TracingMiddleware;
 use serde::{Deserialize, Serialize};
 use serde_with::serde_as;
 
-use super::DEFAULT_HTTP_CLIENT_USER_AGENT;
+use crate::fetch::DEFAULT_HTTP_CLIENT_USER_AGENT;
 
 #[serde_as]
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]

@@ -27,6 +28,7 @@ pub struct HttpClientConfig {
 
 pub struct HttpClient {
     client: ClientWithMiddleware,
+    pub config: HttpClientConfig,
 }
 
 impl Deref for HttpClient {

@@ -55,42 +57,73 @@ impl reqwest_middleware::Middleware for RateLimiterMiddleware {
 }
 
 impl HttpClient {
-    pub fn new(config: Option<HttpClientConfig>) -> reqwest::Result<Self> {
-        let mut config = config.unwrap_or_default();
-        let retry_policy = ExponentialBackoff::builder()
-            .build_with_max_retries(config.exponential_backoff_max_retries.take().unwrap_or(3));
-        let rate_limiter = RateLimiter::builder()
-            .max(config.leaky_bucket_max_tokens.take().unwrap_or(3) as usize)
-            .initial(
-                config
-                    .leaky_bucket_initial_tokens
-                    .take()
-                    .unwrap_or_default() as usize,
-            )
-            .refill(config.leaky_bucket_refill_tokens.take().unwrap_or(1) as usize)
-            .interval(
-                config
-                    .leaky_bucket_refill_interval
-                    .take()
-                    .unwrap_or_else(|| Duration::from_millis(500)),
-            )
-            .build();
+    pub fn from_config(config: HttpClientConfig) -> reqwest::Result<Self> {
+        let reqwest_client_builder = ClientBuilder::new().user_agent(
+            config
+                .user_agent
+                .as_deref()
+                .unwrap_or(DEFAULT_HTTP_CLIENT_USER_AGENT),
+        );
 
-        let client = ClientBuilder::new()
-            .user_agent(
-                config
-                    .user_agent
-                    .take()
-                    .unwrap_or_else(|| DEFAULT_HTTP_CLIENT_USER_AGENT.to_owned()),
-            )
-            .build()?;
+        let reqwest_client = reqwest_client_builder.build()?;
+
+        let mut reqwest_with_middleware_builder =
+            ClientWithMiddlewareBuilder::new(reqwest_client).with(TracingMiddleware::default());
+
+        if let Some(ref x) = config.exponential_backoff_max_retries {
+            let retry_policy = ExponentialBackoff::builder().build_with_max_retries(*x);
+
+            reqwest_with_middleware_builder = reqwest_with_middleware_builder
+                .with(RetryTransientMiddleware::new_with_policy(retry_policy));
+        }
+
+        if let (None, None, None, None) = (
+            config.leaky_bucket_initial_tokens.as_ref(),
+            config.leaky_bucket_refill_tokens.as_ref(),
+            config.leaky_bucket_refill_interval.as_ref(),
+            config.leaky_bucket_max_tokens.as_ref(),
+        ) {
+        } else {
+            let mut rate_limiter_builder = RateLimiter::builder();
+
+            if let Some(ref x) = config.leaky_bucket_max_tokens {
+                rate_limiter_builder.max(*x as usize);
+            }
+            if let Some(ref x) = config.leaky_bucket_initial_tokens {
+                rate_limiter_builder.initial(*x as usize);
+            }
+            if let Some(ref x) = config.leaky_bucket_refill_tokens {
+                rate_limiter_builder.refill(*x as usize);
+            }
+            if let Some(ref x) = config.leaky_bucket_refill_interval {
+                rate_limiter_builder.interval(*x);
+            }
+
+            let rate_limiter = rate_limiter_builder.build();
+
+            reqwest_with_middleware_builder =
+                reqwest_with_middleware_builder.with(RateLimiterMiddleware { rate_limiter });
+        }
+
+        let reqwest_with_middleware = reqwest_with_middleware_builder.build();
 
         Ok(Self {
-            client: ClientWithMiddlewareBuilder::new(client)
-                .with(TracingMiddleware::default())
-                .with(RateLimiterMiddleware { rate_limiter })
-                .with(RetryTransientMiddleware::new_with_policy(retry_policy))
-                .build(),
+            client: reqwest_with_middleware,
+            config,
         })
     }
 }
 
+static DEFAULT_HTTP_CLIENT: OnceCell<HttpClient> = OnceCell::new();
+
+impl Default for HttpClient {
+    fn default() -> Self {
+        HttpClient::from_config(Default::default()).expect("Failed to create default HttpClient")
+    }
+}
+
+impl Default for &HttpClient {
+    fn default() -> Self {
+        DEFAULT_HTTP_CLIENT.get_or_init(HttpClient::default)
+    }
+}

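Note: the added Default impls are what let the fetch helpers accept Option<&HttpClient>: Option::unwrap_or_default needs Default for the contained type, and Default for &HttpClient hands back a reference to the process-wide, OnceCell-backed client built lazily on first use. A reduced sketch of the same pattern with a hypothetical Client type:

use once_cell::sync::OnceCell;

#[derive(Debug)]
struct Client {
    user_agent: String,
}

impl Default for Client {
    fn default() -> Self {
        Client { user_agent: "sketch-agent".into() }
    }
}

static DEFAULT_CLIENT: OnceCell<Client> = OnceCell::new();

// `Option::<&Client>::unwrap_or_default()` resolves to this impl,
// so callers passing `None` share one lazily built instance.
impl Default for &Client {
    fn default() -> Self {
        DEFAULT_CLIENT.get_or_init(Client::default)
    }
}

fn resolve(client: Option<&Client>) -> &Client {
    client.unwrap_or_default()
}

fn main() {
    let a = resolve(None);
    let b = resolve(None);
    // Both calls borrow the same static instance.
    assert!(std::ptr::eq(a, b));
    println!("{}", a.user_agent);
}
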
@@ -1,23 +1,10 @@
 use reqwest::IntoUrl;
 
-use super::{core::DEFAULT_HTTP_CLIENT_USER_AGENT, HttpClient};
+use super::HttpClient;
 
-pub async fn download_html<U: IntoUrl>(url: U) -> eyre::Result<String> {
-    let request_client = reqwest::Client::builder()
-        .user_agent(DEFAULT_HTTP_CLIENT_USER_AGENT)
-        .build()?;
-    let content = request_client.get(url).send().await?.text().await?;
+pub async fn fetch_html<T: IntoUrl>(client: Option<&HttpClient>, url: T) -> eyre::Result<String> {
+    let client = client.unwrap_or_default();
+    let content = client.get(url).send().await?.text().await?;
+
     Ok(content)
 }
-
-pub async fn download_html_with_client<T: IntoUrl>(
-    client: Option<&HttpClient>,
-    url: T,
-) -> eyre::Result<String> {
-    if let Some(client) = client {
-        let content = client.get(url).send().await?.text().await?;
-        Ok(content)
-    } else {
-        download_html(url).await
-    }
-}

@@ -1,18 +1,8 @@
 use bytes::Bytes;
 use reqwest::IntoUrl;
 
-use super::{
-    bytes::{download_bytes, download_bytes_with_client},
-    HttpClient,
-};
+use super::{bytes::fetch_bytes, HttpClient};
 
-pub async fn download_image<U: IntoUrl>(url: U) -> eyre::Result<Bytes> {
-    download_bytes(url).await
-}
-
-pub async fn download_image_with_client<T: IntoUrl>(
-    client: Option<&HttpClient>,
-    url: T,
-) -> eyre::Result<Bytes> {
-    download_bytes_with_client(client, url).await
+pub async fn fetch_image<T: IntoUrl>(client: Option<&HttpClient>, url: T) -> eyre::Result<Bytes> {
+    fetch_bytes(client, url).await
 }

@@ -6,6 +6,7 @@ pub mod image;
 
 pub use core::DEFAULT_HTTP_CLIENT_USER_AGENT;
 
-pub use bytes::download_bytes;
+pub use bytes::fetch_bytes;
 pub use client::{HttpClient, HttpClientConfig};
-pub use image::download_image;
+pub use html::fetch_html;
+pub use image::fetch_image;

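Note: after the rename, callers of the re-exported helpers either pass a configured client or pass None to fall back to the shared default; a short usage sketch inside this crate (the demo function and URLs are hypothetical, called from an async context):

use crate::fetch::{fetch_html, HttpClient, HttpClientConfig};

async fn demo() -> eyre::Result<()> {
    // Explicit client built from a (possibly partial) config.
    let client = HttpClient::from_config(HttpClientConfig::default())?;
    let page = fetch_html(Some(&client), "https://mikanani.me/").await?;

    // `None` falls back to the lazily initialized process-wide client.
    let fallback = fetch_html(None, "https://mikanani.me/").await?;

    println!("{} / {} bytes of HTML", page.len(), fallback.len());
    Ok(())
}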