fix: refactor config

commit 393f704e52
parent abd399aacd
2024-12-31 23:56:00 +08:00
56 changed files with 274 additions and 536 deletions


@@ -1,14 +0,0 @@
# Server
DATABASE_URL="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"
BETTERSTACK_API_KEY=""
BETTERSTACK_URL=""
FLAGS_SECRET=""
ARCJET_KEY=""
SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# Client
NEXT_PUBLIC_APP_URL="http://localhost:3000"
NEXT_PUBLIC_WEB_URL="http://localhost:3001"
NEXT_PUBLIC_DOCS_URL="http://localhost:3004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://webui.konobangu.com"


@@ -1,15 +0,0 @@
# Server
BETTER_AUTH_SECRET=""
DATABASE_URL=""
BETTERSTACK_API_KEY=""
BETTERSTACK_URL=""
FLAGS_SECRET=""
ARCJET_KEY=""
SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# Client
NEXT_PUBLIC_APP_URL="http://localhost:3000"
NEXT_PUBLIC_WEB_URL="http://localhost:3001"
NEXT_PUBLIC_DOCS_URL="http://localhost:3004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="http://localhost:3000"

apps/api/.gitignore

@@ -1,45 +0,0 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
.env*.local
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts
# prisma
.env
# react.email
.react-email
# Sentry
.sentryclirc

Binary file not shown (before: 216 B).

@@ -1,29 +0,0 @@
'use client';
import { Button } from '@konobangu/design-system/components/ui/button';
import { fonts } from '@konobangu/design-system/lib/fonts';
import { captureException } from '@sentry/nextjs';
import type NextError from 'next/error';
import { useEffect } from 'react';
type GlobalErrorProperties = {
readonly error: NextError & { digest?: string };
readonly reset: () => void;
};
const GlobalError = ({ error, reset }: GlobalErrorProperties) => {
useEffect(() => {
captureException(error);
}, [error]);
return (
<html lang="en" className={fonts}>
<body>
<h1>Oops, something went wrong</h1>
<Button onClick={() => reset()}>Try again</Button>
</body>
</html>
);
};
export default GlobalError;

Binary file not shown (before: 96 B).

@@ -1,13 +0,0 @@
import type { ReactNode } from 'react';
type RootLayoutProperties = {
readonly children: ReactNode;
};
const RootLayout = ({ children }: RootLayoutProperties) => (
<html lang="en">
<body>{children}</body>
</html>
);
export default RootLayout;

Binary file not shown (before: 57 KiB).

@@ -1,3 +0,0 @@
import { initializeSentry } from '@konobangu/next-config/instrumentation';
export const register = initializeSentry();


@@ -1,15 +0,0 @@
import { env } from '@konobangu/env';
import { config, withAnalyzer, withSentry } from '@konobangu/next-config';
import type { NextConfig } from 'next';
let nextConfig: NextConfig = { ...config };
if (env.VERCEL) {
nextConfig = withSentry(nextConfig);
}
if (env.ANALYZE === 'true') {
nextConfig = withAnalyzer(nextConfig);
}
export default nextConfig;


@@ -1,37 +0,0 @@
{
"name": "api",
"private": true,
"scripts": {
"dev": "concurrently \"pnpm:next\"",
"next": "next dev -p 3002 --turbopack",
"build": "next build",
"start": "next start",
"analyze": "ANALYZE=true pnpm build",
"clean": "git clean -xdf .cache .turbo dist node_modules",
"typecheck": "tsc --noEmit --emitDeclarationOnly false"
},
"dependencies": {
"@konobangu/analytics": "workspace:*",
"@konobangu/auth": "workspace:*",
"@konobangu/database": "workspace:*",
"@konobangu/design-system": "workspace:*",
"@konobangu/env": "workspace:*",
"@konobangu/next-config": "workspace:*",
"@konobangu/observability": "workspace:*",
"@sentry/nextjs": "^8.43.0",
"import-in-the-middle": "^1.11.3",
"next": "^15.1.3",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"require-in-the-middle": "^7.4.0",
"svix": "^1.43.0"
},
"devDependencies": {
"@konobangu/typescript-config": "workspace:*",
"@types/node": "22.10.1",
"@types/react": "19.0.1",
"@types/react-dom": "19.0.2",
"concurrently": "^9.1.0",
"typescript": "^5.7.2"
}
}


@@ -1,34 +0,0 @@
/*
* This file configures the initialization of Sentry on the client.
* The config you add here will be used whenever a users loads a page in their browser.
* https://docs.sentry.io/platforms/javascript/guides/nextjs/
*/
import { init, replayIntegration } from '@sentry/nextjs';
init({
dsn: process.env.NEXT_PUBLIC_SENTRY_DSN,
// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
replaysOnErrorSampleRate: 1,
/*
* This sets the sample rate to be 10%. You may want this to be 100% while
* in development and sample at a lower rate in production
*/
replaysSessionSampleRate: 0.1,
// You can remove this option if you're not planning to use the Sentry Session Replay feature:
integrations: [
replayIntegration({
// Additional Replay configuration goes in here, for example:
maskAllText: true,
blockAllMedia: true,
}),
],
});


@@ -1,17 +0,0 @@
{
"extends": "@konobangu/typescript-config/nextjs.json",
"compilerOptions": {
"baseUrl": ".",
"paths": {
"@/*": ["./*"],
"@konobangu/*": ["../../packages/*"]
}
},
"include": [
"next-env.d.ts",
"next.config.ts",
"**/*.ts",
"**/*.tsx",
".next/types/**/*.ts"
]
}


@@ -23,7 +23,7 @@ SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# Client
NEXT_PUBLIC_APP_URL="http://localhost:3000"
NEXT_PUBLIC_WEB_URL="http://localhost:3001"
NEXT_PUBLIC_DOCS_URL="http://localhost:3004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://webui.konobangu.com"
NEXT_PUBLIC_APP_URL="http://localhost:5000"
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"


@@ -23,7 +23,7 @@ SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# WEBUI
NEXT_PUBLIC_APP_URL="http://localhost:3000"
NEXT_PUBLIC_WEB_URL="http://localhost:3001"
NEXT_PUBLIC_DOCS_URL="http://localhost:3004"
NEXT_PUBLIC_APP_URL="http://localhost:5000"
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"


@@ -2,7 +2,7 @@
"name": "app",
"private": true,
"scripts": {
"dev": "next dev -p 3000 --turbopack",
"dev": "next dev -p 5000 --turbopack",
"build": "next build",
"start": "next start",
"analyze": "ANALYZE=true pnpm build",


@@ -28,11 +28,11 @@ Step 2. Go to the docs are located (where you can find `mint.json`) and run the
mintlify dev
```
The documentation website is now available at `http://localhost:3000`.
The documentation website is now available at `http://localhost:5000`.
### Custom Ports
Mintlify uses port 3000 by default. You can use the `--port` flag to customize the port Mintlify runs on. For example, use this command to run in port 3333:
Mintlify uses port 5000 by default. You can use the `--port` flag to customize the port Mintlify runs on. For example, use this command to run in port 3333:
```bash
mintlify dev --port 3333
@@ -41,7 +41,7 @@ mintlify dev --port 3333
You will see an error like this if you try to run Mintlify in a port that's already taken:
```md
Error: listen EADDRINUSE: address already in use :::3000
Error: listen EADDRINUSE: address already in use :::5000
```
## Mintlify Versions


@@ -2,7 +2,7 @@
"name": "docs",
"private": true,
"scripts": {
"dev": "npx --yes mintlify dev --port 3004",
"dev": "npx --yes mintlify dev --port 5004",
"lint": "npx --yes mintlify broken-links"
},
"devDependencies": {


@@ -4,7 +4,7 @@
"private": true,
"scripts": {
"build": "email build",
"dev": "email dev --port 3003",
"dev": "email dev --port 5003",
"export": "email export",
"clean": "git clean -xdf .cache .turbo dist node_modules",
"typecheck": "tsc --noEmit --emitDeclarationOnly false"


@@ -1,2 +1,2 @@
^https://webui.konobangu.com/*** http://127.0.0.1:3000/$1
^wss://webui.konobangu.com/*** ws://127.0.0.1:3000/$1
^https://konobangu.com/*** http://127.0.0.1:5000/$1
^wss://konobangu.com/*** ws://127.0.0.1:5000/$1


@@ -1,9 +1,7 @@
{
"name": "Konobangu Recorder",
"dockerComposeFile": "docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
"forwardPorts": [
3001
]
}
"name": "Konobangu Recorder",
"dockerComposeFile": "docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
"forwardPorts": [5001]
}


@@ -15,7 +15,7 @@ required-features = []
[dependencies]
quirks_path = { path = "../../packages/quirks-path" }
torrent = { path = "../../packages/torrent" }
dlsignal = { path = "../../packages/dlsignal" }
loco-rs = { version = "0.13" }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
@@ -29,8 +29,9 @@ sea-orm = { version = "1", features = [
"sqlx-postgres",
"runtime-tokio-rustls",
"macros",
"debug-print"
"debug-print",
] }
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
axum = "0.7.9"
uuid = { version = "1.6.0", features = ["v4"] }


@@ -34,7 +34,7 @@ pub trait AppContextExt {
}
fn get_auth_service(&self) -> &AppAuthService {
&AppAuthService::app_instance()
AppAuthService::app_instance()
}
}


@@ -93,7 +93,7 @@ impl AuthService for OidcAuthService {
let token_data = self.authorizer.check_auth(&token).await?;
let claims = token_data.claims;
if !claims.sub.as_deref().is_some_and(|s| !s.trim().is_empty()) {
if claims.sub.as_deref().is_none_or(|s| s.trim().is_empty()) {
return Err(AuthError::OidcSubMissingError);
}
if !claims.contains_audience(&config.audience) {
@@ -103,7 +103,7 @@ impl AuthService for OidcAuthService {
let found_scopes = claims.scopes().collect::<HashSet<_>>();
if !expected_scopes
.iter()
.all(|es| found_scopes.contains(&es as &str))
.all(|es| found_scopes.contains(es as &str))
{
return Err(AuthError::OidcExtraScopesMatchError {
expected: expected_scopes.iter().join(","),
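
The subject check above swaps a double negation for `Option::is_none_or`, available on stable Rust from 1.82. A self-contained sketch of the equivalence (the `sub_is_missing` helper is hypothetical, not part of the crate):

```rust
// Standalone check of the rewrite above: `!opt.is_some_and(|s| !p(s))`
// is equivalent to `opt.is_none_or(|s| p(s))` (De Morgan).
// `sub_is_missing` is a hypothetical helper, not part of the crate.
fn sub_is_missing(sub: Option<&str>) -> bool {
    sub.is_none_or(|s| s.trim().is_empty())
}

fn main() {
    assert!(sub_is_missing(None)); // no subject claim at all
    assert!(sub_is_missing(Some("   "))); // whitespace-only subject
    assert!(!sub_is_missing(Some("user-123"))); // a real subject passes
    println!("ok");
}
```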


@@ -107,7 +107,7 @@ impl Initializer for AppAuthServiceInitializer {
let service = AppAuthService::from_conf(auth_conf)
.await
.map_err(|e| loco_rs::Error::wrap(e))?;
.map_err(loco_rs::Error::wrap)?;
APP_AUTH_SERVICE.get_or_init(|| service);


@@ -1,12 +1,18 @@
use figment::{
providers::{Format, Json, Yaml},
Figment,
};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::{auth::AppAuthConfig, dal::config::AppDalConfig, extract::mikan::AppMikanConfig};
const DEFAULT_APP_SETTINGS_MIXIN: &str = include_str!("./settings_mixin.yaml");
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AppConfig {
pub auth: AppAuthConfig,
pub dal: Option<AppDalConfig>,
pub mikan: Option<AppMikanConfig>,
pub dal: AppDalConfig,
pub mikan: AppMikanConfig,
}
pub fn deserialize_key_path_from_json_value<T: DeserializeOwned>(
@@ -42,10 +48,19 @@ pub trait AppConfigExt {
fn get_root_conf(&self) -> &loco_rs::config::Config;
fn get_app_conf(&self) -> loco_rs::Result<AppConfig> {
Ok(
deserialize_key_path_from_app_config(self.get_root_conf(), &[])?
.expect("app config must be present"),
)
let settings_str = self
.get_root_conf()
.settings
.as_ref()
.map(serde_json::to_string)
.unwrap_or_else(|| Ok(String::new()))?;
let app_config = Figment::from(Json::string(&settings_str))
.merge(Yaml::string(DEFAULT_APP_SETTINGS_MIXIN))
.extract()
.map_err(loco_rs::Error::wrap)?;
Ok(app_config)
}
}
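
The new `get_app_conf` builds the typed config by layering the loco `settings` block (serialized to JSON) with the bundled YAML mixin. A minimal layering sketch, assuming figment 0.10 with the `json` and `yaml` features and using illustrative keys; note that figment's `merge` gives precedence to the rightmost provider, so conflicting keys from the mixin win here:

```rust
// A minimal layering sketch (assuming figment = "0.10" with the "json" and
// "yaml" features, plus serde); the keys here are illustrative, not the
// crate's real settings.
use figment::{
    providers::{Format, Json, Yaml},
    Figment,
};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct DalConfig {
    data_dir: String,
}

fn main() -> Result<(), figment::Error> {
    let settings_json = r#"{ "dal": { "data_dir": "/srv/data" } }"#; // stand-in for loco's `settings`
    let mixin_yaml = "dal:\n  data_dir: ./data\n"; // stand-in for settings_mixin.yaml

    // `merge` gives precedence to the rightmost provider, so conflicting keys
    // from the YAML mixin override the JSON settings layer here.
    let dal: DalConfig = Figment::from(Json::string(settings_json))
        .merge(Yaml::string(mixin_yaml))
        .extract_inner("dal")?;

    println!("{dal:?}"); // DalConfig { data_dir: "./data" }
    Ok(())
}
```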


@@ -0,0 +1,12 @@
dal:
data_dir: ./data
mikan:
http_client:
exponential_backoff_max_retries: 3
leaky_bucket_max_tokens: 3
leaky_bucket_initial_tokens: 0
leaky_bucket_refill_tokens: 1
leaky_bucket_refill_interval: 500
user_agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0"
base_url: "https://mikanani.me/"
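
For reference, these rate-limit defaults map onto the leaky-bucket builder roughly as follows; a hedged sketch assuming the `leaky-bucket` crate with a tokio runtime, and assuming the interval is in milliseconds (matching the 500 ms default the old client code used):

```rust
// A rough translation of the mixin's rate-limit defaults into the
// leaky-bucket crate's builder (interval assumed to be milliseconds).
use std::time::Duration;

use leaky_bucket::RateLimiter;

#[tokio::main]
async fn main() {
    let limiter = RateLimiter::builder()
        .max(3) // leaky_bucket_max_tokens
        .initial(0) // leaky_bucket_initial_tokens
        .refill(1) // leaky_bucket_refill_tokens
        .interval(Duration::from_millis(500)) // leaky_bucket_refill_interval
        .build();

    // Each request waits for a token first, so bursts are capped at 3 and the
    // steady rate works out to roughly two requests per second.
    limiter.acquire_one().await;
    println!("token acquired");
}
```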


@@ -194,7 +194,7 @@ impl Initializer for AppDalInitalizer {
let config = &app_context.config;
let app_dal_conf = config.get_app_conf()?.dal;
APP_DAL_CLIENT.get_or_init(|| AppDalClient::new(app_dal_conf.unwrap_or_default()));
APP_DAL_CLIENT.get_or_init(|| AppDalClient::new(app_dal_conf));
Ok(())
}


@@ -3,7 +3,7 @@ use std::ops::Deref;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use super::{AppMikanConfig, MIKAN_BASE_URL};
use super::AppMikanConfig;
use crate::{config::AppConfigExt, fetch::HttpClient};
static APP_MIKAN_CLIENT: OnceCell<AppMikanClient> = OnceCell::new();
@@ -14,12 +14,10 @@ pub struct AppMikanClient {
}
impl AppMikanClient {
pub fn new(mut config: AppMikanConfig) -> loco_rs::Result<Self> {
pub fn new(config: AppMikanConfig) -> loco_rs::Result<Self> {
let http_client =
HttpClient::new(config.http_client.take()).map_err(loco_rs::Error::wrap)?;
let base_url = config
.base_url
.unwrap_or_else(|| String::from(MIKAN_BASE_URL));
HttpClient::from_config(config.http_client).map_err(loco_rs::Error::wrap)?;
let base_url = config.base_url;
Ok(Self {
http_client,
base_url,
@@ -55,7 +53,7 @@ impl Initializer for AppMikanClientInitializer {
async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
let config = &app_context.config;
let app_mikan_conf = config.get_app_conf()?.mikan.unwrap_or_default();
let app_mikan_conf = config.get_app_conf()?.mikan;
APP_MIKAN_CLIENT.get_or_try_init(|| AppMikanClient::new(app_mikan_conf))?;


@@ -2,8 +2,8 @@ use serde::{Deserialize, Serialize};
use crate::fetch::HttpClientConfig;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct AppMikanConfig {
pub http_client: Option<HttpClientConfig>,
pub base_url: Option<String>,
pub http_client: HttpClientConfig,
pub base_url: String,
}


@@ -1,17 +1,17 @@
use std::ops::Deref;
use chrono::DateTime;
use dlsignal::core::BITTORRENT_MIME_TYPE;
use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
use torrent::core::BITTORRENT_MIME_TYPE;
use url::Url;
use super::{
web_parser::{parse_mikan_episode_id_from_homepage, MikanEpisodeHomepage},
AppMikanClient,
};
use crate::{extract::errors::ParseError, fetch::bytes::download_bytes_with_client};
use crate::{extract::errors::ParseError, fetch::bytes::fetch_bytes};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanRssItem {
@@ -228,7 +228,7 @@ pub async fn parse_mikan_rss_channel_from_rss_link(
url: impl IntoUrl,
) -> eyre::Result<MikanRssChannel> {
let http_client = client.map(|s| s.deref());
let bytes = download_bytes_with_client(http_client, url.as_str()).await?;
let bytes = fetch_bytes(http_client, url.as_str()).await?;
let channel = rss::Channel::read_from(&bytes[..])?;
@@ -297,7 +297,7 @@ pub async fn parse_mikan_rss_channel_from_rss_link(
mod tests {
use std::assert_matches::assert_matches;
use torrent::core::BITTORRENT_MIME_TYPE;
use dlsignal::core::BITTORRENT_MIME_TYPE;
use crate::extract::mikan::{
parse_mikan_rss_channel_from_rss_link, MikanBangumiAggregationRssChannel,


@@ -18,7 +18,7 @@ use crate::{
app::AppContextExt,
dal::DalContentCategory,
extract::html::parse_style_attr,
fetch::{html::download_html_with_client, image::download_image_with_client},
fetch::{html::fetch_html, image::fetch_image},
models::subscribers,
};
@@ -95,7 +95,7 @@ pub async fn parse_mikan_bangumi_poster_from_origin_poster_src(
origin_poster_src: Url,
) -> eyre::Result<MikanBangumiPosterMeta> {
let http_client = client.map(|s| s.deref());
let poster_data = download_image_with_client(http_client, origin_poster_src.clone()).await?;
let poster_data = fetch_image(http_client, origin_poster_src.clone()).await?;
Ok(MikanBangumiPosterMeta {
origin_poster_src,
poster_data: Some(poster_data),
@@ -127,8 +127,7 @@ pub async fn parse_mikan_bangumi_poster_from_origin_poster_src_with_cache(
});
}
let poster_data =
download_image_with_client(Some(mikan_client.deref()), origin_poster_src.clone()).await?;
let poster_data = fetch_image(Some(mikan_client.deref()), origin_poster_src.clone()).await?;
let poster_str = dal_client
.store_object(
@@ -153,7 +152,7 @@ pub async fn parse_mikan_bangumi_meta_from_mikan_homepage(
) -> eyre::Result<MikanBangumiMeta> {
let http_client = client.map(|s| s.deref());
let url_host = url.origin().unicode_serialization();
let content = download_html_with_client(http_client, url.as_str()).await?;
let content = fetch_html(http_client, url.as_str()).await?;
let html = Html::parse_document(&content);
let bangumi_fansubs = html
@@ -276,7 +275,7 @@ pub async fn parse_mikan_episode_meta_from_mikan_homepage(
) -> eyre::Result<MikanEpisodeMeta> {
let http_client = client.map(|s| s.deref());
let url_host = url.origin().unicode_serialization();
let content = download_html_with_client(http_client, url.as_str()).await?;
let content = fetch_html(http_client, url.as_str()).await?;
let html = Html::parse_document(&content);
@@ -401,6 +400,8 @@ pub async fn parse_mikan_episode_meta_from_mikan_homepage(
})
}
pub async fn parse_mikan_bangumis_from_user_home(_client: Option<&AppMikanClient>, _url: Url) {}
#[cfg(test)]
mod test {
use std::assert_matches::assert_matches;


@@ -1,24 +1,11 @@
use bytes::Bytes;
use reqwest::IntoUrl;
use super::{core::DEFAULT_HTTP_CLIENT_USER_AGENT, HttpClient};
use super::HttpClient;
pub async fn download_bytes<T: IntoUrl>(url: T) -> eyre::Result<Bytes> {
let request_client = reqwest::Client::builder()
.user_agent(DEFAULT_HTTP_CLIENT_USER_AGENT)
.build()?;
let bytes = request_client.get(url).send().await?.bytes().await?;
pub async fn fetch_bytes<T: IntoUrl>(client: Option<&HttpClient>, url: T) -> eyre::Result<Bytes> {
let client = client.unwrap_or_default();
let bytes = client.get(url).send().await?.bytes().await?;
Ok(bytes)
}
pub async fn download_bytes_with_client<T: IntoUrl>(
client: Option<&HttpClient>,
url: T,
) -> eyre::Result<Bytes> {
if let Some(client) = client {
let bytes = client.get(url).send().await?.bytes().await?;
Ok(bytes)
} else {
download_bytes(url).await
}
}
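
The `client.unwrap_or_default()` fallback above works because the client module (see the hunk further down) implements `Default` for `&HttpClient`, backed by a `OnceCell`. A self-contained sketch of that idiom with a stand-in `Client` type:

```rust
// Self-contained sketch of the fallback idiom behind `client.unwrap_or_default()`
// (the real crate does this for `HttpClient`; `Client` here is a stand-in).
use once_cell::sync::OnceCell;

#[derive(Debug, Default)]
struct Client;

static DEFAULT_CLIENT: OnceCell<Client> = OnceCell::new();

// Implementing `Default` for the reference type lets
// `Option<&Client>::unwrap_or_default()` hand out a lazily
// initialized, process-wide instance.
impl Default for &Client {
    fn default() -> Self {
        DEFAULT_CLIENT.get_or_init(Client::default)
    }
}

fn effective_client(client: Option<&Client>) -> &Client {
    client.unwrap_or_default()
}

fn main() {
    println!("{:?}", effective_client(None)); // falls back to the shared instance
}
```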


@@ -2,6 +2,7 @@ use std::{ops::Deref, time::Duration};
use axum::http::Extensions;
use leaky_bucket::RateLimiter;
use once_cell::sync::OnceCell;
use reqwest::{ClientBuilder, Request, Response};
use reqwest_middleware::{
ClientBuilder as ClientWithMiddlewareBuilder, ClientWithMiddleware, Next,
@@ -11,7 +12,7 @@ use reqwest_tracing::TracingMiddleware;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use super::DEFAULT_HTTP_CLIENT_USER_AGENT;
use crate::fetch::DEFAULT_HTTP_CLIENT_USER_AGENT;
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
@@ -27,6 +28,7 @@ pub struct HttpClientConfig {
pub struct HttpClient {
client: ClientWithMiddleware,
pub config: HttpClientConfig,
}
impl Deref for HttpClient {
@@ -55,42 +57,73 @@ impl reqwest_middleware::Middleware for RateLimiterMiddleware {
}
impl HttpClient {
pub fn new(config: Option<HttpClientConfig>) -> reqwest::Result<Self> {
let mut config = config.unwrap_or_default();
let retry_policy = ExponentialBackoff::builder()
.build_with_max_retries(config.exponential_backoff_max_retries.take().unwrap_or(3));
let rate_limiter = RateLimiter::builder()
.max(config.leaky_bucket_max_tokens.take().unwrap_or(3) as usize)
.initial(
config
.leaky_bucket_initial_tokens
.take()
.unwrap_or_default() as usize,
)
.refill(config.leaky_bucket_refill_tokens.take().unwrap_or(1) as usize)
.interval(
config
.leaky_bucket_refill_interval
.take()
.unwrap_or_else(|| Duration::from_millis(500)),
)
.build();
pub fn from_config(config: HttpClientConfig) -> reqwest::Result<Self> {
let reqwest_client_builder = ClientBuilder::new().user_agent(
config
.user_agent
.as_deref()
.unwrap_or(DEFAULT_HTTP_CLIENT_USER_AGENT),
);
let client = ClientBuilder::new()
.user_agent(
config
.user_agent
.take()
.unwrap_or_else(|| DEFAULT_HTTP_CLIENT_USER_AGENT.to_owned()),
)
.build()?;
let reqwest_client = reqwest_client_builder.build()?;
let mut reqwest_with_middleware_builder =
ClientWithMiddlewareBuilder::new(reqwest_client).with(TracingMiddleware::default());
if let Some(ref x) = config.exponential_backoff_max_retries {
let retry_policy = ExponentialBackoff::builder().build_with_max_retries(*x);
reqwest_with_middleware_builder = reqwest_with_middleware_builder
.with(RetryTransientMiddleware::new_with_policy(retry_policy));
}
if let (None, None, None, None) = (
config.leaky_bucket_initial_tokens.as_ref(),
config.leaky_bucket_refill_tokens.as_ref(),
config.leaky_bucket_refill_interval.as_ref(),
config.leaky_bucket_max_tokens.as_ref(),
) {
} else {
let mut rate_limiter_builder = RateLimiter::builder();
if let Some(ref x) = config.leaky_bucket_max_tokens {
rate_limiter_builder.max(*x as usize);
}
if let Some(ref x) = config.leaky_bucket_initial_tokens {
rate_limiter_builder.initial(*x as usize);
}
if let Some(ref x) = config.leaky_bucket_refill_tokens {
rate_limiter_builder.refill(*x as usize);
}
if let Some(ref x) = config.leaky_bucket_refill_interval {
rate_limiter_builder.interval(*x);
}
let rate_limiter = rate_limiter_builder.build();
reqwest_with_middleware_builder =
reqwest_with_middleware_builder.with(RateLimiterMiddleware { rate_limiter });
}
let reqwest_with_middleware = reqwest_with_middleware_builder.build();
Ok(Self {
client: ClientWithMiddlewareBuilder::new(client)
.with(TracingMiddleware::default())
.with(RateLimiterMiddleware { rate_limiter })
.with(RetryTransientMiddleware::new_with_policy(retry_policy))
.build(),
client: reqwest_with_middleware,
config,
})
}
}
static DEFAULT_HTTP_CLIENT: OnceCell<HttpClient> = OnceCell::new();
impl Default for HttpClient {
fn default() -> Self {
HttpClient::from_config(Default::default()).expect("Failed to create default HttpClient")
}
}
impl Default for &HttpClient {
fn default() -> Self {
DEFAULT_HTTP_CLIENT.get_or_init(HttpClient::default)
}
}
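
The rewritten constructor attaches the tracing middleware unconditionally and adds retry and rate-limit middleware only when the corresponding config fields are present. A trimmed-down sketch of that conditional-builder pattern with just the retry layer (crate names as in the hunk; versions and the user agent string are assumptions):

```rust
// Trimmed-down sketch of the conditional middleware pattern above, using only
// the retry layer (crates as named in the hunk; versions are assumptions).
use reqwest::ClientBuilder;
use reqwest_middleware::{ClientBuilder as ClientWithMiddlewareBuilder, ClientWithMiddleware};
use reqwest_retry::{policies::ExponentialBackoff, RetryTransientMiddleware};
use reqwest_tracing::TracingMiddleware;

fn build_client(max_retries: Option<u32>) -> reqwest::Result<ClientWithMiddleware> {
    let reqwest_client = ClientBuilder::new().user_agent("example/1.0").build()?;

    // Tracing is always attached; retries only when a limit is configured.
    let mut builder =
        ClientWithMiddlewareBuilder::new(reqwest_client).with(TracingMiddleware::default());
    if let Some(n) = max_retries {
        let policy = ExponentialBackoff::builder().build_with_max_retries(n);
        builder = builder.with(RetryTransientMiddleware::new_with_policy(policy));
    }
    Ok(builder.build())
}

fn main() -> reqwest::Result<()> {
    let _plain = build_client(None)?; // tracing only
    let _retrying = build_client(Some(3))?; // tracing + exponential backoff
    Ok(())
}
```

The same `if let Some(...)` gating extends to the leaky-bucket rate limiter shown in the hunk above.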


@@ -1,23 +1,10 @@
use reqwest::IntoUrl;
use super::{core::DEFAULT_HTTP_CLIENT_USER_AGENT, HttpClient};
use super::HttpClient;
pub async fn fetch_html<T: IntoUrl>(client: Option<&HttpClient>, url: T) -> eyre::Result<String> {
let client = client.unwrap_or_default();
let content = client.get(url).send().await?.text().await?;
pub async fn download_html<U: IntoUrl>(url: U) -> eyre::Result<String> {
let request_client = reqwest::Client::builder()
.user_agent(DEFAULT_HTTP_CLIENT_USER_AGENT)
.build()?;
let content = request_client.get(url).send().await?.text().await?;
Ok(content)
}
pub async fn download_html_with_client<T: IntoUrl>(
client: Option<&HttpClient>,
url: T,
) -> eyre::Result<String> {
if let Some(client) = client {
let content = client.get(url).send().await?.text().await?;
Ok(content)
} else {
download_html(url).await
}
}


@@ -1,18 +1,8 @@
use bytes::Bytes;
use reqwest::IntoUrl;
use super::{
bytes::{download_bytes, download_bytes_with_client},
HttpClient,
};
use super::{bytes::fetch_bytes, HttpClient};
pub async fn download_image<U: IntoUrl>(url: U) -> eyre::Result<Bytes> {
download_bytes(url).await
}
pub async fn download_image_with_client<T: IntoUrl>(
client: Option<&HttpClient>,
url: T,
) -> eyre::Result<Bytes> {
download_bytes_with_client(client, url).await
pub async fn fetch_image<T: IntoUrl>(client: Option<&HttpClient>, url: T) -> eyre::Result<Bytes> {
fetch_bytes(client, url).await
}


@@ -6,6 +6,7 @@ pub mod image;
pub use core::DEFAULT_HTTP_CLIENT_USER_AGENT;
pub use bytes::download_bytes;
pub use bytes::fetch_bytes;
pub use client::{HttpClient, HttpClientConfig};
pub use image::download_image;
pub use html::fetch_html;
pub use image::fetch_image;


@@ -14,11 +14,11 @@ pnpm dev
bun dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
Open [http://localhost:5000](http://localhost:5000) with your browser to see the result.
You can start editing the page by modifying `pages/index.tsx`. The page auto-updates as you edit the file.
[API routes](https://nextjs.org/docs/pages/building-your-application/routing/api-routes) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.ts`.
[API routes](https://nextjs.org/docs/pages/building-your-application/routing/api-routes) can be accessed on [http://localhost:5000/api/hello](http://localhost:5000/api/hello). This endpoint can be edited in `pages/api/hello.ts`.
The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/pages/building-your-application/routing/api-routes) instead of React pages.


@@ -9,7 +9,7 @@ SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# Client
NEXT_PUBLIC_APP_URL="http://localhost:3000"
NEXT_PUBLIC_WEB_URL="http://localhost:3001"
NEXT_PUBLIC_DOCS_URL="http://localhost:3004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://webui.konobangu.com"
NEXT_PUBLIC_APP_URL="http://localhost:5000"
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"


@@ -9,7 +9,7 @@ SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# Client
NEXT_PUBLIC_APP_URL="http://localhost:3000"
NEXT_PUBLIC_WEB_URL="http://localhost:3001"
NEXT_PUBLIC_DOCS_URL="http://localhost:3004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="http://localhost:3000"
NEXT_PUBLIC_APP_URL="http://localhost:5000"
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"


@@ -2,7 +2,7 @@
"name": "web",
"private": true,
"scripts": {
"dev": "next dev -p 3001 --turbopack",
"dev": "next dev -p 5001 --turbopack",
"build": "next build",
"start": "next start",
"analyze": "ANALYZE=true pnpm build",