Compare commits

..

119 Commits

Author SHA1 Message Date
9fd3ae6563 feat: basic support rss 2025-06-24 06:37:19 +08:00
cde3361458 feat: add new test resource mikan classic episodes tiny.parquet 2025-06-23 03:07:58 +08:00
f055011b86 feat: add rss feeds and episode enclosure 2025-06-22 01:04:23 +08:00
16429a44b4 fix: fix missing 2025-06-21 03:25:22 +08:00
fe0b7e88e6 feat: classic episodes scraper 2025-06-21 03:21:58 +08:00
28dd9da6ac fix: fix typo 2025-06-20 02:05:23 +08:00
02c16a2972 feat: support optimize images 2025-06-20 01:56:34 +08:00
324427513c refactor: rewrite origin name extractor from regex to nom combinators 2025-06-19 02:37:56 +08:00
c12b9b360a feat: static server support etag 2025-06-18 04:42:33 +08:00
cc06142050 fix: fix middlewares config 2025-06-18 03:09:10 +08:00
6726cafff4 feat: support static server 2025-06-18 02:19:42 +08:00
35312ea1ff fix: fix issues 2025-06-17 02:23:02 +08:00
721eee9c88 fix: fix issues 2025-06-16 08:01:02 +08:00
421f9d0293 feat: task ui & custom filter mutation 2025-06-16 07:56:52 +08:00
7eb4e41708 feat: try views and seaography 2025-06-15 05:02:23 +08:00
a2254bbe80 fix: fix auto accessToken renew 2025-06-15 02:48:48 +08:00
1b5bdadf10 fix: fix tasks 2025-06-14 22:30:58 +08:00
882b29d7a1 feat: task ui basic done 2025-06-13 04:02:01 +08:00
c60f6f511e feat: remove turbo 2025-06-13 00:09:18 +08:00
07955286f1 feat: add tasks manage view 2025-06-12 03:32:18 +08:00
258eeddc74 refactor: refactor graphql 2025-06-12 00:15:26 +08:00
b09e9e6aaa fix: update webui graphql schema 2025-06-11 04:01:00 +08:00
0df371adb7 fix: fix subscription and mikan doppel 2025-05-11 03:41:02 +08:00
8144986a48 fix: fix subscriptions api 2025-05-10 02:31:58 +08:00
d2aab7369d fix: add sync subscription webui and check credential web ui 2025-06-08 00:36:59 +08:00
946d4e8c2c feat: add subscription detail & edit page 2025-06-07 02:50:14 +08:00
0b5f25a263 fix: fix credential 3rd error 2025-06-06 01:58:19 +08:00
c669d66969 fix: just dev-all support windows 2025-06-05 02:44:23 +08:00
082e08e7f4 fcwd: add tui temp 2025-06-04 01:09:18 +08:00
a3fd03d32a refactor: refactor subscriptions 2025-06-03 02:21:49 +08:00
5645645c5f fix: fix table horizontal scroll and collapsed sidebar 2025-05-01 20:49:42 +08:00
ac7d1efb8d feat: support server port reuse 2025-05-31 01:59:04 +08:00
a676061b3e fix: fix testcases 2025-05-29 02:01:36 +08:00
1c34cebbde fix: fix testcases 2025-05-27 01:01:05 +08:00
22a2ce0559 fix: fix testsuite 2025-05-26 02:44:46 +08:00
313b1bf1ba fix: fix credential3rd graphql 2025-05-25 00:04:02 +08:00
66413f92e3 fix: fix credential3rd graphql 2025-05-24 02:32:02 +08:00
0fcbc6bbe9 feat: alter unsafe packages 2025-05-23 02:54:53 +08:00
f1d8318500 fix: fix graphql 2025-05-22 02:11:16 +08:00
b2f327d48f feat: refactor tasks 2025-05-20 01:23:13 +08:00
b772937354 feat: json filter input done mainly 2025-05-19 02:27:23 +08:00
a3b9543d0e refactor: continue 2025-05-16 01:02:17 +08:00
d0a423df9f refactor: continue 2025-05-15 03:03:36 +08:00
8600bf216a refactor: continue 2025-05-14 02:01:59 +08:00
bf270e4e87 refactor: continue 2025-05-13 01:23:59 +08:00
760cb2344e refactor: continue 2025-05-12 08:11:11 +08:00
ed2c1038e6 refactor: refactor subscription 2025-05-11 01:41:11 +08:00
d4bdc677a9 feat: more task system 2025-05-10 02:34:11 +08:00
9d58d961bd feat: add task system 2025-05-09 00:56:26 +08:00
791b75b3af test: add mikan client login test 2025-05-07 02:15:46 +08:00
a7f52fe0eb fix: fix scrape mikan season bangumi list 2025-05-06 02:23:17 +08:00
439353d318 fix: fix some issues 2025-05-05 01:02:58 +08:00
f245a68790 fix: fix some issues 2025-05-04 03:59:59 +08:00
3fe0538468 feature: add new mikan scrapers 2025-05-03 04:23:33 +08:00
dbded94324 feature: rewrite season subscription extractor 2025-05-02 02:23:23 +08:00
4301f1dbab feature: add subscription manage 2025-04-30 01:59:14 +08:00
9fdb778330 feature: add mgraphql codegen 2025-04-29 02:22:06 +08:00
0300d7baf6 feature: add mutation input object transformer 2025-04-28 02:44:16 +08:00
ee1b1ae5e6 refactor: refactor webui 2025-04-26 01:43:23 +08:00
b20f7cd1ad deps: update webui deps 2025-04-25 02:21:20 +08:00
eb8f0be004 refactor: refactor webui structure 2025-04-24 02:23:26 +08:00
68aa13e216 feat: add transformer and refactor graphql guards 2025-04-23 02:57:22 +08:00
2a5c2b18e7 feat: prepare transformers 2025-04-22 03:19:59 +08:00
e64086b7cf fix: fix addrInUse at dev & cursor-point 2025-04-22 01:12:01 +08:00
08946059ad feat: refactor to react for better ai generation 2025-04-21 02:22:28 +08:00
10b17dc66b feat: add zellij tab support 2025-04-17 02:03:21 +08:00
1ff8a311ae feat(downloader): add rqbit impl 2025-04-09 02:26:23 +08:00
2686fa1d76 refactor: split modules 2025-04-08 02:12:06 +08:00
376d2b28d3 refactor: split out testcontainers-rs-ext 2025-04-05 19:51:59 +08:00
a3609696c7 feat: finsih qbit adapter 2025-04-05 14:24:47 +08:00
b0c12acbc6 fix: fix paths 2025-04-05 10:40:48 +08:00
3dfcf2a536 fix: add testing-torrents params 2025-04-05 09:20:51 +08:00
ecb56013a5 fix: temp save 2025-04-05 07:02:47 +08:00
27b52f7fd1 refactor: rewrite qbit downlaoder 2025-04-03 02:22:26 +08:00
234441e6a3 refactor: switch error handle to snafu 2025-04-01 20:53:27 +08:00
011f62829a fix: fix workflows 2025-04-01 03:56:48 +08:00
c34584a215 fix: fix workflows 2025-04-01 03:49:22 +08:00
1fca69fa66 fix: fix testing torrents container 2025-04-01 03:45:56 +08:00
a0fc4c04d9 feat: add testing-torrents 2025-04-01 03:00:29 +08:00
07ac7e3376 fix: do some fix 2025-03-08 03:22:46 +08:00
f94e175082 feat: add replay-stream-tasks pattern support 2025-03-08 00:00:44 +08:00
e66573b315 fix: fix typos 2025-03-06 02:49:15 +08:00
27cdcdef58 refactor: merge playground into webui 2025-03-05 22:53:37 +08:00
383e6340ea feat: add auth to webapi 2025-03-04 23:31:13 +08:00
5a4a4d7e3a fix: remove favicon image padding transparent 2025-03-04 12:28:52 +08:00
6e4c136614 style: rollback tailwindcss to v3 for temp fix 2025-03-04 03:16:06 +08:00
e2fdeaabb2 fix: temp save 2025-03-04 01:09:38 +08:00
408d211f27 refactor: remove useless folders 2025-03-04 01:03:39 +08:00
2844e1fc32 refactor: remove loco-rs deps 2025-02-28 06:14:08 +08:00
a68aab1452 refactor: remove loco-rs 2025-02-28 00:19:40 +08:00
c0707d17bb feat: switch mikan bangumi metas from my bangumi page to stream 2025-02-27 14:32:13 +08:00
6887b2a069 feat: add mikan cookie support 2025-02-26 05:31:28 +08:00
cac0d37e53 repo: fix gitattributes 2025-02-25 02:20:25 +08:00
f327ea29f1 fix: fix mikan rss extractors 2025-02-25 00:03:05 +08:00
5bc5d98823 fix: fix mikan web extractors 2025-02-24 21:38:36 +08:00
09565bd827 feat: add mikan my bangumi page extractor 2025-02-23 23:58:21 +08:00
7adc0582aa feat: add key context for graphql permission error 2025-02-23 16:47:16 +08:00
4f9e74ceb4 feat: add custom types for subscriber id input filter 2025-02-23 15:57:59 +08:00
c2f74dc369 feat: add permission control 2025-02-22 20:26:14 +08:00
ae40a3a7f8 feat: switch to oidc-client-rx 2025-02-21 05:34:30 +08:00
027112db9a style: update recorder api playground styles 2025-01-15 01:22:05 +08:00
9a2a8f029f feat: add assets 2025-01-14 07:30:04 +08:00
877d90d1e2 feat: add oidc and basic support for playground 2025-01-14 07:27:09 +08:00
c6677d414d feat: replace graphql playground to altair 2025-01-12 03:46:28 +08:00
97b7bfb7fb other: temp save 2025-01-11 15:02:04 +08:00
8f76e92804 deps: update deps 2025-01-08 00:49:03 +08:00
2ed2b864b2 Refactor: Extract the quirks_path package as a standalone module and replace eyre with color-eyre. 2025-01-05 23:51:31 +08:00
40cbf86f0f feat: add basic graphql support 2025-01-04 20:38:41 +08:00
caaa5dc0cc refactor: fix database 2025-01-03 05:32:25 +08:00
70932900cd fix: fix qbit torrent test 2025-01-01 07:10:18 +08:00
393f704e52 fix: refactor config 2024-12-31 18:47:42 +08:00
abd399aacd fix: add basic auth and oidc auth 2024-12-31 00:52:44 +08:00
4c6cc1116b fix: fix web build 2024-12-30 06:52:15 +08:00
a4c549e7c3 feat: add basic webui 2024-12-30 06:39:09 +08:00
608a7fb9c6 fix: fix episode collections 2024-12-29 05:32:02 +08:00
4177efc991 fix: fix tests for temp 2024-12-29 03:04:28 +08:00
c8007078c0 fix: fix tests for temp 2024-12-29 00:43:44 +08:00
cd26d5bac4 feat: update recorders 2024-12-28 22:51:34 +08:00
e93a8a0dec feat: support mikan rss links 2024-12-28 07:18:54 +08:00
1599 changed files with 380610 additions and 13607 deletions

View File

@@ -1,34 +1,5 @@
[alias]
recorder = "run -p recorder --bin recorder_cli -- --environment development"
recorder-playground = "run -p recorder --example playground -- --environment development"
recorder-playground = "run -p recorder --example playground -- --environment development"
[build]
rustflags = ["-Zthreads=12", "-Clink-arg=-fuse-ld=lld"]
[target.x86_64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-Zthreads=8", "-Clink-arg=-fuse-ld=lld", "-Zshare-generics=y"]
[target.x86_64-pc-windows-msvc]
linker = "rust-lld.exe"
rustflags = ["-Zthreads=8", "-Zshare-generics=n"]
# NOTE: you must install [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on mac. you can easily do this by installing llvm which includes lld with the "brew" package manager:
# `brew install llvm`
#[target.x86_64-apple-darwin]
#rustflags = [
# "-Zthreads=8",
# "-C",
# "link-arg=-fuse-ld=/usr/local/opt/llvm/bin/ld64.lld",
# "-Zshare-generics=y",
#]
# NOTE: you must install [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on mac. you can easily do this by installing llvm which includes lld with the "brew" package manager:
# `brew install llvm`
#[target.aarch64-apple-darwin]
#rustflags = [
# "-Zthreads=8",
# "-C",
# "link-arg=-fuse-ld=/opt/homebrew/opt/llvm/bin/ld64.lld",
# "-Zshare-generics=y",
#]
rustflags = ["-Zthreads=8", "-Zshare-generics=y"]

10
.editorconfig Normal file
View File

@@ -0,0 +1,10 @@
root = true
[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf
max_line_length = null

1
.gitattributes vendored Normal file
View File

@@ -0,0 +1 @@
**/tests/resources/** linguist-detectable=false

View File

@@ -0,0 +1,36 @@
name: Testing Torrents Container
on:
workflow_dispatch:
env:
REGISTRY: ghcr.io
ORG: dumtruck
PROJECT: konobangu
jobs:
build-container:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to GHCR
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: 'packages/testing-torrents'
file: 'packages/testing-torrents/Dockerfile'
push: true
tags: '${{ env.REGISTRY }}/${{ env.ORG }}/${{ env.PROJECT }}-testing-torrents:latest'
cache-from: type=gha
cache-to: type=gha,mode=max

46
.gitignore vendored
View File

@@ -112,7 +112,7 @@ coverage
# nyc tests coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
# Grunt intermediate dal (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
@@ -127,6 +127,8 @@ build/Release
# Dependency directories
node_modules/
jspm_packages/
.pnp
.pnp.js
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
@@ -155,9 +157,9 @@ web_modules/
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# Local env files
.env.local
.env.*.local
# parcel-bundler cache (https://parceljs.org/)
.cache
@@ -214,6 +216,7 @@ index.d.ts.map
# Added by cargo
/target
/ide-target
!/examples/.gitkeep
/.env
/.env.bk
@@ -222,6 +225,35 @@ index.d.ts.map
/temp
/rustc-ice-*
/crates/recorder/config/test.local.env
**/*.local.yaml
**/*.local.env
# Misc
.DS_Store
*.pem
# Sentry Config File
.env.sentry-build-plugin
# BaseHub
.basehub
# Build Outputs
build
dist
# Turbo
.turbo
# Vercel
.vercel
# Payload default media upload directory
public/media/
public/robots.txt
public/sitemap*.xml
# Custom
/data
patches/*
!patches/.gitkeep

14
.prettierignore Normal file
View File

@@ -0,0 +1,14 @@
**/payload-types.ts
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp
**/docs/**
tsconfig.json

11
.vscode/extensions.json vendored Normal file
View File

@@ -0,0 +1,11 @@
{
"recommendations": [
"biomejs.biome",
"bradlc.vscode-tailwindcss",
"unifiedjs.vscode-mdx",
"mikestead.dotenv",
"christian-kohler.npm-intellisense",
"skellock.just",
"zerotaskx.rust-extension-pack"
]
}

121
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,121 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "debug recorder bin",
"cargo": {
"args": [
"build",
"--bin=recorder_cli",
"--package=recorder",
],
"filter": {
"name": "recorder_cli",
"kind": "bin"
}
},
"args": [
"start",
"--environment",
"development"
],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "debug playground example",
"cargo": {
"args": [
"build",
"--example=playground",
"--package=recorder",
],
"filter": {
"name": "playground",
"kind": "example"
}
},
"args": [
"--environment",
"development"
],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "debug recorder lib",
"cargo": {
"args": [
"test",
"--no-run",
"--test=mod",
"--package=recorder"
],
"filter": {
"name": "mod",
"kind": "test"
}
},
"args": [],
"cwd": "${workspaceFolder}"
},
{
"name": "Next.js: debug server-side",
"type": "node-terminal",
"request": "launch",
"command": "pnpm dev"
},
{
"name": "Next.js: debug client-side (app)",
"type": "chrome",
"request": "launch",
"url": "http://localhost:5000"
},
{
"name": "Next.js: debug client-side (web)",
"type": "chrome",
"request": "launch",
"url": "http://localhost:5001"
},
{
"name": "Next.js: debug client-side (api)",
"type": "chrome",
"request": "launch",
"url": "http://localhost:5002"
},
{
"name": "Next.js: debug client-side (email)",
"type": "chrome",
"request": "launch",
"url": "http://localhost:5003"
},
{
"name": "Next.js: debug client-side (app)",
"type": "chrome",
"request": "launch",
"url": "http://localhost:5004"
},
{
"name": "Next.js: debug full stack",
"type": "node",
"request": "launch",
"program": "${workspaceFolder}/node_modules/.bin/next",
"runtimeArgs": ["--inspect"],
"skipFiles": ["<node_internals>/**"],
"serverReadyAction": {
"action": "debugWithEdge",
"killOnServerStop": true,
"pattern": "- Local:.+(https?://.+)",
"uriFormat": "%s",
"webRoot": "${workspaceFolder}"
}
}
]
}

51
.vscode/settings.json vendored
View File

@@ -1,3 +1,52 @@
{
"rust-analyzer.showUnlinkedFileNotification": false
"npm.packageManager": "pnpm",
"[javascript]": {
"editor.defaultFormatter": "vscode.typescript-language-features",
"editor.formatOnSave": true
},
"[json]": {
"editor.defaultFormatter": "biomejs.biome",
"editor.formatOnSave": true
},
"[jsonc]": {
"editor.defaultFormatter": "biomejs.biome",
"editor.formatOnSave": true
},
"[typescript]": {
"editor.defaultFormatter": "biomejs.biome",
"editor.formatOnSave": true
},
"[typescriptreact]": {
"editor.defaultFormatter": "biomejs.biome",
"editor.formatOnSave": true
},
"editor.codeActionsOnSave": {
"quickfix.biome": "explicit",
"source.organizeImports.biome": "explicit"
},
"emmet.showExpandedAbbreviation": "never",
"prettier.enable": false,
"typescript.tsdk": "node_modules/typescript/lib",
"rust-analyzer.showUnlinkedFileNotification": false,
"sqltools.connections": [
{
"previewLimit": 50,
"server": "localhost",
"port": 5432,
"driver": "PostgreSQL",
"name": "konobangu-dev",
"database": "konobangu",
"username": "konobangu"
}
],
"rust-analyzer.cargo.features": "all",
"rust-analyzer.testExplorer": true
// https://github.com/rust-lang/rust/issues/141540
// "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
// "rust-analyzer.check.extraEnv": {
// "CARGO_TARGET_DIR": "target/rust-analyzer"
// },
// "rust-analyzer.cargo.extraEnv": {
// "CARGO_TARGET_DIR": "target/analyzer"
// }
}

112
.vscode/tasks.json vendored Normal file
View File

@@ -0,0 +1,112 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "dev-all",
"dependsOn": [
"dev-webui",
"dev-recorder",
"dev-proxy",
"dev-codegen-wait",
"dev-deps",
],
"dependsOrder": "parallel",
"group": {
"kind": "build",
"isDefault": false,
},
"presentation": {
"group": "new-group",
"echo": true,
"reveal": "always",
"panel": "shared",
"clear": false
}
},
{
"label": "dev-webui",
"type": "shell",
"command": "just",
"args": [
"dev-webui"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "always",
"focus": true,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-deps",
"type": "shell",
"command": "just",
"args": [
"dev-deps"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-codegen-wait",
"type": "shell",
"command": "just",
"args": [
"dev-codegen-wait"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-recorder",
"type": "shell",
"command": "just",
"args": [
"dev-recorder"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-proxy",
"type": "shell",
"command": "just",
"args": [
"dev-proxy",
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
}
]
}

8278
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,11 +1,90 @@
cargo-features = ["codegen-backend"]
# cargo-features = ["codegen-backend"]
[workspace]
members = [
"crates/quirks_path",
"crates/recorder"
"packages/testing-torrents",
"packages/util",
"packages/util-derive",
"packages/fetch",
"packages/downloader",
"apps/recorder",
"apps/proxy",
]
resolver = "2"
[profile.dev]
#debug = 0
codegen-backend = "cranelift"
debug = 0
# https://github.com/rust-lang/rust/issues/141540
incremental = false
# [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
# codegen-backend = "cranelift"
[workspace.dependencies]
testing-torrents = { path = "./packages/testing-torrents" }
util = { path = "./packages/util" }
util-derive = { path = "./packages/util-derive" }
fetch = { path = "./packages/fetch" }
downloader = { path = "./packages/downloader" }
recorder = { path = "./apps/recorder" }
reqwest = { version = "0.12.20", features = [
"charset",
"http2",
"json",
"macos-system-configuration",
"cookies",
] }
moka = "0.12"
futures = "0.3"
quirks_path = "0.1"
snafu = { version = "0.8", features = ["futures"] }
testcontainers = { version = "0.24" }
testcontainers-modules = { version = "0.12.1" }
testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
serde = { version = "1", features = ["derive"] }
tokio = { version = "1.45.1", features = [
"macros",
"fs",
"rt-multi-thread",
"signal",
] }
serde_json = "1"
async-trait = "0.1"
tracing = "0.1"
url = "2.5.2"
anyhow = "1"
itertools = "0.14"
chrono = "0.4"
bytes = "1"
serde_with = "3"
regex = "1.11"
lazy_static = "1.5"
axum = { version = "0.8.3", features = ["macros"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
axum-extra = { version = "0.10", features = ["typed-header"] }
mockito = { version = "1.6.1" }
convert_case = "0.8"
color-eyre = "0.6.5"
inquire = "0.7.5"
image = "0.25.6"
uuid = { version = "1.6.0", features = ["v4"] }
maplit = "1.0.2"
once_cell = "1.20.2"
rand = "0.9.1"
rust_decimal = "1.37.2"
base64 = "0.22.1"
nom = "8.0.0"
percent-encoding = "2.3.1"
num-traits = "0.2.19"
http = "1.2.0"
async-stream = "0.3.6"
serde_variant = "0.1.3"
tracing-appender = "0.2.3"
clap = "4.5.40"
ipnetwork = "0.21.1"
typed-builder = "0.21.0"
nanoid = "0.4.0"
webp = "0.3.0"
[patch.crates-io]
seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }

View File

@@ -1,3 +1,8 @@
# KONOBUNGU
<h1 align="center">
<img src="./assets/icon.png" height=180>
<br />
<b>Konobangu</b>
<div align="center"><img src="https://img.shields.io/badge/status-work--in--progress-blue" alt="status-badge" /></div>
</h1>
Kono Bangumi?
<p align="center">Kono bangumi?</p>

View File

@@ -0,0 +1,4 @@
---
title: 'Create Plant'
openapi: 'POST /plants'
---

View File

@@ -0,0 +1,4 @@
---
title: 'Delete Plant'
openapi: 'DELETE /plants/{id}'
---

View File

@@ -0,0 +1,4 @@
---
title: 'Get Plants'
openapi: 'GET /plants'
---

View File

@@ -0,0 +1,33 @@
---
title: 'Introduction'
description: 'Example section for showcasing API endpoints'
---
<Note>
If you're not looking to build API reference documentation, you can delete
this section by removing the api-reference folder.
</Note>
## Welcome
There are two ways to build API documentation: [OpenAPI](https://mintlify.com/docs/api-playground/openapi/setup) and [MDX components](https://mintlify.com/docs/api-playground/mdx/configuration). For the starter kit, we are using the following OpenAPI specification.
<Card
title="Plant Store Endpoints"
icon="leaf"
href="https://github.com/mintlify/starter/blob/main/api-reference/openapi.json"
>
View the OpenAPI specification file
</Card>
## Authentication
All API endpoints are authenticated using Bearer tokens and picked up from the specification file.
```json
"security": [
{
"bearerAuth": []
}
]
```

View File

@@ -0,0 +1,195 @@
{
"openapi": "3.0.1",
"info": {
"title": "OpenAPI Plant Store",
"description": "A sample API that uses a plant store as an example to demonstrate features in the OpenAPI specification",
"license": {
"name": "MIT"
},
"version": "1.0.0"
},
"servers": [
{
"url": "http://sandbox.mintlify.com"
}
],
"security": [
{
"bearerAuth": []
}
],
"paths": {
"/plants": {
"get": {
"description": "Returns all plants from the system that the user has access to",
"parameters": [
{
"name": "limit",
"in": "query",
"description": "The maximum number of results to return",
"schema": {
"type": "integer",
"format": "int32"
}
}
],
"responses": {
"200": {
"description": "Plant response",
"content": {
"application/json": {
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Plant"
}
}
}
}
},
"400": {
"description": "Unexpected error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Error"
}
}
}
}
}
},
"post": {
"description": "Creates a new plant in the store",
"requestBody": {
"description": "Plant to add to the store",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NewPlant"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "plant response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Plant"
}
}
}
},
"400": {
"description": "unexpected error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Error"
}
}
}
}
}
}
},
"/plants/{id}": {
"delete": {
"description": "Deletes a single plant based on the ID supplied",
"parameters": [
{
"name": "id",
"in": "path",
"description": "ID of plant to delete",
"required": true,
"schema": {
"type": "integer",
"format": "int64"
}
}
],
"responses": {
"204": {
"description": "Plant deleted",
"content": {}
},
"400": {
"description": "unexpected error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Error"
}
}
}
}
}
}
}
},
"components": {
"schemas": {
"Plant": {
"required": [
"name"
],
"type": "object",
"properties": {
"name": {
"description": "The name of the plant",
"type": "string"
},
"tag": {
"description": "Tag to specify the type",
"type": "string"
}
}
},
"NewPlant": {
"allOf": [
{
"$ref": "#/components/schemas/Plant"
},
{
"required": [
"id"
],
"type": "object",
"properties": {
"id": {
"description": "Identification number of the plant",
"type": "integer",
"format": "int64"
}
}
}
]
},
"Error": {
"required": [
"error",
"message"
],
"type": "object",
"properties": {
"error": {
"type": "integer",
"format": "int32"
},
"message": {
"type": "string"
}
}
}
},
"securitySchemes": {
"bearerAuth": {
"type": "http",
"scheme": "bearer"
}
}
}
}

98
apps/docs/development.mdx Normal file
View File

@@ -0,0 +1,98 @@
---
title: 'Development'
description: 'Learn how to preview changes locally'
---
<Info>
**Prerequisite** You should have installed Node.js (version 18.10.0 or
higher).
</Info>
Step 1. Install Mintlify on your OS:
<CodeGroup>
```bash npm
npm i -g mintlify
```
```bash yarn
yarn global add mintlify
```
</CodeGroup>
Step 2. Go to where the docs are located (where you can find `mint.json`) and run the following command:
```bash
mintlify dev
```
The documentation website is now available at `http://localhost:5000`.
### Custom Ports
Mintlify uses port 5000 by default. You can use the `--port` flag to customize the port Mintlify runs on. For example, use this command to run in port 3333:
```bash
mintlify dev --port 3333
```
You will see an error like this if you try to run Mintlify in a port that's already taken:
```md
Error: listen EADDRINUSE: address already in use :::5000
```
## Mintlify Versions
Each CLI is linked to a specific version of Mintlify. Please update the CLI if your local website looks different than production.
<CodeGroup>
```bash npm
npm i -g mintlify@latest
```
```bash yarn
yarn global upgrade mintlify
```
</CodeGroup>
## Deployment
<Tip>
Unlimited editors available under the [Startup
Plan](https://mintlify.com/pricing)
</Tip>
You should see the following if the deploy successfully went through:
<Frame>
<img src="/images/checks-passed.png" style={{ borderRadius: '0.5rem' }} />
</Frame>
## Troubleshooting
Here's how to solve some common problems when working with the CLI.
<AccordionGroup>
<Accordion title="Mintlify is not loading">
Update to Node v18. Run `mintlify install` and try again.
</Accordion>
<Accordion title="No such file or directory on Windows">
Go to the `C:/Users/Username/.mintlify/` directory and remove the `mint`
folder. Then open Git Bash in this location and run `git clone
https://github.com/mintlify/mint.git`.
Repeat step 3.
</Accordion>
<Accordion title="Getting an unknown error">
Try navigating to the root of your device and delete the ~/.mintlify folder.
Then run `mintlify dev` again.
</Accordion>
</AccordionGroup>
Curious about what changed in a CLI version? [Check out the CLI changelog.](/changelog/command-line)

View File

@@ -0,0 +1,37 @@
---
title: 'Code Blocks'
description: 'Display inline code and code blocks'
icon: 'code'
---
## Basic
### Inline Code
To denote a `word` or `phrase` as code, enclose it in backticks (`).
```
To denote a `word` or `phrase` as code, enclose it in backticks (`).
```
### Code Block
Use [fenced code blocks](https://www.markdownguide.org/extended-syntax/#fenced-code-blocks) by enclosing code in three backticks and follow the leading ticks with the programming language of your snippet to get syntax highlighting. Optionally, you can also write the name of your code after the programming language.
```java HelloWorld.java
class HelloWorld {
public static void main(String[] args) {
System.out.println("Hello, World!");
}
}
```
````md
```java HelloWorld.java
class HelloWorld {
public static void main(String[] args) {
System.out.println("Hello, World!");
}
}
```
````

View File

@@ -0,0 +1,59 @@
---
title: 'Images and Embeds'
description: 'Add image, video, and other HTML elements'
icon: 'image'
---
<img
style={{ borderRadius: '0.5rem' }}
src="https://mintlify-assets.b-cdn.net/bigbend.jpg"
/>
## Image
### Using Markdown
The [markdown syntax](https://www.markdownguide.org/basic-syntax/#images) lets you add images using the following code
```md
![title](/path/image.jpg)
```
Note that the image file size must be less than 5MB. Otherwise, we recommend hosting on a service like [Cloudinary](https://cloudinary.com/) or [S3](https://aws.amazon.com/s3/). You can then use that URL and embed.
### Using Embeds
To get more customizability with images, you can also use [embeds](/writing-content/embed) to add images
```html
<img height="200" src="/path/image.jpg" />
```
## Embeds and HTML elements
<iframe
width="560"
height="315"
src="https://www.youtube.com/embed/4KzFe50RQkQ"
title="YouTube video player"
frameBorder="0"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
style={{ width: '100%', borderRadius: '0.5rem' }}
></iframe>
<br />
<Tip>
Mintlify supports [HTML tags in Markdown](https://www.markdownguide.org/basic-syntax/#html). This is helpful if you prefer HTML tags to Markdown syntax, and lets you create documentation with infinite flexibility.
</Tip>
### iFrames
Loads another HTML page within the document. Most commonly used for embedding videos.
```html
<iframe src="https://www.youtube.com/embed/4KzFe50RQkQ"> </iframe>
```

View File

@@ -0,0 +1,88 @@
---
title: 'Markdown Syntax'
description: 'Text, title, and styling in standard markdown'
icon: 'text-size'
---
## Titles
Best used for section headers.
```md
## Titles
```
### Subtitles
Best used for subsection headers.
```md
### Subtitles
```
<Tip>
Each **title** and **subtitle** creates an anchor and also shows up on the table of contents on the right.
</Tip>
## Text Formatting
We support most markdown formatting. Simply add `**`, `_`, or `~` around text to format it.
| Style | How to write it | Result |
| ------------- | ----------------- | --------------- |
| Bold | `**bold**` | **bold** |
| Italic | `_italic_` | _italic_ |
| Strikethrough | `~strikethrough~` | ~strikethrough~ |
You can combine these. For example, write `**_bold and italic_**` to get **_bold and italic_** text.
You need to use HTML to write superscript and subscript text. That is, add `<sup>` or `<sub>` around your text.
| Text Size | How to write it | Result |
| ----------- | ------------------------ | ---------------------- |
| Superscript | `<sup>superscript</sup>` | <sup>superscript</sup> |
| Subscript | `<sub>subscript</sub>` | <sub>subscript</sub> |
## Linking to Pages
You can add a link by wrapping text in `[]()`. You would write `[link to google](https://google.com)` to [link to google](https://google.com).
Links to pages in your docs need to be root-relative. Basically, you should include the entire folder path. For example, `[link to text](/writing-content/text)` links to the page "Text" in our components section.
Relative links like `[link to text](../text)` will open slower because we cannot optimize them as easily.
## Blockquotes
### Singleline
To create a blockquote, add a `>` in front of a paragraph.
> Dorothy followed her through many of the beautiful rooms in her castle.
```md
> Dorothy followed her through many of the beautiful rooms in her castle.
```
### Multiline
> Dorothy followed her through many of the beautiful rooms in her castle.
>
> The Witch bade her clean the pots and kettles and sweep the floor and keep the fire fed with wood.
```md
> Dorothy followed her through many of the beautiful rooms in her castle.
>
> The Witch bade her clean the pots and kettles and sweep the floor and keep the fire fed with wood.
```
### LaTeX
Mintlify supports [LaTeX](https://www.latex-project.org) through the Latex component.
<Latex>8 x (vk x H1 - H2) = (0,1)</Latex>
```md
<Latex>8 x (vk x H1 - H2) = (0,1)</Latex>
```

View File

@@ -0,0 +1,66 @@
---
title: 'Navigation'
description: 'The navigation field in mint.json defines the pages that go in the navigation menu'
icon: 'map'
---
The navigation menu is the list of links on every website.
You will likely update `mint.json` every time you add a new page. Pages do not show up automatically.
## Navigation syntax
Our navigation syntax is recursive which means you can make nested navigation groups. You don't need to include `.mdx` in page names.
<CodeGroup>
```json Regular Navigation
"navigation": [
{
"group": "Getting Started",
"pages": ["quickstart"]
}
]
```
```json Nested Navigation
"navigation": [
{
"group": "Getting Started",
"pages": [
"quickstart",
{
"group": "Nested Reference Pages",
"pages": ["nested-reference-page"]
}
]
}
]
```
</CodeGroup>
## Folders
Simply put your MDX files in folders and update the paths in `mint.json`.
For example, to have a page at `https://yoursite.com/your-folder/your-page` you would make a folder called `your-folder` containing an MDX file called `your-page.mdx`.
<Warning>
You cannot use `api` for the name of a folder unless you nest it inside another folder. Mintlify uses Next.js which reserves the top-level `api` folder for internal server calls. A folder name such as `api-reference` would be accepted.
</Warning>
```json Navigation With Folder
"navigation": [
{
"group": "Group Name",
"pages": ["your-folder/your-page"]
}
]
```
## Hidden Pages
MDX files not included in `mint.json` will not show up in the sidebar but are accessible through the search bar and by linking directly to them.

View File

@@ -0,0 +1,110 @@
---
title: Reusable Snippets
description: Reusable, custom snippets to keep content in sync
icon: 'recycle'
---
import SnippetIntro from '/snippets/snippet-intro.mdx';
<SnippetIntro />
## Creating a custom snippet
**Pre-condition**: You must create your snippet file in the `snippets` directory.
<Note>
Any page in the `snippets` directory will be treated as a snippet and will not
be rendered into a standalone page. If you want to create a standalone page
from the snippet, import the snippet into another file and call it as a
component.
</Note>
### Default export
1. Add content to your snippet file that you want to re-use across multiple
locations. Optionally, you can add variables that can be filled in via props
when you import the snippet.
```mdx snippets/my-snippet.mdx
Hello world! This is my content I want to reuse across pages. My keyword of the
day is {word}.
```
<Warning>
The content that you want to reuse must be inside the `snippets` directory in
order for the import to work.
</Warning>
2. Import the snippet into your destination file.
```mdx destination-file.mdx
---
title: My title
description: My Description
---
import MySnippet from '/snippets/path/to/my-snippet.mdx';
## Header
Lorem ipsum dolor sit amet.
<MySnippet word="bananas" />
```
### Reusable variables
1. Export a variable from your snippet file:
```mdx snippets/path/to/custom-variables.mdx
export const myName = 'my name';
export const myObject = { fruit: 'strawberries' };
```
2. Import the snippet from your destination file and use the variable:
```mdx destination-file.mdx
---
title: My title
description: My Description
---
import { myName, myObject } from '/snippets/path/to/custom-variables.mdx';
Hello, my name is {myName} and I like {myObject.fruit}.
```
### Reusable components
1. Inside your snippet file, create a component that takes in props by exporting
your component in the form of an arrow function.
```mdx snippets/custom-component.mdx
export const MyComponent = ({ title }) => (
<div>
<h1>{title}</h1>
<p>... snippet content ...</p>
</div>
);
```
<Warning>
MDX does not compile inside the body of an arrow function. Stick to HTML
syntax when you can or use a default export if you need to use MDX.
</Warning>
2. Import the snippet into your destination file and pass in the props
```mdx destination-file.mdx
---
title: My title
description: My Description
---
import { MyComponent } from '/snippets/custom-component.mdx';
Lorem ipsum dolor sit amet.
<MyComponent title={'Custom title'} />
```

View File

@@ -0,0 +1,318 @@
---
title: 'Global Settings'
description: 'Mintlify gives you complete control over the look and feel of your documentation using the mint.json file'
icon: 'gear'
---
Every Mintlify site needs a `mint.json` file with the core configuration settings. Learn more about the [properties](#properties) below.
## Properties
<ResponseField name="name" type="string" required>
Name of your project. Used for the global title.
Example: `mintlify`
</ResponseField>
<ResponseField name="navigation" type="Navigation[]" required>
An array of groups with all the pages within that group
<Expandable title="Navigation">
<ResponseField name="group" type="string">
The name of the group.
Example: `Settings`
</ResponseField>
<ResponseField name="pages" type="string[]">
The relative paths to the markdown files that will serve as pages.
Example: `["customization", "page"]`
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="logo" type="string or object">
Path to logo image or object with path to "light" and "dark" mode logo images
<Expandable title="Logo">
<ResponseField name="light" type="string">
Path to the logo in light mode
</ResponseField>
<ResponseField name="dark" type="string">
Path to the logo in dark mode
</ResponseField>
<ResponseField name="href" type="string" default="/">
Where clicking on the logo links you to
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="favicon" type="string">
Path to the favicon image
</ResponseField>
<ResponseField name="colors" type="Colors">
Hex color codes for your global theme
<Expandable title="Colors">
<ResponseField name="primary" type="string" required>
The primary color. Used most often for highlighted content, section
    headers, and accents in light mode
</ResponseField>
<ResponseField name="light" type="string">
The primary color for dark mode. Used most often for highlighted
    content, section headers, and accents in dark mode
</ResponseField>
<ResponseField name="dark" type="string">
The primary color for important buttons
</ResponseField>
<ResponseField name="background" type="object">
The color of the background in both light and dark mode
<Expandable title="Object">
<ResponseField name="light" type="string" required>
The hex color code of the background in light mode
</ResponseField>
<ResponseField name="dark" type="string" required>
The hex color code of the background in dark mode
</ResponseField>
</Expandable>
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="topbarLinks" type="TopbarLink[]">
Array of `name`s and `url`s of links you want to include in the topbar
<Expandable title="TopbarLink">
<ResponseField name="name" type="string">
The name of the button.
Example: `Contact us`
</ResponseField>
<ResponseField name="url" type="string">
The url once you click on the button. Example: `https://mintlify.com/contact`
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="topbarCtaButton" type="Call to Action">
<Expandable title="Topbar Call to Action">
<ResponseField name="type" type={'"link" or "github"'} default="link">
Link shows a button. GitHub shows the repo information at the url provided including the number of GitHub stars.
</ResponseField>
<ResponseField name="url" type="string">
If `link`: What the button links to.
If `github`: Link to the repository to load GitHub information from.
</ResponseField>
<ResponseField name="name" type="string">
Text inside the button. Only required if `type` is a `link`.
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="versions" type="string[]">
Array of version names. Only use this if you want to show different versions
of docs with a dropdown in the navigation bar.
</ResponseField>
<ResponseField name="anchors" type="Anchor[]">
An array of the anchors, includes the `icon`, `color`, and `url`.
<Expandable title="Anchor">
<ResponseField name="icon" type="string">
The [Font Awesome](https://fontawesome.com/search?s=brands%2Cduotone) icon used to feature the anchor.
Example: `comments`
</ResponseField>
<ResponseField name="name" type="string">
The name of the anchor label.
Example: `Community`
</ResponseField>
<ResponseField name="url" type="string">
The start of the URL that marks what pages go in the anchor. Generally, this is the name of the folder you put your pages in.
</ResponseField>
<ResponseField name="color" type="string">
The hex color of the anchor icon background. Can also be a gradient if you pass an object with the properties `from` and `to` that are each a hex color.
</ResponseField>
<ResponseField name="version" type="string">
Used if you want to hide an anchor until the correct docs version is selected.
</ResponseField>
<ResponseField name="isDefaultHidden" type="boolean" default="false">
Pass `true` if you want to hide the anchor until you directly link someone to docs inside it.
</ResponseField>
<ResponseField name="iconType" default="duotone" type="string">
One of: "brands", "duotone", "light", "sharp-solid", "solid", or "thin"
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="topAnchor" type="Object">
Override the default configurations for the top-most anchor.
<Expandable title="Object">
<ResponseField name="name" default="Documentation" type="string">
The name of the top-most anchor
</ResponseField>
<ResponseField name="icon" default="book-open" type="string">
Font Awesome icon.
</ResponseField>
<ResponseField name="iconType" default="duotone" type="string">
One of: "brands", "duotone", "light", "sharp-solid", "solid", or "thin"
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="tabs" type="Tabs[]">
An array of navigational tabs.
<Expandable title="Tabs">
<ResponseField name="name" type="string">
The name of the tab label.
</ResponseField>
<ResponseField name="url" type="string">
The start of the URL that marks what pages go in the tab. Generally, this
is the name of the folder you put your pages in.
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="api" type="API">
Configuration for API settings. Learn more about API pages at [API Components](/api-playground/demo).
<Expandable title="API">
<ResponseField name="baseUrl" type="string">
The base url for all API endpoints. If `baseUrl` is an array, it will enable multiple base url
      options that the user can toggle.
</ResponseField>
<ResponseField name="auth" type="Auth">
<Expandable title="Auth">
<ResponseField name="method" type='"bearer" | "basic" | "key"'>
The authentication strategy used for all API endpoints.
</ResponseField>
<ResponseField name="name" type="string">
The name of the authentication parameter used in the API playground.
If method is `basic`, the format should be `[usernameName]:[passwordName]`
</ResponseField>
<ResponseField name="inputPrefix" type="string">
The default value that's designed to be a prefix for the authentication input field.
E.g. an `inputPrefix` of `AuthKey` would prefill the authentication input field with the default value `AuthKey`.
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="playground" type="Playground">
Configurations for the API playground
<Expandable title="Playground">
<ResponseField name="mode" default="show" type='"show" | "simple" | "hide"'>
Whether the playground is showing (`show`), hidden (`hide`), or only displaying the endpoint with no added user interactivity (`simple`).
Learn more at the [playground guides](/api-playground/demo)
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="maintainOrder" type="boolean">
Enabling this flag ensures that key ordering in OpenAPI pages matches the key ordering defined in the OpenAPI file.
<Warning>This behavior will soon be enabled by default, at which point this field will be deprecated.</Warning>
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="openapi" type="string | string[]">
A string or an array of strings of URL(s) or relative path(s) pointing to your
OpenAPI file.
Examples:
<CodeGroup>
```json Absolute
"openapi": "https://example.com/openapi.json"
```
```json Relative
"openapi": "/openapi.json"
```
```json Multiple
"openapi": ["https://example.com/openapi1.json", "/openapi2.json", "/openapi3.json"]
```
</CodeGroup>
</ResponseField>
<ResponseField name="footerSocials" type="FooterSocials">
An object of social media accounts where the key:property pair represents the social media platform and the account url.
Example:
```json
{
"x": "https://x.com/mintlify",
"website": "https://mintlify.com"
}
```
<Expandable title="FooterSocials">
<ResponseField name="[key]" type="string">
One of the following values `website`, `facebook`, `x`, `discord`, `slack`, `github`, `linkedin`, `instagram`, `hacker-news`
Example: `x`
</ResponseField>
<ResponseField name="property" type="string">
The URL to the social platform.
Example: `https://x.com/mintlify`
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="feedback" type="Feedback">
Configurations to enable feedback buttons
<Expandable title="Feedback">
<ResponseField name="suggestEdit" type="boolean" default="false">
Enables a button to allow users to suggest edits via pull requests
</ResponseField>
<ResponseField name="raiseIssue" type="boolean" default="false">
Enables a button to allow users to raise an issue about the documentation
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="modeToggle" type="ModeToggle">
Customize the dark mode toggle.
<Expandable title="ModeToggle">
<ResponseField name="default" type={'"light" or "dark"'}>
Set if you always want to show light or dark mode for new users. When not
set, we default to the same mode as the user's operating system.
</ResponseField>
<ResponseField name="isHidden" type="boolean" default="false">
Set to true to hide the dark/light mode toggle. You can combine `isHidden` with `default` to force your docs to only use light or dark mode. For example:
<CodeGroup>
```json Only Dark Mode
"modeToggle": {
"default": "dark",
"isHidden": true
}
```
```json Only Light Mode
"modeToggle": {
"default": "light",
"isHidden": true
}
```
</CodeGroup>
</ResponseField>
</Expandable>
</ResponseField>
<ResponseField name="backgroundImage" type="string">
A background image to be displayed behind every page. See example with
[Infisical](https://infisical.com/docs) and [FRPC](https://frpc.io).
</ResponseField>

49
apps/docs/favicon.svg Normal file
View File

@@ -0,0 +1,49 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61505C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5914 7.95343 21.1394Z" fill="white"/>
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61505C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5914 7.95343 21.1394Z" fill="url(#paint0_radial_101_2703)"/>
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61505C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5914 7.95343 21.1394Z" fill="black" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61505C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5914 7.95343 21.1394Z" fill="url(#paint1_linear_101_2703)" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M8.68359 10.4755C9.94543 13.63 9.56145 17.5723 7.9354 21.1112C4.89702 21.0957 2.27411 19.4306 1.01347 16.279C-0.248375 13.1245 0.135612 9.18218 1.76165 5.64328C4.80004 5.65883 7.42295 7.32386 8.68359 10.4755Z" stroke="url(#paint2_linear_101_2703)" stroke-opacity="0.05" stroke-width="0.056338"/>
<path d="M7.31038 21.2574C11.3543 20.2215 14.8836 17.3754 16.6285 13.2361C18.3735 9.09671 17.9448 4.58749 15.8598 0.976291C11.8159 2.01214 8.2866 4.85826 6.54167 8.99762C4.79674 13.137 5.2254 17.6462 7.31038 21.2574Z" fill="white"/>
<path d="M7.31038 21.2574C11.3543 20.2215 14.8836 17.3754 16.6285 13.2361C18.3735 9.09671 17.9448 4.58749 15.8598 0.976291C11.8159 2.01214 8.2866 4.85826 6.54167 8.99762C4.79674 13.137 5.2254 17.6462 7.31038 21.2574Z" fill="url(#paint3_radial_101_2703)"/>
<path d="M16.6026 13.2251C14.8642 17.349 11.3512 20.1866 7.32411 21.2248C5.25257 17.624 4.82926 13.1324 6.56764 9.00855C8.30603 4.88472 11.819 2.04706 15.8461 1.00889C17.9176 4.60967 18.3409 9.10131 16.6026 13.2251Z" stroke="url(#paint4_linear_101_2703)" stroke-opacity="0.05" stroke-width="0.056338"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="white"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="url(#paint5_radial_101_2703)"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="black" fill-opacity="0.2" style="mix-blend-mode:hard-light"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="url(#paint6_linear_101_2703)" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M16.5682 22.7874C13.2176 23.9184 9.81361 23.2124 7.2672 21.1975C8.49194 17.9068 11.0444 15.189 14.3959 14.0577C17.7465 12.9266 21.1504 13.6326 23.6968 15.6476C22.4721 18.9383 19.9196 21.656 16.5682 22.7874Z" stroke="url(#paint7_linear_101_2703)" stroke-opacity="0.05" stroke-width="0.056338"/>
<defs>
<radialGradient id="paint0_radial_101_2703" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(-3.00503 15.023) rotate(-10.029) scale(17.9572 17.784)">
<stop stop-color="#00B0BB"/>
<stop offset="1" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint1_linear_101_2703" x1="7.39036" y1="4.81308" x2="1.62975" y2="18.6894" gradientUnits="userSpaceOnUse">
<stop stop-color="#18E299"/>
<stop offset="1"/>
</linearGradient>
<linearGradient id="paint2_linear_101_2703" x1="7.94816" y1="8.01563" x2="1.7612" y2="18.746" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint3_radial_101_2703" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(8.11404 20.8822) rotate(-75.7542) scale(21.6246 23.7772)">
<stop stop-color="#00BBBB"/>
<stop offset="0.712616" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint4_linear_101_2703" x1="7.60205" y1="5.8709" x2="15.5561" y2="16.3719" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint5_radial_101_2703" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(7.84537 21.5181) rotate(-20.3525) scale(18.5603 17.32)">
<stop stop-color="#00B0BB"/>
<stop offset="1" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint6_linear_101_2703" x1="16.8078" y1="13.0071" x2="10.0409" y2="22.9937" gradientUnits="userSpaceOnUse">
<stop stop-color="#00B1BC"/>
<stop offset="1"/>
</linearGradient>
<linearGradient id="paint7_linear_101_2703" x1="16.8078" y1="13.0071" x2="14.1687" y2="23.841" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 5.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 157 KiB

View File

@@ -0,0 +1,161 @@
<svg width="700" height="320" viewBox="0 0 700 320" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_2862_30)">
<rect width="700" height="320" rx="16" fill="url(#paint0_linear_2862_30)"/>
<path d="M311.889 247.3C283.097 247.215 258.226 231.466 246.292 201.629C234.357 171.793 238.02 134.523 253.414 101.112C282.206 101.197 307.077 116.945 319.011 146.782C330.946 176.619 327.283 213.888 311.889 247.3Z" fill="white"/>
<path d="M311.889 247.3C283.097 247.215 258.226 231.466 246.292 201.629C234.357 171.793 238.02 134.523 253.414 101.112C282.206 101.197 307.077 116.945 319.011 146.782C330.946 176.619 327.283 213.888 311.889 247.3Z" fill="url(#paint1_radial_2862_30)"/>
<path d="M311.889 247.3C283.097 247.215 258.226 231.466 246.292 201.629C234.357 171.793 238.02 134.523 253.414 101.112C282.206 101.197 307.077 116.945 319.011 146.782C330.946 176.619 327.283 213.888 311.889 247.3Z" fill="black" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M311.889 247.3C283.097 247.215 258.226 231.466 246.292 201.629C234.357 171.793 238.02 134.523 253.414 101.112C282.206 101.197 307.077 116.945 319.011 146.782C330.946 176.619 327.283 213.888 311.889 247.3Z" fill="url(#paint2_linear_2862_30)" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M311.72 247.034C283.108 246.887 258.409 231.208 246.538 201.531C234.656 171.825 238.271 134.702 253.583 101.377C282.195 101.524 306.894 117.203 318.765 146.88C330.647 176.586 327.031 213.709 311.72 247.034Z" stroke="url(#paint3_linear_2862_30)" stroke-opacity="0.05" stroke-width="0.530516"/>
<path d="M305.839 247.174C343.92 237.419 377.154 210.619 393.585 171.64C410.017 132.661 405.98 90.1988 386.347 56.1934C348.266 65.9477 315.032 92.7486 298.601 131.728C282.169 170.706 286.206 213.168 305.839 247.174Z" fill="white"/>
<path d="M305.839 247.174C343.92 237.419 377.154 210.619 393.585 171.64C410.017 132.661 405.98 90.1988 386.347 56.1934C348.266 65.9477 315.032 92.7486 298.601 131.728C282.169 170.706 286.206 213.168 305.839 247.174Z" fill="url(#paint4_radial_2862_30)"/>
<path d="M393.341 171.537C376.971 210.369 343.89 237.091 305.969 246.867C286.462 212.959 282.476 170.663 298.845 131.831C315.215 92.9978 348.295 66.2765 386.217 56.5004C405.724 90.4077 409.71 132.704 393.341 171.537Z" stroke="url(#paint5_linear_2862_30)" stroke-opacity="0.05" stroke-width="0.530516"/>
<path d="M305.686 246.995C329.749 266.114 361.965 272.832 393.67 262.129C425.376 251.426 449.499 225.691 461.03 194.556C436.967 175.437 404.751 168.719 373.045 179.422C341.34 190.125 317.217 215.86 305.686 246.995Z" fill="white"/>
<path d="M305.686 246.995C329.749 266.114 361.965 272.832 393.67 262.129C425.376 251.426 449.499 225.691 461.03 194.556C436.967 175.437 404.751 168.719 373.045 179.422C341.34 190.125 317.217 215.86 305.686 246.995Z" fill="url(#paint6_radial_2862_30)"/>
<path d="M305.686 246.995C329.749 266.114 361.965 272.832 393.67 262.129C425.376 251.426 449.499 225.691 461.03 194.556C436.967 175.437 404.751 168.719 373.045 179.422C341.34 190.125 317.217 215.86 305.686 246.995Z" fill="black" fill-opacity="0.2" style="mix-blend-mode:hard-light"/>
<path d="M305.686 246.995C329.749 266.114 361.965 272.832 393.67 262.129C425.376 251.426 449.499 225.691 461.03 194.556C436.967 175.437 404.751 168.719 373.045 179.422C341.34 190.125 317.217 215.86 305.686 246.995Z" fill="url(#paint7_linear_2862_30)" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M393.586 261.878C362.034 272.529 329.98 265.88 306.002 246.907C317.534 215.919 341.57 190.327 373.13 179.673C404.681 169.023 436.735 175.671 460.714 194.644C449.181 225.632 425.145 251.224 393.586 261.878Z" stroke="url(#paint8_linear_2862_30)" stroke-opacity="0.05" stroke-width="0.530516"/>
<g opacity="0.8" filter="url(#filter0_f_2862_30)">
<circle cx="660" cy="-60" r="160" fill="#18E244" fill-opacity="0.4"/>
</g>
<g opacity="0.8" filter="url(#filter1_f_2862_30)">
<circle cx="20" cy="213" r="160" fill="#18CAE2" fill-opacity="0.33"/>
</g>
<g opacity="0.8" filter="url(#filter2_f_2862_30)">
<circle cx="660" cy="480" r="160" fill="#18E2B2" fill-opacity="0.52"/>
</g>
<g opacity="0.8" filter="url(#filter3_f_2862_30)">
<circle cx="20" cy="413" r="160" fill="#4018E2" fill-opacity="0.22"/>
</g>
<path opacity="0.2" d="M0 50H700" stroke="url(#paint9_radial_2862_30)" stroke-dasharray="4 4"/>
<path opacity="0.1" d="M0 82H700" stroke="url(#paint10_radial_2862_30)" stroke-dasharray="4 4"/>
<path opacity="0.2" d="M239 0L239 320" stroke="url(#paint11_radial_2862_30)" stroke-dasharray="4 4"/>
<path opacity="0.1" d="M271 0L271 320" stroke="url(#paint12_radial_2862_30)" stroke-dasharray="4 4"/>
<path opacity="0.2" d="M461 0L461 320" stroke="url(#paint13_radial_2862_30)" stroke-dasharray="4 4"/>
<path opacity="0.1" d="M429 0L429 320" stroke="url(#paint14_radial_2862_30)" stroke-dasharray="4 4"/>
<path opacity="0.2" d="M0 271H700" stroke="url(#paint15_radial_2862_30)" stroke-dasharray="4 4"/>
<path opacity="0.1" d="M0 239H700" stroke="url(#paint16_radial_2862_30)" stroke-dasharray="4 4"/>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M0 160H700" stroke="url(#paint17_linear_2862_30)"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.2">
<path d="M511 -1L189 321" stroke="url(#paint18_linear_2862_30)"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.2">
<path d="M511 321L189 -1" stroke="url(#paint19_linear_2862_30)"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<circle cx="350" cy="160" r="111" stroke="white"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<circle cx="350" cy="160" r="79" stroke="white"/>
</g>
</g>
<defs>
<filter id="filter0_f_2862_30" x="260" y="-460" width="800" height="800" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="120" result="effect1_foregroundBlur_2862_30"/>
</filter>
<filter id="filter1_f_2862_30" x="-380" y="-187" width="800" height="800" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="120" result="effect1_foregroundBlur_2862_30"/>
</filter>
<filter id="filter2_f_2862_30" x="260" y="80" width="800" height="800" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="120" result="effect1_foregroundBlur_2862_30"/>
</filter>
<filter id="filter3_f_2862_30" x="-380" y="13" width="800" height="800" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="120" result="effect1_foregroundBlur_2862_30"/>
</filter>
<linearGradient id="paint0_linear_2862_30" x1="1.04308e-05" y1="320" x2="710.784" y2="26.0793" gradientUnits="userSpaceOnUse">
<stop stop-color="#18E299" stop-opacity="0.09"/>
<stop offset="0.729167" stop-color="#0D9373" stop-opacity="0.08"/>
</linearGradient>
<radialGradient id="paint1_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(208.697 189.703) rotate(-10.029) scale(169.097 167.466)">
<stop stop-color="#00B0BB"/>
<stop offset="1" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint2_linear_2862_30" x1="306.587" y1="93.5598" x2="252.341" y2="224.228" gradientUnits="userSpaceOnUse">
<stop stop-color="#18E299"/>
<stop offset="1"/>
</linearGradient>
<linearGradient id="paint3_linear_2862_30" x1="311.84" y1="123.717" x2="253.579" y2="224.761" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint4_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(313.407 243.64) rotate(-75.7542) scale(203.632 223.902)">
<stop stop-color="#00BBBB"/>
<stop offset="0.712616" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint5_linear_2862_30" x1="308.586" y1="102.284" x2="383.487" y2="201.169" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint6_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(311.446 249.925) rotate(-20.3524) scale(174.776 163.096)">
<stop stop-color="#00B0BB"/>
<stop offset="1" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint7_linear_2862_30" x1="395.842" y1="169.781" x2="332.121" y2="263.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#00B1BC"/>
<stop offset="1"/>
</linearGradient>
<linearGradient id="paint8_linear_2862_30" x1="395.842" y1="169.781" x2="370.99" y2="271.799" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint9_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(350 50) scale(398.125 182)">
<stop offset="0.348958" stop-color="#84FFD3"/>
<stop offset="0.880208" stop-color="#18E299" stop-opacity="0"/>
</radialGradient>
<radialGradient id="paint10_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(350 82) scale(398.125 182)">
<stop offset="0.348958" stop-color="#84FFD3"/>
<stop offset="0.880208" stop-color="#18E299" stop-opacity="0"/>
</radialGradient>
<radialGradient id="paint11_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(239 160) rotate(90) scale(182 182)">
<stop offset="0.348958" stop-color="#84FFD3"/>
<stop offset="0.880208" stop-color="#18E299" stop-opacity="0"/>
</radialGradient>
<radialGradient id="paint12_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(271 160) rotate(90) scale(182 182)">
<stop offset="0.348958" stop-color="#84FFD3"/>
<stop offset="0.880208" stop-color="#18E299" stop-opacity="0"/>
</radialGradient>
<radialGradient id="paint13_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(461 160) rotate(90) scale(182 182)">
<stop offset="0.348958" stop-color="#84FFD3"/>
<stop offset="0.880208" stop-color="#18E299" stop-opacity="0"/>
</radialGradient>
<radialGradient id="paint14_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(429 160) rotate(90) scale(182 182)">
<stop offset="0.348958" stop-color="#84FFD3"/>
<stop offset="0.880208" stop-color="#18E299" stop-opacity="0"/>
</radialGradient>
<radialGradient id="paint15_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(350 271) scale(398.125 182)">
<stop offset="0.348958" stop-color="#84FFD3"/>
<stop offset="0.880208" stop-color="#18E299" stop-opacity="0"/>
</radialGradient>
<radialGradient id="paint16_radial_2862_30" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(350 239) scale(398.125 182)">
<stop offset="0.348958" stop-color="#84FFD3"/>
<stop offset="0.880208" stop-color="#18E299" stop-opacity="0"/>
</radialGradient>
<linearGradient id="paint17_linear_2862_30" x1="0" y1="160" x2="700" y2="160" gradientUnits="userSpaceOnUse">
<stop stop-color="white" stop-opacity="0.1"/>
<stop offset="0.5" stop-color="white"/>
<stop offset="1" stop-color="white" stop-opacity="0.1"/>
</linearGradient>
<linearGradient id="paint18_linear_2862_30" x1="511" y1="-1" x2="189" y2="321" gradientUnits="userSpaceOnUse">
<stop stop-color="white" stop-opacity="0.1"/>
<stop offset="0.5" stop-color="white"/>
<stop offset="1" stop-color="white" stop-opacity="0.1"/>
</linearGradient>
<linearGradient id="paint19_linear_2862_30" x1="511" y1="321" x2="189" y2="-0.999997" gradientUnits="userSpaceOnUse">
<stop stop-color="white" stop-opacity="0.1"/>
<stop offset="0.5" stop-color="white"/>
<stop offset="1" stop-color="white" stop-opacity="0.1"/>
</linearGradient>
<clipPath id="clip0_2862_30">
<rect width="700" height="320" rx="16" fill="white"/>
</clipPath>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 12 KiB

View File

@@ -0,0 +1,155 @@
<svg width="700" height="320" viewBox="0 0 700 320" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_2862_278)">
<rect width="700" height="320" rx="16" fill="url(#paint0_linear_2862_278)"/>
<path d="M311.889 247.3C283.097 247.215 258.226 231.466 246.292 201.629C234.357 171.793 238.02 134.523 253.414 101.112C282.206 101.197 307.077 116.945 319.011 146.782C330.946 176.619 327.283 213.888 311.889 247.3Z" fill="white"/>
<path d="M311.889 247.3C283.097 247.215 258.226 231.466 246.292 201.629C234.357 171.793 238.02 134.523 253.414 101.112C282.206 101.197 307.077 116.945 319.011 146.782C330.946 176.619 327.283 213.888 311.889 247.3Z" fill="url(#paint1_radial_2862_278)"/>
<path d="M311.889 247.3C283.097 247.215 258.226 231.466 246.292 201.629C234.357 171.793 238.02 134.523 253.414 101.112C282.206 101.197 307.077 116.945 319.011 146.782C330.946 176.619 327.283 213.888 311.889 247.3Z" fill="black" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M311.889 247.3C283.097 247.215 258.226 231.466 246.292 201.629C234.357 171.793 238.02 134.523 253.414 101.112C282.206 101.197 307.077 116.945 319.011 146.782C330.946 176.619 327.283 213.888 311.889 247.3Z" fill="url(#paint2_linear_2862_278)" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M311.72 247.034C283.108 246.887 258.409 231.208 246.538 201.531C234.656 171.825 238.271 134.702 253.583 101.377C282.195 101.524 306.894 117.203 318.765 146.88C330.647 176.586 327.031 213.709 311.72 247.034Z" stroke="url(#paint3_linear_2862_278)" stroke-opacity="0.05" stroke-width="0.530516"/>
<path d="M305.839 247.174C343.92 237.419 377.154 210.619 393.585 171.64C410.017 132.661 405.98 90.1988 386.347 56.1934C348.266 65.9477 315.032 92.7486 298.601 131.728C282.169 170.706 286.206 213.168 305.839 247.174Z" fill="white"/>
<path d="M305.839 247.174C343.92 237.419 377.154 210.619 393.585 171.64C410.017 132.661 405.98 90.1988 386.347 56.1934C348.266 65.9477 315.032 92.7486 298.601 131.728C282.169 170.706 286.206 213.168 305.839 247.174Z" fill="url(#paint4_radial_2862_278)"/>
<path d="M393.341 171.537C376.971 210.369 343.89 237.091 305.969 246.867C286.462 212.959 282.476 170.663 298.845 131.831C315.215 92.9978 348.295 66.2765 386.217 56.5004C405.724 90.4077 409.71 132.704 393.341 171.537Z" stroke="url(#paint5_linear_2862_278)" stroke-opacity="0.05" stroke-width="0.530516"/>
<path d="M305.686 246.995C329.75 266.114 361.965 272.832 393.671 262.129C425.376 251.426 449.499 225.691 461.03 194.556C436.967 175.437 404.751 168.719 373.046 179.422C341.34 190.125 317.217 215.86 305.686 246.995Z" fill="white"/>
<path d="M305.686 246.995C329.75 266.114 361.965 272.832 393.671 262.129C425.376 251.426 449.499 225.691 461.03 194.556C436.967 175.437 404.751 168.719 373.046 179.422C341.34 190.125 317.217 215.86 305.686 246.995Z" fill="url(#paint6_radial_2862_278)"/>
<path d="M305.686 246.995C329.75 266.114 361.965 272.832 393.671 262.129C425.376 251.426 449.499 225.691 461.03 194.556C436.967 175.437 404.751 168.719 373.046 179.422C341.34 190.125 317.217 215.86 305.686 246.995Z" fill="black" fill-opacity="0.2" style="mix-blend-mode:hard-light"/>
<path d="M305.686 246.995C329.75 266.114 361.965 272.832 393.671 262.129C425.376 251.426 449.499 225.691 461.03 194.556C436.967 175.437 404.751 168.719 373.046 179.422C341.34 190.125 317.217 215.86 305.686 246.995Z" fill="url(#paint7_linear_2862_278)" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M393.586 261.878C362.035 272.529 329.981 265.88 306.002 246.907C317.535 215.919 341.571 190.327 373.13 179.673C404.682 169.023 436.736 175.671 460.715 194.644C449.182 225.632 425.146 251.224 393.586 261.878Z" stroke="url(#paint8_linear_2862_278)" stroke-opacity="0.05" stroke-width="0.530516"/>
<g opacity="0.8" filter="url(#filter0_f_2862_278)">
<circle cx="660" cy="-60" r="160" fill="#18E299" fill-opacity="0.4"/>
</g>
<g opacity="0.8" filter="url(#filter1_f_2862_278)">
<circle cx="20" cy="213" r="160" fill="#18E299" fill-opacity="0.33"/>
</g>
<g opacity="0.8" filter="url(#filter2_f_2862_278)">
<circle cx="660" cy="480" r="160" fill="#18E299" fill-opacity="0.52"/>
</g>
<g opacity="0.8" filter="url(#filter3_f_2862_278)">
<circle cx="20" cy="413" r="160" fill="#18E299" fill-opacity="0.22"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M0 50H700" stroke="black" stroke-dasharray="4 4"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M0 82H700" stroke="black" stroke-dasharray="4 4"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M239 0L239 320" stroke="black" stroke-dasharray="4 4"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M271 0L271 320" stroke="black" stroke-dasharray="4 4"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M461 0L461 320" stroke="black" stroke-dasharray="4 4"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M350 0L350 320" stroke="url(#paint9_linear_2862_278)"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M429 0L429 320" stroke="black" stroke-dasharray="4 4"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M0 271H700" stroke="black" stroke-dasharray="4 4"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M0 239H700" stroke="black" stroke-dasharray="4 4"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M0 160H700" stroke="url(#paint10_linear_2862_278)"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M511 -1L189 321" stroke="url(#paint11_linear_2862_278)"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.1">
<path d="M511 321L189 -1" stroke="url(#paint12_linear_2862_278)"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.05">
<circle cx="350" cy="160" r="111" stroke="black"/>
</g>
<g style="mix-blend-mode:overlay" opacity="0.05">
<circle cx="350" cy="160" r="79" stroke="black"/>
</g>
</g>
<defs>
<filter id="filter0_f_2862_278" x="260" y="-460" width="800" height="800" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="120" result="effect1_foregroundBlur_2862_278"/>
</filter>
<filter id="filter1_f_2862_278" x="-380" y="-187" width="800" height="800" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="120" result="effect1_foregroundBlur_2862_278"/>
</filter>
<filter id="filter2_f_2862_278" x="260" y="80" width="800" height="800" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="120" result="effect1_foregroundBlur_2862_278"/>
</filter>
<filter id="filter3_f_2862_278" x="-380" y="13" width="800" height="800" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="120" result="effect1_foregroundBlur_2862_278"/>
</filter>
<linearGradient id="paint0_linear_2862_278" x1="1.04308e-05" y1="320" x2="710.784" y2="26.0793" gradientUnits="userSpaceOnUse">
<stop stop-color="#18E299" stop-opacity="0.09"/>
<stop offset="0.729167" stop-color="#0D9373" stop-opacity="0.08"/>
</linearGradient>
<radialGradient id="paint1_radial_2862_278" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(208.697 189.703) rotate(-10.029) scale(169.097 167.466)">
<stop stop-color="#00B0BB"/>
<stop offset="1" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint2_linear_2862_278" x1="306.587" y1="93.5598" x2="252.341" y2="224.228" gradientUnits="userSpaceOnUse">
<stop stop-color="#18E299"/>
<stop offset="1"/>
</linearGradient>
<linearGradient id="paint3_linear_2862_278" x1="311.84" y1="123.717" x2="253.579" y2="224.761" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint4_radial_2862_278" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(313.407 243.64) rotate(-75.7542) scale(203.632 223.902)">
<stop stop-color="#00BBBB"/>
<stop offset="0.712616" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint5_linear_2862_278" x1="308.586" y1="102.284" x2="383.487" y2="201.169" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint6_radial_2862_278" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(311.447 249.925) rotate(-20.3524) scale(174.776 163.096)">
<stop stop-color="#00B0BB"/>
<stop offset="1" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint7_linear_2862_278" x1="395.843" y1="169.781" x2="332.121" y2="263.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#00B1BC"/>
<stop offset="1"/>
</linearGradient>
<linearGradient id="paint8_linear_2862_278" x1="395.843" y1="169.781" x2="370.991" y2="271.799" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint9_linear_2862_278" x1="350" y1="0" x2="350" y2="320" gradientUnits="userSpaceOnUse">
<stop stop-opacity="0"/>
<stop offset="0.0001" stop-opacity="0.3"/>
<stop offset="0.333333"/>
<stop offset="0.666667"/>
<stop offset="1" stop-opacity="0.3"/>
</linearGradient>
<linearGradient id="paint10_linear_2862_278" x1="0" y1="160" x2="700" y2="160" gradientUnits="userSpaceOnUse">
<stop stop-opacity="0.1"/>
<stop offset="0.5"/>
<stop offset="1" stop-opacity="0.1"/>
</linearGradient>
<linearGradient id="paint11_linear_2862_278" x1="511" y1="-1" x2="189" y2="321" gradientUnits="userSpaceOnUse">
<stop stop-opacity="0.1"/>
<stop offset="0.5"/>
<stop offset="1" stop-opacity="0.1"/>
</linearGradient>
<linearGradient id="paint12_linear_2862_278" x1="511" y1="321" x2="189" y2="-0.999997" gradientUnits="userSpaceOnUse">
<stop stop-opacity="0.1"/>
<stop offset="0.5"/>
<stop offset="1" stop-opacity="0.1"/>
</linearGradient>
<clipPath id="clip0_2862_278">
<rect width="700" height="320" rx="16" fill="white"/>
</clipPath>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 10 KiB

View File

@@ -0,0 +1,71 @@
---
title: Introduction
description: 'Welcome to the home of your new documentation'
---
<img
className="block dark:hidden"
src="/images/hero-light.svg"
alt="Hero Light"
/>
<img
className="hidden dark:block"
src="/images/hero-dark.svg"
alt="Hero Dark"
/>
## Setting up
The first step to world-class documentation is setting up your editing environment.
<CardGroup cols={2}>
<Card
title="Edit Your Docs"
icon="pen-to-square"
href="https://mintlify.com/docs/quickstart"
>
Get your docs set up locally for easy development
</Card>
<Card
title="Preview Changes"
icon="image"
href="https://mintlify.com/docs/development"
>
Preview your changes before you push to make sure they're perfect
</Card>
</CardGroup>
## Make it yours
Update your docs to your brand and add valuable content for the best user conversion.
<CardGroup cols={2}>
<Card
title="Customize Style"
icon="palette"
href="https://mintlify.com/docs/settings/global"
>
Customize your docs to your company's colors and branding
</Card>
<Card
title="Reference APIs"
icon="code"
href="https://mintlify.com/docs/api-playground/openapi"
>
Automatically generate endpoints from an OpenAPI spec
</Card>
<Card
title="Add Components"
icon="screwdriver-wrench"
href="https://mintlify.com/docs/components/accordion"
>
Build interactive features and designs to guide your users
</Card>
<Card
title="Get Inspiration"
icon="stars"
href="https://mintlify.com/customers"
>
Check out our showcase of our favorite documentation
</Card>
</CardGroup>

55
apps/docs/logo/dark.svg Normal file
View File

@@ -0,0 +1,55 @@
<svg width="160" height="24" viewBox="0 0 160 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61504C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5913 7.95343 21.1394Z" fill="white"/>
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61504C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5913 7.95343 21.1394Z" fill="url(#paint0_radial_115_109)"/>
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61504C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5913 7.95343 21.1394Z" fill="black" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61504C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5913 7.95343 21.1394Z" fill="url(#paint1_linear_115_109)" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M7.9354 21.1112C4.89702 21.0957 2.27411 19.4306 1.01347 16.279C-0.248375 13.1244 0.135612 9.18218 1.76165 5.64327C4.80004 5.65882 7.42295 7.32385 8.68359 10.4755C9.94543 13.63 9.56144 17.5723 7.9354 21.1112Z" stroke="url(#paint2_linear_115_109)" stroke-opacity="0.05" stroke-width="0.056338"/>
<path d="M7.31038 21.2574C11.3543 20.2215 14.8836 17.3754 16.6285 13.2361C18.3735 9.09671 17.9448 4.58749 15.8598 0.976291C11.8159 2.01214 8.2866 4.85826 6.54167 8.99762C4.79674 13.137 5.2254 17.6462 7.31038 21.2574Z" fill="white"/>
<path d="M7.31038 21.2574C11.3543 20.2215 14.8836 17.3754 16.6285 13.2361C18.3735 9.09671 17.9448 4.58749 15.8598 0.976291C11.8159 2.01214 8.2866 4.85826 6.54167 8.99762C4.79674 13.137 5.2254 17.6462 7.31038 21.2574Z" fill="url(#paint3_radial_115_109)"/>
<path d="M16.6025 13.2251C14.8642 17.349 11.3512 20.1866 7.32411 21.2248C5.25257 17.624 4.82926 13.1324 6.56764 9.00855C8.30603 4.88472 11.819 2.04706 15.8461 1.00889C17.9176 4.60967 18.3409 9.10131 16.6025 13.2251Z" stroke="url(#paint4_linear_115_109)" stroke-opacity="0.05" stroke-width="0.056338"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="white"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="url(#paint5_radial_115_109)"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="black" fill-opacity="0.2" style="mix-blend-mode:hard-light"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="url(#paint6_linear_115_109)" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M16.5682 22.7874C13.2176 23.9184 9.81361 23.2124 7.2672 21.1975C8.49194 17.9068 11.0444 15.189 14.3959 14.0577C17.7465 12.9266 21.1504 13.6326 23.6968 15.6476C22.4721 18.9383 19.9196 21.656 16.5682 22.7874Z" stroke="url(#paint7_linear_115_109)" stroke-opacity="0.05" stroke-width="0.056338"/>
<path d="M34.2124 19V5.4H39.4924L41.6924 12.2L42.3724 14.74L43.0524 12.2L45.2524 5.4H50.4124V19H46.3324L46.5924 9.98L45.5324 13.68L43.7924 19H40.8324L39.0524 13.6L38.0324 10.02L38.2924 19H34.2124ZM52.4155 7.3V4.6H56.2955V7.3H52.4155ZM52.4155 19V8.14H56.2955V19H52.4155ZM58.1038 19V8.14H61.9838V9.58C62.6638 8.34 63.7438 7.76 65.0038 7.76C66.9638 7.76 68.6238 8.98 68.6238 11.78V19H64.7438V12.56C64.7438 11.34 64.3038 10.86 63.4838 10.86C62.6038 10.86 61.9838 11.58 61.9838 12.88V19H58.1038ZM70.9327 15.22V11.06H69.7327V8.14H70.9327V5.62H74.8127V8.14H76.9327V11.06H74.8127V14.6C74.8127 15.5 75.0327 16.06 76.2127 16.06H76.9327V19C76.4927 19.2 75.6727 19.38 74.6527 19.38C72.1527 19.38 70.9327 17.88 70.9327 15.22Z" fill="url(#paint8_radial_115_109)"/>
<path d="M87.232 10.519C87.232 13.687 94.1125 11.2285 94.1125 15.832C94.1125 17.9935 92.3635 19.198 89.971 19.198C87.562 19.198 85.912 18.0925 85.417 15.832H87.001C87.364 17.1685 88.3705 17.8945 89.9875 17.8945C91.6705 17.8945 92.5615 17.152 92.5615 16.03C92.5615 12.598 85.681 15.1555 85.681 10.618C85.681 9.001 87.034 7.582 89.509 7.582C91.6705 7.582 93.403 8.6215 93.8155 11.014H92.215C91.8685 9.529 90.9115 8.8855 89.476 8.8855C88.057 8.8855 87.232 9.529 87.232 10.519ZM96.2499 16.4755V11.3935H95.0289V10.2385H96.2499V8.2255H97.7019V10.2385H99.6324V11.3935H97.7019V16.4755C97.7019 17.5315 98.0154 18.0265 99.3024 18.0265H99.5994V19.066C99.4344 19.1485 99.0714 19.198 98.6589 19.198C97.0254 19.198 96.2499 18.3235 96.2499 16.4755ZM102.516 13.093H101.064C101.345 11.1625 102.615 10.024 104.76 10.024C107.103 10.024 108.242 11.3935 108.242 13.4395V16.888C108.242 17.8945 108.324 18.5215 108.555 19H107.021C106.856 18.6535 106.806 18.142 106.79 17.614C106.047 18.7195 104.859 19.198 103.803 19.198C101.988 19.198 100.767 18.3565 100.767 16.69C100.767 15.4855 101.427 14.611 102.714 14.182C103.902 13.786 105.107 13.687 106.79 13.6705V13.4725C106.79 12.0535 106.13 11.278 104.628 11.278C103.374 11.278 102.698 11.971 102.516 13.093ZM102.252 16.657C102.252 17.4655 102.929 17.944 103.952 17.944C105.569 17.944 106.79 16.6735 106.79 15.172V14.7595C103.061 14.7925 102.252 15.5845 102.252 16.657ZM110.787 19V10.2385H112.239V11.5915C112.833 10.519 113.774 10.024 114.83 10.024C115.176 10.024 115.49 10.1065 115.655 10.2385V11.542C115.407 11.4595 115.094 11.4265 114.747 11.4265C112.998 11.4265 112.239 12.5155 112.239 14.0995V19H110.787ZM117.305 16.4755V11.3935H116.084V10.2385H117.305V8.2255H118.757V10.2385H120.688V11.3935H118.757V16.4755C118.757 17.5315 119.071 18.0265 120.358 18.0265H120.655V19.066C120.49 19.1485 120.127 19.198 119.714 19.198C118.081 19.198 117.305 18.3235 117.305 16.4755ZM129.809 16.1455C129.33 18.1915 127.862 19.198 125.865 19.198C123.324 19.198 121.79 17.482 121.79 
14.6275C121.79 11.6575 123.324 10.024 125.783 10.024C128.258 10.024 129.743 11.7235 129.743 14.512V14.875H123.275C123.357 16.8385 124.281 17.944 125.865 17.944C127.103 17.944 127.977 17.35 128.291 16.1455H129.809ZM125.783 11.278C124.38 11.278 123.539 12.1525 123.324 13.786H128.225C128.027 12.169 127.152 11.278 125.783 11.278ZM131.843 19V10.2385H133.295V11.5915C133.889 10.519 134.829 10.024 135.885 10.024C136.232 10.024 136.545 10.1065 136.71 10.2385V11.542C136.463 11.4595 136.149 11.4265 135.803 11.4265C134.054 11.4265 133.295 12.5155 133.295 14.0995V19H131.843ZM141.763 19V7.78H143.281V13.192L148.413 7.78H150.327L145.047 13.291L150.459 19H148.413L143.281 13.621V19H141.763ZM152.06 9.067V7.12H153.512V9.067H152.06ZM152.06 19V10.2385H153.512V19H152.06ZM156.178 16.4755V11.3935H154.957V10.2385H156.178V8.2255H157.63V10.2385H159.56V11.3935H157.63V16.4755C157.63 17.5315 157.943 18.0265 159.23 18.0265H159.527V19.066C159.362 19.1485 158.999 19.198 158.587 19.198C156.953 19.198 156.178 18.3235 156.178 16.4755Z" fill="white" fill-opacity="0.55"/>
<defs>
<radialGradient id="paint0_radial_115_109" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(-3.00503 15.023) rotate(-10.029) scale(17.9572 17.784)">
<stop stop-color="#00B0BB"/>
<stop offset="1" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint1_linear_115_109" x1="7.39036" y1="4.81308" x2="1.62975" y2="18.6894" gradientUnits="userSpaceOnUse">
<stop stop-color="#18E299"/>
<stop offset="1"/>
</linearGradient>
<linearGradient id="paint2_linear_115_109" x1="7.94816" y1="8.01562" x2="1.7612" y2="18.746" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint3_radial_115_109" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(8.11404 20.8822) rotate(-75.7542) scale(21.6246 23.7772)">
<stop stop-color="#00BBBB"/>
<stop offset="0.712616" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint4_linear_115_109" x1="7.60205" y1="5.8709" x2="15.5561" y2="16.3719" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint5_radial_115_109" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(7.84537 21.5181) rotate(-20.3525) scale(18.5603 17.32)">
<stop stop-color="#00B0BB"/>
<stop offset="1" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint6_linear_115_109" x1="16.8078" y1="13.0071" x2="10.0409" y2="22.9937" gradientUnits="userSpaceOnUse">
<stop stop-color="#00B1BC"/>
<stop offset="1"/>
</linearGradient>
<linearGradient id="paint7_linear_115_109" x1="16.8078" y1="13.0071" x2="14.1687" y2="23.841" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint8_radial_115_109" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(47.2781 7) rotate(19.0047) scale(67.5582 85.7506)">
<stop stop-color="white"/>
<stop offset="1" stop-color="white" stop-opacity="0.5"/>
</radialGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 9.3 KiB

51
apps/docs/logo/light.svg Normal file
View File

@@ -0,0 +1,51 @@
<svg width="160" height="24" viewBox="0 0 160 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61504C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5913 7.95343 21.1394Z" fill="white"/>
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61504C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5913 7.95343 21.1394Z" fill="url(#paint0_radial_115_86)"/>
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61504C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5913 7.95343 21.1394Z" fill="black" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M7.95343 21.1394C4.89586 21.1304 2.25471 19.458 0.987296 16.2895C-0.280118 13.121 0.108924 9.16314 1.74363 5.61504C4.8012 5.62409 7.44235 7.29648 8.70976 10.465C9.97718 13.6335 9.58814 17.5913 7.95343 21.1394Z" fill="url(#paint1_linear_115_86)" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M7.9354 21.1112C4.89702 21.0957 2.27411 19.4306 1.01347 16.279C-0.248375 13.1244 0.135612 9.18218 1.76165 5.64327C4.80004 5.65882 7.42295 7.32385 8.68359 10.4755C9.94543 13.63 9.56144 17.5723 7.9354 21.1112Z" stroke="url(#paint2_linear_115_86)" stroke-opacity="0.05" stroke-width="0.056338"/>
<path d="M7.31038 21.2574C11.3543 20.2215 14.8836 17.3754 16.6285 13.2361C18.3735 9.09671 17.9448 4.58749 15.8598 0.976291C11.8159 2.01214 8.2866 4.85826 6.54167 8.99762C4.79674 13.137 5.2254 17.6462 7.31038 21.2574Z" fill="white"/>
<path d="M7.31038 21.2574C11.3543 20.2215 14.8836 17.3754 16.6285 13.2361C18.3735 9.09671 17.9448 4.58749 15.8598 0.976291C11.8159 2.01214 8.2866 4.85826 6.54167 8.99762C4.79674 13.137 5.2254 17.6462 7.31038 21.2574Z" fill="url(#paint3_radial_115_86)"/>
<path d="M16.6025 13.2251C14.8642 17.349 11.3512 20.1866 7.32411 21.2248C5.25257 17.624 4.82926 13.1324 6.56764 9.00855C8.30603 4.88472 11.819 2.04706 15.8461 1.00889C17.9176 4.60967 18.3409 9.10131 16.6025 13.2251Z" stroke="url(#paint4_linear_115_86)" stroke-opacity="0.05" stroke-width="0.056338"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="white"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="url(#paint5_radial_115_86)"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="black" fill-opacity="0.2" style="mix-blend-mode:hard-light"/>
<path d="M7.23368 21.2069C9.78906 23.2373 13.2102 23.9506 16.5772 22.8141C19.9441 21.6775 22.5058 18.9445 23.7304 15.6382C21.175 13.6078 17.7538 12.8944 14.3869 14.031C11.0199 15.1676 8.45822 17.9006 7.23368 21.2069Z" fill="url(#paint6_linear_115_86)" fill-opacity="0.5" style="mix-blend-mode:hard-light"/>
<path d="M16.5682 22.7874C13.2176 23.9184 9.81361 23.2124 7.2672 21.1975C8.49194 17.9068 11.0444 15.189 14.3959 14.0577C17.7465 12.9266 21.1504 13.6326 23.6968 15.6476C22.4721 18.9383 19.9196 21.656 16.5682 22.7874Z" stroke="url(#paint7_linear_115_86)" stroke-opacity="0.05" stroke-width="0.056338"/>
<path d="M34.2124 19V5.4H39.4924L41.6924 12.2L42.3724 14.74L43.0524 12.2L45.2524 5.4H50.4124V19H46.3324L46.5924 9.98L45.5324 13.68L43.7924 19H40.8324L39.0524 13.6L38.0324 10.02L38.2924 19H34.2124ZM52.4155 7.3V4.6H56.2955V7.3H52.4155ZM52.4155 19V8.14H56.2955V19H52.4155ZM58.1038 19V8.14H61.9838V9.58C62.6638 8.34 63.7438 7.76 65.0038 7.76C66.9638 7.76 68.6238 8.98 68.6238 11.78V19H64.7438V12.56C64.7438 11.34 64.3038 10.86 63.4838 10.86C62.6038 10.86 61.9838 11.58 61.9838 12.88V19H58.1038ZM70.9327 15.22V11.06H69.7327V8.14H70.9327V5.62H74.8127V8.14H76.9327V11.06H74.8127V14.6C74.8127 15.5 75.0327 16.06 76.2127 16.06H76.9327V19C76.4927 19.2 75.6727 19.38 74.6527 19.38C72.1527 19.38 70.9327 17.88 70.9327 15.22Z" fill="#001E13"/>
<path d="M87.232 10.519C87.232 13.687 94.1125 11.2285 94.1125 15.832C94.1125 17.9935 92.3635 19.198 89.971 19.198C87.562 19.198 85.912 18.0925 85.417 15.832H87.001C87.364 17.1685 88.3705 17.8945 89.9875 17.8945C91.6705 17.8945 92.5615 17.152 92.5615 16.03C92.5615 12.598 85.681 15.1555 85.681 10.618C85.681 9.001 87.034 7.582 89.509 7.582C91.6705 7.582 93.403 8.6215 93.8155 11.014H92.215C91.8685 9.529 90.9115 8.8855 89.476 8.8855C88.057 8.8855 87.232 9.529 87.232 10.519ZM96.2499 16.4755V11.3935H95.0289V10.2385H96.2499V8.2255H97.7019V10.2385H99.6324V11.3935H97.7019V16.4755C97.7019 17.5315 98.0154 18.0265 99.3024 18.0265H99.5994V19.066C99.4344 19.1485 99.0714 19.198 98.6589 19.198C97.0254 19.198 96.2499 18.3235 96.2499 16.4755ZM102.516 13.093H101.064C101.345 11.1625 102.615 10.024 104.76 10.024C107.103 10.024 108.242 11.3935 108.242 13.4395V16.888C108.242 17.8945 108.324 18.5215 108.555 19H107.021C106.856 18.6535 106.806 18.142 106.79 17.614C106.047 18.7195 104.859 19.198 103.803 19.198C101.988 19.198 100.767 18.3565 100.767 16.69C100.767 15.4855 101.427 14.611 102.714 14.182C103.902 13.786 105.107 13.687 106.79 13.6705V13.4725C106.79 12.0535 106.13 11.278 104.628 11.278C103.374 11.278 102.698 11.971 102.516 13.093ZM102.252 16.657C102.252 17.4655 102.929 17.944 103.952 17.944C105.569 17.944 106.79 16.6735 106.79 15.172V14.7595C103.061 14.7925 102.252 15.5845 102.252 16.657ZM110.787 19V10.2385H112.239V11.5915C112.833 10.519 113.774 10.024 114.83 10.024C115.176 10.024 115.49 10.1065 115.655 10.2385V11.542C115.407 11.4595 115.094 11.4265 114.747 11.4265C112.998 11.4265 112.239 12.5155 112.239 14.0995V19H110.787ZM117.305 16.4755V11.3935H116.084V10.2385H117.305V8.2255H118.757V10.2385H120.688V11.3935H118.757V16.4755C118.757 17.5315 119.071 18.0265 120.358 18.0265H120.655V19.066C120.49 19.1485 120.127 19.198 119.714 19.198C118.081 19.198 117.305 18.3235 117.305 16.4755ZM129.809 16.1455C129.33 18.1915 127.862 19.198 125.865 19.198C123.324 19.198 121.79 17.482 121.79 
14.6275C121.79 11.6575 123.324 10.024 125.783 10.024C128.258 10.024 129.743 11.7235 129.743 14.512V14.875H123.275C123.357 16.8385 124.281 17.944 125.865 17.944C127.103 17.944 127.977 17.35 128.291 16.1455H129.809ZM125.783 11.278C124.38 11.278 123.539 12.1525 123.324 13.786H128.225C128.027 12.169 127.152 11.278 125.783 11.278ZM131.843 19V10.2385H133.295V11.5915C133.889 10.519 134.829 10.024 135.885 10.024C136.232 10.024 136.545 10.1065 136.71 10.2385V11.542C136.463 11.4595 136.149 11.4265 135.803 11.4265C134.054 11.4265 133.295 12.5155 133.295 14.0995V19H131.843ZM141.763 19V7.78H143.281V13.192L148.413 7.78H150.327L145.047 13.291L150.459 19H148.413L143.281 13.621V19H141.763ZM152.06 9.067V7.12H153.512V9.067H152.06ZM152.06 19V10.2385H153.512V19H152.06ZM156.178 16.4755V11.3935H154.957V10.2385H156.178V8.2255H157.63V10.2385H159.56V11.3935H157.63V16.4755C157.63 17.5315 157.943 18.0265 159.23 18.0265H159.527V19.066C159.362 19.1485 158.999 19.198 158.587 19.198C156.953 19.198 156.178 18.3235 156.178 16.4755Z" fill="#002719" fill-opacity="0.6"/>
<defs>
<radialGradient id="paint0_radial_115_86" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(-3.00503 15.023) rotate(-10.029) scale(17.9572 17.784)">
<stop stop-color="#00B0BB"/>
<stop offset="1" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint1_linear_115_86" x1="7.39036" y1="4.81308" x2="1.62975" y2="18.6894" gradientUnits="userSpaceOnUse">
<stop stop-color="#18E299"/>
<stop offset="1"/>
</linearGradient>
<linearGradient id="paint2_linear_115_86" x1="7.94816" y1="8.01562" x2="1.7612" y2="18.746" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint3_radial_115_86" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(8.11404 20.8822) rotate(-75.7542) scale(21.6246 23.7772)">
<stop stop-color="#00BBBB"/>
<stop offset="0.712616" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint4_linear_115_86" x1="7.60205" y1="5.8709" x2="15.5561" y2="16.3719" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint5_radial_115_86" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(7.84537 21.5181) rotate(-20.3525) scale(18.5603 17.32)">
<stop stop-color="#00B0BB"/>
<stop offset="1" stop-color="#00DB65"/>
</radialGradient>
<linearGradient id="paint6_linear_115_86" x1="16.8078" y1="13.0071" x2="10.0409" y2="22.9937" gradientUnits="userSpaceOnUse">
<stop stop-color="#00B1BC"/>
<stop offset="1"/>
</linearGradient>
<linearGradient id="paint7_linear_115_86" x1="16.8078" y1="13.0071" x2="14.1687" y2="23.841" gradientUnits="userSpaceOnUse">
<stop/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 9.0 KiB

85
apps/docs/mint.json Normal file
View File

@@ -0,0 +1,85 @@
{
"$schema": "https://mintlify.com/schema.json",
"name": "Starter Kit",
"logo": {
"dark": "/logo/dark.svg",
"light": "/logo/light.svg"
},
"favicon": "/favicon.svg",
"colors": {
"primary": "#0D9373",
"light": "#07C983",
"dark": "#0D9373",
"anchors": {
"from": "#0D9373",
"to": "#07C983"
}
},
"topbarLinks": [
{
"name": "Support",
"url": "mailto:support@mintlify.com"
}
],
"topbarCtaButton": {
"name": "Dashboard",
"url": "https://dashboard.mintlify.com"
},
"tabs": [
{
"name": "API Reference",
"url": "api-reference"
}
],
"anchors": [
{
"name": "Documentation",
"icon": "book-open-cover",
"url": "https://mintlify.com/docs"
},
{
"name": "Community",
"icon": "slack",
"url": "https://mintlify.com/community"
},
{
"name": "Blog",
"icon": "newspaper",
"url": "https://mintlify.com/blog"
}
],
"navigation": [
{
"group": "Get Started",
"pages": ["introduction", "quickstart", "development"]
},
{
"group": "Essentials",
"pages": [
"essentials/markdown",
"essentials/code",
"essentials/images",
"essentials/settings",
"essentials/navigation",
"essentials/reusable-snippets"
]
},
{
"group": "API Documentation",
"pages": ["api-reference/introduction"]
},
{
"group": "Endpoint Examples",
"pages": [
"api-reference/endpoint/get",
"api-reference/endpoint/create",
"api-reference/endpoint/delete"
]
}
],
"footerSocials": {
"x": "https://x.com/mintlify",
"github": "https://github.com/mintlify",
"linkedin": "https://www.linkedin.com/company/mintlify"
}
}

8
apps/docs/package.json Normal file
View File

@@ -0,0 +1,8 @@
{
"name": "docs",
"private": true,
"scripts": {
"dev": "npx --yes mintlify dev --port 5004",
"lint": "npx --yes mintlify broken-links"
}
}

86
apps/docs/quickstart.mdx Normal file
View File

@@ -0,0 +1,86 @@
---
title: 'Quickstart'
description: 'Start building awesome documentation in under 5 minutes'
---
## Setup your development
Learn how to update your docs locally and deploy them to the public.
### Edit and preview
<AccordionGroup>
<Accordion icon="github" title="Clone your docs locally">
During the onboarding process, we created a repository on your Github with
your docs content. You can find this repository on our
[dashboard](https://dashboard.mintlify.com). To clone the repository
locally, follow these
[instructions](https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository)
in your terminal.
</Accordion>
<Accordion icon="rectangle-terminal" title="Preview changes">
Previewing helps you make sure your changes look as intended. We built a
command line interface to render these changes locally. 1. Install the
[Mintlify CLI](https://www.npmjs.com/package/mintlify) to preview the
documentation changes locally with this command: ``` npm i -g mintlify ```
2. Run the following command at the root of your documentation (where
`mint.json` is): ``` mintlify dev ```
</Accordion>
</AccordionGroup>
### Deploy your changes
<AccordionGroup>
<Accordion icon="message-bot" title="Install our Github app">
Our Github app automatically deploys your changes to your docs site, so you
don't need to manage deployments yourself. You can find the link to install on
your [dashboard](https://dashboard.mintlify.com). Once the bot has been
successfully installed, there should be a check mark next to the commit hash
of the repo.
</Accordion>
<Accordion icon="rocket" title="Push your changes">
[Commit and push your changes to
Git](https://docs.github.com/en/get-started/using-git/pushing-commits-to-a-remote-repository#about-git-push)
for your changes to update in your docs site. If you push and don't see that
the Github app successfully deployed your changes, you can also manually
update your docs through our [dashboard](https://dashboard.mintlify.com).
</Accordion>
</AccordionGroup>
## Update your docs
Add content directly in your files with MDX syntax and React components. You can use any of our components, or even build your own.
<CardGroup>
<Card title="Style Your Docs" icon="paintbrush" href="/settings/global">
Add flair to your docs with personalized branding.
</Card>
<Card
title="Add API Endpoints"
icon="square-code"
href="/api-playground/configuration"
>
Implement your OpenAPI spec and enable API user interaction.
</Card>
<Card
title="Integrate Analytics"
icon="chart-mixed"
href="/analytics/supported-integrations"
>
Draw insights from user interactions with your documentation.
</Card>
<Card
title="Host on a Custom Domain"
icon="browser"
href="/settings/custom-domain/subdomain"
>
Keep your docs on your own website's subdomain.
</Card>
</CardGroup>

View File

@@ -0,0 +1,4 @@
One of the core principles of software development is DRY (Don't Repeat
Yourself). This is a principle that applies to documentation as
well. If you find yourself repeating the same content in multiple places, you
should consider creating a custom snippet to keep your content in sync.

1
apps/email-playground/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
.react-email

View File

@@ -0,0 +1,11 @@
import { ContactTemplate } from '@konobangu/email/templates/contact';
const ExampleContactEmail = () => (
<ContactTemplate
name="Jane Smith"
email="jane.smith@example.com"
message="I'm interested in your services."
/>
);
export default ExampleContactEmail;

View File

@@ -0,0 +1,21 @@
{
"name": "email-playground",
"version": "0.0.0",
"private": true,
"scripts": {
"build": "email build",
"dev": "email dev --port 5003",
"export": "email export",
"clean": "git clean -xdf .cache dist node_modules",
"typecheck": "tsc --noEmit --emitDeclarationOnly false"
},
"dependencies": {
"@react-email/components": "^0.0.42",
"react": "^19.0.0",
"react-email": "^4.0.16",
"@konobangu/email": "workspace:*"
},
"devDependencies": {
"@types/react": "19.0.1"
}
}

View File

@@ -0,0 +1,13 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"composite": true,
"jsx": "react-jsx",
"jsxImportSource": "react",
"module": "ESNext",
"moduleResolution": "bundler"
},
"references": [{ "path": "../../packages/email" }],
"include": ["**/*.ts", "**/*.tsx"],
"exclude": ["node_modules"]
}

View File

@@ -0,0 +1,11 @@
```x-forwarded.json
{
"X-Forwarded-Host": "konobangu.com",
"X-Forwarded-Proto": "https"
}
```
#^https://konobangu.com/api*** statusCode://500
^https://konobangu.com/api*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/api$1
^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000/$1 excludeFilter://^https://konobangu.com/api***
^wss://konobangu.com/*** reqHeaders://{x-forwarded.json} ws://127.0.0.1:5000/$1 excludeFilter://^wss://konobangu.com/api

View File

@@ -1,2 +0,0 @@
^https://webui.konobangu.com/*** http://127.0.0.1:3000/$1
^wss://webui.konobangu.com/*** ws://127.0.0.1:3000/$1

View File

@@ -0,0 +1 @@
^https://mikanani.me/*** http://127.0.0.1:5005/$1 excludeFilter://^**/***.svg excludeFilter://^**/***.css excludeFilter://^**/***.js

View File

@@ -1 +0,0 @@
^https://recorder.konobangu.com/*** http://127.0.0.1:7600/$1

View File

@@ -1 +1 @@
{"filesOrder":["webui","recorder"],"selectedList":["webui","recorder"],"disabledDefalutRules":true}
{"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""}

View File

@@ -0,0 +1 @@
{"filesOrder":[]}

19
apps/proxy/Cargo.toml Normal file
View File

@@ -0,0 +1,19 @@
[package]
name = "proxy"
version = "0.1.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "proxy"
path = "src/lib.rs"
[[bin]]
name = "mikan_doppel"
path = "src/bin/mikan_doppel.rs"
[dependencies]
recorder = { workspace = true }
tokio = { workspace = true }
tracing-subscriber = { workspace = true }
tracing = { workspace = true }

View File

@@ -3,11 +3,13 @@
"version": "0.1.0",
"private": true,
"scripts": {
"start": "whistle run -p 8899 -t 30000 -M \"keepXFF|prod|capture\" -D . --no-global-plugins"
"whistle": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
"mikan_doppel": "cargo run -p proxy --bin mikan_doppel",
"dev": "npm-run-all -p mikan_doppel whistle"
},
"keywords": [],
"license": "MIT",
"devDependencies": {
"whistle": "^2.9.61"
"whistle": "^2.9.99"
}
}
}

View File

@@ -0,0 +1,22 @@
use std::time::Duration;
use recorder::{errors::RecorderResult, test_utils::mikan::MikanMockServer};
use tracing::Level;
// Standalone doppelgänger server: serves mocked mikan pages on a fixed local
// port so the whistle proxy rules can redirect https://mikanani.me traffic to it.
#[allow(unused_variables)]
#[tokio::main]
async fn main() -> RecorderResult<()> {
    // Debug-level logging so mock request handling is visible during development.
    tracing_subscriber::fmt()
        .with_max_level(Level::DEBUG)
        .init();
    // Port 5005 matches the whistle rule that forwards mikanani.me to 127.0.0.1:5005.
    let mut mikan_server = MikanMockServer::new_with_port(5005).await.unwrap();
    // NOTE(review): these bindings look like mock guards that must stay alive for
    // the routes to keep serving (hence #[allow(unused_variables)]) — confirm
    // against MikanMockServer's API before removing them.
    let resources_mock = mikan_server.mock_resources_with_doppel();
    let login_mock = mikan_server.mock_get_login_page();
    // Park the main task forever; the server runs until the process is killed.
    loop {
        tokio::time::sleep(Duration::from_secs(1)).await;
    }
}

17
apps/recorder/.env Normal file
View File

@@ -0,0 +1,17 @@
HOST="konobangu.com"
DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
STORAGE_DATA_DIR = "./data"
AUTH_TYPE = "basic" # or oidc
BASIC_USER = "konobangu"
BASIC_PASSWORD = "konobangu"
# OIDC_ISSUER="https://auth.logto.io/oidc"
# OIDC_AUDIENCE = "https://konobangu.com/api"
# OIDC_CLIENT_ID = "client_id"
# OIDC_CLIENT_SECRET = "client_secret" # optional
# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# OIDC_EXTRA_CLAIM_KEY = ""
# OIDC_EXTRA_CLAIM_VALUE = ""
# MIKAN_PROXY = ""
# MIKAN_PROXY_AUTH_HEADER = ""
# MIKAN_NO_PROXY = ""
# MIKAN_PROXY_ACCEPT_INVALID_CERTS = "true"

17
apps/recorder/.env.dev Normal file
View File

@@ -0,0 +1,17 @@
HOST="konobangu.com"
DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
STORAGE_DATA_DIR = "./data"
AUTH_TYPE = "basic" # or oidc
BASIC_USER = "konobangu"
BASIC_PASSWORD = "konobangu"
# OIDC_ISSUER="https://auth.logto.io/oidc"
# OIDC_AUDIENCE = "https://konobangu.com/api"
# OIDC_CLIENT_ID = "client_id"
# OIDC_CLIENT_SECRET = "client_secret" # optional
# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# OIDC_EXTRA_CLAIM_KEY = ""
# OIDC_EXTRA_CLAIM_VALUE = ""
MIKAN_PROXY = "http://127.0.0.1:8899"
# MIKAN_PROXY_AUTH_HEADER = ""
# MIKAN_NO_PROXY = ""
MIKAN_PROXY_ACCEPT_INVALID_CERTS = true

View File

@@ -16,4 +16,16 @@ Cargo.lock
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
/data
# Local
.DS_Store
*.local
*.log*
# Dist
node_modules
dist/
temp/*
!temp/.gitkeep
tests/resources/mikan/classic_episodes/*/*
!tests/resources/mikan/classic_episodes/parquet/tiny.parquet

169
apps/recorder/Cargo.toml Normal file
View File

@@ -0,0 +1,169 @@
[package]
name = "recorder"
version = "0.1.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = ["jxl"]
playground = ["dep:inquire", "dep:color-eyre", "dep:polars"]
testcontainers = [
"dep:testcontainers",
"dep:testcontainers-modules",
"dep:testcontainers-ext",
"downloader/testcontainers",
"testcontainers-modules/postgres",
]
jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
[lib]
name = "recorder"
path = "src/lib.rs"
[[bin]]
name = "recorder_cli"
path = "src/bin/main.rs"
required-features = []
[[example]]
name = "mikan_collect_classic_eps"
path = "examples/mikan_collect_classic_eps.rs"
required-features = ["playground"]
[[example]]
name = "mikan_doppel_season_subscription"
path = "examples/mikan_doppel_season_subscription.rs"
required-features = ["playground"]
[[example]]
name = "mikan_doppel_subscriber_subscription"
path = "examples/mikan_doppel_subscriber_subscription.rs"
required-features = ["playground"]
[[example]]
name = "playground"
path = "examples/playground.rs"
required-features = ["playground"]
[dependencies]
downloader = { workspace = true }
util = { workspace = true }
util-derive = { workspace = true }
fetch = { workspace = true }
serde = { workspace = true }
tokio = { workspace = true }
serde_json = { workspace = true }
async-trait = { workspace = true }
testcontainers = { workspace = true, optional = true }
testcontainers-modules = { workspace = true, optional = true }
testcontainers-ext = { workspace = true, optional = true, features = [
"tracing",
] }
tracing = { workspace = true }
axum = { workspace = true }
axum-extra = { workspace = true }
snafu = { workspace = true }
itertools = { workspace = true }
url = { workspace = true }
regex = { workspace = true }
lazy_static = { workspace = true }
quirks_path = { workspace = true }
futures = { workspace = true }
bytes = { workspace = true }
serde_with = { workspace = true }
moka = { workspace = true }
chrono = { workspace = true }
tracing-subscriber = { workspace = true }
mockito = { workspace = true }
color-eyre = { workspace = true, optional = true }
inquire = { workspace = true, optional = true }
convert_case = { workspace = true }
image = { workspace = true }
uuid = { workspace = true }
maplit = { workspace = true }
once_cell = { workspace = true }
rand = { workspace = true }
rust_decimal = { workspace = true }
base64 = { workspace = true }
nom = { workspace = true }
percent-encoding = { workspace = true }
num-traits = { workspace = true }
http = { workspace = true }
async-stream = { workspace = true }
serde_variant = { workspace = true }
tracing-appender = { workspace = true }
clap = { workspace = true }
ipnetwork = { workspace = true }
typed-builder = { workspace = true }
nanoid = { workspace = true }
webp = { workspace = true }
sea-orm = { version = "1.1", features = [
"sqlx-sqlite",
"sqlx-postgres",
"runtime-tokio",
"macros",
"debug-print",
] }
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
rss = "2"
fancy-regex = "0.14"
lightningcss = "1.0.0-alpha.66"
html-escape = "0.2.13"
opendal = { version = "0.53", features = ["default", "services-fs"] }
scraper = "0.23.1"
async-graphql = { version = "7", features = ["dynamic-schema"] }
async-graphql-axum = "7"
seaography = { version = "1.1", features = [
"with-json",
"with-chrono",
"with-time",
"with-uuid",
"with-decimal",
"with-bigdecimal",
"with-postgres-array",
"with-json-as-scalar",
] }
tower = "0.5.2"
tower-http = { version = "0.6", features = [
"trace",
"catch-panic",
"timeout",
"add-extension",
"cors",
"fs",
"set-header",
"compression-full",
] }
tera = "1.20.0"
openidconnect = { version = "4" }
dotenvy = "0.15.7"
jpegxl-rs = { version = "0.11.2", optional = true }
jpegxl-sys = { version = "0.11.2", optional = true }
apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
apalis-sql = { version = "0.7", features = ["postgres"] }
cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
reqwest_cookie_store = "0.8.0"
jwtk = "0.4.0"
mime_guess = "2.0.5"
icu_properties = "2.0.1"
icu = "2.0.0"
tracing-tree = "0.4.0"
num_cpus = "1.17.0"
headers-accept = "0.1.4"
polars = { version = "0.49.1", features = [
"parquet",
"lazy",
"diagonal_concat",
], optional = true }
[dev-dependencies]
inquire = { workspace = true }
color-eyre = { workspace = true }
serial_test = "3"
insta = { version = "1", features = ["redactions", "toml", "filters"] }
rstest = "0.25"
ctor = "0.4.0"

View File

@@ -0,0 +1,584 @@
use std::collections::HashSet;
use chrono::{DateTime, Duration, FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
use fetch::{HttpClientConfig, fetch_html};
use itertools::Itertools;
use lazy_static::lazy_static;
use nom::{
IResult, Parser,
branch::alt,
bytes::complete::{tag, take, take_till1},
character::complete::space1,
combinator::map,
};
use recorder::{
errors::{RecorderError, RecorderResult},
extract::{
html::extract_inner_text_from_element_ref,
mikan::{MikanClient, MikanConfig, MikanEpisodeHash, MikanFansubHash},
},
};
use regex::Regex;
use scraper::{ElementRef, Html, Selector};
use snafu::FromString;
use url::Url;
lazy_static! {
    /// Root folder for the scraped classic-episode fixtures.
    /// In test/debug/playground builds the path is anchored at the crate root
    /// via CARGO_MANIFEST_DIR; otherwise it is relative to the working directory.
    static ref TEST_FOLDER: std::path::PathBuf =
        if cfg!(any(test, debug_assertions, feature = "playground")) {
            std::path::PathBuf::from(format!(
                "{}/tests/resources/mikan/classic_episodes",
                env!("CARGO_MANIFEST_DIR")
            ))
        } else {
            std::path::PathBuf::from("tests/resources/mikan/classic_episodes")
        };
}
lazy_static! {
    /// Captures the `total: N` argument of the
    /// `$('.classic-view-pagination2').bootpag({ total: N, ... })` call in the
    /// page's inline script — i.e. the total number of listing pages.
    static ref TOTAL_PAGE_REGEX: Regex =
        Regex::new(r#"\$\(\'\.classic-view-pagination2\'\)\.bootpag\(\{\s*total:\s*(\d+)"#)
            .unwrap();
}
/// One row of mikan's classic episode table, parsed from a `<tr>` element.
pub struct MikanClassicEpisodeTableRow {
    /// Synthetic stable id: `rev_id * 1000 + row index` (see `from_element_ref`).
    pub id: i32,
    /// Publish time, converted from the site's UTC+8 local time to UTC.
    pub publish_at: DateTime<Utc>,
    // Fansub id/name are optional because some rows carry no fansub link.
    pub mikan_fansub_id: Option<String>,
    pub fansub_name: Option<String>,
    pub mikan_episode_id: String,
    pub original_name: String,
    /// Value of the copy-to-clipboard magnet attribute, when present.
    pub magnet_link: Option<String>,
    /// Human-readable size text from the table cell (not parsed into bytes).
    pub file_size: Option<String>,
    pub torrent_link: Option<String>,
}
impl MikanClassicEpisodeTableRow {
    /// Fixed UTC+8 offset — the offset in which the site renders timestamps.
    fn timezone() -> FixedOffset {
        FixedOffset::east_opt(8 * 3600).unwrap()
    }

    /// Parses the relative date words "今天" ("today") / "昨天" ("yesterday")
    /// into concrete dates evaluated in the site's UTC+8 timezone.
    fn fixed_date_parser(input: &str) -> IResult<&str, NaiveDate> {
        alt((
            map(tag("今天"), move |_| {
                Utc::now().with_timezone(&Self::timezone()).date_naive()
            }),
            map(tag("昨天"), move |_| {
                Utc::now().with_timezone(&Self::timezone()).date_naive() - Duration::days(1)
            }),
        ))
        .parse(input)
    }

    /// Parses an absolute `YYYY/MM/DD` date token (everything up to the first
    /// whitespace), mapping a chrono parse failure into a nom `Verify` error.
    fn formatted_date_parser(input: &str) -> IResult<&str, NaiveDate> {
        let (remain, date_str) = take_till1(|c: char| c.is_whitespace()).parse(input)?;
        let date = NaiveDate::parse_from_str(date_str, "%Y/%m/%d").map_err(|_| {
            nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
        })?;
        Ok((remain, date))
    }

    /// Accepts either the relative ("今天"/"昨天") or absolute date form.
    fn date_parser(input: &str) -> IResult<&str, NaiveDate> {
        alt((Self::fixed_date_parser, Self::formatted_date_parser)).parse(input)
    }

    /// Parses an `HH:MM` time by consuming exactly five characters.
    fn time_parser(input: &str) -> IResult<&str, NaiveTime> {
        let (remain, time_str) = take(5usize).parse(input)?;
        let time = NaiveTime::parse_from_str(time_str, "%H:%M").map_err(|_| {
            nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
        })?;
        Ok((remain, time))
    }

    /// Parses "<date> <time>" cell text and converts the resulting local
    /// (UTC+8) datetime to UTC. Returns `None` on any parse/ambiguity failure.
    fn extract_publish_at(text: &str) -> Option<DateTime<Utc>> {
        let (_, (date, _, time)) = (Self::date_parser, space1, Self::time_parser)
            .parse(text)
            .ok()?;
        let local_dt = Self::timezone()
            .from_local_datetime(&date.and_time(time))
            .single()?;
        Some(local_dt.with_timezone(&Utc))
    }

    /// Builds a row from one table `<tr>`.
    ///
    /// `rev_id` is the page's reverse index and `idx` the row's reverse index
    /// within the page; together they form the synthetic id
    /// `rev_id * 1000 + idx`. `mikan_base_url` resolves the relative hrefs.
    ///
    /// # Errors
    /// Fails when any of the mandatory fields (episode hash, original name,
    /// publish time) cannot be extracted; the error names the missing fields.
    pub fn from_element_ref(
        row: ElementRef<'_>,
        rev_id: i32,
        idx: i32,
        mikan_base_url: &Url,
    ) -> RecorderResult<Self> {
        // Column layout: 1=publish time, 2=fansub link, 3=episode link plus
        // magnet button, 4=file size, 5=torrent link.
        let publish_at_selector = &Selector::parse("td:nth-of-type(1)").unwrap();
        let fansub_selector = &Selector::parse("td:nth-of-type(2) > a").unwrap();
        let original_name_selector =
            &Selector::parse("td:nth-of-type(3) > a:nth-of-type(1)").unwrap();
        let magnet_link_selector =
            &Selector::parse("td:nth-of-type(3) > a:nth-of-type(2)").unwrap();
        let file_size_selector = &Selector::parse("td:nth-of-type(4)").unwrap();
        let torrent_link_selector = &Selector::parse("td:nth-of-type(5) > a").unwrap();
        let publish_at = row
            .select(publish_at_selector)
            .next()
            .map(extract_inner_text_from_element_ref)
            .and_then(|e| Self::extract_publish_at(&e));
        // Resolve the fansub href against the base URL and split it into
        // (hash, display name); both stay None when the link is absent.
        let (mikan_fansub_hash, fansub_name) = row
            .select(fansub_selector)
            .next()
            .and_then(|e| {
                e.attr("href")
                    .and_then(|s| mikan_base_url.join(s).ok())
                    .and_then(|u| MikanFansubHash::from_homepage_url(&u))
                    .map(|h| (h, extract_inner_text_from_element_ref(e)))
            })
            .unzip();
        let (mikan_episode_hash, original_name) = row
            .select(original_name_selector)
            .next()
            .and_then(|el| {
                el.attr("href")
                    .and_then(|s| mikan_base_url.join(s).ok())
                    .and_then(|u| MikanEpisodeHash::from_homepage_url(&u))
                    .map(|h| (h, extract_inner_text_from_element_ref(el)))
            })
            .unzip();
        // The magnet URI lives in the copy button's data attribute, not href.
        let magnet_link = row
            .select(magnet_link_selector)
            .next()
            .and_then(|el| el.attr("data-clipboard-text"));
        let file_size = row
            .select(file_size_selector)
            .next()
            .map(extract_inner_text_from_element_ref);
        let torrent_link = row
            .select(torrent_link_selector)
            .next()
            .and_then(|el| el.attr("href"));
        if let (Some(mikan_episode_hash), Some(original_name), Some(publish_at)) = (
            mikan_episode_hash.as_ref(),
            original_name.as_ref(),
            publish_at.as_ref(),
        ) {
            Ok(Self {
                id: rev_id * 1000 + idx,
                publish_at: *publish_at,
                mikan_fansub_id: mikan_fansub_hash.map(|h| h.mikan_fansub_id.clone()),
                fansub_name,
                mikan_episode_id: mikan_episode_hash.mikan_episode_id.clone(),
                original_name: original_name.clone(),
                magnet_link: magnet_link.map(|s| s.to_string()),
                file_size: file_size.map(|s| s.to_string()),
                torrent_link: torrent_link.map(|s| s.to_string()),
            })
        } else {
            // Report every missing mandatory field at once to ease debugging.
            let mut missing_fields = vec![];
            if mikan_episode_hash.is_none() {
                missing_fields.push("mikan_episode_id");
            }
            if original_name.is_none() {
                missing_fields.push("original_name");
            }
            if publish_at.is_none() {
                missing_fields.push("publish_at");
            }
            Err(RecorderError::without_source(format!(
                "Failed to parse episode table row, missing fields: {missing_fields:?}, row \
                 index: {idx}"
            )))
        }
    }
}
/// One parsed page of the classic episode listing, together with the
/// pagination info extracted from the page's inline bootpag script.
pub struct MikanClassicEpisodeTablePage {
    /// 1-based page number as fetched from `/Home/Classic/{page}`.
    pub page: i32,
    /// Total number of pages reported by the pagination script.
    pub total: i32,
    /// Raw page HTML, retained so it can be persisted next to the parsed rows.
    pub html: String,
    pub rows: Vec<MikanClassicEpisodeTableRow>,
}
impl MikanClassicEpisodeTablePage {
    /// Parses a listing page: reads the total page count from the inline
    /// pagination script, then parses every `<tr>` (in reverse document order,
    /// so row ids grow with age rank) into rows.
    ///
    /// `updated_info = (rev_id, total)` overrides the total parsed from the
    /// HTML — used when re-parsing cached pages against a newer page count.
    ///
    /// # Errors
    /// Fails when the pagination script cannot be matched or any row fails
    /// to parse.
    pub fn from_html(
        html: String,
        mikan_base_url: &Url,
        page: i32,
        updated_info: Option<(i32, i32)>,
    ) -> RecorderResult<Self> {
        let tr_selector = &Selector::parse("tbody tr").unwrap();
        let doc = Html::parse_document(&html);
        if let Some(mut total) = TOTAL_PAGE_REGEX
            .captures(&html)
            .and_then(|c| c.get(1))
            .and_then(|s| s.as_str().parse::<i32>().ok())
        {
            if let Some((_, update_total)) = updated_info {
                total = update_total;
            }
            // Reverse index: 0 for the oldest page, total-1 for the newest.
            let rev_id = total - page;
            let rows = doc
                .select(tr_selector)
                .rev()
                .enumerate()
                .map(|(idx, tr)| {
                    MikanClassicEpisodeTableRow::from_element_ref(
                        tr,
                        rev_id,
                        idx as i32,
                        mikan_base_url,
                    )
                })
                .collect::<RecorderResult<Vec<_>>>()?;
            Ok(Self {
                page,
                total,
                html,
                rows,
            })
        } else {
            Err(RecorderError::without_source(
                "Failed to parse pagination meta and rows".into(),
            ))
        }
    }

    /// Persists this page three ways under TEST_FOLDER, keyed by its reverse
    /// index: raw HTML, a parquet file, and an always-quoted CSV file.
    ///
    /// # Errors
    /// Fails on filesystem errors or polars DataFrame construction/write errors.
    pub fn save_to_files(&self) -> RecorderResult<()> {
        use polars::prelude::*;
        let rev_id = self.total - self.page;
        let parquet_path = TEST_FOLDER.join(format!("parquet/rev_{rev_id}.parquet"));
        let csv_path = TEST_FOLDER.join(format!("csv/rev_{rev_id}.csv"));
        let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
        std::fs::write(html_path, self.html.clone())?;
        // Transpose rows into one column vector per field for the df! macro.
        let mut id_vec = Vec::new();
        let mut publish_at_vec = Vec::new();
        let mut mikan_fansub_id_vec = Vec::new();
        let mut fansub_name_vec = Vec::new();
        let mut mikan_episode_id_vec = Vec::new();
        let mut original_name_vec = Vec::new();
        let mut magnet_link_vec = Vec::new();
        let mut file_size_vec = Vec::new();
        let mut torrent_link_vec = Vec::new();
        for row in &self.rows {
            id_vec.push(row.id);
            // Stored as RFC 3339 text rather than a native datetime column.
            publish_at_vec.push(row.publish_at.to_rfc3339());
            mikan_fansub_id_vec.push(row.mikan_fansub_id.clone());
            fansub_name_vec.push(row.fansub_name.clone());
            mikan_episode_id_vec.push(row.mikan_episode_id.clone());
            original_name_vec.push(row.original_name.clone());
            magnet_link_vec.push(row.magnet_link.clone());
            file_size_vec.push(row.file_size.clone());
            torrent_link_vec.push(row.torrent_link.clone());
        }
        let df = df! [
            "id" => id_vec,
            "publish_at_timestamp" => publish_at_vec,
            "mikan_fansub_id" => mikan_fansub_id_vec,
            "fansub_name" => fansub_name_vec,
            "mikan_episode_id" => mikan_episode_id_vec,
            "original_name" => original_name_vec,
            "magnet_link" => magnet_link_vec,
            "file_size" => file_size_vec,
            "torrent_link" => torrent_link_vec,
        ]
        .map_err(|e| {
            let message = format!("Failed to create DataFrame: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?;
        let mut parquet_file = std::fs::File::create(&parquet_path)?;
        ParquetWriter::new(&mut parquet_file)
            .finish(&mut df.clone())
            .map_err(|e| {
                let message = format!("Failed to write parquet file: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;
        let mut csv_file = std::fs::File::create(&csv_path)?;
        CsvWriter::new(&mut csv_file)
            .include_header(true)
            .with_quote_style(QuoteStyle::Always)
            .finish(&mut df.clone())
            .map_err(|e| {
                let message = format!("Failed to write csv file: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;
        println!(
            "[{}/{}] Saved {} rows to rev_{}.{{parquet,html,csv}}",
            self.page,
            self.total,
            self.rows.len(),
            rev_id
        );
        Ok(())
    }

    /// Scans the `csv` output folder and returns every reverse index in
    /// `0..total` that has not been saved yet (resume support).
    ///
    /// # Errors
    /// Fails when the csv directory cannot be read.
    pub fn waiting_rev_ids(total: i32) -> RecorderResult<Vec<i32>> {
        let dir = TEST_FOLDER.join("csv");
        let files = std::fs::read_dir(dir)?;
        // Collect the rev ids already on disk from "rev_{N}.csv" file stems.
        let rev_ids = files
            .filter_map(|f| f.ok())
            .filter_map(|f| {
                f.path().file_stem().and_then(|s| {
                    s.to_str().and_then(|s| {
                        if s.starts_with("rev_") {
                            s.replace("rev_", "").parse::<i32>().ok()
                        } else {
                            None
                        }
                    })
                })
            })
            .collect::<HashSet<_>>();
        Ok((0..total)
            .filter(|rev_id| !rev_ids.contains(rev_id))
            .collect::<Vec<_>>())
    }
}
/// Fetches one classic-episode listing page and parses it.
///
/// When `updated_info = (rev_id, total)` is given and the cached
/// `html/rev_{rev_id}.html` exists, the cached HTML is parsed instead of
/// re-fetching from the network.
///
/// # Errors
/// Fails on URL construction, filesystem, fetch, or parse errors.
async fn scrape_mikan_classic_episode_table_page(
    mikan_client: &MikanClient,
    page: i32,
    updated_info: Option<(i32, i32)>,
) -> RecorderResult<MikanClassicEpisodeTablePage> {
    let mikan_base_url = mikan_client.base_url();
    let url = mikan_base_url.join(&format!("/Home/Classic/{page}"))?;
    if let Some((rev_id, update_total)) = updated_info.as_ref() {
        let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
        if html_path.exists() {
            let html = std::fs::read_to_string(&html_path)?;
            println!("[{page}/{update_total}] html exists, skipping fetch");
            return MikanClassicEpisodeTablePage::from_html(
                html,
                mikan_base_url,
                page,
                updated_info,
            );
        }
    }
    // On the very first call the total page count is not known yet.
    let total = if let Some((_, update_total)) = updated_info.as_ref() {
        update_total.to_string()
    } else {
        "Unknown".to_string()
    };
    println!("[{page}/{total}] fetching html...");
    let html = fetch_html(mikan_client, url).await?;
    println!("[{page}/{total}] fetched html done");
    // Stage the raw HTML before parsing — presumably so a failed parse can be
    // inspected at html/temp.html; NOTE(review): confirm intent.
    std::fs::write(TEST_FOLDER.join("html/temp.html"), html.clone())?;
    MikanClassicEpisodeTablePage::from_html(html, mikan_base_url, page, updated_info)
}
/// Convenience wrapper: converts a reverse index (`rev_idx`, where 0 denotes
/// the oldest page) into its forward page number and scrapes that page.
async fn scrape_mikan_classic_episode_table_page_from_rev_id(
    mikan_client: &MikanClient,
    total: i32,
    rev_idx: i32,
) -> RecorderResult<MikanClassicEpisodeTablePage> {
    scrape_mikan_classic_episode_table_page(mikan_client, total - rev_idx, Some((rev_idx, total)))
        .await
}
/// Merges every per-page `rev_*.parquet` file into one deduplicated dataset
/// (latest record wins per fansub/episode pair) and writes the stripped-down
/// `tiny.parquet` output used as a test fixture.
///
/// # Errors
/// Fails when no page files exist, or on any polars read/merge/write error.
async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()> {
    use polars::prelude::*;
    let dir = TEST_FOLDER.join("parquet");
    let files = std::fs::read_dir(dir)?;
    // Keep only the per-page inputs (rev_*.parquet); merged outputs such as
    // tiny.parquet live in the same folder and must not be fed back in.
    let parquet_paths = files
        .filter_map(|f| f.ok())
        .filter_map(|f| {
            let path = f.path();
            if let Some(ext) = path.extension()
                && ext == "parquet"
                && path
                    .file_stem()
                    .is_some_and(|f| f.to_string_lossy().starts_with("rev_"))
            {
                Some(path)
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    if parquet_paths.is_empty() {
        return Err(RecorderError::without_source(
            "No parquet files found to merge".into(),
        ));
    }
    println!("Found {} parquet files to merge", parquet_paths.len());
    // Read and merge all parquet files.
    let mut all_dfs = Vec::with_capacity(parquet_paths.len());
    for path in &parquet_paths {
        println!("Reading {path:?}");
        let file = std::fs::File::open(path)?;
        let df = ParquetReader::new(file).finish().map_err(|e| {
            let message = format!("Failed to read parquet file {path:?}: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?;
        all_dfs.push(df);
    }
    let lazy_frames: Vec<LazyFrame> = all_dfs.into_iter().map(|df| df.lazy()).collect();
    // Sort newest-first so that unique(..., KeepFirst) retains the latest
    // record for each (mikan_fansub_id, mikan_episode_id) pair.
    let merged_df = concat_lf_diagonal(&lazy_frames, UnionArgs::default())
        .map_err(|e| {
            let message = format!("Failed to concat DataFrames: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?
        .sort(
            ["publish_at_timestamp"],
            SortMultipleOptions::default().with_order_descending(true),
        )
        .unique(
            Some(vec![
                "mikan_fansub_id".to_string(),
                "mikan_episode_id".to_string(),
            ]),
            UniqueKeepStrategy::First,
        )
        .collect()
        .map_err(|e| {
            let message = format!("Failed to collect lazy DataFrame: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?;
    /// Writes the selected `columns` of `merged_df`, re-sorted ascending by
    /// publish time, to `parquet/{name}.parquet` with maximum zstd compression.
    fn select_columns_and_write(
        merged_df: DataFrame,
        name: &str,
        columns: &[&str],
    ) -> RecorderResult<()> {
        let mut result_df = merged_df
            .lazy()
            .sort(["publish_at_timestamp"], SortMultipleOptions::default())
            .select(columns.iter().map(|c| col(*c)).collect_vec())
            .collect()
            .map_err(|e| {
                let message = format!("Failed to sort and select columns: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;
        let output_path = TEST_FOLDER.join(format!("parquet/{name}.parquet"));
        let mut output_file = std::fs::File::create(&output_path)?;
        // Write the frame in place — ParquetWriter::finish only needs &mut,
        // so no defensive clone of the whole DataFrame is required.
        ParquetWriter::new(&mut output_file)
            .set_parallel(true)
            .with_compression(ParquetCompression::Zstd(Some(
                ZstdLevel::try_new(22).unwrap(),
            )))
            .finish(&mut result_df)
            .map_err(|e| {
                let message = format!("Failed to write merged parquet file: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;
        println!("Merged {} rows into {output_path:?}", result_df.height());
        Ok(())
    }
    select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
    // Additional output profiles, currently disabled:
    // select_columns_and_write(
    //     merged_df.clone(),
    //     "lite",
    //     &[
    //         "mikan_fansub_id",
    //         "fansub_name",
    //         "mikan_episode_id",
    //         "original_name",
    //     ],
    // )?;
    // select_columns_and_write(
    //     merged_df,
    //     "full",
    //     &[
    //         "id",
    //         "publish_at_timestamp",
    //         "mikan_fansub_id",
    //         "fansub_name",
    //         "mikan_episode_id",
    //         "original_name",
    //         "magnet_link",
    //         "file_size",
    //         "torrent_link",
    //     ],
    // )?;
    Ok(())
}
/// Entry point: scrapes every classic-episode listing page (resuming from
/// whatever is already on disk), then merges the results into fixture files.
#[tokio::main]
async fn main() -> RecorderResult<()> {
    // Make sure every output folder exists before scraping starts.
    std::fs::create_dir_all(TEST_FOLDER.join("html"))?;
    std::fs::create_dir_all(TEST_FOLDER.join("parquet"))?;
    std::fs::create_dir_all(TEST_FOLDER.join("csv"))?;
    // Rate-limited client (leaky bucket ~1 req/s, 3 retries) with a browser
    // user agent, pointed at the real site.
    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(1),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(std::time::Duration::from_millis(1000)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;
    // Page 1 also carries the total page count needed to plan the rest.
    let first_page_and_pagination_info =
        scrape_mikan_classic_episode_table_page(&mikan_scrape_client, 1, None).await?;
    let total_page = first_page_and_pagination_info.total;
    first_page_and_pagination_info.save_to_files()?;
    // Resume: only scrape reverse ids that are not yet saved on disk.
    let next_rev_ids = MikanClassicEpisodeTablePage::waiting_rev_ids(total_page)?;
    for todo_rev_id in next_rev_ids {
        let page = scrape_mikan_classic_episode_table_page_from_rev_id(
            &mikan_scrape_client,
            total_page,
            todo_rev_id,
        )
        .await?;
        page.save_to_files()?;
    }
    // Merge all parquet files.
    println!("\nMerging all parquet files...");
    merge_mikan_classic_episodes_and_strip_columns().await?;
    println!("Merge completed!");
    Ok(())
}

View File

@@ -0,0 +1,249 @@
use std::time::Duration;
use color_eyre::{Result, eyre::OptionExt};
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use inquire::{Password, Text, validator::Validation};
use recorder::{
crypto::UserPassCredential,
extract::mikan::{
MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
},
test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
};
use scraper::Html;
use tokio::fs;
use url::Url;
#[tokio::main]
async fn main() -> Result<()> {
tracing_subscriber::fmt()
.with_max_level(tracing::Level::INFO)
.init();
std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
http_client: HttpClientConfig {
exponential_backoff_max_retries: Some(3),
leaky_bucket_max_tokens: Some(2),
leaky_bucket_initial_tokens: Some(0),
leaky_bucket_refill_tokens: Some(1),
leaky_bucket_refill_interval: Some(Duration::from_millis(1000)),
user_agent: Some(
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
.to_string(),
),
..Default::default()
},
base_url: Url::parse("https://mikanani.me")?,
})
.await?;
let username_validator = |input: &str| {
if input.trim().is_empty() {
Ok(Validation::Invalid("Username cannot be empty".into()))
} else {
Ok(Validation::Valid)
}
};
let password_validator = |input: &str| {
if input.trim().is_empty() {
Ok(Validation::Invalid("Password cannot be empty".into()))
} else {
Ok(Validation::Valid)
}
};
let username = Text::new("Please enter your mikan username:")
.with_validator(username_validator)
.prompt()?;
let password = Password::new("Please enter your mikan password:")
.without_confirmation()
.with_display_mode(inquire::PasswordDisplayMode::Masked)
.with_validator(password_validator)
.prompt()?;
let mikan_scrape_client = mikan_scrape_client
.fork_with_userpass_credential(UserPassCredential {
username,
password,
user_agent: None,
cookies: None,
})
.await?;
tracing::info!("Checking if logged in...");
if !mikan_scrape_client.has_login().await? {
tracing::info!("Logging in to mikan...");
mikan_scrape_client.login().await?;
tracing::info!("Logged in to mikan");
}
let mikan_base_url = mikan_scrape_client.base_url().clone();
tracing::info!("Scraping season subscription...");
let season_subscription =
fs::read("tests/resources/mikan/BangumiCoverFlow-2025-spring.html").await?;
let html = Html::parse_fragment(String::from_utf8(season_subscription)?.as_str());
let bangumi_index_list =
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(&html, &mikan_base_url);
for bangumi_index in bangumi_index_list {
let bangumi_meta = {
let bangumi_expand_subscribed_url = build_mikan_bangumi_expand_subscribed_url(
mikan_base_url.clone(),
bangumi_index.mikan_bangumi_id.as_ref(),
);
let bangumi_expand_subscribed_doppel_path =
MikanDoppelPath::new(bangumi_expand_subscribed_url.clone());
tracing::info!(
bangumi_title = bangumi_index.bangumi_title,
"Scraping bangumi expand subscribed..."
);
let bangumi_expand_subscribed_data =
if !bangumi_expand_subscribed_doppel_path.exists_any() {
let bangumi_expand_subscribed_data =
fetch_html(&mikan_scrape_client, bangumi_expand_subscribed_url).await?;
bangumi_expand_subscribed_doppel_path.write(&bangumi_expand_subscribed_data)?;
tracing::info!(
bangumi_title = bangumi_index.bangumi_title,
"Bangumi expand subscribed saved"
);
bangumi_expand_subscribed_data
} else {
tracing::info!(
bangumi_title = bangumi_index.bangumi_title,
"Bangumi expand subscribed already exists"
);
String::from_utf8(bangumi_expand_subscribed_doppel_path.read()?)?
};
let html = Html::parse_fragment(&bangumi_expand_subscribed_data);
extract_mikan_bangumi_meta_from_expand_subscribed_fragment(
&html,
bangumi_index.clone(),
mikan_base_url.clone(),
)
.ok_or_eyre(format!(
"Failed to extract bangumi meta from expand subscribed fragment: {:?}",
bangumi_index.bangumi_title
))
}?;
{
if let Some(poster_url) = bangumi_meta.origin_poster_src.as_ref() {
let poster_doppel_path = MikanDoppelPath::new(poster_url.clone());
tracing::info!(
title = bangumi_meta.bangumi_title,
"Scraping bangumi poster..."
);
if !poster_doppel_path.exists_any() {
let poster_data = fetch_image(&mikan_scrape_client, poster_url.clone()).await?;
poster_doppel_path.write(&poster_data)?;
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi poster saved");
} else {
tracing::info!(
title = bangumi_meta.bangumi_title,
"Bangumi poster already exists"
);
}
}
}
{
let bangumi_homepage_url = bangumi_meta
.bangumi_hash()
.build_homepage_url(mikan_base_url.clone());
let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
tracing::info!(
title = bangumi_meta.bangumi_title,
"Scraping bangumi homepage..."
);
if !bangumi_homepage_doppel_path.exists_any() {
let bangumi_homepage_data =
fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi homepage saved");
} else {
tracing::info!(
title = bangumi_meta.bangumi_title,
"Bangumi homepage already exists"
);
}
}
let rss_items = {
let bangumi_rss_url = bangumi_meta
.bangumi_hash()
.build_rss_url(mikan_base_url.clone());
let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
tracing::info!(
title = bangumi_meta.bangumi_title,
"Scraping bangumi rss..."
);
let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi rss saved");
bangumi_rss_data
} else {
tracing::info!(
title = bangumi_meta.bangumi_title,
"Bangumi rss already exists"
);
String::from_utf8(bangumi_rss_doppel_path.read()?)?
};
let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
rss_items
.into_iter()
.map(MikanRssEpisodeItem::try_from)
.collect::<Result<Vec<_>, _>>()
}?;
for rss_item in rss_items {
{
let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
let episode_homepage_doppel_path =
MikanDoppelPath::new(episode_homepage_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode...");
if !episode_homepage_doppel_path.exists_any() {
let episode_homepage_data =
fetch_html(&mikan_scrape_client, episode_homepage_url).await?;
episode_homepage_doppel_path.write(&episode_homepage_data)?;
tracing::info!(title = rss_item.title, "Episode saved");
} else {
tracing::info!(title = rss_item.title, "Episode already exists");
};
}
{
let episode_torrent_url = rss_item.torrent_link;
let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
if !episode_torrent_doppel_path.exists_any() {
match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
Ok(episode_torrent_data) => {
episode_torrent_doppel_path.write(&episode_torrent_data)?;
tracing::info!(title = rss_item.title, "Episode torrent saved");
}
Err(e) => {
if let FetchError::ReqwestError { source } = &e
&& source
.status()
.is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
{
tracing::warn!(
title = rss_item.title,
"Episode torrent not found, maybe deleted since new version"
);
episode_torrent_doppel_path
.write_meta(MikanDoppelMeta { status: 404 })?;
} else {
Err(e)?;
}
}
}
} else {
tracing::info!(title = rss_item.title, "Episode torrent already exists");
}
}
}
}
tracing::info!("Scraping season subscription done");
Ok(())
}

View File

@@ -0,0 +1,215 @@
use std::time::Duration;
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use recorder::{
errors::RecorderResult,
extract::mikan::{
MikanClient, MikanConfig, MikanRssEpisodeItem,
extract_mikan_episode_meta_from_episode_homepage_html,
},
test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
};
use scraper::Html;
use tokio::fs;
use url::Url;
/// Dev scraping tool: replays a captured subscriber ("My Bangumi") RSS feed
/// and mirrors every referenced resource (episode homepages, posters, bangumi
/// homepages/RSS, torrents) into local "doppel" fixture files under
/// `apps/recorder/tests/resources/mikan/doppel`.
///
/// Existing doppel files are reused instead of re-fetched, so the tool is
/// resumable. Torrent 404s are tolerated and recorded as doppel meta.
#[tokio::main]
async fn main() -> RecorderResult<()> {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();

    // All fixture paths below are relative to the recorder app directory.
    std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;

    // Rate-limited, retrying HTTP client so we do not hammer mikanani.me.
    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(0),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(Duration::from_millis(500)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;
    let mikan_base_url = mikan_scrape_client.base_url().clone();

    tracing::info!("Scraping subscriber subscription...");
    // Seed item list from the previously captured subscriber RSS fixture.
    let subscriber_subscription =
        fs::read("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
    let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
    let rss_items: Vec<MikanRssEpisodeItem> = channel
        .items
        .into_iter()
        .map(MikanRssEpisodeItem::try_from)
        .collect::<Result<Vec<_>, _>>()?;

    for rss_item in rss_items {
        // Episode homepage: fetch (or reuse doppel) and extract episode meta.
        let episode_homepage_meta = {
            tracing::info!(title = rss_item.title, "Scraping episode homepage...");
            let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
            let episode_homepage_doppel_path = MikanDoppelPath::new(episode_homepage_url.clone());
            let episode_homepage_data = if !episode_homepage_doppel_path.exists_any() {
                let episode_homepage_data =
                    fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
                episode_homepage_doppel_path.write(&episode_homepage_data)?;
                tracing::info!(title = rss_item.title, "Episode homepage saved");
                episode_homepage_data
            } else {
                tracing::info!(title = rss_item.title, "Episode homepage already exists");
                String::from_utf8(episode_homepage_doppel_path.read()?)?
            };
            let html = Html::parse_document(&episode_homepage_data);
            extract_mikan_episode_meta_from_episode_homepage_html(
                &html,
                mikan_base_url.clone(),
                episode_homepage_url,
            )
        }?;
        {
            // Episode torrent: a 404 is tolerated (the episode may have been
            // replaced by a newer upload) and recorded as doppel meta.
            let episode_torrent_url = rss_item.torrent_link;
            let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
            tracing::info!(title = rss_item.title, "Scraping episode torrent...");
            if !episode_torrent_doppel_path.exists_any() {
                match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                    Ok(episode_torrent_data) => {
                        episode_torrent_doppel_path.write(&episode_torrent_data)?;
                        tracing::info!(title = rss_item.title, "Episode torrent saved");
                    }
                    Err(e) => {
                        if let FetchError::ReqwestError { source } = &e
                            && source
                                .status()
                                .is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
                        {
                            tracing::warn!(
                                title = rss_item.title,
                                "Episode torrent not found, maybe deleted since new version"
                            );
                            episode_torrent_doppel_path
                                .write_meta(MikanDoppelMeta { status: 404 })?;
                        } else {
                            Err(e)?;
                        }
                    }
                }
                // NOTE: a stray "Episode torrent saved" log used to sit here,
                // after the match; it also fired on the 404/meta path and
                // duplicated the Ok-path log, so it was removed.
            } else {
                tracing::info!(title = rss_item.title, "Episode torrent already exists");
            }
        }
        {
            // Episode poster, when the homepage exposes one.
            if let Some(episode_poster_url) = episode_homepage_meta.origin_poster_src.as_ref() {
                let episode_poster_doppel_path = MikanDoppelPath::new(episode_poster_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode poster...");
                if !episode_poster_doppel_path.exists_any() {
                    let episode_poster_data =
                        fetch_image(&mikan_scrape_client, episode_poster_url.clone()).await?;
                    episode_poster_doppel_path.write(&episode_poster_data)?;
                    tracing::info!(title = rss_item.title, "Episode poster saved");
                } else {
                    tracing::info!(title = rss_item.title, "Episode poster already exists");
                }
            }
        }
        {
            // Homepage of the bangumi this episode belongs to.
            let bangumi_homepage_url = episode_homepage_meta
                .bangumi_hash()
                .build_homepage_url(mikan_base_url.clone());
            let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
            tracing::info!(title = rss_item.title, "Scraping bangumi homepage...");
            if !bangumi_homepage_doppel_path.exists_any() {
                let bangumi_homepage_data =
                    fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
                bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
                tracing::info!(title = rss_item.title, "Bangumi homepage saved");
            } else {
                tracing::info!(title = rss_item.title, "Bangumi homepage already exists");
            };
        }
        {
            // Per-bangumi RSS: fetch it, then mirror every episode it lists
            // (homepage + torrent) the same way as above.
            let bangumi_rss_url = episode_homepage_meta
                .bangumi_hash()
                .build_rss_url(mikan_base_url.clone());
            let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
            tracing::info!(title = rss_item.title, "Scraping bangumi rss...");
            let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
                let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
                bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
                tracing::info!(title = rss_item.title, "Bangumi rss saved");
                bangumi_rss_data
            } else {
                tracing::info!(title = rss_item.title, "Bangumi rss already exists");
                String::from_utf8(bangumi_rss_doppel_path.read()?)?
            };
            let channel = rss::Channel::read_from(bangumi_rss_data.as_bytes())?;
            let rss_items: Vec<MikanRssEpisodeItem> = channel
                .items
                .into_iter()
                .map(MikanRssEpisodeItem::try_from)
                .collect::<Result<Vec<_>, _>>()?;
            for rss_item in rss_items {
                {
                    tracing::info!(title = rss_item.title, "Scraping episode homepage...");
                    let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
                    let episode_homepage_doppel_path =
                        MikanDoppelPath::new(episode_homepage_url.clone());
                    if !episode_homepage_doppel_path.exists_any() {
                        let episode_homepage_data =
                            fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
                        episode_homepage_doppel_path.write(&episode_homepage_data)?;
                        tracing::info!(title = rss_item.title, "Episode homepage saved");
                    } else {
                        tracing::info!(title = rss_item.title, "Episode homepage already exists");
                    };
                };
                {
                    let episode_torrent_url = rss_item.torrent_link;
                    let episode_torrent_doppel_path =
                        MikanDoppelPath::new(episode_torrent_url.clone());
                    tracing::info!(title = rss_item.title, "Scraping episode torrent...");
                    if !episode_torrent_doppel_path.exists_any() {
                        match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                            Ok(episode_torrent_data) => {
                                episode_torrent_doppel_path.write(&episode_torrent_data)?;
                                tracing::info!(title = rss_item.title, "Episode torrent saved");
                            }
                            Err(e) => {
                                if let FetchError::ReqwestError { source } = &e
                                    && source.status().is_some_and(|status| {
                                        status == reqwest::StatusCode::NOT_FOUND
                                    })
                                {
                                    tracing::warn!(
                                        title = rss_item.title,
                                        "Episode torrent not found, maybe deleted since new \
                                         version"
                                    );
                                    episode_torrent_doppel_path
                                        .write_meta(MikanDoppelMeta { status: 404 })?;
                                } else {
                                    Err(e)?;
                                }
                            }
                        }
                        // Same fix as the outer loop: the duplicate
                        // "Episode torrent saved" log after the match was
                        // removed (it also fired on the 404/meta path).
                    } else {
                        tracing::info!(title = rss_item.title, "Episode torrent already exists");
                    }
                }
            }
        }
    }
    tracing::info!("Scraping subscriber subscription done");
    Ok(())
}

View File

@@ -0,0 +1,33 @@
#![feature(duration_constructors_lite)]
use std::{sync::Arc, time::Duration};
use apalis_sql::postgres::PostgresStorage;
use recorder::{
app::AppContextTrait,
errors::RecorderResult,
test_utils::{
app::TestingAppContext,
database::{TestingDatabaseServiceConfig, build_testing_database_service},
},
};
/// Dev helper: brings up a throwaway testing database (without running
/// migrations), installs the apalis Postgres job tables, then idles so the
/// database can be inspected from an external client.
#[tokio::main]
async fn main() -> RecorderResult<()> {
    let db_service = build_testing_database_service(TestingDatabaseServiceConfig {
        auto_migrate: false,
    })
    .await?;
    let app_ctx = Arc::new(TestingAppContext::builder().db(db_service).build());

    let db = app_ctx.db();
    PostgresStorage::setup(db.get_postgres_connection_pool()).await?;

    // Print the connection options so a developer can attach to the database.
    dbg!(db.get_postgres_connection_pool().connect_options());

    // Keep the process (and thus the testing database) alive for an hour.
    tokio::time::sleep(Duration::from_hours(1)).await;

    Ok(())
}

View File

@@ -0,0 +1,114 @@
# Application logging configuration
[logger]
# Enable or disable logging.
enable = true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace = true
# Log level, options: trace, debug, info, warn or error.
level = "debug"
# Define the logging format. options: compact, pretty or Json
format = "compact"
# By default the logger filters out everything except logs coming from your own code or from the `loco` framework.
# Uncomment the line below to override the logger filters and also see logs from all third-party libraries.
# override_filter: trace
# Web server configuration
[server]
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
port = 5001
binding = "0.0.0.0"
# The UI hostname or IP address that mailers will point to.
host = '{{ get_env(name="HOST", default="localhost") }}'
# Out-of-the-box middleware configuration. To disable a middleware, change its `enable` field to `false` or comment out the middleware block.
# Enable Etag cache header middleware
[server.middlewares.etag]
enable = true
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
[server.middlewares.request_id]
enable = true
[server.middlewares.logger]
enable = true
# When your code panics, the request still returns a 500 status code instead of killing the server.
[server.middlewares.catch_panic]
enable = true
# Timeout middleware for incoming requests. Requests that take longer than the configured duration will be cut off and a 408 status code will be returned.
[server.middlewares.timeout_request]
enable = false
# Duration time in milliseconds.
timeout = 5000
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://konobangu.com
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
[server.middlewares.cors]
enable = true
[server.middlewares.compression]
enable = true
# Database Configuration
[database]
# Database connection URI
uri = '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@localhost:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging = true
# Set the timeout duration when acquiring a connection.
connect_timeout = 500
# Set the idle duration before closing a connection.
idle_timeout = 500
# Minimum number of connections for a pool.
min_connections = 1
# Maximum number of connections for a pool.
max_connections = 10
# Run migration up when application loaded
auto_migrate = true
[storage]
data_dir = '{{ get_env(name="STORAGE_DATA_DIR", default="./data") }}'
[mikan]
base_url = "https://mikanani.me/"
[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 1
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500
[mikan.http_client.proxy]
server = '{{ get_env(name="MIKAN_PROXY", default = "") }}'
auth_header = '{{ get_env(name="MIKAN_PROXY_AUTH_HEADER", default = "") }}'
no_proxy = '{{ get_env(name="MIKAN_NO_PROXY", default = "") }}'
accept_invalid_certs = '{{ get_env(name="MIKAN_PROXY_ACCEPT_INVALID_CERTS", default = "false") }}'
[auth]
auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
basic_password = '{{ get_env(name="BASIC_PASSWORD", default = "konobangu") }}'
oidc_issuer = '{{ get_env(name="OIDC_ISSUER", default = "") }}'
oidc_audience = '{{ get_env(name="OIDC_AUDIENCE", default = "") }}'
oidc_client_id = '{{ get_env(name="OIDC_CLIENT_ID", default = "") }}'
oidc_client_secret = '{{ get_env(name="OIDC_CLIENT_SECRET", default = "") }}'
oidc_extra_scopes = '{{ get_env(name="OIDC_EXTRA_SCOPES", default = "") }}'
oidc_extra_claim_key = '{{ get_env(name="OIDC_EXTRA_CLAIM_KEY", default = "") }}'
oidc_extra_claim_value = '{{ get_env(name="OIDC_EXTRA_CLAIM_VALUE", default = "") }}'
[graphql]
# depth_limit = inf
# complexity_limit = inf

View File

@@ -0,0 +1,158 @@
use clap::{Parser, command};
use super::{AppContext, core::App, env::Environment};
use crate::{app::config::AppConfig, errors::RecorderResult};
// CLI arguments of the main binary. NOTE(review): field `///` doc comments
// double as clap help text, so review-only notes below use plain `//`.
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
pub struct MainCliArgs {
    /// Explicit config file path
    #[arg(short, long)]
    config_file: Option<String>,
    /// Explicit dotenv file path
    #[arg(short, long)]
    dotenv_file: Option<String>,
    /// Explicit working dir
    #[arg(short, long)]
    working_dir: Option<String>,
    /// Explicit environment
    #[arg(short, long)]
    environment: Option<Environment>,
    // Defaults to true when omitted (see `AppBuilder::from_main_cli`).
    #[arg(long)]
    graceful_shutdown: Option<bool>,
}
/// Accumulates environment, paths, and runtime options before constructing
/// the application via [`AppBuilder::build`].
pub struct AppBuilder {
    /// Explicit dotenv file path; when `None`, the working dir is searched.
    dotenv_file: Option<String>,
    /// Explicit config file path; when `None`, the working dir is searched.
    config_file: Option<String>,
    working_dir: String,
    environment: Environment,
    /// When true, the web server and task monitor shut down on signals.
    pub graceful_shutdown: bool,
}
impl AppBuilder {
    /// Build an `AppBuilder` from parsed CLI arguments.
    ///
    /// `environment` (when `Some`) overrides the `--environment` flag; when
    /// neither is given, the environment is inferred from the build profile
    /// (test -> Testing, debug -> Development, release -> Production).
    pub async fn from_main_cli(environment: Option<Environment>) -> RecorderResult<Self> {
        let args = MainCliArgs::parse();

        let environment = environment.unwrap_or_else(|| {
            args.environment.unwrap_or({
                if cfg!(test) {
                    Environment::Testing
                } else if cfg!(debug_assertions) {
                    Environment::Development
                } else {
                    Environment::Production
                }
            })
        });

        let mut builder = Self::default();

        if let Some(working_dir) = args.working_dir {
            // An explicit --working-dir always wins.
            builder = builder.working_dir(working_dir);
        } else if matches!(
            &environment,
            Environment::Testing | Environment::Development
        ) {
            // Dev/test binaries are typically launched from the workspace
            // root; fall back to the crate manifest dir so relative paths
            // resolve. FIX: previously this branch also ran when
            // --working-dir was given, silently overriding the explicit flag.
            builder = builder.working_dir_from_manifest_dir();
        }

        builder = builder
            .config_file(args.config_file)
            .dotenv_file(args.dotenv_file)
            .environment(environment)
            .graceful_shutdown(args.graceful_shutdown.unwrap_or(true));

        Ok(builder)
    }

    /// Load dotenv files, then configuration, then construct the [`App`].
    pub async fn build(self) -> RecorderResult<App> {
        self.load_env().await?;

        let config = self.load_config().await?;

        let app_context =
            AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?;

        Ok(App {
            context: app_context,
            builder: self,
        })
    }

    /// Apply dotenv files resolved from the environment and working dir.
    pub async fn load_env(&self) -> RecorderResult<()> {
        AppConfig::load_dotenv(
            &self.environment,
            &self.working_dir,
            self.dotenv_file.as_deref(),
        )
        .await?;
        Ok(())
    }

    /// Load and merge configuration for the configured environment.
    pub async fn load_config(&self) -> RecorderResult<AppConfig> {
        let config = AppConfig::load_config(
            &self.environment,
            &self.working_dir,
            self.config_file.as_deref(),
        )
        .await?;
        Ok(config)
    }

    pub fn working_dir(mut self, working_dir: String) -> Self {
        self.working_dir = working_dir;
        self
    }

    pub fn environment(mut self, environment: Environment) -> Self {
        self.environment = environment;
        self
    }

    pub fn config_file(mut self, config_file: Option<String>) -> Self {
        self.config_file = config_file;
        self
    }

    pub fn graceful_shutdown(mut self, graceful_shutdown: bool) -> Self {
        self.graceful_shutdown = graceful_shutdown;
        self
    }

    pub fn dotenv_file(mut self, dotenv_file: Option<String>) -> Self {
        self.dotenv_file = dotenv_file;
        self
    }

    /// Use the crate manifest dir in debug/test builds; fall back to the
    /// repo-relative app dir otherwise.
    pub fn working_dir_from_manifest_dir(self) -> Self {
        let manifest_dir = if cfg!(debug_assertions) || cfg!(test) {
            env!("CARGO_MANIFEST_DIR")
        } else {
            "./apps/recorder"
        };
        self.working_dir(manifest_dir.to_string())
    }
}
impl Default for AppBuilder {
fn default() -> Self {
Self {
environment: Environment::Production,
dotenv_file: None,
config_file: None,
working_dir: String::from("."),
graceful_shutdown: true,
}
}
}

View File

@@ -0,0 +1,31 @@
[storage]
data_dir = "./data"
[mikan]
base_url = "https://mikanani.me/"
[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 0
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500
[mikan.http_client.proxy]
[mikan.http_client.proxy.headers]
[graphql]
depth_limit = inf
complexity_limit = inf
[cache]
[crypto]
[task]
[message]
[media]

View File

@@ -0,0 +1,181 @@
use std::{fs, path::Path, str};
use figment::{
Figment, Provider,
providers::{Format, Json, Toml, Yaml},
};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use super::env::Environment;
use crate::{
auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
task::TaskConfig, web::WebServerConfig,
};
/// Baseline configuration merged under every loaded config file.
const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
/// Recognized config file extensions, each including the leading dot.
const CONFIG_ALLOWED_EXTENSIONS: &[&str] = &[".toml", ".json", ".yaml", ".yml"];
/// Top-level application configuration, deserialized from the merged figment
/// (default mixin + environment-specific config files).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppConfig {
    pub server: WebServerConfig,
    pub cache: CacheConfig,
    pub auth: AuthConfig,
    pub storage: StorageConfig,
    pub mikan: MikanConfig,
    pub crypto: CryptoConfig,
    pub graphql: GraphQLConfig,
    pub media: MediaConfig,
    pub logger: LoggerConfig,
    pub database: DatabaseConfig,
    pub task: TaskConfig,
    pub message: MessageConfig,
}
impl AppConfig {
    /// Base name of config files: `<crate-name>.config`
    /// (e.g. `recorder.config.development.toml`).
    pub fn config_prefix() -> String {
        format!("{}.config", env!("CARGO_PKG_NAME"))
    }

    /// Base name of dotenv files (`.env`, `.env.development`, ...).
    pub fn dotenv_prefix() -> String {
        String::from(".env")
    }

    /// Allowed config file extensions, with leading dots.
    pub fn allowed_extension() -> Vec<String> {
        CONFIG_ALLOWED_EXTENSIONS
            .iter()
            .map(|s| s.to_string())
            .collect_vec()
    }

    /// Filename suffixes tried in priority order (first existing file wins):
    /// `.{full}.local`, `.{short}.local`, `.local`, `.{full}`, `.{short}`,
    /// and finally the bare name.
    pub fn priority_suffix(environment: &Environment) -> Vec<String> {
        // FIX: the non-`.local` entries previously lacked the leading dot,
        // producing names like `recorder.configdevelopment.toml` and
        // `.envdevelopment` instead of `recorder.config.development.toml`
        // and `.env.development`, inconsistent with the `.local` variants.
        vec![
            format!(".{}.local", environment.full_name()),
            format!(".{}.local", environment.short_name()),
            String::from(".local"),
            format!(".{}", environment.full_name()),
            format!(".{}", environment.short_name()),
            String::from(""),
        ]
    }

    /// Baseline provider merged under every loaded file.
    pub fn default_provider() -> impl Provider {
        Toml::string(DEFAULT_CONFIG_MIXIN)
    }

    /// Render `filepath` as a Tera template (config files may contain
    /// `{{ get_env(...) }}` placeholders), then merge it into `fig` using the
    /// parser selected by `ext` (leading dot expected, e.g. `".toml"`).
    pub fn merge_provider_from_file(
        fig: Figment,
        filepath: impl AsRef<Path>,
        ext: &str,
    ) -> RecorderResult<Figment> {
        let content = fs::read_to_string(filepath)?;

        let rendered = tera::Tera::one_off(
            &content,
            &tera::Context::from_value(serde_json::json!({}))?,
            false,
        )?;

        Ok(match ext {
            ".toml" => fig.merge(Toml::string(&rendered)),
            ".json" => fig.merge(Json::string(&rendered)),
            ".yaml" | ".yml" => fig.merge(Yaml::string(&rendered)),
            // Callers only pass extensions from CONFIG_ALLOWED_EXTENSIONS.
            _ => unreachable!("unsupported config extension"),
        })
    }

    /// Apply the first matching dotenv file: either the explicit
    /// `dotenv_file`, or the highest-priority `.env*` file in `working_dir`.
    pub async fn load_dotenv(
        environment: &Environment,
        working_dir: &str,
        dotenv_file: Option<&str>,
    ) -> RecorderResult<()> {
        let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
            vec![dotenv_file]
        } else {
            vec![Some(working_dir)]
        };

        let priority_suffix = &AppConfig::priority_suffix(environment);
        let dotenv_prefix = AppConfig::dotenv_prefix();
        let try_filenames = priority_suffix
            .iter()
            .map(|ps| format!("{}{}", &dotenv_prefix, ps))
            .collect_vec();

        for try_dotenv_file_or_dir in try_dotenv_file_or_dirs.into_iter().flatten() {
            let try_dotenv_file_or_dir_path = Path::new(try_dotenv_file_or_dir);
            if try_dotenv_file_or_dir_path.exists() {
                if try_dotenv_file_or_dir_path.is_dir() {
                    for f in try_filenames.iter() {
                        let p = try_dotenv_file_or_dir_path.join(f);
                        if p.exists() && p.is_file() {
                            dotenvy::from_path(p)?;
                            break;
                        }
                    }
                } else if try_dotenv_file_or_dir_path.is_file() {
                    dotenvy::from_path(try_dotenv_file_or_dir_path)?;
                    break;
                }
            }
        }

        Ok(())
    }

    /// Load configuration: the default mixin, merged with either the explicit
    /// `config_file` or the highest-priority config file in `working_dir`.
    pub async fn load_config(
        environment: &Environment,
        working_dir: &str,
        config_file: Option<&str>,
    ) -> RecorderResult<AppConfig> {
        let try_config_file_or_dirs = if config_file.is_some() {
            vec![config_file]
        } else {
            vec![Some(working_dir)]
        };

        let allowed_extensions = &AppConfig::allowed_extension();
        let priority_suffix = &AppConfig::priority_suffix(environment);
        let convention_prefix = &AppConfig::config_prefix();

        let try_filenames = priority_suffix
            .iter()
            .flat_map(|ps| {
                allowed_extensions
                    .iter()
                    .map(move |ext| (format!("{convention_prefix}{ps}{ext}"), ext))
            })
            .collect_vec();

        let mut fig = Figment::from(AppConfig::default_provider());

        for try_config_file_or_dir in try_config_file_or_dirs.into_iter().flatten() {
            let try_config_file_or_dir_path = Path::new(try_config_file_or_dir);
            if try_config_file_or_dir_path.exists() {
                if try_config_file_or_dir_path.is_dir() {
                    for (f, ext) in try_filenames.iter() {
                        let p = try_config_file_or_dir_path.join(f);
                        if p.exists() && p.is_file() {
                            fig = AppConfig::merge_provider_from_file(fig, &p, ext)?;
                            break;
                        }
                    }
                } else if let Some(ext) = try_config_file_or_dir_path
                    .extension()
                    .and_then(|s| s.to_str())
                    && try_config_file_or_dir_path.is_file()
                {
                    // FIX: `Path::extension()` yields the extension WITHOUT
                    // its dot ("toml"), but `merge_provider_from_file`
                    // matches dotted extensions (".toml"); passing it through
                    // unchanged hit the `unreachable!` panic for every
                    // explicitly given config file.
                    fig = AppConfig::merge_provider_from_file(
                        fig,
                        try_config_file_or_dir_path,
                        &format!(".{ext}"),
                    )?;
                    break;
                }
            }
        }

        let app_config: AppConfig = fig.extract()?;

        Ok(app_config)
    }
}

View File

@@ -0,0 +1,147 @@
use std::{fmt::Debug, sync::Arc};
use tokio::sync::OnceCell;
use super::{Environment, config::AppConfig};
use crate::{
auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
task::TaskService,
};
/// Accessor surface over all application services; implemented by the
/// production [`AppContext`] and by test contexts.
pub trait AppContextTrait: Send + Sync + Debug {
    fn logger(&self) -> &LoggerService;
    fn db(&self) -> &DatabaseService;
    fn config(&self) -> &AppConfig;
    fn cache(&self) -> &CacheService;
    fn mikan(&self) -> &MikanClient;
    fn auth(&self) -> &AuthService;
    fn graphql(&self) -> &GraphQLService;
    fn storage(&self) -> &StorageService;
    fn working_dir(&self) -> &String;
    fn environment(&self) -> &Environment;
    fn crypto(&self) -> &CryptoService;
    fn task(&self) -> &TaskService;
    fn message(&self) -> &MessageService;
    fn media(&self) -> &MediaService;
}
/// Production implementation of [`AppContextTrait`], owning every service.
pub struct AppContext {
    logger: LoggerService,
    db: DatabaseService,
    config: AppConfig,
    cache: CacheService,
    mikan: MikanClient,
    auth: AuthService,
    storage: StorageService,
    crypto: CryptoService,
    working_dir: String,
    environment: Environment,
    message: MessageService,
    media: MediaService,
    // Filled in after the surrounding `Arc` exists, because these two
    // services need a context handle themselves; see `AppContext::new`.
    task: OnceCell<TaskService>,
    graphql: OnceCell<GraphQLService>,
}
impl AppContext {
    /// Construct the full service graph from loaded configuration.
    ///
    /// Returns an `Arc` because `TaskService` and `GraphQLService` need a
    /// handle back to the context: they are created in a second phase, after
    /// the `Arc` exists, and stored in `OnceCell`s to break the cycle.
    pub async fn new(
        environment: Environment,
        config: AppConfig,
        working_dir: impl ToString,
    ) -> RecorderResult<RecorderResultPlaceholder> {
        unreachable!()
    }
}
impl Debug for AppContext {
    /// Opaque representation; individual services are deliberately not
    /// dumped.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("AppContext")
    }
}
// Plain field getters, except `graphql`/`task`, which unwrap their
// `OnceCell`s: both cells are initialized inside `AppContext::new` before
// the context is handed out, so the `expect`s encode that invariant.
impl AppContextTrait for AppContext {
    fn logger(&self) -> &LoggerService {
        &self.logger
    }
    fn db(&self) -> &DatabaseService {
        &self.db
    }
    fn config(&self) -> &AppConfig {
        &self.config
    }
    fn cache(&self) -> &CacheService {
        &self.cache
    }
    fn mikan(&self) -> &MikanClient {
        &self.mikan
    }
    fn auth(&self) -> &AuthService {
        &self.auth
    }
    /// Panics only if `AppContext::new` did not complete (cell left empty).
    fn graphql(&self) -> &GraphQLService {
        self.graphql.get().expect("graphql should be set")
    }
    fn storage(&self) -> &StorageService {
        &self.storage
    }
    fn working_dir(&self) -> &String {
        &self.working_dir
    }
    fn environment(&self) -> &Environment {
        &self.environment
    }
    fn crypto(&self) -> &CryptoService {
        &self.crypto
    }
    /// Panics only if `AppContext::new` did not complete (cell left empty).
    fn task(&self) -> &TaskService {
        self.task.get().expect("task should be set")
    }
    fn message(&self) -> &MessageService {
        &self.message
    }
    fn media(&self) -> &MediaService {
        &self.media
    }
}

View File

@@ -0,0 +1,166 @@
use std::{net::SocketAddr, sync::Arc};
use axum::Router;
use tokio::{net::TcpSocket, signal};
use tracing::instrument;
use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{
errors::{RecorderError, RecorderResult},
web::{
controller::{self, core::ControllerTrait},
middleware::default_middleware_stack,
},
};
/// Project name shared across modules (e.g. imported by the basic-auth
/// handler).
pub const PROJECT_NAME: &str = "konobangu";

/// A fully constructed application: the shared context plus the builder that
/// produced it (retained for runtime options such as graceful shutdown).
pub struct App {
    pub context: Arc<dyn AppContextTrait>,
    pub builder: AppBuilder,
}
impl App {
    pub fn builder() -> AppBuilder {
        AppBuilder::default()
    }

    /// Bind the configured address and run the HTTP server, the apalis task
    /// monitor, and the task listener concurrently until one of them fails
    /// or a shutdown signal arrives.
    #[instrument(err, skip(self))]
    pub async fn serve(&self) -> RecorderResult<()> {
        let context = &self.context;
        let config = context.config();

        // Build the listener by hand so socket options can be set first.
        let listener = {
            let addr: SocketAddr =
                format!("{}:{}", config.server.binding, config.server.port).parse()?;
            let socket = if addr.is_ipv4() {
                TcpSocket::new_v4()
            } else {
                TcpSocket::new_v6()
            }?;
            socket.set_reuseaddr(true)?;
            // SO_REUSEPORT is best-effort; it is unavailable on some unixes.
            #[cfg(all(unix, not(target_os = "solaris")))]
            if let Err(e) = socket.set_reuseport(true) {
                tracing::warn!("Failed to set SO_REUSEPORT: {}", e);
            }
            socket.bind(addr)?;
            socket.listen(1024)
        }?;

        // Create all controllers concurrently, then mount each one.
        let mut router = Router::<Arc<dyn AppContextTrait>>::new();

        let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
            controller::graphql::create(context.clone()),
            controller::oidc::create(context.clone()),
            controller::metadata::create(context.clone()),
            controller::r#static::create(context.clone()),
            controller::feeds::create(context.clone()),
        )?;

        for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
            router = c.apply_to(router);
        }

        // Apply only the middlewares enabled in configuration.
        let middlewares = default_middleware_stack(context.clone());
        for mid in middlewares {
            if mid.is_enabled() {
                router = mid.apply(router)?;
                tracing::info!(name = mid.name(), "+middleware");
            }
        }

        let router = router
            .with_state(context.clone())
            .into_make_service_with_connect_info::<SocketAddr>();

        let task = context.task();
        let graceful_shutdown = self.builder.graceful_shutdown;

        // Three long-lived components; the first error aborts `serve`.
        tokio::try_join!(
            async {
                // HTTP server.
                let axum_serve = axum::serve(listener, router);

                if graceful_shutdown {
                    axum_serve
                        .with_graceful_shutdown(async move {
                            Self::shutdown_signal().await;
                            tracing::info!("axum shutting down...");
                        })
                        .await?;
                } else {
                    axum_serve.await?;
                }

                Ok::<(), RecorderError>(())
            },
            async {
                // Background task monitor (apalis).
                {
                    let monitor = task.setup_monitor().await?;
                    if graceful_shutdown {
                        monitor
                            .run_with_signal(async move {
                                Self::shutdown_signal().await;
                                tracing::info!("apalis shutting down...");
                                Ok(())
                            })
                            .await?;
                    } else {
                        monitor.run().await?;
                    }
                }
                Ok::<(), RecorderError>(())
            },
            async {
                // Task event listener.
                let listener = task.setup_listener().await?;
                listener.listen().await?;
                Ok::<(), RecorderError>(())
            }
        )?;

        Ok(())
    }

    /// Resolve when a shutdown signal is received: Ctrl+C everywhere,
    /// SIGTERM on unix, and additionally SIGQUIT in unix debug builds.
    async fn shutdown_signal() {
        let ctrl_c = async {
            signal::ctrl_c()
                .await
                .expect("failed to install Ctrl+C handler");
        };

        #[cfg(unix)]
        let terminate = async {
            signal::unix::signal(signal::unix::SignalKind::terminate())
                .expect("failed to install signal handler")
                .recv()
                .await;
        };

        #[cfg(all(unix, debug_assertions))]
        let quit = async {
            signal::unix::signal(signal::unix::SignalKind::quit())
                .expect("Failed to install SIGQUIT handler")
                .recv()
                .await;
            println!("Received SIGQUIT");
        };

        // No-op stand-ins for platforms/builds without those signals, so the
        // `select!` below always has three arms.
        #[cfg(not(unix))]
        let terminate = std::future::pending::<()>();

        #[cfg(not(all(unix, debug_assertions)))]
        let quit = std::future::pending::<()>();

        tokio::select! {
            () = ctrl_c => {},
            () = terminate => {},
            () = quit => {},
        }
    }
}
}

View File

@@ -0,0 +1,35 @@
use clap::ValueEnum;
use serde::{Deserialize, Serialize};
/// Runtime environment the application is configured for.
///
/// Serializes (serde) and parses from the CLI (clap) in `snake_case`,
/// with short aliases `dev`, `prod`, and `test`.
#[derive(Debug, Clone, Serialize, Deserialize, ValueEnum)]
#[serde(rename_all = "snake_case")]
#[value(rename_all = "snake_case")]
pub enum Environment {
    #[serde(alias = "dev")]
    #[value(alias = "dev")]
    Development,
    #[serde(alias = "prod")]
    #[value(alias = "prod")]
    Production,
    #[serde(alias = "test")]
    #[value(alias = "test")]
    Testing,
}
impl Environment {
pub fn full_name(&self) -> &'static str {
match &self {
Self::Development => "development",
Self::Production => "production",
Self::Testing => "testing",
}
}
pub fn short_name(&self) -> &'static str {
match &self {
Self::Development => "dev",
Self::Production => "prod",
Self::Testing => "test",
}
}
}

View File

@@ -0,0 +1,12 @@
pub mod builder;
pub mod config;
pub mod context;
pub mod core;
pub mod env;

// Re-exports, alphabetized by module for easy scanning.
pub use builder::AppBuilder;
pub use config::AppConfig;
pub use context::{AppContext, AppContextTrait};
pub use core::{App, PROJECT_NAME};
pub use env::Environment;

View File

@@ -0,0 +1,95 @@
use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts};
use base64::{self, Engine};
use http::header::AUTHORIZATION;
use super::{
config::BasicAuthConfig,
errors::AuthError,
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{
app::{AppContextTrait, PROJECT_NAME},
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
};
/// Credentials parsed from an HTTP `Authorization: Basic ...` header.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct AuthBasic {
    // Text before the first ':' of the decoded payload.
    pub user: String,
    // Text after the first ':'; `None` when the payload has no colon.
    pub password: Option<String>,
}
impl AuthBasic {
    /// Parses `Authorization: Basic <base64(user:password)>` from the
    /// request parts.
    ///
    /// Returns [`AuthError::BasicInvalidCredentials`] when the header is
    /// missing, uses another scheme, is not valid base64, or the decoded
    /// payload is not UTF-8.
    fn decode_request_parts(req: &mut Parts) -> Result<Self, AuthError> {
        let authorization = req
            .headers
            .get(AUTHORIZATION)
            .and_then(|s| s.to_str().ok())
            .ok_or(AuthError::BasicInvalidCredentials)?;
        match authorization.split_once(' ') {
            // RFC 7235: the auth-scheme token is case-insensitive, so
            // `basic`/`BASIC` must be accepted as well as `Basic`.
            Some((scheme, contents)) if scheme.eq_ignore_ascii_case("Basic") => {
                let decoded = base64::engine::general_purpose::STANDARD
                    .decode(contents)
                    .map_err(|_| AuthError::BasicInvalidCredentials)?;
                let decoded =
                    String::from_utf8(decoded).map_err(|_| AuthError::BasicInvalidCredentials)?;
                // Payload is `user:password`; without a colon the whole
                // payload is the user name and there is no password.
                Ok(if let Some((user, password)) = decoded.split_once(':') {
                    Self {
                        user: String::from(user),
                        password: Some(String::from(password)),
                    }
                } else {
                    Self {
                        user: decoded,
                        password: None,
                    }
                })
            }
            _ => Err(AuthError::BasicInvalidCredentials),
        }
    }
}
/// Auth service implementing HTTP Basic against a statically configured
/// user/password pair.
#[derive(Debug)]
pub struct BasicAuthService {
    pub config: BasicAuthConfig,
}
#[async_trait]
impl AuthServiceTrait for BasicAuthService {
    /// Validates Basic credentials against the configured pair and resolves
    /// the seed subscriber's auth record on success.
    async fn extract_user_info(
        &self,
        ctx: &dyn AppContextTrait,
        request: &mut Parts,
    ) -> Result<AuthUserInfo, AuthError> {
        // A header without a password part is compared as the empty string.
        // NOTE(review): `==` is not constant-time, so this comparison leaks
        // timing information in principle — confirm that is acceptable here.
        if let Ok(AuthBasic {
            user: found_user,
            password: found_password,
        }) = AuthBasic::decode_request_parts(request)
            && self.config.user == found_user
            && self.config.password == found_password.unwrap_or_default()
        {
            // Basic auth maps every successful login to the seed subscriber.
            let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
                .await
                .map_err(|_| AuthError::FindAuthRecordError)?;
            return Ok(AuthUserInfo {
                subscriber_auth,
                auth_type: AuthType::Basic,
            });
        }
        Err(AuthError::BasicInvalidCredentials)
    }

    /// `WWW-Authenticate` challenge advertising Basic auth for this realm.
    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
        Some(HeaderValue::from_str(format!("Basic realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
    }

    fn auth_type(&self) -> AuthType {
        AuthType::Basic
    }
}

View File

@@ -0,0 +1,37 @@
use std::collections::HashMap;
use jwtk::OneOrMany;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
/// Configuration for HTTP Basic auth; fields are flattened into the parent
/// config under `basic_*` keys.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct BasicAuthConfig {
    #[serde(rename = "basic_user")]
    pub user: String,
    #[serde(rename = "basic_password")]
    pub password: String,
}
/// Configuration for OIDC auth; fields are flattened into the parent
/// config under `oidc_*` keys.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct OidcAuthConfig {
    #[serde(rename = "oidc_issuer")]
    pub issuer: String,
    #[serde(rename = "oidc_audience")]
    pub audience: String,
    #[serde(rename = "oidc_client_id")]
    pub client_id: String,
    #[serde(rename = "oidc_client_secret")]
    pub client_secret: String,
    // Scopes that must all be present in a token beyond the defaults.
    #[serde(rename = "oidc_extra_scopes")]
    pub extra_scopes: Option<OneOrMany<String>>,
    // Extra claim requirements: `Some(v)` must match exactly, `None` only
    // requires the claim to be present.
    #[serde(rename = "oidc_extra_claims")]
    pub extra_claims: Option<HashMap<String, Option<String>>>,
}
/// Auth backend selection, tagged by `auth_type` (`basic` or `oidc`).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "auth_type", rename_all = "snake_case")]
pub enum AuthConfig {
    Basic(BasicAuthConfig),
    Oidc(OidcAuthConfig),
}

View File

@@ -0,0 +1,142 @@
use async_graphql::dynamic::ResolverContext;
use axum::{
Json,
http::StatusCode,
response::{IntoResponse, Response},
};
use fetch::HttpClientError;
use openidconnect::{
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
StandardErrorResponse, core::CoreErrorResponseType,
};
use serde::{Deserialize, Serialize};
use snafu::prelude::*;
use crate::models::auth::AuthType;
/// All authentication failures, spanning Basic auth, the OIDC flow, and
/// dynamic GraphQL permission guards. Rendered to clients via
/// `IntoResponse` as `401 Unauthorized`.
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum AuthError {
    #[snafu(display("Permission denied"))]
    PermissionError,
    #[snafu(display("Not support auth method"))]
    NotSupportAuthMethod {
        supported: Vec<AuthType>,
        current: AuthType,
    },
    #[snafu(display("Failed to find auth record"))]
    FindAuthRecordError,
    #[snafu(display("Invalid credentials"))]
    BasicInvalidCredentials,
    // --- OIDC provider / discovery errors ---
    #[snafu(display("Invalid oidc provider meta client error: {source}"))]
    OidcProviderHttpClientError { source: HttpClientError },
    #[snafu(transparent)]
    OidcProviderMetaError {
        source: openidconnect::DiscoveryError<HttpClientError>,
    },
    #[snafu(display("Invalid oidc provider URL: {source}"))]
    OidcProviderUrlError { source: url::ParseError },
    #[snafu(display("Invalid oidc redirect URI: {source}"))]
    OidcRequestRedirectUriError {
        #[snafu(source)]
        source: url::ParseError,
    },
    // --- OIDC callback / token-exchange errors ---
    #[snafu(display("Oidc request session not found or expired"))]
    OidcCallbackRecordNotFoundOrExpiredError,
    #[snafu(display("Invalid oidc request callback nonce"))]
    OidcInvalidNonceError,
    #[snafu(display("Invalid oidc request callback state"))]
    OidcInvalidStateError,
    #[snafu(display("Invalid oidc request callback code"))]
    OidcInvalidCodeError,
    #[snafu(transparent)]
    OidcCallbackTokenConfigurationError { source: ConfigurationError },
    #[snafu(transparent)]
    OidcRequestTokenError {
        source: RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
    },
    #[snafu(display("Invalid oidc id token"))]
    OidcInvalidIdTokenError,
    #[snafu(display("Invalid oidc access token"))]
    OidcInvalidAccessTokenError,
    #[snafu(transparent)]
    OidcSignatureVerificationError { source: SignatureVerificationError },
    #[snafu(transparent)]
    OidcSigningError { source: SigningError },
    #[snafu(display("Missing Bearer token"))]
    OidcMissingBearerToken,
    #[snafu(transparent)]
    OidcJwtkError { source: jwtk::Error },
    // --- OIDC claim-validation errors ---
    #[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
    OidcExtraScopesMatchError { expected: String, found: String },
    #[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
    OidcExtraClaimMatchError {
        key: String,
        expected: String,
        found: String,
    },
    #[snafu(display("Extra claim {claim} missing"))]
    OidcExtraClaimMissingError { claim: String },
    #[snafu(display("Audience {aud} missing"))]
    OidcAudMissingError { aud: String },
    #[snafu(display("Subject missing"))]
    OidcSubMissingError,
    // GraphQL guard failure; the display joins path, field, and column with
    // dots only when the latter parts are non-empty.
    #[snafu(display(
        "GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
        (if field.is_empty() { "" } else { "." }),
        (if column.is_empty() { "" } else { "." }),
        source.message
    ))]
    GraphqlDynamicPermissionError {
        #[snafu(source(false))]
        source: Box<async_graphql::Error>,
        field: String,
        column: String,
        context_path: String,
    },
}
impl AuthError {
    /// Builds a [`AuthError::GraphqlDynamicPermissionError`] from a dynamic
    /// GraphQL guard failure, recording the field/column being guarded and
    /// the resolver path at which the denial occurred.
    pub fn from_graphql_dynamic_subscribe_id_guard(
        source: async_graphql::Error,
        context: &ResolverContext,
        field_name: &str,
        column_name: &str,
    ) -> AuthError {
        let context_path = match context.ctx.path_node {
            Some(node) => node.to_string_vec().join(""),
            None => String::new(),
        };
        AuthError::GraphqlDynamicPermissionError {
            source: Box::new(source),
            field: field_name.to_owned(),
            column: column_name.to_owned(),
            context_path,
        }
    }
}
/// JSON body returned to clients when authentication fails.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AuthErrorResponse {
    // Always `false`; kept for a uniform response envelope.
    pub success: bool,
    pub message: String,
}
impl From<AuthError> for AuthErrorResponse {
fn from(value: AuthError) -> Self {
AuthErrorResponse {
success: false,
message: value.to_string(),
}
}
}
impl IntoResponse for AuthError {
    /// Renders every auth failure as `401 Unauthorized` with a JSON body.
    fn into_response(self) -> Response {
        let body = Json(AuthErrorResponse::from(self));
        (StatusCode::UNAUTHORIZED, body).into_response()
    }
}

View File

@@ -0,0 +1,40 @@
use std::sync::Arc;
use axum::{
extract::{Request, State},
http::header,
middleware::Next,
response::{IntoResponse, Response},
};
use crate::{app::AppContextTrait, auth::AuthServiceTrait};
/// Axum middleware that authenticates each request with the configured
/// auth service and, on success, injects the resulting [`AuthUserInfo`]
/// into request extensions for downstream handlers.
///
/// On failure the auth error is rendered directly as the response.
pub async fn auth_middleware(
    State(ctx): State<Arc<dyn AppContextTrait>>,
    request: Request,
    next: Next,
) -> Response {
    let auth_service = ctx.auth();
    // Split the request so the service can inspect the header parts.
    let (mut parts, body) = request.into_parts();
    let mut response = match auth_service
        .extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
        .await
    {
        Ok(auth_user_info) => {
            // Reassemble the request and expose the authenticated user.
            let mut request = Request::from_parts(parts, body);
            request.extensions_mut().insert(auth_user_info);
            next.run(request).await
        }
        Err(auth_error) => auth_error.into_response(),
    };
    // NOTE(review): the challenge header is attached to successful responses
    // as well as 401s; RFC 7235 only requires it on 401 — confirm intended.
    if let Some(header_value) = auth_service.www_authenticate_header_value() {
        response
            .headers_mut()
            .insert(header::WWW_AUTHENTICATE, header_value);
    };
    response
}

View File

@@ -0,0 +1,11 @@
//! Authentication: HTTP Basic and OIDC services, their shared config and
//! errors, and the axum middleware wiring them into the router.
pub mod basic;
pub mod config;
pub mod errors;
pub mod middleware;
pub mod oidc;
pub mod service;
pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig};
pub use errors::AuthError;
pub use middleware::auth_middleware;
pub use service::{AuthService, AuthServiceTrait, AuthUserInfo};

View File

@@ -0,0 +1,364 @@
use std::{
collections::{HashMap, HashSet},
future::Future,
ops::Deref,
pin::Pin,
sync::Arc,
};
use async_trait::async_trait;
use axum::{
http,
http::{HeaderValue, request::Parts},
};
use fetch::{HttpClient, client::HttpClientError};
use http::header::AUTHORIZATION;
use itertools::Itertools;
use jwtk::jwk::RemoteJwksVerifier;
use moka::future::Cache;
use openidconnect::{
AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse,
core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
};
use sea_orm::DbErr;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use snafu::ResultExt;
use url::Url;
use super::{
config::OidcAuthConfig,
errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{
app::{AppContextTrait, PROJECT_NAME},
errors::RecorderError,
models::auth::AuthType,
};
/// Newtype adapting the app's shared [`HttpClient`] to the HTTP interface
/// required by the `openidconnect` crate.
pub struct OidcHttpClient(pub Arc<HttpClient>);
impl Deref for OidcHttpClient {
    type Target = HttpClient;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<'c> openidconnect::AsyncHttpClient<'c> for OidcHttpClient {
    type Error = HttpClientError;
    // wasm targets cannot provide `Send` futures.
    #[cfg(target_arch = "wasm32")]
    type Future =
        Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + 'c>>;
    #[cfg(not(target_arch = "wasm32"))]
    type Future =
        Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;

    /// Executes the request on the inner client and rebuilds the response
    /// (status, version, headers, body) into `openidconnect`'s types.
    fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
        Box::pin(async move {
            let response = self.execute(request.try_into()?).await?;
            let mut builder = http::Response::builder().status(response.status());
            // HTTP version is unavailable on wasm responses.
            #[cfg(not(target_arch = "wasm32"))]
            {
                builder = builder.version(response.version());
            }
            for (name, value) in response.headers().iter() {
                builder = builder.header(name, value);
            }
            builder
                .body(response.bytes().await?.to_vec())
                .map_err(HttpClientError::from)
        })
    }
}
/// Claims payload this app reads from verified OIDC JWTs.
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OidcAuthClaims {
    // Raw `scope` claim, if present.
    pub scope: Option<String>,
    // All remaining claims, kept for configurable extra-claim checks.
    #[serde(flatten)]
    pub custom: HashMap<String, Value>,
}
impl OidcAuthClaims {
    /// Iterates the scope list by splitting the `scope` claim on commas.
    ///
    /// NOTE(review): RFC 6749 defines `scope` as space-delimited; splitting
    /// on ',' assumes a provider-specific format — confirm against the IdP.
    pub fn scopes(&self) -> std::str::Split<'_, char> {
        self.scope.as_deref().unwrap_or_default().split(',')
    }
}
/// A pending authorization-code request; only the `auth_uri` is serialized
/// for the client, the secrets stay server-side in the request cache.
#[derive(Debug, Clone, Serialize)]
pub struct OidcAuthRequest {
    // URL the user agent is sent to at the identity provider.
    pub auth_uri: Url,
    #[serde(skip)]
    pub redirect_uri: RedirectUrl,
    // CSRF `state` value; also used as the cache key for this request.
    #[serde(skip)]
    pub csrf_token: CsrfToken,
    #[serde(skip)]
    pub nonce: Nonce,
    #[serde(skip)]
    pub pkce_verifier: Arc<PkceCodeVerifier>,
}
/// Query parameters the identity provider appends to the callback URL.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OidcAuthCallbackQuery {
    pub state: Option<String>,
    pub code: Option<String>,
    pub redirect_uri: Option<String>,
}
/// Result of a completed authorization-code exchange.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OidcAuthCallbackPayload {
    pub access_token: String,
}
/// Auth service implementing the OIDC authorization-code flow (with PKCE)
/// plus Bearer-token verification against the provider's JWKS.
pub struct OidcAuthService {
    pub config: OidcAuthConfig,
    // JWKS-backed verifier with its own refresh interval.
    pub jwk_verifier: RemoteJwksVerifier,
    pub oidc_provider_client: Arc<HttpClient>,
    // Pending authorization requests keyed by CSRF state, TTL-bounded.
    pub oidc_request_cache: Cache<String, OidcAuthRequest>,
}
impl OidcAuthService {
    /// Discovers the OIDC provider metadata and builds an authorization-code
    /// (PKCE) request for the given redirect URI.
    pub async fn build_authorization_request(
        &self,
        redirect_uri: &str,
    ) -> Result<OidcAuthRequest, AuthError> {
        let oidc_provider_client = OidcHttpClient(self.oidc_provider_client.clone());
        let provider_metadata = CoreProviderMetadata::discover_async(
            IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
            &oidc_provider_client,
        )
        .await?;
        let redirect_uri =
            RedirectUrl::new(redirect_uri.to_string()).context(OidcRequestRedirectUriSnafu)?;
        let oidc_client = CoreClient::from_provider_metadata(
            provider_metadata,
            ClientId::new(self.config.client_id.clone()),
            Some(ClientSecret::new(self.config.client_secret.clone())),
        )
        .set_redirect_uri(redirect_uri.clone());
        // PKCE: keep the verifier server-side, send the challenge along.
        // (typo fix: was `pkce_chanllenge`)
        let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256();
        let mut authorization_request = oidc_client
            .authorize_url(
                CoreAuthenticationFlow::AuthorizationCode,
                CsrfToken::new_random,
                Nonce::new_random,
            )
            .set_pkce_challenge(pkce_challenge);
        {
            if let Some(scopes) = self.config.extra_scopes.as_ref() {
                authorization_request = authorization_request.add_scopes(
                    scopes
                        .iter()
                        .map(|s| openidconnect::Scope::new(s.to_string())),
                )
            }
        }
        let (auth_uri, csrf_token, nonce) = authorization_request.url();
        Ok(OidcAuthRequest {
            auth_uri,
            csrf_token,
            nonce,
            pkce_verifier: Arc::new(pkce_verifier),
            redirect_uri,
        })
    }

    /// Caches a pending authorization request, keyed by its CSRF state.
    pub async fn store_authorization_request(
        &self,
        request: OidcAuthRequest,
    ) -> Result<(), AuthError> {
        self.oidc_request_cache
            .insert(request.csrf_token.secret().to_string(), request)
            .await;
        Ok(())
    }

    /// Retrieves and invalidates the pending request for a callback `state`.
    ///
    /// Errors when the entry is absent (never stored, already consumed,
    /// or its TTL expired).
    pub async fn load_authorization_request(
        &self,
        state: &str,
    ) -> Result<OidcAuthRequest, AuthError> {
        let result = self
            .oidc_request_cache
            .get(state)
            .await
            .ok_or(AuthError::OidcCallbackRecordNotFoundOrExpiredError)?;
        // One-shot: a state value must not be reusable.
        self.oidc_request_cache.invalidate(state).await;
        Ok(result)
    }

    /// Completes the authorization-code flow for a provider callback:
    /// validates state, exchanges the code (with PKCE), verifies the ID
    /// token nonce and access-token hash, and returns the access token.
    pub async fn extract_authorization_request_callback(
        &self,
        query: OidcAuthCallbackQuery,
    ) -> Result<OidcAuthCallbackPayload, AuthError> {
        let oidc_http_client = OidcHttpClient(self.oidc_provider_client.clone());
        let csrf_token = query.state.ok_or(AuthError::OidcInvalidStateError)?;
        let code = query.code.ok_or(AuthError::OidcInvalidCodeError)?;
        let request_cache = self.load_authorization_request(&csrf_token).await?;
        let provider_metadata = CoreProviderMetadata::discover_async(
            IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
            &oidc_http_client,
        )
        .await?;
        let oidc_client = CoreClient::from_provider_metadata(
            provider_metadata,
            ClientId::new(self.config.client_id.clone()),
            Some(ClientSecret::new(self.config.client_secret.clone())),
        )
        .set_redirect_uri(request_cache.redirect_uri);
        let pkce_verifier = PkceCodeVerifier::new(request_cache.pkce_verifier.secret().to_string());
        let token_response = oidc_client
            .exchange_code(AuthorizationCode::new(code))?
            .set_pkce_verifier(pkce_verifier)
            .request_async(&oidc_http_client)
            .await?;
        let id_token = token_response
            .id_token()
            .ok_or(AuthError::OidcInvalidIdTokenError)?;
        let id_token_verifier = &oidc_client.id_token_verifier();
        // Verifying claims also checks the nonce captured at request time.
        let claims = id_token
            .claims(id_token_verifier, &request_cache.nonce)
            .map_err(|_| AuthError::OidcInvalidNonceError)?;
        let access_token = token_response.access_token();
        // `at_hash` binding: when present, the access token must match the
        // hash signed into the ID token.
        let actual_access_token_hash = AccessTokenHash::from_token(
            access_token,
            id_token.signing_alg()?,
            id_token.signing_key(id_token_verifier)?,
        )?;
        if let Some(expected_access_token_hash) = claims.access_token_hash()
            && actual_access_token_hash != *expected_access_token_hash
        {
            return Err(AuthError::OidcInvalidAccessTokenError);
        }
        Ok(OidcAuthCallbackPayload {
            access_token: access_token.secret().to_string(),
        })
    }
}
#[async_trait]
impl AuthServiceTrait for OidcAuthService {
    /// Authenticates a request by verifying its `Bearer` JWT against the
    /// provider's JWKS, then enforcing audience, optional extra scopes, and
    /// optional extra-claim requirements before resolving (or creating) the
    /// subscriber auth record keyed by the token's `sub`.
    async fn extract_user_info(
        &self,
        ctx: &dyn AppContextTrait,
        request: &mut Parts,
    ) -> Result<AuthUserInfo, AuthError> {
        let config = &self.config;
        // Pull the raw token out of `Authorization: Bearer <token>`.
        let token = request
            .headers
            .get(AUTHORIZATION)
            .and_then(|authorization| {
                authorization
                    .to_str()
                    .ok()
                    .and_then(|s| s.strip_prefix("Bearer "))
            })
            .ok_or(AuthError::OidcMissingBearerToken)?;
        // Signature/expiry verification via the cached remote JWKS.
        let token_data = self.jwk_verifier.verify::<OidcAuthClaims>(token).await?;
        let claims = token_data.claims();
        let sub = if let Some(sub) = claims.sub.as_deref() {
            sub
        } else {
            return Err(AuthError::OidcSubMissingError);
        };
        // The configured audience must appear in the token's `aud` list.
        if !claims.aud.iter().any(|aud| aud == &config.audience) {
            return Err(AuthError::OidcAudMissingError {
                aud: config.audience.clone(),
            });
        }
        let extra_claims = &claims.extra;
        // Every configured extra scope must be present in the token.
        if let Some(expected_scopes) = config.extra_scopes.as_ref() {
            let found_scopes = extra_claims.scopes().collect::<HashSet<_>>();
            if !expected_scopes
                .iter()
                .all(|es| found_scopes.contains(es as &str))
            {
                return Err(AuthError::OidcExtraScopesMatchError {
                    expected: expected_scopes.iter().join(","),
                    found: extra_claims
                        .scope
                        .as_deref()
                        .unwrap_or_default()
                        .to_string(),
                });
            }
        }
        // Extra-claim policy: `Some(value)` must match a string claim
        // exactly; `None` only requires the claim to be present.
        if let Some(expected_extra_claims) = config.extra_claims.as_ref() {
            for (expected_key, expected_value) in expected_extra_claims.iter() {
                match (extra_claims.custom.get(expected_key), expected_value) {
                    (found_value, Some(expected_value)) => {
                        if let Some(Value::String(found_value)) = found_value
                            && expected_value == found_value
                        {
                        } else {
                            return Err(AuthError::OidcExtraClaimMatchError {
                                expected: expected_value.clone(),
                                found: found_value.map(|v| v.to_string()).unwrap_or_default(),
                                key: expected_key.clone(),
                            });
                        }
                    }
                    (None, None) => {
                        return Err(AuthError::OidcExtraClaimMissingError {
                            claim: expected_key.clone(),
                        });
                    }
                    _ => {}
                }
            }
        }
        // First OIDC login auto-provisions the auth record for this `sub`.
        let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
            Err(RecorderError::DbError {
                source: DbErr::RecordNotFound(..),
            }) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
            r => r,
        }
        .map_err(|e| {
            tracing::error!("Error finding auth record: {:?}", e);
            AuthError::FindAuthRecordError
        })?;
        Ok(AuthUserInfo {
            subscriber_auth,
            auth_type: AuthType::Oidc,
        })
    }

    /// `WWW-Authenticate` challenge advertising Bearer auth for this realm.
    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
        Some(HeaderValue::from_str(format!("Bearer realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
    }

    fn auth_type(&self) -> AuthType {
        AuthType::Oidc
    }
}

View File

@@ -0,0 +1,115 @@
use std::{sync::Arc, time::Duration};
use async_trait::async_trait;
use axum::http::request::Parts;
use fetch::{
HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
};
use http::header::HeaderValue;
use jwtk::jwk::RemoteJwksVerifier;
use moka::future::Cache;
use openidconnect::{IssuerUrl, core::CoreProviderMetadata};
use snafu::prelude::*;
use super::{
AuthConfig,
basic::BasicAuthService,
errors::{AuthError, OidcProviderHttpClientSnafu, OidcProviderUrlSnafu},
oidc::{OidcAuthService, OidcHttpClient},
};
use crate::{app::AppContextTrait, models::auth::AuthType};
/// Authenticated identity attached to request extensions by the middleware.
#[derive(Clone, Debug)]
pub struct AuthUserInfo {
    pub subscriber_auth: crate::models::auth::Model,
    // Which backend (Basic/Oidc) authenticated this request.
    pub auth_type: AuthType,
}
/// Contract every auth backend implements: extract the authenticated user
/// from request parts, advertise a challenge header, and report its type.
#[async_trait]
pub trait AuthServiceTrait {
    async fn extract_user_info(
        &self,
        ctx: &dyn AppContextTrait,
        request: &mut Parts,
    ) -> Result<AuthUserInfo, AuthError>;
    fn www_authenticate_header_value(&self) -> Option<HeaderValue>;
    fn auth_type(&self) -> AuthType;
}
/// The configured auth backend; variants boxed to keep the enum small.
pub enum AuthService {
    Basic(Box<BasicAuthService>),
    Oidc(Box<OidcAuthService>),
}
impl AuthService {
    /// Builds the concrete auth service from configuration.
    ///
    /// For OIDC this eagerly discovers the provider metadata (so a bad
    /// issuer fails fast at startup) and prepares a JWKS verifier plus a
    /// TTL cache for in-flight authorization requests.
    pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> {
        let result = match config {
            AuthConfig::Basic(config) => AuthService::Basic(Box::new(BasicAuthService { config })),
            AuthConfig::Oidc(config) => {
                // Small cached client: discovery documents rarely change and
                // RFC 7234 caching avoids re-fetching them on every request.
                let oidc_provider_client = Arc::new(
                    HttpClient::from_config(HttpClientConfig {
                        exponential_backoff_max_retries: Some(3),
                        cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
                        cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
                        ..Default::default()
                    })
                    .context(OidcProviderHttpClientSnafu)?,
                );
                let provider_metadata = {
                    let client = OidcHttpClient(oidc_provider_client.clone());
                    let issuer_url =
                        IssuerUrl::new(config.issuer.clone()).context(OidcProviderUrlSnafu)?;
                    CoreProviderMetadata::discover_async(issuer_url, &client).await
                }?;
                // JWKS keys are cached for 5 minutes between refreshes.
                // (dropped a redundant `.clone()` on the fresh String)
                let jwk_verifier = RemoteJwksVerifier::new(
                    provider_metadata.jwks_uri().to_string(),
                    None,
                    Duration::from_secs(300),
                );
                AuthService::Oidc(Box::new(OidcAuthService {
                    config,
                    jwk_verifier,
                    oidc_provider_client,
                    // Pending authorization requests expire after 5 minutes.
                    // `from_secs(300)` matches the JWKS TTL above and avoids
                    // the unstable `Duration::from_mins` constructor.
                    oidc_request_cache: Cache::builder()
                        .time_to_live(Duration::from_secs(300))
                        .name("oidc_request_cache")
                        .build(),
                }))
            }
        };
        Ok(result)
    }
}
impl AuthService {
    /// Borrows the active backend as a trait object for uniform delegation.
    fn as_dyn(&self) -> &dyn AuthServiceTrait {
        match self {
            AuthService::Basic(service) => service.as_ref(),
            AuthService::Oidc(service) => service.as_ref(),
        }
    }
}

#[async_trait]
impl AuthServiceTrait for AuthService {
    /// Delegates authentication to whichever backend is configured.
    #[tracing::instrument(skip(self, ctx, request))]
    async fn extract_user_info(
        &self,
        ctx: &dyn AppContextTrait,
        request: &mut Parts,
    ) -> Result<AuthUserInfo, AuthError> {
        self.as_dyn().extract_user_info(ctx, request).await
    }

    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
        self.as_dyn().www_authenticate_header_value()
    }

    fn auth_type(&self) -> AuthType {
        self.as_dyn().auth_type()
    }
}

View File

@@ -0,0 +1,12 @@
use recorder::{app::AppBuilder, errors::RecorderResult};
/// Entry point: build the app from CLI arguments and serve until shutdown.
#[tokio::main]
async fn main() -> RecorderResult<()> {
    let builder = AppBuilder::from_main_cli(None).await?;
    let app = builder.build().await?;
    app.serve().await?;
    Ok(())
}

View File

@@ -0,0 +1,16 @@
use recorder::{app::AppBuilder, database::DatabaseService, errors::RecorderResult};
/// Entry point: connects with migrations disabled, then reverts all
/// database migrations.
#[tokio::main]
async fn main() -> RecorderResult<()> {
    let builder = AppBuilder::from_main_cli(None).await?;
    builder.load_env().await?;
    // Disable auto_migrate so connecting does not first migrate up.
    let mut database_config = builder.load_config().await?.database;
    database_config.auto_migrate = false;
    let database_service = DatabaseService::from_config(database_config).await?;
    database_service.migrate_down().await?;
    Ok(())
}

4
apps/recorder/src/cache/config.rs vendored Normal file
View File

@@ -0,0 +1,4 @@
use serde::{Deserialize, Serialize};
/// Cache configuration; currently carries no options (placeholder).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CacheConfig {}

5
apps/recorder/src/cache/mod.rs vendored Normal file
View File

@@ -0,0 +1,5 @@
//! Caching subsystem (currently a stub service and empty config).
pub mod config;
pub mod service;
pub use config::CacheConfig;
pub use service::CacheService;

10
apps/recorder/src/cache/service.rs vendored Normal file
View File

@@ -0,0 +1,10 @@
use super::CacheConfig;
use crate::errors::RecorderResult;
/// Placeholder cache service; holds no state yet.
pub struct CacheService {}
impl CacheService {
    /// Constructs the (currently no-op) cache service from config.
    pub async fn from_config(_config: CacheConfig) -> RecorderResult<Self> {
        Ok(Self {})
    }
}

View File

@@ -0,0 +1,4 @@
use serde::{Deserialize, Serialize};
/// Crypto configuration; currently carries no options (placeholder).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CryptoConfig {}

View File

@@ -0,0 +1,20 @@
use async_graphql::Error as AsyncGraphQLError;
use seaography::SeaographyError;
/// Failures from the crypto service: base64 decoding, cocoon
/// encryption/decryption, UTF-8 conversion, and JSON (de)serialization.
#[derive(Debug, snafu::Snafu)]
pub enum CryptoError {
    #[snafu(transparent)]
    Base64DecodeError { source: base64::DecodeError },
    // `cocoon::Error` does not implement Display, hence the Debug format
    // and the explicit `context(false)` for `From` conversion.
    #[snafu(display("CocoonError: {source:?}"), context(false))]
    CocoonError { source: cocoon::Error },
    #[snafu(transparent)]
    FromUtf8Error { source: std::string::FromUtf8Error },
    #[snafu(transparent)]
    SerdeJsonError { source: serde_json::Error },
}
impl From<CryptoError> for SeaographyError {
    /// Surfaces crypto failures to GraphQL as plain message errors.
    fn from(error: CryptoError) -> Self {
        let message = error.to_string();
        SeaographyError::AsyncGraphQLError(AsyncGraphQLError::new(message))
    }
}

View File

@@ -0,0 +1,9 @@
//! Symmetric credential encryption built on `cocoon`, plus error types
//! and the `UserPassCredential` secret-redacting container.
pub mod config;
pub mod error;
pub mod service;
pub mod userpass;
pub use config::CryptoConfig;
pub use error::CryptoError;
pub use service::CryptoService;
pub use userpass::UserPassCredential;

View File

@@ -0,0 +1,62 @@
use base64::prelude::{BASE64_URL_SAFE, *};
use cocoon::Cocoon;
use rand::Rng;
use serde::{Deserialize, Serialize};
use super::CryptoConfig;
use crate::crypto::error::CryptoError;
/// Service wrapping cocoon-based encryption of strings and JSON payloads.
pub struct CryptoService {
    // Unused for now; retained so future options need no signature change.
    #[allow(dead_code)]
    config: CryptoConfig,
}
impl CryptoService {
pub async fn from_config(config: CryptoConfig) -> Result<Self, CryptoError> {
Ok(Self { config })
}
pub fn encrypt_string(&self, data: String) -> Result<String, CryptoError> {
let key = rand::rng().random::<[u8; 32]>();
let mut cocoon = Cocoon::new(&key);
let mut data = data.into_bytes();
let detached_prefix = cocoon.encrypt(&mut data)?;
let mut combined = Vec::with_capacity(key.len() + detached_prefix.len() + data.len());
combined.extend_from_slice(&key);
combined.extend_from_slice(&detached_prefix);
combined.extend_from_slice(&data);
Ok(BASE64_URL_SAFE.encode(combined))
}
pub fn decrypt_string(&self, data: &str) -> Result<String, CryptoError> {
let decoded = BASE64_URL_SAFE.decode(data)?;
let (key, remain) = decoded.split_at(32);
let (detached_prefix, data) = remain.split_at(60);
let mut data = data.to_vec();
let cocoon = Cocoon::new(key);
cocoon.decrypt(&mut data, detached_prefix)?;
String::from_utf8(data).map_err(CryptoError::from)
}
pub fn encrypt_serialize<T: Serialize>(&self, credentials: &T) -> Result<String, CryptoError> {
let json = serde_json::to_string(credentials)?;
self.encrypt_string(json)
}
pub fn decrypt_deserialize<T: for<'de> Deserialize<'de>>(
&self,
encrypted: &str,
) -> Result<T, CryptoError> {
let data = self.decrypt_string(encrypted)?;
serde_json::from_str(&data).map_err(CryptoError::from)
}
}

View File

@@ -0,0 +1,19 @@
use std::fmt::Debug;
/// Username/password credential for third-party sites, with optional
/// session cookies and user agent.
pub struct UserPassCredential {
    pub username: String,
    pub password: String,
    pub user_agent: Option<String>,
    pub cookies: Option<String>,
}
// Manual Debug so secrets never leak into logs: username, password, and
// cookies are redacted; only the user agent is printed verbatim.
impl Debug for UserPassCredential {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("UserPassCredential")
            .field("username", &"[Secret]")
            .field("password", &"[Secret]")
            .field("cookies", &"[Secret]")
            .field("user_agent", &self.user_agent)
            .finish()
    }
}

View File

@@ -0,0 +1,14 @@
use serde::{Deserialize, Serialize};
/// Database connection settings.
///
/// Timeouts are in milliseconds (consumed via `Duration::from_millis` in
/// the service).
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct DatabaseConfig {
    pub uri: String,
    pub enable_logging: bool,
    pub min_connections: u32,
    pub max_connections: u32,
    pub connect_timeout: u64,
    pub idle_timeout: u64,
    // Optional; sea-orm's default acquire timeout applies when unset.
    pub acquire_timeout: Option<u64>,
    // When true, pending migrations run on connect; defaults to false.
    #[serde(default)]
    pub auto_migrate: bool,
}

View File

@@ -0,0 +1,5 @@
//! Database subsystem: connection config and the sea-orm-backed service.
pub mod config;
pub mod service;
pub use config::DatabaseConfig;
pub use service::DatabaseService;

View File

@@ -0,0 +1,129 @@
use std::{ops::Deref, time::Duration};
use apalis_sql::postgres::PostgresStorage;
use sea_orm::{
ConnectOptions, ConnectionTrait, Database, DatabaseConnection, DbBackend, DbErr, ExecResult,
QueryResult, Statement,
};
use sea_orm_migration::MigratorTrait;
use super::DatabaseConfig;
use crate::{errors::RecorderResult, migrations::Migrator};
/// Owns the sea-orm connection and (in tests) the backing Postgres
/// container; derefs to [`DatabaseConnection`] for direct use.
pub struct DatabaseService {
    pub config: DatabaseConfig,
    connection: DatabaseConnection,
    // Keeps the testcontainers Postgres instance alive for the service's
    // lifetime; `None` outside container-backed tests.
    #[cfg(feature = "testcontainers")]
    pub container:
        Option<testcontainers::ContainerAsync<testcontainers_modules::postgres::Postgres>>,
}
impl DatabaseService {
    /// Connects using `config` (timeouts are milliseconds) and, when
    /// `auto_migrate` is set, runs all pending migrations.
    pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> {
        let db_config = config.clone();
        let mut opt = ConnectOptions::new(&config.uri);
        opt.max_connections(config.max_connections)
            .min_connections(config.min_connections)
            .connect_timeout(Duration::from_millis(config.connect_timeout))
            .idle_timeout(Duration::from_millis(config.idle_timeout))
            .sqlx_logging(config.enable_logging);
        if let Some(acquire_timeout) = config.acquire_timeout {
            opt.acquire_timeout(Duration::from_millis(acquire_timeout));
        }
        let db = Database::connect(opt).await?;
        // only support postgres for now
        // if db.get_database_backend() == DatabaseBackend::Sqlite {
        // db.execute(Statement::from_string(
        // DatabaseBackend::Sqlite,
        // "
        // PRAGMA foreign_keys = ON;
        // PRAGMA journal_mode = WAL;
        // PRAGMA synchronous = NORMAL;
        // PRAGMA mmap_size = 134217728;
        // PRAGMA journal_size_limit = 67108864;
        // PRAGMA cache_size = 2000;
        // ",
        // ))
        // .await?;
        // }
        let me = Self {
            connection: db,
            #[cfg(feature = "testcontainers")]
            container: None,
            config: db_config,
        };
        if config.auto_migrate {
            me.migrate_up().await?;
        }
        Ok(me)
    }

    /// Prepares the apalis task storage schema, then applies all pending
    /// application migrations.
    pub async fn migrate_up(&self) -> RecorderResult<()> {
        {
            let pool = &self.get_postgres_connection_pool();
            PostgresStorage::setup(pool).await?;
        }
        Migrator::up(&self.connection, None).await?;
        Ok(())
    }

    /// Reverts all application migrations, then drops the apalis schema
    /// (which the migrator does not manage).
    pub async fn migrate_down(&self) -> RecorderResult<()> {
        Migrator::down(&self.connection, None).await?;
        {
            self.execute_unprepared(r#"DROP SCHEMA IF EXISTS apalis CASCADE"#)
                .await?;
        }
        Ok(())
    }
}
// Smart-pointer-style access: the service transparently exposes the
// underlying sea-orm connection.
impl Deref for DatabaseService {
    type Target = DatabaseConnection;
    fn deref(&self) -> &Self::Target {
        &self.connection
    }
}
impl AsRef<DatabaseConnection> for DatabaseService {
    fn as_ref(&self) -> &DatabaseConnection {
        &self.connection
    }
}
// Forward the full sea-orm connection interface to the inner connection so
// the service can be used anywhere a `ConnectionTrait` is expected.
#[async_trait::async_trait]
impl ConnectionTrait for DatabaseService {
    fn get_database_backend(&self) -> DbBackend {
        self.connection.get_database_backend()
    }

    async fn execute(&self, stmt: Statement) -> Result<ExecResult, DbErr> {
        self.connection.execute(stmt).await
    }

    async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> {
        self.connection.execute_unprepared(sql).await
    }

    async fn query_one(&self, stmt: Statement) -> Result<Option<QueryResult>, DbErr> {
        self.connection.query_one(stmt).await
    }

    async fn query_all(&self, stmt: Statement) -> Result<Vec<QueryResult>, DbErr> {
        self.connection.query_all(stmt).await
    }

    fn support_returning(&self) -> bool {
        self.connection.support_returning()
    }

    fn is_mock_connection(&self) -> bool {
        self.connection.is_mock_connection()
    }
}

View File

@@ -0,0 +1,297 @@
use std::borrow::Cow;
use axum::{
Json,
response::{IntoResponse, Response},
};
use fetch::{FetchError, HttpClientError, reqwest, reqwest_middleware};
use http::{HeaderMap, StatusCode};
use snafu::Snafu;
use crate::{
auth::AuthError,
crypto::CryptoError,
downloader::DownloaderError,
errors::{OptDynErr, response::StandardErrorResponse},
};
/// Application-wide error type: wraps every subsystem failure (HTTP,
/// database, auth, crypto, fetching, scraping) and renders to an HTTP
/// response via `IntoResponse`.
#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RecorderError {
    // Generic HTTP failure carrying optional response headers and an
    // optional boxed cause.
    #[snafu(display(
        "HTTP {status} {reason}, source = {source:?}",
        status = status,
        reason = status.canonical_reason().unwrap_or("Unknown")
    ))]
    HttpResponseError {
        status: StatusCode,
        headers: Option<HeaderMap>,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(transparent)]
    ImageError { source: image::ImageError },
    #[cfg(feature = "jxl")]
    #[snafu(transparent)]
    JxlEncodeError { source: jpegxl_rs::EncodeError },
    #[snafu(transparent, context(false))]
    HttpError { source: http::Error },
    // Boxed: fancy_regex::Error is large and would bloat every Result.
    #[snafu(transparent, context(false))]
    FancyRegexError {
        #[snafu(source(from(fancy_regex::Error, Box::new)))]
        source: Box<fancy_regex::Error>,
    },
    #[snafu(transparent)]
    NetAddrParseError { source: std::net::AddrParseError },
    #[snafu(transparent)]
    RegexError { source: regex::Error },
    #[snafu(display("Invalid method"))]
    InvalidMethodError,
    #[snafu(display("Invalid header value"))]
    InvalidHeaderValueError,
    #[snafu(display("Invalid header name"))]
    InvalidHeaderNameError,
    #[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
    MissingOriginError,
    #[snafu(transparent)]
    TracingAppenderInitError {
        source: tracing_appender::rolling::InitError,
    },
    #[snafu(transparent)]
    GraphQLSchemaError {
        source: async_graphql::dynamic::SchemaError,
    },
    #[snafu(transparent)]
    AuthError { source: AuthError },
    #[snafu(transparent)]
    DownloadError { source: DownloaderError },
    #[snafu(transparent)]
    RSSError { source: rss::Error },
    #[snafu(transparent)]
    DotEnvError { source: dotenvy::Error },
    #[snafu(transparent)]
    TeraError { source: tera::Error },
    #[snafu(transparent)]
    IOError { source: std::io::Error },
    #[snafu(transparent)]
    DbError { source: sea_orm::DbErr },
    #[snafu(transparent)]
    DbSqlxError { source: sea_orm::SqlxError },
    #[snafu(transparent, context(false))]
    FigmentError {
        #[snafu(source(from(figment::Error, Box::new)))]
        source: Box<figment::Error>,
    },
    #[snafu(transparent)]
    SerdeJsonError { source: serde_json::Error },
    #[snafu(transparent)]
    ParseUrlError { source: url::ParseError },
    #[snafu(display("{source}"), context(false))]
    OpenDALError {
        #[snafu(source(from(opendal::Error, Box::new)))]
        source: Box<opendal::Error>,
    },
    #[snafu(transparent)]
    HttpClientError { source: HttpClientError },
    #[cfg(feature = "testcontainers")]
    #[snafu(transparent)]
    TestcontainersError {
        source: testcontainers::TestcontainersError,
    },
    // --- domain errors (mikan scraping, models, tasks) ---
    #[snafu(display("Extract {desc} with mime error, expected {expected}, but got {found}"))]
    MimeError {
        desc: String,
        expected: String,
        found: String,
    },
    #[snafu(display("Invalid or unknown format in extracting mikan rss"))]
    MikanRssInvalidFormatError,
    #[snafu(display("Invalid field {field} in extracting mikan rss"))]
    MikanRssInvalidFieldError {
        field: Cow<'static, str>,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(display("Missing field {field} in extracting mikan meta"))]
    MikanMetaMissingFieldError {
        field: Cow<'static, str>,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(display("Model Entity {entity} not found or not belong to subscriber"))]
    ModelEntityNotFound { entity: Cow<'static, str> },
    #[snafu(transparent)]
    FetchError { source: FetchError },
    #[snafu(display("Credential3rdError: {message}, source = {source}"))]
    Credential3rdError {
        message: String,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(transparent)]
    CryptoError { source: CryptoError },
    #[snafu(transparent)]
    StringFromUtf8Error { source: std::string::FromUtf8Error },
    // Catch-all used by snafu's whatever!/FromString machinery.
    #[snafu(display("{message}"))]
    Whatever {
        message: String,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(display("Invalid task id: {message}"))]
    InvalidTaskId { message: String },
}
impl RecorderError {
    /// Build an `HttpResponseError` carrying only a status code.
    pub fn from_status(status: StatusCode) -> Self {
        Self::HttpResponseError {
            status,
            headers: None,
            source: OptDynErr::none(),
        }
    }

    /// Build an `HttpResponseError` carrying a status code plus response headers.
    pub fn from_status_and_headers(status: StatusCode, headers: HeaderMap) -> Self {
        Self::HttpResponseError {
            status,
            headers: Some(headers),
            source: OptDynErr::none(),
        }
    }

    /// Missing-field error for mikan meta extraction, with no underlying cause.
    pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
        Self::MikanMetaMissingFieldError {
            field,
            source: OptDynErr::none(),
        }
    }

    /// Invalid-field error for mikan RSS extraction, with no underlying cause.
    pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
        Self::MikanRssInvalidFieldError {
            field,
            source: OptDynErr::none(),
        }
    }

    /// Invalid-field error for mikan RSS extraction, wrapping the causing error.
    pub fn from_mikan_rss_invalid_field_and_source(
        field: Cow<'static, str>,
        source: impl std::error::Error + Send + Sync + 'static,
    ) -> Self {
        Self::MikanRssInvalidFieldError {
            field,
            source: OptDynErr::some_boxed(source),
        }
    }

    /// Wrap a database "record not found" condition with the given detail text.
    pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
        Self::DbError {
            source: sea_orm::DbErr::RecordNotFound(detail.to_string()),
        }
    }
}
impl snafu::FromString for RecorderError {
    // Enables `snafu::whatever!` and `.whatever_context(...)` to produce this
    // error type directly.
    type Source = Box<dyn std::error::Error + Send + Sync>;

    /// Build a `Whatever` error from a bare message, with no underlying cause.
    fn without_source(message: String) -> Self {
        Self::Whatever {
            message,
            source: OptDynErr::none(),
        }
    }

    /// Build a `Whatever` error from a message plus the causing error.
    fn with_source(source: Self::Source, message: String) -> Self {
        Self::Whatever {
            message,
            source: OptDynErr::some(source),
        }
    }
}
impl From<StatusCode> for RecorderError {
fn from(status: StatusCode) -> Self {
Self::HttpResponseError {
status,
headers: None,
source: None.into(),
}
}
}
impl From<(StatusCode, HeaderMap)> for RecorderError {
fn from((status, headers): (StatusCode, HeaderMap)) -> Self {
Self::HttpResponseError {
status,
headers: Some(headers),
source: None.into(),
}
}
}
impl IntoResponse for RecorderError {
    /// Map the error onto an HTTP response:
    /// - auth errors delegate to `AuthError`'s own response mapping
    /// - `HttpResponseError` replays its stored status and headers; the body
    ///   message comes from the wrapped source error, falling back to the
    ///   status code's canonical reason phrase
    /// - `ModelEntityNotFound` becomes a 404 with the entity name as message
    /// - everything else becomes a 500 carrying the error's `Display` text
    fn into_response(self) -> Response {
        match self {
            Self::AuthError { source: auth_error } => auth_error.into_response(),
            Self::HttpResponseError {
                status,
                headers,
                source,
            } => {
                let message = source
                    .into_inner()
                    .map(|s| s.to_string())
                    .unwrap_or_else(|| {
                        String::from(status.canonical_reason().unwrap_or("Unknown"))
                    });
                // `headers` is an `Option<HeaderMap>`; `None` simply adds nothing.
                (
                    status,
                    headers,
                    Json::<StandardErrorResponse>(StandardErrorResponse::from(message)),
                )
                    .into_response()
            }
            Self::ModelEntityNotFound { entity } => (
                StatusCode::NOT_FOUND,
                Json::<StandardErrorResponse>(StandardErrorResponse::from(entity.to_string())),
            )
                .into_response(),
            err => (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
            )
                .into_response(),
        }
    }
}
impl From<reqwest::Error> for RecorderError {
fn from(error: reqwest::Error) -> Self {
FetchError::from(error).into()
}
}
impl From<reqwest_middleware::Error> for RecorderError {
fn from(error: reqwest_middleware::Error) -> Self {
FetchError::from(error).into()
}
}
impl From<http::header::InvalidHeaderValue> for RecorderError {
    /// Collapse the underlying error into the unit variant; the original
    /// error value is intentionally discarded.
    fn from(_error: http::header::InvalidHeaderValue) -> Self {
        Self::InvalidHeaderValueError
    }
}
impl From<http::header::InvalidHeaderName> for RecorderError {
    /// Collapse the underlying error into the unit variant; the original
    /// error value is intentionally discarded.
    fn from(_error: http::header::InvalidHeaderName) -> Self {
        Self::InvalidHeaderNameError
    }
}
impl From<http::method::InvalidMethod> for RecorderError {
    /// Collapse the underlying error into the unit variant; the original
    /// error value is intentionally discarded.
    fn from(_error: http::method::InvalidMethod) -> Self {
        Self::InvalidMethodError
    }
}
/// Crate-wide result alias: any fallible recorder operation returns this.
pub type RecorderResult<T> = Result<T, RecorderError>;

View File

@@ -0,0 +1,6 @@
pub mod app_error;
pub mod response;
pub use app_error::{RecorderError, RecorderResult};
pub use response::StandardErrorResponse;
pub use util::errors::OptDynErr;

View File

@@ -0,0 +1,19 @@
use serde::Serialize;
/// JSON envelope for error responses returned by the HTTP layer.
#[derive(Serialize, Debug, Clone)]
pub struct StandardErrorResponse<T = ()> {
    /// Whether the request succeeded; always `false` when built via `From<String>`.
    pub success: bool,
    /// Human-readable error description.
    pub message: String,
    /// Optional extra payload; omitted from the JSON output when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result: Option<T>,
}
impl<T> From<String> for StandardErrorResponse<T> {
fn from(value: String) -> Self {
StandardErrorResponse {
success: false,
message: value,
result: None,
}
}
}

View File

@@ -1,11 +1,15 @@
use eyre::OptionExt;
use chrono::{DateTime, Utc};
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use quirks_path::Path;
use regex::Regex;
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, whatever};
use crate::i18n::LanguagePreset;
use crate::{
errors::app_error::{RecorderError, RecorderResult},
extract::defs::SUBTITLE_LANG,
};
lazy_static! {
static ref TORRENT_EP_PARSE_RULES: Vec<FancyRegex> = {
@@ -30,6 +34,14 @@ lazy_static! {
Regex::new(r"([Ss]|Season )(\d{1,3})").unwrap();
}
#[derive(Clone, Debug)]
pub struct EpisodeEnclosureMeta {
pub magnet_link: Option<String>,
pub torrent_link: Option<String>,
pub pub_date: Option<DateTime<Utc>>,
pub content_length: Option<i64>,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct TorrentEpisodeMediaMeta {
pub fansub: Option<String>,
@@ -56,7 +68,7 @@ fn get_fansub(group_and_title: &str) -> (Option<&str>, &str) {
(None, None) => (None, ""),
(Some(n0), None) => (None, *n0),
(Some(n0), Some(n1)) => {
if GET_FANSUB_FULL_MATCH_RE.is_match(*n1) {
if GET_FANSUB_FULL_MATCH_RE.is_match(n1) {
(None, group_and_title)
} else {
(Some(*n0), *n1)
@@ -84,20 +96,29 @@ fn get_season_and_title(season_and_title: &str) -> (String, i32) {
(title, season)
}
fn get_subtitle_lang(subtitle_str: &str) -> Option<LanguagePreset> {
let lowercase = subtitle_str.to_lowercase();
let media_name_lower = lowercase.trim();
LanguagePreset::parse(media_name_lower).ok()
/// Detect the subtitle language of a media file name by scanning the
/// `SUBTITLE_LANG` alias table for a case-insensitive substring match.
///
/// Returns the canonical language code of the first matching table entry,
/// or `None` when no alias matches.
fn get_subtitle_lang(media_name: &str) -> Option<&str> {
    let media_name_lower = media_name.to_lowercase();
    for (lang, lang_aliases) in SUBTITLE_LANG.iter() {
        // Skip empty aliases: `str::contains("")` is always true, so an empty
        // alias would make the first table entry match every input name.
        if lang_aliases
            .iter()
            .any(|alias| !alias.is_empty() && media_name_lower.contains(alias))
        {
            return Some(lang);
        }
    }
    None
}
pub fn parse_episode_media_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> eyre::Result<TorrentEpisodeMediaMeta> {
) -> RecorderResult<TorrentEpisodeMediaMeta> {
let media_name = torrent_path
.file_name()
.ok_or_else(|| eyre::eyre!("failed to get file name of {}", torrent_path))?;
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {torrent_path}")
})?;
let mut match_obj = None;
for rule in TORRENT_EP_PARSE_RULES.iter() {
match_obj = if let Some(torrent_name) = torrent_name.as_ref() {
@@ -112,7 +133,7 @@ pub fn parse_episode_media_meta_from_torrent(
if let Some(match_obj) = match_obj {
let group_season_and_title = match_obj
.get(1)
.ok_or_else(|| eyre::eyre!("should have 1 group"))?
.whatever_context::<_, RecorderError>("should have 1 group")?
.as_str();
let (fansub, season_and_title) = get_fansub(group_season_and_title);
let (title, season) = if let Some(season) = season {
@@ -123,13 +144,13 @@ pub fn parse_episode_media_meta_from_torrent(
};
let episode_index = match_obj
.get(2)
.ok_or_eyre("should have 2 group")?
.whatever_context::<_, RecorderError>("should have 2 group")?
.as_str()
.parse::<i32>()
.unwrap_or(1);
let extname = torrent_path
.extension()
.map(|e| format!(".{}", e))
.map(|e| format!(".{e}"))
.unwrap_or_default();
Ok(TorrentEpisodeMediaMeta {
fansub: fansub.map(|s| s.to_string()),
@@ -139,11 +160,11 @@ pub fn parse_episode_media_meta_from_torrent(
extname,
})
} else {
Err(eyre::eyre!(
whatever!(
"failed to parse episode media meta from torrent_path='{}' torrent_name='{:?}'",
torrent_path,
torrent_name
))
)
}
}
@@ -151,17 +172,19 @@ pub fn parse_episode_subtitle_meta_from_torrent(
torrent_path: &Path,
torrent_name: Option<&str>,
season: Option<i32>,
) -> eyre::Result<TorrentEpisodeSubtitleMeta> {
) -> RecorderResult<TorrentEpisodeSubtitleMeta> {
let media_meta = parse_episode_media_meta_from_torrent(torrent_path, torrent_name, season)?;
let media_name = torrent_path
.file_name()
.ok_or_else(|| eyre::eyre!("failed to get file name of {}", torrent_path))?;
.with_whatever_context::<_, _, RecorderError>(|| {
format!("failed to get file name of {torrent_path}")
})?;
let lang = get_subtitle_lang(media_name);
Ok(TorrentEpisodeSubtitleMeta {
media: media_meta,
lang: lang.map(|s| s.name_str().to_string()),
lang: lang.map(|s| s.to_string()),
})
}
@@ -170,8 +193,8 @@ mod tests {
use quirks_path::Path;
use super::{
parse_episode_media_meta_from_torrent, parse_episode_subtitle_meta_from_torrent,
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta,
TorrentEpisodeMediaMeta, TorrentEpisodeSubtitleMeta, parse_episode_media_meta_from_torrent,
parse_episode_subtitle_meta_from_torrent,
};
#[test]
@@ -254,17 +277,17 @@ mod tests {
)
}
pub fn test_torrent_ep_parser(raw_name: &str, expected: &str) {
let extname = Path::new(raw_name)
pub fn test_torrent_ep_parser(origin_name: &str, expected: &str) {
let extname = Path::new(origin_name)
.extension()
.map(|e| format!(".{}", e))
.map(|e| format!(".{e}"))
.unwrap_or_default()
.to_lowercase();
if extname == ".srt" || extname == ".ass" {
let expected: Option<TorrentEpisodeSubtitleMeta> = serde_json::from_str(expected).ok();
let found_raw =
parse_episode_subtitle_meta_from_torrent(Path::new(raw_name), None, None);
parse_episode_subtitle_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {
@@ -285,7 +308,8 @@ mod tests {
assert_eq!(expected, found);
} else {
let expected: Option<TorrentEpisodeMediaMeta> = serde_json::from_str(expected).ok();
let found_raw = parse_episode_media_meta_from_torrent(Path::new(raw_name), None, None);
let found_raw =
parse_episode_media_meta_from_torrent(Path::new(origin_name), None, None);
let found = found_raw.as_ref().ok().cloned();
if expected != found {

View File

@@ -0,0 +1,3 @@
pub mod extract;
pub use extract::*;

View File

@@ -0,0 +1,34 @@
use fancy_regex::Regex as FancyRegex;
use lazy_static::lazy_static;
use regex::Regex;
// Canonical language codes used as the key of each `SUBTITLE_LANG` entry.
const LANG_ZH_TW: &str = "zh-tw";
const LANG_ZH: &str = "zh";
const LANG_EN: &str = "en";
// NOTE(review): "jp" is not the ISO 639-1 code for Japanese ("ja") — confirm
// downstream consumers expect "jp" before changing it.
const LANG_JP: &str = "jp";
lazy_static! {
    // NOTE(review): `S\|` escapes the pipe, so this group matches a literal
    // "S|" rather than alternating between "S" and "Season " — it looks like
    // it was meant to be `(S|[Ss]eason\s+)`; confirm against intended inputs.
    pub static ref SEASON_REGEX: Regex =
        Regex::new(r"(S\|[Ss]eason\s+)(\d+)").expect("Invalid regex");
    // Ordered torrent-name parsing rules; each captures (title, episode, rest).
    pub static ref TORRENT_PRASE_RULE_REGS: Vec<FancyRegex> = vec![
        FancyRegex::new(
            r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)"
        )
        .unwrap(),
        FancyRegex::new(
            r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)"
        )
        .unwrap(),
        FancyRegex::new(r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)").unwrap(),
        FancyRegex::new(r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)").unwrap(),
        FancyRegex::new(r"(.*)(?:S\d{2})?EP?(\d+)(.*)").unwrap(),
    ];
    // Alias table mapping a canonical language code to the substrings that
    // indicate it in a (lowercased) file name. Order matters: first match wins.
    // NOTE(review): several alias lists contain empty strings (possibly CJK
    // aliases lost in transit); an empty alias makes `str::contains` match
    // everything, so lookups that don't skip empties always hit the first row.
    pub static ref SUBTITLE_LANG: Vec<(&'static str, Vec<&'static str>)> = {
        vec![
            (LANG_ZH_TW, vec!["tc", "cht", "", "zh-tw"]),
            (LANG_ZH, vec!["sc", "chs", "", "zh", "zh-cn"]),
            (LANG_EN, vec!["en", "eng", ""]),
            (LANG_JP, vec!["jp", "jpn", ""]),
        ]
    };
}

View File

@@ -0,0 +1,11 @@
pub mod styles;
use html_escape::decode_html_entities;
use itertools::Itertools;
use scraper::ElementRef;
pub use styles::{extract_background_image_src_from_style_attr, extract_style_from_attr};
/// Collect an element's text nodes (joined with `,`), decode HTML entities,
/// and trim surrounding whitespace.
pub fn extract_inner_text_from_element_ref(el: ElementRef<'_>) -> String {
    let joined = el.text().collect::<Vec<_>>().join(",");
    decode_html_entities(&joined).trim().to_string()
}

View File

@@ -0,0 +1,43 @@
use lightningcss::{
declaration::DeclarationBlock, properties::Property, values::image::Image as CSSImage,
};
use url::Url;
use crate::extract::media::extract_image_src_from_str;
/// Parse an inline `style="…"` attribute into a CSS declaration block.
///
/// Returns `None` when the attribute fails to parse.
pub fn extract_style_from_attr(style_attr: &str) -> Option<DeclarationBlock> {
    // `.ok()` already yields the Option we need — no `?` + `Some` round-trip.
    DeclarationBlock::parse_string(style_attr, Default::default()).ok()
}
/// Pull a background-image URL out of an inline `style` attribute.
///
/// Inspects both `background-image` and the `background` shorthand, returning
/// the first `url(...)` value that resolves against `base_url`.
pub fn extract_background_image_src_from_style_attr(
    style_attr: &str,
    base_url: &Url,
) -> Option<Url> {
    extract_style_from_attr(style_attr).and_then(|style| {
        style.iter().find_map(|(prop, _)| {
            match prop {
                Property::BackgroundImage(images) => {
                    // `background-image` may list several layers; take the first URL.
                    for img in images {
                        if let CSSImage::Url(path) = img
                            && let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
                        {
                            return Some(url);
                        }
                    }
                }
                Property::Background(backgrounds) => {
                    // The `background` shorthand nests its image component per layer.
                    for bg in backgrounds {
                        if let CSSImage::Url(path) = &bg.image
                            && let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
                        {
                            return Some(url);
                        }
                    }
                }
                _ => {}
            }
            None
        })
    })
}

View File

@@ -0,0 +1,181 @@
use axum::{
extract::FromRequestParts,
http::{HeaderName, HeaderValue, Uri, header, request::Parts},
};
use itertools::Itertools;
use url::Url;
use crate::errors::RecorderError;
/// Fields from a "Forwarded" header per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4)
#[derive(Debug, Clone)]
pub struct ForwardedHeader {
    /// All `for=` entries, in order of appearance.
    pub for_field: Vec<String>,
    /// The `by=` parameter, if present.
    pub by: Option<String>,
    /// The `host=` parameter, if present.
    pub host: Option<String>,
    /// The `proto=` parameter, if present.
    pub proto: Option<String>,
}

impl ForwardedHeader {
    /// Return the 'for' headers as a list of [std::net::IpAddr]'s.
    ///
    /// Entries that do not parse as an IP address are silently dropped.
    pub fn for_as_ipaddr(self) -> Vec<std::net::IpAddr> {
        let mut addrs = Vec::with_capacity(self.for_field.len());
        for entry in &self.for_field {
            // Bracketed IPv6 form, e.g. "[2001:db8::1]:4711": take what sits
            // between '[' and ']' before parsing; otherwise parse as-is.
            let candidate = if entry.contains(']') {
                let before_close = match entry.split(']').next() {
                    Some(s) => s,
                    None => continue,
                };
                match before_close.split('[').next_back() {
                    Some(s) => s,
                    None => continue,
                }
            } else {
                entry.as_str()
            };
            if let Ok(addr) = candidate.parse::<std::net::IpAddr>() {
                addrs.push(addr);
            }
        }
        addrs
    }
}
/// This parses the Forwarded header, and returns a list of the IPs in the
/// "for=" fields. Per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4)
impl TryFrom<HeaderValue> for ForwardedHeader {
    type Error = String;

    /// By-value convenience wrapper; delegates to the `&HeaderValue` impl.
    fn try_from(forwarded: HeaderValue) -> Result<ForwardedHeader, String> {
        ForwardedHeader::try_from(&forwarded)
    }
}
/// This parses the Forwarded header, and returns a list of the IPs in the
/// "for=" fields. Per [RFC7239 sec 4](https://www.rfc-editor.org/rfc/rfc7239#section-4)
impl TryFrom<&HeaderValue> for ForwardedHeader {
    type Error = String;

    /// Parse the `;`-separated `k=v` parameters of a `Forwarded` header.
    ///
    /// Every `for=` entry is collected (including comma-separated lists); for
    /// `by=`/`host=`/`proto=` the last occurrence wins. The only error case is
    /// a header value that is not valid UTF-8.
    ///
    /// NOTE(review): values are lowercased wholesale, and RFC 7239
    /// quoted-strings keep their surrounding quotes — confirm downstream
    /// consumers expect both behaviors.
    fn try_from(forwarded: &HeaderValue) -> Result<ForwardedHeader, String> {
        let mut for_field: Vec<String> = Vec::new();
        let mut by: Option<String> = None;
        let mut host: Option<String> = None;
        let mut proto: Option<String> = None;
        // first get the k=v pairs
        forwarded
            .to_str()
            .map_err(|err| err.to_string())?
            .split(';')
            .for_each(|s| {
                let s = s.trim().to_lowercase();
                // The for value can look like this:
                // for=192.0.2.43, for=198.51.100.17
                // so we need to handle this case
                if s.starts_with("for=") || s.starts_with("for =") {
                    // we have a valid thing to grab
                    let chunks: Vec<String> = s
                        .split(',')
                        .filter_map(|chunk| {
                            chunk.trim().split('=').next_back().map(|c| c.to_string())
                        })
                        .collect::<Vec<String>>();
                    for_field.extend(chunks);
                } else if s.starts_with("by=") {
                    by = s.split('=').next_back().map(|c| c.to_string());
                } else if s.starts_with("host=") {
                    host = s.split('=').next_back().map(|c| c.to_string());
                } else if s.starts_with("proto=") {
                    proto = s.split('=').next_back().map(|c| c.to_string());
                } else {
                    // probably need to work out what to do here
                }
            });
        Ok(ForwardedHeader {
            for_field,
            by,
            host,
            proto,
        })
    }
}
/// All proxy-forwarding information extracted from one request: the parsed
/// RFC 7239 `Forwarded` header, the de-facto `X-Forwarded-*` headers, plus
/// `Host`, `Origin` and the request URI.
#[derive(Clone, Debug)]
pub struct ForwardedRelatedInfo {
    /// Parsed `Forwarded` header, if present and parseable.
    pub forwarded: Option<ForwardedHeader>,
    /// Raw `X-Forwarded-Proto` value.
    pub x_forwarded_proto: Option<String>,
    /// Raw `X-Forwarded-Host` value.
    pub x_forwarded_host: Option<String>,
    /// `X-Forwarded-For` entries, split on commas and trimmed.
    pub x_forwarded_for: Option<Vec<String>>,
    /// Raw `Host` header value.
    pub host: Option<String>,
    /// The request URI.
    pub uri: Uri,
    /// Raw `Origin` header value.
    pub origin: Option<String>,
}
impl<T> FromRequestParts<T> for ForwardedRelatedInfo {
    type Rejection = RecorderError;

    /// Collect every forwarding-related header (`Forwarded`, `X-Forwarded-*`,
    /// `Host`, `Origin`) plus the request URI into one struct.
    ///
    /// Never actually rejects: every header is optional and parse failures are
    /// treated as absence.
    fn from_request_parts(
        parts: &mut Parts,
        _state: &T,
    ) -> impl Future<Output = Result<Self, Self::Rejection>> + Send {
        let headers = &parts.headers;
        let forwarded = headers
            .get(header::FORWARDED)
            .and_then(|s| ForwardedHeader::try_from(s.clone()).ok());

        let x_forwarded_proto = headers
            .get(HeaderName::from_static("x-forwarded-proto"))
            .and_then(|s| s.to_str().map(String::from).ok());

        let x_forwarded_host = headers
            .get(HeaderName::from_static("x-forwarded-host"))
            .and_then(|s| s.to_str().map(String::from).ok());

        let x_forwarded_for = headers
            .get(HeaderName::from_static("x-forwarded-for"))
            .and_then(|s| s.to_str().ok())
            .and_then(|s| {
                // NOTE(review): `split` always yields at least one element, so
                // `l` can never be empty here — an empty header produces
                // `Some(vec![""])` rather than `None`; confirm that's intended.
                let l = s.split(",").map(|s| s.trim().to_string()).collect_vec();
                if l.is_empty() { None } else { Some(l) }
            });

        let host = headers
            .get(header::HOST)
            .and_then(|s| s.to_str().map(String::from).ok());

        let origin = headers
            .get(header::ORIGIN)
            .and_then(|s| s.to_str().map(String::from).ok());

        // No async work is needed; wrap the result in an immediately-ready future.
        futures::future::ready(Ok(ForwardedRelatedInfo {
            host,
            x_forwarded_for,
            x_forwarded_host,
            x_forwarded_proto,
            forwarded,
            uri: parts.uri.clone(),
            origin,
        }))
    }
}
impl ForwardedRelatedInfo {
    /// Scheme resolution order: `Forwarded: proto=` → `X-Forwarded-Proto` →
    /// the request URI's own scheme.
    pub fn resolved_protocol(&self) -> Option<&str> {
        if let Some(proto) = self.forwarded.as_ref().and_then(|f| f.proto.as_deref()) {
            return Some(proto);
        }
        if let Some(proto) = self.x_forwarded_proto.as_deref() {
            return Some(proto);
        }
        self.uri.scheme_str()
    }

    /// Host resolution order: `Forwarded: host=` → `X-Forwarded-Host` →
    /// the request URI's own host.
    pub fn resolved_host(&self) -> Option<&str> {
        if let Some(host) = self.forwarded.as_ref().and_then(|f| f.host.as_deref()) {
            return Some(host);
        }
        if let Some(host) = self.x_forwarded_host.as_deref() {
            return Some(host);
        }
        self.uri.host()
    }

    /// Combine the resolved scheme and host into an origin URL, if both are
    /// known and the combination parses.
    pub fn resolved_origin(&self) -> Option<Url> {
        let protocol = self.resolved_protocol()?;
        let host = self.resolved_host()?;
        Url::parse(&format!("{protocol}://{host}")).ok()
    }
}

View File

@@ -0,0 +1,7 @@
use url::Url;
/// Resolve an image `src` against `base_url` and strip any query component,
/// yielding `None` when the join fails.
pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
    base_url.join(image_src).ok().map(|mut resolved| {
        resolved.set_query(None);
        resolved
    })
}

View File

@@ -0,0 +1,344 @@
use std::{fmt::Debug, ops::Deref};
use fetch::{HttpClient, HttpClientTrait};
use maplit::hashmap;
use scraper::{Html, Selector};
use sea_orm::{
ActiveModelTrait, ActiveValue::Set, ColumnTrait, DbErr, EntityTrait, QueryFilter, TryIntoModel,
};
use url::Url;
use util::OptDynErr;
use super::{MikanConfig, MikanCredentialForm, constants::MIKAN_ACCOUNT_MANAGE_PAGE_PATH};
use crate::{
app::AppContextTrait,
crypto::UserPassCredential,
errors::{RecorderError, RecorderResult},
extract::mikan::constants::{MIKAN_LOGIN_PAGE_PATH, MIKAN_LOGIN_PAGE_SEARCH},
models::credential_3rd::{self, Credential3rdType},
};
/// HTTP client for the mikan site, optionally carrying a user/pass credential.
#[derive(Debug)]
pub struct MikanClient {
    // Underlying HTTP client; carries cookies and middleware.
    http_client: HttpClient,
    // Configured base URL that request paths are joined onto.
    base_url: Url,
    // Origin (scheme + host) derived from `base_url` in `from_config`.
    origin_url: Url,
    // Credential used by `login`; `None` until forked with a credential.
    userpass_credential: Option<UserPassCredential>,
}
impl MikanClient {
    /// Build a client from configuration, deriving the origin URL
    /// (scheme + host only) from the configured base URL.
    pub async fn from_config(config: MikanConfig) -> Result<Self, RecorderError> {
        let http_client = HttpClient::from_config(config.http_client)?;
        let base_url = config.base_url;
        let origin_url = Url::parse(&base_url.origin().unicode_serialization())?;
        Ok(Self {
            http_client,
            base_url,
            origin_url,
            userpass_credential: None,
        })
    }

    /// Probe the account-manage page to decide whether the current cookie jar
    /// represents a logged-in session.
    ///
    /// A 2xx response means logged in; a redirect whose `location` points at
    /// the login page means logged out; anything else is surfaced as a
    /// `Credential3rdError`.
    pub async fn has_login(&self) -> RecorderResult<bool> {
        let account_manage_page_url = self.base_url.join(MIKAN_ACCOUNT_MANAGE_PAGE_PATH)?;
        let res = self.http_client.get(account_manage_page_url).send().await?;

        let status = res.status();

        if status.is_success() {
            Ok(true)
        } else if status.is_redirection()
            && res.headers().get("location").is_some_and(|location| {
                location
                    .to_str()
                    .is_ok_and(|location_str| location_str.contains(MIKAN_LOGIN_PAGE_PATH))
            })
        {
            Ok(false)
        } else {
            Err(RecorderError::Credential3rdError {
                message: format!("mikan account check has login failed, status = {status}"),
                source: None.into(),
            })
        }
    }

    /// Log in with the attached username/password credential.
    ///
    /// Flow: fetch the login page to obtain the antiforgery token (and the
    /// matching cookie), then POST the login form; success is detected by the
    /// server responding with a redirect.
    ///
    /// Fails with `Credential3rdError` when no credential is attached or any
    /// step of the exchange fails.
    pub async fn login(&self) -> RecorderResult<()> {
        let userpass_credential =
            self.userpass_credential
                .as_ref()
                .ok_or_else(|| RecorderError::Credential3rdError {
                    message: "mikan login failed, credential required".to_string(),
                    source: None.into(),
                })?;

        let login_page_url = {
            let mut u = self.base_url.join(MIKAN_LOGIN_PAGE_PATH)?;
            u.set_query(Some(MIKAN_LOGIN_PAGE_SEARCH));
            u
        };

        let antiforgery_token = {
            // access login page to get antiforgery cookie
            let login_page_html = self
                .http_client
                .get(login_page_url.clone())
                .send()
                .await
                .map_err(|error| RecorderError::Credential3rdError {
                    message: "failed to get mikan login page".to_string(),
                    source: OptDynErr::some_boxed(error),
                })?
                .text()
                .await?;

            let login_page_html = Html::parse_document(&login_page_html);

            // The token is embedded as a hidden input in the login form.
            let antiforgery_selector =
                Selector::parse("input[name='__RequestVerificationToken']").unwrap();

            login_page_html
                .select(&antiforgery_selector)
                .next()
                .and_then(|element| element.value().attr("value").map(|value| value.to_string()))
                .ok_or_else(|| RecorderError::Credential3rdError {
                    message: "mikan login failed, failed to get antiforgery token".to_string(),
                    source: None.into(),
                })
        }?;

        let login_post_form = hashmap! {
            "__RequestVerificationToken".to_string() => antiforgery_token,
            "UserName".to_string() => userpass_credential.username.clone(),
            "Password".to_string() => userpass_credential.password.clone(),
            "RememberMe".to_string() => "true".to_string(),
        };

        let login_post_res = self
            .http_client
            .post(login_page_url.clone())
            .form(&login_post_form)
            .send()
            .await
            .map_err(|err| RecorderError::Credential3rdError {
                message: "mikan login failed".to_string(),
                source: OptDynErr::some_boxed(err),
            })?;

        // A successful login answers with a redirect carrying a location header.
        if login_post_res.status().is_redirection()
            && login_post_res.headers().contains_key("location")
        {
            Ok(())
        } else {
            Err(RecorderError::Credential3rdError {
                message: "mikan login failed, no redirecting".to_string(),
                source: None.into(),
            })
        }
    }

    /// Persist a submitted credential form for `subscriber_id`, encrypting
    /// sensitive fields (via `try_encrypt`) before saving.
    pub async fn submit_credential_form(
        &self,
        ctx: &dyn AppContextTrait,
        subscriber_id: i32,
        credential_form: MikanCredentialForm,
    ) -> RecorderResult<credential_3rd::Model> {
        let db = ctx.db();
        let am = credential_3rd::ActiveModel {
            username: Set(Some(credential_form.username)),
            password: Set(Some(credential_form.password)),
            user_agent: Set(Some(credential_form.user_agent)),
            credential_type: Set(Credential3rdType::Mikan),
            subscriber_id: Set(subscriber_id),
            ..Default::default()
        }
        .try_encrypt(ctx)
        .await?;

        let credential: credential_3rd::Model = am.save(db).await?.try_into_model()?;
        Ok(credential)
    }

    /// Save the current cookie jar (if any) back onto the stored credential,
    /// scoped to the owning subscriber. A client without a cookie store is a
    /// no-op.
    pub async fn sync_credential_cookies(
        &self,
        ctx: &dyn AppContextTrait,
        credential_id: i32,
        subscriber_id: i32,
    ) -> RecorderResult<()> {
        let cookies = self.http_client.save_cookie_store_to_json()?;
        if let Some(cookies) = cookies {
            let am = credential_3rd::ActiveModel {
                cookies: Set(Some(cookies)),
                ..Default::default()
            }
            .try_encrypt(ctx)
            .await?;
            credential_3rd::Entity::update_many()
                .set(am)
                .filter(credential_3rd::Column::Id.eq(credential_id))
                .filter(credential_3rd::Column::SubscriberId.eq(subscriber_id))
                .exec(ctx.db())
                .await?;
        }
        Ok(())
    }

    /// Clone this client with the given username/password credential attached,
    /// restoring its saved cookies and user agent onto the forked HTTP client.
    pub async fn fork_with_userpass_credential(
        &self,
        userpass_credential: UserPassCredential,
    ) -> RecorderResult<Self> {
        let mut fork = self
            .http_client
            .fork()
            .attach_cookies(userpass_credential.cookies.as_deref())?;

        if let Some(user_agent) = userpass_credential.user_agent.as_ref() {
            fork = fork.attach_user_agent(user_agent);
        }

        let userpass_credential_opt = Some(userpass_credential);

        Ok(Self {
            http_client: HttpClient::from_fork(fork)?,
            base_url: self.base_url.clone(),
            origin_url: self.origin_url.clone(),
            userpass_credential: userpass_credential_opt,
        })
    }

    /// Load the mikan credential `credential_id` owned by `subscriber_id`,
    /// decrypt it, and fork a client that uses it.
    ///
    /// Fails when the credential is missing or is not a mikan credential.
    pub async fn fork_with_credential_id(
        &self,
        ctx: &dyn AppContextTrait,
        credential_id: i32,
        subscriber_id: i32,
    ) -> RecorderResult<Self> {
        let credential =
            credential_3rd::Model::find_by_id_and_subscriber_id(ctx, credential_id, subscriber_id)
                .await?;
        if let Some(credential) = credential {
            if credential.credential_type != Credential3rdType::Mikan {
                return Err(RecorderError::Credential3rdError {
                    message: "credential is not a mikan credential".to_string(),
                    source: None.into(),
                });
            }
            let userpass_credential: UserPassCredential =
                credential.try_into_userpass_credential(ctx)?;
            self.fork_with_userpass_credential(userpass_credential)
                .await
        } else {
            // NOTE(review): `from_db_record_not_found` stringifies its argument,
            // so passing a `DbErr` here nests one RecordNotFound message inside
            // another — confirm the double wrapping is intended.
            Err(RecorderError::from_db_record_not_found(
                DbErr::RecordNotFound(format!("credential={credential_id} not found")),
            ))
        }
    }

    /// The configured base URL.
    pub fn base_url(&self) -> &Url {
        &self.base_url
    }

    /// The underlying HTTP client.
    pub fn client(&self) -> &HttpClient {
        &self.http_client
    }
}
impl Deref for MikanClient {
    // Expose the inner middleware client's methods directly on `MikanClient`.
    type Target = fetch::reqwest_middleware::ClientWithMiddleware;

    fn deref(&self) -> &Self::Target {
        &self.http_client
    }
}
// Marker-style impl: `HttpClientTrait` requires no method overrides here.
impl HttpClientTrait for MikanClient {}
#[cfg(test)]
mod tests {
    #![allow(unused_variables)]
    use std::{assert_matches::assert_matches, sync::Arc};

    use rstest::{fixture, rstest};
    use tracing::Level;

    use super::*;
    use crate::test_utils::{
        app::TestingAppContext,
        crypto::build_testing_crypto_service,
        database::build_testing_database_service,
        mikan::{MikanMockServer, build_testing_mikan_client, build_testing_mikan_credential_form},
        tracing::try_init_testing_tracing,
    };

    /// Assemble an app context backed by a testing database, crypto service,
    /// and a mikan client pointed at the given (mock) base URL.
    async fn create_testing_context(
        mikan_base_url: Url,
    ) -> RecorderResult<Arc<dyn AppContextTrait>> {
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
        let db_service = build_testing_database_service(Default::default()).await?;
        let crypto_service = build_testing_crypto_service().await?;
        let ctx = TestingAppContext::builder()
            .db(db_service)
            .crypto(crypto_service)
            .mikan(mikan_client)
            .build();

        Ok(Arc::new(ctx))
    }

    #[fixture]
    fn before_each() {
        try_init_testing_tracing(Level::DEBUG);
    }

    /// End-to-end credential flow: submit a credential form (stored encrypted),
    /// verify decryption round-trips, then fork a client with the stored
    /// credential and log in against the mock server.
    #[rstest]
    #[tokio::test]
    async fn test_mikan_client_submit_credential_form(before_each: ()) -> RecorderResult<()> {
        let mut mikan_server = MikanMockServer::new().await?;

        let app_ctx = create_testing_context(mikan_server.base_url().clone()).await?;

        let _login_mock = mikan_server.mock_get_login_page();

        let mikan_client = app_ctx.mikan();
        let crypto_service = app_ctx.crypto();

        let credential_form = build_testing_mikan_credential_form();

        let subscriber_id = 1;

        let credential_model = mikan_client
            .submit_credential_form(app_ctx.as_ref(), subscriber_id, credential_form.clone())
            .await?;

        let expected_username = &credential_form.username;
        let expected_password = &credential_form.password;
        // Stored fields are encrypted at rest; decrypt before comparing.
        let found_username = crypto_service
            .decrypt_string(credential_model.username.as_deref().unwrap_or_default())?;
        let found_password = crypto_service
            .decrypt_string(credential_model.password.as_deref().unwrap_or_default())?;

        assert_eq!(&found_username, expected_username);
        assert_eq!(&found_password, expected_password);

        // The base client carries no credential, so it is logged out and
        // `login()` must fail with a credential error.
        let has_login = mikan_client.has_login().await?;

        assert!(!has_login);

        assert_matches!(
            mikan_client.login().await,
            Err(RecorderError::Credential3rdError { .. })
        );

        // Forking with the stored credential enables a successful login.
        let mikan_client = mikan_client
            .fork_with_credential_id(app_ctx.as_ref(), credential_model.id, subscriber_id)
            .await?;

        mikan_client.login().await?;

        let has_login = mikan_client.has_login().await?;

        assert!(has_login);

        Ok(())
    }
}

View File

@@ -0,0 +1,9 @@
use fetch::HttpClientConfig;
use serde::{Deserialize, Serialize};
use url::Url;
/// Configuration for the mikan HTTP client.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanConfig {
    /// Settings forwarded to the underlying HTTP client.
    pub http_client: HttpClientConfig,
    /// Root URL of the mikan site; request paths are joined onto this.
    pub base_url: Url,
}

View File

@@ -0,0 +1,20 @@
// Storage bucket key used for mikan poster assets.
pub const MIKAN_POSTER_BUCKET_KEY: &str = "mikan_poster";
// Placeholder fansub name/id mikan uses for unknown subtitle groups / raws.
pub const MIKAN_UNKNOWN_FANSUB_NAME: &str = "生肉/不明字幕";
pub const MIKAN_UNKNOWN_FANSUB_ID: &str = "202";
// Page paths on the mikan site.
pub const MIKAN_LOGIN_PAGE_PATH: &str = "/Account/Login";
// Query string appended to the login page URL (`ReturnUrl=/`).
pub const MIKAN_LOGIN_PAGE_SEARCH: &str = "ReturnUrl=%2F";
pub const MIKAN_ACCOUNT_MANAGE_PAGE_PATH: &str = "/Account/Manage";
pub const MIKAN_SEASON_FLOW_PAGE_PATH: &str = "/Home/BangumiCoverFlow";
pub const MIKAN_BANGUMI_HOMEPAGE_PATH: &str = "/Home/Bangumi";
pub const MIKAN_BANGUMI_EXPAND_SUBSCRIBED_PAGE_PATH: &str = "/Home/ExpandBangumi";
pub const MIKAN_EPISODE_HOMEPAGE_PATH: &str = "/Home/Episode";
pub const MIKAN_BANGUMI_POSTER_PATH: &str = "/images/Bangumi";
pub const MIKAN_EPISODE_TORRENT_PATH: &str = "/Download";
// RSS feed paths.
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH: &str = "/RSS/MyBangumi";
pub const MIKAN_BANGUMI_RSS_PATH: &str = "/RSS/Bangumi";
pub const MIKAN_FANSUB_HOMEPAGE_PATH: &str = "/Home/PublishGroup";
// Query-string parameter names used across mikan URLs.
pub const MIKAN_BANGUMI_ID_QUERY_KEY: &str = "bangumiId";
pub const MIKAN_FANSUB_ID_QUERY_KEY: &str = "subgroupid";
pub const MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY: &str = "token";
pub const MIKAN_SEASON_STR_QUERY_KEY: &str = "seasonStr";
pub const MIKAN_YEAR_QUERY_KEY: &str = "year";

View File

@@ -0,0 +1,20 @@
use std::fmt::Debug;
use serde::{Deserialize, Serialize};
/// Username/password credential form for the mikan site.
///
/// `Debug` is implemented manually (below) to redact all fields.
#[derive(Default, Clone, Deserialize, Serialize)]
pub struct MikanCredentialForm {
    /// Account password.
    pub password: String,
    /// Account username.
    pub username: String,
    /// User-Agent string to present when using this credential.
    pub user_agent: String,
}
impl Debug for MikanCredentialForm {
    /// Redact every field so credentials never leak into logs or panics.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `&"[secrecy]"` prints identically to `&String::from("[secrecy]")`
        // but avoids three heap allocations per Debug format.
        f.debug_struct("MikanCredentialForm")
            .field("username", &"[secrecy]")
            .field("password", &"[secrecy]")
            .field("user_agent", &"[secrecy]")
            .finish()
    }
}

Some files were not shown because too many files have changed in this diff Show More