Compare commits: 2f5b001bb6 ... dev

144 commits

| SHA1 |
|---|
| 94919878ea |
| 81bf27ed28 |
| 5be5b9f634 |
| 6cdd8c27ce |
| 4174cea728 |
| 3aad31a36b |
| 004fed9b2e |
| a1c2eeded1 |
| 147df00155 |
| 5155c59293 |
| b5b3c77ba3 |
| 1d0aa8d7f1 |
| 5b001f9584 |
| d06acde882 |
| bacfe99ef2 |
| b4090e74c0 |
| c3e546e256 |
| f83371bbf9 |
| c858cc7d44 |
| 65505f91b2 |
| c8501b1768 |
| 3a8eb88e1a |
| 003d8840fd |
| 41ff5c2a11 |
| 571caf50ff |
| 9fd3ae6563 |
| cde3361458 |
| f055011b86 |
| 16429a44b4 |
| fe0b7e88e6 |
| 28dd9da6ac |
| 02c16a2972 |
| 324427513c |
| c12b9b360a |
| cc06142050 |
| 6726cafff4 |
| 35312ea1ff |
| 721eee9c88 |
| 421f9d0293 |
| 7eb4e41708 |
| a2254bbe80 |
| 1b5bdadf10 |
| 882b29d7a1 |
| c60f6f511e |
| 07955286f1 |
| 258eeddc74 |
| b09e9e6aaa |
| 0df371adb7 |
| 8144986a48 |
| d2aab7369d |
| 946d4e8c2c |
| 0b5f25a263 |
| c669d66969 |
| 082e08e7f4 |
| a3fd03d32a |
| 5645645c5f |
| ac7d1efb8d |
| a676061b3e |
| 1c34cebbde |
| 22a2ce0559 |
| 313b1bf1ba |
| 66413f92e3 |
| 0fcbc6bbe9 |
| f1d8318500 |
| b2f327d48f |
| b772937354 |
| a3b9543d0e |
| d0a423df9f |
| 8600bf216a |
| bf270e4e87 |
| 760cb2344e |
| ed2c1038e6 |
| d4bdc677a9 |
| 9d58d961bd |
| 791b75b3af |
| a7f52fe0eb |
| 439353d318 |
| f245a68790 |
| 3fe0538468 |
| dbded94324 |
| 4301f1dbab |
| 9fdb778330 |
| 0300d7baf6 |
| ee1b1ae5e6 |
| b20f7cd1ad |
| eb8f0be004 |
| 68aa13e216 |
| 2a5c2b18e7 |
| e64086b7cf |
| 08946059ad |
| 10b17dc66b |
| 1ff8a311ae |
| 2686fa1d76 |
| 376d2b28d3 |
| a3609696c7 |
| b0c12acbc6 |
| 3dfcf2a536 |
| ecb56013a5 |
| 27b52f7fd1 |
| 234441e6a3 |
| 011f62829a |
| c34584a215 |
| 1fca69fa66 |
| a0fc4c04d9 |
| 07ac7e3376 |
| f94e175082 |
| e66573b315 |
| 27cdcdef58 |
| 383e6340ea |
| 5a4a4d7e3a |
| 6e4c136614 |
| e2fdeaabb2 |
| 408d211f27 |
| 2844e1fc32 |
| a68aab1452 |
| c0707d17bb |
| 6887b2a069 |
| cac0d37e53 |
| f327ea29f1 |
| 5bc5d98823 |
| 09565bd827 |
| 7adc0582aa |
| 4f9e74ceb4 |
| c2f74dc369 |
| ae40a3a7f8 |
| 027112db9a |
| 9a2a8f029f |
| 877d90d1e2 |
| c6677d414d |
| 97b7bfb7fb |
| 8f76e92804 |
| 2ed2b864b2 |
| 40cbf86f0f |
| caaa5dc0cc |
| 70932900cd |
| 393f704e52 |
| abd399aacd |
| 4c6cc1116b |
| a4c549e7c3 |
| 608a7fb9c6 |
| 4177efc991 |
| c8007078c0 |
| cd26d5bac4 |
| e93a8a0dec |
@@ -1,34 +1,5 @@
 [alias]
-recorder = "run -p recorder --bin recorder_cli -- --environment development"
 recorder-playground = "run -p recorder --example playground -- --environment development"
 
 [build]
-rustflags = ["-Zthreads=12", "-Clink-arg=-fuse-ld=lld"]
-
-[target.x86_64-unknown-linux-gnu]
-linker = "clang"
-rustflags = ["-Zthreads=8", "-Clink-arg=-fuse-ld=lld", "-Zshare-generics=y"]
-
-[target.x86_64-pc-windows-msvc]
-linker = "rust-lld.exe"
-rustflags = ["-Zthreads=8", "-Zshare-generics=n"]
-
-# NOTE: you must install [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on mac. you can easily do this by installing llvm which includes lld with the "brew" package manager:
-# `brew install llvm`
-#[target.x86_64-apple-darwin]
-#rustflags = [
-# "-Zthreads=8",
-# "-C",
-# "link-arg=-fuse-ld=/usr/local/opt/llvm/bin/ld64.lld",
-# "-Zshare-generics=y",
-#]
-
-# NOTE: you must install [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on mac. you can easily do this by installing llvm which includes lld with the "brew" package manager:
-# `brew install llvm`
-#[target.aarch64-apple-darwin]
-#rustflags = [
-# "-Zthreads=8",
-# "-C",
-# "link-arg=-fuse-ld=/opt/homebrew/opt/llvm/bin/ld64.lld",
-# "-Zshare-generics=y",
-#]
+rustflags = ["-Zthreads=8", "-Zshare-generics=y"]
.editorconfig (new file)
@@ -0,0 +1,10 @@
+root = true
+
+[*]
+indent_style = space
+indent_size = 4
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+end_of_line = lf
+max_line_length = null
.gitattributes (new file, vendored)
@@ -0,0 +1 @@
+**/tests/resources/** linguist-detectable=false
.github/workflows/testing-torrents-container.yaml (new file, vendored)
@@ -0,0 +1,36 @@
+name: Testing Torrents Container
+
+on:
+  workflow_dispatch:
+
+env:
+  REGISTRY: ghcr.io
+  ORG: dumtruck
+  PROJECT: konobangu
+
+jobs:
+  build-container:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: Log in to GHCR
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Build and push Docker image
+        uses: docker/build-push-action@v5
+        with:
+          context: 'packages/testing-torrents'
+          file: 'packages/testing-torrents/Dockerfile'
+          push: true
+          tags: '${{ env.REGISTRY }}/${{ env.ORG }}/${{ env.PROJECT }}-testing-torrents:latest'
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
.gitignore (vendored)
@@ -112,7 +112,7 @@ coverage
 # nyc tests coverage
 .nyc_output
 
-# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+# Grunt intermediate dal (https://gruntjs.com/creating-plugins#storing-task-files)
 .grunt
 
 # Bower dependency directory (https://bower.io/)
@@ -127,6 +127,8 @@ build/Release
 # Dependency directories
 node_modules/
 jspm_packages/
+.pnp
+.pnp.js
 
 # Snowpack dependency directory (https://snowpack.dev/)
 web_modules/
@@ -155,9 +157,9 @@ web_modules/
 # Yarn Integrity file
 .yarn-integrity
 
-# dotenv environment variables file
-.env
-.env.test
+# Local env files
+.env.local
+.env.*.local
 
 # parcel-bundler cache (https://parceljs.org/)
 .cache
@@ -214,6 +216,7 @@ index.d.ts.map
 # Added by cargo
 
 /target
+/ide-target
 !/examples/.gitkeep
 /.env
 /.env.bk
@@ -222,6 +225,35 @@ index.d.ts.map
 
 /temp
 /rustc-ice-*
-/crates/recorder/config/test.local.env
-**/*.local.yaml
-**/*.local.env
+# Misc
+.DS_Store
+*.pem
+
+# Sentry Config File
+.env.sentry-build-plugin
+
+# BaseHub
+.basehub
+
+# Build Outputs
+build
+dist
+
+# Turbo
+.turbo
+
+# Vercel
+.vercel
+
+# Payload default media upload directory
+public/media/
+
+public/robots.txt
+public/sitemap*.xml
+
+# Custom
+/data
+
+patches/*
+!patches/.gitkeep
.prettierignore (new file)
@@ -0,0 +1,14 @@
+**/payload-types.ts
+.tmp
+**/.git
+**/.hg
+**/.pnp.*
+**/.svn
+**/.yarn/**
+**/build
+**/dist/**
+**/node_modules
+**/temp
+**/docs/**
+tsconfig.json
+
.vscode/extensions.json (new file, vendored)
@@ -0,0 +1,11 @@
+{
+  "recommendations": [
+    "biomejs.biome",
+    "bradlc.vscode-tailwindcss",
+    "unifiedjs.vscode-mdx",
+    "mikestead.dotenv",
+    "christian-kohler.npm-intellisense",
+    "skellock.just",
+    "zerotaskx.rust-extension-pack"
+  ]
+}
.vscode/launch.json (new file, vendored)
@@ -0,0 +1,121 @@
+{
+  // Use IntelliSense to learn about possible attributes.
+  // Hover to view descriptions of existing attributes.
+  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+  "version": "0.2.0",
+  "configurations": [
+    {
+      "type": "lldb",
+      "request": "launch",
+      "name": "debug recorder bin",
+      "cargo": {
+        "args": [
+          "build",
+          "--bin=recorder_cli",
+          "--package=recorder",
+        ],
+        "filter": {
+          "name": "recorder_cli",
+          "kind": "bin"
+        }
+      },
+      "args": [
+        "start",
+        "--environment",
+        "development"
+      ],
+      "cwd": "${workspaceFolder}"
+    },
+    {
+      "type": "lldb",
+      "request": "launch",
+      "name": "debug playground example",
+      "cargo": {
+        "args": [
+          "build",
+          "--example=playground",
+          "--package=recorder",
+        ],
+        "filter": {
+          "name": "playground",
+          "kind": "example"
+        }
+      },
+      "args": [
+        "--environment",
+        "development"
+      ],
+      "cwd": "${workspaceFolder}"
+    },
+    {
+      "type": "lldb",
+      "request": "launch",
+      "name": "debug recorder lib",
+      "cargo": {
+        "args": [
+          "test",
+          "--no-run",
+          "--test=mod",
+          "--package=recorder"
+        ],
+        "filter": {
+          "name": "mod",
+          "kind": "test"
+        }
+      },
+      "args": [],
+      "cwd": "${workspaceFolder}"
+    },
+    {
+      "name": "Next.js: debug server-side",
+      "type": "node-terminal",
+      "request": "launch",
+      "command": "pnpm dev"
+    },
+    {
+      "name": "Next.js: debug client-side (app)",
+      "type": "chrome",
+      "request": "launch",
+      "url": "http://localhost:5000"
+    },
+    {
+      "name": "Next.js: debug client-side (web)",
+      "type": "chrome",
+      "request": "launch",
+      "url": "http://localhost:5001"
+    },
+    {
+      "name": "Next.js: debug client-side (api)",
+      "type": "chrome",
+      "request": "launch",
+      "url": "http://localhost:5002"
+    },
+    {
+      "name": "Next.js: debug client-side (email)",
+      "type": "chrome",
+      "request": "launch",
+      "url": "http://localhost:5003"
+    },
+    {
+      "name": "Next.js: debug client-side (app)",
+      "type": "chrome",
+      "request": "launch",
+      "url": "http://localhost:5004"
+    },
+    {
+      "name": "Next.js: debug full stack",
+      "type": "node",
+      "request": "launch",
+      "program": "${workspaceFolder}/node_modules/.bin/next",
+      "runtimeArgs": ["--inspect"],
+      "skipFiles": ["<node_internals>/**"],
+      "serverReadyAction": {
+        "action": "debugWithEdge",
+        "killOnServerStop": true,
+        "pattern": "- Local:.+(https?://.+)",
+        "uriFormat": "%s",
+        "webRoot": "${workspaceFolder}"
+      }
+    }
+  ]
+}
.vscode/settings.json (vendored)
@@ -1,3 +1,44 @@
 {
-  "rust-analyzer.showUnlinkedFileNotification": false
+  "npm.packageManager": "pnpm",
+  "[javascript]": {
+    "editor.defaultFormatter": "vscode.typescript-language-features",
+    "editor.formatOnSave": true
+  },
+  "[json]": {
+    "editor.defaultFormatter": "biomejs.biome",
+    "editor.formatOnSave": true
+  },
+  "[jsonc]": {
+    "editor.defaultFormatter": "biomejs.biome",
+    "editor.formatOnSave": true
+  },
+  "[typescript]": {
+    "editor.defaultFormatter": "biomejs.biome",
+    "editor.formatOnSave": true
+  },
+  "[typescriptreact]": {
+    "editor.defaultFormatter": "biomejs.biome",
+    "editor.formatOnSave": true
+  },
+  "editor.codeActionsOnSave": {
+    "quickfix.biome": "explicit",
+    "source.organizeImports.biome": "explicit"
+  },
+  "emmet.showExpandedAbbreviation": "never",
+  "prettier.enable": false,
+  "typescript.tsdk": "node_modules/typescript/lib",
+  "rust-analyzer.showUnlinkedFileNotification": false,
+  "sqltools.connections": [
+    {
+      "previewLimit": 50,
+      "server": "localhost",
+      "port": 5432,
+      "driver": "PostgreSQL",
+      "name": "konobangu-dev",
+      "database": "konobangu",
+      "username": "konobangu"
+    }
+  ],
+  "rust-analyzer.cargo.features": "all",
+  "rust-analyzer.testExplorer": true
 }
.vscode/tasks.json (new file, vendored)
@@ -0,0 +1,112 @@
+{
+  "version": "2.0.0",
+  "tasks": [
+    {
+      "label": "dev-all",
+      "dependsOn": [
+        "dev-webui",
+        "dev-recorder",
+        "dev-proxy",
+        "dev-codegen-wait",
+        "dev-deps",
+      ],
+      "dependsOrder": "parallel",
+      "group": {
+        "kind": "build",
+        "isDefault": false,
+      },
+      "presentation": {
+        "group": "new-group",
+        "echo": true,
+        "reveal": "always",
+        "panel": "shared",
+        "clear": false
+      }
+    },
+    {
+      "label": "dev-webui",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-webui"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "always",
+        "focus": true,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-deps",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-deps"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-codegen-wait",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-codegen-wait"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-recorder",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-recorder"
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    },
+    {
+      "label": "dev-proxy",
+      "type": "shell",
+      "command": "just",
+      "args": [
+        "dev-proxy",
+      ],
+      "isBackground": true,
+      "problemMatcher": [],
+      "presentation": {
+        "panel": "dedicated",
+        "reveal": "never",
+        "focus": false,
+        "showReuseMessage": true,
+        "clear": true,
+      }
+    }
+  ]
+}
Cargo.lock (generated, 8358 lines changed)
File diff suppressed because it is too large.
Cargo.toml
@@ -1,11 +1,88 @@
-cargo-features = ["codegen-backend"]
+# cargo-features = ["codegen-backend"]
 
 [workspace]
 members = [
-  "crates/quirks_path",
-  "crates/recorder"
+  "packages/testing-torrents",
+  "packages/util",
+  "packages/util-derive",
+  "packages/fetch",
+  "packages/downloader",
+  "apps/recorder",
+  "apps/proxy",
 ]
 resolver = "2"
 
 [profile.dev]
-#debug = 0
-codegen-backend = "cranelift"
+# [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
+# codegen-backend = "cranelift"
+
+[workspace.dependencies]
+testing-torrents = { path = "./packages/testing-torrents" }
+util = { path = "./packages/util" }
+util-derive = { path = "./packages/util-derive" }
+fetch = { path = "./packages/fetch" }
+downloader = { path = "./packages/downloader" }
+recorder = { path = "./apps/recorder" }
+
+reqwest = { version = "0.12.20", features = [
+  "charset",
+  "http2",
+  "json",
+  "macos-system-configuration",
+  "cookies",
+] }
+moka = "0.12.10"
+futures = "0.3.31"
+quirks_path = "0.1.1"
+snafu = { version = "0.8.0", features = ["futures"] }
+testcontainers = { version = "0.24.0" }
+testcontainers-modules = { version = "0.12.1" }
+testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
+serde = { version = "1.0.219", features = ["derive"] }
+tokio = { version = "1.46", features = [
+  "macros",
+  "fs",
+  "rt-multi-thread",
+  "signal",
+] }
+serde_json = "1.0.140"
+async-trait = "0.1.88"
+tracing = "0.1"
+url = "2.5.2"
+anyhow = "1"
+itertools = "0.14"
+chrono = "0.4"
+bytes = "1"
+serde_with = "3"
+regex = "1.11"
+lazy_static = "1.5"
+axum = { version = "0.8.3", features = ["macros"] }
+tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
+axum-extra = { version = "0.10", features = ["typed-header"] }
+mockito = { version = "1.6.1" }
+convert_case = "0.8"
+color-eyre = "0.6.5"
+inquire = "0.7.5"
+image = "0.25.6"
+uuid = { version = "1.6.0", features = ["v7"] }
+maplit = "1.0.2"
+once_cell = "1.20.2"
+rand = "0.9.1"
+rust_decimal = "1.37.2"
+base64 = "0.22.1"
+nom = "8.0.0"
+percent-encoding = "2.3.1"
+num-traits = "0.2.19"
+http = "1.2.0"
+async-stream = "0.3.6"
+serde_variant = "0.1.3"
+tracing-appender = "0.2.3"
+clap = "4.5.41"
+ipnetwork = "0.21.1"
+typed-builder = "0.21.0"
+nanoid = "0.4.0"
+webp = "0.3.0"
+
+
+[patch.crates-io]
+seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "292cdd2" }
README.md
@@ -1,3 +1,8 @@
-# KONOBUNGU
+<h1 align="center">
+<img src="./assets/icon.png" height=180>
+<br />
+<b>Konobangu</b>
+<div align="center"><img src="https://img.shields.io/badge/status-work--in--progress-blue" alt="status-badge" /></div>
+</h1>
 
-Kono Bangumi?
+<p align="center">Kono bangumi?</p>
apps/docs/api-reference/endpoint/create.mdx (new file)
@@ -0,0 +1,4 @@
+---
+title: 'Create Plant'
+openapi: 'POST /plants'
+---
apps/docs/api-reference/endpoint/delete.mdx (new file)
@@ -0,0 +1,4 @@
+---
+title: 'Delete Plant'
+openapi: 'DELETE /plants/{id}'
+---
apps/docs/api-reference/endpoint/get.mdx (new file)
@@ -0,0 +1,4 @@
+---
+title: 'Get Plants'
+openapi: 'GET /plants'
+---
apps/docs/api-reference/introduction.mdx (new file)
@@ -0,0 +1,33 @@
+---
+title: 'Introduction'
+description: 'Example section for showcasing API endpoints'
+---
+
+<Note>
+  If you're not looking to build API reference documentation, you can delete
+  this section by removing the api-reference folder.
+</Note>
+
+## Welcome
+
+There are two ways to build API documentation: [OpenAPI](https://mintlify.com/docs/api-playground/openapi/setup) and [MDX components](https://mintlify.com/docs/api-playground/mdx/configuration). For the starter kit, we are using the following OpenAPI specification.
+
+<Card
+  title="Plant Store Endpoints"
+  icon="leaf"
+  href="https://github.com/mintlify/starter/blob/main/api-reference/openapi.json"
+>
+  View the OpenAPI specification file
+</Card>
+
+## Authentication
+
+All API endpoints are authenticated using Bearer tokens and picked up from the specification file.
+
+```json
+"security": [
+  {
+    "bearerAuth": []
+  }
+]
+```
apps/docs/api-reference/openapi.json (new file)
@@ -0,0 +1,195 @@
+{
+  "openapi": "3.0.1",
+  "info": {
+    "title": "OpenAPI Plant Store",
+    "description": "A sample API that uses a plant store as an example to demonstrate features in the OpenAPI specification",
+    "license": {
+      "name": "MIT"
+    },
+    "version": "1.0.0"
+  },
+  "servers": [
+    {
+      "url": "http://sandbox.mintlify.com"
+    }
+  ],
+  "security": [
+    {
+      "bearerAuth": []
+    }
+  ],
+  "paths": {
+    "/plants": {
+      "get": {
+        "description": "Returns all plants from the system that the user has access to",
+        "parameters": [
+          {
+            "name": "limit",
+            "in": "query",
+            "description": "The maximum number of results to return",
+            "schema": {
+              "type": "integer",
+              "format": "int32"
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "Plant response",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "array",
+                  "items": {
+                    "$ref": "#/components/schemas/Plant"
+                  }
+                }
+              }
+            }
+          },
+          "400": {
+            "description": "Unexpected error",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/Error"
+                }
+              }
+            }
+          }
+        }
+      },
+      "post": {
+        "description": "Creates a new plant in the store",
+        "requestBody": {
+          "description": "Plant to add to the store",
+          "content": {
+            "application/json": {
+              "schema": {
+                "$ref": "#/components/schemas/NewPlant"
+              }
+            }
+          },
+          "required": true
+        },
+        "responses": {
+          "200": {
+            "description": "plant response",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/Plant"
+                }
+              }
+            }
+          },
+          "400": {
+            "description": "unexpected error",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/Error"
+                }
+              }
+            }
+          }
+        }
+      }
+    },
+    "/plants/{id}": {
+      "delete": {
+        "description": "Deletes a single plant based on the ID supplied",
+        "parameters": [
+          {
+            "name": "id",
+            "in": "path",
+            "description": "ID of plant to delete",
+            "required": true,
+            "schema": {
+              "type": "integer",
+              "format": "int64"
+            }
+          }
+        ],
+        "responses": {
+          "204": {
+            "description": "Plant deleted",
+            "content": {}
+          },
+          "400": {
+            "description": "unexpected error",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/Error"
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  },
+  "components": {
+    "schemas": {
+      "Plant": {
+        "required": [
+          "name"
+        ],
+        "type": "object",
+        "properties": {
+          "name": {
+            "description": "The name of the plant",
+            "type": "string"
+          },
+          "tag": {
+            "description": "Tag to specify the type",
+            "type": "string"
+          }
+        }
+      },
+      "NewPlant": {
+        "allOf": [
+          {
+            "$ref": "#/components/schemas/Plant"
+          },
+          {
+            "required": [
+              "id"
+            ],
+            "type": "object",
+            "properties": {
+              "id": {
+                "description": "Identification number of the plant",
+                "type": "integer",
+                "format": "int64"
+              }
+            }
+          }
+        ]
+      },
+      "Error": {
+        "required": [
+          "error",
+          "message"
+        ],
+        "type": "object",
+        "properties": {
+          "error": {
+            "type": "integer",
+            "format": "int32"
+          },
+          "message": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "securitySchemes": {
+      "bearerAuth": {
+        "type": "http",
+        "scheme": "bearer"
+      }
+    }
+  }
+}
apps/docs/development.mdx (new file)
@@ -0,0 +1,98 @@
+---
+title: 'Development'
+description: 'Learn how to preview changes locally'
+---
+
+<Info>
+  **Prerequisite** You should have installed Node.js (version 18.10.0 or
+  higher).
+</Info>
+
+Step 1. Install Mintlify on your OS:
+
+<CodeGroup>
+
+```bash npm
+npm i -g mintlify
+```
+
+```bash yarn
+yarn global add mintlify
+```
+
+</CodeGroup>
+
+Step 2. Go to the docs are located (where you can find `mint.json`) and run the following command:
+
+```bash
+mintlify dev
+```
+
+The documentation website is now available at `http://localhost:5000`.
+
+### Custom Ports
+
+Mintlify uses port 5000 by default. You can use the `--port` flag to customize the port Mintlify runs on. For example, use this command to run in port 3333:
+
+```bash
+mintlify dev --port 3333
+```
+
+You will see an error like this if you try to run Mintlify in a port that's already taken:
+
+```md
+Error: listen EADDRINUSE: address already in use :::5000
+```
+
+## Mintlify Versions
+
+Each CLI is linked to a specific version of Mintlify. Please update the CLI if your local website looks different than production.
+
+<CodeGroup>
+
+```bash npm
+npm i -g mintlify@latest
+```
+
+```bash yarn
+yarn global upgrade mintlify
+```
+
+</CodeGroup>
+
+## Deployment
+
+<Tip>
+  Unlimited editors available under the [Startup
+  Plan](https://mintlify.com/pricing)
+</Tip>
+
+You should see the following if the deploy successfully went through:
+
+<Frame>
+  <img src="/images/checks-passed.png" style={{ borderRadius: '0.5rem' }} />
+</Frame>
+
+## Troubleshooting
+
+Here's how to solve some common problems when working with the CLI.
+
+<AccordionGroup>
+  <Accordion title="Mintlify is not loading">
+    Update to Node v18. Run `mintlify install` and try again.
+  </Accordion>
+  <Accordion title="No such file or directory on Windows">
+    Go to the `C:/Users/Username/.mintlify/` directory and remove the `mint`
+    folder. Then Open the Git Bash in this location and run `git clone
+    https://github.com/mintlify/mint.git`.
+
+    Repeat step 3.
+
+  </Accordion>
+  <Accordion title="Getting an unknown error">
+    Try navigating to the root of your device and delete the ~/.mintlify folder.
+    Then run `mintlify dev` again.
+  </Accordion>
+</AccordionGroup>
+
+Curious about what changed in a CLI version? [Check out the CLI changelog.](/changelog/command-line)
apps/docs/essentials/code.mdx (new file)
@@ -0,0 +1,37 @@
+---
+title: 'Code Blocks'
+description: 'Display inline code and code blocks'
+icon: 'code'
+---
+
+## Basic
+
+### Inline Code
+
+To denote a `word` or `phrase` as code, enclose it in backticks (`).
+
+```
+To denote a `word` or `phrase` as code, enclose it in backticks (`).
+```
+
+### Code Block
+
+Use [fenced code blocks](https://www.markdownguide.org/extended-syntax/#fenced-code-blocks) by enclosing code in three backticks and follow the leading ticks with the programming language of your snippet to get syntax highlighting. Optionally, you can also write the name of your code after the programming language.
+
+```java HelloWorld.java
+class HelloWorld {
+  public static void main(String[] args) {
+    System.out.println("Hello, World!");
+  }
+}
+```
+
+````md
+```java HelloWorld.java
+class HelloWorld {
+  public static void main(String[] args) {
+    System.out.println("Hello, World!");
+  }
+}
+```
+````
apps/docs/essentials/images.mdx (new file)
@@ -0,0 +1,59 @@
+---
+title: 'Images and Embeds'
+description: 'Add image, video, and other HTML elements'
+icon: 'image'
+---
+
+<img
+  style={{ borderRadius: '0.5rem' }}
+  src="https://mintlify-assets.b-cdn.net/bigbend.jpg"
+/>
+
+## Image
+
+### Using Markdown
+
+The [markdown syntax](https://www.markdownguide.org/basic-syntax/#images) lets you add images using the following code
+
+```md
+![title](/path/image.jpg)
+```
+
+Note that the image file size must be less than 5MB. Otherwise, we recommend hosting on a service like [Cloudinary](https://cloudinary.com/) or [S3](https://aws.amazon.com/s3/). You can then use that URL and embed.
+
+### Using Embeds
+
+To get more customizability with images, you can also use [embeds](/writing-content/embed) to add images
+
+```html
+<img height="200" src="/path/image.jpg" />
+```
+
+## Embeds and HTML elements
+
+<iframe
+  width="560"
+  height="315"
+  src="https://www.youtube.com/embed/4KzFe50RQkQ"
+  title="YouTube video player"
+  frameBorder="0"
+  allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
+  allowFullScreen
+  style={{ width: '100%', borderRadius: '0.5rem' }}
+></iframe>
+
+<br />
+
+<Tip>
+
+Mintlify supports [HTML tags in Markdown](https://www.markdownguide.org/basic-syntax/#html). This is helpful if you prefer HTML tags to Markdown syntax, and lets you create documentation with infinite flexibility.
+
+</Tip>
+
+### iFrames
+
+Loads another HTML page within the document. Most commonly used for embedding videos.
+
+```html
+<iframe src="https://www.youtube.com/embed/4KzFe50RQkQ"> </iframe>
+```
apps/docs/essentials/markdown.mdx (new file)
@@ -0,0 +1,88 @@
+---
+title: 'Markdown Syntax'
+description: 'Text, title, and styling in standard markdown'
+icon: 'text-size'
+---
+
+## Titles
+
+Best used for section headers.
+
+```md
+## Titles
+```
+
+### Subtitles
+
+Best use to subsection headers.
+
+```md
+### Subtitles
+```
+
+<Tip>
+
+Each **title** and **subtitle** creates an anchor and also shows up on the table of contents on the right.
+
+</Tip>
+
+## Text Formatting
+
+We support most markdown formatting. Simply add `**`, `_`, or `~` around text to format it.
+
+| Style | How to write it | Result |
+| ------------- | ----------------- | --------------- |
+| Bold | `**bold**` | **bold** |
+| Italic | `_italic_` | _italic_ |
+| Strikethrough | `~strikethrough~` | ~strikethrough~ |
+
+You can combine these. For example, write `**_bold and italic_**` to get **_bold and italic_** text.
+
+You need to use HTML to write superscript and subscript text. That is, add `<sup>` or `<sub>` around your text.
+
+| Text Size | How to write it | Result |
+| ----------- | ------------------------ | ---------------------- |
+| Superscript | `<sup>superscript</sup>` | <sup>superscript</sup> |
+| Subscript | `<sub>subscript</sub>` | <sub>subscript</sub> |
+
+## Linking to Pages
+
+You can add a link by wrapping text in `[]()`. You would write `[link to google](https://google.com)` to [link to google](https://google.com).
+
+Links to pages in your docs need to be root-relative. Basically, you should include the entire folder path. For example, `[link to text](/writing-content/text)` links to the page "Text" in our components section.
+
+Relative links like `[link to text](../text)` will open slower because we cannot optimize them as easily.
+
+## Blockquotes
+
+### Singleline
+
+To create a blockquote, add a `>` in front of a paragraph.
+
+> Dorothy followed her through many of the beautiful rooms in her castle.
+
+```md
+> Dorothy followed her through many of the beautiful rooms in her castle.
+```
+
+### Multiline
+
+> Dorothy followed her through many of the beautiful rooms in her castle.
+>
+> The Witch bade her clean the pots and kettles and sweep the floor and keep the fire fed with wood.
+
+```md
+> Dorothy followed her through many of the beautiful rooms in her castle.
+>
+> The Witch bade her clean the pots and kettles and sweep the floor and keep the fire fed with wood.
+```
+
+### LaTeX
+
+Mintlify supports [LaTeX](https://www.latex-project.org) through the Latex component.
+
+<Latex>8 x (vk x H1 - H2) = (0,1)</Latex>
+
+```md
+<Latex>8 x (vk x H1 - H2) = (0,1)</Latex>
+```
apps/docs/essentials/navigation.mdx (new file)
@@ -0,0 +1,66 @@
+---
+title: 'Navigation'
+description: 'The navigation field in mint.json defines the pages that go in the navigation menu'
+icon: 'map'
+---
+
+The navigation menu is the list of links on every website.
+
+You will likely update `mint.json` every time you add a new page. Pages do not show up automatically.
+
+## Navigation syntax
+
+Our navigation syntax is recursive which means you can make nested navigation groups. You don't need to include `.mdx` in page names.
+
+<CodeGroup>
+
+```json Regular Navigation
+"navigation": [
+  {
+    "group": "Getting Started",
+    "pages": ["quickstart"]
+  }
+]
+```
+
+```json Nested Navigation
+"navigation": [
+  {
+    "group": "Getting Started",
+    "pages": [
+      "quickstart",
+      {
+        "group": "Nested Reference Pages",
+        "pages": ["nested-reference-page"]
+      }
+    ]
+  }
+]
+```
+
+</CodeGroup>
+
+## Folders
+
+Simply put your MDX files in folders and update the paths in `mint.json`.
+
+For example, to have a page at `https://yoursite.com/your-folder/your-page` you would make a folder called `your-folder` containing an MDX file called `your-page.mdx`.
+
+<Warning>
+
+You cannot use `api` for the name of a folder unless you nest it inside another folder. Mintlify uses Next.js which reserves the top-level `api` folder for internal server calls. A folder name such as `api-reference` would be accepted.
+
+</Warning>
+
+```json Navigation With Folder
+"navigation": [
+  {
+    "group": "Group Name",
+    "pages": ["your-folder/your-page"]
+  }
+]
+```
+
+## Hidden Pages
+
+MDX files not included in `mint.json` will not show up in the sidebar but are accessible through the search bar and by linking directly to them.
apps/docs/essentials/reusable-snippets.mdx (new file)
@@ -0,0 +1,110 @@
+---
+title: Reusable Snippets
+description: Reusable, custom snippets to keep content in sync
+icon: 'recycle'
+---
+
+import SnippetIntro from '/snippets/snippet-intro.mdx';
+
+<SnippetIntro />
+
+## Creating a custom snippet
+
+**Pre-condition**: You must create your snippet file in the `snippets` directory.
+
+<Note>
+  Any page in the `snippets` directory will be treated as a snippet and will not
+  be rendered into a standalone page. If you want to create a standalone page
+  from the snippet, import the snippet into another file and call it as a
+  component.
+</Note>
+
+### Default export
+
+1. Add content to your snippet file that you want to re-use across multiple
+   locations. Optionally, you can add variables that can be filled in via props
+   when you import the snippet.
+
+```mdx snippets/my-snippet.mdx
+Hello world! This is my content I want to reuse across pages. My keyword of the
+day is {word}.
+```
+
+<Warning>
+  The content that you want to reuse must be inside the `snippets` directory in
+  order for the import to work.
+</Warning>
+
+2. Import the snippet into your destination file.
+
+```mdx destination-file.mdx
+---
+title: My title
+description: My Description
+---
+
+import MySnippet from '/snippets/path/to/my-snippet.mdx';
+
+## Header
+
+Lorem impsum dolor sit amet.
+
+<MySnippet word="bananas" />
+```
+
+### Reusable variables
+
+1. Export a variable from your snippet file:
+
+```mdx snippets/path/to/custom-variables.mdx
+export const myName = 'my name';
+
+export const myObject = { fruit: 'strawberries' };
+```
+
+2. Import the snippet from your destination file and use the variable:
+
+```mdx destination-file.mdx
+---
+title: My title
+description: My Description
+---
+
+import { myName, myObject } from '/snippets/path/to/custom-variables.mdx';
+
+Hello, my name is {myName} and I like {myObject.fruit}.
+```
+
+### Reusable components
+
+1. Inside your snippet file, create a component that takes in props by exporting
+   your component in the form of an arrow function.
+
+```mdx snippets/custom-component.mdx
+export const MyComponent = ({ title }) => (
+  <div>
+    <h1>{title}</h1>
+    <p>... snippet content ...</p>
+  </div>
+);
+```
+
+<Warning>
+  MDX does not compile inside the body of an arrow function. Stick to HTML
+  syntax when you can or use a default export if you need to use MDX.
+</Warning>
+
+2. Import the snippet into your destination file and pass in the props
+
+```mdx destination-file.mdx
+---
+title: My title
+description: My Description
+---
+
+import { MyComponent } from '/snippets/custom-component.mdx';
+
+Lorem ipsum dolor sit amet.
+
+<MyComponent title={'Custom title'} />
+```
318
apps/docs/essentials/settings.mdx
Normal file
318
apps/docs/essentials/settings.mdx
Normal file
@@ -0,0 +1,318 @@
|
|||||||
|
---
|
||||||
|
title: 'Global Settings'
|
||||||
|
description: 'Mintlify gives you complete control over the look and feel of your documentation using the mint.json file'
|
||||||
|
icon: 'gear'
|
||||||
|
---
|
||||||
|
|
||||||
|
Every Mintlify site needs a `mint.json` file with the core configuration settings. Learn more about the [properties](#properties) below.
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
<ResponseField name="name" type="string" required>
|
||||||
|
Name of your project. Used for the global title.
|
||||||
|
|
||||||
|
Example: `mintlify`
|
||||||
|
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="navigation" type="Navigation[]" required>
|
||||||
|
An array of groups with all the pages within that group
|
||||||
|
<Expandable title="Navigation">
|
||||||
|
<ResponseField name="group" type="string">
|
||||||
|
The name of the group.
|
||||||
|
|
||||||
|
Example: `Settings`
|
||||||
|
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="pages" type="string[]">
|
||||||
|
The relative paths to the markdown files that will serve as pages.
|
||||||
|
|
||||||
|
Example: `["customization", "page"]`
|
||||||
|
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="logo" type="string or object">
|
||||||
|
Path to logo image or object with path to "light" and "dark" mode logo images
|
||||||
|
<Expandable title="Logo">
|
||||||
|
<ResponseField name="light" type="string">
|
||||||
|
Path to the logo in light mode
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="dark" type="string">
|
||||||
|
Path to the logo in dark mode
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="href" type="string" default="/">
|
||||||
|
Where clicking on the logo links you to
|
||||||
|
</ResponseField>
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="favicon" type="string">
|
||||||
|
Path to the favicon image
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="colors" type="Colors">
|
||||||
|
Hex color codes for your global theme
|
||||||
|
<Expandable title="Colors">
|
||||||
|
<ResponseField name="primary" type="string" required>
|
||||||
|
The primary color. Used for most often for highlighted content, section
|
||||||
|
headers, accents, in light mode
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="light" type="string">
|
||||||
|
The primary color for dark mode. Used for most often for highlighted
|
||||||
|
content, section headers, accents, in dark mode
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="dark" type="string">
|
||||||
|
The primary color for important buttons
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="background" type="object">
|
||||||
|
The color of the background in both light and dark mode
|
||||||
|
<Expandable title="Object">
|
||||||
|
<ResponseField name="light" type="string" required>
|
||||||
|
The hex color code of the background in light mode
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="dark" type="string" required>
|
||||||
|
The hex color code of the background in dark mode
|
||||||
|
</ResponseField>
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="topbarLinks" type="TopbarLink[]">
|
||||||
|
Array of `name`s and `url`s of links you want to include in the topbar
|
||||||
|
<Expandable title="TopbarLink">
|
||||||
|
<ResponseField name="name" type="string">
|
||||||
|
The name of the button.
|
||||||
|
|
||||||
|
Example: `Contact us`
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="url" type="string">
|
||||||
|
The url once you click on the button. Example: `https://mintlify.com/contact`
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="topbarCtaButton" type="Call to Action">
|
||||||
|
<Expandable title="Topbar Call to Action">
|
||||||
|
<ResponseField name="type" type={'"link" or "github"'} default="link">
|
||||||
|
Link shows a button. GitHub shows the repo information at the url provided including the number of GitHub stars.
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="url" type="string">
|
||||||
|
If `link`: What the button links to.
|
||||||
|
|
||||||
|
If `github`: Link to the repository to load GitHub information from.
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="name" type="string">
|
||||||
|
Text inside the button. Only required if `type` is a `link`.
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="versions" type="string[]">
|
||||||
|
Array of version names. Only use this if you want to show different versions
|
||||||
|
of docs with a dropdown in the navigation bar.
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="anchors" type="Anchor[]">
|
||||||
|
An array of the anchors, includes the `icon`, `color`, and `url`.
|
||||||
|
<Expandable title="Anchor">
|
||||||
|
<ResponseField name="icon" type="string">
|
||||||
|
The [Font Awesome](https://fontawesome.com/search?s=brands%2Cduotone) icon used to feature the anchor.
|
||||||
|
|
||||||
|
Example: `comments`
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="name" type="string">
|
||||||
|
The name of the anchor label.
|
||||||
|
|
||||||
|
Example: `Community`
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="url" type="string">
|
||||||
|
The start of the URL that marks what pages go in the anchor. Generally, this is the name of the folder you put your pages in.
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="color" type="string">
|
||||||
|
The hex color of the anchor icon background. Can also be a gradient if you pass an object with the properties `from` and `to` that are each a hex color.
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="version" type="string">
|
||||||
|
Used if you want to hide an anchor until the correct docs version is selected.
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="isDefaultHidden" type="boolean" default="false">
|
||||||
|
Pass `true` if you want to hide the anchor until you directly link someone to docs inside it.
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="iconType" default="duotone" type="string">
|
||||||
|
One of: "brands", "duotone", "light", "sharp-solid", "solid", or "thin"
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="topAnchor" type="Object">
|
||||||
|
Override the default configurations for the top-most anchor.
|
||||||
|
<Expandable title="Object">
|
||||||
|
<ResponseField name="name" default="Documentation" type="string">
|
||||||
|
The name of the top-most anchor
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="icon" default="book-open" type="string">
|
||||||
|
Font Awesome icon.
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="iconType" default="duotone" type="string">
|
||||||
|
One of: "brands", "duotone", "light", "sharp-solid", "solid", or "thin"
|
||||||
|
</ResponseField>
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="tabs" type="Tabs[]">
|
||||||
|
An array of navigational tabs.
|
||||||
|
<Expandable title="Tabs">
|
||||||
|
<ResponseField name="name" type="string">
|
||||||
|
The name of the tab label.
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="url" type="string">
|
||||||
|
The start of the URL that marks what pages go in the tab. Generally, this
|
||||||
|
is the name of the folder you put your pages in.
|
||||||
|
</ResponseField>
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="api" type="API">
|
||||||
|
Configuration for API settings. Learn more about API pages at [API Components](/api-playground/demo).
|
||||||
|
<Expandable title="API">
|
||||||
|
<ResponseField name="baseUrl" type="string">
|
||||||
|
The base url for all API endpoints. If `baseUrl` is an array, it will enable for multiple base url
|
||||||
|
options that the user can toggle.
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="auth" type="Auth">
|
||||||
|
<Expandable title="Auth">
|
||||||
|
<ResponseField name="method" type='"bearer" | "basic" | "key"'>
|
||||||
|
The authentication strategy used for all API endpoints.
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="name" type="string">
|
||||||
|
The name of the authentication parameter used in the API playground.
|
||||||
|
|
||||||
|
If method is `basic`, the format should be `[usernameName]:[passwordName]`
|
||||||
|
</ResponseField>
|
||||||
|
<ResponseField name="inputPrefix" type="string">
|
||||||
|
The default value that's designed to be a prefix for the authentication input field.
|
||||||
|
|
||||||
|
E.g. If an `inputPrefix` of `AuthKey` would inherit the default input result of the authentication field as `AuthKey`.
|
||||||
|
</ResponseField>
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="playground" type="Playground">
|
||||||
|
Configurations for the API playground
|
||||||
|
|
||||||
|
<Expandable title="Playground">
|
||||||
|
<ResponseField name="mode" default="show" type='"show" | "simple" | "hide"'>
|
||||||
|
Whether the playground is showing, hidden, or only displaying the endpoint with no added user interactivity `simple`
|
||||||
|
|
||||||
|
Learn more at the [playground guides](/api-playground/demo)
|
||||||
|
</ResponseField>
|
||||||
|
</Expandable>
|
||||||
|
</ResponseField>
|
||||||
|
|
||||||
|
<ResponseField name="maintainOrder" type="boolean">
|
||||||
|
Enabling this flag ensures that key ordering in OpenAPI pages matches the key ordering defined in the OpenAPI file.
|
||||||
|
|
||||||
|
<Warning>This behavior will soon be enabled by default, at which point this field will be deprecated.</Warning>
|
||||||
|
</ResponseField>

</Expandable>
</ResponseField>

<ResponseField name="openapi" type="string | string[]">
A string or an array of strings of URL(s) or relative path(s) pointing to your OpenAPI file.

Examples:
<CodeGroup>
```json Absolute
"openapi": "https://example.com/openapi.json"
```
```json Relative
"openapi": "/openapi.json"
```
```json Multiple
"openapi": ["https://example.com/openapi1.json", "/openapi2.json", "/openapi3.json"]
```
</CodeGroup>

</ResponseField>

<ResponseField name="footerSocials" type="FooterSocials">
An object of social media accounts where the key:property pair represents the social media platform and the account URL.

Example:
```json
{
  "x": "https://x.com/mintlify",
  "website": "https://mintlify.com"
}
```
<Expandable title="FooterSocials">
<ResponseField name="[key]" type="string">
One of the following values: `website`, `facebook`, `x`, `discord`, `slack`, `github`, `linkedin`, `instagram`, `hacker-news`

Example: `x`
</ResponseField>
<ResponseField name="property" type="string">
The URL to the social platform.

Example: `https://x.com/mintlify`
</ResponseField>
</Expandable>
</ResponseField>

<ResponseField name="feedback" type="Feedback">
Configurations to enable feedback buttons.

<Expandable title="Feedback">
<ResponseField name="suggestEdit" type="boolean" default="false">
Enables a button to allow users to suggest edits via pull requests.
</ResponseField>
<ResponseField name="raiseIssue" type="boolean" default="false">
Enables a button to allow users to raise an issue about the documentation.
</ResponseField>
</Expandable>
</ResponseField>
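For example, enabling both feedback buttons might look like:
```json
"feedback": {
  "suggestEdit": true,
  "raiseIssue": true
}
```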

<ResponseField name="modeToggle" type="ModeToggle">
Customize the dark mode toggle.
<Expandable title="ModeToggle">
<ResponseField name="default" type={'"light" or "dark"'}>
Set if you always want to show light or dark mode for new users. When not set, we default to the same mode as the user's operating system.
</ResponseField>
<ResponseField name="isHidden" type="boolean" default="false">
Set to true to hide the dark/light mode toggle. You can combine `isHidden` with `default` to force your docs to only use light or dark mode. For example:

<CodeGroup>
```json Only Dark Mode
"modeToggle": {
  "default": "dark",
  "isHidden": true
}
```

```json Only Light Mode
"modeToggle": {
  "default": "light",
  "isHidden": true
}
```
</CodeGroup>

</ResponseField>

</Expandable>
</ResponseField>

<ResponseField name="backgroundImage" type="string">
A background image to be displayed behind every page. See examples with [Infisical](https://infisical.com/docs) and [FRPC](https://frpc.io).
</ResponseField>
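For example (the path below is a placeholder for an image committed alongside your docs):
```json
"backgroundImage": "/images/background.png"
```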
apps/docs/favicon.svg (Normal file, 49 lines)
@@ -0,0 +1,49 @@
[New 24×24 SVG favicon asset; gradient-filled vector paths omitted — 5.3 KiB]
apps/docs/images/checks-passed.png (Normal file, BIN)
Binary file not shown. (157 KiB)
apps/docs/images/hero-dark.svg (Normal file, 161 lines)
@@ -0,0 +1,161 @@
[New 700×320 SVG hero illustration (dark variant); gradient-filled paths, blurred circles, and grid lines omitted — 12 KiB]
apps/docs/images/hero-light.svg (Normal file, 155 lines)
@@ -0,0 +1,155 @@
[New 700×320 SVG hero illustration (light variant); gradient-filled paths, blurred circles, and grid lines omitted — 10 KiB]
apps/docs/introduction.mdx (Normal file, 71 lines)
@@ -0,0 +1,71 @@
---
title: Introduction
description: 'Welcome to the home of your new documentation'
---

<img
  className="block dark:hidden"
  src="/images/hero-light.svg"
  alt="Hero Light"
/>
<img
  className="hidden dark:block"
  src="/images/hero-dark.svg"
  alt="Hero Dark"
/>

## Setting up

The first step to world-class documentation is setting up your editing environments.

<CardGroup cols={2}>
  <Card
    title="Edit Your Docs"
    icon="pen-to-square"
    href="https://mintlify.com/docs/quickstart"
  >
    Get your docs set up locally for easy development
  </Card>
  <Card
    title="Preview Changes"
    icon="image"
    href="https://mintlify.com/docs/development"
  >
    Preview your changes before you push to make sure they're perfect
  </Card>
</CardGroup>

## Make it yours

Update your docs to your brand and add valuable content for the best user conversion.

<CardGroup cols={2}>
  <Card
    title="Customize Style"
    icon="palette"
    href="https://mintlify.com/docs/settings/global"
  >
    Customize your docs to your company's colors and brands
  </Card>
  <Card
    title="Reference APIs"
    icon="code"
    href="https://mintlify.com/docs/api-playground/openapi"
  >
    Automatically generate endpoints from an OpenAPI spec
  </Card>
  <Card
    title="Add Components"
    icon="screwdriver-wrench"
    href="https://mintlify.com/docs/components/accordion"
  >
    Build interactive features and designs to guide your users
  </Card>
  <Card
    title="Get Inspiration"
    icon="stars"
    href="https://mintlify.com/customers"
  >
    Check out our showcase of our favorite documentation
  </Card>
</CardGroup>
apps/docs/logo/dark.svg (Normal file, 55 lines)
@@ -0,0 +1,55 @@
[New 160×24 SVG logo wordmark (dark variant); vector paths omitted — 9.3 KiB]
apps/docs/logo/light.svg (Normal file, 51 lines)
@@ -0,0 +1,51 @@
[New 160×24 SVG logo wordmark (light variant); vector paths omitted — 9.0 KiB]
85
apps/docs/mint.json
Normal file
@@ -0,0 +1,85 @@
{
  "$schema": "https://mintlify.com/schema.json",
  "name": "Starter Kit",
  "logo": {
    "dark": "/logo/dark.svg",
    "light": "/logo/light.svg"
  },
  "favicon": "/favicon.svg",
  "colors": {
    "primary": "#0D9373",
    "light": "#07C983",
    "dark": "#0D9373",
    "anchors": {
      "from": "#0D9373",
      "to": "#07C983"
    }
  },
  "topbarLinks": [
    {
      "name": "Support",
      "url": "mailto:support@mintlify.com"
    }
  ],
  "topbarCtaButton": {
    "name": "Dashboard",
    "url": "https://dashboard.mintlify.com"
  },
  "tabs": [
    {
      "name": "API Reference",
      "url": "api-reference"
    }
  ],
  "anchors": [
    {
      "name": "Documentation",
      "icon": "book-open-cover",
      "url": "https://mintlify.com/docs"
    },
    {
      "name": "Community",
      "icon": "slack",
      "url": "https://mintlify.com/community"
    },
    {
      "name": "Blog",
      "icon": "newspaper",
      "url": "https://mintlify.com/blog"
    }
  ],
  "navigation": [
    {
      "group": "Get Started",
      "pages": ["introduction", "quickstart", "development"]
    },
    {
      "group": "Essentials",
      "pages": [
        "essentials/markdown",
        "essentials/code",
        "essentials/images",
        "essentials/settings",
        "essentials/navigation",
        "essentials/reusable-snippets"
      ]
    },
    {
      "group": "API Documentation",
      "pages": ["api-reference/introduction"]
    },
    {
      "group": "Endpoint Examples",
      "pages": [
        "api-reference/endpoint/get",
        "api-reference/endpoint/create",
        "api-reference/endpoint/delete"
      ]
    }
  ],
  "footerSocials": {
    "x": "https://x.com/mintlify",
    "github": "https://github.com/mintlify",
    "linkedin": "https://www.linkedin.com/company/mintlify"
  }
}
8
apps/docs/package.json
Normal file
@@ -0,0 +1,8 @@
{
  "name": "docs",
  "private": true,
  "scripts": {
    "dev": "npx --yes mintlify dev --port 5004",
    "lint": "npx --yes mintlify broken-links"
  }
}
86
apps/docs/quickstart.mdx
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
---
|
||||||
|
title: 'Quickstart'
|
||||||
|
description: 'Start building awesome documentation in under 5 minutes'
|
||||||
|
---
|
||||||
|
|
||||||
|
## Setup your development
|
||||||
|
|
||||||
|
Learn how to update your docs locally and deploy them to the public.
|
||||||
|
|
||||||
|
### Edit and preview
|
||||||
|
|
||||||
|
<AccordionGroup>
|
||||||
|
<Accordion icon="github" title="Clone your docs locally">
|
||||||
|
During the onboarding process, we created a repository on your Github with
|
||||||
|
your docs content. You can find this repository on our
|
||||||
|
[dashboard](https://dashboard.mintlify.com). To clone the repository
|
||||||
|
locally, follow these
|
||||||
|
[instructions](https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository)
|
||||||
|
in your terminal.
|
||||||
|
</Accordion>
|
||||||
|
<Accordion icon="rectangle-terminal" title="Preview changes">
|
||||||
|
Previewing helps you make sure your changes look as intended. We built a
|
||||||
|
command line interface to render these changes locally. 1. Install the
|
||||||
|
[Mintlify CLI](https://www.npmjs.com/package/mintlify) to preview the
|
||||||
|
documentation changes locally with this command: ``` npm i -g mintlify ```
|
||||||
|
2. Run the following command at the root of your documentation (where
|
||||||
|
`mint.json` is): ``` mintlify dev ```
|
||||||
|
</Accordion>
|
||||||
|
</AccordionGroup>
|
||||||
|
|
||||||
|
### Deploy your changes
|
||||||
|
|
||||||
|
<AccordionGroup>
|
||||||
|
|
||||||
|
<Accordion icon="message-bot" title="Install our Github app">
|
||||||
|
Our Github app automatically deploys your changes to your docs site, so you
|
||||||
|
don't need to manage deployments yourself. You can find the link to install on
|
||||||
|
your [dashboard](https://dashboard.mintlify.com). Once the bot has been
|
||||||
|
successfully installed, there should be a check mark next to the commit hash
|
||||||
|
of the repo.
|
||||||
|
</Accordion>
|
||||||
|
<Accordion icon="rocket" title="Push your changes">
|
||||||
|
[Commit and push your changes to
|
||||||
|
Git](https://docs.github.com/en/get-started/using-git/pushing-commits-to-a-remote-repository#about-git-push)
|
||||||
|
for your changes to update in your docs site. If you push and don't see that
|
||||||
|
the Github app successfully deployed your changes, you can also manually
|
||||||
|
update your docs through our [dashboard](https://dashboard.mintlify.com).
|
||||||
|
</Accordion>
|
||||||
|
|
||||||
|
</AccordionGroup>
|
||||||
|
|
||||||
|
## Update your docs
|
||||||
|
|
||||||
|
Add content directly in your files with MDX syntax and React components. You can use any of our components, or even build your own.
|
||||||
|
|
||||||
|
<CardGroup>
|
||||||
|
|
||||||
|
<Card title="Style Your Docs" icon="paintbrush" href="/settings/global">
|
||||||
|
Add flair to your docs with personalized branding.
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
<Card
|
||||||
|
title="Add API Endpoints"
|
||||||
|
icon="square-code"
|
||||||
|
href="/api-playground/configuration"
|
||||||
|
>
|
||||||
|
Implement your OpenAPI spec and enable API user interaction.
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
<Card
|
||||||
|
title="Integrate Analytics"
|
||||||
|
icon="chart-mixed"
|
||||||
|
href="/analytics/supported-integrations"
|
||||||
|
>
|
||||||
|
Draw insights from user interactions with your documentation.
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
<Card
|
||||||
|
title="Host on a Custom Domain"
|
||||||
|
icon="browser"
|
||||||
|
href="/settings/custom-domain/subdomain"
|
||||||
|
>
|
||||||
|
Keep your docs on your own website's subdomain.
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
</CardGroup>
|
||||||
4
apps/docs/snippets/snippet-intro.mdx
Normal file
@@ -0,0 +1,4 @@
One of the core principles of software development is DRY (Don't Repeat
Yourself). This principle applies to documentation as
well. If you find yourself repeating the same content in multiple places, you
should consider creating a custom snippet to keep your content in sync.
1
apps/email-playground/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
.react-email
11
apps/email-playground/emails/contact.tsx
Normal file
@@ -0,0 +1,11 @@
import { ContactTemplate } from '@konobangu/email/templates/contact';

const ExampleContactEmail = () => (
  <ContactTemplate
    name="Jane Smith"
    email="jane.smith@example.com"
    message="I'm interested in your services."
  />
);

export default ExampleContactEmail;
21
apps/email-playground/package.json
Normal file
@@ -0,0 +1,21 @@
{
  "name": "email-playground",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "build": "email build",
    "dev": "email dev --port 5003",
    "export": "email export",
    "clean": "git clean -xdf .cache dist node_modules",
    "typecheck": "tsc --noEmit --emitDeclarationOnly false"
  },
  "dependencies": {
    "@react-email/components": "^0.0.42",
    "react": "^19.0.0",
    "react-email": "^4.0.16",
    "@konobangu/email": "workspace:*"
  },
  "devDependencies": {
    "@types/react": "19.0.1"
  }
}
13
apps/email-playground/tsconfig.json
Normal file
@@ -0,0 +1,13 @@
{
  "extends": "../../tsconfig.base.json",
  "compilerOptions": {
    "composite": true,
    "jsx": "react-jsx",
    "jsxImportSource": "react",
    "module": "ESNext",
    "moduleResolution": "bundler"
  },
  "references": [{ "path": "../../packages/email" }],
  "include": ["**/*.ts", "**/*.tsx"],
  "exclude": ["node_modules"]
}
11
apps/proxy/.whistle/rules/files/0.konobangu
Normal file
@@ -0,0 +1,11 @@
```x-forwarded.json
{
  "X-Forwarded-Host": "konobangu.com",
  "X-Forwarded-Proto": "https"
}
```

#^https://konobangu.com/api*** statusCode://500
^https://konobangu.com/api*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/api$1
^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000/$1 excludeFilter://^https://konobangu.com/api***
^wss://konobangu.com/*** reqHeaders://{x-forwarded.json} ws://127.0.0.1:5000/$1 excludeFilter://^wss://konobangu.com/api
@@ -1,2 +0,0 @@
^https://webui.konobangu.com/*** http://127.0.0.1:3000/$1
^wss://webui.konobangu.com/*** ws://127.0.0.1:3000/$1
1
apps/proxy/.whistle/rules/files/1.mikan-doppel
Normal file
@@ -0,0 +1 @@
^https://mikanani.me/*** http://127.0.0.1:5005/$1 excludeFilter://^**/***.svg excludeFilter://^**/***.css excludeFilter://^**/***.js
8
apps/proxy/.whistle/rules/files/2.konobangu-prod
Normal file
@@ -0,0 +1,8 @@
```x-forwarded.json
{
  "X-Forwarded-Host": "konobangu.com",
  "X-Forwarded-Proto": "https"
}
```

^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/$1
@@ -1 +0,0 @@
^https://recorder.konobangu.com/*** http://127.0.0.1:7600/$1
@@ -1 +1 @@
-{"filesOrder":["webui","recorder"],"selectedList":["webui","recorder"],"disabledDefalutRules":true}
+{"filesOrder":["konobangu","konobangu-prod","mikan-doppel"],"selectedList":["mikan-doppel","konobangu"],"disabledDefalutRules":true,"defalutRules":""}
@@ -0,0 +1 @@
{"filesOrder":[]}
19
apps/proxy/Cargo.toml
Normal file
@@ -0,0 +1,19 @@
[package]
name = "proxy"
version = "0.1.0"
edition = "2024"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "proxy"
path = "src/lib.rs"

[[bin]]
name = "mikan_doppel"
path = "src/bin/mikan_doppel.rs"

[dependencies]
recorder = { workspace = true, features = ["playground"] }
tokio = { workspace = true }
tracing-subscriber = { workspace = true }
tracing = { workspace = true }
@@ -3,11 +3,13 @@
   "version": "0.1.0",
   "private": true,
   "scripts": {
-    "start": "whistle run -p 8899 -t 30000 -M \"keepXFF|prod|capture\" -D . --no-global-plugins"
+    "whistle": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
+    "mikan_doppel": "cargo run -p proxy --bin mikan_doppel",
+    "dev": "npm-run-all -p mikan_doppel whistle"
   },
   "keywords": [],
   "license": "MIT",
   "devDependencies": {
-    "whistle": "^2.9.61"
+    "whistle": "^2.9.99"
   }
 }
22
apps/proxy/src/bin/mikan_doppel.rs
Normal file
@@ -0,0 +1,22 @@
use std::time::Duration;

use recorder::{errors::RecorderResult, test_utils::mikan::MikanMockServer};
use tracing::Level;

#[allow(unused_variables)]
#[tokio::main]
async fn main() -> RecorderResult<()> {
    tracing_subscriber::fmt()
        .with_max_level(Level::DEBUG)
        .init();

    let mut mikan_server = MikanMockServer::new_with_port(5005).await.unwrap();

    let resources_mock = mikan_server.mock_resources_with_doppel();

    let login_mock = mikan_server.mock_get_login_page();

    loop {
        tokio::time::sleep(Duration::from_secs(1)).await;
    }
}
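The doppel binary above keeps its mock endpoints alive with an endless one-second sleep loop. A hedged sketch of an alternative, assuming the workspace `tokio` dependency enables the `signal` feature (`MikanMockServer` and the two mock calls are the ones shown above): block on Ctrl-C so the process exits cleanly instead of looping forever.

```rust
// Hedged sketch, not repository code: same setup as src/bin/mikan_doppel.rs,
// but blocking on Ctrl-C (assumes tokio's "signal" feature) instead of an
// endless sleep loop, so the process can be stopped cleanly.
use recorder::{errors::RecorderResult, test_utils::mikan::MikanMockServer};

#[tokio::main]
async fn main() -> RecorderResult<()> {
    let mut mikan_server = MikanMockServer::new_with_port(5005).await.unwrap();

    // Keep the mock guards alive for as long as the server runs.
    let _resources_mock = mikan_server.mock_resources_with_doppel();
    let _login_mock = mikan_server.mock_get_login_page();

    // Wait until the user interrupts the process.
    tokio::signal::ctrl_c().await.expect("failed to listen for Ctrl-C");
    Ok(())
}
```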
18
apps/recorder/.env.development
Normal file
@@ -0,0 +1,18 @@
LOGGER__LEVEL = "debug"

DATABASE__URI = "postgres://konobangu:konobangu@localhost:5432/konobangu"

AUTH__AUTH_TYPE = "basic"
AUTH__BASIC_USER = "konobangu"
AUTH__BASIC_PASSWORD = "konobangu"

# AUTH__OIDC_ISSUER = "https://auth.logto.io/oidc"
# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
# AUTH__OIDC_CLIENT_ID = "client_id"
# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# AUTH__OIDC_EXTRA_CLAIM_KEY = ""
# AUTH__OIDC_EXTRA_CLAIM_VALUE = ""

MIKAN__HTTP_CLIENT__PROXY__ACCEPT_INVALID_CERTS = true
MIKAN__HTTP_CLIENT__PROXY__SERVER = "http://127.0.0.1:8899"
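The double-underscore keys above follow figment's nested env-var convention; the recorder's Cargo.toml later in this diff pulls in `figment` with the `env` feature. The sketch below is illustrative only: the struct and field names are made up for the example and are not the recorder's real config types.

```rust
// Hedged sketch: illustrative config structs, not the recorder's real ones.
// figment's Env provider with `.split("__")` maps LOGGER__LEVEL to `logger.level`,
// DATABASE__URI to `database.uri`, and so on.
use figment::{Figment, providers::Env};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct LoggerConfig {
    level: String,
}

#[derive(Debug, Deserialize)]
struct DatabaseConfig {
    uri: String,
}

#[derive(Debug, Deserialize)]
struct AppConfig {
    logger: LoggerConfig,
    database: DatabaseConfig,
}

fn main() -> Result<(), figment::Error> {
    // Split on "__" to turn flat environment variables into nested sections.
    let config: AppConfig = Figment::new()
        .merge(Env::raw().split("__"))
        .extract()?;
    println!("{config:?}");
    Ok(())
}
```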
15
apps/recorder/.env.production.example
Normal file
@@ -0,0 +1,15 @@
HOST="konobangu.com"

DATABASE__URI = "postgres://konobangu:konobangu@localhost:5432/konobangu"

AUTH__AUTH_TYPE = "basic" # or oidc
AUTH__BASIC_USER = "konobangu"
AUTH__BASIC_PASSWORD = "konobangu"

# AUTH__OIDC_ISSUER="https://auth.logto.io/oidc"
# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
# AUTH__OIDC_CLIENT_ID = "client_id"
# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# AUTH__OIDC_EXTRA_CLAIM_KEY = ""
# AUTH__OIDC_EXTRA_CLAIM_VALUE = ""
@@ -16,4 +16,18 @@ Cargo.lock
 # MSVC Windows builds of rustc generate these, which store debugging information
 *.pdb
 
-/data
+
+# Local
+.DS_Store
+*.local
+*.log*
+
+# Dist
+node_modules
+dist/
+temp/*
+!temp/.gitkeep
+tests/resources/mikan/classic_episodes/*/*
+!tests/resources/mikan/classic_episodes/parquet/tiny.parquet
+webui/
+data/
181
apps/recorder/Cargo.toml
Normal file
@@ -0,0 +1,181 @@
|
|||||||
|
[package]
|
||||||
|
name = "recorder"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[features]
|
||||||
|
default = ["jxl"]
|
||||||
|
playground = ["dep:inquire", "dep:color-eyre", "dep:polars", "test-utils"]
|
||||||
|
testcontainers = [
|
||||||
|
"dep:testcontainers",
|
||||||
|
"dep:testcontainers-modules",
|
||||||
|
"dep:testcontainers-ext",
|
||||||
|
"downloader/testcontainers",
|
||||||
|
"testcontainers-modules/postgres",
|
||||||
|
]
|
||||||
|
jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
|
||||||
|
test-utils = []
|
||||||
|
|
||||||
|
[lib]
|
||||||
|
name = "recorder"
|
||||||
|
path = "src/lib.rs"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "recorder_cli"
|
||||||
|
path = "src/bin/main.rs"
|
||||||
|
required-features = []
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "mikan_collect_classic_eps"
|
||||||
|
path = "examples/mikan_collect_classic_eps.rs"
|
||||||
|
required-features = ["playground"]
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "mikan_doppel_season_subscription"
|
||||||
|
path = "examples/mikan_doppel_season_subscription.rs"
|
||||||
|
required-features = ["playground"]
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "mikan_doppel_subscriber_subscription"
|
||||||
|
path = "examples/mikan_doppel_subscriber_subscription.rs"
|
||||||
|
required-features = ["playground"]
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "playground"
|
||||||
|
path = "examples/playground.rs"
|
||||||
|
required-features = ["playground"]
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
downloader = { workspace = true }
|
||||||
|
util = { workspace = true }
|
||||||
|
util-derive = { workspace = true }
|
||||||
|
fetch = { workspace = true }
|
||||||
|
|
||||||
|
serde = { workspace = true }
|
||||||
|
tokio = { workspace = true }
|
||||||
|
serde_json = { workspace = true }
|
||||||
|
async-trait = { workspace = true }
|
||||||
|
testcontainers = { workspace = true, optional = true }
|
||||||
|
testcontainers-modules = { workspace = true, optional = true }
|
||||||
|
testcontainers-ext = { workspace = true, optional = true, features = [
|
||||||
|
"tracing",
|
||||||
|
] }
|
||||||
|
tracing = { workspace = true }
|
||||||
|
axum = { workspace = true }
|
||||||
|
axum-extra = { workspace = true }
|
||||||
|
snafu = { workspace = true }
|
||||||
|
itertools = { workspace = true }
|
||||||
|
url = { workspace = true }
|
||||||
|
regex = { workspace = true }
|
||||||
|
lazy_static = { workspace = true }
|
||||||
|
quirks_path = { workspace = true }
|
||||||
|
futures = { workspace = true }
|
||||||
|
bytes = { workspace = true }
|
||||||
|
serde_with = { workspace = true }
|
||||||
|
moka = { workspace = true }
|
||||||
|
chrono = { workspace = true }
|
||||||
|
tracing-subscriber = { workspace = true }
|
||||||
|
mockito = { workspace = true }
|
||||||
|
color-eyre = { workspace = true, optional = true }
|
||||||
|
inquire = { workspace = true, optional = true }
|
||||||
|
convert_case = { workspace = true }
|
||||||
|
image = { workspace = true }
|
||||||
|
uuid = { workspace = true }
|
||||||
|
maplit = { workspace = true }
|
||||||
|
once_cell = { workspace = true }
|
||||||
|
rand = { workspace = true }
|
||||||
|
rust_decimal = { workspace = true }
|
||||||
|
base64 = { workspace = true }
|
||||||
|
nom = { workspace = true }
|
||||||
|
percent-encoding = { workspace = true }
|
||||||
|
num-traits = { workspace = true }
|
||||||
|
http = { workspace = true }
|
||||||
|
async-stream = { workspace = true }
|
||||||
|
serde_variant = { workspace = true }
|
||||||
|
tracing-appender = { workspace = true }
|
||||||
|
clap = { workspace = true }
|
||||||
|
ipnetwork = { workspace = true }
|
||||||
|
typed-builder = { workspace = true }
|
||||||
|
webp = { workspace = true }
|
||||||
|
|
||||||
|
sea-orm = { version = "1.1", features = [
|
||||||
|
"sqlx-sqlite",
|
||||||
|
"sqlx-postgres",
|
||||||
|
"runtime-tokio",
|
||||||
|
"macros",
|
||||||
|
"debug-print",
|
||||||
|
] }
|
||||||
|
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
|
||||||
|
sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
|
||||||
|
rss = { version = "2", features = ["builders", "with-serde"] }
|
||||||
|
fancy-regex = "0.15"
|
||||||
|
lightningcss = "1.0.0-alpha.66"
|
||||||
|
html-escape = "0.2.13"
|
||||||
|
opendal = { version = "0.53", features = ["default", "services-fs"] }
|
||||||
|
scraper = "0.23.1"
|
||||||
|
async-graphql = { version = "7", features = ["dynamic-schema"] }
|
||||||
|
async-graphql-axum = "7"
|
||||||
|
seaography = { version = "1.1", features = [
|
||||||
|
"with-json",
|
||||||
|
"with-chrono",
|
||||||
|
"with-time",
|
||||||
|
"with-uuid",
|
||||||
|
"with-decimal",
|
||||||
|
"with-bigdecimal",
|
||||||
|
"with-postgres-array",
|
||||||
|
"with-json-as-scalar",
|
||||||
|
"with-custom-as-json",
|
||||||
|
] }
|
||||||
|
tower = { version = "0.5.2", features = ["util"] }
|
||||||
|
tower-http = { version = "0.6", features = [
|
||||||
|
"trace",
|
||||||
|
"catch-panic",
|
||||||
|
"timeout",
|
||||||
|
"add-extension",
|
||||||
|
"cors",
|
||||||
|
"fs",
|
||||||
|
"set-header",
|
||||||
|
"compression-full",
|
||||||
|
] }
|
||||||
|
tera = "1.20.0"
|
||||||
|
openidconnect = { version = "4" }
|
||||||
|
dotenvy = "0.15.7"
|
||||||
|
jpegxl-rs = { version = "0.11.2", optional = true }
|
||||||
|
jpegxl-sys = { version = "0.11.2", optional = true }
|
||||||
|
|
||||||
|
apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
|
||||||
|
apalis-sql = { version = "0.7", features = ["postgres"] }
|
||||||
|
cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
|
||||||
|
reqwest_cookie_store = "0.8.0"
|
||||||
|
jwtk = "0.4.0"
|
||||||
|
mime_guess = "2.0.5"
|
||||||
|
icu_properties = "2.0.1"
|
||||||
|
icu = "2.0.0"
|
||||||
|
tracing-tree = "0.4.0"
|
||||||
|
num_cpus = "1.17.0"
|
||||||
|
headers-accept = "0.1.4"
|
||||||
|
polars = { version = "0.49.1", features = [
|
||||||
|
"parquet",
|
||||||
|
"lazy",
|
||||||
|
"diagonal_concat",
|
||||||
|
], optional = true }
|
||||||
|
quick-xml = { version = "0.38", features = [
|
||||||
|
"serialize",
|
||||||
|
"serde-types",
|
||||||
|
"serde",
|
||||||
|
] }
|
||||||
|
croner = "2.2.0"
|
||||||
|
ts-rs = "11.0.1"
|
||||||
|
secrecy = { version = "0.10.3", features = ["serde"] }
|
||||||
|
paste = "1.0.15"
|
||||||
|
chrono-tz = "0.10.3"
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
inquire = { workspace = true }
|
||||||
|
color-eyre = { workspace = true }
|
||||||
|
serial_test = "3"
|
||||||
|
insta = { version = "1", features = ["redactions", "toml", "filters"] }
|
||||||
|
ctor = "0.4.0"
|
||||||
|
tracing-test = "0.2.5"
|
||||||
|
rstest = "0.25"
|
||||||
6
apps/recorder/bindings/SubscriberTaskInput.ts
Normal file
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncOneSubscriptionFeedsFullTaskInput } from "./SyncOneSubscriptionFeedsFullTaskInput";
import type { SyncOneSubscriptionFeedsIncrementalTaskInput } from "./SyncOneSubscriptionFeedsIncrementalTaskInput";
import type { SyncOneSubscriptionSourcesTaskInput } from "./SyncOneSubscriptionSourcesTaskInput";

export type SubscriberTaskInput = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTaskInput | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTaskInput | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTaskInput;
6
apps/recorder/bindings/SubscriberTaskType.ts
Normal file
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncOneSubscriptionFeedsFullTask } from "./SyncOneSubscriptionFeedsFullTask";
import type { SyncOneSubscriptionFeedsIncrementalTask } from "./SyncOneSubscriptionFeedsIncrementalTask";
import type { SyncOneSubscriptionSourcesTask } from "./SyncOneSubscriptionSourcesTask";

export type SubscriberTaskType = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTask | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTask | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTask;
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SyncOneSubscriptionFeedsFullTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SyncOneSubscriptionFeedsFullTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SyncOneSubscriptionFeedsIncrementalTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SyncOneSubscriptionFeedsIncrementalTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };
3
apps/recorder/bindings/SyncOneSubscriptionSourcesTask.ts
Normal file
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SyncOneSubscriptionSourcesTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SyncOneSubscriptionSourcesTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };
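These bindings are emitted by `ts-rs` from serde-tagged Rust types (`ts-rs` and `serde` both appear in the recorder's Cargo.toml in this diff). The sketch below shows roughly the kind of definitions that yield a `{ "taskType": ... } & ...` union; the attributes and variant list are illustrative, not copied from the crate.

```rust
// Hedged sketch: roughly the kind of serde + ts-rs definitions behind the
// generated bindings above; attribute details and variants are illustrative.
use serde::{Deserialize, Serialize};
use ts_rs::TS;

#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SyncOneSubscriptionFeedsFullTaskInput {
    pub subscription_id: i32,
    pub subscriber_id: Option<i32>,
    pub cron_id: Option<i32>,
}

// Internally tagged on "taskType", which ts-rs renders as an intersection of
// `{ "taskType": "..." }` with the variant's payload type.
#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(tag = "taskType", rename_all = "snake_case")]
#[ts(export)]
pub enum SubscriberTaskInput {
    SyncOneSubscriptionFeedsFull(SyncOneSubscriptionFeedsFullTaskInput),
    // ...remaining variants omitted in this sketch
}
```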
584
apps/recorder/examples/mikan_collect_classic_eps.rs
Normal file
@@ -0,0 +1,584 @@
|
|||||||
|
use std::collections::HashSet;
|
||||||
|
|
||||||
|
use chrono::{DateTime, Duration, FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
|
||||||
|
use fetch::{HttpClientConfig, fetch_html};
|
||||||
|
use itertools::Itertools;
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
use nom::{
|
||||||
|
IResult, Parser,
|
||||||
|
branch::alt,
|
||||||
|
bytes::complete::{tag, take, take_till1},
|
||||||
|
character::complete::space1,
|
||||||
|
combinator::map,
|
||||||
|
};
|
||||||
|
use recorder::{
|
||||||
|
errors::{RecorderError, RecorderResult},
|
||||||
|
extract::{
|
||||||
|
html::extract_inner_text_from_element_ref,
|
||||||
|
mikan::{MikanClient, MikanConfig, MikanEpisodeHash, MikanFansubHash},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
use regex::Regex;
|
||||||
|
use scraper::{ElementRef, Html, Selector};
|
||||||
|
use snafu::FromString;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref TEST_FOLDER: std::path::PathBuf =
|
||||||
|
if cfg!(any(test, debug_assertions, feature = "playground")) {
|
||||||
|
std::path::PathBuf::from(format!(
|
||||||
|
"{}/tests/resources/mikan/classic_episodes",
|
||||||
|
env!("CARGO_MANIFEST_DIR")
|
||||||
|
))
|
||||||
|
} else {
|
||||||
|
std::path::PathBuf::from("tests/resources/mikan/classic_episodes")
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref TOTAL_PAGE_REGEX: Regex =
|
||||||
|
Regex::new(r#"\$\(\'\.classic-view-pagination2\'\)\.bootpag\(\{\s*total:\s*(\d+)"#)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct MikanClassicEpisodeTableRow {
|
||||||
|
pub id: i32,
|
||||||
|
pub publish_at: DateTime<Utc>,
|
||||||
|
pub mikan_fansub_id: Option<String>,
|
||||||
|
pub fansub_name: Option<String>,
|
||||||
|
pub mikan_episode_id: String,
|
||||||
|
pub original_name: String,
|
||||||
|
pub magnet_link: Option<String>,
|
||||||
|
pub file_size: Option<String>,
|
||||||
|
pub torrent_link: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MikanClassicEpisodeTableRow {
|
||||||
|
fn timezone() -> FixedOffset {
|
||||||
|
FixedOffset::east_opt(8 * 3600).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fixed_date_parser(input: &str) -> IResult<&str, NaiveDate> {
|
||||||
|
alt((
|
||||||
|
map(tag("今天"), move |_| {
|
||||||
|
Utc::now().with_timezone(&Self::timezone()).date_naive()
|
||||||
|
}),
|
||||||
|
map(tag("昨天"), move |_| {
|
||||||
|
Utc::now().with_timezone(&Self::timezone()).date_naive() - Duration::days(1)
|
||||||
|
}),
|
||||||
|
))
|
||||||
|
.parse(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn formatted_date_parser(input: &str) -> IResult<&str, NaiveDate> {
|
||||||
|
let (remain, date_str) = take_till1(|c: char| c.is_whitespace()).parse(input)?;
|
||||||
|
let date = NaiveDate::parse_from_str(date_str, "%Y/%m/%d").map_err(|_| {
|
||||||
|
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
|
||||||
|
})?;
|
||||||
|
Ok((remain, date))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn date_parser(input: &str) -> IResult<&str, NaiveDate> {
|
||||||
|
alt((Self::fixed_date_parser, Self::formatted_date_parser)).parse(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn time_parser(input: &str) -> IResult<&str, NaiveTime> {
|
||||||
|
let (remain, time_str) = take(5usize).parse(input)?;
|
||||||
|
let time = NaiveTime::parse_from_str(time_str, "%H:%M").map_err(|_| {
|
||||||
|
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
|
||||||
|
})?;
|
||||||
|
Ok((remain, time))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_publish_at(text: &str) -> Option<DateTime<Utc>> {
|
||||||
|
let (_, (date, _, time)) = (Self::date_parser, space1, Self::time_parser)
|
||||||
|
.parse(text)
|
||||||
|
.ok()?;
|
||||||
|
let local_dt = Self::timezone()
|
||||||
|
.from_local_datetime(&date.and_time(time))
|
||||||
|
.single()?;
|
||||||
|
Some(local_dt.with_timezone(&Utc))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_element_ref(
|
||||||
|
row: ElementRef<'_>,
|
||||||
|
rev_id: i32,
|
||||||
|
idx: i32,
|
||||||
|
mikan_base_url: &Url,
|
||||||
|
) -> RecorderResult<Self> {
|
||||||
|
let publish_at_selector = &Selector::parse("td:nth-of-type(1)").unwrap();
|
||||||
|
let fansub_selector = &Selector::parse("td:nth-of-type(2) > a").unwrap();
|
||||||
|
let original_name_selector =
|
||||||
|
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(1)").unwrap();
|
||||||
|
let magnet_link_selector =
|
||||||
|
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(2)").unwrap();
|
||||||
|
let file_size_selector = &Selector::parse("td:nth-of-type(4)").unwrap();
|
||||||
|
let torrent_link_selector = &Selector::parse("td:nth-of-type(5) > a").unwrap();
|
||||||
|
|
||||||
|
let publish_at = row
|
||||||
|
.select(publish_at_selector)
|
||||||
|
.next()
|
||||||
|
.map(extract_inner_text_from_element_ref)
|
||||||
|
.and_then(|e| Self::extract_publish_at(&e));
|
||||||
|
|
||||||
|
let (mikan_fansub_hash, fansub_name) = row
|
||||||
|
.select(fansub_selector)
|
||||||
|
.next()
|
||||||
|
.and_then(|e| {
|
||||||
|
e.attr("href")
|
||||||
|
.and_then(|s| mikan_base_url.join(s).ok())
|
||||||
|
.and_then(|u| MikanFansubHash::from_homepage_url(&u))
|
||||||
|
.map(|h| (h, extract_inner_text_from_element_ref(e)))
|
||||||
|
})
|
||||||
|
.unzip();
|
||||||
|
|
||||||
|
let (mikan_episode_hash, original_name) = row
|
||||||
|
.select(original_name_selector)
|
||||||
|
.next()
|
||||||
|
.and_then(|el| {
|
||||||
|
el.attr("href")
|
||||||
|
.and_then(|s| mikan_base_url.join(s).ok())
|
||||||
|
.and_then(|u| MikanEpisodeHash::from_homepage_url(&u))
|
||||||
|
.map(|h| (h, extract_inner_text_from_element_ref(el)))
|
||||||
|
})
|
||||||
|
.unzip();
|
||||||
|
|
||||||
|
let magnet_link = row
|
||||||
|
.select(magnet_link_selector)
|
||||||
|
.next()
|
||||||
|
.and_then(|el| el.attr("data-clipboard-text"));
|
||||||
|
|
||||||
|
let file_size = row
|
||||||
|
.select(file_size_selector)
|
||||||
|
.next()
|
||||||
|
.map(extract_inner_text_from_element_ref);
|
||||||
|
|
||||||
|
let torrent_link = row
|
||||||
|
.select(torrent_link_selector)
|
||||||
|
.next()
|
||||||
|
.and_then(|el| el.attr("href"));
|
||||||
|
|
||||||
|
if let (Some(mikan_episode_hash), Some(original_name), Some(publish_at)) = (
|
||||||
|
mikan_episode_hash.as_ref(),
|
||||||
|
original_name.as_ref(),
|
||||||
|
publish_at.as_ref(),
|
||||||
|
) {
|
||||||
|
Ok(Self {
|
||||||
|
id: rev_id * 1000 + idx,
|
||||||
|
publish_at: *publish_at,
|
||||||
|
mikan_fansub_id: mikan_fansub_hash.map(|h| h.mikan_fansub_id.clone()),
|
||||||
|
fansub_name,
|
||||||
|
mikan_episode_id: mikan_episode_hash.mikan_episode_id.clone(),
|
||||||
|
original_name: original_name.clone(),
|
||||||
|
magnet_link: magnet_link.map(|s| s.to_string()),
|
||||||
|
file_size: file_size.map(|s| s.to_string()),
|
||||||
|
torrent_link: torrent_link.map(|s| s.to_string()),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
let mut missing_fields = vec![];
|
||||||
|
if mikan_episode_hash.is_none() {
|
||||||
|
missing_fields.push("mikan_episode_id");
|
||||||
|
}
|
||||||
|
if original_name.is_none() {
|
||||||
|
missing_fields.push("original_name");
|
||||||
|
}
|
||||||
|
if publish_at.is_none() {
|
||||||
|
missing_fields.push("publish_at");
|
||||||
|
}
|
||||||
|
Err(RecorderError::without_source(format!(
|
||||||
|
"Failed to parse episode table row, missing fields: {missing_fields:?}, row \
|
||||||
|
index: {idx}"
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct MikanClassicEpisodeTablePage {
|
||||||
|
pub page: i32,
|
||||||
|
pub total: i32,
|
||||||
|
pub html: String,
|
||||||
|
pub rows: Vec<MikanClassicEpisodeTableRow>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MikanClassicEpisodeTablePage {
|
||||||
|
pub fn from_html(
|
||||||
|
html: String,
|
||||||
|
mikan_base_url: &Url,
|
||||||
|
page: i32,
|
||||||
|
updated_info: Option<(i32, i32)>,
|
||||||
|
) -> RecorderResult<Self> {
|
||||||
|
let tr_selector = &Selector::parse("tbody tr").unwrap();
|
||||||
|
let doc = Html::parse_document(&html);
|
||||||
|
if let Some(mut total) = TOTAL_PAGE_REGEX
|
||||||
|
.captures(&html)
|
||||||
|
.and_then(|c| c.get(1))
|
||||||
|
.and_then(|s| s.as_str().parse::<i32>().ok())
|
||||||
|
{
|
||||||
|
if let Some((_, update_total)) = updated_info {
|
||||||
|
total = update_total;
|
||||||
|
}
|
||||||
|
|
||||||
|
let rev_id = total - page;
|
||||||
|
let rows = doc
|
||||||
|
.select(tr_selector)
|
||||||
|
.rev()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(idx, tr)| {
|
||||||
|
MikanClassicEpisodeTableRow::from_element_ref(
|
||||||
|
tr,
|
||||||
|
rev_id,
|
||||||
|
idx as i32,
|
||||||
|
mikan_base_url,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect::<RecorderResult<Vec<_>>>()?;
|
||||||
|
Ok(Self {
|
||||||
|
page,
|
||||||
|
total,
|
||||||
|
html,
|
||||||
|
rows,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
Err(RecorderError::without_source(
|
||||||
|
"Failed to parse pagination meta and rows".into(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn save_to_files(&self) -> RecorderResult<()> {
|
||||||
|
use polars::prelude::*;
|
||||||
|
|
||||||
|
let rev_id = self.total - self.page;
|
||||||
|
let parquet_path = TEST_FOLDER.join(format!("parquet/rev_{rev_id}.parquet"));
|
||||||
|
let csv_path = TEST_FOLDER.join(format!("csv/rev_{rev_id}.csv"));
|
||||||
|
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
|
||||||
|
|
||||||
|
std::fs::write(html_path, self.html.clone())?;
|
||||||
|
|
||||||
|
let mut id_vec = Vec::new();
|
||||||
|
let mut publish_at_vec = Vec::new();
|
||||||
|
let mut mikan_fansub_id_vec = Vec::new();
|
||||||
|
let mut fansub_name_vec = Vec::new();
|
||||||
|
let mut mikan_episode_id_vec = Vec::new();
|
||||||
|
let mut original_name_vec = Vec::new();
|
||||||
|
let mut magnet_link_vec = Vec::new();
|
||||||
|
let mut file_size_vec = Vec::new();
|
||||||
|
let mut torrent_link_vec = Vec::new();
|
||||||
|
|
||||||
|
for row in &self.rows {
|
||||||
|
id_vec.push(row.id);
|
||||||
|
publish_at_vec.push(row.publish_at.to_rfc3339());
|
||||||
|
mikan_fansub_id_vec.push(row.mikan_fansub_id.clone());
|
||||||
|
fansub_name_vec.push(row.fansub_name.clone());
|
||||||
|
mikan_episode_id_vec.push(row.mikan_episode_id.clone());
|
||||||
|
original_name_vec.push(row.original_name.clone());
|
||||||
|
magnet_link_vec.push(row.magnet_link.clone());
|
||||||
|
file_size_vec.push(row.file_size.clone());
|
||||||
|
torrent_link_vec.push(row.torrent_link.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
let df = df! [
|
||||||
|
"id" => id_vec,
|
||||||
|
"publish_at_timestamp" => publish_at_vec,
|
||||||
|
"mikan_fansub_id" => mikan_fansub_id_vec,
|
||||||
|
"fansub_name" => fansub_name_vec,
|
||||||
|
"mikan_episode_id" => mikan_episode_id_vec,
|
||||||
|
"original_name" => original_name_vec,
|
||||||
|
"magnet_link" => magnet_link_vec,
|
||||||
|
"file_size" => file_size_vec,
|
||||||
|
"torrent_link" => torrent_link_vec,
|
||||||
|
]
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to create DataFrame: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut parquet_file = std::fs::File::create(&parquet_path)?;
|
||||||
|
|
||||||
|
ParquetWriter::new(&mut parquet_file)
|
||||||
|
.finish(&mut df.clone())
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to write parquet file: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut csv_file = std::fs::File::create(&csv_path)?;
|
||||||
|
|
||||||
|
CsvWriter::new(&mut csv_file)
|
||||||
|
.include_header(true)
|
||||||
|
.with_quote_style(QuoteStyle::Always)
|
||||||
|
.finish(&mut df.clone())
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to write csv file: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
println!(
|
||||||
|
"[{}/{}] Saved {} rows to rev_{}.{{parquet,html,csv}}",
|
||||||
|
self.page,
|
||||||
|
self.total,
|
||||||
|
self.rows.len(),
|
||||||
|
rev_id
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn waiting_rev_ids(total: i32) -> RecorderResult<Vec<i32>> {
|
||||||
|
let dir = TEST_FOLDER.join("csv");
|
||||||
|
|
||||||
|
let files = std::fs::read_dir(dir)?;
|
||||||
|
|
||||||
|
let rev_ids = files
|
||||||
|
.filter_map(|f| f.ok())
|
||||||
|
.filter_map(|f| {
|
||||||
|
f.path().file_stem().and_then(|s| {
|
||||||
|
s.to_str().and_then(|s| {
|
||||||
|
if s.starts_with("rev_") {
|
||||||
|
s.replace("rev_", "").parse::<i32>().ok()
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect::<HashSet<_>>();
|
||||||
|
|
||||||
|
Ok((0..total)
|
||||||
|
.filter(|rev_id| !rev_ids.contains(rev_id))
|
||||||
|
.collect::<Vec<_>>())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn scrape_mikan_classic_episode_table_page(
|
||||||
|
mikan_client: &MikanClient,
|
||||||
|
page: i32,
|
||||||
|
updated_info: Option<(i32, i32)>,
|
||||||
|
) -> RecorderResult<MikanClassicEpisodeTablePage> {
|
||||||
|
let mikan_base_url = mikan_client.base_url();
|
||||||
|
let url = mikan_base_url.join(&format!("/Home/Classic/{page}"))?;
|
||||||
|
|
||||||
|
if let Some((rev_id, update_total)) = updated_info.as_ref() {
|
||||||
|
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
|
||||||
|
if html_path.exists() {
|
||||||
|
let html = std::fs::read_to_string(&html_path)?;
|
||||||
|
println!("[{page}/{update_total}] html exists, skipping fetch");
|
||||||
|
return MikanClassicEpisodeTablePage::from_html(
|
||||||
|
html,
|
||||||
|
mikan_base_url,
|
||||||
|
page,
|
||||||
|
updated_info,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let total = if let Some((_, update_total)) = updated_info.as_ref() {
|
||||||
|
update_total.to_string()
|
||||||
|
} else {
|
||||||
|
"Unknown".to_string()
|
||||||
|
};
|
||||||
|
|
||||||
|
println!("[{page}/{total}] fetching html...");
|
||||||
|
|
||||||
|
let html = fetch_html(mikan_client, url).await?;
|
||||||
|
|
||||||
|
println!("[{page}/{total}] fetched html done");
|
||||||
|
|
||||||
|
std::fs::write(TEST_FOLDER.join("html/temp.html"), html.clone())?;
|
||||||
|
|
||||||
|
MikanClassicEpisodeTablePage::from_html(html, mikan_base_url, page, updated_info)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn scrape_mikan_classic_episode_table_page_from_rev_id(
|
||||||
|
mikan_client: &MikanClient,
|
||||||
|
total: i32,
|
||||||
|
rev_idx: i32,
|
||||||
|
) -> RecorderResult<MikanClassicEpisodeTablePage> {
|
||||||
|
let page = total - rev_idx;
|
||||||
|
|
||||||
|
scrape_mikan_classic_episode_table_page(mikan_client, page, Some((rev_idx, total))).await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()> {
|
||||||
|
use polars::prelude::*;
|
||||||
|
|
||||||
|
let dir = TEST_FOLDER.join("parquet");
|
||||||
|
let files = std::fs::read_dir(dir)?;
|
||||||
|
|
||||||
|
let parquet_paths = files
|
||||||
|
.filter_map(|f| f.ok())
|
||||||
|
.filter_map(|f| {
|
||||||
|
let path = f.path();
|
||||||
|
if let Some(ext) = path.extension()
|
||||||
|
&& ext == "parquet"
|
||||||
|
&& path
|
||||||
|
.file_stem()
|
||||||
|
.is_some_and(|f| f.to_string_lossy().starts_with("rev_"))
|
||||||
|
{
|
||||||
|
Some(path)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
if parquet_paths.is_empty() {
|
||||||
|
return Err(RecorderError::without_source(
|
||||||
|
"No parquet files found to merge".into(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
println!("Found {} parquet files to merge", parquet_paths.len());
|
||||||
|
|
||||||
|
// 读取并合并所有 parquet 文件
|
||||||
|
let mut all_dfs = Vec::new();
|
||||||
|
for path in &parquet_paths {
|
||||||
|
println!("Reading {path:?}");
|
||||||
|
let file = std::fs::File::open(path)?;
|
||||||
|
let df = ParquetReader::new(file).finish().map_err(|e| {
|
||||||
|
let message = format!("Failed to read parquet file {path:?}: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
all_dfs.push(df);
|
||||||
|
}
|
||||||
|
|
||||||
|
let lazy_frames: Vec<LazyFrame> = all_dfs.into_iter().map(|df| df.lazy()).collect();
|
||||||
|
|
||||||
|
let merged_df = concat_lf_diagonal(&lazy_frames, UnionArgs::default())
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to concat DataFrames: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?
|
||||||
|
.sort(
|
||||||
|
["publish_at_timestamp"],
|
||||||
|
SortMultipleOptions::default().with_order_descending(true),
|
||||||
|
)
|
||||||
|
.unique(
|
||||||
|
Some(vec![
|
||||||
|
"mikan_fansub_id".to_string(),
|
||||||
|
"mikan_episode_id".to_string(),
|
||||||
|
]),
|
||||||
|
UniqueKeepStrategy::First,
|
||||||
|
)
|
||||||
|
.collect()
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to collect lazy DataFrame: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
fn select_columns_and_write(
|
||||||
|
merged_df: DataFrame,
|
||||||
|
name: &str,
|
||||||
|
columns: &[&str],
|
||||||
|
) -> RecorderResult<()> {
|
||||||
|
let result_df = merged_df
|
||||||
|
.lazy()
|
||||||
|
.sort(["publish_at_timestamp"], SortMultipleOptions::default())
|
||||||
|
.select(columns.iter().map(|c| col(*c)).collect_vec())
|
||||||
|
.collect()
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to sort and select columns: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let output_path = TEST_FOLDER.join(format!("parquet/{name}.parquet"));
|
||||||
|
let mut output_file = std::fs::File::create(&output_path)?;
|
||||||
|
|
||||||
|
ParquetWriter::new(&mut output_file)
|
||||||
|
.set_parallel(true)
|
||||||
|
.with_compression(ParquetCompression::Zstd(Some(
|
||||||
|
ZstdLevel::try_new(22).unwrap(),
|
||||||
|
)))
|
||||||
|
.finish(&mut result_df.clone())
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to write merged parquet file: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
println!("Merged {} rows into {output_path:?}", result_df.height());
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
|
||||||
|
// select_columns_and_write(
|
||||||
|
// merged_df.clone(),
|
||||||
|
// "lite",
|
||||||
|
// &[
|
||||||
|
// "mikan_fansub_id",
|
||||||
|
// "fansub_name",
|
||||||
|
// "mikan_episode_id",
|
||||||
|
// "original_name",
|
||||||
|
// ],
|
||||||
|
// )?;
|
||||||
|
// select_columns_and_write(
|
||||||
|
// merged_df,
|
||||||
|
// "full",
|
||||||
|
// &[
|
||||||
|
// "id",
|
||||||
|
// "publish_at_timestamp",
|
||||||
|
// "mikan_fansub_id",
|
||||||
|
// "fansub_name",
|
||||||
|
// "mikan_episode_id",
|
||||||
|
// "original_name",
|
||||||
|
// "magnet_link",
|
||||||
|
// "file_size",
|
||||||
|
// "torrent_link",
|
||||||
|
// ],
|
||||||
|
// )?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> RecorderResult<()> {
|
||||||
|
std::fs::create_dir_all(TEST_FOLDER.join("html"))?;
|
||||||
|
std::fs::create_dir_all(TEST_FOLDER.join("parquet"))?;
|
||||||
|
std::fs::create_dir_all(TEST_FOLDER.join("csv"))?;
|
||||||
|
|
||||||
|
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
|
||||||
|
http_client: HttpClientConfig {
|
||||||
|
exponential_backoff_max_retries: Some(3),
|
||||||
|
leaky_bucket_max_tokens: Some(2),
|
||||||
|
leaky_bucket_initial_tokens: Some(1),
|
||||||
|
leaky_bucket_refill_tokens: Some(1),
|
||||||
|
leaky_bucket_refill_interval: Some(std::time::Duration::from_millis(1000)),
|
||||||
|
user_agent: Some(
|
||||||
|
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
|
||||||
|
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
|
||||||
|
.to_string(),
|
||||||
|
),
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
base_url: Url::parse("https://mikanani.me")?,
|
||||||
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let first_page_and_pagination_info =
|
||||||
|
scrape_mikan_classic_episode_table_page(&mikan_scrape_client, 1, None).await?;
|
||||||
|
|
||||||
|
let total_page = first_page_and_pagination_info.total;
|
||||||
|
|
||||||
|
first_page_and_pagination_info.save_to_files()?;
|
||||||
|
|
||||||
|
let next_rev_ids = MikanClassicEpisodeTablePage::waiting_rev_ids(total_page)?;
|
||||||
|
|
||||||
|
for todo_rev_id in next_rev_ids {
|
||||||
|
let page = scrape_mikan_classic_episode_table_page_from_rev_id(
|
||||||
|
&mikan_scrape_client,
|
||||||
|
total_page,
|
||||||
|
todo_rev_id,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
page.save_to_files()?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 合并所有 parquet 文件
|
||||||
|
println!("\nMerging all parquet files...");
|
||||||
|
|
||||||
|
merge_mikan_classic_episodes_and_strip_columns().await?;
|
||||||
|
|
||||||
|
println!("Merge completed!");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
250
apps/recorder/examples/mikan_doppel_season_subscription.rs
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
use std::{str::FromStr, time::Duration};
|
||||||
|
|
||||||
|
use color_eyre::{Result, eyre::OptionExt};
|
||||||
|
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
|
||||||
|
use inquire::{Password, Text, validator::Validation};
|
||||||
|
use recorder::{
|
||||||
|
crypto::UserPassCredential,
|
||||||
|
extract::mikan::{
|
||||||
|
MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
|
||||||
|
build_mikan_bangumi_expand_subscribed_url,
|
||||||
|
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
|
||||||
|
extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
|
||||||
|
},
|
||||||
|
test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
|
||||||
|
};
|
||||||
|
use scraper::Html;
|
||||||
|
use tokio::fs;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> Result<()> {
|
||||||
|
tracing_subscriber::fmt()
|
||||||
|
.with_max_level(tracing::Level::INFO)
|
||||||
|
.init();
|
||||||
|
std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;
|
||||||
|
|
||||||
|
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
|
||||||
|
http_client: HttpClientConfig {
|
||||||
|
exponential_backoff_max_retries: Some(3),
|
||||||
|
leaky_bucket_max_tokens: Some(2),
|
||||||
|
leaky_bucket_initial_tokens: Some(0),
|
||||||
|
leaky_bucket_refill_tokens: Some(1),
|
||||||
|
leaky_bucket_refill_interval: Some(Duration::from_millis(1000)),
|
||||||
|
user_agent: Some(
|
||||||
|
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
|
||||||
|
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
|
||||||
|
.to_string(),
|
||||||
|
),
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
base_url: Url::parse("https://mikanani.me")?,
|
||||||
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let username_validator = |input: &str| {
|
||||||
|
if input.trim().is_empty() {
|
||||||
|
Ok(Validation::Invalid("Username cannot be empty".into()))
|
||||||
|
} else {
|
||||||
|
Ok(Validation::Valid)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let password_validator = |input: &str| {
|
||||||
|
if input.trim().is_empty() {
|
||||||
|
Ok(Validation::Invalid("Password cannot be empty".into()))
|
||||||
|
} else {
|
||||||
|
Ok(Validation::Valid)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let username = Text::new("Please enter your mikan username:")
|
||||||
|
.with_validator(username_validator)
|
||||||
|
.prompt()?;
|
||||||
|
let password = Password::new("Please enter your mikan password:")
|
||||||
|
.without_confirmation()
|
||||||
|
.with_display_mode(inquire::PasswordDisplayMode::Masked)
|
||||||
|
.with_validator(password_validator)
|
||||||
|
.prompt()?;
|
||||||
|
|
||||||
|
let mikan_scrape_client = mikan_scrape_client
|
||||||
|
.fork_with_userpass_credential(UserPassCredential {
|
||||||
|
username,
|
||||||
|
password,
|
||||||
|
user_agent: None,
|
||||||
|
cookies: None,
|
||||||
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
tracing::info!("Checking if logged in...");
|
||||||
|
if !mikan_scrape_client.has_login().await? {
|
||||||
|
tracing::info!("Logging in to mikan...");
|
||||||
|
mikan_scrape_client.login().await?;
|
||||||
|
tracing::info!("Logged in to mikan");
|
||||||
|
}
|
||||||
|
|
||||||
|
let mikan_base_url = mikan_scrape_client.base_url().clone();
|
||||||
|
tracing::info!("Scraping season subscription...");
|
||||||
|
let season_subscription =
|
||||||
|
fs::read("tests/resources/mikan/BangumiCoverFlow-2025-spring.html").await?;
|
||||||
|
let html = Html::parse_fragment(String::from_utf8(season_subscription)?.as_str());
|
||||||
|
let bangumi_index_list =
|
||||||
|
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(&html, &mikan_base_url);
|
||||||
|
|
||||||
|
for bangumi_index in bangumi_index_list {
|
||||||
|
let bangumi_meta = {
|
||||||
|
let bangumi_expand_subscribed_url = build_mikan_bangumi_expand_subscribed_url(
|
||||||
|
mikan_base_url.clone(),
|
||||||
|
bangumi_index.mikan_bangumi_id.as_ref(),
|
||||||
|
);
|
||||||
|
let bangumi_expand_subscribed_doppel_path =
|
||||||
|
MikanDoppelPath::new(bangumi_expand_subscribed_url.clone());
|
||||||
|
tracing::info!(
|
||||||
|
bangumi_title = bangumi_index.bangumi_title,
|
||||||
|
"Scraping bangumi expand subscribed..."
|
||||||
|
);
|
||||||
|
let bangumi_expand_subscribed_data =
|
||||||
|
if !bangumi_expand_subscribed_doppel_path.exists_any() {
|
||||||
|
let bangumi_expand_subscribed_data =
|
||||||
|
fetch_html(&mikan_scrape_client, bangumi_expand_subscribed_url).await?;
|
||||||
|
bangumi_expand_subscribed_doppel_path.write(&bangumi_expand_subscribed_data)?;
|
||||||
|
tracing::info!(
|
||||||
|
bangumi_title = bangumi_index.bangumi_title,
|
||||||
|
"Bangumi expand subscribed saved"
|
||||||
|
);
|
||||||
|
bangumi_expand_subscribed_data
|
||||||
|
} else {
|
||||||
|
tracing::info!(
|
||||||
|
bangumi_title = bangumi_index.bangumi_title,
|
||||||
|
"Bangumi expand subscribed already exists"
|
||||||
|
);
|
||||||
|
String::from_utf8(bangumi_expand_subscribed_doppel_path.read()?)?
|
||||||
|
};
|
||||||
|
|
||||||
|
let html = Html::parse_fragment(&bangumi_expand_subscribed_data);
|
||||||
|
extract_mikan_bangumi_meta_from_expand_subscribed_fragment(
|
||||||
|
&html,
|
||||||
|
bangumi_index.clone(),
|
||||||
|
mikan_base_url.clone(),
|
||||||
|
)
|
||||||
|
.ok_or_eyre(format!(
|
||||||
|
"Failed to extract bangumi meta from expand subscribed fragment: {:?}",
|
||||||
|
bangumi_index.bangumi_title
|
||||||
|
))
|
||||||
|
}?;
|
||||||
|
{
|
||||||
|
if let Some(poster_url) = bangumi_meta.origin_poster_src.as_ref() {
|
||||||
|
let poster_doppel_path = MikanDoppelPath::new(poster_url.clone());
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Scraping bangumi poster..."
|
||||||
|
);
|
||||||
|
if !poster_doppel_path.exists_any() {
|
||||||
|
let poster_data = fetch_image(&mikan_scrape_client, poster_url.clone()).await?;
|
||||||
|
poster_doppel_path.write(&poster_data)?;
|
||||||
|
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi poster saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Bangumi poster already exists"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let bangumi_homepage_url = bangumi_meta
|
||||||
|
.bangumi_hash()
|
||||||
|
.build_homepage_url(mikan_base_url.clone());
|
||||||
|
let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Scraping bangumi homepage..."
|
||||||
|
);
|
||||||
|
if !bangumi_homepage_doppel_path.exists_any() {
|
||||||
|
let bangumi_homepage_data =
|
||||||
|
fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
|
||||||
|
bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
|
||||||
|
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi homepage saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Bangumi homepage already exists"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let rss_items = {
|
||||||
|
let bangumi_rss_url = bangumi_meta
|
||||||
|
.bangumi_hash()
|
||||||
|
.build_rss_url(mikan_base_url.clone());
|
||||||
|
let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Scraping bangumi rss..."
|
||||||
|
);
|
||||||
|
let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
|
||||||
|
let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
|
||||||
|
bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
|
||||||
|
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi rss saved");
|
||||||
|
bangumi_rss_data
|
||||||
|
} else {
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Bangumi rss already exists"
|
||||||
|
);
|
||||||
|
String::from_utf8(bangumi_rss_doppel_path.read()?)?
|
||||||
|
};
|
||||||
|
let rss_items = MikanRssRoot::from_str(&bangumi_rss_data)?.channel.items;
|
||||||
|
rss_items
|
||||||
|
.into_iter()
|
||||||
|
.map(MikanRssItemMeta::try_from)
|
||||||
|
.collect::<Result<Vec<_>, _>>()
|
||||||
|
}?;
|
||||||
|
for rss_item in rss_items {
|
||||||
|
{
|
||||||
|
let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
|
||||||
|
let episode_homepage_doppel_path =
|
||||||
|
MikanDoppelPath::new(episode_homepage_url.clone());
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping episode...");
|
||||||
|
if !episode_homepage_doppel_path.exists_any() {
|
||||||
|
let episode_homepage_data =
|
||||||
|
fetch_html(&mikan_scrape_client, episode_homepage_url).await?;
|
||||||
|
episode_homepage_doppel_path.write(&episode_homepage_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Episode saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Episode already exists");
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let episode_torrent_url = rss_item.torrent_link;
|
||||||
|
let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
|
||||||
|
if !episode_torrent_doppel_path.exists_any() {
|
||||||
|
match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
|
||||||
|
Ok(episode_torrent_data) => {
|
||||||
|
episode_torrent_doppel_path.write(&episode_torrent_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Episode torrent saved");
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
if let FetchError::ReqwestError { source } = &e
|
||||||
|
&& source
|
||||||
|
.status()
|
||||||
|
.is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
|
||||||
|
{
|
||||||
|
tracing::warn!(
|
||||||
|
title = rss_item.title,
|
||||||
|
"Episode torrent not found, maybe deleted since new version"
|
||||||
|
);
|
||||||
|
episode_torrent_doppel_path
|
||||||
|
.write_meta(MikanDoppelMeta { status: 404 })?;
|
||||||
|
} else {
|
||||||
|
Err(e)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Episode torrent already exists");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tracing::info!("Scraping season subscription done");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
215  apps/recorder/examples/mikan_doppel_subscriber_subscription.rs  (new file)
@@ -0,0 +1,215 @@
use std::{str::FromStr, time::Duration};

use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use recorder::{
    errors::RecorderResult,
    extract::mikan::{
        MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
        extract_mikan_episode_meta_from_episode_homepage_html,
    },
    test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
};
use scraper::Html;
use tokio::fs;
use url::Url;

#[tokio::main]
async fn main() -> RecorderResult<()> {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();
    std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;

    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(0),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(Duration::from_millis(500)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;

    let mikan_base_url = mikan_scrape_client.base_url().clone();
    tracing::info!("Scraping subscriber subscription...");
    let subscriber_subscription =
        fs::read_to_string("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
    let channel = MikanRssRoot::from_str(&subscriber_subscription)?.channel;
    let rss_items: Vec<MikanRssItemMeta> = channel
        .items
        .into_iter()
        .map(MikanRssItemMeta::try_from)
        .collect::<Result<Vec<_>, _>>()?;
    for rss_item in rss_items {
        let episode_homepage_meta = {
            tracing::info!(title = rss_item.title, "Scraping episode homepage...");
            let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
            let episode_homepage_doppel_path = MikanDoppelPath::new(episode_homepage_url.clone());
            let episode_homepage_data = if !episode_homepage_doppel_path.exists_any() {
                let episode_homepage_data =
                    fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
                episode_homepage_doppel_path.write(&episode_homepage_data)?;
                tracing::info!(title = rss_item.title, "Episode homepage saved");
                episode_homepage_data
            } else {
                tracing::info!(title = rss_item.title, "Episode homepage already exists");
                String::from_utf8(episode_homepage_doppel_path.read()?)?
            };
            let html = Html::parse_document(&episode_homepage_data);
            extract_mikan_episode_meta_from_episode_homepage_html(
                &html,
                mikan_base_url.clone(),
                episode_homepage_url,
            )
        }?;

        {
            let episode_torrent_url = rss_item.torrent_link;
            let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
            tracing::info!(title = rss_item.title, "Scraping episode torrent...");
            if !episode_torrent_doppel_path.exists_any() {
                match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                    Ok(episode_torrent_data) => {
                        episode_torrent_doppel_path.write(&episode_torrent_data)?;
                        tracing::info!(title = rss_item.title, "Episode torrent saved");
                    }
                    Err(e) => {
                        if let FetchError::ReqwestError { source } = &e
                            && source
                                .status()
                                .is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
                        {
                            tracing::warn!(
                                title = rss_item.title,
                                "Episode torrent not found, maybe deleted since new version"
                            );
                            episode_torrent_doppel_path
                                .write_meta(MikanDoppelMeta { status: 404 })?;
                        } else {
                            Err(e)?;
                        }
                    }
                }

                tracing::info!(title = rss_item.title, "Episode torrent saved");
            } else {
                tracing::info!(title = rss_item.title, "Episode torrent already exists");
            }
        }
        {
            if let Some(episode_poster_url) = episode_homepage_meta.origin_poster_src.as_ref() {
                let episode_poster_doppel_path = MikanDoppelPath::new(episode_poster_url.clone());
                tracing::info!(title = rss_item.title, "Scraping episode poster...");
                if !episode_poster_doppel_path.exists_any() {
                    let episode_poster_data =
                        fetch_image(&mikan_scrape_client, episode_poster_url.clone()).await?;
                    episode_poster_doppel_path.write(&episode_poster_data)?;
                    tracing::info!(title = rss_item.title, "Episode poster saved");
                } else {
                    tracing::info!(title = rss_item.title, "Episode poster already exists");
                }
            }
        }

        {
            let bangumi_homepage_url = episode_homepage_meta
                .bangumi_hash()
                .build_homepage_url(mikan_base_url.clone());
            let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
            tracing::info!(title = rss_item.title, "Scraping bangumi homepage...");
            if !bangumi_homepage_doppel_path.exists_any() {
                let bangumi_homepage_data =
                    fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
                bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
                tracing::info!(title = rss_item.title, "Bangumi homepage saved");
            } else {
                tracing::info!(title = rss_item.title, "Bangumi homepage already exists");
            };
        }
        {
            let bangumi_rss_url = episode_homepage_meta
                .bangumi_hash()
                .build_rss_url(mikan_base_url.clone());
            let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
            tracing::info!(title = rss_item.title, "Scraping bangumi rss...");
            let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
                let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
                bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
                tracing::info!(title = rss_item.title, "Bangumi rss saved");
                bangumi_rss_data
            } else {
                tracing::info!(title = rss_item.title, "Bangumi rss already exists");
                String::from_utf8(bangumi_rss_doppel_path.read()?)?
            };

            let rss_items: Vec<MikanRssItemMeta> = MikanRssRoot::from_str(&bangumi_rss_data)?
                .channel
                .items
                .into_iter()
                .map(MikanRssItemMeta::try_from)
                .collect::<Result<Vec<_>, _>>()?;
            for rss_item in rss_items {
                {
                    tracing::info!(title = rss_item.title, "Scraping episode homepage...");
                    let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
                    let episode_homepage_doppel_path =
                        MikanDoppelPath::new(episode_homepage_url.clone());
                    if !episode_homepage_doppel_path.exists_any() {
                        let episode_homepage_data =
                            fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
                        episode_homepage_doppel_path.write(&episode_homepage_data)?;
                        tracing::info!(title = rss_item.title, "Episode homepage saved");
                    } else {
                        tracing::info!(title = rss_item.title, "Episode homepage already exists");
                    };
                };

                {
                    let episode_torrent_url = rss_item.torrent_link;
                    let episode_torrent_doppel_path =
                        MikanDoppelPath::new(episode_torrent_url.clone());
                    tracing::info!(title = rss_item.title, "Scraping episode torrent...");
                    if !episode_torrent_doppel_path.exists_any() {
                        match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
                            Ok(episode_torrent_data) => {
                                episode_torrent_doppel_path.write(&episode_torrent_data)?;
                                tracing::info!(title = rss_item.title, "Episode torrent saved");
                            }
                            Err(e) => {
                                if let FetchError::ReqwestError { source } = &e
                                    && source.status().is_some_and(|status| {
                                        status == reqwest::StatusCode::NOT_FOUND
                                    })
                                {
                                    tracing::warn!(
                                        title = rss_item.title,
                                        "Episode torrent not found, maybe deleted since new \
                                         version"
                                    );
                                    episode_torrent_doppel_path
                                        .write_meta(MikanDoppelMeta { status: 404 })?;
                                } else {
                                    Err(e)?;
                                }
                            }
                        }

                        tracing::info!(title = rss_item.title, "Episode torrent saved");
                    } else {
                        tracing::info!(title = rss_item.title, "Episode torrent already exists");
                    }
                }
            }
        }
    }
    tracing::info!("Scraping subscriber subscription done");
    Ok(())
}
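Both example binaries above repeat the same cache-or-fetch shape around MikanDoppelPath: resolve the doppel path for a URL, reuse the local copy when it exists, otherwise fetch and persist it. A minimal sketch of that shape factored into a helper follows; the helper itself is illustrative only and is not part of this diff.

use fetch::fetch_html;
use recorder::{
    errors::RecorderResult,
    extract::mikan::MikanClient,
    test_utils::mikan::MikanDoppelPath,
};
use url::Url;

// Hypothetical helper showing the cache-or-fetch pattern used by the examples above.
async fn fetch_html_with_doppel(client: &MikanClient, url: Url) -> RecorderResult<String> {
    let doppel_path = MikanDoppelPath::new(url.clone());
    if doppel_path.exists_any() {
        // Reuse the previously scraped copy instead of hitting the network again.
        Ok(String::from_utf8(doppel_path.read()?)?)
    } else {
        let data = fetch_html(client, url).await?;
        doppel_path.write(&data)?;
        Ok(data)
    }
}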
33  apps/recorder/examples/playground.rs  (new file)
@@ -0,0 +1,33 @@
#![feature(duration_constructors_lite)]
use std::{sync::Arc, time::Duration};

use apalis_sql::postgres::PostgresStorage;
use recorder::{
    app::AppContextTrait,
    errors::RecorderResult,
    test_utils::{
        app::TestingAppContext,
        database::{TestingDatabaseServiceConfig, build_testing_database_service},
    },
};

#[tokio::main]
async fn main() -> RecorderResult<()> {
    let app_ctx = {
        let db_service = build_testing_database_service(TestingDatabaseServiceConfig {
            auto_migrate: false,
        })
        .await?;
        Arc::new(TestingAppContext::builder().db(db_service).build())
    };

    let db = app_ctx.db();

    PostgresStorage::setup(db.get_postgres_connection_pool()).await?;

    dbg!(db.get_postgres_connection_pool().connect_options());

    tokio::time::sleep(Duration::from_hours(1)).await;

    Ok(())
}
6  apps/recorder/package.json  (new file)
@@ -0,0 +1,6 @@
{
  "name": "recorder",
  "version": "0.0.1",
  "private": true,
  "type": "module"
}
94  apps/recorder/recorder.config.toml  (new file)
@@ -0,0 +1,94 @@
# Application logging configuration
[logger]
# Enable or disable logging.
enable = true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace = true
# Log level, options: trace, debug, info, warn or error.
level = "info"
# Define the logging format. options: compact, pretty or json
format = "compact"
# By default the logger only keeps logs that come from your code or from the `loco` framework.
# Uncomment the line below to override the logger filters and also see logs from third-party libraries.
# override_filter: trace

# Web server configuration
[server]
# Port on which the server will listen. The server binds to 0.0.0.0:{PORT}.
port = 5001
binding = "0.0.0.0"
# The UI hostname or IP address that mailers will point to.
host = '{{ get_env(name="HOST", default="localhost") }}'
# Out-of-the-box middleware configuration. To disable a middleware, set its `enable` field to `false` or comment out the middleware block.

# Enable Etag cache header middleware
[server.middlewares.etag]
enable = true

# Generates a unique request ID and enhances logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
[server.middlewares.request_id]
enable = true

[server.middlewares.logger]
enable = true

# When your code panics, the request still returns a 500 status code.
[server.middlewares.catch_panic]
enable = true

# Timeout middleware for incoming requests. Requests that exceed the configured duration are cut off and a 408 status code is returned.
[server.middlewares.timeout_request]
enable = false
# Duration time in milliseconds.
timeout = 5000

# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
#   - https://konobangu.com
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
#   - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
#   - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
[server.middlewares.cors]
enable = true

[server.middlewares.compression]
enable = true

# Database Configuration
[database]
# Database connection URI
uri = '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@localhost:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging = true
# Set the timeout duration when acquiring a connection.
connect_timeout = 500
# Set the idle duration before closing a connection.
idle_timeout = 500
# Minimum number of connections for a pool.
min_connections = 1
# Maximum number of connections for a pool.
max_connections = 10
# Run migration up when application loaded
auto_migrate = true

[storage]
data_dir = './data'

[mikan]
base_url = "https://mikanani.me/"

[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 1
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500

[graphql]
# depth_limit = inf
# complexity_limit = inf
154  apps/recorder/src/app/builder.rs  (new file)
@@ -0,0 +1,154 @@
use clap::{Parser, command};

use super::{AppContext, core::App, env::Environment};
use crate::{app::config::AppConfig, errors::RecorderResult};

#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
pub struct MainCliArgs {
    /// Explicit config file path
    #[arg(short, long)]
    config_file: Option<String>,

    /// Explicit dotenv file path
    #[arg(short, long)]
    dotenv_file: Option<String>,

    /// Explicit working dir
    #[arg(short, long)]
    working_dir: Option<String>,

    /// Explicit environment
    #[arg(short, long)]
    environment: Option<Environment>,

    #[arg(long)]
    graceful_shutdown: Option<bool>,
}

pub struct AppBuilder {
    dotenv_file: Option<String>,
    config_file: Option<String>,
    working_dir: String,
    environment: Environment,
    pub graceful_shutdown: bool,
}

impl AppBuilder {
    pub async fn from_main_cli(environment: Option<Environment>) -> RecorderResult<Self> {
        let args = MainCliArgs::parse();

        let environment = environment.unwrap_or_else(|| {
            args.environment.unwrap_or({
                if cfg!(test) {
                    Environment::Testing
                } else if cfg!(debug_assertions) {
                    Environment::Development
                } else {
                    Environment::Production
                }
            })
        });

        let mut builder = Self::default();

        if let Some(working_dir) = args.working_dir {
            builder = builder.working_dir(working_dir);
        }
        if matches!(
            &environment,
            Environment::Testing | Environment::Development
        ) {
            builder = builder.working_dir_from_manifest_dir();
        }

        builder = builder
            .config_file(args.config_file)
            .dotenv_file(args.dotenv_file)
            .environment(environment)
            .graceful_shutdown(args.graceful_shutdown.unwrap_or(true));

        Ok(builder)
    }

    pub async fn build(self) -> RecorderResult<App> {
        if self.working_dir != "." {
            std::env::set_current_dir(&self.working_dir)?;
            println!("set current dir to working dir: {}", self.working_dir);
        }

        self.load_env().await?;

        let config = self.load_config().await?;

        let app_context =
            AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?;

        Ok(App {
            context: app_context,
            builder: self,
        })
    }

    pub async fn load_env(&self) -> RecorderResult<()> {
        AppConfig::load_dotenv(&self.environment, self.dotenv_file.as_deref()).await?;
        Ok(())
    }

    pub async fn load_config(&self) -> RecorderResult<AppConfig> {
        let config = AppConfig::load_config(&self.environment, self.config_file.as_deref()).await?;
        Ok(config)
    }

    pub fn working_dir(self, working_dir: String) -> Self {
        let mut ret = self;
        ret.working_dir = working_dir;
        ret
    }

    pub fn environment(self, environment: Environment) -> Self {
        let mut ret = self;
        ret.environment = environment;
        ret
    }

    pub fn config_file(self, config_file: Option<String>) -> Self {
        let mut ret = self;
        ret.config_file = config_file;
        ret
    }

    pub fn graceful_shutdown(self, graceful_shutdown: bool) -> Self {
        let mut ret = self;
        ret.graceful_shutdown = graceful_shutdown;
        ret
    }

    pub fn dotenv_file(self, dotenv_file: Option<String>) -> Self {
        let mut ret = self;
        ret.dotenv_file = dotenv_file;
        ret
    }

    pub fn working_dir_from_manifest_dir(self) -> Self {
        #[cfg(any(test, debug_assertions, feature = "test-utils"))]
        let manifest_dir = env!("CARGO_MANIFEST_DIR");

        #[cfg(not(any(test, debug_assertions, feature = "test-utils")))]
        let manifest_dir = "./apps/recorder";

        self.working_dir(manifest_dir.to_string())
    }
}

impl Default for AppBuilder {
    fn default() -> Self {
        Self {
            environment: Environment::Production,
            dotenv_file: None,
            config_file: None,
            working_dir: String::from("."),
            graceful_shutdown: true,
        }
    }
}
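For context, a binary entrypoint would typically chain from_main_cli, build, and App::serve (the latter is defined further down in core.rs). This is a hedged sketch of such a main, not a file present in this diff.

use recorder::{app::AppBuilder, errors::RecorderResult};

#[tokio::main]
async fn main() -> RecorderResult<()> {
    // Resolve environment, working dir, and config path from CLI flags and defaults.
    let builder = AppBuilder::from_main_cli(None).await?;
    // Load dotenv + config, construct the AppContext, then run the server and task runner.
    let app = builder.build().await?;
    app.serve().await?;
    Ok(())
}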
31  apps/recorder/src/app/config/default_mixin.toml  (new file)
@@ -0,0 +1,31 @@
[storage]
data_dir = "./data"

[mikan]
base_url = "https://mikanani.me/"

[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 0
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500


[mikan.http_client.proxy]

[mikan.http_client.proxy.headers]

[graphql]
depth_limit = inf
complexity_limit = inf

[cache]

[crypto]

[task]

[message]

[media]
323  apps/recorder/src/app/config/mod.rs  (new file)
@@ -0,0 +1,323 @@
use std::{
    collections::HashMap,
    fs,
    path::Path,
    str::{self, FromStr},
};

use figment::{
    Figment, Provider,
    providers::{Env, Format, Json, Toml, Yaml},
};
use itertools::Itertools;
use serde::{Deserialize, Serialize};

use super::env::Environment;
use crate::{
    auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
    errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
    logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
    task::TaskConfig, web::WebServerConfig,
};

const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
const CONFIG_ALLOWED_EXTENSIONS: &[&str] = &[".toml", ".json", ".yaml", ".yml"];

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppConfig {
    pub server: WebServerConfig,
    pub cache: CacheConfig,
    pub auth: AuthConfig,
    pub storage: StorageConfig,
    pub mikan: MikanConfig,
    pub crypto: CryptoConfig,
    pub graphql: GraphQLConfig,
    pub media: MediaConfig,
    pub logger: LoggerConfig,
    pub database: DatabaseConfig,
    pub task: TaskConfig,
    pub message: MessageConfig,
}

impl AppConfig {
    pub fn config_prefix() -> String {
        format!("{}.config", env!("CARGO_PKG_NAME"))
    }

    pub fn dotenv_prefix() -> String {
        String::from(".env")
    }

    pub fn allowed_extension() -> Vec<String> {
        CONFIG_ALLOWED_EXTENSIONS
            .iter()
            .map(|s| s.to_string())
            .collect_vec()
    }

    pub fn priority_suffix(environment: &Environment) -> Vec<String> {
        vec![
            format!(".{}.local", environment.full_name()),
            format!(".{}.local", environment.short_name()),
            String::from(".local"),
            format!(".{}", environment.full_name()),
            format!(".{}", environment.short_name()),
            String::from(""),
        ]
    }

    pub fn default_provider() -> impl Provider {
        Toml::string(DEFAULT_CONFIG_MIXIN)
    }

    fn build_enhanced_tera_engine() -> tera::Tera {
        let mut tera = tera::Tera::default();
        tera.register_filter(
            "cast_to",
            |value: &tera::Value,
             args: &HashMap<String, tera::Value>|
             -> tera::Result<tera::Value> {
                let target_type = args
                    .get("type")
                    .and_then(|v| v.as_str())
                    .ok_or_else(|| tera::Error::msg("invalid target type: should be string"))?;

                let target_type = TeraCastToFilterType::from_str(target_type)
                    .map_err(|e| tera::Error::msg(format!("invalid target type: {e}")))?;

                let input_str = value.as_str().unwrap_or("");

                match target_type {
                    TeraCastToFilterType::Boolean => {
                        let is_true = matches!(input_str.to_lowercase().as_str(), "true" | "1");
                        let is_false = matches!(input_str.to_lowercase().as_str(), "false" | "0");
                        if is_true {
                            Ok(tera::Value::Bool(true))
                        } else if is_false {
                            Ok(tera::Value::Bool(false))
                        } else {
                            Err(tera::Error::msg(
                                "target type is bool but value is not a boolean like true, false, \
                                 1, 0",
                            ))
                        }
                    }
                    TeraCastToFilterType::Integer => {
                        let parsed = input_str.parse::<i64>().map_err(|e| {
                            tera::Error::call_filter("invalid integer".to_string(), e)
                        })?;
                        Ok(tera::Value::Number(serde_json::Number::from(parsed)))
                    }
                    TeraCastToFilterType::Unsigned => {
                        let parsed = input_str.parse::<u64>().map_err(|e| {
                            tera::Error::call_filter("invalid unsigned integer".to_string(), e)
                        })?;
                        Ok(tera::Value::Number(serde_json::Number::from(parsed)))
                    }
                    TeraCastToFilterType::Float => {
                        let parsed = input_str.parse::<f64>().map_err(|e| {
                            tera::Error::call_filter("invalid float".to_string(), e)
                        })?;
                        Ok(tera::Value::Number(
                            serde_json::Number::from_f64(parsed).ok_or_else(|| {
                                tera::Error::msg("failed to convert f64 to serde_json::Number")
                            })?,
                        ))
                    }
                    TeraCastToFilterType::String => Ok(tera::Value::String(input_str.to_string())),
                    TeraCastToFilterType::Null => Ok(tera::Value::Null),
                }
            },
        );
        tera.register_filter(
            "try_auto_cast",
            |value: &tera::Value,
             _args: &HashMap<String, tera::Value>|
             -> tera::Result<tera::Value> {
                let input_str = value.as_str().unwrap_or("");

                if input_str == "null" {
                    return Ok(tera::Value::Null);
                }

                if matches!(input_str, "true" | "false") {
                    return Ok(tera::Value::Bool(input_str == "true"));
                }

                if let Ok(parsed) = input_str.parse::<i64>() {
                    return Ok(tera::Value::Number(serde_json::Number::from(parsed)));
                }

                if let Ok(parsed) = input_str.parse::<u64>() {
                    return Ok(tera::Value::Number(serde_json::Number::from(parsed)));
                }

                if let Ok(parsed) = input_str.parse::<f64>() {
                    return Ok(tera::Value::Number(
                        serde_json::Number::from_f64(parsed).ok_or_else(|| {
                            tera::Error::msg("failed to convert f64 to serde_json::Number")
                        })?,
                    ));
                }

                Ok(tera::Value::String(input_str.to_string()))
            },
        );
        tera
    }

    pub fn merge_provider_from_file(
        fig: Figment,
        filepath: impl AsRef<Path>,
        ext: &str,
    ) -> RecorderResult<Figment> {
        let content = fs::read_to_string(filepath)?;

        let mut tera_engine = AppConfig::build_enhanced_tera_engine();
        let rendered =
            tera_engine.render_str(&content, &tera::Context::from_value(serde_json::json!({}))?)?;

        Ok(match ext {
            ".toml" => fig.merge(Toml::string(&rendered)),
            ".json" => fig.merge(Json::string(&rendered)),
            ".yaml" | ".yml" => fig.merge(Yaml::string(&rendered)),
            _ => unreachable!("unsupported config extension"),
        })
    }

    pub async fn load_dotenv(
        environment: &Environment,
        dotenv_file: Option<&str>,
    ) -> RecorderResult<()> {
        let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
            vec![dotenv_file]
        } else {
            vec![Some(".")]
        };

        let priority_suffix = &AppConfig::priority_suffix(environment);
        let dotenv_prefix = AppConfig::dotenv_prefix();
        let try_filenames = priority_suffix
            .iter()
            .map(|ps| format!("{}{}", &dotenv_prefix, ps))
            .collect_vec();

        for try_dotenv_file_or_dir in try_dotenv_file_or_dirs.into_iter().flatten() {
            let try_dotenv_file_or_dir_path = Path::new(try_dotenv_file_or_dir);
            if try_dotenv_file_or_dir_path.exists() {
                if try_dotenv_file_or_dir_path.is_dir() {
                    for f in try_filenames.iter() {
                        let p = try_dotenv_file_or_dir_path.join(f);
                        if p.exists() && p.is_file() {
                            println!("Loading dotenv file: {}", p.display());
                            dotenvy::from_path(p)?;
                            break;
                        }
                    }
                } else if try_dotenv_file_or_dir_path.is_file() {
                    println!(
                        "Loading dotenv file: {}",
                        try_dotenv_file_or_dir_path.display()
                    );
                    dotenvy::from_path(try_dotenv_file_or_dir_path)?;
                    break;
                }
            }
        }

        Ok(())
    }

    pub async fn load_config(
        environment: &Environment,
        config_file: Option<&str>,
    ) -> RecorderResult<AppConfig> {
        let try_config_file_or_dirs = if config_file.is_some() {
            vec![config_file]
        } else {
            vec![Some(".")]
        };

        let allowed_extensions = &AppConfig::allowed_extension();
        let priority_suffix = &AppConfig::priority_suffix(environment);
        let convention_prefix = &AppConfig::config_prefix();

        let try_filenames = priority_suffix
            .iter()
            .flat_map(|ps| {
                allowed_extensions
                    .iter()
                    .map(move |ext| (format!("{convention_prefix}{ps}{ext}"), ext))
            })
            .collect_vec();

        let mut fig = Figment::from(AppConfig::default_provider());

        for try_config_file_or_dir in try_config_file_or_dirs.into_iter().flatten() {
            let try_config_file_or_dir_path = Path::new(try_config_file_or_dir);
            if try_config_file_or_dir_path.exists() {
                if try_config_file_or_dir_path.is_dir() {
                    for (f, ext) in try_filenames.iter() {
                        let p = try_config_file_or_dir_path.join(f);
                        if p.exists() && p.is_file() {
                            fig = AppConfig::merge_provider_from_file(fig, &p, ext)?;
                            println!("Loaded config file: {}", p.display());
                            break;
                        }
                    }
                } else if let Some(ext) = try_config_file_or_dir_path
                    .extension()
                    .and_then(|s| s.to_str())
                    && try_config_file_or_dir_path.is_file()
                {
                    fig =
                        AppConfig::merge_provider_from_file(fig, try_config_file_or_dir_path, ext)?;
                    println!(
                        "Loaded config file: {}",
                        try_config_file_or_dir_path.display()
                    );
                    break;
                }
            }
        }

        fig = fig.merge(Env::prefixed("").split("__").lowercase(true));

        let app_config: AppConfig = fig.extract()?;

        Ok(app_config)
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
enum TeraCastToFilterType {
    #[serde(alias = "str")]
    String,
    #[serde(alias = "bool")]
    Boolean,
    #[serde(alias = "int")]
    Integer,
    #[serde(alias = "uint")]
    Unsigned,
    #[serde(alias = "float")]
    Float,
    #[serde(alias = "null")]
    Null,
}

impl FromStr for TeraCastToFilterType {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "string" | "str" => Ok(TeraCastToFilterType::String),
            "boolean" | "bool" => Ok(TeraCastToFilterType::Boolean),
            "integer" | "int" => Ok(TeraCastToFilterType::Integer),
            "unsigned" | "uint" => Ok(TeraCastToFilterType::Unsigned),
            "float" => Ok(TeraCastToFilterType::Float),
            "null" => Ok(TeraCastToFilterType::Null),
            _ => Err(format!("invalid target type: {s}")),
        }
    }
}
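The final Env::prefixed("").split("__").lowercase(true) merge means nested keys can be overridden from the process environment with double-underscore separators, after the templated config files have been rendered by the tera engine above. A hedged sketch follows; the concrete key is only an illustration, since the field names of the nested config structs live in other modules.

use recorder::{
    app::{AppConfig, Environment},
    errors::RecorderResult,
};

// e.g. run as: DATABASE__MAX_CONNECTIONS=20 cargo run -p recorder
async fn load_with_env_override() -> RecorderResult<AppConfig> {
    // "DATABASE__MAX_CONNECTIONS" is split on "__" and lowercased, so it lands on
    // the `[database] max_connections` key before `fig.extract()` runs.
    AppConfig::load_config(&Environment::Development, None).await
}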
147  apps/recorder/src/app/context.rs  (new file)
@@ -0,0 +1,147 @@
use std::{fmt::Debug, sync::Arc};

use tokio::sync::OnceCell;

use super::{Environment, config::AppConfig};
use crate::{
    auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
    errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
    logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
    task::TaskService,
};

pub trait AppContextTrait: Send + Sync + Debug {
    fn logger(&self) -> &LoggerService;
    fn db(&self) -> &DatabaseService;
    fn config(&self) -> &AppConfig;
    fn cache(&self) -> &CacheService;
    fn mikan(&self) -> &MikanClient;
    fn auth(&self) -> &AuthService;
    fn graphql(&self) -> &GraphQLService;
    fn storage(&self) -> &StorageService;
    fn working_dir(&self) -> &String;
    fn environment(&self) -> &Environment;
    fn crypto(&self) -> &CryptoService;
    fn task(&self) -> &TaskService;
    fn message(&self) -> &MessageService;
    fn media(&self) -> &MediaService;
}

pub struct AppContext {
    logger: LoggerService,
    db: DatabaseService,
    config: AppConfig,
    cache: CacheService,
    mikan: MikanClient,
    auth: AuthService,
    storage: StorageService,
    crypto: CryptoService,
    working_dir: String,
    environment: Environment,
    message: MessageService,
    media: MediaService,
    task: OnceCell<TaskService>,
    graphql: OnceCell<GraphQLService>,
}

impl AppContext {
    pub async fn new(
        environment: Environment,
        config: AppConfig,
        working_dir: impl ToString,
    ) -> RecorderResult<Arc<Self>> {
        let config_cloned = config.clone();

        let logger = LoggerService::from_config(config.logger).await?;
        let cache = CacheService::from_config(config.cache).await?;
        let db = DatabaseService::from_config(config.database).await?;
        let storage = StorageService::from_config(config.storage).await?;
        let message = MessageService::from_config(config.message).await?;
        let auth = AuthService::from_conf(config.auth).await?;
        let mikan = MikanClient::from_config(config.mikan).await?;
        let crypto = CryptoService::from_config(config.crypto).await?;
        let media = MediaService::from_config(config.media).await?;

        let ctx = Arc::new(AppContext {
            config: config_cloned,
            environment,
            logger,
            auth,
            cache,
            db,
            storage,
            mikan,
            working_dir: working_dir.to_string(),
            crypto,
            message,
            media,
            task: OnceCell::new(),
            graphql: OnceCell::new(),
        });

        ctx.task
            .get_or_try_init(async || {
                TaskService::from_config_and_ctx(config.task, ctx.clone()).await
            })
            .await?;

        ctx.graphql
            .get_or_try_init(async || {
                GraphQLService::from_config_and_ctx(config.graphql, ctx.clone()).await
            })
            .await?;

        Ok(ctx)
    }
}

impl Debug for AppContext {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "AppContext")
    }
}

impl AppContextTrait for AppContext {
    fn logger(&self) -> &LoggerService {
        &self.logger
    }
    fn db(&self) -> &DatabaseService {
        &self.db
    }
    fn config(&self) -> &AppConfig {
        &self.config
    }
    fn cache(&self) -> &CacheService {
        &self.cache
    }
    fn mikan(&self) -> &MikanClient {
        &self.mikan
    }
    fn auth(&self) -> &AuthService {
        &self.auth
    }
    fn graphql(&self) -> &GraphQLService {
        self.graphql.get().expect("graphql should be set")
    }
    fn storage(&self) -> &StorageService {
        &self.storage
    }
    fn working_dir(&self) -> &String {
        &self.working_dir
    }
    fn environment(&self) -> &Environment {
        &self.environment
    }
    fn crypto(&self) -> &CryptoService {
        &self.crypto
    }
    fn task(&self) -> &TaskService {
        self.task.get().expect("task should be set")
    }
    fn message(&self) -> &MessageService {
        &self.message
    }
    fn media(&self) -> &MediaService {
        &self.media
    }
}
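Downstream code takes Arc<dyn AppContextTrait> (or &dyn AppContextTrait) rather than the concrete AppContext, which is presumably how the TestingAppContext used in examples/playground.rs slots in as well. A small illustrative helper, assumed rather than taken from the diff:

use std::sync::Arc;

use recorder::app::AppContextTrait;

// Hypothetical helper: any service hanging off the context is reachable through the trait.
fn describe_context(ctx: &Arc<dyn AppContextTrait>) -> String {
    format!(
        "env={}, working_dir={}",
        ctx.environment().full_name(),
        ctx.working_dir()
    )
}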
160  apps/recorder/src/app/core.rs  (new file)
@@ -0,0 +1,160 @@
use std::{net::SocketAddr, sync::Arc};

use axum::{Router, middleware::from_fn_with_state};
use tokio::{net::TcpSocket, signal};
use tower_http::services::{ServeDir, ServeFile};
use tracing::instrument;

use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{
    auth::webui_auth_middleware,
    errors::{RecorderError, RecorderResult},
    web::{
        controller::{self, core::ControllerTrait},
        middleware::default_middleware_stack,
    },
};

pub const PROJECT_NAME: &str = "konobangu";

pub struct App {
    pub context: Arc<dyn AppContextTrait>,
    pub builder: AppBuilder,
}

impl App {
    pub fn builder() -> AppBuilder {
        AppBuilder::default()
    }

    #[instrument(err, skip(self))]
    pub async fn serve(&self) -> RecorderResult<()> {
        let context = &self.context;
        let config = context.config();

        let listener = {
            let addr: SocketAddr =
                format!("{}:{}", config.server.binding, config.server.port).parse()?;

            let socket = if addr.is_ipv4() {
                TcpSocket::new_v4()
            } else {
                TcpSocket::new_v6()
            }?;

            socket.set_reuseaddr(true)?;

            #[cfg(all(unix, not(target_os = "solaris")))]
            if let Err(e) = socket.set_reuseport(true) {
                tracing::warn!("Failed to set SO_REUSEPORT: {}", e);
            }

            socket.bind(addr)?;
            socket.listen(1024)
        }?;

        let mut router = Router::<Arc<dyn AppContextTrait>>::new();

        let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
            controller::graphql::create(context.clone()),
            controller::oidc::create(context.clone()),
            controller::metadata::create(context.clone()),
            controller::r#static::create(context.clone()),
            controller::feeds::create(context.clone())
        )?;

        for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
            router = c.apply_to(router);
        }

        router = router
            .fallback_service(
                ServeDir::new("webui").not_found_service(ServeFile::new("webui/index.html")),
            )
            .layer(from_fn_with_state(context.clone(), webui_auth_middleware));

        let middlewares = default_middleware_stack(context.clone());
        for mid in middlewares {
            if mid.is_enabled() {
                router = mid.apply(router)?;
                tracing::info!(name = mid.name(), "+middleware");
            }
        }

        let router = router
            .with_state(context.clone())
            .into_make_service_with_connect_info::<SocketAddr>();

        let task = context.task();

        let graceful_shutdown = self.builder.graceful_shutdown;

        tokio::try_join!(
            async {
                let axum_serve = axum::serve(listener, router);

                if graceful_shutdown {
                    axum_serve
                        .with_graceful_shutdown(async move {
                            Self::shutdown_signal().await;
                            tracing::info!("axum shutting down...");
                        })
                        .await?;
                } else {
                    axum_serve.await?;
                }

                Ok::<(), RecorderError>(())
            },
            async {
                task.run_with_signal(if graceful_shutdown {
                    Some(Self::shutdown_signal)
                } else {
                    None
                })
                .await?;

                Ok::<(), RecorderError>(())
            }
        )?;

        Ok(())
    }

    async fn shutdown_signal() {
        let ctrl_c = async {
            signal::ctrl_c()
                .await
                .expect("failed to install Ctrl+C handler");
        };

        #[cfg(unix)]
        let terminate = async {
            signal::unix::signal(signal::unix::SignalKind::terminate())
                .expect("failed to install signal handler")
                .recv()
                .await;
        };

        #[cfg(all(unix, debug_assertions))]
        let quit = async {
            signal::unix::signal(signal::unix::SignalKind::quit())
                .expect("Failed to install SIGQUIT handler")
                .recv()
                .await;
            println!("Received SIGQUIT");
        };

        #[cfg(not(unix))]
        let terminate = std::future::pending::<()>();

        #[cfg(not(all(unix, debug_assertions)))]
        let quit = std::future::pending::<()>();

        tokio::select! {
            () = ctrl_c => {},
            () = terminate => {},
            () = quit => {},
        }
    }
}
35  apps/recorder/src/app/env.rs  (new file)
@@ -0,0 +1,35 @@
use clap::ValueEnum;
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize, ValueEnum)]
#[serde(rename_all = "snake_case")]
#[value(rename_all = "snake_case")]
pub enum Environment {
    #[serde(alias = "dev")]
    #[value(alias = "dev")]
    Development,
    #[serde(alias = "prod")]
    #[value(alias = "prod")]
    Production,
    #[serde(alias = "test")]
    #[value(alias = "test")]
    Testing,
}

impl Environment {
    pub fn full_name(&self) -> &'static str {
        match &self {
            Self::Development => "development",
            Self::Production => "production",
            Self::Testing => "testing",
        }
    }

    pub fn short_name(&self) -> &'static str {
        match &self {
            Self::Development => "dev",
            Self::Production => "prod",
            Self::Testing => "test",
        }
    }
}
12  apps/recorder/src/app/mod.rs  (new file)
@@ -0,0 +1,12 @@
pub mod builder;
pub mod config;
pub mod context;
pub mod core;
pub mod env;

pub use core::{App, PROJECT_NAME};

pub use builder::AppBuilder;
pub use config::AppConfig;
pub use context::{AppContext, AppContextTrait};
pub use env::Environment;
95  apps/recorder/src/auth/basic.rs  (new file)
@@ -0,0 +1,95 @@
use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts};
use base64::{self, Engine};
use http::header::AUTHORIZATION;

use super::{
    config::BasicAuthConfig,
    errors::AuthError,
    service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{
    app::{AppContextTrait, PROJECT_NAME},
    models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
};

#[derive(Debug, PartialEq, Eq, Clone)]
pub struct AuthBasic {
    pub user: String,
    pub password: Option<String>,
}

impl AuthBasic {
    fn decode_request_parts(req: &mut Parts) -> Result<Self, AuthError> {
        let authorization = req
            .headers
            .get(AUTHORIZATION)
            .and_then(|s| s.to_str().ok())
            .ok_or(AuthError::BasicInvalidCredentials)?;

        let split = authorization.split_once(' ');

        match split {
            Some(("Basic", contents)) => {
                let decoded = base64::engine::general_purpose::STANDARD
                    .decode(contents)
                    .map_err(|_| AuthError::BasicInvalidCredentials)?;

                let decoded =
                    String::from_utf8(decoded).map_err(|_| AuthError::BasicInvalidCredentials)?;

                Ok(if let Some((user, password)) = decoded.split_once(':') {
                    Self {
                        user: String::from(user),
                        password: Some(String::from(password)),
                    }
                } else {
                    Self {
                        user: decoded,
                        password: None,
                    }
                })
            }
            _ => Err(AuthError::BasicInvalidCredentials),
        }
    }
}

#[derive(Debug)]
pub struct BasicAuthService {
    pub config: BasicAuthConfig,
}

#[async_trait]
impl AuthServiceTrait for BasicAuthService {
    async fn extract_user_info(
        &self,
        ctx: &dyn AppContextTrait,
        request: &mut Parts,
    ) -> Result<AuthUserInfo, AuthError> {
        if let Ok(AuthBasic {
            user: found_user,
            password: found_password,
        }) = AuthBasic::decode_request_parts(request)
            && self.config.user == found_user
            && self.config.password == found_password.unwrap_or_default()
        {
            let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
                .await
                .map_err(|_| AuthError::FindAuthRecordError)?;
            return Ok(AuthUserInfo {
                subscriber_auth,
                auth_type: AuthType::Basic,
            });
        }
        Err(AuthError::BasicInvalidCredentials)
    }

    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
        Some(HeaderValue::from_str(format!("Basic realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
    }

    fn auth_type(&self) -> AuthType {
        AuthType::Basic
    }
}
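AuthBasic::decode_request_parts expects the standard "Authorization: Basic <base64(user:password)>" form. A hedged sketch of producing a matching header value on the client side (not part of the diff):

use base64::Engine;

// Builds the header value that `AuthBasic::decode_request_parts` will accept.
fn basic_authorization_header(user: &str, password: &str) -> String {
    let encoded = base64::engine::general_purpose::STANDARD.encode(format!("{user}:{password}"));
    format!("Basic {encoded}")
}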
37  apps/recorder/src/auth/config.rs  (new file)
@@ -0,0 +1,37 @@
use std::collections::HashMap;

use jwtk::OneOrMany;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct BasicAuthConfig {
    #[serde(rename = "basic_user")]
    pub user: String,
    #[serde(rename = "basic_password")]
    pub password: String,
}

#[serde_as]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct OidcAuthConfig {
    #[serde(rename = "oidc_issuer")]
    pub issuer: String,
    #[serde(rename = "oidc_audience")]
    pub audience: String,
    #[serde(rename = "oidc_client_id")]
    pub client_id: String,
    #[serde(rename = "oidc_client_secret")]
    pub client_secret: String,
    #[serde(rename = "oidc_extra_scopes")]
    pub extra_scopes: Option<OneOrMany<String>>,
    #[serde(rename = "oidc_extra_claims")]
    pub extra_claims: Option<HashMap<String, Option<String>>>,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "auth_type", rename_all = "snake_case")]
pub enum AuthConfig {
    Basic(BasicAuthConfig),
    Oidc(OidcAuthConfig),
}
142  apps/recorder/src/auth/errors.rs  (new file)
@@ -0,0 +1,142 @@
use async_graphql::dynamic::ResolverContext;
use axum::{
    Json,
    http::StatusCode,
    response::{IntoResponse, Response},
};
use fetch::HttpClientError;
use openidconnect::{
    ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
    StandardErrorResponse, core::CoreErrorResponseType,
};
use serde::{Deserialize, Serialize};
use snafu::prelude::*;

use crate::models::auth::AuthType;

#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum AuthError {
    #[snafu(display("Permission denied"))]
    PermissionError,
    #[snafu(display("Not support auth method"))]
    NotSupportAuthMethod {
        supported: Vec<AuthType>,
        current: AuthType,
    },
    #[snafu(display("Failed to find auth record"))]
    FindAuthRecordError,
    #[snafu(display("Invalid credentials"))]
    BasicInvalidCredentials,
    #[snafu(display("Invalid oidc provider meta client error: {source}"))]
    OidcProviderHttpClientError { source: HttpClientError },
    #[snafu(transparent)]
    OidcProviderMetaError {
        source: openidconnect::DiscoveryError<HttpClientError>,
    },
    #[snafu(display("Invalid oidc provider URL: {source}"))]
    OidcProviderUrlError { source: url::ParseError },
    #[snafu(display("Invalid oidc redirect URI: {source}"))]
    OidcRequestRedirectUriError {
        #[snafu(source)]
        source: url::ParseError,
    },
    #[snafu(display("Oidc request session not found or expired"))]
    OidcCallbackRecordNotFoundOrExpiredError,
    #[snafu(display("Invalid oidc request callback nonce"))]
    OidcInvalidNonceError,
    #[snafu(display("Invalid oidc request callback state"))]
    OidcInvalidStateError,
    #[snafu(display("Invalid oidc request callback code"))]
    OidcInvalidCodeError,
    #[snafu(transparent)]
    OidcCallbackTokenConfigurationError { source: ConfigurationError },
    #[snafu(transparent)]
    OidcRequestTokenError {
        source: RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
    },
    #[snafu(display("Invalid oidc id token"))]
    OidcInvalidIdTokenError,
    #[snafu(display("Invalid oidc access token"))]
    OidcInvalidAccessTokenError,
    #[snafu(transparent)]
    OidcSignatureVerificationError { source: SignatureVerificationError },
    #[snafu(transparent)]
    OidcSigningError { source: SigningError },
    #[snafu(display("Missing Bearer token"))]
    OidcMissingBearerToken,
    #[snafu(transparent)]
    OidcJwtkError { source: jwtk::Error },
    #[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
    OidcExtraScopesMatchError { expected: String, found: String },
    #[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
    OidcExtraClaimMatchError {
        key: String,
        expected: String,
        found: String,
    },
    #[snafu(display("Extra claim {claim} missing"))]
    OidcExtraClaimMissingError { claim: String },
    #[snafu(display("Audience {aud} missing"))]
    OidcAudMissingError { aud: String },
    #[snafu(display("Subject missing"))]
    OidcSubMissingError,
    #[snafu(display(
        "GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
        (if field.is_empty() { "" } else { "." }),
        (if column.is_empty() { "" } else { "." }),
        source.message
    ))]
    GraphqlDynamicPermissionError {
        #[snafu(source(false))]
        source: Box<async_graphql::Error>,
        field: String,
        column: String,
        context_path: String,
    },
}

impl AuthError {
    pub fn from_graphql_dynamic_subscribe_id_guard(
        source: async_graphql::Error,
        context: &ResolverContext,
        field_name: &str,
        column_name: &str,
    ) -> AuthError {
        AuthError::GraphqlDynamicPermissionError {
            source: Box::new(source),
            field: field_name.to_string(),
            column: column_name.to_string(),
            context_path: context
                .ctx
                .path_node
                .map(|p| p.to_string_vec().join(""))
                .unwrap_or_default(),
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AuthErrorResponse {
    pub success: bool,
    pub message: String,
}

impl From<AuthError> for AuthErrorResponse {
    fn from(value: AuthError) -> Self {
        AuthErrorResponse {
            success: false,
            message: value.to_string(),
        }
    }
}

impl IntoResponse for AuthError {
    fn into_response(self) -> Response {
        (
            StatusCode::UNAUTHORIZED,
            Json(AuthErrorResponse::from(self)),
        )
            .into_response()
    }
}
77 apps/recorder/src/auth/middleware.rs Normal file
@@ -0,0 +1,77 @@
use std::sync::Arc;

use axum::{
    extract::{Request, State},
    http::header,
    middleware::Next,
    response::{IntoResponse, Response},
};

use crate::{
    app::AppContextTrait,
    auth::{AuthService, AuthServiceTrait},
};

pub async fn auth_middleware(
    State(ctx): State<Arc<dyn AppContextTrait>>,
    request: Request,
    next: Next,
) -> Response {
    let auth_service = ctx.auth();

    let (mut parts, body) = request.into_parts();

    let mut response = match auth_service
        .extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
        .await
    {
        Ok(auth_user_info) => {
            let mut request = Request::from_parts(parts, body);
            request.extensions_mut().insert(auth_user_info);
            next.run(request).await
        }
        Err(auth_error) => auth_error.into_response(),
    };

    if let Some(header_value) = auth_service.www_authenticate_header_value() {
        response
            .headers_mut()
            .insert(header::WWW_AUTHENTICATE, header_value);
    };

    response
}

pub async fn webui_auth_middleware(
    State(ctx): State<Arc<dyn AppContextTrait>>,
    request: Request,
    next: Next,
) -> Response {
    if (!request.uri().path().starts_with("/api"))
        && let AuthService::Basic(auth_service) = ctx.auth()
    {
        let (mut parts, body) = request.into_parts();

        let mut response = match auth_service
            .extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
            .await
        {
            Ok(auth_user_info) => {
                let mut request = Request::from_parts(parts, body);
                request.extensions_mut().insert(auth_user_info);
                next.run(request).await
            }
            Err(auth_error) => auth_error.into_response(),
        };

        if let Some(header_value) = auth_service.www_authenticate_header_value() {
            response
                .headers_mut()
                .insert(header::WWW_AUTHENTICATE, header_value);
        };

        response
    } else {
        next.run(request).await
    }
}
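As a rough sketch of how these middleware functions are meant to be mounted (the route below is an assumption, not the crate's actual routing code), `axum::middleware::from_fn_with_state` adapts them into a layer with the shared application context as state:

```rust
use std::sync::Arc;

use axum::{Router, middleware::from_fn_with_state, routing::get};

use crate::{app::AppContextTrait, auth::auth_middleware};

// Hypothetical protected router; the real application registers its own handlers.
fn protected_api(ctx: Arc<dyn AppContextTrait>) -> Router {
    Router::new()
        .route("/api/whoami", get(|| async { "ok" }))
        // every request passes through auth_middleware before reaching the handler
        .layer(from_fn_with_state(ctx, auth_middleware))
}
```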
11 apps/recorder/src/auth/mod.rs Normal file
@@ -0,0 +1,11 @@
pub mod basic;
pub mod config;
pub mod errors;
pub mod middleware;
pub mod oidc;
pub mod service;

pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig};
pub use errors::AuthError;
pub use middleware::{auth_middleware, webui_auth_middleware};
pub use service::{AuthService, AuthServiceTrait, AuthUserInfo};
363 apps/recorder/src/auth/oidc.rs Normal file
@@ -0,0 +1,363 @@
use std::{
    collections::{HashMap, HashSet},
    future::Future,
    ops::Deref,
    pin::Pin,
    sync::Arc,
};

use async_trait::async_trait;
use axum::{
    http,
    http::{HeaderValue, request::Parts},
};
use fetch::{HttpClient, client::HttpClientError};
use http::header::AUTHORIZATION;
use itertools::Itertools;
use jwtk::jwk::RemoteJwksVerifier;
use moka::future::Cache;
use openidconnect::{
    AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
    OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse,
    core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use snafu::ResultExt;
use url::Url;

use super::{
    config::OidcAuthConfig,
    errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
    service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{
    app::{AppContextTrait, PROJECT_NAME},
    errors::RecorderError,
    models::auth::AuthType,
};

pub struct OidcHttpClient(pub Arc<HttpClient>);

impl Deref for OidcHttpClient {
    type Target = HttpClient;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<'c> openidconnect::AsyncHttpClient<'c> for OidcHttpClient {
    type Error = HttpClientError;

    #[cfg(target_arch = "wasm32")]
    type Future =
        Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + 'c>>;
    #[cfg(not(target_arch = "wasm32"))]
    type Future =
        Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;

    fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
        Box::pin(async move {
            let response = self.execute(request.try_into()?).await?;

            let mut builder = http::Response::builder().status(response.status());

            #[cfg(not(target_arch = "wasm32"))]
            {
                builder = builder.version(response.version());
            }

            for (name, value) in response.headers().iter() {
                builder = builder.header(name, value);
            }

            builder
                .body(response.bytes().await?.to_vec())
                .map_err(HttpClientError::from)
        })
    }
}

#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OidcAuthClaims {
    pub scope: Option<String>,
    #[serde(flatten)]
    pub custom: HashMap<String, Value>,
}

impl OidcAuthClaims {
    pub fn scopes(&self) -> std::str::Split<'_, char> {
        self.scope.as_deref().unwrap_or_default().split(',')
    }
}

#[derive(Debug, Clone, Serialize)]
pub struct OidcAuthRequest {
    pub auth_uri: Url,
    #[serde(skip)]
    pub redirect_uri: RedirectUrl,
    #[serde(skip)]
    pub csrf_token: CsrfToken,
    #[serde(skip)]
    pub nonce: Nonce,
    #[serde(skip)]
    pub pkce_verifier: Arc<PkceCodeVerifier>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OidcAuthCallbackQuery {
    pub state: Option<String>,
    pub code: Option<String>,
    pub redirect_uri: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OidcAuthCallbackPayload {
    pub access_token: String,
}

pub struct OidcAuthService {
    pub config: OidcAuthConfig,
    pub jwk_verifier: RemoteJwksVerifier,
    pub oidc_provider_client: Arc<HttpClient>,
    pub oidc_request_cache: Cache<String, OidcAuthRequest>,
}

impl OidcAuthService {
    pub async fn build_authorization_request(
        &self,
        redirect_uri: &str,
    ) -> Result<OidcAuthRequest, AuthError> {
        let oidc_provider_client = OidcHttpClient(self.oidc_provider_client.clone());
        let provider_metadata = CoreProviderMetadata::discover_async(
            IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
            &oidc_provider_client,
        )
        .await?;

        let redirect_uri =
            RedirectUrl::new(redirect_uri.to_string()).context(OidcRequestRedirectUriSnafu)?;

        let oidc_client = CoreClient::from_provider_metadata(
            provider_metadata,
            ClientId::new(self.config.client_id.clone()),
            Some(ClientSecret::new(self.config.client_secret.clone())),
        )
        .set_redirect_uri(redirect_uri.clone());

        let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256();

        let mut authorization_request = oidc_client
            .authorize_url(
                CoreAuthenticationFlow::AuthorizationCode,
                CsrfToken::new_random,
                Nonce::new_random,
            )
            .set_pkce_challenge(pkce_challenge);

        {
            if let Some(scopes) = self.config.extra_scopes.as_ref() {
                authorization_request = authorization_request.add_scopes(
                    scopes
                        .iter()
                        .map(|s| openidconnect::Scope::new(s.to_string())),
                )
            }
        }

        let (auth_uri, csrf_token, nonce) = authorization_request.url();

        Ok(OidcAuthRequest {
            auth_uri,
            csrf_token,
            nonce,
            pkce_verifier: Arc::new(pkce_verifier),
            redirect_uri,
        })
    }

    pub async fn store_authorization_request(
        &self,
        request: OidcAuthRequest,
    ) -> Result<(), AuthError> {
        self.oidc_request_cache
            .insert(request.csrf_token.secret().to_string(), request)
            .await;
        Ok(())
    }

    pub async fn load_authorization_request(
        &self,
        state: &str,
    ) -> Result<OidcAuthRequest, AuthError> {
        let result = self
            .oidc_request_cache
            .get(state)
            .await
            .ok_or(AuthError::OidcCallbackRecordNotFoundOrExpiredError)?;

        self.oidc_request_cache.invalidate(state).await;

        Ok(result)
    }

    pub async fn extract_authorization_request_callback(
        &self,
        query: OidcAuthCallbackQuery,
    ) -> Result<OidcAuthCallbackPayload, AuthError> {
        let oidc_http_client = OidcHttpClient(self.oidc_provider_client.clone());
        let csrf_token = query.state.ok_or(AuthError::OidcInvalidStateError)?;

        let code = query.code.ok_or(AuthError::OidcInvalidCodeError)?;

        let request_cache = self.load_authorization_request(&csrf_token).await?;

        let provider_metadata = CoreProviderMetadata::discover_async(
            IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
            &oidc_http_client,
        )
        .await?;

        let oidc_client = CoreClient::from_provider_metadata(
            provider_metadata,
            ClientId::new(self.config.client_id.clone()),
            Some(ClientSecret::new(self.config.client_secret.clone())),
        )
        .set_redirect_uri(request_cache.redirect_uri);

        let pkce_verifier = PkceCodeVerifier::new(request_cache.pkce_verifier.secret().to_string());

        let token_response = oidc_client
            .exchange_code(AuthorizationCode::new(code))?
            .set_pkce_verifier(pkce_verifier)
            .request_async(&oidc_http_client)
            .await?;

        let id_token = token_response
            .id_token()
            .ok_or(AuthError::OidcInvalidIdTokenError)?;

        let id_token_verifier = &oidc_client.id_token_verifier();

        let claims = id_token
            .claims(id_token_verifier, &request_cache.nonce)
            .map_err(|_| AuthError::OidcInvalidNonceError)?;

        let access_token = token_response.access_token();

        let actual_access_token_hash = AccessTokenHash::from_token(
            access_token,
            id_token.signing_alg()?,
            id_token.signing_key(id_token_verifier)?,
        )?;

        if let Some(expected_access_token_hash) = claims.access_token_hash()
            && actual_access_token_hash != *expected_access_token_hash
        {
            return Err(AuthError::OidcInvalidAccessTokenError);
        }

        Ok(OidcAuthCallbackPayload {
            access_token: access_token.secret().to_string(),
        })
    }
}

#[async_trait]
impl AuthServiceTrait for OidcAuthService {
    async fn extract_user_info(
        &self,
        ctx: &dyn AppContextTrait,
        request: &mut Parts,
    ) -> Result<AuthUserInfo, AuthError> {
        let config = &self.config;
        let token = request
            .headers
            .get(AUTHORIZATION)
            .and_then(|authorization| {
                authorization
                    .to_str()
                    .ok()
                    .and_then(|s| s.strip_prefix("Bearer "))
            })
            .ok_or(AuthError::OidcMissingBearerToken)?;

        let token_data = self.jwk_verifier.verify::<OidcAuthClaims>(token).await?;
        let claims = token_data.claims();
        let sub = if let Some(sub) = claims.sub.as_deref() {
            sub
        } else {
            return Err(AuthError::OidcSubMissingError);
        };
        if !claims.aud.iter().any(|aud| aud == &config.audience) {
            return Err(AuthError::OidcAudMissingError {
                aud: config.audience.clone(),
            });
        }
        let extra_claims = &claims.extra;
        if let Some(expected_scopes) = config.extra_scopes.as_ref() {
            let found_scopes = extra_claims.scopes().collect::<HashSet<_>>();
            if !expected_scopes
                .iter()
                .all(|es| found_scopes.contains(es as &str))
            {
                return Err(AuthError::OidcExtraScopesMatchError {
                    expected: expected_scopes.iter().join(","),
                    found: extra_claims
                        .scope
                        .as_deref()
                        .unwrap_or_default()
                        .to_string(),
                });
            }
        }
        if let Some(expected_extra_claims) = config.extra_claims.as_ref() {
            for (expected_key, expected_value) in expected_extra_claims.iter() {
                match (extra_claims.custom.get(expected_key), expected_value) {
                    (found_value, Some(expected_value)) => {
                        if let Some(Value::String(found_value)) = found_value
                            && expected_value == found_value
                        {
                        } else {
                            return Err(AuthError::OidcExtraClaimMatchError {
                                expected: expected_value.clone(),
                                found: found_value.map(|v| v.to_string()).unwrap_or_default(),
                                key: expected_key.clone(),
                            });
                        }
                    }
                    (None, None) => {
                        return Err(AuthError::OidcExtraClaimMissingError {
                            claim: expected_key.clone(),
                        });
                    }
                    _ => {}
                }
            }
        }
        let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
            Err(RecorderError::ModelEntityNotFound { .. }) => {
                crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
            }
            r => r,
        }
        .map_err(|e| {
            tracing::error!("Error finding auth record: {:?}", e);
            AuthError::FindAuthRecordError
        })?;

        Ok(AuthUserInfo {
            subscriber_auth,
            auth_type: AuthType::Oidc,
        })
    }

    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
        Some(HeaderValue::from_str(format!("Bearer realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
    }

    fn auth_type(&self) -> AuthType {
        AuthType::Oidc
    }
}
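Taken together, the request cache and the callback exchange compose into the usual authorization-code-with-PKCE round trip. The sketch below is an assumed handler shape (function names and the redirect URI are placeholders), not the crate's actual web layer:

```rust
use url::Url;

use crate::auth::{
    AuthError,
    oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthService},
};

// Step 1: build the provider authorization URL and remember the request by its CSRF state.
async fn begin_login(svc: &OidcAuthService) -> Result<Url, AuthError> {
    let request = svc
        .build_authorization_request("https://app.example.com/oidc/callback")
        .await?;
    let auth_uri = request.auth_uri.clone();
    svc.store_authorization_request(request).await?;
    Ok(auth_uri) // redirect the browser here
}

// Step 2: on callback, the state looks up the cached request, the code is exchanged,
// and the nonce plus access-token hash are verified before the token is returned.
async fn finish_login(
    svc: &OidcAuthService,
    query: OidcAuthCallbackQuery,
) -> Result<OidcAuthCallbackPayload, AuthError> {
    svc.extract_authorization_request_callback(query).await
}
```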
115 apps/recorder/src/auth/service.rs Normal file
@@ -0,0 +1,115 @@
use std::{sync::Arc, time::Duration};

use async_trait::async_trait;
use axum::http::request::Parts;
use fetch::{
    HttpClient, HttpClientConfig,
    client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
};
use http::header::HeaderValue;
use jwtk::jwk::RemoteJwksVerifier;
use moka::future::Cache;
use openidconnect::{IssuerUrl, core::CoreProviderMetadata};
use snafu::prelude::*;

use super::{
    AuthConfig,
    basic::BasicAuthService,
    errors::{AuthError, OidcProviderHttpClientSnafu, OidcProviderUrlSnafu},
    oidc::{OidcAuthService, OidcHttpClient},
};
use crate::{app::AppContextTrait, models::auth::AuthType};

#[derive(Clone, Debug)]
pub struct AuthUserInfo {
    pub subscriber_auth: crate::models::auth::Model,
    pub auth_type: AuthType,
}

#[async_trait]
pub trait AuthServiceTrait {
    async fn extract_user_info(
        &self,
        ctx: &dyn AppContextTrait,
        request: &mut Parts,
    ) -> Result<AuthUserInfo, AuthError>;
    fn www_authenticate_header_value(&self) -> Option<HeaderValue>;
    fn auth_type(&self) -> AuthType;
}

pub enum AuthService {
    Basic(Box<BasicAuthService>),
    Oidc(Box<OidcAuthService>),
}

impl AuthService {
    pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> {
        let result = match config {
            AuthConfig::Basic(config) => AuthService::Basic(Box::new(BasicAuthService { config })),
            AuthConfig::Oidc(config) => {
                let oidc_provider_client = Arc::new(
                    HttpClient::from_config(HttpClientConfig {
                        exponential_backoff_max_retries: Some(3),
                        cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
                        cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
                        ..Default::default()
                    })
                    .context(OidcProviderHttpClientSnafu)?,
                );

                let provider_metadata = {
                    let client = OidcHttpClient(oidc_provider_client.clone());
                    let issuer_url =
                        IssuerUrl::new(config.issuer.clone()).context(OidcProviderUrlSnafu)?;
                    CoreProviderMetadata::discover_async(issuer_url, &client).await
                }?;

                let jwk_verifier = RemoteJwksVerifier::new(
                    provider_metadata.jwks_uri().to_string().clone(),
                    None,
                    Duration::from_secs(300),
                );

                AuthService::Oidc(Box::new(OidcAuthService {
                    config,
                    jwk_verifier,
                    oidc_provider_client,
                    oidc_request_cache: Cache::builder()
                        .time_to_live(Duration::from_mins(5))
                        .name("oidc_request_cache")
                        .build(),
                }))
            }
        };
        Ok(result)
    }
}

#[async_trait]
impl AuthServiceTrait for AuthService {
    #[tracing::instrument(skip(self, ctx, request))]
    async fn extract_user_info(
        &self,
        ctx: &dyn AppContextTrait,
        request: &mut Parts,
    ) -> Result<AuthUserInfo, AuthError> {
        match self {
            AuthService::Basic(service) => service.extract_user_info(ctx, request).await,
            AuthService::Oidc(service) => service.extract_user_info(ctx, request).await,
        }
    }

    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
        match self {
            AuthService::Basic(service) => service.www_authenticate_header_value(),
            AuthService::Oidc(service) => service.www_authenticate_header_value(),
        }
    }

    fn auth_type(&self) -> AuthType {
        match self {
            AuthService::Basic(service) => service.auth_type(),
            AuthService::Oidc(service) => service.auth_type(),
        }
    }
}
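A brief sketch of the intended startup wiring; where the `AuthConfig` comes from is left abstract here, since the real application resolves it through its own configuration loader:

```rust
use crate::auth::{AuthConfig, AuthError, AuthService, AuthServiceTrait};

async fn build_auth(config: AuthConfig) -> Result<AuthService, AuthError> {
    // For an Oidc config this performs discovery and spins up the remote JWKS verifier;
    // for Basic it is a cheap constructor.
    let service = AuthService::from_conf(config).await?;
    tracing::info!(auth_type = ?service.auth_type(), "auth service ready");
    Ok(service)
}
```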
12 apps/recorder/src/bin/main.rs Normal file
@@ -0,0 +1,12 @@
use recorder::{app::AppBuilder, errors::RecorderResult};

#[tokio::main]
async fn main() -> RecorderResult<()> {
    let builder = AppBuilder::from_main_cli(None).await?;

    let app = builder.build().await?;

    app.serve().await?;

    Ok(())
}
16 apps/recorder/src/bin/migrate_down.rs Normal file
@@ -0,0 +1,16 @@
use recorder::{app::AppBuilder, database::DatabaseService, errors::RecorderResult};

#[tokio::main]
async fn main() -> RecorderResult<()> {
    let builder = AppBuilder::from_main_cli(None).await?;

    builder.load_env().await?;
    let mut database_config = builder.load_config().await?.database;
    database_config.auto_migrate = false;

    let database_service = DatabaseService::from_config(database_config).await?;

    database_service.migrate_down().await?;

    Ok(())
}
4 apps/recorder/src/cache/config.rs vendored Normal file
@@ -0,0 +1,4 @@
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CacheConfig {}
5 apps/recorder/src/cache/mod.rs vendored Normal file
@@ -0,0 +1,5 @@
pub mod config;
pub mod service;

pub use config::CacheConfig;
pub use service::CacheService;
10 apps/recorder/src/cache/service.rs vendored Normal file
@@ -0,0 +1,10 @@
use super::CacheConfig;
use crate::errors::RecorderResult;

pub struct CacheService {}

impl CacheService {
    pub async fn from_config(_config: CacheConfig) -> RecorderResult<Self> {
        Ok(Self {})
    }
}
4 apps/recorder/src/crypto/config.rs Normal file
@@ -0,0 +1,4 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CryptoConfig {}
20 apps/recorder/src/crypto/error.rs Normal file
@@ -0,0 +1,20 @@
use async_graphql::Error as AsyncGraphQLError;
use seaography::SeaographyError;

#[derive(Debug, snafu::Snafu)]
pub enum CryptoError {
    #[snafu(transparent)]
    Base64DecodeError { source: base64::DecodeError },
    #[snafu(display("CocoonError: {source:?}"), context(false))]
    CocoonError { source: cocoon::Error },
    #[snafu(transparent)]
    FromUtf8Error { source: std::string::FromUtf8Error },
    #[snafu(transparent)]
    SerdeJsonError { source: serde_json::Error },
}

impl From<CryptoError> for SeaographyError {
    fn from(error: CryptoError) -> Self {
        SeaographyError::AsyncGraphQLError(AsyncGraphQLError::new(error.to_string()))
    }
}
9 apps/recorder/src/crypto/mod.rs Normal file
@@ -0,0 +1,9 @@
pub mod config;
pub mod error;
pub mod service;
pub mod userpass;

pub use config::CryptoConfig;
pub use error::CryptoError;
pub use service::CryptoService;
pub use userpass::UserPassCredential;
62 apps/recorder/src/crypto/service.rs Normal file
@@ -0,0 +1,62 @@
use base64::prelude::{BASE64_URL_SAFE, *};
use cocoon::Cocoon;
use rand::Rng;
use serde::{Deserialize, Serialize};

use super::CryptoConfig;
use crate::crypto::error::CryptoError;

pub struct CryptoService {
    #[allow(dead_code)]
    config: CryptoConfig,
}

impl CryptoService {
    pub async fn from_config(config: CryptoConfig) -> Result<Self, CryptoError> {
        Ok(Self { config })
    }

    pub fn encrypt_string(&self, data: String) -> Result<String, CryptoError> {
        let key = rand::rng().random::<[u8; 32]>();
        let mut cocoon = Cocoon::new(&key);

        let mut data = data.into_bytes();

        let detached_prefix = cocoon.encrypt(&mut data)?;

        let mut combined = Vec::with_capacity(key.len() + detached_prefix.len() + data.len());
        combined.extend_from_slice(&key);
        combined.extend_from_slice(&detached_prefix);
        combined.extend_from_slice(&data);

        Ok(BASE64_URL_SAFE.encode(combined))
    }

    pub fn decrypt_string(&self, data: &str) -> Result<String, CryptoError> {
        let decoded = BASE64_URL_SAFE.decode(data)?;

        let (key, remain) = decoded.split_at(32);
        let (detached_prefix, data) = remain.split_at(60);
        let mut data = data.to_vec();
        let cocoon = Cocoon::new(key);

        cocoon.decrypt(&mut data, detached_prefix)?;

        String::from_utf8(data).map_err(CryptoError::from)
    }

    pub fn encrypt_serialize<T: Serialize>(&self, credentials: &T) -> Result<String, CryptoError> {
        let json = serde_json::to_string(credentials)?;

        self.encrypt_string(json)
    }

    pub fn decrypt_deserialize<T: for<'de> Deserialize<'de>>(
        &self,
        encrypted: &str,
    ) -> Result<T, CryptoError> {
        let data = self.decrypt_string(encrypted)?;

        serde_json::from_str(&data).map_err(CryptoError::from)
    }
}
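The blob layout follows directly from the code above: a random 32-byte key, the 60-byte detached Cocoon prefix the decryptor expects, then the ciphertext, all base64-url encoded. Because the key travels inside the blob, this is an opaque, tamper-evident encoding rather than secrecy against anyone who holds the string. A hedged round-trip sketch (written as if inside the crate, error handling elided to `?`):

```rust
use crate::crypto::{CryptoConfig, CryptoError, CryptoService};

async fn round_trip() -> Result<(), CryptoError> {
    let crypto = CryptoService::from_config(CryptoConfig {}).await?;

    // encrypt_string -> base64_url(key || detached_prefix || ciphertext)
    let sealed = crypto.encrypt_string("session-cookie=abc".to_string())?;
    assert_ne!(sealed, "session-cookie=abc");

    // decrypt_string reverses the split and restores the original text
    let opened = crypto.decrypt_string(&sealed)?;
    assert_eq!(opened, "session-cookie=abc");
    Ok(())
}
```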
19 apps/recorder/src/crypto/userpass.rs Normal file
@@ -0,0 +1,19 @@
use std::fmt::Debug;

pub struct UserPassCredential {
    pub username: String,
    pub password: String,
    pub user_agent: Option<String>,
    pub cookies: Option<String>,
}

impl Debug for UserPassCredential {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("UserPassCredential")
            .field("username", &"[Secret]")
            .field("password", &"[Secret]")
            .field("cookies", &"[Secret]")
            .field("user_agent", &self.user_agent)
            .finish()
    }
}
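The manual `Debug` impl exists so credentials can appear in logs without leaking secrets; only the user agent is printed verbatim. A small illustrative check (values are placeholders):

```rust
use crate::crypto::UserPassCredential;

fn debug_redacts_secrets() {
    let credential = UserPassCredential {
        username: "alice".to_string(),
        password: "hunter2".to_string(),
        user_agent: Some("recorder/0.1".to_string()),
        cookies: None,
    };

    // username, password and cookies are replaced with "[Secret]" in the Debug output
    let rendered = format!("{credential:?}");
    assert!(rendered.contains("[Secret]"));
    assert!(!rendered.contains("hunter2"));
}
```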
14 apps/recorder/src/database/config.rs Normal file
@@ -0,0 +1,14 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct DatabaseConfig {
    pub uri: String,
    pub enable_logging: bool,
    pub min_connections: u32,
    pub max_connections: u32,
    pub connect_timeout: u64,
    pub idle_timeout: u64,
    pub acquire_timeout: Option<u64>,
    #[serde(default)]
    pub auto_migrate: bool,
}
5 apps/recorder/src/database/mod.rs Normal file
@@ -0,0 +1,5 @@
pub mod config;
pub mod service;

pub use config::DatabaseConfig;
pub use service::DatabaseService;
129 apps/recorder/src/database/service.rs Normal file
@@ -0,0 +1,129 @@
use std::{ops::Deref, time::Duration};

use apalis_sql::postgres::PostgresStorage;
use sea_orm::{
    ConnectOptions, ConnectionTrait, Database, DatabaseConnection, DbBackend, DbErr, ExecResult,
    QueryResult, Statement,
};
use sea_orm_migration::MigratorTrait;

use super::DatabaseConfig;
use crate::{errors::RecorderResult, migrations::Migrator};

pub struct DatabaseService {
    pub config: DatabaseConfig,
    connection: DatabaseConnection,
    #[cfg(feature = "testcontainers")]
    pub container:
        Option<testcontainers::ContainerAsync<testcontainers_modules::postgres::Postgres>>,
}

impl DatabaseService {
    pub async fn from_config(config: DatabaseConfig) -> RecorderResult<Self> {
        let db_config = config.clone();
        let mut opt = ConnectOptions::new(&config.uri);
        opt.max_connections(config.max_connections)
            .min_connections(config.min_connections)
            .connect_timeout(Duration::from_millis(config.connect_timeout))
            .idle_timeout(Duration::from_millis(config.idle_timeout))
            .sqlx_logging(config.enable_logging);

        if let Some(acquire_timeout) = config.acquire_timeout {
            opt.acquire_timeout(Duration::from_millis(acquire_timeout));
        }

        let db = Database::connect(opt).await?;

        // only support postgres for now
        // if db.get_database_backend() == DatabaseBackend::Sqlite {
        //     db.execute(Statement::from_string(
        //         DatabaseBackend::Sqlite,
        //         "
        //         PRAGMA foreign_keys = ON;
        //         PRAGMA journal_mode = WAL;
        //         PRAGMA synchronous = NORMAL;
        //         PRAGMA mmap_size = 134217728;
        //         PRAGMA journal_size_limit = 67108864;
        //         PRAGMA cache_size = 2000;
        //         ",
        //     ))
        //     .await?;
        // }

        let me = Self {
            connection: db,
            #[cfg(feature = "testcontainers")]
            container: None,
            config: db_config,
        };

        if config.auto_migrate {
            me.migrate_up().await?;
        }

        Ok(me)
    }

    pub async fn migrate_up(&self) -> RecorderResult<()> {
        {
            let pool = &self.get_postgres_connection_pool();
            PostgresStorage::setup(pool).await?;
        }
        Migrator::up(&self.connection, None).await?;
        Ok(())
    }

    pub async fn migrate_down(&self) -> RecorderResult<()> {
        Migrator::down(&self.connection, None).await?;
        {
            self.execute_unprepared(r#"DROP SCHEMA IF EXISTS apalis CASCADE"#)
                .await?;
        }
        Ok(())
    }
}

impl Deref for DatabaseService {
    type Target = DatabaseConnection;

    fn deref(&self) -> &Self::Target {
        &self.connection
    }
}

impl AsRef<DatabaseConnection> for DatabaseService {
    fn as_ref(&self) -> &DatabaseConnection {
        &self.connection
    }
}

#[async_trait::async_trait]
impl ConnectionTrait for DatabaseService {
    fn get_database_backend(&self) -> DbBackend {
        self.deref().get_database_backend()
    }

    async fn execute(&self, stmt: Statement) -> Result<ExecResult, DbErr> {
        self.deref().execute(stmt).await
    }

    async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> {
        self.deref().execute_unprepared(sql).await
    }

    async fn query_one(&self, stmt: Statement) -> Result<Option<QueryResult>, DbErr> {
        self.deref().query_one(stmt).await
    }

    async fn query_all(&self, stmt: Statement) -> Result<Vec<QueryResult>, DbErr> {
        self.deref().query_all(stmt).await
    }

    fn support_returning(&self) -> bool {
        self.deref().support_returning()
    }

    fn is_mock_connection(&self) -> bool {
        self.deref().is_mock_connection()
    }
}
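Because `DatabaseService` forwards `ConnectionTrait` (and derefs to `DatabaseConnection`), it can be handed straight to SeaORM wherever a connection is expected. A minimal sketch, assuming only the types shown in this diff:

```rust
use sea_orm::{ConnectionTrait, Statement};

use crate::database::DatabaseService;

async fn ping(db: &DatabaseService) -> Result<(), sea_orm::DbErr> {
    // The service itself satisfies ConnectionTrait, so statements run against it directly.
    let backend = db.get_database_backend();
    db.execute(Statement::from_string(backend, "SELECT 1".to_string()))
        .await?;
    Ok(())
}
```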
324 apps/recorder/src/errors/app_error.rs Normal file
@@ -0,0 +1,324 @@
use std::borrow::Cow;

use axum::{
    Json,
    response::{IntoResponse, Response},
};
use fetch::{FetchError, HttpClientError, reqwest, reqwest_middleware};
use http::{HeaderMap, StatusCode};
use snafu::Snafu;

use crate::{
    auth::AuthError,
    crypto::CryptoError,
    downloader::DownloaderError,
    errors::{OptDynErr, response::StandardErrorResponse},
};

#[derive(Snafu, Debug)]
#[snafu(visibility(pub(crate)))]
pub enum RecorderError {
    #[snafu(transparent)]
    ChronoTzParseError { source: chrono_tz::ParseError },
    #[snafu(transparent)]
    SeaographyError { source: seaography::SeaographyError },
    #[snafu(transparent)]
    CronError { source: croner::errors::CronError },
    #[snafu(display(
        "HTTP {status} {reason}, source = {source:?}",
        status = status,
        reason = status.canonical_reason().unwrap_or("Unknown")
    ))]
    HttpResponseError {
        status: StatusCode,
        headers: Option<HeaderMap>,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(transparent)]
    ImageError { source: image::ImageError },
    #[cfg(feature = "jxl")]
    #[snafu(transparent)]
    JxlEncodeError { source: jpegxl_rs::EncodeError },
    #[snafu(transparent, context(false))]
    HttpError { source: http::Error },
    #[snafu(transparent, context(false))]
    FancyRegexError {
        #[snafu(source(from(fancy_regex::Error, Box::new)))]
        source: Box<fancy_regex::Error>,
    },
    #[snafu(transparent)]
    NetAddrParseError { source: std::net::AddrParseError },
    #[snafu(transparent)]
    RegexError { source: regex::Error },
    #[snafu(display("Invalid method"))]
    InvalidMethodError,
    #[snafu(display("Invalid header value"))]
    InvalidHeaderValueError,
    #[snafu(transparent)]
    QuickXmlDeserializeError { source: quick_xml::DeError },
    #[snafu(display("Invalid header name"))]
    InvalidHeaderNameError,
    #[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
    MissingOriginError,
    #[snafu(transparent)]
    TracingAppenderInitError {
        source: tracing_appender::rolling::InitError,
    },
    #[snafu(transparent)]
    GraphQLSchemaError {
        source: async_graphql::dynamic::SchemaError,
    },
    #[snafu(transparent)]
    AuthError { source: AuthError },
    #[snafu(transparent)]
    DownloadError { source: DownloaderError },
    #[snafu(transparent)]
    RSSError { source: rss::Error },
    #[snafu(transparent)]
    DotEnvError { source: dotenvy::Error },
    #[snafu(transparent)]
    TeraError { source: tera::Error },
    #[snafu(transparent)]
    IOError { source: std::io::Error },
    #[snafu(transparent)]
    DbError { source: sea_orm::DbErr },
    #[snafu(transparent)]
    DbSqlxError { source: sea_orm::SqlxError },
    #[snafu(transparent, context(false))]
    FigmentError {
        #[snafu(source(from(figment::Error, Box::new)))]
        source: Box<figment::Error>,
    },
    #[snafu(transparent)]
    SerdeJsonError { source: serde_json::Error },
    #[snafu(transparent)]
    ParseUrlError { source: url::ParseError },
    #[snafu(display("{source}"), context(false))]
    OpenDALError {
        #[snafu(source(from(opendal::Error, Box::new)))]
        source: Box<opendal::Error>,
    },
    #[snafu(transparent)]
    HttpClientError { source: HttpClientError },
    #[cfg(feature = "testcontainers")]
    #[snafu(transparent)]
    TestcontainersError {
        source: testcontainers::TestcontainersError,
    },
    #[snafu(display("Extract {desc} with mime error, expected {expected}, but got {found}"))]
    MimeError {
        desc: String,
        expected: String,
        found: String,
    },
    #[snafu(display("Invalid or unknown format in extracting mikan rss"))]
    MikanRssInvalidFormatError,
    #[snafu(display("Invalid field {field} in extracting mikan rss"))]
    MikanRssInvalidFieldError {
        field: Cow<'static, str>,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(display("Missing field {field} in extracting mikan meta"))]
    MikanMetaMissingFieldError {
        field: Cow<'static, str>,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(display("Model Entity {entity} not found or does not belong to subscriber{}", (
        detail.as_ref().map(|detail| format!(" : {detail}"))).unwrap_or_default()
    ))]
    ModelEntityNotFound {
        entity: Cow<'static, str>,
        detail: Option<String>,
    },
    #[snafu(transparent)]
    FetchError { source: FetchError },
    #[snafu(display("Credential3rdError: {message}, source = {source}"))]
    Credential3rdError {
        message: String,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(transparent)]
    CryptoError { source: CryptoError },
    #[snafu(transparent)]
    StringFromUtf8Error { source: std::string::FromUtf8Error },
    #[snafu(display("{message}"))]
    Whatever {
        message: String,
        #[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, OptDynErr::some)))]
        source: OptDynErr,
    },
    #[snafu(display("Invalid task id: {message}"))]
    InvalidTaskId { message: String },
}

impl RecorderError {
    pub fn from_status(status: StatusCode) -> Self {
        Self::HttpResponseError {
            status,
            headers: None,
            source: None.into(),
        }
    }

    pub fn from_status_and_headers(status: StatusCode, headers: HeaderMap) -> Self {
        Self::HttpResponseError {
            status,
            headers: Some(headers),
            source: None.into(),
        }
    }

    pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
        Self::MikanMetaMissingFieldError {
            field,
            source: None.into(),
        }
    }

    pub fn from_mikan_rss_invalid_field(field: Cow<'static, str>) -> Self {
        Self::MikanRssInvalidFieldError {
            field,
            source: None.into(),
        }
    }

    pub fn from_mikan_rss_invalid_field_and_source(
        field: Cow<'static, str>,
        source: impl std::error::Error + Send + Sync + 'static,
    ) -> Self {
        Self::MikanRssInvalidFieldError {
            field,
            source: OptDynErr::some_boxed(source),
        }
    }

    pub fn from_entity_not_found<E: sea_orm::EntityTrait>() -> Self {
        Self::ModelEntityNotFound {
            entity: std::any::type_name::<E::Model>().into(),
            detail: None,
        }
    }

    pub fn from_entity_not_found_detail<E: sea_orm::EntityTrait, T: ToString>(detail: T) -> Self {
        Self::ModelEntityNotFound {
            entity: std::any::type_name::<E::Model>().into(),
            detail: Some(detail.to_string()),
        }
    }
}

impl snafu::FromString for RecorderError {
    type Source = Box<dyn std::error::Error + Send + Sync>;

    fn without_source(message: String) -> Self {
        Self::Whatever {
            message,
            source: OptDynErr::none(),
        }
    }

    fn with_source(source: Self::Source, message: String) -> Self {
        Self::Whatever {
            message,
            source: OptDynErr::some(source),
        }
    }
}

impl From<StatusCode> for RecorderError {
    fn from(status: StatusCode) -> Self {
        Self::HttpResponseError {
            status,
            headers: None,
            source: None.into(),
        }
    }
}

impl From<(StatusCode, HeaderMap)> for RecorderError {
    fn from((status, headers): (StatusCode, HeaderMap)) -> Self {
        Self::HttpResponseError {
            status,
            headers: Some(headers),
            source: None.into(),
        }
    }
}

impl IntoResponse for RecorderError {
    fn into_response(self) -> Response {
        match self {
            Self::AuthError { source: auth_error } => auth_error.into_response(),
            Self::HttpResponseError {
                status,
                headers,
                source,
            } => {
                let message = source
                    .into_inner()
                    .map(|s| s.to_string())
                    .unwrap_or_else(|| {
                        String::from(status.canonical_reason().unwrap_or("Unknown"))
                    });
                (
                    status,
                    headers,
                    Json::<StandardErrorResponse>(StandardErrorResponse::from(message)),
                )
                    .into_response()
            }
            merr @ Self::ModelEntityNotFound { .. } => (
                StatusCode::NOT_FOUND,
                Json::<StandardErrorResponse>(StandardErrorResponse::from(merr.to_string())),
            )
                .into_response(),
            err => (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json::<StandardErrorResponse>(StandardErrorResponse::from(err.to_string())),
            )
                .into_response(),
        }
    }
}

impl From<reqwest::Error> for RecorderError {
    fn from(error: reqwest::Error) -> Self {
        FetchError::from(error).into()
    }
}

impl From<reqwest_middleware::Error> for RecorderError {
    fn from(error: reqwest_middleware::Error) -> Self {
        FetchError::from(error).into()
    }
}

impl From<http::header::InvalidHeaderValue> for RecorderError {
    fn from(_error: http::header::InvalidHeaderValue) -> Self {
        Self::InvalidHeaderValueError
    }
}

impl From<http::header::InvalidHeaderName> for RecorderError {
    fn from(_error: http::header::InvalidHeaderName) -> Self {
        Self::InvalidHeaderNameError
    }
}

impl From<http::method::InvalidMethod> for RecorderError {
    fn from(_error: http::method::InvalidMethod) -> Self {
        Self::InvalidMethodError
    }
}

impl From<async_graphql::Error> for RecorderError {
    fn from(error: async_graphql::Error) -> Self {
        seaography::SeaographyError::AsyncGraphQLError(error).into()
    }
}

pub type RecorderResult<T> = Result<T, RecorderError>;
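The `snafu::FromString` impl is what lets ad-hoc errors be raised through snafu's `whatever!` macro and `whatever_context`, both landing in the `Whatever` variant above. A small hedged sketch; the parsing helper is invented for illustration:

```rust
use snafu::{ResultExt, whatever};

use crate::errors::RecorderResult;

fn parse_limit(raw: &str) -> RecorderResult<u32> {
    if raw.is_empty() {
        // becomes RecorderError::Whatever with no source
        whatever!("limit must not be empty");
    }
    // the ParseIntError is boxed into OptDynErr as the Whatever source
    raw.parse::<u32>()
        .whatever_context("limit must be a positive integer")
}
```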
6 apps/recorder/src/errors/mod.rs Normal file
@@ -0,0 +1,6 @@
pub mod app_error;
pub mod response;

pub use app_error::{RecorderError, RecorderResult};
pub use response::StandardErrorResponse;
pub use util::errors::OptDynErr;
19 apps/recorder/src/errors/response.rs Normal file
@@ -0,0 +1,19 @@
use serde::Serialize;

#[derive(Serialize, Debug, Clone)]
pub struct StandardErrorResponse<T = ()> {
    pub success: bool,
    pub message: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result: Option<T>,
}

impl<T> From<String> for StandardErrorResponse<T> {
    fn from(value: String) -> Self {
        StandardErrorResponse {
            success: false,
            message: value,
            result: None,
        }
    }
}
9 apps/recorder/src/extract/bittorrent/extract.rs Normal file
@@ -0,0 +1,9 @@
use chrono::{DateTime, Utc};

#[derive(Clone, Debug)]
pub struct EpisodeEnclosureMeta {
    pub magnet_link: Option<String>,
    pub torrent_link: Option<String>,
    pub pub_date: Option<DateTime<Utc>>,
    pub content_length: Option<i64>,
}
Some files were not shown because too many files have changed in this diff.