Compare commits
129 Commits
97b7bfb7fb
...
dev
| Author | SHA1 | Date | |
|---|---|---|---|
| 94919878ea | |||
| 81bf27ed28 | |||
| 5be5b9f634 | |||
| 6cdd8c27ce | |||
| 4174cea728 | |||
| 3aad31a36b | |||
| 004fed9b2e | |||
| a1c2eeded1 | |||
| 147df00155 | |||
| 5155c59293 | |||
| b5b3c77ba3 | |||
| 1d0aa8d7f1 | |||
| 5b001f9584 | |||
| d06acde882 | |||
| bacfe99ef2 | |||
| b4090e74c0 | |||
| c3e546e256 | |||
| f83371bbf9 | |||
| c858cc7d44 | |||
| 65505f91b2 | |||
| c8501b1768 | |||
| 3a8eb88e1a | |||
| 003d8840fd | |||
| 41ff5c2a11 | |||
| 571caf50ff | |||
| 9fd3ae6563 | |||
| cde3361458 | |||
| f055011b86 | |||
| 16429a44b4 | |||
| fe0b7e88e6 | |||
| 28dd9da6ac | |||
| 02c16a2972 | |||
| 324427513c | |||
| c12b9b360a | |||
| cc06142050 | |||
| 6726cafff4 | |||
| 35312ea1ff | |||
| 721eee9c88 | |||
| 421f9d0293 | |||
| 7eb4e41708 | |||
| a2254bbe80 | |||
| 1b5bdadf10 | |||
| 882b29d7a1 | |||
| c60f6f511e | |||
| 07955286f1 | |||
| 258eeddc74 | |||
| b09e9e6aaa | |||
| 0df371adb7 | |||
| 8144986a48 | |||
| d2aab7369d | |||
| 946d4e8c2c | |||
| 0b5f25a263 | |||
| c669d66969 | |||
| 082e08e7f4 | |||
| a3fd03d32a | |||
| 5645645c5f | |||
| ac7d1efb8d | |||
| a676061b3e | |||
| 1c34cebbde | |||
| 22a2ce0559 | |||
| 313b1bf1ba | |||
| 66413f92e3 | |||
| 0fcbc6bbe9 | |||
| f1d8318500 | |||
| b2f327d48f | |||
| b772937354 | |||
| a3b9543d0e | |||
| d0a423df9f | |||
| 8600bf216a | |||
| bf270e4e87 | |||
| 760cb2344e | |||
| ed2c1038e6 | |||
| d4bdc677a9 | |||
| 9d58d961bd | |||
| 791b75b3af | |||
| a7f52fe0eb | |||
| 439353d318 | |||
| f245a68790 | |||
| 3fe0538468 | |||
| dbded94324 | |||
| 4301f1dbab | |||
| 9fdb778330 | |||
| 0300d7baf6 | |||
| ee1b1ae5e6 | |||
| b20f7cd1ad | |||
| eb8f0be004 | |||
| 68aa13e216 | |||
| 2a5c2b18e7 | |||
| e64086b7cf | |||
| 08946059ad | |||
| 10b17dc66b | |||
| 1ff8a311ae | |||
| 2686fa1d76 | |||
| 376d2b28d3 | |||
| a3609696c7 | |||
| b0c12acbc6 | |||
| 3dfcf2a536 | |||
| ecb56013a5 | |||
| 27b52f7fd1 | |||
| 234441e6a3 | |||
| 011f62829a | |||
| c34584a215 | |||
| 1fca69fa66 | |||
| a0fc4c04d9 | |||
| 07ac7e3376 | |||
| f94e175082 | |||
| e66573b315 | |||
| 27cdcdef58 | |||
| 383e6340ea | |||
| 5a4a4d7e3a | |||
| 6e4c136614 | |||
| e2fdeaabb2 | |||
| 408d211f27 | |||
| 2844e1fc32 | |||
| a68aab1452 | |||
| c0707d17bb | |||
| 6887b2a069 | |||
| cac0d37e53 | |||
| f327ea29f1 | |||
| 5bc5d98823 | |||
| 09565bd827 | |||
| 7adc0582aa | |||
| 4f9e74ceb4 | |||
| c2f74dc369 | |||
| ae40a3a7f8 | |||
| 027112db9a | |||
| 9a2a8f029f | |||
| 877d90d1e2 | |||
| c6677d414d |
@@ -1,34 +1,5 @@
|
|||||||
[alias]
|
[alias]
|
||||||
recorder = "run -p recorder --bin recorder_cli -- --environment development"
|
|
||||||
recorder-playground = "run -p recorder --example playground -- --environment development"
|
recorder-playground = "run -p recorder --example playground -- --environment development"
|
||||||
|
|
||||||
[build]
|
[build]
|
||||||
rustflags = ["-Zthreads=8"]
|
rustflags = ["-Zthreads=8", "-Zshare-generics=y"]
|
||||||
|
|
||||||
[target.x86_64-unknown-linux-gnu]
|
|
||||||
linker = "clang"
|
|
||||||
rustflags = ["-Zthreads=8", "-Clink-arg=-fuse-ld=lld", "-Zshare-generics=y"]
|
|
||||||
|
|
||||||
[target.x86_64-pc-windows-msvc]
|
|
||||||
linker = "rust-lld.exe"
|
|
||||||
rustflags = ["-Zthreads=8", "-Zshare-generics=n"]
|
|
||||||
|
|
||||||
# NOTE: you must install [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on mac. you can easily do this by installing llvm which includes lld with the "brew" package manager:
|
|
||||||
# `brew install llvm`
|
|
||||||
#[target.x86_64-apple-darwin]
|
|
||||||
#rustflags = [
|
|
||||||
# "-Zthreads=8",
|
|
||||||
# "-C",
|
|
||||||
# "link-arg=-fuse-ld=/usr/local/opt/llvm/bin/ld64.lld",
|
|
||||||
# "-Zshare-generics=y",
|
|
||||||
#]
|
|
||||||
|
|
||||||
# NOTE: you must install [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on mac. you can easily do this by installing llvm which includes lld with the "brew" package manager:
|
|
||||||
# `brew install llvm`
|
|
||||||
#[target.aarch64-apple-darwin]
|
|
||||||
#rustflags = [
|
|
||||||
# "-Zthreads=8",
|
|
||||||
# "-C",
|
|
||||||
# "link-arg=-fuse-ld=/opt/homebrew/opt/llvm/bin/ld64.lld",
|
|
||||||
# "-Zshare-generics=y",
|
|
||||||
#]
|
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ root = true
|
|||||||
|
|
||||||
[*]
|
[*]
|
||||||
indent_style = space
|
indent_style = space
|
||||||
indent_size = 2
|
indent_size = 4
|
||||||
charset = utf-8
|
charset = utf-8
|
||||||
trim_trailing_whitespace = true
|
trim_trailing_whitespace = true
|
||||||
insert_final_newline = true
|
insert_final_newline = true
|
||||||
|
|||||||
1
.gitattributes
vendored
Normal file
1
.gitattributes
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
**/tests/resources/** linguist-detectable=false
|
||||||
@@ -1,107 +0,0 @@
|
|||||||
name: CI
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- master
|
|
||||||
- main
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
env:
|
|
||||||
RUST_TOOLCHAIN: stable
|
|
||||||
TOOLCHAIN_PROFILE: minimal
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
rustfmt:
|
|
||||||
name: Check Style
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout the code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: ${{ env.TOOLCHAIN_PROFILE }}
|
|
||||||
toolchain: ${{ env.RUST_TOOLCHAIN }}
|
|
||||||
override: true
|
|
||||||
components: rustfmt
|
|
||||||
- name: Run cargo fmt
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: fmt
|
|
||||||
args: --all -- --check
|
|
||||||
|
|
||||||
clippy:
|
|
||||||
name: Run Clippy
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout the code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: ${{ env.TOOLCHAIN_PROFILE }}
|
|
||||||
toolchain: ${{ env.RUST_TOOLCHAIN }}
|
|
||||||
override: true
|
|
||||||
- name: Setup Rust cache
|
|
||||||
uses: Swatinem/rust-cache@v2
|
|
||||||
- name: Run cargo clippy
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: clippy
|
|
||||||
args: --all-features -- -D warnings -W clippy::pedantic -W clippy::nursery -W rust-2018-idioms
|
|
||||||
|
|
||||||
test:
|
|
||||||
name: Run Tests
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
services:
|
|
||||||
redis:
|
|
||||||
image: redis
|
|
||||||
options: >-
|
|
||||||
--health-cmd "redis-cli ping"
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
ports:
|
|
||||||
- "6379:6379"
|
|
||||||
postgres:
|
|
||||||
image: postgres
|
|
||||||
env:
|
|
||||||
POSTGRES_DB: postgress_test
|
|
||||||
POSTGRES_USER: postgress
|
|
||||||
POSTGRES_PASSWORD: postgress
|
|
||||||
ports:
|
|
||||||
- "5432:5432"
|
|
||||||
# Set health checks to wait until postgres has started
|
|
||||||
options: --health-cmd pg_isready
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout the code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: ${{ env.TOOLCHAIN_PROFILE }}
|
|
||||||
toolchain: ${{ env.RUST_TOOLCHAIN }}
|
|
||||||
override: true
|
|
||||||
- name: Setup Rust cache
|
|
||||||
uses: Swatinem/rust-cache@v2
|
|
||||||
- name: Run cargo test
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: test
|
|
||||||
args: --all-features --all
|
|
||||||
env:
|
|
||||||
REDIS_URL: redis://localhost:${{job.services.redis.ports[6379]}}
|
|
||||||
DATABASE_URL: postgres://postgress:postgress@localhost:5432/postgress_test
|
|
||||||
36
.github/workflows/testing-torrents-container.yaml
vendored
Normal file
36
.github/workflows/testing-torrents-container.yaml
vendored
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
name: Testing Torrents Container
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
REGISTRY: ghcr.io
|
||||||
|
ORG: dumtruck
|
||||||
|
PROJECT: konobangu
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-container:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
- name: Log in to GHCR
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ env.REGISTRY }}
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: Build and push Docker image
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: 'packages/testing-torrents'
|
||||||
|
file: 'packages/testing-torrents/Dockerfile'
|
||||||
|
push: true
|
||||||
|
tags: '${{ env.REGISTRY }}/${{ env.ORG }}/${{ env.PROJECT }}-testing-torrents:latest'
|
||||||
|
cache-from: type=gha
|
||||||
|
cache-to: type=gha,mode=max
|
||||||
6
.gitignore
vendored
6
.gitignore
vendored
@@ -158,11 +158,8 @@ web_modules/
|
|||||||
.yarn-integrity
|
.yarn-integrity
|
||||||
|
|
||||||
# Local env files
|
# Local env files
|
||||||
.env
|
|
||||||
.env.local
|
.env.local
|
||||||
.env.development.local
|
.env.*.local
|
||||||
.env.test.local
|
|
||||||
.env.production.local
|
|
||||||
|
|
||||||
# parcel-bundler cache (https://parceljs.org/)
|
# parcel-bundler cache (https://parceljs.org/)
|
||||||
.cache
|
.cache
|
||||||
@@ -219,6 +216,7 @@ index.d.ts.map
|
|||||||
# Added by cargo
|
# Added by cargo
|
||||||
|
|
||||||
/target
|
/target
|
||||||
|
/ide-target
|
||||||
!/examples/.gitkeep
|
!/examples/.gitkeep
|
||||||
/.env
|
/.env
|
||||||
/.env.bk
|
/.env.bk
|
||||||
|
|||||||
3
.vscode/extensions.json
vendored
3
.vscode/extensions.json
vendored
@@ -5,6 +5,7 @@
|
|||||||
"unifiedjs.vscode-mdx",
|
"unifiedjs.vscode-mdx",
|
||||||
"mikestead.dotenv",
|
"mikestead.dotenv",
|
||||||
"christian-kohler.npm-intellisense",
|
"christian-kohler.npm-intellisense",
|
||||||
"skellock.just"
|
"skellock.just",
|
||||||
|
"zerotaskx.rust-extension-pack"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
17
.vscode/settings.json
vendored
17
.vscode/settings.json
vendored
@@ -1,6 +1,5 @@
|
|||||||
{
|
{
|
||||||
"npm.packageManager": "pnpm",
|
"npm.packageManager": "pnpm",
|
||||||
"rust-analyzer.showUnlinkedFileNotification": false,
|
|
||||||
"[javascript]": {
|
"[javascript]": {
|
||||||
"editor.defaultFormatter": "vscode.typescript-language-features",
|
"editor.defaultFormatter": "vscode.typescript-language-features",
|
||||||
"editor.formatOnSave": true
|
"editor.formatOnSave": true
|
||||||
@@ -27,7 +26,19 @@
|
|||||||
},
|
},
|
||||||
"emmet.showExpandedAbbreviation": "never",
|
"emmet.showExpandedAbbreviation": "never",
|
||||||
"prettier.enable": false,
|
"prettier.enable": false,
|
||||||
"tailwindCSS.experimental.configFile": "./packages/tailwind-config/config.ts",
|
|
||||||
"typescript.tsdk": "node_modules/typescript/lib",
|
"typescript.tsdk": "node_modules/typescript/lib",
|
||||||
"rust-analyzer.cargo.features": ["testcontainers"]
|
"rust-analyzer.showUnlinkedFileNotification": false,
|
||||||
|
"sqltools.connections": [
|
||||||
|
{
|
||||||
|
"previewLimit": 50,
|
||||||
|
"server": "localhost",
|
||||||
|
"port": 5432,
|
||||||
|
"driver": "PostgreSQL",
|
||||||
|
"name": "konobangu-dev",
|
||||||
|
"database": "konobangu",
|
||||||
|
"username": "konobangu"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"rust-analyzer.cargo.features": "all",
|
||||||
|
"rust-analyzer.testExplorer": true
|
||||||
}
|
}
|
||||||
|
|||||||
112
.vscode/tasks.json
vendored
Normal file
112
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
{
|
||||||
|
"version": "2.0.0",
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"label": "dev-all",
|
||||||
|
"dependsOn": [
|
||||||
|
"dev-webui",
|
||||||
|
"dev-recorder",
|
||||||
|
"dev-proxy",
|
||||||
|
"dev-codegen-wait",
|
||||||
|
"dev-deps",
|
||||||
|
],
|
||||||
|
"dependsOrder": "parallel",
|
||||||
|
"group": {
|
||||||
|
"kind": "build",
|
||||||
|
"isDefault": false,
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"group": "new-group",
|
||||||
|
"echo": true,
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "shared",
|
||||||
|
"clear": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "dev-webui",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "just",
|
||||||
|
"args": [
|
||||||
|
"dev-webui"
|
||||||
|
],
|
||||||
|
"isBackground": true,
|
||||||
|
"problemMatcher": [],
|
||||||
|
"presentation": {
|
||||||
|
"panel": "dedicated",
|
||||||
|
"reveal": "always",
|
||||||
|
"focus": true,
|
||||||
|
"showReuseMessage": true,
|
||||||
|
"clear": true,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "dev-deps",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "just",
|
||||||
|
"args": [
|
||||||
|
"dev-deps"
|
||||||
|
],
|
||||||
|
"isBackground": true,
|
||||||
|
"problemMatcher": [],
|
||||||
|
"presentation": {
|
||||||
|
"panel": "dedicated",
|
||||||
|
"reveal": "never",
|
||||||
|
"focus": false,
|
||||||
|
"showReuseMessage": true,
|
||||||
|
"clear": true,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "dev-codegen-wait",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "just",
|
||||||
|
"args": [
|
||||||
|
"dev-codegen-wait"
|
||||||
|
],
|
||||||
|
"isBackground": true,
|
||||||
|
"problemMatcher": [],
|
||||||
|
"presentation": {
|
||||||
|
"panel": "dedicated",
|
||||||
|
"reveal": "never",
|
||||||
|
"focus": false,
|
||||||
|
"showReuseMessage": true,
|
||||||
|
"clear": true,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "dev-recorder",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "just",
|
||||||
|
"args": [
|
||||||
|
"dev-recorder"
|
||||||
|
],
|
||||||
|
"isBackground": true,
|
||||||
|
"problemMatcher": [],
|
||||||
|
"presentation": {
|
||||||
|
"panel": "dedicated",
|
||||||
|
"reveal": "never",
|
||||||
|
"focus": false,
|
||||||
|
"showReuseMessage": true,
|
||||||
|
"clear": true,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "dev-proxy",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "just",
|
||||||
|
"args": [
|
||||||
|
"dev-proxy",
|
||||||
|
],
|
||||||
|
"isBackground": true,
|
||||||
|
"problemMatcher": [],
|
||||||
|
"presentation": {
|
||||||
|
"panel": "dedicated",
|
||||||
|
"reveal": "never",
|
||||||
|
"focus": false,
|
||||||
|
"showReuseMessage": true,
|
||||||
|
"clear": true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
6529
Cargo.lock
generated
6529
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
94
Cargo.toml
94
Cargo.toml
@@ -1,14 +1,88 @@
|
|||||||
|
# cargo-features = ["codegen-backend"]
|
||||||
|
|
||||||
[workspace]
|
[workspace]
|
||||||
members = ["apps/recorder"]
|
members = [
|
||||||
|
"packages/testing-torrents",
|
||||||
|
"packages/util",
|
||||||
|
"packages/util-derive",
|
||||||
|
"packages/fetch",
|
||||||
|
"packages/downloader",
|
||||||
|
"apps/recorder",
|
||||||
|
"apps/proxy",
|
||||||
|
]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
[patch.crates-io]
|
[profile.dev]
|
||||||
testcontainers = { git = "https://github.com/testcontainers/testcontainers-rs.git", rev = "af21727" }
|
# [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
|
||||||
# loco-rs = { git = "https://github.com/lonelyhentxi/loco.git", rev = "beb890e" }
|
# codegen-backend = "cranelift"
|
||||||
# loco-rs = { git = "https://github.com/loco-rs/loco.git" }
|
|
||||||
async-graphql = { git = "https://github.com/aumetra/async-graphql.git", rev = "690ece7" }
|
|
||||||
async-graphql-axum = { git = "https://github.com/aumetra/async-graphql.git", rev = "690ece7" }
|
|
||||||
jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
|
|
||||||
|
|
||||||
# [patch."https://github.com/lonelyhentxi/qbit.git"]
|
[workspace.dependencies]
|
||||||
# qbit-rs = { path = "./patches/qbit-rs" }
|
testing-torrents = { path = "./packages/testing-torrents" }
|
||||||
|
util = { path = "./packages/util" }
|
||||||
|
util-derive = { path = "./packages/util-derive" }
|
||||||
|
fetch = { path = "./packages/fetch" }
|
||||||
|
downloader = { path = "./packages/downloader" }
|
||||||
|
recorder = { path = "./apps/recorder" }
|
||||||
|
|
||||||
|
reqwest = { version = "0.12.20", features = [
|
||||||
|
"charset",
|
||||||
|
"http2",
|
||||||
|
"json",
|
||||||
|
"macos-system-configuration",
|
||||||
|
"cookies",
|
||||||
|
] }
|
||||||
|
moka = "0.12.10"
|
||||||
|
futures = "0.3.31"
|
||||||
|
quirks_path = "0.1.1"
|
||||||
|
snafu = { version = "0.8.0", features = ["futures"] }
|
||||||
|
testcontainers = { version = "0.24.0" }
|
||||||
|
testcontainers-modules = { version = "0.12.1" }
|
||||||
|
testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
|
||||||
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
|
tokio = { version = "1.46", features = [
|
||||||
|
"macros",
|
||||||
|
"fs",
|
||||||
|
"rt-multi-thread",
|
||||||
|
"signal",
|
||||||
|
] }
|
||||||
|
serde_json = "1.0.140"
|
||||||
|
async-trait = "0.1.88"
|
||||||
|
tracing = "0.1"
|
||||||
|
url = "2.5.2"
|
||||||
|
anyhow = "1"
|
||||||
|
itertools = "0.14"
|
||||||
|
chrono = "0.4"
|
||||||
|
bytes = "1"
|
||||||
|
serde_with = "3"
|
||||||
|
regex = "1.11"
|
||||||
|
lazy_static = "1.5"
|
||||||
|
axum = { version = "0.8.3", features = ["macros"] }
|
||||||
|
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
|
||||||
|
axum-extra = { version = "0.10", features = ["typed-header"] }
|
||||||
|
mockito = { version = "1.6.1" }
|
||||||
|
convert_case = "0.8"
|
||||||
|
color-eyre = "0.6.5"
|
||||||
|
inquire = "0.7.5"
|
||||||
|
image = "0.25.6"
|
||||||
|
uuid = { version = "1.6.0", features = ["v7"] }
|
||||||
|
maplit = "1.0.2"
|
||||||
|
once_cell = "1.20.2"
|
||||||
|
rand = "0.9.1"
|
||||||
|
rust_decimal = "1.37.2"
|
||||||
|
base64 = "0.22.1"
|
||||||
|
nom = "8.0.0"
|
||||||
|
percent-encoding = "2.3.1"
|
||||||
|
num-traits = "0.2.19"
|
||||||
|
http = "1.2.0"
|
||||||
|
async-stream = "0.3.6"
|
||||||
|
serde_variant = "0.1.3"
|
||||||
|
tracing-appender = "0.2.3"
|
||||||
|
clap = "4.5.41"
|
||||||
|
ipnetwork = "0.21.1"
|
||||||
|
typed-builder = "0.21.0"
|
||||||
|
nanoid = "0.4.0"
|
||||||
|
webp = "0.3.0"
|
||||||
|
|
||||||
|
|
||||||
|
[patch.crates-io]
|
||||||
|
seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "292cdd2" }
|
||||||
|
|||||||
@@ -1,3 +1,8 @@
|
|||||||
# KONOBUNGU
|
<h1 align="center">
|
||||||
|
<img src="./assets/icon.png" height=180>
|
||||||
|
<br />
|
||||||
|
<b>Konobangu</b>
|
||||||
|
<div align="center"><img src="https://img.shields.io/badge/status-work--in--progress-blue" alt="status-badge" /></div>
|
||||||
|
</h1>
|
||||||
|
|
||||||
Kono Bangumi?
|
<p align="center">Kono bangumi?</p>
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
# Server
|
|
||||||
AUTH_TYPE="basic" #
|
|
||||||
|
|
||||||
BASIC_USER="konobangu"
|
|
||||||
BASIC_PASSWORD="konobangu"
|
|
||||||
|
|
||||||
OIDC_PROVIDER_ENDPOINT="https://some-oidc-auth.com/oidc/.well-known/openid-configuration"
|
|
||||||
OIDC_CLIENT_ID=""
|
|
||||||
OIDC_CLIENT_SECRET=""
|
|
||||||
OIDC_API_ISSUER="https://some-oidc-auth.com/oidc"
|
|
||||||
OIDC_API_AUDIENCE="https://konobangu.com/api"
|
|
||||||
OIDC_ICON_URL=""
|
|
||||||
OIDC_EXTRA_SCOPE_REGEX=""
|
|
||||||
OIDC_EXTRA_CLAIM_KEY=""
|
|
||||||
OIDC_EXTRA_CLAIM_VALUE=""
|
|
||||||
|
|
||||||
DATABASE_URL="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"
|
|
||||||
BETTERSTACK_API_KEY=""
|
|
||||||
BETTERSTACK_URL=""
|
|
||||||
FLAGS_SECRET=""
|
|
||||||
ARCJET_KEY=""
|
|
||||||
SVIX_TOKEN=""
|
|
||||||
LIVEBLOCKS_SECRET=""
|
|
||||||
|
|
||||||
# Client
|
|
||||||
NEXT_PUBLIC_APP_URL="http://localhost:5000"
|
|
||||||
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
|
|
||||||
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
|
|
||||||
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
# AUTH
|
|
||||||
AUTH_TYPE="basic"
|
|
||||||
|
|
||||||
NEXT_PUBLIC_OIDC_PROVIDER_ENDPOINT="https://some-oidc-auth.com/oidc/.well-known/openid-configuration"
|
|
||||||
NEXT_PUBLIC_OIDC_CLIENT_ID=""
|
|
||||||
NEXT_PUBLIC_OIDC_CLIENT_SECRET=""
|
|
||||||
NEXT_PUBLIC_OIDC_ICON_URL=""
|
|
||||||
OIDC_API_ISSUER="https://some-oidc-auth.com/oidc"
|
|
||||||
OIDC_API_AUDIENCE="https://konobangu.com/api"
|
|
||||||
OIDC_EXTRA_SCOPES="" # 如 "read:konobangu,write:konobangu"
|
|
||||||
OIDC_EXTRA_CLAIM_KEY=""
|
|
||||||
OIDC_EXTRA_CLAIM_VALUE=""
|
|
||||||
|
|
||||||
# DATABASE
|
|
||||||
DATABASE_URL="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"
|
|
||||||
|
|
||||||
# SERVER MISC
|
|
||||||
BETTERSTACK_API_KEY=""
|
|
||||||
BETTERSTACK_URL=""
|
|
||||||
FLAGS_SECRET=""
|
|
||||||
ARCJET_KEY=""
|
|
||||||
SVIX_TOKEN=""
|
|
||||||
LIVEBLOCKS_SECRET=""
|
|
||||||
|
|
||||||
# WEBUI
|
|
||||||
NEXT_PUBLIC_APP_URL="http://localhost:5000"
|
|
||||||
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
|
|
||||||
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
|
|
||||||
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"
|
|
||||||
45
apps/app/.gitignore
vendored
45
apps/app/.gitignore
vendored
@@ -1,45 +0,0 @@
|
|||||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
|
||||||
|
|
||||||
# dependencies
|
|
||||||
/node_modules
|
|
||||||
/.pnp
|
|
||||||
.pnp.js
|
|
||||||
|
|
||||||
# testing
|
|
||||||
/coverage
|
|
||||||
|
|
||||||
# next.js
|
|
||||||
/.next/
|
|
||||||
/out/
|
|
||||||
|
|
||||||
# production
|
|
||||||
/build
|
|
||||||
|
|
||||||
# misc
|
|
||||||
.DS_Store
|
|
||||||
*.pem
|
|
||||||
|
|
||||||
# debug
|
|
||||||
npm-debug.log*
|
|
||||||
yarn-debug.log*
|
|
||||||
yarn-error.log*
|
|
||||||
.pnpm-debug.log*
|
|
||||||
|
|
||||||
# local env files
|
|
||||||
.env*.local
|
|
||||||
|
|
||||||
# vercel
|
|
||||||
.vercel
|
|
||||||
|
|
||||||
# typescript
|
|
||||||
*.tsbuildinfo
|
|
||||||
next-env.d.ts
|
|
||||||
|
|
||||||
# prisma
|
|
||||||
.env
|
|
||||||
|
|
||||||
# react.email
|
|
||||||
.react-email
|
|
||||||
|
|
||||||
# Sentry
|
|
||||||
.sentryclirc
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
import { render, screen } from '@testing-library/react';
|
|
||||||
import { expect, test } from 'vitest';
|
|
||||||
import Page from '../app/(unauthenticated)/sign-in/[[...sign-in]]/page';
|
|
||||||
|
|
||||||
test('Sign In Page', () => {
|
|
||||||
render(<Page />);
|
|
||||||
expect(
|
|
||||||
screen.getByRole('heading', {
|
|
||||||
level: 1,
|
|
||||||
name: 'Welcome back',
|
|
||||||
})
|
|
||||||
).toBeDefined();
|
|
||||||
});
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
import { render, screen } from '@testing-library/react';
|
|
||||||
import { expect, test } from 'vitest';
|
|
||||||
import Page from '../app/(unauthenticated)/sign-up/[[...sign-up]]/page';
|
|
||||||
|
|
||||||
test('Sign Up Page', () => {
|
|
||||||
render(<Page />);
|
|
||||||
expect(
|
|
||||||
screen.getByRole('heading', {
|
|
||||||
level: 1,
|
|
||||||
name: 'Create an account',
|
|
||||||
})
|
|
||||||
).toBeDefined();
|
|
||||||
});
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
'use client';
|
|
||||||
|
|
||||||
import { useOthers, useSelf } from '@konobangu/collaboration/hooks';
|
|
||||||
import {
|
|
||||||
Avatar,
|
|
||||||
AvatarFallback,
|
|
||||||
AvatarImage,
|
|
||||||
} from '@konobangu/design-system/components/ui/avatar';
|
|
||||||
import {
|
|
||||||
Tooltip,
|
|
||||||
TooltipContent,
|
|
||||||
TooltipTrigger,
|
|
||||||
} from '@konobangu/design-system/components/ui/tooltip';
|
|
||||||
import { tailwind } from '@konobangu/tailwind-config';
|
|
||||||
|
|
||||||
type PresenceAvatarProps = {
|
|
||||||
info?: Liveblocks['UserMeta']['info'];
|
|
||||||
};
|
|
||||||
|
|
||||||
const PresenceAvatar = ({ info }: PresenceAvatarProps) => (
|
|
||||||
<Tooltip delayDuration={0}>
|
|
||||||
<TooltipTrigger>
|
|
||||||
<Avatar className="h-7 w-7 bg-secondary ring-1 ring-background">
|
|
||||||
<AvatarImage src={info?.avatar} alt={info?.name} />
|
|
||||||
<AvatarFallback className="text-xs">
|
|
||||||
{info?.name?.slice(0, 2)}
|
|
||||||
</AvatarFallback>
|
|
||||||
</Avatar>
|
|
||||||
</TooltipTrigger>
|
|
||||||
<TooltipContent collisionPadding={4}>
|
|
||||||
<p>{info?.name ?? 'Unknown'}</p>
|
|
||||||
</TooltipContent>
|
|
||||||
</Tooltip>
|
|
||||||
);
|
|
||||||
|
|
||||||
export const AvatarStack = () => {
|
|
||||||
const others = useOthers();
|
|
||||||
const self = useSelf();
|
|
||||||
const hasMoreUsers = others.length > 3;
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="-space-x-1 flex items-center px-4">
|
|
||||||
{others.slice(0, 3).map(({ connectionId, info }) => (
|
|
||||||
<PresenceAvatar key={connectionId} info={info} />
|
|
||||||
))}
|
|
||||||
|
|
||||||
{hasMoreUsers && (
|
|
||||||
<PresenceAvatar
|
|
||||||
info={{
|
|
||||||
name: `+${others.length - 3}`,
|
|
||||||
color: tailwind.theme.colors.gray[500],
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{self && <PresenceAvatar info={self.info} />}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
'use client';
|
|
||||||
|
|
||||||
import { getUsers } from '@/app/actions/users/get';
|
|
||||||
import { searchUsers } from '@/app/actions/users/search';
|
|
||||||
import { Room } from '@konobangu/collaboration/room';
|
|
||||||
import type { ReactNode } from 'react';
|
|
||||||
|
|
||||||
export const CollaborationProvider = ({
|
|
||||||
orgId,
|
|
||||||
children,
|
|
||||||
}: {
|
|
||||||
orgId: string;
|
|
||||||
children: ReactNode;
|
|
||||||
}) => {
|
|
||||||
const resolveUsers = async ({ userIds }: { userIds: string[] }) => {
|
|
||||||
const response = await getUsers(userIds);
|
|
||||||
|
|
||||||
if ('error' in response) {
|
|
||||||
throw new Error('Problem resolving users');
|
|
||||||
}
|
|
||||||
|
|
||||||
return response.data;
|
|
||||||
};
|
|
||||||
|
|
||||||
const resolveMentionSuggestions = async ({ text }: { text: string }) => {
|
|
||||||
const response = await searchUsers(text);
|
|
||||||
|
|
||||||
if ('error' in response) {
|
|
||||||
throw new Error('Problem resolving mention suggestions');
|
|
||||||
}
|
|
||||||
|
|
||||||
return response.data;
|
|
||||||
};
|
|
||||||
|
|
||||||
return (
|
|
||||||
<Room
|
|
||||||
id={`${orgId}:presence`}
|
|
||||||
authEndpoint="/api/collaboration/auth"
|
|
||||||
fallback={
|
|
||||||
<div className="px-3 text-muted-foreground text-xs">Loading...</div>
|
|
||||||
}
|
|
||||||
resolveUsers={resolveUsers}
|
|
||||||
resolveMentionSuggestions={resolveMentionSuggestions}
|
|
||||||
>
|
|
||||||
{children}
|
|
||||||
</Room>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
@@ -1,106 +0,0 @@
|
|||||||
'use client';
|
|
||||||
|
|
||||||
import { useMyPresence, useOthers } from '@konobangu/collaboration/hooks';
|
|
||||||
import { useEffect } from 'react';
|
|
||||||
|
|
||||||
const Cursor = ({
|
|
||||||
name,
|
|
||||||
color,
|
|
||||||
x,
|
|
||||||
y,
|
|
||||||
}: {
|
|
||||||
name: string | undefined;
|
|
||||||
color: string;
|
|
||||||
x: number;
|
|
||||||
y: number;
|
|
||||||
}) => (
|
|
||||||
<div
|
|
||||||
className="pointer-events-none absolute top-0 left-0 z-[999] select-none transition-transform duration-100"
|
|
||||||
style={{
|
|
||||||
transform: `translateX(${x}px) translateY(${y}px)`,
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
<svg
|
|
||||||
className="absolute top-0 left-0"
|
|
||||||
width="24"
|
|
||||||
height="36"
|
|
||||||
viewBox="0 0 24 36"
|
|
||||||
fill="none"
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
>
|
|
||||||
<title>Cursor</title>
|
|
||||||
<path
|
|
||||||
d="M5.65376 12.3673H5.46026L5.31717 12.4976L0.500002 16.8829L0.500002 1.19841L11.7841 12.3673H5.65376Z"
|
|
||||||
fill={color}
|
|
||||||
/>
|
|
||||||
</svg>
|
|
||||||
<div
|
|
||||||
className="absolute top-4 left-1.5 whitespace-nowrap rounded-full px-2 py-0.5 text-white text-xs"
|
|
||||||
style={{
|
|
||||||
backgroundColor: color,
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
{name}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
|
|
||||||
export const Cursors = () => {
|
|
||||||
/**
|
|
||||||
* useMyPresence returns the presence of the current user and a function to update it.
|
|
||||||
* updateMyPresence is different than the setState function returned by the useState hook from React.
|
|
||||||
* You don't need to pass the full presence object to update it.
|
|
||||||
* See https://liveblocks.io/docs/api-reference/liveblocks-react#useMyPresence for more information
|
|
||||||
*/
|
|
||||||
const [_cursor, updateMyPresence] = useMyPresence();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return all the other users in the room and their presence (a cursor position in this case)
|
|
||||||
*/
|
|
||||||
const others = useOthers();
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
const onPointerMove = (event: PointerEvent) => {
|
|
||||||
// Update the user cursor position on every pointer move
|
|
||||||
updateMyPresence({
|
|
||||||
cursor: {
|
|
||||||
x: Math.round(event.clientX),
|
|
||||||
y: Math.round(event.clientY),
|
|
||||||
},
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
const onPointerLeave = () => {
|
|
||||||
// When the pointer goes out, set cursor to null
|
|
||||||
updateMyPresence({
|
|
||||||
cursor: null,
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
document.body.addEventListener('pointermove', onPointerMove);
|
|
||||||
document.body.addEventListener('pointerleave', onPointerLeave);
|
|
||||||
|
|
||||||
return () => {
|
|
||||||
document.body.removeEventListener('pointermove', onPointerMove);
|
|
||||||
document.body.removeEventListener('pointerleave', onPointerLeave);
|
|
||||||
};
|
|
||||||
}, [updateMyPresence]);
|
|
||||||
|
|
||||||
return others.map(({ connectionId, presence, info }) => {
|
|
||||||
if (!presence.cursor) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<Cursor
|
|
||||||
key={`cursor-${connectionId}`}
|
|
||||||
// connectionId is an integer that is incremented at every new connections
|
|
||||||
// Assigning a color with a modulo makes sure that a specific user has the same colors on every clients
|
|
||||||
color={info.color}
|
|
||||||
x={presence.cursor.x}
|
|
||||||
y={presence.cursor.y}
|
|
||||||
name={info?.name}
|
|
||||||
/>
|
|
||||||
);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
import {
|
|
||||||
Breadcrumb,
|
|
||||||
BreadcrumbItem,
|
|
||||||
BreadcrumbLink,
|
|
||||||
BreadcrumbList,
|
|
||||||
BreadcrumbPage,
|
|
||||||
BreadcrumbSeparator,
|
|
||||||
} from '@konobangu/design-system/components/ui/breadcrumb';
|
|
||||||
import { Separator } from '@konobangu/design-system/components/ui/separator';
|
|
||||||
import { SidebarTrigger } from '@konobangu/design-system/components/ui/sidebar';
|
|
||||||
import { Fragment, type ReactNode } from 'react';
|
|
||||||
|
|
||||||
type HeaderProps = {
|
|
||||||
pages: string[];
|
|
||||||
page: string;
|
|
||||||
children?: ReactNode;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const Header = ({ pages, page, children }: HeaderProps) => (
|
|
||||||
<header className="flex h-16 shrink-0 items-center justify-between gap-2">
|
|
||||||
<div className="flex items-center gap-2 px-4">
|
|
||||||
<SidebarTrigger className="-ml-1" />
|
|
||||||
<Separator orientation="vertical" className="mr-2 h-4" />
|
|
||||||
<Breadcrumb>
|
|
||||||
<BreadcrumbList>
|
|
||||||
{pages.map((page, index) => (
|
|
||||||
<Fragment key={page}>
|
|
||||||
{index > 0 && <BreadcrumbSeparator className="hidden md:block" />}
|
|
||||||
<BreadcrumbItem className="hidden md:block">
|
|
||||||
<BreadcrumbLink href="#">{page}</BreadcrumbLink>
|
|
||||||
</BreadcrumbItem>
|
|
||||||
</Fragment>
|
|
||||||
))}
|
|
||||||
<BreadcrumbSeparator className="hidden md:block" />
|
|
||||||
<BreadcrumbItem>
|
|
||||||
<BreadcrumbPage>{page}</BreadcrumbPage>
|
|
||||||
</BreadcrumbItem>
|
|
||||||
</BreadcrumbList>
|
|
||||||
</Breadcrumb>
|
|
||||||
</div>
|
|
||||||
{children}
|
|
||||||
</header>
|
|
||||||
);
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
'use client';
|
|
||||||
|
|
||||||
import { analytics } from '@konobangu/analytics/client';
|
|
||||||
import { useSession } from '@konobangu/auth/client';
|
|
||||||
import { usePathname, useSearchParams } from 'next/navigation';
|
|
||||||
import { useEffect, useRef } from 'react';
|
|
||||||
|
|
||||||
export const PostHogIdentifier = () => {
|
|
||||||
const session = useSession();
|
|
||||||
const user = session?.data?.user;
|
|
||||||
const identified = useRef(false);
|
|
||||||
const pathname = usePathname();
|
|
||||||
const searchParams = useSearchParams();
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
// Track pageviews
|
|
||||||
if (pathname && analytics) {
|
|
||||||
let url = window.origin + pathname;
|
|
||||||
if (searchParams.toString()) {
|
|
||||||
url = `${url}?${searchParams.toString()}`;
|
|
||||||
}
|
|
||||||
analytics.capture('$pageview', {
|
|
||||||
$current_url: url,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}, [pathname, searchParams]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
if (!user || identified.current) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
analytics.identify(user.id, {
|
|
||||||
email: user.email,
|
|
||||||
name: user.name,
|
|
||||||
createdAt: user.createdAt,
|
|
||||||
avatar: user.image,
|
|
||||||
});
|
|
||||||
|
|
||||||
identified.current = true;
|
|
||||||
}, [user]);
|
|
||||||
|
|
||||||
return null;
|
|
||||||
};
|
|
||||||
@@ -1,342 +0,0 @@
|
|||||||
'use client';
|
|
||||||
|
|
||||||
// import { OrganizationSwitcher, UserButton } from '@konobangu/auth/client';
|
|
||||||
import { ModeToggle } from '@konobangu/design-system/components/mode-toggle';
|
|
||||||
import {
|
|
||||||
Collapsible,
|
|
||||||
CollapsibleContent,
|
|
||||||
CollapsibleTrigger,
|
|
||||||
} from '@konobangu/design-system/components/ui/collapsible';
|
|
||||||
import {
|
|
||||||
DropdownMenu,
|
|
||||||
DropdownMenuContent,
|
|
||||||
DropdownMenuItem,
|
|
||||||
DropdownMenuSeparator,
|
|
||||||
DropdownMenuTrigger,
|
|
||||||
} from '@konobangu/design-system/components/ui/dropdown-menu';
|
|
||||||
import {
|
|
||||||
Sidebar,
|
|
||||||
SidebarContent,
|
|
||||||
SidebarFooter,
|
|
||||||
SidebarGroup,
|
|
||||||
SidebarGroupContent,
|
|
||||||
SidebarGroupLabel,
|
|
||||||
SidebarHeader,
|
|
||||||
SidebarInset,
|
|
||||||
SidebarMenu,
|
|
||||||
SidebarMenuAction,
|
|
||||||
SidebarMenuButton,
|
|
||||||
SidebarMenuItem,
|
|
||||||
SidebarMenuSub,
|
|
||||||
SidebarMenuSubButton,
|
|
||||||
SidebarMenuSubItem,
|
|
||||||
useSidebar,
|
|
||||||
} from '@konobangu/design-system/components/ui/sidebar';
|
|
||||||
import { cn } from '@konobangu/design-system/lib/utils';
|
|
||||||
import {
|
|
||||||
AnchorIcon,
|
|
||||||
BookOpenIcon,
|
|
||||||
BotIcon,
|
|
||||||
ChevronRightIcon,
|
|
||||||
FolderIcon,
|
|
||||||
FrameIcon,
|
|
||||||
LifeBuoyIcon,
|
|
||||||
MapIcon,
|
|
||||||
MoreHorizontalIcon,
|
|
||||||
PieChartIcon,
|
|
||||||
SendIcon,
|
|
||||||
Settings2Icon,
|
|
||||||
ShareIcon,
|
|
||||||
SquareTerminalIcon,
|
|
||||||
Trash2Icon,
|
|
||||||
} from 'lucide-react';
|
|
||||||
import type { ReactNode } from 'react';
|
|
||||||
|
|
||||||
type GlobalSidebarProperties = {
|
|
||||||
readonly children: ReactNode;
|
|
||||||
};
|
|
||||||
|
|
||||||
const data = {
|
|
||||||
user: {
|
|
||||||
name: 'shadcn',
|
|
||||||
email: 'm@example.com',
|
|
||||||
avatar: '/avatars/shadcn.jpg',
|
|
||||||
},
|
|
||||||
navMain: [
|
|
||||||
{
|
|
||||||
title: 'Playground',
|
|
||||||
url: '#',
|
|
||||||
icon: SquareTerminalIcon,
|
|
||||||
isActive: true,
|
|
||||||
items: [
|
|
||||||
{
|
|
||||||
title: 'History',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Starred',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Settings',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Models',
|
|
||||||
url: '#',
|
|
||||||
icon: BotIcon,
|
|
||||||
items: [
|
|
||||||
{
|
|
||||||
title: 'Genesis',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Explorer',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Quantum',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Documentation',
|
|
||||||
url: '#',
|
|
||||||
icon: BookOpenIcon,
|
|
||||||
items: [
|
|
||||||
{
|
|
||||||
title: 'Introduction',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Get Started',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Tutorials',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Changelog',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Settings',
|
|
||||||
url: '#',
|
|
||||||
icon: Settings2Icon,
|
|
||||||
items: [
|
|
||||||
{
|
|
||||||
title: 'General',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Team',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Billing',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Limits',
|
|
||||||
url: '#',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
navSecondary: [
|
|
||||||
{
|
|
||||||
title: 'Webhooks',
|
|
||||||
url: '/webhooks',
|
|
||||||
icon: AnchorIcon,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Support',
|
|
||||||
url: '#',
|
|
||||||
icon: LifeBuoyIcon,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Feedback',
|
|
||||||
url: '#',
|
|
||||||
icon: SendIcon,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
projects: [
|
|
||||||
{
|
|
||||||
name: 'Design Engineering',
|
|
||||||
url: '#',
|
|
||||||
icon: FrameIcon,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'Sales & Marketing',
|
|
||||||
url: '#',
|
|
||||||
icon: PieChartIcon,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'Travel',
|
|
||||||
url: '#',
|
|
||||||
icon: MapIcon,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
|
|
||||||
export const GlobalSidebar = ({ children }: GlobalSidebarProperties) => {
|
|
||||||
const sidebar = useSidebar();
|
|
||||||
|
|
||||||
return (
|
|
||||||
<>
|
|
||||||
<Sidebar variant="inset">
|
|
||||||
<SidebarHeader>
|
|
||||||
<SidebarMenu>
|
|
||||||
<SidebarMenuItem>
|
|
||||||
<div
|
|
||||||
className={cn(
|
|
||||||
'h-[36px] overflow-hidden transition-all [&>div]:w-full',
|
|
||||||
sidebar.open ? '' : '-mx-1'
|
|
||||||
)}
|
|
||||||
>
|
|
||||||
{/* <OrganizationSwitcher
|
|
||||||
hidePersonal
|
|
||||||
afterSelectOrganizationUrl="/"
|
|
||||||
/> */}
|
|
||||||
</div>
|
|
||||||
</SidebarMenuItem>
|
|
||||||
</SidebarMenu>
|
|
||||||
</SidebarHeader>
|
|
||||||
<SidebarContent>
|
|
||||||
<SidebarGroup>
|
|
||||||
<SidebarGroupLabel>Platform</SidebarGroupLabel>
|
|
||||||
<SidebarMenu>
|
|
||||||
{data.navMain.map((item) => (
|
|
||||||
<Collapsible
|
|
||||||
key={item.title}
|
|
||||||
asChild
|
|
||||||
defaultOpen={item.isActive}
|
|
||||||
>
|
|
||||||
<SidebarMenuItem>
|
|
||||||
<SidebarMenuButton asChild tooltip={item.title}>
|
|
||||||
<a href={item.url}>
|
|
||||||
<item.icon />
|
|
||||||
<span>{item.title}</span>
|
|
||||||
</a>
|
|
||||||
</SidebarMenuButton>
|
|
||||||
{item.items?.length ? (
|
|
||||||
<>
|
|
||||||
<CollapsibleTrigger asChild>
|
|
||||||
<SidebarMenuAction className="data-[state=open]:rotate-90">
|
|
||||||
<ChevronRightIcon />
|
|
||||||
<span className="sr-only">Toggle</span>
|
|
||||||
</SidebarMenuAction>
|
|
||||||
</CollapsibleTrigger>
|
|
||||||
<CollapsibleContent>
|
|
||||||
<SidebarMenuSub>
|
|
||||||
{item.items?.map((subItem) => (
|
|
||||||
<SidebarMenuSubItem key={subItem.title}>
|
|
||||||
<SidebarMenuSubButton asChild>
|
|
||||||
<a href={subItem.url}>
|
|
||||||
<span>{subItem.title}</span>
|
|
||||||
</a>
|
|
||||||
</SidebarMenuSubButton>
|
|
||||||
</SidebarMenuSubItem>
|
|
||||||
))}
|
|
||||||
</SidebarMenuSub>
|
|
||||||
</CollapsibleContent>
|
|
||||||
</>
|
|
||||||
) : null}
|
|
||||||
</SidebarMenuItem>
|
|
||||||
</Collapsible>
|
|
||||||
))}
|
|
||||||
</SidebarMenu>
|
|
||||||
</SidebarGroup>
|
|
||||||
<SidebarGroup className="group-data-[collapsible=icon]:hidden">
|
|
||||||
<SidebarGroupLabel>Projects</SidebarGroupLabel>
|
|
||||||
<SidebarMenu>
|
|
||||||
{data.projects.map((item) => (
|
|
||||||
<SidebarMenuItem key={item.name}>
|
|
||||||
<SidebarMenuButton asChild>
|
|
||||||
<a href={item.url}>
|
|
||||||
<item.icon />
|
|
||||||
<span>{item.name}</span>
|
|
||||||
</a>
|
|
||||||
</SidebarMenuButton>
|
|
||||||
<DropdownMenu>
|
|
||||||
<DropdownMenuTrigger asChild>
|
|
||||||
<SidebarMenuAction showOnHover>
|
|
||||||
<MoreHorizontalIcon />
|
|
||||||
<span className="sr-only">More</span>
|
|
||||||
</SidebarMenuAction>
|
|
||||||
</DropdownMenuTrigger>
|
|
||||||
<DropdownMenuContent
|
|
||||||
className="w-48"
|
|
||||||
side="bottom"
|
|
||||||
align="end"
|
|
||||||
>
|
|
||||||
<DropdownMenuItem>
|
|
||||||
<FolderIcon className="text-muted-foreground" />
|
|
||||||
<span>View Project</span>
|
|
||||||
</DropdownMenuItem>
|
|
||||||
<DropdownMenuItem>
|
|
||||||
<ShareIcon className="text-muted-foreground" />
|
|
||||||
<span>Share Project</span>
|
|
||||||
</DropdownMenuItem>
|
|
||||||
<DropdownMenuSeparator />
|
|
||||||
<DropdownMenuItem>
|
|
||||||
<Trash2Icon className="text-muted-foreground" />
|
|
||||||
<span>Delete Project</span>
|
|
||||||
</DropdownMenuItem>
|
|
||||||
</DropdownMenuContent>
|
|
||||||
</DropdownMenu>
|
|
||||||
</SidebarMenuItem>
|
|
||||||
))}
|
|
||||||
<SidebarMenuItem>
|
|
||||||
<SidebarMenuButton>
|
|
||||||
<MoreHorizontalIcon />
|
|
||||||
<span>More</span>
|
|
||||||
</SidebarMenuButton>
|
|
||||||
</SidebarMenuItem>
|
|
||||||
</SidebarMenu>
|
|
||||||
</SidebarGroup>
|
|
||||||
<SidebarGroup className="mt-auto">
|
|
||||||
<SidebarGroupContent>
|
|
||||||
<SidebarMenu>
|
|
||||||
{data.navSecondary.map((item) => (
|
|
||||||
<SidebarMenuItem key={item.title}>
|
|
||||||
<SidebarMenuButton asChild>
|
|
||||||
<a href={item.url}>
|
|
||||||
<item.icon />
|
|
||||||
<span>{item.title}</span>
|
|
||||||
</a>
|
|
||||||
</SidebarMenuButton>
|
|
||||||
</SidebarMenuItem>
|
|
||||||
))}
|
|
||||||
</SidebarMenu>
|
|
||||||
</SidebarGroupContent>
|
|
||||||
</SidebarGroup>
|
|
||||||
</SidebarContent>
|
|
||||||
<SidebarFooter>
|
|
||||||
<SidebarMenu>
|
|
||||||
<SidebarMenuItem className="flex items-center gap-2">
|
|
||||||
{/* <UserButton
|
|
||||||
showName
|
|
||||||
appearance={{
|
|
||||||
elements: {
|
|
||||||
rootBox: 'flex overflow-hidden w-full',
|
|
||||||
userButtonBox: 'flex-row-reverse',
|
|
||||||
userButtonOuterIdentifier: 'truncate pl-0',
|
|
||||||
},
|
|
||||||
}}
|
|
||||||
/> */}
|
|
||||||
<ModeToggle />
|
|
||||||
</SidebarMenuItem>
|
|
||||||
</SidebarMenu>
|
|
||||||
</SidebarFooter>
|
|
||||||
</Sidebar>
|
|
||||||
<SidebarInset>{children}</SidebarInset>
|
|
||||||
</>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
import { getSessionFromHeaders } from '@konobangu/auth/server';
|
|
||||||
import { SidebarProvider } from '@konobangu/design-system/components/ui/sidebar';
|
|
||||||
import { env } from '@konobangu/env';
|
|
||||||
import { showBetaFeature } from '@konobangu/feature-flags';
|
|
||||||
import { secure } from '@konobangu/security';
|
|
||||||
import { redirect } from 'next/navigation';
|
|
||||||
import type { ReactNode } from 'react';
|
|
||||||
import { PostHogIdentifier } from './components/posthog-identifier';
|
|
||||||
import { GlobalSidebar } from './components/sidebar';
|
|
||||||
|
|
||||||
type AppLayoutProperties = {
|
|
||||||
readonly children: ReactNode;
|
|
||||||
};
|
|
||||||
|
|
||||||
const AppLayout = async ({ children }: AppLayoutProperties) => {
|
|
||||||
if (env.ARCJET_KEY) {
|
|
||||||
await secure(['CATEGORY:PREVIEW']);
|
|
||||||
}
|
|
||||||
|
|
||||||
const { user } = await getSessionFromHeaders();
|
|
||||||
|
|
||||||
if (!user) {
|
|
||||||
return redirect('/sign-in'); // from next/navigation
|
|
||||||
}
|
|
||||||
const betaFeature = await showBetaFeature();
|
|
||||||
|
|
||||||
return (
|
|
||||||
<SidebarProvider>
|
|
||||||
<GlobalSidebar>
|
|
||||||
{betaFeature && (
|
|
||||||
<div className="m-4 rounded-full bg-success p-1.5 text-center text-sm text-success-foreground">
|
|
||||||
Beta feature now available
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{children}
|
|
||||||
</GlobalSidebar>
|
|
||||||
<PostHogIdentifier />
|
|
||||||
</SidebarProvider>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
export default AppLayout;
|
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
import { getSessionFromHeaders } from '@konobangu/auth/server';
|
|
||||||
import { database } from '@konobangu/database';
|
|
||||||
import { env } from '@konobangu/env';
|
|
||||||
import type { Metadata } from 'next';
|
|
||||||
import dynamic from 'next/dynamic';
|
|
||||||
import { notFound } from 'next/navigation';
|
|
||||||
import { AvatarStack } from './components/avatar-stack';
|
|
||||||
import { Cursors } from './components/cursors';
|
|
||||||
import { Header } from './components/header';
|
|
||||||
|
|
||||||
const title = 'Acme Inc';
|
|
||||||
const description = 'My application.';
|
|
||||||
|
|
||||||
const CollaborationProvider = dynamic(() =>
|
|
||||||
import('./components/collaboration-provider').then(
|
|
||||||
(mod) => mod.CollaborationProvider
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
export const metadata: Metadata = {
|
|
||||||
title,
|
|
||||||
description,
|
|
||||||
};
|
|
||||||
|
|
||||||
const App = async () => {
|
|
||||||
const pages = await database.selectFrom('page').selectAll().execute();
|
|
||||||
const { orgId } = await getSessionFromHeaders();
|
|
||||||
|
|
||||||
if (!orgId) {
|
|
||||||
notFound();
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<>
|
|
||||||
<Header pages={['Building Your Application']} page="Data Fetching">
|
|
||||||
{env.LIVEBLOCKS_SECRET && (
|
|
||||||
<CollaborationProvider orgId={orgId}>
|
|
||||||
<AvatarStack />
|
|
||||||
<Cursors />
|
|
||||||
</CollaborationProvider>
|
|
||||||
)}
|
|
||||||
</Header>
|
|
||||||
<div className="flex flex-1 flex-col gap-4 p-4 pt-0">
|
|
||||||
<div className="grid auto-rows-min gap-4 md:grid-cols-3">
|
|
||||||
{pages.map((page) => (
|
|
||||||
<div key={page.id} className="aspect-video rounded-xl bg-muted/50">
|
|
||||||
{page.name}
|
|
||||||
</div>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
<div className="min-h-[100vh] flex-1 rounded-xl bg-muted/50 md:min-h-min" />
|
|
||||||
</div>
|
|
||||||
</>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
export default App;
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
import { webhooks } from '@konobangu/webhooks';
|
|
||||||
import { notFound } from 'next/navigation';
|
|
||||||
|
|
||||||
export const metadata = {
|
|
||||||
title: 'Webhooks',
|
|
||||||
description: 'Send webhooks to your users.',
|
|
||||||
};
|
|
||||||
|
|
||||||
const WebhooksPage = async () => {
|
|
||||||
const response = await webhooks.getAppPortal();
|
|
||||||
|
|
||||||
if (!response?.url) {
|
|
||||||
notFound();
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="h-full w-full overflow-hidden">
|
|
||||||
<iframe
|
|
||||||
title="Webhooks"
|
|
||||||
src={response.url}
|
|
||||||
className="h-full w-full border-none"
|
|
||||||
allow="clipboard-write"
|
|
||||||
loading="lazy"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
export default WebhooksPage;
|
|
||||||
@@ -1,58 +0,0 @@
|
|||||||
import { ModeToggle } from '@konobangu/design-system/components/mode-toggle';
|
|
||||||
import { env } from '@konobangu/env';
|
|
||||||
import { CommandIcon } from 'lucide-react';
|
|
||||||
import Link from 'next/link';
|
|
||||||
import type { ReactNode } from 'react';
|
|
||||||
|
|
||||||
type AuthLayoutProps = {
|
|
||||||
readonly children: ReactNode;
|
|
||||||
};
|
|
||||||
|
|
||||||
const AuthLayout = ({ children }: AuthLayoutProps) => (
|
|
||||||
<div className="container relative grid h-dvh flex-col items-center justify-center lg:max-w-none lg:grid-cols-2 lg:px-0">
|
|
||||||
<div className="relative hidden h-full flex-col bg-muted p-10 text-white lg:flex dark:border-r">
|
|
||||||
<div className="absolute inset-0 bg-zinc-900" />
|
|
||||||
<div className="relative z-20 flex items-center font-medium text-lg">
|
|
||||||
<CommandIcon className="mr-2 h-6 w-6" />
|
|
||||||
Acme Inc
|
|
||||||
</div>
|
|
||||||
<div className="absolute top-4 right-4">
|
|
||||||
<ModeToggle />
|
|
||||||
</div>
|
|
||||||
<div className="relative z-20 mt-auto">
|
|
||||||
<blockquote className="space-y-2">
|
|
||||||
<p className="text-lg">
|
|
||||||
“This library has saved me countless hours of work and helped
|
|
||||||
me deliver stunning designs to my clients faster than ever
|
|
||||||
before.”
|
|
||||||
</p>
|
|
||||||
<footer className="text-sm">Sofia Davis</footer>
|
|
||||||
</blockquote>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div className="lg:p-8">
|
|
||||||
<div className="mx-auto flex w-full max-w-[400px] flex-col justify-center space-y-6">
|
|
||||||
{children}
|
|
||||||
<p className="px-8 text-center text-muted-foreground text-sm">
|
|
||||||
By clicking continue, you agree to our{' '}
|
|
||||||
<Link
|
|
||||||
href={new URL('/legal/terms', env.NEXT_PUBLIC_WEB_URL).toString()}
|
|
||||||
className="underline underline-offset-4 hover:text-primary"
|
|
||||||
>
|
|
||||||
Terms of Service
|
|
||||||
</Link>{' '}
|
|
||||||
and{' '}
|
|
||||||
<Link
|
|
||||||
href={new URL('/legal/privacy', env.NEXT_PUBLIC_WEB_URL).toString()}
|
|
||||||
className="underline underline-offset-4 hover:text-primary"
|
|
||||||
>
|
|
||||||
Privacy Policy
|
|
||||||
</Link>
|
|
||||||
.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
|
|
||||||
export default AuthLayout;
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
import { createMetadata } from '@konobangu/seo/metadata';
|
|
||||||
import type { Metadata } from 'next';
|
|
||||||
import dynamic from 'next/dynamic';
|
|
||||||
|
|
||||||
const title = 'Welcome back';
|
|
||||||
const description = 'Enter your details to sign in.';
|
|
||||||
const SignIn = dynamic(() =>
|
|
||||||
import('@konobangu/auth/components/sign-in').then((mod) => mod.SignIn)
|
|
||||||
);
|
|
||||||
|
|
||||||
export const metadata: Metadata = createMetadata({ title, description });
|
|
||||||
|
|
||||||
const SignInPage = () => (
|
|
||||||
<>
|
|
||||||
<div className="flex flex-col space-y-2 text-center">
|
|
||||||
<h1 className="font-semibold text-2xl tracking-tight">{title}</h1>
|
|
||||||
<p className="text-muted-foreground text-sm">{description}</p>
|
|
||||||
</div>
|
|
||||||
<SignIn />
|
|
||||||
</>
|
|
||||||
);
|
|
||||||
|
|
||||||
export default SignInPage;
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
import { createMetadata } from '@konobangu/seo/metadata';
|
|
||||||
import type { Metadata } from 'next';
|
|
||||||
import dynamic from 'next/dynamic';
|
|
||||||
|
|
||||||
const title = 'Create an account';
|
|
||||||
const description = 'Enter your details to get started.';
|
|
||||||
const SignUp = dynamic(() =>
|
|
||||||
import('@konobangu/auth/components/sign-up').then((mod) => mod.SignUp)
|
|
||||||
);
|
|
||||||
|
|
||||||
export const metadata: Metadata = createMetadata({ title, description });
|
|
||||||
|
|
||||||
const SignUpPage = () => (
|
|
||||||
<>
|
|
||||||
<div className="flex flex-col space-y-2 text-center">
|
|
||||||
<h1 className="font-semibold text-2xl tracking-tight">{title}</h1>
|
|
||||||
<p className="text-muted-foreground text-sm">{description}</p>
|
|
||||||
</div>
|
|
||||||
<SignUp />
|
|
||||||
</>
|
|
||||||
);
|
|
||||||
|
|
||||||
export default SignUpPage;
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
import { getFlags } from '@konobangu/feature-flags/access';
|
|
||||||
|
|
||||||
export const GET = getFlags;
|
|
||||||
@@ -1,63 +0,0 @@
|
|||||||
'use server';
|
|
||||||
|
|
||||||
import {
|
|
||||||
getFullOrganizationFromSession,
|
|
||||||
getSessionFromHeaders,
|
|
||||||
} from '@konobangu/auth/server';
|
|
||||||
import { tailwind } from '@konobangu/tailwind-config';
|
|
||||||
|
|
||||||
const colors = [
|
|
||||||
tailwind.theme.colors.red[500],
|
|
||||||
tailwind.theme.colors.orange[500],
|
|
||||||
tailwind.theme.colors.amber[500],
|
|
||||||
tailwind.theme.colors.yellow[500],
|
|
||||||
tailwind.theme.colors.lime[500],
|
|
||||||
tailwind.theme.colors.green[500],
|
|
||||||
tailwind.theme.colors.emerald[500],
|
|
||||||
tailwind.theme.colors.teal[500],
|
|
||||||
tailwind.theme.colors.cyan[500],
|
|
||||||
tailwind.theme.colors.sky[500],
|
|
||||||
tailwind.theme.colors.blue[500],
|
|
||||||
tailwind.theme.colors.indigo[500],
|
|
||||||
tailwind.theme.colors.violet[500],
|
|
||||||
tailwind.theme.colors.purple[500],
|
|
||||||
tailwind.theme.colors.fuchsia[500],
|
|
||||||
tailwind.theme.colors.pink[500],
|
|
||||||
tailwind.theme.colors.rose[500],
|
|
||||||
];
|
|
||||||
|
|
||||||
export const getUsers = async (
|
|
||||||
userIds: string[]
|
|
||||||
): Promise<
|
|
||||||
| {
|
|
||||||
data: Liveblocks['UserMeta']['info'][];
|
|
||||||
}
|
|
||||||
| {
|
|
||||||
error: unknown;
|
|
||||||
}
|
|
||||||
> => {
|
|
||||||
try {
|
|
||||||
const session = await getSessionFromHeaders();
|
|
||||||
const { orgId } = session;
|
|
||||||
|
|
||||||
if (!orgId) {
|
|
||||||
throw new Error('Not logged in');
|
|
||||||
}
|
|
||||||
|
|
||||||
const { fullOrganization } = await getFullOrganizationFromSession(session);
|
|
||||||
|
|
||||||
const members = fullOrganization?.members || [];
|
|
||||||
|
|
||||||
const data: Liveblocks['UserMeta']['info'][] = members
|
|
||||||
.filter((user) => user?.userId && userIds.includes(user?.userId))
|
|
||||||
.map((user) => ({
|
|
||||||
name: user.user.name ?? user.user.email ?? 'Unknown user',
|
|
||||||
picture: user.user.image,
|
|
||||||
color: colors[Math.floor(Math.random() * colors.length)],
|
|
||||||
}));
|
|
||||||
|
|
||||||
return { data };
|
|
||||||
} catch (error) {
|
|
||||||
return { error };
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
'use server';
|
|
||||||
|
|
||||||
import {
|
|
||||||
getFullOrganizationFromSession,
|
|
||||||
getSessionFromHeaders,
|
|
||||||
} from '@konobangu/auth/server';
|
|
||||||
import Fuse from 'fuse.js';
|
|
||||||
|
|
||||||
export const searchUsers = async (
|
|
||||||
query: string
|
|
||||||
): Promise<
|
|
||||||
| {
|
|
||||||
data: string[];
|
|
||||||
}
|
|
||||||
| {
|
|
||||||
error: unknown;
|
|
||||||
}
|
|
||||||
> => {
|
|
||||||
try {
|
|
||||||
const session = await getSessionFromHeaders();
|
|
||||||
const { orgId } = session;
|
|
||||||
|
|
||||||
if (!orgId) {
|
|
||||||
throw new Error('Not logged in');
|
|
||||||
}
|
|
||||||
|
|
||||||
const { fullOrganization } = await getFullOrganizationFromSession(session);
|
|
||||||
|
|
||||||
const members = fullOrganization?.members || [];
|
|
||||||
|
|
||||||
const users = members.map((user) => ({
|
|
||||||
id: user.id,
|
|
||||||
name: user.user.name ?? user.user.email ?? 'Unknown user',
|
|
||||||
imageUrl: user.user.image,
|
|
||||||
}));
|
|
||||||
|
|
||||||
const fuse = new Fuse(users, {
|
|
||||||
keys: ['name'],
|
|
||||||
minMatchCharLength: 1,
|
|
||||||
threshold: 0.3,
|
|
||||||
});
|
|
||||||
|
|
||||||
const results = fuse.search(query);
|
|
||||||
const data = results.map((result) => result.item.id);
|
|
||||||
|
|
||||||
return { data };
|
|
||||||
} catch (error) {
|
|
||||||
return { error };
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
import { getSessionFromHeaders } from '@konobangu/auth/server';
|
|
||||||
import { authenticate } from '@konobangu/collaboration/auth';
|
|
||||||
import { tailwind } from '@konobangu/tailwind-config';
|
|
||||||
|
|
||||||
const COLORS = [
|
|
||||||
tailwind.theme.colors.red[500],
|
|
||||||
tailwind.theme.colors.orange[500],
|
|
||||||
tailwind.theme.colors.amber[500],
|
|
||||||
tailwind.theme.colors.yellow[500],
|
|
||||||
tailwind.theme.colors.lime[500],
|
|
||||||
tailwind.theme.colors.green[500],
|
|
||||||
tailwind.theme.colors.emerald[500],
|
|
||||||
tailwind.theme.colors.teal[500],
|
|
||||||
tailwind.theme.colors.cyan[500],
|
|
||||||
tailwind.theme.colors.sky[500],
|
|
||||||
tailwind.theme.colors.blue[500],
|
|
||||||
tailwind.theme.colors.indigo[500],
|
|
||||||
tailwind.theme.colors.violet[500],
|
|
||||||
tailwind.theme.colors.purple[500],
|
|
||||||
tailwind.theme.colors.fuchsia[500],
|
|
||||||
tailwind.theme.colors.pink[500],
|
|
||||||
tailwind.theme.colors.rose[500],
|
|
||||||
];
|
|
||||||
|
|
||||||
export const POST = async () => {
|
|
||||||
const session = await getSessionFromHeaders();
|
|
||||||
const { orgId, user } = session;
|
|
||||||
|
|
||||||
if (!user || !orgId) {
|
|
||||||
return new Response('Unauthorized', { status: 401 });
|
|
||||||
}
|
|
||||||
|
|
||||||
return authenticate({
|
|
||||||
userId: user.id,
|
|
||||||
orgId,
|
|
||||||
userInfo: {
|
|
||||||
name: user.name ?? user.email ?? undefined,
|
|
||||||
avatar: user.image ?? undefined,
|
|
||||||
color: COLORS[Math.floor(Math.random() * COLORS.length)],
|
|
||||||
},
|
|
||||||
});
|
|
||||||
};
|
|
||||||
Binary file not shown.
|
Before Width: | Height: | Size: 216 B |
@@ -1,17 +0,0 @@
|
|||||||
import { database } from '@konobangu/database';
|
|
||||||
|
|
||||||
export const POST = async () => {
|
|
||||||
const newPage = await database
|
|
||||||
.insertInto('page')
|
|
||||||
.values([
|
|
||||||
{
|
|
||||||
name: 'cron-temp',
|
|
||||||
},
|
|
||||||
])
|
|
||||||
.returning('id')
|
|
||||||
.executeTakeFirstOrThrow();
|
|
||||||
|
|
||||||
await database.deleteFrom('page').where('id', '=', newPage.id);
|
|
||||||
|
|
||||||
return new Response('OK', { status: 200 });
|
|
||||||
};
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
'use client';
|
|
||||||
|
|
||||||
import { Button } from '@konobangu/design-system/components/ui/button';
|
|
||||||
import { fonts } from '@konobangu/design-system/lib/fonts';
|
|
||||||
import { captureException } from '@sentry/nextjs';
|
|
||||||
import type NextError from 'next/error';
|
|
||||||
import { useEffect } from 'react';
|
|
||||||
|
|
||||||
type GlobalErrorProperties = {
|
|
||||||
readonly error: NextError & { digest?: string };
|
|
||||||
readonly reset: () => void;
|
|
||||||
};
|
|
||||||
|
|
||||||
const GlobalError = ({ error, reset }: GlobalErrorProperties) => {
|
|
||||||
useEffect(() => {
|
|
||||||
captureException(error);
|
|
||||||
}, [error]);
|
|
||||||
|
|
||||||
return (
|
|
||||||
<html lang="en" className={fonts}>
|
|
||||||
<body>
|
|
||||||
<h1>Oops, something went wrong</h1>
|
|
||||||
<Button onClick={() => reset()}>Try again</Button>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
export default GlobalError;
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
export const runtime = 'edge';
|
|
||||||
|
|
||||||
export const GET = (): Response => new Response('OK', { status: 200 });
|
|
||||||
Binary file not shown.
|
Before Width: | Height: | Size: 96 B |
@@ -1,18 +0,0 @@
|
|||||||
import '@konobangu/design-system/styles/globals.css';
|
|
||||||
import { DesignSystemProvider } from '@konobangu/design-system';
|
|
||||||
import { fonts } from '@konobangu/design-system/lib/fonts';
|
|
||||||
import type { ReactNode } from 'react';
|
|
||||||
|
|
||||||
type RootLayoutProperties = {
|
|
||||||
readonly children: ReactNode;
|
|
||||||
};
|
|
||||||
|
|
||||||
const RootLayout = ({ children }: RootLayoutProperties) => (
|
|
||||||
<html lang="en" className={fonts} suppressHydrationWarning>
|
|
||||||
<body>
|
|
||||||
<DesignSystemProvider>{children}</DesignSystemProvider>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
);
|
|
||||||
|
|
||||||
export default RootLayout;
|
|
||||||
Binary file not shown.
|
Before Width: | Height: | Size: 57 KiB |
@@ -1,3 +0,0 @@
|
|||||||
import { initializeSentry } from '@konobangu/next-config/instrumentation';
|
|
||||||
|
|
||||||
export const register = initializeSentry();
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
export * from '@konobangu/collaboration/config';
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
import { authMiddleware } from '@konobangu/auth/middleware';
|
|
||||||
import {
|
|
||||||
noseconeConfig,
|
|
||||||
noseconeMiddleware,
|
|
||||||
} from '@konobangu/security/middleware';
|
|
||||||
import { NextRequest } from 'next/server';
|
|
||||||
|
|
||||||
const securityHeaders = noseconeMiddleware(noseconeConfig);
|
|
||||||
|
|
||||||
export async function middleware (_request: NextRequest) {
|
|
||||||
const response = await securityHeaders();
|
|
||||||
return authMiddleware(response as any);
|
|
||||||
}
|
|
||||||
|
|
||||||
export const config = {
|
|
||||||
matcher: [
|
|
||||||
// Skip Next.js internals and all static files, unless found in search params
|
|
||||||
'/((?!_next|[^?]*\\.(?:html?|css|js(?!on)|jpe?g|webp|png|gif|svg|ttf|woff2?|ico|csv|docx?|xlsx?|zip|webmanifest)).*)',
|
|
||||||
// Always run for API routes
|
|
||||||
'/(api|trpc)(.*)',
|
|
||||||
],
|
|
||||||
};
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
import { env } from '@konobangu/env';
|
|
||||||
import { config, withAnalyzer, withSentry } from '@konobangu/next-config';
|
|
||||||
import type { NextConfig } from 'next';
|
|
||||||
|
|
||||||
let nextConfig: NextConfig = { ...config };
|
|
||||||
|
|
||||||
if (env.VERCEL) {
|
|
||||||
nextConfig = withSentry(nextConfig);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (env.ANALYZE === 'true') {
|
|
||||||
nextConfig = withAnalyzer(nextConfig);
|
|
||||||
}
|
|
||||||
|
|
||||||
export default nextConfig;
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "app",
|
|
||||||
"private": true,
|
|
||||||
"scripts": {
|
|
||||||
"dev": "next dev -p 5000 --turbopack",
|
|
||||||
"build": "next build",
|
|
||||||
"start": "next start",
|
|
||||||
"analyze": "ANALYZE=true pnpm build",
|
|
||||||
"test": "vitest run",
|
|
||||||
"clean": "git clean -xdf .cache .turbo dist node_modules",
|
|
||||||
"typecheck": "tsc --noEmit --emitDeclarationOnly false"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@prisma/client": "6.0.1",
|
|
||||||
"@konobangu/analytics": "workspace:*",
|
|
||||||
"@konobangu/auth": "workspace:*",
|
|
||||||
"@konobangu/collaboration": "workspace:*",
|
|
||||||
"@konobangu/database": "workspace:*",
|
|
||||||
"@konobangu/migrate": "workspace:*",
|
|
||||||
"@konobangu/design-system": "workspace:*",
|
|
||||||
"@konobangu/env": "workspace:*",
|
|
||||||
"@konobangu/feature-flags": "workspace:*",
|
|
||||||
"@konobangu/next-config": "workspace:*",
|
|
||||||
"@konobangu/security": "workspace:*",
|
|
||||||
"@konobangu/seo": "workspace:*",
|
|
||||||
"@konobangu/tailwind-config": "workspace:*",
|
|
||||||
"@konobangu/webhooks": "workspace:*",
|
|
||||||
"@sentry/nextjs": "^8.43.0",
|
|
||||||
"fuse.js": "^7.0.0",
|
|
||||||
"import-in-the-middle": "^1.11.3",
|
|
||||||
"lucide-react": "^0.468.0",
|
|
||||||
"next": "^15.1.3",
|
|
||||||
"next-themes": "^0.4.4",
|
|
||||||
"react": "^19.0.0",
|
|
||||||
"react-dom": "^19.0.0",
|
|
||||||
"require-in-the-middle": "^7.4.0"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@konobangu/testing": "workspace:*",
|
|
||||||
"@konobangu/typescript-config": "workspace:*",
|
|
||||||
"@testing-library/dom": "^10.4.0",
|
|
||||||
"@testing-library/react": "^16.1.0",
|
|
||||||
"@types/node": "22.10.1",
|
|
||||||
"@types/react": "19.0.1",
|
|
||||||
"@types/react-dom": "19.0.2",
|
|
||||||
"jsdom": "^25.0.1",
|
|
||||||
"tailwindcss": "^3.4.16",
|
|
||||||
"typescript": "^5.7.2",
|
|
||||||
"vitest": "^2.1.8"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
export { default } from '@konobangu/design-system/postcss.config.mjs';
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
/*
|
|
||||||
* This file configures the initialization of Sentry on the client.
|
|
||||||
* The config you add here will be used whenever a users loads a page in their browser.
|
|
||||||
* https://docs.sentry.io/platforms/javascript/guides/nextjs/
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { init, replayIntegration } from '@sentry/nextjs';
|
|
||||||
|
|
||||||
init({
|
|
||||||
dsn: process.env.NEXT_PUBLIC_SENTRY_DSN,
|
|
||||||
|
|
||||||
// Adjust this value in production, or use tracesSampler for greater control
|
|
||||||
tracesSampleRate: 1,
|
|
||||||
|
|
||||||
// Setting this option to true will print useful information to the console while you're setting up Sentry.
|
|
||||||
debug: false,
|
|
||||||
|
|
||||||
replaysOnErrorSampleRate: 1,
|
|
||||||
|
|
||||||
/*
|
|
||||||
* This sets the sample rate to be 10%. You may want this to be 100% while
|
|
||||||
* in development and sample at a lower rate in production
|
|
||||||
*/
|
|
||||||
replaysSessionSampleRate: 0.1,
|
|
||||||
|
|
||||||
// You can remove this option if you're not planning to use the Sentry Session Replay feature:
|
|
||||||
integrations: [
|
|
||||||
replayIntegration({
|
|
||||||
// Additional Replay configuration goes in here, for example:
|
|
||||||
maskAllText: true,
|
|
||||||
blockAllMedia: true,
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
});
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
export { config as default } from '@konobangu/tailwind-config/config';
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
{
|
|
||||||
"extends": "@konobangu/typescript-config/nextjs.json",
|
|
||||||
"compilerOptions": {
|
|
||||||
"baseUrl": ".",
|
|
||||||
"paths": {
|
|
||||||
"@/*": ["./*"],
|
|
||||||
"@konobangu/*": ["../../packages/*"]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"include": [
|
|
||||||
"next-env.d.ts",
|
|
||||||
"next.config.ts",
|
|
||||||
"**/*.ts",
|
|
||||||
"**/*.tsx",
|
|
||||||
".next/types/**/*.ts"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
{
|
|
||||||
"crons": [
|
|
||||||
{
|
|
||||||
"path": "/cron/keep-alive",
|
|
||||||
"schedule": "0 1 * * *"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
export { default } from '@konobangu/testing';
|
|
||||||
@@ -4,8 +4,5 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "npx --yes mintlify dev --port 5004",
|
"dev": "npx --yes mintlify dev --port 5004",
|
||||||
"lint": "npx --yes mintlify broken-links"
|
"lint": "npx --yes mintlify broken-links"
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"typescript": "^5.7.2"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,19 +6,16 @@
|
|||||||
"build": "email build",
|
"build": "email build",
|
||||||
"dev": "email dev --port 5003",
|
"dev": "email dev --port 5003",
|
||||||
"export": "email export",
|
"export": "email export",
|
||||||
"clean": "git clean -xdf .cache .turbo dist node_modules",
|
"clean": "git clean -xdf .cache dist node_modules",
|
||||||
"typecheck": "tsc --noEmit --emitDeclarationOnly false"
|
"typecheck": "tsc --noEmit --emitDeclarationOnly false"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@react-email/components": "0.0.31",
|
"@react-email/components": "^0.0.42",
|
||||||
"@konobangu/email": "workspace:*",
|
|
||||||
"react": "^19.0.0",
|
"react": "^19.0.0",
|
||||||
"react-email": "3.0.4"
|
"react-email": "^4.0.16",
|
||||||
|
"@konobangu/email": "workspace:*"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@konobangu/typescript-config": "workspace:*",
|
"@types/react": "19.0.1"
|
||||||
"@types/node": "22.10.1",
|
|
||||||
"@types/react": "19.0.1",
|
|
||||||
"typescript": "^5.7.2"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,13 @@
|
|||||||
{
|
{
|
||||||
"extends": "@konobangu/typescript-config/nextjs.json",
|
"extends": "../../tsconfig.base.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"composite": true,
|
||||||
|
"jsx": "react-jsx",
|
||||||
|
"jsxImportSource": "react",
|
||||||
|
"module": "ESNext",
|
||||||
|
"moduleResolution": "bundler"
|
||||||
|
},
|
||||||
|
"references": [{ "path": "../../packages/email" }],
|
||||||
"include": ["**/*.ts", "**/*.tsx"],
|
"include": ["**/*.ts", "**/*.tsx"],
|
||||||
"exclude": ["node_modules"]
|
"exclude": ["node_modules"]
|
||||||
}
|
}
|
||||||
|
|||||||
11
apps/proxy/.whistle/rules/files/0.konobangu
Normal file
11
apps/proxy/.whistle/rules/files/0.konobangu
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
```x-forwarded.json
|
||||||
|
{
|
||||||
|
"X-Forwarded-Host": "konobangu.com",
|
||||||
|
"X-Forwarded-Proto": "https"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#^https://konobangu.com/api*** statusCode://500
|
||||||
|
^https://konobangu.com/api*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/api$1
|
||||||
|
^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000/$1 excludeFilter://^https://konobangu.com/api***
|
||||||
|
^wss://konobangu.com/*** reqHeaders://{x-forwarded.json} ws://127.0.0.1:5000/$1 excludeFilter://^wss://konobangu.com/api
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
^https://konobangu.com/*** http://127.0.0.1:5000/$1 excludeFilter://^https://konobangu.com/api/***
|
|
||||||
^wss://konobangu.com/*** ws://127.0.0.1:5000/$1 ^excludeFilter://^wss://konobangu.com/api/***
|
|
||||||
1
apps/proxy/.whistle/rules/files/1.mikan-doppel
Normal file
1
apps/proxy/.whistle/rules/files/1.mikan-doppel
Normal file
@@ -0,0 +1 @@
|
|||||||
|
^https://mikanani.me/*** http://127.0.0.1:5005/$1 excludeFilter://^**/***.svg excludeFilter://^**/***.css excludeFilter://^**/***.js
|
||||||
8
apps/proxy/.whistle/rules/files/2.konobangu-prod
Normal file
8
apps/proxy/.whistle/rules/files/2.konobangu-prod
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
```x-forwarded.json
|
||||||
|
{
|
||||||
|
"X-Forwarded-Host": "konobangu.com",
|
||||||
|
"X-Forwarded-Proto": "https"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/$1
|
||||||
@@ -1 +0,0 @@
|
|||||||
^https://konobangu.com/api/*** http://127.0.0.1:5001/api/$1
|
|
||||||
@@ -1 +1 @@
|
|||||||
{"filesOrder":["webui","recorder"],"selectedList":["webui","recorder"],"disabledDefalutRules":true}
|
{"filesOrder":["konobangu","konobangu-prod","mikan-doppel"],"selectedList":["mikan-doppel","konobangu"],"disabledDefalutRules":true,"defalutRules":""}
|
||||||
|
|||||||
@@ -0,0 +1 @@
|
|||||||
|
{"filesOrder":[]}
|
||||||
|
|||||||
19
apps/proxy/Cargo.toml
Normal file
19
apps/proxy/Cargo.toml
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
[package]
|
||||||
|
name = "proxy"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
[lib]
|
||||||
|
name = "proxy"
|
||||||
|
path = "src/lib.rs"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "mikan_doppel"
|
||||||
|
path = "src/bin/mikan_doppel.rs"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
recorder = { workspace = true, features = ["playground"] }
|
||||||
|
tokio = { workspace = true }
|
||||||
|
tracing-subscriber = { workspace = true }
|
||||||
|
tracing = { workspace = true }
|
||||||
@@ -3,13 +3,13 @@
|
|||||||
"version": "0.1.0",
|
"version": "0.1.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"start": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF\" whistle run -p 8899 -t 30000 -D . --no-global-plugins",
|
"whistle": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
|
||||||
"dev": "pnpm run start"
|
"mikan_doppel": "cargo run -p proxy --bin mikan_doppel",
|
||||||
|
"dev": "npm-run-all -p mikan_doppel whistle"
|
||||||
},
|
},
|
||||||
"keywords": [],
|
"keywords": [],
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"whistle": "^2.9.61",
|
"whistle": "^2.9.99"
|
||||||
"cross-env": "^7.0.3"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
22
apps/proxy/src/bin/mikan_doppel.rs
Normal file
22
apps/proxy/src/bin/mikan_doppel.rs
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
|
use recorder::{errors::RecorderResult, test_utils::mikan::MikanMockServer};
|
||||||
|
use tracing::Level;
|
||||||
|
|
||||||
|
#[allow(unused_variables)]
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> RecorderResult<()> {
|
||||||
|
tracing_subscriber::fmt()
|
||||||
|
.with_max_level(Level::DEBUG)
|
||||||
|
.init();
|
||||||
|
|
||||||
|
let mut mikan_server = MikanMockServer::new_with_port(5005).await.unwrap();
|
||||||
|
|
||||||
|
let resources_mock = mikan_server.mock_resources_with_doppel();
|
||||||
|
|
||||||
|
let login_mock = mikan_server.mock_get_login_page();
|
||||||
|
|
||||||
|
loop {
|
||||||
|
tokio::time::sleep(Duration::from_secs(1)).await;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
FROM mcr.microsoft.com/vscode/devcontainers/rust:0-1
|
|
||||||
|
|
||||||
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
|
||||||
&& apt-get -y install --no-install-recommends postgresql-client \
|
|
||||||
&& cargo install sea-orm-cli cargo-insta \
|
|
||||||
&& chown -R vscode /usr/local/cargo
|
|
||||||
|
|
||||||
COPY .env /.env
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "Konobangu Recorder",
|
|
||||||
"dockerComposeFile": "docker-compose.yml",
|
|
||||||
"service": "app",
|
|
||||||
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
|
|
||||||
"forwardPorts": [5001]
|
|
||||||
}
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
version: "3"
|
|
||||||
|
|
||||||
services:
|
|
||||||
app:
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
dockerfile: Dockerfile
|
|
||||||
command: sleep infinity
|
|
||||||
networks:
|
|
||||||
- db
|
|
||||||
- redis
|
|
||||||
volumes:
|
|
||||||
- ../..:/workspaces:cached
|
|
||||||
env_file:
|
|
||||||
- .env
|
|
||||||
db:
|
|
||||||
image: postgres:15.3-alpine
|
|
||||||
restart: unless-stopped
|
|
||||||
ports:
|
|
||||||
- 5432:5432
|
|
||||||
networks:
|
|
||||||
- db
|
|
||||||
volumes:
|
|
||||||
- postgres-data:/var/lib/postgresql/data
|
|
||||||
env_file:
|
|
||||||
- .env
|
|
||||||
redis:
|
|
||||||
image: redis:latest
|
|
||||||
restart: unless-stopped
|
|
||||||
ports:
|
|
||||||
- 6379:6379
|
|
||||||
networks:
|
|
||||||
- redis
|
|
||||||
|
|
||||||
volumes:
|
|
||||||
postgres-data:
|
|
||||||
|
|
||||||
networks:
|
|
||||||
db:
|
|
||||||
redis:
|
|
||||||
18
apps/recorder/.env.development
Normal file
18
apps/recorder/.env.development
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
LOGGER__LEVEL = "debug"
|
||||||
|
|
||||||
|
DATABASE__URI = "postgres://konobangu:konobangu@localhost:5432/konobangu"
|
||||||
|
|
||||||
|
AUTH__AUTH_TYPE = "basic"
|
||||||
|
AUTH__BASIC_USER = "konobangu"
|
||||||
|
AUTH__BASIC_PASSWORD = "konobangu"
|
||||||
|
|
||||||
|
# AUTH__OIDC_ISSUER = "https://auth.logto.io/oidc"
|
||||||
|
# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
|
||||||
|
# AUTH__OIDC_CLIENT_ID = "client_id"
|
||||||
|
# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
|
||||||
|
# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
|
||||||
|
# AUTH__OIDC_EXTRA_CLAIM_KEY = ""
|
||||||
|
# AUTH__OIDC_EXTRA_CLAIM_VALUE = ""
|
||||||
|
|
||||||
|
MIKAN__HTTP_CLIENT__PROXY__ACCEPT_INVALID_CERTS = true
|
||||||
|
MIKAN__HTTP_CLIENT__PROXY__SERVER = "http://127.0.0.1:8899"
|
||||||
15
apps/recorder/.env.production.example
Normal file
15
apps/recorder/.env.production.example
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
HOST="konobangu.com"
|
||||||
|
|
||||||
|
DATABASE__URI = "postgres://konobangu:konobangu@localhost:5432/konobangu"
|
||||||
|
|
||||||
|
AUTH__AUTH_TYPE = "basic" # or oidc
|
||||||
|
AUTH__BASIC_USER = "konobangu"
|
||||||
|
AUTH__BASIC_PASSWORD = "konobangu"
|
||||||
|
|
||||||
|
# AUTH__OIDC_ISSUER="https://auth.logto.io/oidc"
|
||||||
|
# AUTH__OIDC_AUDIENCE = "https://konobangu.com/api"
|
||||||
|
# AUTH__OIDC_CLIENT_ID = "client_id"
|
||||||
|
# AUTH__OIDC_CLIENT_SECRET = "client_secret" # optional
|
||||||
|
# AUTH__OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
|
||||||
|
# AUTH__OIDC_EXTRA_CLAIM_KEY = ""
|
||||||
|
# AUTH__OIDC_EXTRA_CLAIM_VALUE = ""
|
||||||
16
apps/recorder/.gitignore
vendored
16
apps/recorder/.gitignore
vendored
@@ -15,3 +15,19 @@ Cargo.lock
|
|||||||
|
|
||||||
# MSVC Windows builds of rustc generate these, which store debugging information
|
# MSVC Windows builds of rustc generate these, which store debugging information
|
||||||
*.pdb
|
*.pdb
|
||||||
|
|
||||||
|
|
||||||
|
# Local
|
||||||
|
.DS_Store
|
||||||
|
*.local
|
||||||
|
*.log*
|
||||||
|
|
||||||
|
# Dist
|
||||||
|
node_modules
|
||||||
|
dist/
|
||||||
|
temp/*
|
||||||
|
!temp/.gitkeep
|
||||||
|
tests/resources/mikan/classic_episodes/*/*
|
||||||
|
!tests/resources/mikan/classic_episodes/parquet/tiny.parquet
|
||||||
|
webui/
|
||||||
|
data/
|
||||||
@@ -1,9 +1,22 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "recorder"
|
name = "recorder"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2024"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[features]
|
||||||
|
default = ["jxl"]
|
||||||
|
playground = ["dep:inquire", "dep:color-eyre", "dep:polars", "test-utils"]
|
||||||
|
testcontainers = [
|
||||||
|
"dep:testcontainers",
|
||||||
|
"dep:testcontainers-modules",
|
||||||
|
"dep:testcontainers-ext",
|
||||||
|
"downloader/testcontainers",
|
||||||
|
"testcontainers-modules/postgres",
|
||||||
|
]
|
||||||
|
jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
|
||||||
|
test-utils = []
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
name = "recorder"
|
name = "recorder"
|
||||||
path = "src/lib.rs"
|
path = "src/lib.rs"
|
||||||
@@ -13,94 +26,156 @@ name = "recorder_cli"
|
|||||||
path = "src/bin/main.rs"
|
path = "src/bin/main.rs"
|
||||||
required-features = []
|
required-features = []
|
||||||
|
|
||||||
[features]
|
[[example]]
|
||||||
default = []
|
name = "mikan_collect_classic_eps"
|
||||||
testcontainers = [
|
path = "examples/mikan_collect_classic_eps.rs"
|
||||||
"dep:testcontainers",
|
required-features = ["playground"]
|
||||||
"dep:testcontainers-modules",
|
|
||||||
"dep:bollard",
|
[[example]]
|
||||||
]
|
name = "mikan_doppel_season_subscription"
|
||||||
|
path = "examples/mikan_doppel_season_subscription.rs"
|
||||||
|
required-features = ["playground"]
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "mikan_doppel_subscriber_subscription"
|
||||||
|
path = "examples/mikan_doppel_subscriber_subscription.rs"
|
||||||
|
required-features = ["playground"]
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "playground"
|
||||||
|
path = "examples/playground.rs"
|
||||||
|
required-features = ["playground"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
loco-rs = { version = "0.14" }
|
downloader = { workspace = true }
|
||||||
serde = { version = "1", features = ["derive"] }
|
util = { workspace = true }
|
||||||
serde_json = "1"
|
util-derive = { workspace = true }
|
||||||
tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
|
fetch = { workspace = true }
|
||||||
async-trait = "0.1.83"
|
|
||||||
tracing = "0.1"
|
serde = { workspace = true }
|
||||||
chrono = "0.4"
|
tokio = { workspace = true }
|
||||||
sea-orm = { version = "1", features = [
|
serde_json = { workspace = true }
|
||||||
|
async-trait = { workspace = true }
|
||||||
|
testcontainers = { workspace = true, optional = true }
|
||||||
|
testcontainers-modules = { workspace = true, optional = true }
|
||||||
|
testcontainers-ext = { workspace = true, optional = true, features = [
|
||||||
|
"tracing",
|
||||||
|
] }
|
||||||
|
tracing = { workspace = true }
|
||||||
|
axum = { workspace = true }
|
||||||
|
axum-extra = { workspace = true }
|
||||||
|
snafu = { workspace = true }
|
||||||
|
itertools = { workspace = true }
|
||||||
|
url = { workspace = true }
|
||||||
|
regex = { workspace = true }
|
||||||
|
lazy_static = { workspace = true }
|
||||||
|
quirks_path = { workspace = true }
|
||||||
|
futures = { workspace = true }
|
||||||
|
bytes = { workspace = true }
|
||||||
|
serde_with = { workspace = true }
|
||||||
|
moka = { workspace = true }
|
||||||
|
chrono = { workspace = true }
|
||||||
|
tracing-subscriber = { workspace = true }
|
||||||
|
mockito = { workspace = true }
|
||||||
|
color-eyre = { workspace = true, optional = true }
|
||||||
|
inquire = { workspace = true, optional = true }
|
||||||
|
convert_case = { workspace = true }
|
||||||
|
image = { workspace = true }
|
||||||
|
uuid = { workspace = true }
|
||||||
|
maplit = { workspace = true }
|
||||||
|
once_cell = { workspace = true }
|
||||||
|
rand = { workspace = true }
|
||||||
|
rust_decimal = { workspace = true }
|
||||||
|
base64 = { workspace = true }
|
||||||
|
nom = { workspace = true }
|
||||||
|
percent-encoding = { workspace = true }
|
||||||
|
num-traits = { workspace = true }
|
||||||
|
http = { workspace = true }
|
||||||
|
async-stream = { workspace = true }
|
||||||
|
serde_variant = { workspace = true }
|
||||||
|
tracing-appender = { workspace = true }
|
||||||
|
clap = { workspace = true }
|
||||||
|
ipnetwork = { workspace = true }
|
||||||
|
typed-builder = { workspace = true }
|
||||||
|
webp = { workspace = true }
|
||||||
|
|
||||||
|
sea-orm = { version = "1.1", features = [
|
||||||
"sqlx-sqlite",
|
"sqlx-sqlite",
|
||||||
"sqlx-postgres",
|
"sqlx-postgres",
|
||||||
"runtime-tokio-rustls",
|
"runtime-tokio",
|
||||||
"macros",
|
"macros",
|
||||||
"debug-print",
|
"debug-print",
|
||||||
] }
|
] }
|
||||||
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
|
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
|
||||||
|
sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
|
||||||
axum = "0.8"
|
rss = { version = "2", features = ["builders", "with-serde"] }
|
||||||
uuid = { version = "1.6.0", features = ["v4"] }
|
fancy-regex = "0.15"
|
||||||
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
|
lightningcss = "1.0.0-alpha.66"
|
||||||
sea-orm-migration = { version = "1", features = ["runtime-tokio-rustls"] }
|
|
||||||
reqwest = { version = "0.12", features = [
|
|
||||||
"charset",
|
|
||||||
"http2",
|
|
||||||
"json",
|
|
||||||
"macos-system-configuration",
|
|
||||||
"rustls-tls",
|
|
||||||
] }
|
|
||||||
thiserror = "2"
|
|
||||||
rss = "2"
|
|
||||||
bytes = "1.9"
|
|
||||||
itertools = "0.13.0"
|
|
||||||
url = "2.5"
|
|
||||||
fancy-regex = "0.14"
|
|
||||||
regex = "1.11"
|
|
||||||
lazy_static = "1.5"
|
|
||||||
maplit = "1.0.2"
|
|
||||||
lightningcss = "1.0.0-alpha.61"
|
|
||||||
html-escape = "0.2.13"
|
html-escape = "0.2.13"
|
||||||
opendal = { version = "0.51.0", features = ["default", "services-fs"] }
|
opendal = { version = "0.53", features = ["default", "services-fs"] }
|
||||||
zune-image = "0.4.15"
|
scraper = "0.23.1"
|
||||||
once_cell = "1.20.2"
|
async-graphql = { version = "7", features = ["dynamic-schema"] }
|
||||||
reqwest-middleware = "0.4.0"
|
async-graphql-axum = "7"
|
||||||
reqwest-retry = "0.7.0"
|
seaography = { version = "1.1", features = [
|
||||||
reqwest-tracing = "0.5.5"
|
"with-json",
|
||||||
scraper = "0.22.0"
|
"with-chrono",
|
||||||
leaky-bucket = "1.1.2"
|
"with-time",
|
||||||
serde_with = "3"
|
"with-uuid",
|
||||||
jwt-authorizer = "0.15.0"
|
"with-decimal",
|
||||||
futures = "0.3.31"
|
"with-bigdecimal",
|
||||||
librqbit-core = "4"
|
"with-postgres-array",
|
||||||
qbit-rs = { git = "https://github.com/lonelyhentxi/qbit.git", rev = "72d53138ebe", features = [
|
"with-json-as-scalar",
|
||||||
"default",
|
"with-custom-as-json",
|
||||||
"builder",
|
|
||||||
] }
|
] }
|
||||||
testcontainers = { version = "0.23.1", features = [
|
tower = { version = "0.5.2", features = ["util"] }
|
||||||
"default",
|
tower-http = { version = "0.6", features = [
|
||||||
"properties-config",
|
"trace",
|
||||||
"watchdog",
|
"catch-panic",
|
||||||
"http_wait",
|
"timeout",
|
||||||
"reusable-containers",
|
"add-extension",
|
||||||
|
"cors",
|
||||||
|
"fs",
|
||||||
|
"set-header",
|
||||||
|
"compression-full",
|
||||||
|
] }
|
||||||
|
tera = "1.20.0"
|
||||||
|
openidconnect = { version = "4" }
|
||||||
|
dotenvy = "0.15.7"
|
||||||
|
jpegxl-rs = { version = "0.11.2", optional = true }
|
||||||
|
jpegxl-sys = { version = "0.11.2", optional = true }
|
||||||
|
|
||||||
|
apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
|
||||||
|
apalis-sql = { version = "0.7", features = ["postgres"] }
|
||||||
|
cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
|
||||||
|
reqwest_cookie_store = "0.8.0"
|
||||||
|
jwtk = "0.4.0"
|
||||||
|
mime_guess = "2.0.5"
|
||||||
|
icu_properties = "2.0.1"
|
||||||
|
icu = "2.0.0"
|
||||||
|
tracing-tree = "0.4.0"
|
||||||
|
num_cpus = "1.17.0"
|
||||||
|
headers-accept = "0.1.4"
|
||||||
|
polars = { version = "0.49.1", features = [
|
||||||
|
"parquet",
|
||||||
|
"lazy",
|
||||||
|
"diagonal_concat",
|
||||||
], optional = true }
|
], optional = true }
|
||||||
testcontainers-modules = { version = "0.11.4", optional = true }
|
quick-xml = { version = "0.38", features = [
|
||||||
|
"serialize",
|
||||||
color-eyre = "0.6"
|
"serde-types",
|
||||||
|
"serde",
|
||||||
|
] }
|
||||||
log = "0.4.22"
|
croner = "2.2.0"
|
||||||
anyhow = "1.0.95"
|
ts-rs = "11.0.1"
|
||||||
bollard = { version = "0.18", optional = true }
|
secrecy = { version = "0.10.3", features = ["serde"] }
|
||||||
async-graphql = { version = "7.0.13", features = [] }
|
paste = "1.0.15"
|
||||||
async-graphql-axum = "7.0.13"
|
chrono-tz = "0.10.3"
|
||||||
fastrand = "2.3.0"
|
|
||||||
seaography = "1.1.2"
|
|
||||||
quirks_path = "0.1.0"
|
|
||||||
base64 = "0.22.1"
|
|
||||||
tower = "0.5.2"
|
|
||||||
axum-extra = "0.10.0"
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
|
inquire = { workspace = true }
|
||||||
|
color-eyre = { workspace = true }
|
||||||
serial_test = "3"
|
serial_test = "3"
|
||||||
loco-rs = { version = "0.14", features = ["testing"] }
|
insta = { version = "1", features = ["redactions", "toml", "filters"] }
|
||||||
insta = { version = "1", features = ["redactions", "yaml", "filters"] }
|
ctor = "0.4.0"
|
||||||
|
tracing-test = "0.2.5"
|
||||||
|
rstest = "0.25"
|
||||||
|
|||||||
6
apps/recorder/bindings/SubscriberTaskInput.ts
Normal file
6
apps/recorder/bindings/SubscriberTaskInput.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||||
|
import type { SyncOneSubscriptionFeedsFullTaskInput } from "./SyncOneSubscriptionFeedsFullTaskInput";
|
||||||
|
import type { SyncOneSubscriptionFeedsIncrementalTaskInput } from "./SyncOneSubscriptionFeedsIncrementalTaskInput";
|
||||||
|
import type { SyncOneSubscriptionSourcesTaskInput } from "./SyncOneSubscriptionSourcesTaskInput";
|
||||||
|
|
||||||
|
export type SubscriberTaskInput = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTaskInput | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTaskInput | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTaskInput;
|
||||||
6
apps/recorder/bindings/SubscriberTaskType.ts
Normal file
6
apps/recorder/bindings/SubscriberTaskType.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||||
|
import type { SyncOneSubscriptionFeedsFullTask } from "./SyncOneSubscriptionFeedsFullTask";
|
||||||
|
import type { SyncOneSubscriptionFeedsIncrementalTask } from "./SyncOneSubscriptionFeedsIncrementalTask";
|
||||||
|
import type { SyncOneSubscriptionSourcesTask } from "./SyncOneSubscriptionSourcesTask";
|
||||||
|
|
||||||
|
export type SubscriberTaskType = { "taskType": "sync_one_subscription_feeds_incremental" } & SyncOneSubscriptionFeedsIncrementalTask | { "taskType": "sync_one_subscription_feeds_full" } & SyncOneSubscriptionFeedsFullTask | { "taskType": "sync_one_subscription_sources" } & SyncOneSubscriptionSourcesTask;
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||||
|
|
||||||
|
export type SyncOneSubscriptionFeedsFullTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||||
|
|
||||||
|
export type SyncOneSubscriptionFeedsFullTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||||
|
|
||||||
|
export type SyncOneSubscriptionFeedsIncrementalTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||||
|
|
||||||
|
export type SyncOneSubscriptionFeedsIncrementalTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };
|
||||||
3
apps/recorder/bindings/SyncOneSubscriptionSourcesTask.ts
Normal file
3
apps/recorder/bindings/SyncOneSubscriptionSourcesTask.ts
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||||
|
|
||||||
|
export type SyncOneSubscriptionSourcesTask = { subscriptionId: number, subscriberId: number, cronId?: number | null, };
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||||
|
|
||||||
|
export type SyncOneSubscriptionSourcesTaskInput = { subscriptionId: number, subscriberId?: number | null, cronId?: number | null, };
|
||||||
@@ -1,138 +0,0 @@
|
|||||||
# Loco configuration file documentation
|
|
||||||
|
|
||||||
# Application logging configuration
|
|
||||||
logger:
|
|
||||||
# Enable or disable logging.
|
|
||||||
enable: true
|
|
||||||
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
|
|
||||||
pretty_backtrace: true
|
|
||||||
# Log level, options: trace, debug, info, warn or error.
|
|
||||||
level: debug
|
|
||||||
# Define the logging format. options: compact, pretty or Json
|
|
||||||
format: compact
|
|
||||||
# By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries
|
|
||||||
# Uncomment the line below to override to see all third party libraries you can enable this config and override the logger filters.
|
|
||||||
# override_filter: trace
|
|
||||||
|
|
||||||
# Web server configuration
|
|
||||||
server:
|
|
||||||
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
|
|
||||||
port: 5001
|
|
||||||
# The UI hostname or IP address that mailers will point to.
|
|
||||||
host: http://webui.konobangu.com
|
|
||||||
# Out of the box middleware configuration. to disable middleware you can changed the `enable` field to `false` of comment the middleware block
|
|
||||||
middlewares:
|
|
||||||
# Enable Etag cache header middleware
|
|
||||||
etag:
|
|
||||||
enable: true
|
|
||||||
# Allows to limit the payload size request. payload that bigger than this file will blocked the request.
|
|
||||||
limit_payload:
|
|
||||||
# Enable/Disable the middleware.
|
|
||||||
enable: true
|
|
||||||
# the limit size. can be b,kb,kib,mb,mib,gb,gib
|
|
||||||
body_limit: 5mb
|
|
||||||
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
|
|
||||||
logger:
|
|
||||||
# Enable/Disable the middleware.
|
|
||||||
enable: true
|
|
||||||
# when your code is panicked, the request still returns 500 status code.
|
|
||||||
catch_panic:
|
|
||||||
# Enable/Disable the middleware.
|
|
||||||
enable: true
|
|
||||||
# Timeout for incoming requests middleware. requests that take more time from the configuration will cute and 408 status code will returned.
|
|
||||||
timeout_request:
|
|
||||||
# Enable/Disable the middleware.
|
|
||||||
enable: false
|
|
||||||
# Duration time in milliseconds.
|
|
||||||
timeout: 5000
|
|
||||||
cors:
|
|
||||||
enable: true
|
|
||||||
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
|
|
||||||
# allow_origins:
|
|
||||||
# - https://loco.rs
|
|
||||||
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
|
|
||||||
# allow_headers:
|
|
||||||
# - Content-Type
|
|
||||||
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
|
|
||||||
# allow_methods:
|
|
||||||
# - POST
|
|
||||||
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
|
|
||||||
# max_age: 3600
|
|
||||||
fallback:
|
|
||||||
enable: false
|
|
||||||
|
|
||||||
# Worker Configuration
|
|
||||||
workers:
|
|
||||||
# specifies the worker mode. Options:
|
|
||||||
# - BackgroundQueue - Workers operate asynchronously in the background, processing queued.
|
|
||||||
# - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
|
|
||||||
# - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
|
|
||||||
mode: BackgroundQueue
|
|
||||||
|
|
||||||
# Mailer Configuration.
|
|
||||||
mailer:
|
|
||||||
# SMTP mailer configuration.
|
|
||||||
smtp:
|
|
||||||
# Enable/Disable smtp mailer.
|
|
||||||
enable: true
|
|
||||||
# SMTP server host. e.x localhost, smtp.gmail.com
|
|
||||||
host: '{{ get_env(name="MAILER_HOST", default="localhost") }}'
|
|
||||||
# SMTP server port
|
|
||||||
port: 1025
|
|
||||||
# Use secure connection (SSL/TLS).
|
|
||||||
secure: false
|
|
||||||
# auth:
|
|
||||||
# user:
|
|
||||||
# password:
|
|
||||||
|
|
||||||
# Database Configuration
|
|
||||||
database:
|
|
||||||
# Database connection URI
|
|
||||||
uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu") }}'
|
|
||||||
# When enabled, the sql query will be logged.
|
|
||||||
enable_logging: true
|
|
||||||
# Set the timeout duration when acquiring a connection.
|
|
||||||
connect_timeout: 500
|
|
||||||
# Set the idle duration before closing a connection.
|
|
||||||
idle_timeout: 500
|
|
||||||
# Minimum number of connections for a pool.
|
|
||||||
min_connections: 1
|
|
||||||
# Maximum number of connections for a pool.
|
|
||||||
max_connections: 1
|
|
||||||
# Run migration up when application loaded
|
|
||||||
auto_migrate: true
|
|
||||||
# Truncate database when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
|
|
||||||
dangerously_truncate: false
|
|
||||||
# Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
|
|
||||||
dangerously_recreate: false
|
|
||||||
|
|
||||||
# Redis Configuration
|
|
||||||
redis:
|
|
||||||
# Redis connection URI
|
|
||||||
uri: '{{ get_env(name="REDIS_URL", default="redis://127.0.0.1:6379") }}'
|
|
||||||
# Dangerously flush all data in Redis on startup. dangerous operation, make sure that you using this flag only on dev environments or test mode
|
|
||||||
dangerously_flush: false
|
|
||||||
|
|
||||||
settings:
|
|
||||||
|
|
||||||
dal:
|
|
||||||
data_dir: ./data
|
|
||||||
|
|
||||||
mikan:
|
|
||||||
base_url: "https://mikanani.me/"
|
|
||||||
http_client:
|
|
||||||
exponential_backoff_max_retries: 3
|
|
||||||
leaky_bucket_max_tokens: 2
|
|
||||||
leaky_bucket_initial_tokens: 0
|
|
||||||
leaky_bucket_refill_tokens: 1
|
|
||||||
leaky_bucket_refill_interval: 500
|
|
||||||
|
|
||||||
auth:
|
|
||||||
auth_type: "oidc" # or "basic"
|
|
||||||
basic_user: "konobangu"
|
|
||||||
basic_password: "konobangu"
|
|
||||||
oidc_api_issuer: "https://some-oidc-auth.com/oidc"
|
|
||||||
oidc_api_audience: "https://konobangu.com/api"
|
|
||||||
oidc_extra_scopes: "read:konobangu,write:konobangu"
|
|
||||||
oidc_extra_claim_key: ""
|
|
||||||
oidc_extra_claim_value: ""
|
|
||||||
@@ -1,125 +0,0 @@
|
|||||||
# Loco configuration file documentation
|
|
||||||
|
|
||||||
# Application logging configuration
|
|
||||||
logger:
|
|
||||||
# Enable or disable logging.
|
|
||||||
enable: true
|
|
||||||
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
|
|
||||||
pretty_backtrace: true
|
|
||||||
# Log level, options: trace, debug, info, warn or error.
|
|
||||||
level: debug
|
|
||||||
# Define the logging format. options: compact, pretty or Json
|
|
||||||
format: compact
|
|
||||||
# By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries
|
|
||||||
# Uncomment the line below to override to see all third party libraries you can enable this config and override the logger filters.
|
|
||||||
# override_filter: trace
|
|
||||||
|
|
||||||
# Web server configuration
|
|
||||||
server:
|
|
||||||
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
|
|
||||||
port: 5001
|
|
||||||
# The UI hostname or IP address that mailers will point to.
|
|
||||||
host: http://webui.konobangu.com
|
|
||||||
# Out of the box middleware configuration. to disable middleware you can changed the `enable` field to `false` of comment the middleware block
|
|
||||||
middlewares:
|
|
||||||
# Enable Etag cache header middleware
|
|
||||||
etag:
|
|
||||||
enable: true
|
|
||||||
# Allows to limit the payload size request. payload that bigger than this file will blocked the request.
|
|
||||||
limit_payload:
|
|
||||||
# Enable/Disable the middleware.
|
|
||||||
enable: true
|
|
||||||
# the limit size. can be b,kb,kib,mb,mib,gb,gib
|
|
||||||
body_limit: 5mb
|
|
||||||
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
|
|
||||||
logger:
|
|
||||||
# Enable/Disable the middleware.
|
|
||||||
enable: true
|
|
||||||
# When your code panics, the request still returns a 500 status code.
|
|
||||||
catch_panic:
|
|
||||||
# Enable/Disable the middleware.
|
|
||||||
enable: true
|
|
||||||
# Timeout middleware for incoming requests. Requests that take longer than the configured duration will be cut off and a 408 status code will be returned.
|
|
||||||
timeout_request:
|
|
||||||
# Enable/Disable the middleware.
|
|
||||||
enable: false
|
|
||||||
# Duration time in milliseconds.
|
|
||||||
timeout: 5000
|
|
||||||
cors:
|
|
||||||
enable: true
|
|
||||||
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
|
|
||||||
# allow_origins:
|
|
||||||
# - https://loco.rs
|
|
||||||
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
|
|
||||||
# allow_headers:
|
|
||||||
# - Content-Type
|
|
||||||
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
|
|
||||||
# allow_methods:
|
|
||||||
# - POST
|
|
||||||
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
|
|
||||||
# max_age: 3600
|
|
||||||
|
|
||||||
# Worker Configuration
|
|
||||||
workers:
|
|
||||||
# specifies the worker mode. Options:
|
|
||||||
# - BackgroundQueue - Workers operate asynchronously in the background, processing queued.
|
|
||||||
# - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
|
|
||||||
# - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
|
|
||||||
mode: BackgroundQueue
|
|
||||||
|
|
||||||
# Mailer Configuration.
|
|
||||||
mailer:
|
|
||||||
# SMTP mailer configuration.
|
|
||||||
smtp:
|
|
||||||
# Enable/Disable smtp mailer.
|
|
||||||
enable: true
|
|
||||||
# SMTP server host. e.x localhost, smtp.gmail.com
|
|
||||||
host: '{{ get_env(name="MAILER_HOST", default="localhost") }}'
|
|
||||||
# SMTP server port
|
|
||||||
port: 1025
|
|
||||||
# Use secure connection (SSL/TLS).
|
|
||||||
secure: false
|
|
||||||
# auth:
|
|
||||||
# user:
|
|
||||||
# password:
|
|
||||||
|
|
||||||
# Database Configuration
|
|
||||||
database:
|
|
||||||
# Database connection URI
|
|
||||||
uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu") }}'
|
|
||||||
# When enabled, the sql query will be logged.
|
|
||||||
enable_logging: true
|
|
||||||
# Set the timeout duration when acquiring a connection.
|
|
||||||
connect_timeout: 500
|
|
||||||
# Set the idle duration before closing a connection.
|
|
||||||
idle_timeout: 500
|
|
||||||
# Minimum number of connections for a pool.
|
|
||||||
min_connections: 1
|
|
||||||
# Maximum number of connections for a pool.
|
|
||||||
max_connections: 1
|
|
||||||
# Run migration up when application loaded
|
|
||||||
auto_migrate: true
|
|
||||||
# Truncate database when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
|
|
||||||
dangerously_truncate: false
|
|
||||||
# Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
|
|
||||||
dangerously_recreate: false
|
|
||||||
|
|
||||||
# Redis Configuration
|
|
||||||
redis:
|
|
||||||
# Redis connection URI
|
|
||||||
uri: '{{ get_env(name="REDIS_URL", default="redis://127.0.0.1:6379") }}'
|
|
||||||
# Dangerously flush all data in Redis on startup. dangerous operation, make sure that you using this flag only on dev environments or test mode
|
|
||||||
dangerously_flush: false
|
|
||||||
|
|
||||||
settings:
|
|
||||||
dal:
|
|
||||||
data_dir: ./temp
|
|
||||||
mikan:
|
|
||||||
http_client:
|
|
||||||
exponential_backoff_max_retries: 3
|
|
||||||
leaky_bucket_max_tokens: 2
|
|
||||||
leaky_bucket_initial_tokens: 0
|
|
||||||
leaky_bucket_refill_tokens: 1
|
|
||||||
leaky_bucket_refill_interval: 500
|
|
||||||
user_agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0"
|
|
||||||
base_url: "https://mikanani.me/"
|
|
||||||
584
apps/recorder/examples/mikan_collect_classic_eps.rs
Normal file
584
apps/recorder/examples/mikan_collect_classic_eps.rs
Normal file
@@ -0,0 +1,584 @@
|
|||||||
|
use std::collections::HashSet;
|
||||||
|
|
||||||
|
use chrono::{DateTime, Duration, FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
|
||||||
|
use fetch::{HttpClientConfig, fetch_html};
|
||||||
|
use itertools::Itertools;
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
use nom::{
|
||||||
|
IResult, Parser,
|
||||||
|
branch::alt,
|
||||||
|
bytes::complete::{tag, take, take_till1},
|
||||||
|
character::complete::space1,
|
||||||
|
combinator::map,
|
||||||
|
};
|
||||||
|
use recorder::{
|
||||||
|
errors::{RecorderError, RecorderResult},
|
||||||
|
extract::{
|
||||||
|
html::extract_inner_text_from_element_ref,
|
||||||
|
mikan::{MikanClient, MikanConfig, MikanEpisodeHash, MikanFansubHash},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
use regex::Regex;
|
||||||
|
use scraper::{ElementRef, Html, Selector};
|
||||||
|
use snafu::FromString;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
lazy_static! {
    // Root folder for scraped classic-episode fixtures (html/csv/parquet).
    // In test/debug/playground builds the path is anchored to the crate root via
    // CARGO_MANIFEST_DIR so the example works regardless of the working
    // directory; otherwise it is resolved relative to the current directory.
    static ref TEST_FOLDER: std::path::PathBuf =
        if cfg!(any(test, debug_assertions, feature = "playground")) {
            std::path::PathBuf::from(format!(
                "{}/tests/resources/mikan/classic_episodes",
                env!("CARGO_MANIFEST_DIR")
            ))
        } else {
            std::path::PathBuf::from("tests/resources/mikan/classic_episodes")
        };
}
|
||||||
|
|
||||||
|
lazy_static! {
    // Captures the `total: <n>` argument of the inline
    // `$('.classic-view-pagination2').bootpag({ total: ... })` script on the
    // classic episodes page, i.e. the total number of pages available.
    static ref TOTAL_PAGE_REGEX: Regex =
        Regex::new(r#"\$\(\'\.classic-view-pagination2\'\)\.bootpag\(\{\s*total:\s*(\d+)"#)
            .unwrap();
}
|
||||||
|
|
||||||
|
/// One parsed row of Mikan's classic episode table, i.e. a single episode
/// release as scraped from `/Home/Classic/{page}`.
pub struct MikanClassicEpisodeTableRow {
    /// Synthetic identifier computed as `rev_id * 1000 + idx` when the row is
    /// built (see `from_element_ref`).
    pub id: i32,
    /// Publish time converted from Mikan's local time (UTC+8) to UTC.
    pub publish_at: DateTime<Utc>,
    /// Fansub id extracted from the fansub link, when present.
    pub mikan_fansub_id: Option<String>,
    /// Human-readable fansub name, when present.
    pub fansub_name: Option<String>,
    /// Episode id extracted from the episode homepage link.
    pub mikan_episode_id: String,
    /// Original release title text.
    pub original_name: String,
    /// Magnet URI taken from the row's clipboard attribute, when present.
    pub magnet_link: Option<String>,
    /// Display file size text, when present.
    pub file_size: Option<String>,
    /// Torrent download link, when present.
    pub torrent_link: Option<String>,
}
|
||||||
|
|
||||||
|
impl MikanClassicEpisodeTableRow {
    /// Mikan publishes times in UTC+8 (China Standard Time).
    fn timezone() -> FixedOffset {
        FixedOffset::east_opt(8 * 3600).unwrap()
    }

    /// Parses the relative date words "今天" (today) and "昨天" (yesterday),
    /// resolved against the current date in the Mikan timezone.
    fn fixed_date_parser(input: &str) -> IResult<&str, NaiveDate> {
        alt((
            map(tag("今天"), move |_| {
                Utc::now().with_timezone(&Self::timezone()).date_naive()
            }),
            map(tag("昨天"), move |_| {
                Utc::now().with_timezone(&Self::timezone()).date_naive() - Duration::days(1)
            }),
        ))
        .parse(input)
    }

    /// Parses an absolute date of the form `YYYY/MM/DD` (everything up to the
    /// first whitespace character is treated as the date token).
    fn formatted_date_parser(input: &str) -> IResult<&str, NaiveDate> {
        let (remain, date_str) = take_till1(|c: char| c.is_whitespace()).parse(input)?;
        let date = NaiveDate::parse_from_str(date_str, "%Y/%m/%d").map_err(|_| {
            nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
        })?;
        Ok((remain, date))
    }

    /// Tries the relative ("今天"/"昨天") form first, then the formatted form.
    fn date_parser(input: &str) -> IResult<&str, NaiveDate> {
        alt((Self::fixed_date_parser, Self::formatted_date_parser)).parse(input)
    }

    /// Parses a 5-character `HH:MM` time token.
    fn time_parser(input: &str) -> IResult<&str, NaiveTime> {
        let (remain, time_str) = take(5usize).parse(input)?;
        let time = NaiveTime::parse_from_str(time_str, "%H:%M").map_err(|_| {
            nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
        })?;
        Ok((remain, time))
    }

    /// Parses `<date> <time>` from the first table cell and converts it from
    /// local Mikan time (UTC+8) to UTC. Returns `None` when parsing fails or
    /// the local datetime is ambiguous/nonexistent.
    fn extract_publish_at(text: &str) -> Option<DateTime<Utc>> {
        let (_, (date, _, time)) = (Self::date_parser, space1, Self::time_parser)
            .parse(text)
            .ok()?;
        let local_dt = Self::timezone()
            .from_local_datetime(&date.and_time(time))
            .single()?;
        Some(local_dt.with_timezone(&Utc))
    }

    /// Builds a row from a `<tr>` element of the classic episode table.
    ///
    /// Columns (1-based): 1 = publish time, 2 = fansub link, 3 = episode link
    /// plus magnet anchor, 4 = file size, 5 = torrent link.
    ///
    /// # Errors
    /// Fails when any of the mandatory fields (`mikan_episode_id`,
    /// `original_name`, `publish_at`) cannot be extracted; the error message
    /// lists the missing fields and the row index.
    pub fn from_element_ref(
        row: ElementRef<'_>,
        rev_id: i32,
        idx: i32,
        mikan_base_url: &Url,
    ) -> RecorderResult<Self> {
        let publish_at_selector = &Selector::parse("td:nth-of-type(1)").unwrap();
        let fansub_selector = &Selector::parse("td:nth-of-type(2) > a").unwrap();
        let original_name_selector =
            &Selector::parse("td:nth-of-type(3) > a:nth-of-type(1)").unwrap();
        let magnet_link_selector =
            &Selector::parse("td:nth-of-type(3) > a:nth-of-type(2)").unwrap();
        let file_size_selector = &Selector::parse("td:nth-of-type(4)").unwrap();
        let torrent_link_selector = &Selector::parse("td:nth-of-type(5) > a").unwrap();

        // Column 1: publish time text -> UTC datetime.
        let publish_at = row
            .select(publish_at_selector)
            .next()
            .map(extract_inner_text_from_element_ref)
            .and_then(|e| Self::extract_publish_at(&e));

        // Column 2: fansub hash (from the href) and display name, if the link
        // exists and resolves to a recognized fansub homepage URL.
        let (mikan_fansub_hash, fansub_name) = row
            .select(fansub_selector)
            .next()
            .and_then(|e| {
                e.attr("href")
                    .and_then(|s| mikan_base_url.join(s).ok())
                    .and_then(|u| MikanFansubHash::from_homepage_url(&u))
                    .map(|h| (h, extract_inner_text_from_element_ref(e)))
            })
            .unzip();

        // Column 3 (first anchor): episode hash and original release title.
        let (mikan_episode_hash, original_name) = row
            .select(original_name_selector)
            .next()
            .and_then(|el| {
                el.attr("href")
                    .and_then(|s| mikan_base_url.join(s).ok())
                    .and_then(|u| MikanEpisodeHash::from_homepage_url(&u))
                    .map(|h| (h, extract_inner_text_from_element_ref(el)))
            })
            .unzip();

        // Column 3 (second anchor): the magnet URI is stored in the
        // clipboard-copy attribute, not the href.
        let magnet_link = row
            .select(magnet_link_selector)
            .next()
            .and_then(|el| el.attr("data-clipboard-text"));

        let file_size = row
            .select(file_size_selector)
            .next()
            .map(extract_inner_text_from_element_ref);

        let torrent_link = row
            .select(torrent_link_selector)
            .next()
            .and_then(|el| el.attr("href"));

        if let (Some(mikan_episode_hash), Some(original_name), Some(publish_at)) = (
            mikan_episode_hash.as_ref(),
            original_name.as_ref(),
            publish_at.as_ref(),
        ) {
            Ok(Self {
                // Page-unique id; assumes fewer than 1000 rows per page.
                id: rev_id * 1000 + idx,
                publish_at: *publish_at,
                mikan_fansub_id: mikan_fansub_hash.map(|h| h.mikan_fansub_id.clone()),
                fansub_name,
                mikan_episode_id: mikan_episode_hash.mikan_episode_id.clone(),
                original_name: original_name.clone(),
                magnet_link: magnet_link.map(|s| s.to_string()),
                file_size: file_size.map(|s| s.to_string()),
                torrent_link: torrent_link.map(|s| s.to_string()),
            })
        } else {
            // Report exactly which mandatory fields failed to parse.
            let mut missing_fields = vec![];
            if mikan_episode_hash.is_none() {
                missing_fields.push("mikan_episode_id");
            }
            if original_name.is_none() {
                missing_fields.push("original_name");
            }
            if publish_at.is_none() {
                missing_fields.push("publish_at");
            }
            Err(RecorderError::without_source(format!(
                "Failed to parse episode table row, missing fields: {missing_fields:?}, row \
                 index: {idx}"
            )))
        }
    }
}
|
||||||
|
|
||||||
|
/// One fully parsed page of Mikan's classic episode listing, together with the
/// raw HTML it was parsed from (kept so it can be cached to disk).
pub struct MikanClassicEpisodeTablePage {
    /// 1-based page number of this page.
    pub page: i32,
    /// Total number of pages reported by the site's pagination script.
    pub total: i32,
    /// Raw HTML of the page, preserved for fixture caching.
    pub html: String,
    /// Parsed episode rows of this page.
    pub rows: Vec<MikanClassicEpisodeTableRow>,
}
|
||||||
|
|
||||||
|
impl MikanClassicEpisodeTablePage {
|
||||||
|
pub fn from_html(
|
||||||
|
html: String,
|
||||||
|
mikan_base_url: &Url,
|
||||||
|
page: i32,
|
||||||
|
updated_info: Option<(i32, i32)>,
|
||||||
|
) -> RecorderResult<Self> {
|
||||||
|
let tr_selector = &Selector::parse("tbody tr").unwrap();
|
||||||
|
let doc = Html::parse_document(&html);
|
||||||
|
if let Some(mut total) = TOTAL_PAGE_REGEX
|
||||||
|
.captures(&html)
|
||||||
|
.and_then(|c| c.get(1))
|
||||||
|
.and_then(|s| s.as_str().parse::<i32>().ok())
|
||||||
|
{
|
||||||
|
if let Some((_, update_total)) = updated_info {
|
||||||
|
total = update_total;
|
||||||
|
}
|
||||||
|
|
||||||
|
let rev_id = total - page;
|
||||||
|
let rows = doc
|
||||||
|
.select(tr_selector)
|
||||||
|
.rev()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(idx, tr)| {
|
||||||
|
MikanClassicEpisodeTableRow::from_element_ref(
|
||||||
|
tr,
|
||||||
|
rev_id,
|
||||||
|
idx as i32,
|
||||||
|
mikan_base_url,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect::<RecorderResult<Vec<_>>>()?;
|
||||||
|
Ok(Self {
|
||||||
|
page,
|
||||||
|
total,
|
||||||
|
html,
|
||||||
|
rows,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
Err(RecorderError::without_source(
|
||||||
|
"Failed to parse pagination meta and rows".into(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn save_to_files(&self) -> RecorderResult<()> {
|
||||||
|
use polars::prelude::*;
|
||||||
|
|
||||||
|
let rev_id = self.total - self.page;
|
||||||
|
let parquet_path = TEST_FOLDER.join(format!("parquet/rev_{rev_id}.parquet"));
|
||||||
|
let csv_path = TEST_FOLDER.join(format!("csv/rev_{rev_id}.csv"));
|
||||||
|
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
|
||||||
|
|
||||||
|
std::fs::write(html_path, self.html.clone())?;
|
||||||
|
|
||||||
|
let mut id_vec = Vec::new();
|
||||||
|
let mut publish_at_vec = Vec::new();
|
||||||
|
let mut mikan_fansub_id_vec = Vec::new();
|
||||||
|
let mut fansub_name_vec = Vec::new();
|
||||||
|
let mut mikan_episode_id_vec = Vec::new();
|
||||||
|
let mut original_name_vec = Vec::new();
|
||||||
|
let mut magnet_link_vec = Vec::new();
|
||||||
|
let mut file_size_vec = Vec::new();
|
||||||
|
let mut torrent_link_vec = Vec::new();
|
||||||
|
|
||||||
|
for row in &self.rows {
|
||||||
|
id_vec.push(row.id);
|
||||||
|
publish_at_vec.push(row.publish_at.to_rfc3339());
|
||||||
|
mikan_fansub_id_vec.push(row.mikan_fansub_id.clone());
|
||||||
|
fansub_name_vec.push(row.fansub_name.clone());
|
||||||
|
mikan_episode_id_vec.push(row.mikan_episode_id.clone());
|
||||||
|
original_name_vec.push(row.original_name.clone());
|
||||||
|
magnet_link_vec.push(row.magnet_link.clone());
|
||||||
|
file_size_vec.push(row.file_size.clone());
|
||||||
|
torrent_link_vec.push(row.torrent_link.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
let df = df! [
|
||||||
|
"id" => id_vec,
|
||||||
|
"publish_at_timestamp" => publish_at_vec,
|
||||||
|
"mikan_fansub_id" => mikan_fansub_id_vec,
|
||||||
|
"fansub_name" => fansub_name_vec,
|
||||||
|
"mikan_episode_id" => mikan_episode_id_vec,
|
||||||
|
"original_name" => original_name_vec,
|
||||||
|
"magnet_link" => magnet_link_vec,
|
||||||
|
"file_size" => file_size_vec,
|
||||||
|
"torrent_link" => torrent_link_vec,
|
||||||
|
]
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to create DataFrame: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut parquet_file = std::fs::File::create(&parquet_path)?;
|
||||||
|
|
||||||
|
ParquetWriter::new(&mut parquet_file)
|
||||||
|
.finish(&mut df.clone())
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to write parquet file: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut csv_file = std::fs::File::create(&csv_path)?;
|
||||||
|
|
||||||
|
CsvWriter::new(&mut csv_file)
|
||||||
|
.include_header(true)
|
||||||
|
.with_quote_style(QuoteStyle::Always)
|
||||||
|
.finish(&mut df.clone())
|
||||||
|
.map_err(|e| {
|
||||||
|
let message = format!("Failed to write csv file: {e}");
|
||||||
|
RecorderError::with_source(Box::new(e), message)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
println!(
|
||||||
|
"[{}/{}] Saved {} rows to rev_{}.{{parquet,html,csv}}",
|
||||||
|
self.page,
|
||||||
|
self.total,
|
||||||
|
self.rows.len(),
|
||||||
|
rev_id
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn waiting_rev_ids(total: i32) -> RecorderResult<Vec<i32>> {
|
||||||
|
let dir = TEST_FOLDER.join("csv");
|
||||||
|
|
||||||
|
let files = std::fs::read_dir(dir)?;
|
||||||
|
|
||||||
|
let rev_ids = files
|
||||||
|
.filter_map(|f| f.ok())
|
||||||
|
.filter_map(|f| {
|
||||||
|
f.path().file_stem().and_then(|s| {
|
||||||
|
s.to_str().and_then(|s| {
|
||||||
|
if s.starts_with("rev_") {
|
||||||
|
s.replace("rev_", "").parse::<i32>().ok()
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect::<HashSet<_>>();
|
||||||
|
|
||||||
|
Ok((0..total)
|
||||||
|
.filter(|rev_id| !rev_ids.contains(rev_id))
|
||||||
|
.collect::<Vec<_>>())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn scrape_mikan_classic_episode_table_page(
|
||||||
|
mikan_client: &MikanClient,
|
||||||
|
page: i32,
|
||||||
|
updated_info: Option<(i32, i32)>,
|
||||||
|
) -> RecorderResult<MikanClassicEpisodeTablePage> {
|
||||||
|
let mikan_base_url = mikan_client.base_url();
|
||||||
|
let url = mikan_base_url.join(&format!("/Home/Classic/{page}"))?;
|
||||||
|
|
||||||
|
if let Some((rev_id, update_total)) = updated_info.as_ref() {
|
||||||
|
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
|
||||||
|
if html_path.exists() {
|
||||||
|
let html = std::fs::read_to_string(&html_path)?;
|
||||||
|
println!("[{page}/{update_total}] html exists, skipping fetch");
|
||||||
|
return MikanClassicEpisodeTablePage::from_html(
|
||||||
|
html,
|
||||||
|
mikan_base_url,
|
||||||
|
page,
|
||||||
|
updated_info,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let total = if let Some((_, update_total)) = updated_info.as_ref() {
|
||||||
|
update_total.to_string()
|
||||||
|
} else {
|
||||||
|
"Unknown".to_string()
|
||||||
|
};
|
||||||
|
|
||||||
|
println!("[{page}/{total}] fetching html...");
|
||||||
|
|
||||||
|
let html = fetch_html(mikan_client, url).await?;
|
||||||
|
|
||||||
|
println!("[{page}/{total}] fetched html done");
|
||||||
|
|
||||||
|
std::fs::write(TEST_FOLDER.join("html/temp.html"), html.clone())?;
|
||||||
|
|
||||||
|
MikanClassicEpisodeTablePage::from_html(html, mikan_base_url, page, updated_info)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn scrape_mikan_classic_episode_table_page_from_rev_id(
|
||||||
|
mikan_client: &MikanClient,
|
||||||
|
total: i32,
|
||||||
|
rev_idx: i32,
|
||||||
|
) -> RecorderResult<MikanClassicEpisodeTablePage> {
|
||||||
|
let page = total - rev_idx;
|
||||||
|
|
||||||
|
scrape_mikan_classic_episode_table_page(mikan_client, page, Some((rev_idx, total))).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Merges every per-page `parquet/rev_*.parquet` fixture into a single
/// deduplicated dataset and writes column-stripped variants back under
/// `parquet/` (currently only the "tiny" variant is enabled).
///
/// # Errors
/// Fails when no per-page parquet files exist, or on any filesystem/polars
/// error while reading, concatenating, or writing.
async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()> {
    use polars::prelude::*;

    let dir = TEST_FOLDER.join("parquet");
    let files = std::fs::read_dir(dir)?;

    // Only per-revision inputs (`rev_*.parquet`); previously merged outputs
    // such as `tiny.parquet` are excluded by the `rev_` stem check.
    let parquet_paths = files
        .filter_map(|f| f.ok())
        .filter_map(|f| {
            let path = f.path();
            if let Some(ext) = path.extension()
                && ext == "parquet"
                && path
                    .file_stem()
                    .is_some_and(|f| f.to_string_lossy().starts_with("rev_"))
            {
                Some(path)
            } else {
                None
            }
        })
        .collect::<Vec<_>>();

    if parquet_paths.is_empty() {
        return Err(RecorderError::without_source(
            "No parquet files found to merge".into(),
        ));
    }

    println!("Found {} parquet files to merge", parquet_paths.len());

    // Read and merge all parquet files.
    let mut all_dfs = Vec::new();
    for path in &parquet_paths {
        println!("Reading {path:?}");
        let file = std::fs::File::open(path)?;
        let df = ParquetReader::new(file).finish().map_err(|e| {
            let message = format!("Failed to read parquet file {path:?}: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?;
        all_dfs.push(df);
    }

    let lazy_frames: Vec<LazyFrame> = all_dfs.into_iter().map(|df| df.lazy()).collect();

    // Diagonal concat tolerates column differences between revisions; the
    // descending sort makes the newest row win the (fansub, episode) dedup
    // with `UniqueKeepStrategy::First`.
    let merged_df = concat_lf_diagonal(&lazy_frames, UnionArgs::default())
        .map_err(|e| {
            let message = format!("Failed to concat DataFrames: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?
        .sort(
            ["publish_at_timestamp"],
            SortMultipleOptions::default().with_order_descending(true),
        )
        .unique(
            Some(vec![
                "mikan_fansub_id".to_string(),
                "mikan_episode_id".to_string(),
            ]),
            UniqueKeepStrategy::First,
        )
        .collect()
        .map_err(|e| {
            let message = format!("Failed to collect lazy DataFrame: {e}");
            RecorderError::with_source(Box::new(e), message)
        })?;

    // Writes a projection of `merged_df` (re-sorted ascending by publish time)
    // to `parquet/{name}.parquet` with maximum-level zstd compression.
    fn select_columns_and_write(
        merged_df: DataFrame,
        name: &str,
        columns: &[&str],
    ) -> RecorderResult<()> {
        let result_df = merged_df
            .lazy()
            .sort(["publish_at_timestamp"], SortMultipleOptions::default())
            .select(columns.iter().map(|c| col(*c)).collect_vec())
            .collect()
            .map_err(|e| {
                let message = format!("Failed to sort and select columns: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;

        let output_path = TEST_FOLDER.join(format!("parquet/{name}.parquet"));
        let mut output_file = std::fs::File::create(&output_path)?;

        ParquetWriter::new(&mut output_file)
            .set_parallel(true)
            .with_compression(ParquetCompression::Zstd(Some(
                ZstdLevel::try_new(22).unwrap(),
            )))
            .finish(&mut result_df.clone())
            .map_err(|e| {
                let message = format!("Failed to write merged parquet file: {e}");
                RecorderError::with_source(Box::new(e), message)
            })?;

        println!("Merged {} rows into {output_path:?}", result_df.height());
        Ok(())
    }

    select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
    // select_columns_and_write(
    //     merged_df.clone(),
    //     "lite",
    //     &[
    //         "mikan_fansub_id",
    //         "fansub_name",
    //         "mikan_episode_id",
    //         "original_name",
    //     ],
    // )?;
    // select_columns_and_write(
    //     merged_df,
    //     "full",
    //     &[
    //         "id",
    //         "publish_at_timestamp",
    //         "mikan_fansub_id",
    //         "fansub_name",
    //         "mikan_episode_id",
    //         "original_name",
    //         "magnet_link",
    //         "file_size",
    //         "torrent_link",
    //     ],
    // )?;

    Ok(())
}
|
||||||
|
|
||||||
|
#[tokio::main]
async fn main() -> RecorderResult<()> {
    // Make sure every fixture subdirectory exists before scraping.
    std::fs::create_dir_all(TEST_FOLDER.join("html"))?;
    std::fs::create_dir_all(TEST_FOLDER.join("parquet"))?;
    std::fs::create_dir_all(TEST_FOLDER.join("csv"))?;

    // Rate-limited client (leaky bucket ~1 req/s, 3 retries) with a browser-like
    // user agent, targeting the public Mikan site.
    let mikan_scrape_client = MikanClient::from_config(MikanConfig {
        http_client: HttpClientConfig {
            exponential_backoff_max_retries: Some(3),
            leaky_bucket_max_tokens: Some(2),
            leaky_bucket_initial_tokens: Some(1),
            leaky_bucket_refill_tokens: Some(1),
            leaky_bucket_refill_interval: Some(std::time::Duration::from_millis(1000)),
            user_agent: Some(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
                 Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
                    .to_string(),
            ),
            ..Default::default()
        },
        base_url: Url::parse("https://mikanani.me")?,
    })
    .await?;

    // Page 1 also carries the pagination metadata (total page count).
    let first_page_and_pagination_info =
        scrape_mikan_classic_episode_table_page(&mikan_scrape_client, 1, None).await?;

    let total_page = first_page_and_pagination_info.total;

    first_page_and_pagination_info.save_to_files()?;

    // Resume support: only scrape reverse ids without a saved csv fixture.
    let next_rev_ids = MikanClassicEpisodeTablePage::waiting_rev_ids(total_page)?;

    for todo_rev_id in next_rev_ids {
        let page = scrape_mikan_classic_episode_table_page_from_rev_id(
            &mikan_scrape_client,
            total_page,
            todo_rev_id,
        )
        .await?;

        page.save_to_files()?;
    }

    // Merge all per-page parquet files into the deduplicated dataset(s).
    println!("\nMerging all parquet files...");

    merge_mikan_classic_episodes_and_strip_columns().await?;

    println!("Merge completed!");

    Ok(())
}
|
||||||
250
apps/recorder/examples/mikan_doppel_season_subscription.rs
Normal file
250
apps/recorder/examples/mikan_doppel_season_subscription.rs
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
use std::{str::FromStr, time::Duration};
|
||||||
|
|
||||||
|
use color_eyre::{Result, eyre::OptionExt};
|
||||||
|
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
|
||||||
|
use inquire::{Password, Text, validator::Validation};
|
||||||
|
use recorder::{
|
||||||
|
crypto::UserPassCredential,
|
||||||
|
extract::mikan::{
|
||||||
|
MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
|
||||||
|
build_mikan_bangumi_expand_subscribed_url,
|
||||||
|
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
|
||||||
|
extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
|
||||||
|
},
|
||||||
|
test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
|
||||||
|
};
|
||||||
|
use scraper::Html;
|
||||||
|
use tokio::fs;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> Result<()> {
|
||||||
|
tracing_subscriber::fmt()
|
||||||
|
.with_max_level(tracing::Level::INFO)
|
||||||
|
.init();
|
||||||
|
std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;
|
||||||
|
|
||||||
|
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
|
||||||
|
http_client: HttpClientConfig {
|
||||||
|
exponential_backoff_max_retries: Some(3),
|
||||||
|
leaky_bucket_max_tokens: Some(2),
|
||||||
|
leaky_bucket_initial_tokens: Some(0),
|
||||||
|
leaky_bucket_refill_tokens: Some(1),
|
||||||
|
leaky_bucket_refill_interval: Some(Duration::from_millis(1000)),
|
||||||
|
user_agent: Some(
|
||||||
|
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
|
||||||
|
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
|
||||||
|
.to_string(),
|
||||||
|
),
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
base_url: Url::parse("https://mikanani.me")?,
|
||||||
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let username_validator = |input: &str| {
|
||||||
|
if input.trim().is_empty() {
|
||||||
|
Ok(Validation::Invalid("Username cannot be empty".into()))
|
||||||
|
} else {
|
||||||
|
Ok(Validation::Valid)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let password_validator = |input: &str| {
|
||||||
|
if input.trim().is_empty() {
|
||||||
|
Ok(Validation::Invalid("Password cannot be empty".into()))
|
||||||
|
} else {
|
||||||
|
Ok(Validation::Valid)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let username = Text::new("Please enter your mikan username:")
|
||||||
|
.with_validator(username_validator)
|
||||||
|
.prompt()?;
|
||||||
|
let password = Password::new("Please enter your mikan password:")
|
||||||
|
.without_confirmation()
|
||||||
|
.with_display_mode(inquire::PasswordDisplayMode::Masked)
|
||||||
|
.with_validator(password_validator)
|
||||||
|
.prompt()?;
|
||||||
|
|
||||||
|
let mikan_scrape_client = mikan_scrape_client
|
||||||
|
.fork_with_userpass_credential(UserPassCredential {
|
||||||
|
username,
|
||||||
|
password,
|
||||||
|
user_agent: None,
|
||||||
|
cookies: None,
|
||||||
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
tracing::info!("Checking if logged in...");
|
||||||
|
if !mikan_scrape_client.has_login().await? {
|
||||||
|
tracing::info!("Logging in to mikan...");
|
||||||
|
mikan_scrape_client.login().await?;
|
||||||
|
tracing::info!("Logged in to mikan");
|
||||||
|
}
|
||||||
|
|
||||||
|
let mikan_base_url = mikan_scrape_client.base_url().clone();
|
||||||
|
tracing::info!("Scraping season subscription...");
|
||||||
|
let season_subscription =
|
||||||
|
fs::read("tests/resources/mikan/BangumiCoverFlow-2025-spring.html").await?;
|
||||||
|
let html = Html::parse_fragment(String::from_utf8(season_subscription)?.as_str());
|
||||||
|
let bangumi_index_list =
|
||||||
|
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(&html, &mikan_base_url);
|
||||||
|
|
||||||
|
for bangumi_index in bangumi_index_list {
|
||||||
|
let bangumi_meta = {
|
||||||
|
let bangumi_expand_subscribed_url = build_mikan_bangumi_expand_subscribed_url(
|
||||||
|
mikan_base_url.clone(),
|
||||||
|
bangumi_index.mikan_bangumi_id.as_ref(),
|
||||||
|
);
|
||||||
|
let bangumi_expand_subscribed_doppel_path =
|
||||||
|
MikanDoppelPath::new(bangumi_expand_subscribed_url.clone());
|
||||||
|
tracing::info!(
|
||||||
|
bangumi_title = bangumi_index.bangumi_title,
|
||||||
|
"Scraping bangumi expand subscribed..."
|
||||||
|
);
|
||||||
|
let bangumi_expand_subscribed_data =
|
||||||
|
if !bangumi_expand_subscribed_doppel_path.exists_any() {
|
||||||
|
let bangumi_expand_subscribed_data =
|
||||||
|
fetch_html(&mikan_scrape_client, bangumi_expand_subscribed_url).await?;
|
||||||
|
bangumi_expand_subscribed_doppel_path.write(&bangumi_expand_subscribed_data)?;
|
||||||
|
tracing::info!(
|
||||||
|
bangumi_title = bangumi_index.bangumi_title,
|
||||||
|
"Bangumi expand subscribed saved"
|
||||||
|
);
|
||||||
|
bangumi_expand_subscribed_data
|
||||||
|
} else {
|
||||||
|
tracing::info!(
|
||||||
|
bangumi_title = bangumi_index.bangumi_title,
|
||||||
|
"Bangumi expand subscribed already exists"
|
||||||
|
);
|
||||||
|
String::from_utf8(bangumi_expand_subscribed_doppel_path.read()?)?
|
||||||
|
};
|
||||||
|
|
||||||
|
let html = Html::parse_fragment(&bangumi_expand_subscribed_data);
|
||||||
|
extract_mikan_bangumi_meta_from_expand_subscribed_fragment(
|
||||||
|
&html,
|
||||||
|
bangumi_index.clone(),
|
||||||
|
mikan_base_url.clone(),
|
||||||
|
)
|
||||||
|
.ok_or_eyre(format!(
|
||||||
|
"Failed to extract bangumi meta from expand subscribed fragment: {:?}",
|
||||||
|
bangumi_index.bangumi_title
|
||||||
|
))
|
||||||
|
}?;
|
||||||
|
{
|
||||||
|
if let Some(poster_url) = bangumi_meta.origin_poster_src.as_ref() {
|
||||||
|
let poster_doppel_path = MikanDoppelPath::new(poster_url.clone());
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Scraping bangumi poster..."
|
||||||
|
);
|
||||||
|
if !poster_doppel_path.exists_any() {
|
||||||
|
let poster_data = fetch_image(&mikan_scrape_client, poster_url.clone()).await?;
|
||||||
|
poster_doppel_path.write(&poster_data)?;
|
||||||
|
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi poster saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Bangumi poster already exists"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let bangumi_homepage_url = bangumi_meta
|
||||||
|
.bangumi_hash()
|
||||||
|
.build_homepage_url(mikan_base_url.clone());
|
||||||
|
let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Scraping bangumi homepage..."
|
||||||
|
);
|
||||||
|
if !bangumi_homepage_doppel_path.exists_any() {
|
||||||
|
let bangumi_homepage_data =
|
||||||
|
fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
|
||||||
|
bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
|
||||||
|
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi homepage saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Bangumi homepage already exists"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let rss_items = {
|
||||||
|
let bangumi_rss_url = bangumi_meta
|
||||||
|
.bangumi_hash()
|
||||||
|
.build_rss_url(mikan_base_url.clone());
|
||||||
|
let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Scraping bangumi rss..."
|
||||||
|
);
|
||||||
|
let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
|
||||||
|
let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
|
||||||
|
bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
|
||||||
|
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi rss saved");
|
||||||
|
bangumi_rss_data
|
||||||
|
} else {
|
||||||
|
tracing::info!(
|
||||||
|
title = bangumi_meta.bangumi_title,
|
||||||
|
"Bangumi rss already exists"
|
||||||
|
);
|
||||||
|
String::from_utf8(bangumi_rss_doppel_path.read()?)?
|
||||||
|
};
|
||||||
|
let rss_items = MikanRssRoot::from_str(&bangumi_rss_data)?.channel.items;
|
||||||
|
rss_items
|
||||||
|
.into_iter()
|
||||||
|
.map(MikanRssItemMeta::try_from)
|
||||||
|
.collect::<Result<Vec<_>, _>>()
|
||||||
|
}?;
|
||||||
|
for rss_item in rss_items {
|
||||||
|
{
|
||||||
|
let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
|
||||||
|
let episode_homepage_doppel_path =
|
||||||
|
MikanDoppelPath::new(episode_homepage_url.clone());
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping episode...");
|
||||||
|
if !episode_homepage_doppel_path.exists_any() {
|
||||||
|
let episode_homepage_data =
|
||||||
|
fetch_html(&mikan_scrape_client, episode_homepage_url).await?;
|
||||||
|
episode_homepage_doppel_path.write(&episode_homepage_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Episode saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Episode already exists");
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let episode_torrent_url = rss_item.torrent_link;
|
||||||
|
let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
|
||||||
|
if !episode_torrent_doppel_path.exists_any() {
|
||||||
|
match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
|
||||||
|
Ok(episode_torrent_data) => {
|
||||||
|
episode_torrent_doppel_path.write(&episode_torrent_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Episode torrent saved");
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
if let FetchError::ReqwestError { source } = &e
|
||||||
|
&& source
|
||||||
|
.status()
|
||||||
|
.is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
|
||||||
|
{
|
||||||
|
tracing::warn!(
|
||||||
|
title = rss_item.title,
|
||||||
|
"Episode torrent not found, maybe deleted since new version"
|
||||||
|
);
|
||||||
|
episode_torrent_doppel_path
|
||||||
|
.write_meta(MikanDoppelMeta { status: 404 })?;
|
||||||
|
} else {
|
||||||
|
Err(e)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Episode torrent already exists");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tracing::info!("Scraping season subscription done");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
215
apps/recorder/examples/mikan_doppel_subscriber_subscription.rs
Normal file
215
apps/recorder/examples/mikan_doppel_subscriber_subscription.rs
Normal file
@@ -0,0 +1,215 @@
|
|||||||
|
use std::{str::FromStr, time::Duration};
|
||||||
|
|
||||||
|
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
|
||||||
|
use recorder::{
|
||||||
|
errors::RecorderResult,
|
||||||
|
extract::mikan::{
|
||||||
|
MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
|
||||||
|
extract_mikan_episode_meta_from_episode_homepage_html,
|
||||||
|
},
|
||||||
|
test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
|
||||||
|
};
|
||||||
|
use scraper::Html;
|
||||||
|
use tokio::fs;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> RecorderResult<()> {
|
||||||
|
tracing_subscriber::fmt()
|
||||||
|
.with_max_level(tracing::Level::INFO)
|
||||||
|
.init();
|
||||||
|
std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;
|
||||||
|
|
||||||
|
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
|
||||||
|
http_client: HttpClientConfig {
|
||||||
|
exponential_backoff_max_retries: Some(3),
|
||||||
|
leaky_bucket_max_tokens: Some(2),
|
||||||
|
leaky_bucket_initial_tokens: Some(0),
|
||||||
|
leaky_bucket_refill_tokens: Some(1),
|
||||||
|
leaky_bucket_refill_interval: Some(Duration::from_millis(500)),
|
||||||
|
user_agent: Some(
|
||||||
|
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
|
||||||
|
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
|
||||||
|
.to_string(),
|
||||||
|
),
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
base_url: Url::parse("https://mikanani.me")?,
|
||||||
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mikan_base_url = mikan_scrape_client.base_url().clone();
|
||||||
|
tracing::info!("Scraping subscriber subscription...");
|
||||||
|
let subscriber_subscription =
|
||||||
|
fs::read_to_string("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
|
||||||
|
let channel = MikanRssRoot::from_str(&subscriber_subscription)?.channel;
|
||||||
|
let rss_items: Vec<MikanRssItemMeta> = channel
|
||||||
|
.items
|
||||||
|
.into_iter()
|
||||||
|
.map(MikanRssItemMeta::try_from)
|
||||||
|
.collect::<Result<Vec<_>, _>>()?;
|
||||||
|
for rss_item in rss_items {
|
||||||
|
let episode_homepage_meta = {
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping episode homepage...");
|
||||||
|
let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
|
||||||
|
let episode_homepage_doppel_path = MikanDoppelPath::new(episode_homepage_url.clone());
|
||||||
|
let episode_homepage_data = if !episode_homepage_doppel_path.exists_any() {
|
||||||
|
let episode_homepage_data =
|
||||||
|
fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
|
||||||
|
episode_homepage_doppel_path.write(&episode_homepage_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Episode homepage saved");
|
||||||
|
episode_homepage_data
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Episode homepage already exists");
|
||||||
|
String::from_utf8(episode_homepage_doppel_path.read()?)?
|
||||||
|
};
|
||||||
|
let html = Html::parse_document(&episode_homepage_data);
|
||||||
|
extract_mikan_episode_meta_from_episode_homepage_html(
|
||||||
|
&html,
|
||||||
|
mikan_base_url.clone(),
|
||||||
|
episode_homepage_url,
|
||||||
|
)
|
||||||
|
}?;
|
||||||
|
|
||||||
|
{
|
||||||
|
let episode_torrent_url = rss_item.torrent_link;
|
||||||
|
let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
|
||||||
|
if !episode_torrent_doppel_path.exists_any() {
|
||||||
|
match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
|
||||||
|
Ok(episode_torrent_data) => {
|
||||||
|
episode_torrent_doppel_path.write(&episode_torrent_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Episode torrent saved");
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
if let FetchError::ReqwestError { source } = &e
|
||||||
|
&& source
|
||||||
|
.status()
|
||||||
|
.is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
|
||||||
|
{
|
||||||
|
tracing::warn!(
|
||||||
|
title = rss_item.title,
|
||||||
|
"Episode torrent not found, maybe deleted since new version"
|
||||||
|
);
|
||||||
|
episode_torrent_doppel_path
|
||||||
|
.write_meta(MikanDoppelMeta { status: 404 })?;
|
||||||
|
} else {
|
||||||
|
Err(e)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tracing::info!(title = rss_item.title, "Episode torrent saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Episode torrent already exists");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
{
|
||||||
|
if let Some(episode_poster_url) = episode_homepage_meta.origin_poster_src.as_ref() {
|
||||||
|
let episode_poster_doppel_path = MikanDoppelPath::new(episode_poster_url.clone());
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping episode poster...");
|
||||||
|
if !episode_poster_doppel_path.exists_any() {
|
||||||
|
let episode_poster_data =
|
||||||
|
fetch_image(&mikan_scrape_client, episode_poster_url.clone()).await?;
|
||||||
|
episode_poster_doppel_path.write(&episode_poster_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Episode poster saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Episode poster already exists");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
let bangumi_homepage_url = episode_homepage_meta
|
||||||
|
.bangumi_hash()
|
||||||
|
.build_homepage_url(mikan_base_url.clone());
|
||||||
|
let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping bangumi homepage...");
|
||||||
|
if !bangumi_homepage_doppel_path.exists_any() {
|
||||||
|
let bangumi_homepage_data =
|
||||||
|
fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
|
||||||
|
bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Bangumi homepage saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Bangumi homepage already exists");
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let bangumi_rss_url = episode_homepage_meta
|
||||||
|
.bangumi_hash()
|
||||||
|
.build_rss_url(mikan_base_url.clone());
|
||||||
|
let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping bangumi rss...");
|
||||||
|
let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
|
||||||
|
let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
|
||||||
|
bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Bangumi rss saved");
|
||||||
|
bangumi_rss_data
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Bangumi rss already exists");
|
||||||
|
String::from_utf8(bangumi_rss_doppel_path.read()?)?
|
||||||
|
};
|
||||||
|
|
||||||
|
let rss_items: Vec<MikanRssItemMeta> = MikanRssRoot::from_str(&bangumi_rss_data)?
|
||||||
|
.channel
|
||||||
|
.items
|
||||||
|
.into_iter()
|
||||||
|
.map(MikanRssItemMeta::try_from)
|
||||||
|
.collect::<Result<Vec<_>, _>>()?;
|
||||||
|
for rss_item in rss_items {
|
||||||
|
{
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping episode homepage...");
|
||||||
|
let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
|
||||||
|
let episode_homepage_doppel_path =
|
||||||
|
MikanDoppelPath::new(episode_homepage_url.clone());
|
||||||
|
if !episode_homepage_doppel_path.exists_any() {
|
||||||
|
let episode_homepage_data =
|
||||||
|
fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
|
||||||
|
episode_homepage_doppel_path.write(&episode_homepage_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Episode homepage saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Episode homepage already exists");
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
{
|
||||||
|
let episode_torrent_url = rss_item.torrent_link;
|
||||||
|
let episode_torrent_doppel_path =
|
||||||
|
MikanDoppelPath::new(episode_torrent_url.clone());
|
||||||
|
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
|
||||||
|
if !episode_torrent_doppel_path.exists_any() {
|
||||||
|
match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
|
||||||
|
Ok(episode_torrent_data) => {
|
||||||
|
episode_torrent_doppel_path.write(&episode_torrent_data)?;
|
||||||
|
tracing::info!(title = rss_item.title, "Episode torrent saved");
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
if let FetchError::ReqwestError { source } = &e
|
||||||
|
&& source.status().is_some_and(|status| {
|
||||||
|
status == reqwest::StatusCode::NOT_FOUND
|
||||||
|
})
|
||||||
|
{
|
||||||
|
tracing::warn!(
|
||||||
|
title = rss_item.title,
|
||||||
|
"Episode torrent not found, maybe deleted since new \
|
||||||
|
version"
|
||||||
|
);
|
||||||
|
episode_torrent_doppel_path
|
||||||
|
.write_meta(MikanDoppelMeta { status: 404 })?;
|
||||||
|
} else {
|
||||||
|
Err(e)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tracing::info!(title = rss_item.title, "Episode torrent saved");
|
||||||
|
} else {
|
||||||
|
tracing::info!(title = rss_item.title, "Episode torrent already exists");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tracing::info!("Scraping subscriber subscription done");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
@@ -1,66 +1,33 @@
|
|||||||
#![allow(unused_imports)]
|
#![feature(duration_constructors_lite)]
|
||||||
use color_eyre::eyre::Context;
|
use std::{sync::Arc, time::Duration};
|
||||||
use itertools::Itertools;
|
|
||||||
use loco_rs::{
|
use apalis_sql::postgres::PostgresStorage;
|
||||||
app::Hooks,
|
|
||||||
boot::{BootResult, StartMode},
|
|
||||||
environment::Environment,
|
|
||||||
prelude::*,
|
|
||||||
};
|
|
||||||
use recorder::{
|
use recorder::{
|
||||||
app::App,
|
app::AppContextTrait,
|
||||||
extract::mikan::parse_mikan_rss_items_from_rss_link,
|
errors::RecorderResult,
|
||||||
migrations::Migrator,
|
test_utils::{
|
||||||
models::{
|
app::TestingAppContext,
|
||||||
subscribers::SEED_SUBSCRIBER,
|
database::{TestingDatabaseServiceConfig, build_testing_database_service},
|
||||||
subscriptions::{self, SubscriptionCreateFromRssDto},
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
use sea_orm_migration::MigratorTrait;
|
|
||||||
|
|
||||||
async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> color_eyre::eyre::Result<()> {
|
|
||||||
let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
|
|
||||||
|
|
||||||
// let rss_link =
|
|
||||||
// "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
|
|
||||||
let subscription = if let Some(subscription) = subscriptions::Entity::find()
|
|
||||||
.filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
|
|
||||||
.one(&ctx.db)
|
|
||||||
.await?
|
|
||||||
{
|
|
||||||
subscription
|
|
||||||
} else {
|
|
||||||
subscriptions::Model::add_subscription(
|
|
||||||
ctx,
|
|
||||||
subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
|
|
||||||
rss_link: rss_link.to_string(),
|
|
||||||
display_name: String::from("Mikan Project - 我的番组"),
|
|
||||||
enabled: Some(true),
|
|
||||||
}),
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
};
|
|
||||||
|
|
||||||
subscription.pull_subscription(ctx).await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn init() -> color_eyre::eyre::Result<AppContext> {
|
|
||||||
color_eyre::install()?;
|
|
||||||
let ctx = loco_rs::cli::playground::<App>().await?;
|
|
||||||
let BootResult {
|
|
||||||
app_context: ctx, ..
|
|
||||||
} = loco_rs::boot::run_app::<App>(&StartMode::ServerOnly, ctx).await?;
|
|
||||||
Migrator::up(&ctx.db, None).await?;
|
|
||||||
Ok(ctx)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() -> color_eyre::eyre::Result<()> {
|
async fn main() -> RecorderResult<()> {
|
||||||
let ctx = init().await?;
|
let app_ctx = {
|
||||||
pull_mikan_bangumi_rss(&ctx).await?;
|
let db_service = build_testing_database_service(TestingDatabaseServiceConfig {
|
||||||
|
auto_migrate: false,
|
||||||
|
})
|
||||||
|
.await?;
|
||||||
|
Arc::new(TestingAppContext::builder().db(db_service).build())
|
||||||
|
};
|
||||||
|
|
||||||
|
let db = app_ctx.db();
|
||||||
|
|
||||||
|
PostgresStorage::setup(db.get_postgres_connection_pool()).await?;
|
||||||
|
|
||||||
|
dbg!(db.get_postgres_connection_pool().connect_options());
|
||||||
|
|
||||||
|
tokio::time::sleep(Duration::from_hours(1)).await;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
6
apps/recorder/package.json
Normal file
6
apps/recorder/package.json
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"name": "recorder",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"private": true,
|
||||||
|
"type": "module"
|
||||||
|
}
|
||||||
94
apps/recorder/recorder.config.toml
Normal file
94
apps/recorder/recorder.config.toml
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
# Application logging configuration
|
||||||
|
[logger]
|
||||||
|
# Enable or disable logging.
|
||||||
|
enable = true
|
||||||
|
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
|
||||||
|
pretty_backtrace = true
|
||||||
|
level = "info"
|
||||||
|
# Log level, options: trace, debug, info, warn or error.
|
||||||
|
# Define the logging format. options: compact, pretty or Json
|
||||||
|
format = "compact"
|
||||||
|
# By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries
|
||||||
|
# Uncomment the line below to override to see all third party libraries you can enable this config and override the logger filters.
|
||||||
|
# override_filter: trace
|
||||||
|
|
||||||
|
# Web server configuration
|
||||||
|
[server]
|
||||||
|
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
|
||||||
|
port = 5001
|
||||||
|
binding = "0.0.0.0"
|
||||||
|
# The UI hostname or IP address that mailers will point to.
|
||||||
|
host = '{{ get_env(name="HOST", default="localhost") }}'
|
||||||
|
# Out of the box middleware configuration. to disable middleware you can changed the `enable` field to `false` of comment the middleware block
|
||||||
|
|
||||||
|
# Enable Etag cache header middleware
|
||||||
|
[server.middlewares.etag]
|
||||||
|
enable = true
|
||||||
|
|
||||||
|
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
|
||||||
|
[server.middlewares.request_id]
|
||||||
|
enable = true
|
||||||
|
|
||||||
|
[server.middlewares.logger]
|
||||||
|
enable = true
|
||||||
|
|
||||||
|
# when your code is panicked, the request still returns 500 status code.
|
||||||
|
[server.middlewares.catch_panic]
|
||||||
|
enable = true
|
||||||
|
|
||||||
|
# Timeout for incoming requests middleware. requests that take more time from the configuration will cute and 408 status code will returned.
|
||||||
|
[server.middlewares.timeout_request]
|
||||||
|
enable = false
|
||||||
|
# Duration time in milliseconds.
|
||||||
|
timeout = 5000
|
||||||
|
|
||||||
|
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
|
||||||
|
# allow_origins:
|
||||||
|
# - https://konobangu.com
|
||||||
|
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
|
||||||
|
# allow_headers:
|
||||||
|
# - Content-Type
|
||||||
|
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
|
||||||
|
# allow_methods:
|
||||||
|
# - POST
|
||||||
|
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
|
||||||
|
# max_age: 3600
|
||||||
|
[server.middlewares.cors]
|
||||||
|
enable = true
|
||||||
|
|
||||||
|
[server.middlewares.compression]
|
||||||
|
enable = true
|
||||||
|
|
||||||
|
# Database Configuration
|
||||||
|
[database]
|
||||||
|
# Database connection URI
|
||||||
|
uri = '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@localhost:5432/konobangu") }}'
|
||||||
|
# When enabled, the sql query will be logged.
|
||||||
|
enable_logging = true
|
||||||
|
# Set the timeout duration when acquiring a connection.
|
||||||
|
connect_timeout = 500
|
||||||
|
# Set the idle duration before closing a connection.
|
||||||
|
idle_timeout = 500
|
||||||
|
# Minimum number of connections for a pool.
|
||||||
|
min_connections = 1
|
||||||
|
# Maximum number of connections for a pool.
|
||||||
|
max_connections = 10
|
||||||
|
# Run migration up when application loaded
|
||||||
|
auto_migrate = true
|
||||||
|
|
||||||
|
[storage]
|
||||||
|
data_dir = './data'
|
||||||
|
|
||||||
|
[mikan]
|
||||||
|
base_url = "https://mikanani.me/"
|
||||||
|
|
||||||
|
[mikan.http_client]
|
||||||
|
exponential_backoff_max_retries = 3
|
||||||
|
leaky_bucket_max_tokens = 2
|
||||||
|
leaky_bucket_initial_tokens = 1
|
||||||
|
leaky_bucket_refill_tokens = 1
|
||||||
|
leaky_bucket_refill_interval = 500
|
||||||
|
|
||||||
|
[graphql]
|
||||||
|
# depth_limit = inf
|
||||||
|
# complexity_limit = inf
|
||||||
@@ -1,130 +0,0 @@
|
|||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use async_trait::async_trait;
|
|
||||||
use loco_rs::{
|
|
||||||
app::{AppContext, Hooks},
|
|
||||||
boot::{create_app, BootResult, StartMode},
|
|
||||||
cache,
|
|
||||||
config::Config,
|
|
||||||
controller::AppRoutes,
|
|
||||||
db::truncate_table,
|
|
||||||
environment::Environment,
|
|
||||||
prelude::*,
|
|
||||||
task::Tasks,
|
|
||||||
Result,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
auth::service::{AppAuthService, AppAuthServiceInitializer},
|
|
||||||
controllers::{self},
|
|
||||||
dal::{AppDalClient, AppDalInitalizer},
|
|
||||||
extract::mikan::{client::AppMikanClientInitializer, AppMikanClient},
|
|
||||||
graphql::service::{AppGraphQLService, AppGraphQLServiceInitializer},
|
|
||||||
migrations::Migrator,
|
|
||||||
models::subscribers,
|
|
||||||
workers::subscription_worker::SubscriptionWorker,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const CONFIG_FOLDER: &str = "LOCO_CONFIG_FOLDER";
|
|
||||||
|
|
||||||
pub trait AppContextExt {
|
|
||||||
fn get_dal_client(&self) -> &AppDalClient {
|
|
||||||
AppDalClient::app_instance()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_mikan_client(&self) -> &AppMikanClient {
|
|
||||||
AppMikanClient::app_instance()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_auth_service(&self) -> &AppAuthService {
|
|
||||||
AppAuthService::app_instance()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_graphql_service(&self) -> &AppGraphQLService {
|
|
||||||
AppGraphQLService::app_instance()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AppContextExt for AppContext {}
|
|
||||||
|
|
||||||
pub struct App;
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl Hooks for App {
|
|
||||||
async fn load_config(env: &Environment) -> Result<Config> {
|
|
||||||
std::env::var(CONFIG_FOLDER).map_or_else(
|
|
||||||
|_| {
|
|
||||||
let monorepo_project_config_dir = Path::new("./apps/recorder/config");
|
|
||||||
if monorepo_project_config_dir.exists() && monorepo_project_config_dir.is_dir() {
|
|
||||||
return env.load_from_folder(monorepo_project_config_dir);
|
|
||||||
}
|
|
||||||
let current_config_dir = Path::new("./config");
|
|
||||||
env.load_from_folder(current_config_dir)
|
|
||||||
},
|
|
||||||
|config_folder| env.load_from_folder(Path::new(&config_folder)),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn app_name() -> &'static str {
|
|
||||||
env!("CARGO_CRATE_NAME")
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
|
|
||||||
let initializers: Vec<Box<dyn Initializer>> = vec![
|
|
||||||
Box::new(AppDalInitalizer),
|
|
||||||
Box::new(AppMikanClientInitializer),
|
|
||||||
Box::new(AppGraphQLServiceInitializer),
|
|
||||||
Box::new(AppAuthServiceInitializer),
|
|
||||||
];
|
|
||||||
|
|
||||||
Ok(initializers)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn app_version() -> String {
|
|
||||||
format!(
|
|
||||||
"{} ({})",
|
|
||||||
env!("CARGO_PKG_VERSION"),
|
|
||||||
option_env!("BUILD_SHA")
|
|
||||||
.or(option_env!("GITHUB_SHA"))
|
|
||||||
.unwrap_or("dev")
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn boot(
|
|
||||||
mode: StartMode,
|
|
||||||
environment: &Environment,
|
|
||||||
config: Config,
|
|
||||||
) -> Result<BootResult> {
|
|
||||||
create_app::<Self, Migrator>(mode, environment, config).await
|
|
||||||
}
|
|
||||||
|
|
||||||
fn routes(ctx: &AppContext) -> AppRoutes {
|
|
||||||
AppRoutes::with_default_routes()
|
|
||||||
.prefix("/api")
|
|
||||||
.add_route(controllers::auth::routes())
|
|
||||||
.add_route(controllers::graphql::routes(ctx.clone()))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn connect_workers(ctx: &AppContext, queue: &Queue) -> Result<()> {
|
|
||||||
queue.register(SubscriptionWorker::build(ctx)).await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn after_context(ctx: AppContext) -> Result<AppContext> {
|
|
||||||
Ok(AppContext {
|
|
||||||
cache: cache::Cache::new(cache::drivers::inmem::new()).into(),
|
|
||||||
..ctx
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn register_tasks(_tasks: &mut Tasks) {}
|
|
||||||
|
|
||||||
async fn truncate(ctx: &AppContext) -> Result<()> {
|
|
||||||
truncate_table(&ctx.db, subscribers::Entity).await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn seed(_ctx: &AppContext, _base: &Path) -> Result<()> {
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
154
apps/recorder/src/app/builder.rs
Normal file
154
apps/recorder/src/app/builder.rs
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
use clap::{Parser, command};
|
||||||
|
|
||||||
|
use super::{AppContext, core::App, env::Environment};
|
||||||
|
use crate::{app::config::AppConfig, errors::RecorderResult};
|
||||||
|
|
||||||
|
#[derive(Parser, Debug)]
|
||||||
|
#[command(version, about, long_about = None)]
|
||||||
|
pub struct MainCliArgs {
|
||||||
|
/// Explicit config file path
|
||||||
|
#[arg(short, long)]
|
||||||
|
config_file: Option<String>,
|
||||||
|
|
||||||
|
/// Explicit dotenv file path
|
||||||
|
#[arg(short, long)]
|
||||||
|
dotenv_file: Option<String>,
|
||||||
|
|
||||||
|
/// Explicit working dir
|
||||||
|
#[arg(short, long)]
|
||||||
|
working_dir: Option<String>,
|
||||||
|
|
||||||
|
/// Explicit environment
|
||||||
|
#[arg(short, long)]
|
||||||
|
environment: Option<Environment>,
|
||||||
|
|
||||||
|
#[arg(long)]
|
||||||
|
graceful_shutdown: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct AppBuilder {
|
||||||
|
dotenv_file: Option<String>,
|
||||||
|
config_file: Option<String>,
|
||||||
|
working_dir: String,
|
||||||
|
environment: Environment,
|
||||||
|
pub graceful_shutdown: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AppBuilder {
|
||||||
|
pub async fn from_main_cli(environment: Option<Environment>) -> RecorderResult<Self> {
|
||||||
|
let args = MainCliArgs::parse();
|
||||||
|
|
||||||
|
let environment = environment.unwrap_or_else(|| {
|
||||||
|
args.environment.unwrap_or({
|
||||||
|
if cfg!(test) {
|
||||||
|
Environment::Testing
|
||||||
|
} else if cfg!(debug_assertions) {
|
||||||
|
Environment::Development
|
||||||
|
} else {
|
||||||
|
Environment::Production
|
||||||
|
}
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
let mut builder = Self::default();
|
||||||
|
|
||||||
|
if let Some(working_dir) = args.working_dir {
|
||||||
|
builder = builder.working_dir(working_dir);
|
||||||
|
}
|
||||||
|
if matches!(
|
||||||
|
&environment,
|
||||||
|
Environment::Testing | Environment::Development
|
||||||
|
) {
|
||||||
|
builder = builder.working_dir_from_manifest_dir();
|
||||||
|
}
|
||||||
|
|
||||||
|
builder = builder
|
||||||
|
.config_file(args.config_file)
|
||||||
|
.dotenv_file(args.dotenv_file)
|
||||||
|
.environment(environment)
|
||||||
|
.graceful_shutdown(args.graceful_shutdown.unwrap_or(true));
|
||||||
|
|
||||||
|
Ok(builder)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn build(self) -> RecorderResult<App> {
|
||||||
|
if self.working_dir != "." {
|
||||||
|
std::env::set_current_dir(&self.working_dir)?;
|
||||||
|
println!("set current dir to working dir: {}", self.working_dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
self.load_env().await?;
|
||||||
|
|
||||||
|
let config = self.load_config().await?;
|
||||||
|
|
||||||
|
let app_context =
|
||||||
|
AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?;
|
||||||
|
|
||||||
|
Ok(App {
|
||||||
|
context: app_context,
|
||||||
|
builder: self,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn load_env(&self) -> RecorderResult<()> {
|
||||||
|
AppConfig::load_dotenv(&self.environment, self.dotenv_file.as_deref()).await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn load_config(&self) -> RecorderResult<AppConfig> {
|
||||||
|
let config = AppConfig::load_config(&self.environment, self.config_file.as_deref()).await?;
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the working directory (builder-style, consumes and returns `self`).
pub fn working_dir(mut self, working_dir: String) -> Self {
    self.working_dir = working_dir;
    self
}
|
||||||
|
|
||||||
|
/// Set the runtime environment (builder-style, consumes and returns `self`).
pub fn environment(mut self, environment: Environment) -> Self {
    self.environment = environment;
    self
}
|
||||||
|
|
||||||
|
/// Set an explicit config file path; `None` keeps the directory-scan
/// discovery behavior.
pub fn config_file(mut self, config_file: Option<String>) -> Self {
    self.config_file = config_file;
    self
}
|
||||||
|
|
||||||
|
/// Toggle graceful shutdown on signal (builder-style).
pub fn graceful_shutdown(mut self, graceful_shutdown: bool) -> Self {
    self.graceful_shutdown = graceful_shutdown;
    self
}
|
||||||
|
|
||||||
|
/// Set an explicit dotenv file path; `None` keeps the directory-scan
/// discovery behavior.
pub fn dotenv_file(mut self, dotenv_file: Option<String>) -> Self {
    self.dotenv_file = dotenv_file;
    self
}
|
||||||
|
|
||||||
|
pub fn working_dir_from_manifest_dir(self) -> Self {
|
||||||
|
#[cfg(any(test, debug_assertions, feature = "test-utils"))]
|
||||||
|
let manifest_dir = env!("CARGO_MANIFEST_DIR");
|
||||||
|
|
||||||
|
#[cfg(not(any(test, debug_assertions, feature = "test-utils")))]
|
||||||
|
let manifest_dir = "./apps/recorder";
|
||||||
|
|
||||||
|
self.working_dir(manifest_dir.to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for AppBuilder {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
environment: Environment::Production,
|
||||||
|
dotenv_file: None,
|
||||||
|
config_file: None,
|
||||||
|
working_dir: String::from("."),
|
||||||
|
graceful_shutdown: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
31
apps/recorder/src/app/config/default_mixin.toml
Normal file
31
apps/recorder/src/app/config/default_mixin.toml
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# Compiled-in configuration defaults, merged first by
# `AppConfig::default_provider`; every file/env layer overrides these.

[storage]
data_dir = "./data"

[mikan]
base_url = "https://mikanani.me/"

# Rate limiting + retry defaults for the mikan HTTP client.
# NOTE(review): leaky_bucket_refill_interval looks like milliseconds
# (500) — confirm the unit against the http client config type.
[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 0
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500

# Empty tables so the keys exist even when nothing is configured.
[mikan.http_client.proxy]

[mikan.http_client.proxy.headers]

[graphql]
# TOML float infinity: effectively "no limit" by default.
# NOTE(review): assumes the deserializer maps +inf to an unlimited
# setting — confirm against GraphQLConfig.
depth_limit = inf
complexity_limit = inf

[cache]

[crypto]

[task]

[message]

[media]
|
||||||
323
apps/recorder/src/app/config/mod.rs
Normal file
323
apps/recorder/src/app/config/mod.rs
Normal file
@@ -0,0 +1,323 @@
|
|||||||
|
use std::{
|
||||||
|
collections::HashMap,
|
||||||
|
fs,
|
||||||
|
path::Path,
|
||||||
|
str::{self, FromStr},
|
||||||
|
};
|
||||||
|
|
||||||
|
use figment::{
|
||||||
|
Figment, Provider,
|
||||||
|
providers::{Env, Format, Json, Toml, Yaml},
|
||||||
|
};
|
||||||
|
use itertools::Itertools;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use super::env::Environment;
|
||||||
|
use crate::{
|
||||||
|
auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
|
||||||
|
errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
|
||||||
|
logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
|
||||||
|
task::TaskConfig, web::WebServerConfig,
|
||||||
|
};
|
||||||
|
|
||||||
|
const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
|
||||||
|
const CONFIG_ALLOWED_EXTENSIONS: &[&str] = &[".toml", ".json", ".yaml", ".yml"];
|
||||||
|
|
||||||
|
/// Root application configuration.
///
/// Populated by [`AppConfig::load_config`] from the compiled-in TOML
/// defaults, discovered config files, and `__`-separated environment
/// variables; each field is the config section of one subsystem.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppConfig {
    pub server: WebServerConfig,
    pub cache: CacheConfig,
    pub auth: AuthConfig,
    pub storage: StorageConfig,
    pub mikan: MikanConfig,
    pub crypto: CryptoConfig,
    pub graphql: GraphQLConfig,
    pub media: MediaConfig,
    pub logger: LoggerConfig,
    pub database: DatabaseConfig,
    pub task: TaskConfig,
    pub message: MessageConfig,
}
|
||||||
|
|
||||||
|
impl AppConfig {
|
||||||
|
pub fn config_prefix() -> String {
|
||||||
|
format!("{}.config", env!("CARGO_PKG_NAME"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Base filename stem for dotenv files (`.env`).
pub fn dotenv_prefix() -> String {
    ".env".to_owned()
}
|
||||||
|
|
||||||
|
pub fn allowed_extension() -> Vec<String> {
|
||||||
|
CONFIG_ALLOWED_EXTENSIONS
|
||||||
|
.iter()
|
||||||
|
.map(|s| s.to_string())
|
||||||
|
.collect_vec()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn priority_suffix(environment: &Environment) -> Vec<String> {
|
||||||
|
vec![
|
||||||
|
format!(".{}.local", environment.full_name()),
|
||||||
|
format!(".{}.local", environment.short_name()),
|
||||||
|
String::from(".local"),
|
||||||
|
format!(".{}", environment.full_name()),
|
||||||
|
format!(".{}", environment.short_name()),
|
||||||
|
String::from(""),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Base figment provider: the compiled-in TOML defaults
/// (`default_mixin.toml`), merged before any file or env layer.
pub fn default_provider() -> impl Provider {
    Toml::string(DEFAULT_CONFIG_MIXIN)
}
|
||||||
|
|
||||||
|
/// Build the tera engine used to pre-render config files, with two
/// custom filters:
/// - `cast_to(type=...)`: cast a string value to an explicit target
///   type (`str`/`bool`/`int`/`uint`/`float`/`null`), erroring when the
///   value does not parse as that type.
/// - `try_auto_cast`: best-effort inference — null, bool, i64, u64,
///   f64, falling back to the original string.
fn build_enhanced_tera_engine() -> tera::Tera {
    let mut tera = tera::Tera::default();
    tera.register_filter(
        "cast_to",
        |value: &tera::Value,
         args: &HashMap<String, tera::Value>|
         -> tera::Result<tera::Value> {
            // The target type must be given as a string arg: `type="int"`.
            let target_type = args
                .get("type")
                .and_then(|v| v.as_str())
                .ok_or_else(|| tera::Error::msg("invalid target type: should be string"))?;

            let target_type = TeraCastToFilterType::from_str(target_type)
                .map_err(|e| tera::Error::msg(format!("invalid target type: {e}")))?;

            // Non-string input values degrade to "" rather than erroring.
            let input_str = value.as_str().unwrap_or("");

            match target_type {
                TeraCastToFilterType::Boolean => {
                    // Case-insensitive; accepts true/false and 1/0.
                    let is_true = matches!(input_str.to_lowercase().as_str(), "true" | "1");
                    let is_false = matches!(input_str.to_lowercase().as_str(), "false" | "0");
                    if is_true {
                        Ok(tera::Value::Bool(true))
                    } else if is_false {
                        Ok(tera::Value::Bool(false))
                    } else {
                        Err(tera::Error::msg(
                            "target type is bool but value is not a boolean like true, false, \
                             1, 0",
                        ))
                    }
                }
                TeraCastToFilterType::Integer => {
                    let parsed = input_str.parse::<i64>().map_err(|e| {
                        tera::Error::call_filter("invalid integer".to_string(), e)
                    })?;
                    Ok(tera::Value::Number(serde_json::Number::from(parsed)))
                }
                TeraCastToFilterType::Unsigned => {
                    let parsed = input_str.parse::<u64>().map_err(|e| {
                        tera::Error::call_filter("invalid unsigned integer".to_string(), e)
                    })?;
                    Ok(tera::Value::Number(serde_json::Number::from(parsed)))
                }
                TeraCastToFilterType::Float => {
                    let parsed = input_str.parse::<f64>().map_err(|e| {
                        tera::Error::call_filter("invalid float".to_string(), e)
                    })?;
                    Ok(tera::Value::Number(
                        // NaN/inf have no JSON representation, hence fallible.
                        serde_json::Number::from_f64(parsed).ok_or_else(|| {
                            tera::Error::msg("failed to convert f64 to serde_json::Number")
                        })?,
                    ))
                }
                TeraCastToFilterType::String => Ok(tera::Value::String(input_str.to_string())),
                TeraCastToFilterType::Null => Ok(tera::Value::Null),
            }
        },
    );
    tera.register_filter(
        "try_auto_cast",
        |value: &tera::Value,
         _args: &HashMap<String, tera::Value>|
         -> tera::Result<tera::Value> {
            let input_str = value.as_str().unwrap_or("");

            if input_str == "null" {
                return Ok(tera::Value::Null);
            }

            if matches!(input_str, "true" | "false") {
                return Ok(tera::Value::Bool(input_str == "true"));
            }

            // Try progressively wider numeric types; i64 first, so
            // values that fit both i64 and u64 come out signed.
            if let Ok(parsed) = input_str.parse::<i64>() {
                return Ok(tera::Value::Number(serde_json::Number::from(parsed)));
            }

            if let Ok(parsed) = input_str.parse::<u64>() {
                return Ok(tera::Value::Number(serde_json::Number::from(parsed)));
            }

            if let Ok(parsed) = input_str.parse::<f64>() {
                return Ok(tera::Value::Number(
                    serde_json::Number::from_f64(parsed).ok_or_else(|| {
                        tera::Error::msg("failed to convert f64 to serde_json::Number")
                    })?,
                ));
            }

            // Nothing matched: pass the string through unchanged.
            Ok(tera::Value::String(input_str.to_string()))
        },
    );
    tera
}
|
||||||
|
|
||||||
|
pub fn merge_provider_from_file(
|
||||||
|
fig: Figment,
|
||||||
|
filepath: impl AsRef<Path>,
|
||||||
|
ext: &str,
|
||||||
|
) -> RecorderResult<Figment> {
|
||||||
|
let content = fs::read_to_string(filepath)?;
|
||||||
|
|
||||||
|
let mut tera_engine = AppConfig::build_enhanced_tera_engine();
|
||||||
|
let rendered =
|
||||||
|
tera_engine.render_str(&content, &tera::Context::from_value(serde_json::json!({}))?)?;
|
||||||
|
|
||||||
|
Ok(match ext {
|
||||||
|
".toml" => fig.merge(Toml::string(&rendered)),
|
||||||
|
".json" => fig.merge(Json::string(&rendered)),
|
||||||
|
".yaml" | ".yml" => fig.merge(Yaml::string(&rendered)),
|
||||||
|
_ => unreachable!("unsupported config extension"),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load environment variables from a dotenv file.
///
/// With an explicit `dotenv_file`, only that path is considered; it may
/// be either a file (loaded directly) or a directory (scanned).
/// Otherwise the current directory is scanned for `.env{suffix}` names
/// in priority order (see [`AppConfig::priority_suffix`]); the first
/// match wins and the scan stops.
pub async fn load_dotenv(
    environment: &Environment,
    dotenv_file: Option<&str>,
) -> RecorderResult<()> {
    let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
        vec![dotenv_file]
    } else {
        // Default: scan the current working directory.
        vec![Some(".")]
    };

    let priority_suffix = &AppConfig::priority_suffix(environment);
    let dotenv_prefix = AppConfig::dotenv_prefix();
    // Candidate names, most specific first: ".env.<env>.local", ".env.local", ...
    let try_filenames = priority_suffix
        .iter()
        .map(|ps| format!("{}{}", &dotenv_prefix, ps))
        .collect_vec();

    for try_dotenv_file_or_dir in try_dotenv_file_or_dirs.into_iter().flatten() {
        let try_dotenv_file_or_dir_path = Path::new(try_dotenv_file_or_dir);
        if try_dotenv_file_or_dir_path.exists() {
            if try_dotenv_file_or_dir_path.is_dir() {
                // Directory: load only the highest-priority existing file.
                for f in try_filenames.iter() {
                    let p = try_dotenv_file_or_dir_path.join(f);
                    if p.exists() && p.is_file() {
                        println!("Loading dotenv file: {}", p.display());
                        dotenvy::from_path(p)?;
                        break;
                    }
                }
            } else if try_dotenv_file_or_dir_path.is_file() {
                // Explicit file: load it as-is.
                println!(
                    "Loading dotenv file: {}",
                    try_dotenv_file_or_dir_path.display()
                );
                dotenvy::from_path(try_dotenv_file_or_dir_path)?;
                break;
            }
        }
    }

    Ok(())
}
|
||||||
|
|
||||||
|
/// Load and extract the full [`AppConfig`].
///
/// Layering order (later overrides earlier): compiled-in defaults,
/// then the first matching config file, then environment variables
/// with `__` as the section separator.
///
/// With an explicit `config_file`, only that path is considered (file
/// or directory); otherwise the current directory is scanned for
/// `<crate>.config{suffix}{ext}` names in priority order.
pub async fn load_config(
    environment: &Environment,
    config_file: Option<&str>,
) -> RecorderResult<AppConfig> {
    let try_config_file_or_dirs = if config_file.is_some() {
        vec![config_file]
    } else {
        // Default: scan the current working directory.
        vec![Some(".")]
    };

    let allowed_extensions = &AppConfig::allowed_extension();
    let priority_suffix = &AppConfig::priority_suffix(environment);
    let convention_prefix = &AppConfig::config_prefix();

    // Candidate (filename, extension) pairs, most specific suffix first;
    // the `ext` here carries the leading dot (".toml", ...).
    let try_filenames = priority_suffix
        .iter()
        .flat_map(|ps| {
            allowed_extensions
                .iter()
                .map(move |ext| (format!("{convention_prefix}{ps}{ext}"), ext))
        })
        .collect_vec();

    // Start from the compiled-in defaults; file/env layers merge on top.
    let mut fig = Figment::from(AppConfig::default_provider());

    for try_config_file_or_dir in try_config_file_or_dirs.into_iter().flatten() {
        let try_config_file_or_dir_path = Path::new(try_config_file_or_dir);
        if try_config_file_or_dir_path.exists() {
            if try_config_file_or_dir_path.is_dir() {
                // Directory: merge only the highest-priority existing file.
                for (f, ext) in try_filenames.iter() {
                    let p = try_config_file_or_dir_path.join(f);
                    if p.exists() && p.is_file() {
                        fig = AppConfig::merge_provider_from_file(fig, &p, ext)?;
                        println!("Loaded config file: {}", p.display());
                        break;
                    }
                }
            } else if let Some(ext) = try_config_file_or_dir_path
                .extension()
                .and_then(|s| s.to_str())
                && try_config_file_or_dir_path.is_file()
            {
                // NOTE(review): Path::extension() yields "toml" without a
                // leading dot, while the directory scan above passes
                // ".toml" — merge_provider_from_file must accept both
                // forms; verify it does.
                fig =
                    AppConfig::merge_provider_from_file(fig, try_config_file_or_dir_path, ext)?;
                println!(
                    "Loaded config file: {}",
                    try_config_file_or_dir_path.display()
                );
                break;
            }
        }
    }

    // Environment variables override everything; "DATABASE__URL" maps
    // to `database.url` (lowercased, split on "__").
    fig = fig.merge(Env::prefixed("").split("__").lowercase(true));

    let app_config: AppConfig = fig.extract()?;

    Ok(app_config)
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Target type accepted by the `cast_to` tera filter's `type` argument.
/// Each variant has a long and a short spelling (e.g. "integer"/"int").
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
enum TeraCastToFilterType {
    #[serde(alias = "str")]
    String,
    #[serde(alias = "bool")]
    Boolean,
    #[serde(alias = "int")]
    Integer,
    #[serde(alias = "uint")]
    Unsigned,
    #[serde(alias = "float")]
    Float,
    #[serde(alias = "null")]
    Null,
}
|
||||||
|
|
||||||
|
impl FromStr for TeraCastToFilterType {
|
||||||
|
type Err = String;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
match s {
|
||||||
|
"string" | "str" => Ok(TeraCastToFilterType::String),
|
||||||
|
"boolean" | "bool" => Ok(TeraCastToFilterType::Boolean),
|
||||||
|
"integer" | "int" => Ok(TeraCastToFilterType::Integer),
|
||||||
|
"unsigned" | "uint" => Ok(TeraCastToFilterType::Unsigned),
|
||||||
|
"float" => Ok(TeraCastToFilterType::Float),
|
||||||
|
"null" => Ok(TeraCastToFilterType::Null),
|
||||||
|
_ => Err(format!("invalid target type: {s}")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
147
apps/recorder/src/app/context.rs
Normal file
147
apps/recorder/src/app/context.rs
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
use std::{fmt::Debug, sync::Arc};
|
||||||
|
|
||||||
|
use tokio::sync::OnceCell;
|
||||||
|
|
||||||
|
use super::{Environment, config::AppConfig};
|
||||||
|
use crate::{
|
||||||
|
auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
|
||||||
|
errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
|
||||||
|
logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
|
||||||
|
task::TaskService,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Read-only accessor interface over the application's shared services,
/// passed around as `Arc<dyn AppContextTrait>`.
// NOTE(review): `working_dir` returning `&String` would more
// idiomatically be `&str`, but changing it breaks every implementor —
// leave as-is.
pub trait AppContextTrait: Send + Sync + Debug {
    fn logger(&self) -> &LoggerService;
    fn db(&self) -> &DatabaseService;
    fn config(&self) -> &AppConfig;
    fn cache(&self) -> &CacheService;
    fn mikan(&self) -> &MikanClient;
    fn auth(&self) -> &AuthService;
    fn graphql(&self) -> &GraphQLService;
    fn storage(&self) -> &StorageService;
    fn working_dir(&self) -> &String;
    fn environment(&self) -> &Environment;
    fn crypto(&self) -> &CryptoService;
    fn task(&self) -> &TaskService;
    fn message(&self) -> &MessageService;
    fn media(&self) -> &MediaService;
}
|
||||||
|
|
||||||
|
/// Concrete service container behind [`AppContextTrait`].
///
/// Most services are constructed eagerly in [`AppContext::new`];
/// `task` and `graphql` live in `OnceCell`s because their constructors
/// need an `Arc` to this context, so they are filled in a second phase.
pub struct AppContext {
    logger: LoggerService,
    db: DatabaseService,
    config: AppConfig,
    cache: CacheService,
    mikan: MikanClient,
    auth: AuthService,
    storage: StorageService,
    crypto: CryptoService,
    working_dir: String,
    environment: Environment,
    message: MessageService,
    media: MediaService,
    // Deferred: initialized after the Arc exists (see AppContext::new).
    task: OnceCell<TaskService>,
    graphql: OnceCell<GraphQLService>,
}
|
||||||
|
|
||||||
|
impl AppContext {
    /// Construct the full service graph from config.
    ///
    /// Two-phase init: every service that only needs its config section
    /// is built first; `task` and `graphql` additionally need an
    /// `Arc<AppContext>`, so the context is allocated with empty
    /// `OnceCell`s and those two are filled afterwards via
    /// `get_or_try_init` with a clone of the Arc.
    pub async fn new(
        environment: Environment,
        config: AppConfig,
        working_dir: impl ToString,
    ) -> RecorderResult<Arc<Self>> {
        // Keep a full copy for the `config()` accessor; the original is
        // consumed section-by-section below.
        let config_cloned = config.clone();

        let logger = LoggerService::from_config(config.logger).await?;
        let cache = CacheService::from_config(config.cache).await?;
        let db = DatabaseService::from_config(config.database).await?;
        let storage = StorageService::from_config(config.storage).await?;
        let message = MessageService::from_config(config.message).await?;
        let auth = AuthService::from_conf(config.auth).await?;
        let mikan = MikanClient::from_config(config.mikan).await?;
        let crypto = CryptoService::from_config(config.crypto).await?;
        let media = MediaService::from_config(config.media).await?;

        let ctx = Arc::new(AppContext {
            config: config_cloned,
            environment,
            logger,
            auth,
            cache,
            db,
            storage,
            mikan,
            working_dir: working_dir.to_string(),
            crypto,
            message,
            media,
            task: OnceCell::new(),
            graphql: OnceCell::new(),
        });

        // Phase 2: services that hold an Arc back to the context.
        ctx.task
            .get_or_try_init(async || {
                TaskService::from_config_and_ctx(config.task, ctx.clone()).await
            })
            .await?;

        ctx.graphql
            .get_or_try_init(async || {
                GraphQLService::from_config_and_ctx(config.graphql, ctx.clone()).await
            })
            .await?;

        Ok(ctx)
    }
}
|
||||||
|
|
||||||
|
impl Debug for AppContext {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
write!(f, "AppContext")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AppContextTrait for AppContext {
|
||||||
|
fn logger(&self) -> &LoggerService {
|
||||||
|
&self.logger
|
||||||
|
}
|
||||||
|
fn db(&self) -> &DatabaseService {
|
||||||
|
&self.db
|
||||||
|
}
|
||||||
|
fn config(&self) -> &AppConfig {
|
||||||
|
&self.config
|
||||||
|
}
|
||||||
|
fn cache(&self) -> &CacheService {
|
||||||
|
&self.cache
|
||||||
|
}
|
||||||
|
fn mikan(&self) -> &MikanClient {
|
||||||
|
&self.mikan
|
||||||
|
}
|
||||||
|
fn auth(&self) -> &AuthService {
|
||||||
|
&self.auth
|
||||||
|
}
|
||||||
|
fn graphql(&self) -> &GraphQLService {
|
||||||
|
self.graphql.get().expect("graphql should be set")
|
||||||
|
}
|
||||||
|
fn storage(&self) -> &StorageService {
|
||||||
|
&self.storage
|
||||||
|
}
|
||||||
|
fn working_dir(&self) -> &String {
|
||||||
|
&self.working_dir
|
||||||
|
}
|
||||||
|
fn environment(&self) -> &Environment {
|
||||||
|
&self.environment
|
||||||
|
}
|
||||||
|
fn crypto(&self) -> &CryptoService {
|
||||||
|
&self.crypto
|
||||||
|
}
|
||||||
|
fn task(&self) -> &TaskService {
|
||||||
|
self.task.get().expect("task should be set")
|
||||||
|
}
|
||||||
|
fn message(&self) -> &MessageService {
|
||||||
|
&self.message
|
||||||
|
}
|
||||||
|
fn media(&self) -> &MediaService {
|
||||||
|
&self.media
|
||||||
|
}
|
||||||
|
}
|
||||||
160
apps/recorder/src/app/core.rs
Normal file
160
apps/recorder/src/app/core.rs
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
use std::{net::SocketAddr, sync::Arc};
|
||||||
|
|
||||||
|
use axum::{Router, middleware::from_fn_with_state};
|
||||||
|
use tokio::{net::TcpSocket, signal};
|
||||||
|
use tower_http::services::{ServeDir, ServeFile};
|
||||||
|
use tracing::instrument;
|
||||||
|
|
||||||
|
use super::{builder::AppBuilder, context::AppContextTrait};
|
||||||
|
use crate::{
|
||||||
|
auth::webui_auth_middleware,
|
||||||
|
errors::{RecorderError, RecorderResult},
|
||||||
|
web::{
|
||||||
|
controller::{self, core::ControllerTrait},
|
||||||
|
middleware::default_middleware_stack,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const PROJECT_NAME: &str = "konobangu";
|
||||||
|
|
||||||
|
/// Runnable application: the shared service context plus the builder
/// it was created from (kept for runtime options like
/// `graceful_shutdown`, read in `serve`).
pub struct App {
    pub context: Arc<dyn AppContextTrait>,
    pub builder: AppBuilder,
}
|
||||||
|
|
||||||
|
impl App {
|
||||||
|
pub fn builder() -> AppBuilder {
|
||||||
|
AppBuilder::default()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Bind the HTTP listener, assemble the router, and run the web server
/// and the background task service concurrently until shutdown.
///
/// Returns when both the server and the task runner have stopped; the
/// first error from either aborts the other via `try_join!`.
#[instrument(err, skip(self))]
pub async fn serve(&self) -> RecorderResult<()> {
    let context = &self.context;
    let config = context.config();

    let listener = {
        let addr: SocketAddr =
            format!("{}:{}", config.server.binding, config.server.port).parse()?;

        let socket = if addr.is_ipv4() {
            TcpSocket::new_v4()
        } else {
            TcpSocket::new_v6()
        }?;

        // Allow fast restarts on the same address.
        socket.set_reuseaddr(true)?;

        // SO_REUSEPORT is best-effort: warn and continue if the
        // platform refuses it.
        #[cfg(all(unix, not(target_os = "solaris")))]
        if let Err(e) = socket.set_reuseport(true) {
            tracing::warn!("Failed to set SO_REUSEPORT: {}", e);
        }

        socket.bind(addr)?;
        socket.listen(1024)
    }?;

    let mut router = Router::<Arc<dyn AppContextTrait>>::new();

    // Controllers are created concurrently, then mounted in order.
    let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
        controller::graphql::create(context.clone()),
        controller::oidc::create(context.clone()),
        controller::metadata::create(context.clone()),
        controller::r#static::create(context.clone()),
        controller::feeds::create(context.clone())
    )?;

    for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
        router = c.apply_to(router);
    }

    // SPA fallback: unmatched routes serve the webui bundle, with
    // index.html as the 404 target, behind the webui auth middleware.
    router = router
        .fallback_service(
            ServeDir::new("webui").not_found_service(ServeFile::new("webui/index.html")),
        )
        .layer(from_fn_with_state(context.clone(), webui_auth_middleware));

    let middlewares = default_middleware_stack(context.clone());
    for mid in middlewares {
        if mid.is_enabled() {
            router = mid.apply(router)?;
            tracing::info!(name = mid.name(), "+middleware");
        }
    }

    let router = router
        .with_state(context.clone())
        .into_make_service_with_connect_info::<SocketAddr>();

    let task = context.task();

    let graceful_shutdown = self.builder.graceful_shutdown;

    // Server and task runner share the same shutdown signal when
    // graceful shutdown is enabled.
    tokio::try_join!(
        async {
            let axum_serve = axum::serve(listener, router);

            if graceful_shutdown {
                axum_serve
                    .with_graceful_shutdown(async move {
                        Self::shutdown_signal().await;
                        tracing::info!("axum shutting down...");
                    })
                    .await?;
            } else {
                axum_serve.await?;
            }

            Ok::<(), RecorderError>(())
        },
        async {
            task.run_with_signal(if graceful_shutdown {
                Some(Self::shutdown_signal)
            } else {
                None
            })
            .await?;

            Ok::<(), RecorderError>(())
        }
    )?;

    Ok(())
}
|
||||||
|
|
||||||
|
/// Resolve when a shutdown signal arrives: Ctrl+C everywhere, plus
/// SIGTERM on unix, plus SIGQUIT on unix debug builds. Signals that do
/// not apply to the current target are replaced by never-resolving
/// futures so the `select!` shape stays uniform.
async fn shutdown_signal() {
    let ctrl_c = async {
        signal::ctrl_c()
            .await
            .expect("failed to install Ctrl+C handler");
    };

    #[cfg(unix)]
    let terminate = async {
        signal::unix::signal(signal::unix::SignalKind::terminate())
            .expect("failed to install signal handler")
            .recv()
            .await;
    };

    // Debug-only: SIGQUIT also triggers shutdown, with a console note.
    #[cfg(all(unix, debug_assertions))]
    let quit = async {
        signal::unix::signal(signal::unix::SignalKind::quit())
            .expect("Failed to install SIGQUIT handler")
            .recv()
            .await;
        println!("Received SIGQUIT");
    };

    #[cfg(not(unix))]
    let terminate = std::future::pending::<()>();

    #[cfg(not(all(unix, debug_assertions)))]
    let quit = std::future::pending::<()>();

    tokio::select! {
        () = ctrl_c => {},
        () = terminate => {},
        () = quit => {},
    }
}
|
||||||
|
}
|
||||||
35
apps/recorder/src/app/env.rs
Normal file
35
apps/recorder/src/app/env.rs
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
use clap::ValueEnum;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// Runtime environment, selectable from config/serde and from the CLI
/// (`ValueEnum`); each variant accepts its short alias ("dev", "prod",
/// "test") in both contexts.
#[derive(Debug, Clone, Serialize, Deserialize, ValueEnum)]
#[serde(rename_all = "snake_case")]
#[value(rename_all = "snake_case")]
pub enum Environment {
    #[serde(alias = "dev")]
    #[value(alias = "dev")]
    Development,
    #[serde(alias = "prod")]
    #[value(alias = "prod")]
    Production,
    #[serde(alias = "test")]
    #[value(alias = "test")]
    Testing,
}
|
||||||
|
|
||||||
|
impl Environment {
|
||||||
|
pub fn full_name(&self) -> &'static str {
|
||||||
|
match &self {
|
||||||
|
Self::Development => "development",
|
||||||
|
Self::Production => "production",
|
||||||
|
Self::Testing => "testing",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn short_name(&self) -> &'static str {
|
||||||
|
match &self {
|
||||||
|
Self::Development => "dev",
|
||||||
|
Self::Production => "prod",
|
||||||
|
Self::Testing => "test",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
12
apps/recorder/src/app/mod.rs
Normal file
12
apps/recorder/src/app/mod.rs
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
pub mod builder;
|
||||||
|
pub mod config;
|
||||||
|
pub mod context;
|
||||||
|
pub mod core;
|
||||||
|
pub mod env;
|
||||||
|
|
||||||
|
pub use core::{App, PROJECT_NAME};
|
||||||
|
|
||||||
|
pub use builder::AppBuilder;
|
||||||
|
pub use config::AppConfig;
|
||||||
|
pub use context::{AppContext, AppContextTrait};
|
||||||
|
pub use env::Environment;
|
||||||
@@ -1,14 +1,17 @@
|
|||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use axum::http::request::Parts;
|
use axum::http::{HeaderValue, request::Parts};
|
||||||
use base64::{self, Engine};
|
use base64::{self, Engine};
|
||||||
use reqwest::header::AUTHORIZATION;
|
use http::header::AUTHORIZATION;
|
||||||
|
|
||||||
use super::{
|
use super::{
|
||||||
config::BasicAuthConfig,
|
config::BasicAuthConfig,
|
||||||
errors::AuthError,
|
errors::AuthError,
|
||||||
service::{AuthService, AuthUserInfo},
|
service::{AuthServiceTrait, AuthUserInfo},
|
||||||
|
};
|
||||||
|
use crate::{
|
||||||
|
app::{AppContextTrait, PROJECT_NAME},
|
||||||
|
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
|
||||||
};
|
};
|
||||||
use crate::models::{auth::AuthType, subscribers::SEED_SUBSCRIBER};
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
pub struct AuthBasic {
|
pub struct AuthBasic {
|
||||||
@@ -22,12 +25,12 @@ impl AuthBasic {
|
|||||||
.headers
|
.headers
|
||||||
.get(AUTHORIZATION)
|
.get(AUTHORIZATION)
|
||||||
.and_then(|s| s.to_str().ok())
|
.and_then(|s| s.to_str().ok())
|
||||||
.ok_or_else(|| AuthError::BasicInvalidCredentials)?;
|
.ok_or(AuthError::BasicInvalidCredentials)?;
|
||||||
|
|
||||||
let split = authorization.split_once(' ');
|
let split = authorization.split_once(' ');
|
||||||
|
|
||||||
match split {
|
match split {
|
||||||
Some((name, contents)) if name == "Basic" => {
|
Some(("Basic", contents)) => {
|
||||||
let decoded = base64::engine::general_purpose::STANDARD
|
let decoded = base64::engine::general_purpose::STANDARD
|
||||||
.decode(contents)
|
.decode(contents)
|
||||||
.map_err(|_| AuthError::BasicInvalidCredentials)?;
|
.map_err(|_| AuthError::BasicInvalidCredentials)?;
|
||||||
@@ -58,22 +61,35 @@ pub struct BasicAuthService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
impl AuthService for BasicAuthService {
|
impl AuthServiceTrait for BasicAuthService {
|
||||||
async fn extract_user_info(&self, request: &mut Parts) -> Result<AuthUserInfo, AuthError> {
|
async fn extract_user_info(
|
||||||
|
&self,
|
||||||
|
ctx: &dyn AppContextTrait,
|
||||||
|
request: &mut Parts,
|
||||||
|
) -> Result<AuthUserInfo, AuthError> {
|
||||||
if let Ok(AuthBasic {
|
if let Ok(AuthBasic {
|
||||||
user: found_user,
|
user: found_user,
|
||||||
password: found_password,
|
password: found_password,
|
||||||
}) = AuthBasic::decode_request_parts(request)
|
}) = AuthBasic::decode_request_parts(request)
|
||||||
{
|
&& self.config.user == found_user
|
||||||
if self.config.user == found_user
|
|
||||||
&& self.config.password == found_password.unwrap_or_default()
|
&& self.config.password == found_password.unwrap_or_default()
|
||||||
{
|
{
|
||||||
|
let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
|
||||||
|
.await
|
||||||
|
.map_err(|_| AuthError::FindAuthRecordError)?;
|
||||||
return Ok(AuthUserInfo {
|
return Ok(AuthUserInfo {
|
||||||
user_pid: SEED_SUBSCRIBER.to_string(),
|
subscriber_auth,
|
||||||
auth_type: AuthType::Basic,
|
auth_type: AuthType::Basic,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
|
||||||
Err(AuthError::BasicInvalidCredentials)
|
Err(AuthError::BasicInvalidCredentials)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
|
||||||
|
Some(HeaderValue::from_str(format!("Basic realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn auth_type(&self) -> AuthType {
|
||||||
|
AuthType::Basic
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,8 @@
|
|||||||
use jwt_authorizer::OneOrArray;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use jwtk::OneOrMany;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_with::serde_as;
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
pub struct BasicAuthConfig {
|
pub struct BasicAuthConfig {
|
||||||
@@ -9,23 +12,26 @@ pub struct BasicAuthConfig {
|
|||||||
pub password: String,
|
pub password: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[serde_as]
|
||||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||||
pub struct OidcAuthConfig {
|
pub struct OidcAuthConfig {
|
||||||
#[serde(rename = "oidc_api_issuer")]
|
#[serde(rename = "oidc_issuer")]
|
||||||
pub issuer: String,
|
pub issuer: String,
|
||||||
#[serde(rename = "oidc_api_audience")]
|
#[serde(rename = "oidc_audience")]
|
||||||
pub audience: String,
|
pub audience: String,
|
||||||
|
#[serde(rename = "oidc_client_id")]
|
||||||
|
pub client_id: String,
|
||||||
|
#[serde(rename = "oidc_client_secret")]
|
||||||
|
pub client_secret: String,
|
||||||
#[serde(rename = "oidc_extra_scopes")]
|
#[serde(rename = "oidc_extra_scopes")]
|
||||||
pub extra_scopes: Option<OneOrArray<String>>,
|
pub extra_scopes: Option<OneOrMany<String>>,
|
||||||
#[serde(rename = "oidc_extra_claim_key")]
|
#[serde(rename = "oidc_extra_claims")]
|
||||||
pub extra_claim_key: Option<String>,
|
pub extra_claims: Option<HashMap<String, Option<String>>>,
|
||||||
#[serde(rename = "oidc_extra_claim_value")]
|
|
||||||
pub extra_claim_value: Option<String>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||||
#[serde(tag = "auth_type", rename_all = "snake_case")]
|
#[serde(tag = "auth_type", rename_all = "snake_case")]
|
||||||
pub enum AppAuthConfig {
|
pub enum AuthConfig {
|
||||||
Basic(BasicAuthConfig),
|
Basic(BasicAuthConfig),
|
||||||
Oidc(OidcAuthConfig),
|
Oidc(OidcAuthConfig),
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,36 +1,142 @@
|
|||||||
|
use async_graphql::dynamic::ResolverContext;
|
||||||
use axum::{
|
use axum::{
|
||||||
|
Json,
|
||||||
http::StatusCode,
|
http::StatusCode,
|
||||||
response::{IntoResponse, Response},
|
response::{IntoResponse, Response},
|
||||||
Json,
|
|
||||||
};
|
};
|
||||||
use thiserror::Error;
|
use fetch::HttpClientError;
|
||||||
|
use openidconnect::{
|
||||||
|
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
|
||||||
|
StandardErrorResponse, core::CoreErrorResponseType,
|
||||||
|
};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use snafu::prelude::*;
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
use crate::models::auth::AuthType;
|
||||||
|
|
||||||
|
#[derive(Debug, Snafu)]
|
||||||
|
#[snafu(visibility(pub(crate)))]
|
||||||
pub enum AuthError {
|
pub enum AuthError {
|
||||||
#[error(transparent)]
|
#[snafu(display("Permission denied"))]
|
||||||
OidcInitError(#[from] jwt_authorizer::error::InitError),
|
PermissionError,
|
||||||
#[error("Invalid credentials")]
|
#[snafu(display("Not support auth method"))]
|
||||||
|
NotSupportAuthMethod {
|
||||||
|
supported: Vec<AuthType>,
|
||||||
|
current: AuthType,
|
||||||
|
},
|
||||||
|
#[snafu(display("Failed to find auth record"))]
|
||||||
|
FindAuthRecordError,
|
||||||
|
#[snafu(display("Invalid credentials"))]
|
||||||
BasicInvalidCredentials,
|
BasicInvalidCredentials,
|
||||||
#[error(transparent)]
|
#[snafu(display("Invalid oidc provider meta client error: {source}"))]
|
||||||
OidcJwtAuthError(#[from] jwt_authorizer::AuthError),
|
OidcProviderHttpClientError { source: HttpClientError },
|
||||||
#[error("Extra scopes {expected} do not match found scopes {found}")]
|
#[snafu(transparent)]
|
||||||
|
OidcProviderMetaError {
|
||||||
|
source: openidconnect::DiscoveryError<HttpClientError>,
|
||||||
|
},
|
||||||
|
#[snafu(display("Invalid oidc provider URL: {source}"))]
|
||||||
|
OidcProviderUrlError { source: url::ParseError },
|
||||||
|
#[snafu(display("Invalid oidc redirect URI: {source}"))]
|
||||||
|
OidcRequestRedirectUriError {
|
||||||
|
#[snafu(source)]
|
||||||
|
source: url::ParseError,
|
||||||
|
},
|
||||||
|
#[snafu(display("Oidc request session not found or expired"))]
|
||||||
|
OidcCallbackRecordNotFoundOrExpiredError,
|
||||||
|
#[snafu(display("Invalid oidc request callback nonce"))]
|
||||||
|
OidcInvalidNonceError,
|
||||||
|
#[snafu(display("Invalid oidc request callback state"))]
|
||||||
|
OidcInvalidStateError,
|
||||||
|
#[snafu(display("Invalid oidc request callback code"))]
|
||||||
|
OidcInvalidCodeError,
|
||||||
|
#[snafu(transparent)]
|
||||||
|
OidcCallbackTokenConfigurationError { source: ConfigurationError },
|
||||||
|
#[snafu(transparent)]
|
||||||
|
OidcRequestTokenError {
|
||||||
|
source: RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
|
||||||
|
},
|
||||||
|
#[snafu(display("Invalid oidc id token"))]
|
||||||
|
OidcInvalidIdTokenError,
|
||||||
|
#[snafu(display("Invalid oidc access token"))]
|
||||||
|
OidcInvalidAccessTokenError,
|
||||||
|
#[snafu(transparent)]
|
||||||
|
OidcSignatureVerificationError { source: SignatureVerificationError },
|
||||||
|
#[snafu(transparent)]
|
||||||
|
OidcSigningError { source: SigningError },
|
||||||
|
#[snafu(display("Missing Bearer token"))]
|
||||||
|
OidcMissingBearerToken,
|
||||||
|
#[snafu(transparent)]
|
||||||
|
OidcJwtkError { source: jwtk::Error },
|
||||||
|
#[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
|
||||||
OidcExtraScopesMatchError { expected: String, found: String },
|
OidcExtraScopesMatchError { expected: String, found: String },
|
||||||
#[error("Extra claim {key} does not match expected value {expected}, found {found}")]
|
#[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
|
||||||
OidcExtraClaimMatchError {
|
OidcExtraClaimMatchError {
|
||||||
key: String,
|
key: String,
|
||||||
expected: String,
|
expected: String,
|
||||||
found: String,
|
found: String,
|
||||||
},
|
},
|
||||||
#[error("Extra claim {0} missing")]
|
#[snafu(display("Extra claim {claim} missing"))]
|
||||||
OidcExtraClaimMissingError(String),
|
OidcExtraClaimMissingError { claim: String },
|
||||||
#[error("Audience {0} missing")]
|
#[snafu(display("Audience {aud} missing"))]
|
||||||
OidcAudMissingError(String),
|
OidcAudMissingError { aud: String },
|
||||||
#[error("Subject missing")]
|
#[snafu(display("Subject missing"))]
|
||||||
OidcSubMissingError,
|
OidcSubMissingError,
|
||||||
|
#[snafu(display(
|
||||||
|
"GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
|
||||||
|
(if field.is_empty() { "" } else { "." }),
|
||||||
|
(if column.is_empty() { "" } else { "." }),
|
||||||
|
source.message
|
||||||
|
))]
|
||||||
|
GraphqlDynamicPermissionError {
|
||||||
|
#[snafu(source(false))]
|
||||||
|
source: Box<async_graphql::Error>,
|
||||||
|
field: String,
|
||||||
|
column: String,
|
||||||
|
context_path: String,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AuthError {
|
||||||
|
pub fn from_graphql_dynamic_subscribe_id_guard(
|
||||||
|
source: async_graphql::Error,
|
||||||
|
context: &ResolverContext,
|
||||||
|
field_name: &str,
|
||||||
|
column_name: &str,
|
||||||
|
) -> AuthError {
|
||||||
|
AuthError::GraphqlDynamicPermissionError {
|
||||||
|
source: Box::new(source),
|
||||||
|
field: field_name.to_string(),
|
||||||
|
column: column_name.to_string(),
|
||||||
|
context_path: context
|
||||||
|
.ctx
|
||||||
|
.path_node
|
||||||
|
.map(|p| p.to_string_vec().join(""))
|
||||||
|
.unwrap_or_default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||||
|
pub struct AuthErrorResponse {
|
||||||
|
pub success: bool,
|
||||||
|
pub message: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<AuthError> for AuthErrorResponse {
|
||||||
|
fn from(value: AuthError) -> Self {
|
||||||
|
AuthErrorResponse {
|
||||||
|
success: false,
|
||||||
|
message: value.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl IntoResponse for AuthError {
|
impl IntoResponse for AuthError {
|
||||||
fn into_response(self) -> Response {
|
fn into_response(self) -> Response {
|
||||||
(StatusCode::UNAUTHORIZED, Json(self.to_string())).into_response()
|
(
|
||||||
|
StatusCode::UNAUTHORIZED,
|
||||||
|
Json(AuthErrorResponse::from(self)),
|
||||||
|
)
|
||||||
|
.into_response()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
77
apps/recorder/src/auth/middleware.rs
Normal file
77
apps/recorder/src/auth/middleware.rs
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use axum::{
|
||||||
|
extract::{Request, State},
|
||||||
|
http::header,
|
||||||
|
middleware::Next,
|
||||||
|
response::{IntoResponse, Response},
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
app::AppContextTrait,
|
||||||
|
auth::{AuthService, AuthServiceTrait},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub async fn auth_middleware(
|
||||||
|
State(ctx): State<Arc<dyn AppContextTrait>>,
|
||||||
|
request: Request,
|
||||||
|
next: Next,
|
||||||
|
) -> Response {
|
||||||
|
let auth_service = ctx.auth();
|
||||||
|
|
||||||
|
let (mut parts, body) = request.into_parts();
|
||||||
|
|
||||||
|
let mut response = match auth_service
|
||||||
|
.extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(auth_user_info) => {
|
||||||
|
let mut request = Request::from_parts(parts, body);
|
||||||
|
request.extensions_mut().insert(auth_user_info);
|
||||||
|
next.run(request).await
|
||||||
|
}
|
||||||
|
Err(auth_error) => auth_error.into_response(),
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(header_value) = auth_service.www_authenticate_header_value() {
|
||||||
|
response
|
||||||
|
.headers_mut()
|
||||||
|
.insert(header::WWW_AUTHENTICATE, header_value);
|
||||||
|
};
|
||||||
|
|
||||||
|
response
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn webui_auth_middleware(
|
||||||
|
State(ctx): State<Arc<dyn AppContextTrait>>,
|
||||||
|
request: Request,
|
||||||
|
next: Next,
|
||||||
|
) -> Response {
|
||||||
|
if (!request.uri().path().starts_with("/api"))
|
||||||
|
&& let AuthService::Basic(auth_service) = ctx.auth()
|
||||||
|
{
|
||||||
|
let (mut parts, body) = request.into_parts();
|
||||||
|
|
||||||
|
let mut response = match auth_service
|
||||||
|
.extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(auth_user_info) => {
|
||||||
|
let mut request = Request::from_parts(parts, body);
|
||||||
|
request.extensions_mut().insert(auth_user_info);
|
||||||
|
next.run(request).await
|
||||||
|
}
|
||||||
|
Err(auth_error) => auth_error.into_response(),
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(header_value) = auth_service.www_authenticate_header_value() {
|
||||||
|
response
|
||||||
|
.headers_mut()
|
||||||
|
.insert(header::WWW_AUTHENTICATE, header_value);
|
||||||
|
};
|
||||||
|
|
||||||
|
response
|
||||||
|
} else {
|
||||||
|
next.run(request).await
|
||||||
|
}
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user