Compare commits

...

104 Commits

Author SHA1 Message Date
9fd3ae6563 feat: basic support rss 2025-06-24 06:37:19 +08:00
cde3361458 feat: add new test resource mikan classic episodes tiny.parquet 2025-06-23 03:07:58 +08:00
f055011b86 feat: add rss feeds and episode enclosure 2025-06-22 01:04:23 +08:00
16429a44b4 fix: fix missing 2025-06-21 03:25:22 +08:00
fe0b7e88e6 feat: classic episodes scraper 2025-06-21 03:21:58 +08:00
28dd9da6ac fix: fix typo 2025-06-20 02:05:23 +08:00
02c16a2972 feat: support optimize images 2025-06-20 01:56:34 +08:00
324427513c refactor: rewrite origin name extractor from regex to nom combinators 2025-06-19 02:37:56 +08:00
c12b9b360a feat: static server support etag 2025-06-18 04:42:33 +08:00
cc06142050 fix: fix middlewares config 2025-06-18 03:09:10 +08:00
6726cafff4 feat: support static server 2025-06-18 02:19:42 +08:00
35312ea1ff fix: fix issues 2025-06-17 02:23:02 +08:00
721eee9c88 fix: fix issues 2025-06-16 08:01:02 +08:00
421f9d0293 feat: task ui & custom filter mutation 2025-06-16 07:56:52 +08:00
7eb4e41708 feat: try views and seaography 2025-06-15 05:02:23 +08:00
a2254bbe80 fix: fix auto accessToken renew 2025-06-15 02:48:48 +08:00
1b5bdadf10 fix: fix tasks 2025-06-14 22:30:58 +08:00
882b29d7a1 feat: task ui basic done 2025-06-13 04:02:01 +08:00
c60f6f511e feat: remove turbo 2025-06-13 00:09:18 +08:00
07955286f1 feat: add tasks manage view 2025-06-12 03:32:18 +08:00
258eeddc74 refactor: refactor graphql 2025-06-12 00:15:26 +08:00
b09e9e6aaa fix: update webui graphql schema 2025-06-11 04:01:00 +08:00
0df371adb7 fix: fix subscription and mikan doppel 2025-05-11 03:41:02 +08:00
8144986a48 fix: fix subscriptions api 2025-05-10 02:31:58 +08:00
d2aab7369d fix: add sync subscription webui and check credential web ui 2025-06-08 00:36:59 +08:00
946d4e8c2c feat: add subscription detail & edit page 2025-06-07 02:50:14 +08:00
0b5f25a263 fix: fix credential 3rd error 2025-06-06 01:58:19 +08:00
c669d66969 fix: just dev-all support windows 2025-06-05 02:44:23 +08:00
082e08e7f4 feat: add tui temp 2025-06-04 01:09:18 +08:00
a3fd03d32a refactor: refactor subscriptions 2025-06-03 02:21:49 +08:00
5645645c5f fix: fix table horizontal scroll and collapsed sidebar 2025-05-01 20:49:42 +08:00
ac7d1efb8d feat: support server port reuse 2025-05-31 01:59:04 +08:00
a676061b3e fix: fix testcases 2025-05-29 02:01:36 +08:00
1c34cebbde fix: fix testcases 2025-05-27 01:01:05 +08:00
22a2ce0559 fix: fix testsuite 2025-05-26 02:44:46 +08:00
313b1bf1ba fix: fix credential3rd graphql 2025-05-25 00:04:02 +08:00
66413f92e3 fix: fix credential3rd graphql 2025-05-24 02:32:02 +08:00
0fcbc6bbe9 feat: alter unsafe packages 2025-05-23 02:54:53 +08:00
f1d8318500 fix: fix graphql 2025-05-22 02:11:16 +08:00
b2f327d48f feat: refactor tasks 2025-05-20 01:23:13 +08:00
b772937354 feat: json filter input done mainly 2025-05-19 02:27:23 +08:00
a3b9543d0e refactor: continue 2025-05-16 01:02:17 +08:00
d0a423df9f refactor: continue 2025-05-15 03:03:36 +08:00
8600bf216a refactor: continue 2025-05-14 02:01:59 +08:00
bf270e4e87 refactor: continue 2025-05-13 01:23:59 +08:00
760cb2344e refactor: continue 2025-05-12 08:11:11 +08:00
ed2c1038e6 refactor: refactor subscription 2025-05-11 01:41:11 +08:00
d4bdc677a9 feat: more task system 2025-05-10 02:34:11 +08:00
9d58d961bd feat: add task system 2025-05-09 00:56:26 +08:00
791b75b3af test: add mikan client login test 2025-05-07 02:15:46 +08:00
a7f52fe0eb fix: fix scrape mikan season bangumi list 2025-05-06 02:23:17 +08:00
439353d318 fix: fix some issues 2025-05-05 01:02:58 +08:00
f245a68790 fix: fix some issues 2025-05-04 03:59:59 +08:00
3fe0538468 feature: add new mikan scrapers 2025-05-03 04:23:33 +08:00
dbded94324 feature: rewrite season subscription extractor 2025-05-02 02:23:23 +08:00
4301f1dbab feature: add subscription manage 2025-04-30 01:59:14 +08:00
9fdb778330 feature: add graphql codegen 2025-04-29 02:22:06 +08:00
0300d7baf6 feature: add mutation input object transformer 2025-04-28 02:44:16 +08:00
ee1b1ae5e6 refactor: refactor webui 2025-04-26 01:43:23 +08:00
b20f7cd1ad deps: update webui deps 2025-04-25 02:21:20 +08:00
eb8f0be004 refactor: refactor webui structure 2025-04-24 02:23:26 +08:00
68aa13e216 feat: add transformer and refactor graphql guards 2025-04-23 02:57:22 +08:00
2a5c2b18e7 feat: prepare transformers 2025-04-22 03:19:59 +08:00
e64086b7cf fix: fix addrInUse at dev & cursor-point 2025-04-22 01:12:01 +08:00
08946059ad feat: refactor to react for better ai generation 2025-04-21 02:22:28 +08:00
10b17dc66b feat: add zellij tab support 2025-04-17 02:03:21 +08:00
1ff8a311ae feat(downloader): add rqbit impl 2025-04-09 02:26:23 +08:00
2686fa1d76 refactor: split modules 2025-04-08 02:12:06 +08:00
376d2b28d3 refactor: split out testcontainers-rs-ext 2025-04-05 19:51:59 +08:00
a3609696c7 feat: finish qbit adapter 2025-04-05 14:24:47 +08:00
b0c12acbc6 fix: fix paths 2025-04-05 10:40:48 +08:00
3dfcf2a536 fix: add testing-torrents params 2025-04-05 09:20:51 +08:00
ecb56013a5 fix: temp save 2025-04-05 07:02:47 +08:00
27b52f7fd1 refactor: rewrite qbit downloader 2025-04-03 02:22:26 +08:00
234441e6a3 refactor: switch error handle to snafu 2025-04-01 20:53:27 +08:00
011f62829a fix: fix workflows 2025-04-01 03:56:48 +08:00
c34584a215 fix: fix workflows 2025-04-01 03:49:22 +08:00
1fca69fa66 fix: fix testing torrents container 2025-04-01 03:45:56 +08:00
a0fc4c04d9 feat: add testing-torrents 2025-04-01 03:00:29 +08:00
07ac7e3376 fix: do some fix 2025-03-08 03:22:46 +08:00
f94e175082 feat: add replay-stream-tasks pattern support 2025-03-08 00:00:44 +08:00
e66573b315 fix: fix typos 2025-03-06 02:49:15 +08:00
27cdcdef58 refactor: merge playground into webui 2025-03-05 22:53:37 +08:00
383e6340ea feat: add auth to webapi 2025-03-04 23:31:13 +08:00
5a4a4d7e3a fix: remove favicon image padding transparent 2025-03-04 12:28:52 +08:00
6e4c136614 style: rollback tailwindcss to v3 for temp fix 2025-03-04 03:16:06 +08:00
e2fdeaabb2 fix: temp save 2025-03-04 01:09:38 +08:00
408d211f27 refactor: remove useless folders 2025-03-04 01:03:39 +08:00
2844e1fc32 refactor: remove loco-rs deps 2025-02-28 06:14:08 +08:00
a68aab1452 refactor: remove loco-rs 2025-02-28 00:19:40 +08:00
c0707d17bb feat: switch mikan bangumi metas from my bangumi page to stream 2025-02-27 14:32:13 +08:00
6887b2a069 feat: add mikan cookie support 2025-02-26 05:31:28 +08:00
cac0d37e53 repo: fix gitattributes 2025-02-25 02:20:25 +08:00
f327ea29f1 fix: fix mikan rss extractors 2025-02-25 00:03:05 +08:00
5bc5d98823 fix: fix mikan web extractors 2025-02-24 21:38:36 +08:00
09565bd827 feat: add mikan my bangumi page extractor 2025-02-23 23:58:21 +08:00
7adc0582aa feat: add key context for graphql permission error 2025-02-23 16:47:16 +08:00
4f9e74ceb4 feat: add custom types for subscriber id input filter 2025-02-23 15:57:59 +08:00
c2f74dc369 feat: add permission control 2025-02-22 20:26:14 +08:00
ae40a3a7f8 feat: switch to oidc-client-rx 2025-02-21 05:34:30 +08:00
027112db9a style: update recorder api playground styles 2025-01-15 01:22:05 +08:00
9a2a8f029f feat: add assets 2025-01-14 07:30:04 +08:00
877d90d1e2 feat: add oidc and basic support for playground 2025-01-14 07:27:09 +08:00
c6677d414d feat: replace graphql playground to altair 2025-01-12 03:46:28 +08:00
1813 changed files with 368593 additions and 36363 deletions

View File

@@ -1,34 +1,5 @@
 [alias]
-recorder = "run -p recorder --bin recorder_cli -- --environment development"
 recorder-playground = "run -p recorder --example playground -- --environment development"
 
 [build]
-rustflags = ["-Zthreads=8"]
+rustflags = ["-Zthreads=8", "-Zshare-generics=y"]
-
-[target.x86_64-unknown-linux-gnu]
-linker = "clang"
-rustflags = ["-Zthreads=8", "-Clink-arg=-fuse-ld=lld", "-Zshare-generics=y"]
-
-[target.x86_64-pc-windows-msvc]
-linker = "rust-lld.exe"
-rustflags = ["-Zthreads=8", "-Zshare-generics=n"]
-
-# NOTE: you must install [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on mac. you can easily do this by installing llvm which includes lld with the "brew" package manager:
-# `brew install llvm`
-#[target.x86_64-apple-darwin]
-#rustflags = [
-#    "-Zthreads=8",
-#    "-C",
-#    "link-arg=-fuse-ld=/usr/local/opt/llvm/bin/ld64.lld",
-#    "-Zshare-generics=y",
-#]
-
-# NOTE: you must install [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on mac. you can easily do this by installing llvm which includes lld with the "brew" package manager:
-# `brew install llvm`
-#[target.aarch64-apple-darwin]
-#rustflags = [
-#    "-Zthreads=8",
-#    "-C",
-#    "link-arg=-fuse-ld=/opt/homebrew/opt/llvm/bin/ld64.lld",
-#    "-Zshare-generics=y",
-#]

View File

@@ -2,7 +2,7 @@ root = true
 [*]
 indent_style = space
-indent_size = 2
+indent_size = 4
 charset = utf-8
 trim_trailing_whitespace = true
 insert_final_newline = true

.gitattributes vendored Normal file
View File

@@ -0,0 +1 @@
**/tests/resources/** linguist-detectable=false

View File

@@ -1,107 +0,0 @@
name: CI
on:
  push:
    branches:
      - master
      - main
  pull_request:

env:
  RUST_TOOLCHAIN: stable
  TOOLCHAIN_PROFILE: minimal

jobs:
  rustfmt:
    name: Check Style
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Checkout the code
        uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          profile: ${{ env.TOOLCHAIN_PROFILE }}
          toolchain: ${{ env.RUST_TOOLCHAIN }}
          override: true
          components: rustfmt
      - name: Run cargo fmt
        uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --all -- --check
  clippy:
    name: Run Clippy
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Checkout the code
        uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          profile: ${{ env.TOOLCHAIN_PROFILE }}
          toolchain: ${{ env.RUST_TOOLCHAIN }}
          override: true
      - name: Setup Rust cache
        uses: Swatinem/rust-cache@v2
      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        with:
          command: clippy
          args: --all-features -- -D warnings -W clippy::pedantic -W clippy::nursery -W rust-2018-idioms
  test:
    name: Run Tests
    runs-on: ubuntu-latest
    permissions:
      contents: read
    services:
      redis:
        image: redis
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - "6379:6379"
      postgres:
        image: postgres
        env:
          POSTGRES_DB: postgress_test
          POSTGRES_USER: postgress
          POSTGRES_PASSWORD: postgress
        ports:
          - "5432:5432"
        # Set health checks to wait until postgres has started
        options: --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout the code
        uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          profile: ${{ env.TOOLCHAIN_PROFILE }}
          toolchain: ${{ env.RUST_TOOLCHAIN }}
          override: true
      - name: Setup Rust cache
        uses: Swatinem/rust-cache@v2
      - name: Run cargo test
        uses: actions-rs/cargo@v1
        with:
          command: test
          args: --all-features --all
        env:
          REDIS_URL: redis://localhost:${{job.services.redis.ports[6379]}}
          DATABASE_URL: postgres://postgress:postgress@localhost:5432/postgress_test

View File

@@ -0,0 +1,36 @@
name: Testing Torrents Container
on:
  workflow_dispatch:

env:
  REGISTRY: ghcr.io
  ORG: dumtruck
  PROJECT: konobangu

jobs:
  build-container:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Log in to GHCR
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: 'packages/testing-torrents'
          file: 'packages/testing-torrents/Dockerfile'
          push: true
          tags: '${{ env.REGISTRY }}/${{ env.ORG }}/${{ env.PROJECT }}-testing-torrents:latest'
          cache-from: type=gha
          cache-to: type=gha,mode=max

.gitignore vendored
View File

@@ -158,11 +158,8 @@ web_modules/
 .yarn-integrity
 
 # Local env files
-.env
 .env.local
-.env.development.local
-.env.test.local
-.env.production.local
+.env.*.local
 
 # parcel-bundler cache (https://parceljs.org/)
 .cache
@@ -219,6 +216,7 @@ index.d.ts.map
 # Added by cargo
 /target
+/ide-target
 !/examples/.gitkeep
 /.env
 /.env.bk

View File

@@ -5,6 +5,7 @@
"unifiedjs.vscode-mdx", "unifiedjs.vscode-mdx",
"mikestead.dotenv", "mikestead.dotenv",
"christian-kohler.npm-intellisense", "christian-kohler.npm-intellisense",
"skellock.just" "skellock.just",
"zerotaskx.rust-extension-pack"
] ]
} }

.vscode/settings.json vendored
View File

@@ -1,33 +1,52 @@
 {
   "npm.packageManager": "pnpm",
-  "rust-analyzer.showUnlinkedFileNotification": false,
   "[javascript]": {
     "editor.defaultFormatter": "vscode.typescript-language-features",
     "editor.formatOnSave": true
   },
   "[json]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[jsonc]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[typescript]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "[typescriptreact]": {
     "editor.defaultFormatter": "biomejs.biome",
     "editor.formatOnSave": true
   },
   "editor.codeActionsOnSave": {
     "quickfix.biome": "explicit",
     "source.organizeImports.biome": "explicit"
   },
   "emmet.showExpandedAbbreviation": "never",
   "prettier.enable": false,
-  "tailwindCSS.experimental.configFile": "./packages/tailwind-config/config.ts",
   "typescript.tsdk": "node_modules/typescript/lib",
-  "rust-analyzer.cargo.features": ["testcontainers"]
-}
+  "rust-analyzer.showUnlinkedFileNotification": false,
+  "sqltools.connections": [
+    {
+      "previewLimit": 50,
+      "server": "localhost",
+      "port": 5432,
+      "driver": "PostgreSQL",
+      "name": "konobangu-dev",
+      "database": "konobangu",
+      "username": "konobangu"
+    }
+  ],
+  "rust-analyzer.cargo.features": "all",
+  "rust-analyzer.testExplorer": true
+  // https://github.com/rust-lang/rust/issues/141540
+  // "rust-analyzer.cargo.targetDir": "target/rust-analyzer",
+  // "rust-analyzer.check.extraEnv": {
+  //   "CARGO_TARGET_DIR": "target/rust-analyzer"
+  // },
+  // "rust-analyzer.cargo.extraEnv": {
+  //   "CARGO_TARGET_DIR": "target/analyzer"
+  // }
+}

.vscode/tasks.json vendored Normal file
View File

@@ -0,0 +1,112 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "dev-all",
"dependsOn": [
"dev-webui",
"dev-recorder",
"dev-proxy",
"dev-codegen-wait",
"dev-deps",
],
"dependsOrder": "parallel",
"group": {
"kind": "build",
"isDefault": false,
},
"presentation": {
"group": "new-group",
"echo": true,
"reveal": "always",
"panel": "shared",
"clear": false
}
},
{
"label": "dev-webui",
"type": "shell",
"command": "just",
"args": [
"dev-webui"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "always",
"focus": true,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-deps",
"type": "shell",
"command": "just",
"args": [
"dev-deps"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-codegen-wait",
"type": "shell",
"command": "just",
"args": [
"dev-codegen-wait"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-recorder",
"type": "shell",
"command": "just",
"args": [
"dev-recorder"
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
},
{
"label": "dev-proxy",
"type": "shell",
"command": "just",
"args": [
"dev-proxy",
],
"isBackground": true,
"problemMatcher": [],
"presentation": {
"panel": "dedicated",
"reveal": "never",
"focus": false,
"showReuseMessage": true,
"clear": true,
}
}
]
}

Cargo.lock generated

File diff suppressed because it is too large.

View File

@@ -1,14 +1,90 @@
+# cargo-features = ["codegen-backend"]
 [workspace]
-members = ["apps/recorder"]
+members = [
+    "packages/testing-torrents",
+    "packages/util",
+    "packages/util-derive",
+    "packages/fetch",
+    "packages/downloader",
+    "apps/recorder",
+    "apps/proxy",
+]
 resolver = "2"
 
-[patch.crates-io]
-testcontainers = { git = "https://github.com/testcontainers/testcontainers-rs.git", rev = "af21727" }
-# loco-rs = { git = "https://github.com/lonelyhentxi/loco.git", rev = "beb890e" }
-# loco-rs = { git = "https://github.com/loco-rs/loco.git" }
-async-graphql = { git = "https://github.com/aumetra/async-graphql.git", rev = "690ece7" }
-async-graphql-axum = { git = "https://github.com/aumetra/async-graphql.git", rev = "690ece7" }
-jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
-
-# [patch."https://github.com/lonelyhentxi/qbit.git"]
-# qbit-rs = { path = "./patches/qbit-rs" }
+[profile.dev]
+debug = 0
+# https://github.com/rust-lang/rust/issues/141540
+incremental = false
+# [simd not supported by cranelift](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171)
+# codegen-backend = "cranelift"
+
+[workspace.dependencies]
+testing-torrents = { path = "./packages/testing-torrents" }
+util = { path = "./packages/util" }
+util-derive = { path = "./packages/util-derive" }
+fetch = { path = "./packages/fetch" }
+downloader = { path = "./packages/downloader" }
+recorder = { path = "./apps/recorder" }
+reqwest = { version = "0.12.20", features = [
+    "charset",
+    "http2",
+    "json",
+    "macos-system-configuration",
+    "cookies",
+] }
+moka = "0.12"
+futures = "0.3"
+quirks_path = "0.1"
+snafu = { version = "0.8", features = ["futures"] }
+testcontainers = { version = "0.24" }
+testcontainers-modules = { version = "0.12.1" }
+testcontainers-ext = { version = "0.1.0", features = ["tracing"] }
+serde = { version = "1", features = ["derive"] }
+tokio = { version = "1.45.1", features = [
+    "macros",
+    "fs",
+    "rt-multi-thread",
+    "signal",
+] }
+serde_json = "1"
+async-trait = "0.1"
+tracing = "0.1"
+url = "2.5.2"
+anyhow = "1"
+itertools = "0.14"
+chrono = "0.4"
+bytes = "1"
+serde_with = "3"
+regex = "1.11"
+lazy_static = "1.5"
+axum = { version = "0.8.3", features = ["macros"] }
+tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
+axum-extra = { version = "0.10", features = ["typed-header"] }
+mockito = { version = "1.6.1" }
+convert_case = "0.8"
+color-eyre = "0.6.5"
+inquire = "0.7.5"
+image = "0.25.6"
+uuid = { version = "1.6.0", features = ["v4"] }
+maplit = "1.0.2"
+once_cell = "1.20.2"
+rand = "0.9.1"
+rust_decimal = "1.37.2"
+base64 = "0.22.1"
+nom = "8.0.0"
+percent-encoding = "2.3.1"
+num-traits = "0.2.19"
+http = "1.2.0"
+async-stream = "0.3.6"
+serde_variant = "0.1.3"
+tracing-appender = "0.2.3"
+clap = "4.5.40"
+ipnetwork = "0.21.1"
+typed-builder = "0.21.0"
+nanoid = "0.4.0"
+webp = "0.3.0"
+
+[patch.crates-io]
+seaography = { git = "https://github.com/dumtruck/seaography.git", rev = "a787c3a" }
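
For context: with Cargo's workspace dependency inheritance, each member crate opts into these shared entries with `workspace = true` instead of repeating versions and features. A minimal, hypothetical sketch of a member manifest (the dependency choices below are illustrative, not taken from this diff):

```toml
# Hypothetical member-crate manifest, e.g. packages/fetch/Cargo.toml.
# `workspace = true` inherits the version and feature set declared in
# the root [workspace.dependencies] table.
[dependencies]
reqwest = { workspace = true }
snafu = { workspace = true }
# Per-crate features can be layered on top of the inherited set:
tokio = { workspace = true, features = ["time"] }
```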

View File

@@ -1,3 +1,8 @@
-# KONOBUNGU
+<h1 align="center">
+  <img src="./assets/icon.png" height=180>
+  <br />
+  <b>Konobangu</b>
+  <div align="center"><img src="https://img.shields.io/badge/status-work--in--progress-blue" alt="status-badge" /></div>
+</h1>
 
-Kono Bangumi?
+<p align="center">Kono bangumi?</p>

View File

@@ -1,29 +0,0 @@
# Server
AUTH_TYPE="basic" #
BASIC_USER="konobangu"
BASIC_PASSWORD="konobangu"
OIDC_PROVIDER_ENDPOINT="https://some-oidc-auth.com/oidc/.well-known/openid-configuration"
OIDC_CLIENT_ID=""
OIDC_CLIENT_SECRET=""
OIDC_API_ISSUER="https://some-oidc-auth.com/oidc"
OIDC_API_AUDIENCE="https://konobangu.com/api"
OIDC_ICON_URL=""
OIDC_EXTRA_SCOPE_REGEX=""
OIDC_EXTRA_CLAIM_KEY=""
OIDC_EXTRA_CLAIM_VALUE=""
DATABASE_URL="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"
BETTERSTACK_API_KEY=""
BETTERSTACK_URL=""
FLAGS_SECRET=""
ARCJET_KEY=""
SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# Client
NEXT_PUBLIC_APP_URL="http://localhost:5000"
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"

View File

@@ -1,29 +0,0 @@
# AUTH
AUTH_TYPE="basic"
NEXT_PUBLIC_OIDC_PROVIDER_ENDPOINT="https://some-oidc-auth.com/oidc/.well-known/openid-configuration"
NEXT_PUBLIC_OIDC_CLIENT_ID=""
NEXT_PUBLIC_OIDC_CLIENT_SECRET=""
NEXT_PUBLIC_OIDC_ICON_URL=""
OIDC_API_ISSUER="https://some-oidc-auth.com/oidc"
OIDC_API_AUDIENCE="https://konobangu.com/api"
OIDC_EXTRA_SCOPES="" # e.g. "read:konobangu,write:konobangu"
OIDC_EXTRA_CLAIM_KEY=""
OIDC_EXTRA_CLAIM_VALUE=""
# DATABASE
DATABASE_URL="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"
# SERVER MISC
BETTERSTACK_API_KEY=""
BETTERSTACK_URL=""
FLAGS_SECRET=""
ARCJET_KEY=""
SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# WEBUI
NEXT_PUBLIC_APP_URL="http://localhost:5000"
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"

apps/app/.gitignore vendored
View File

@@ -1,45 +0,0 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
.env*.local
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts
# prisma
.env
# react.email
.react-email
# Sentry
.sentryclirc

View File

@@ -1,13 +0,0 @@
import { render, screen } from '@testing-library/react';
import { expect, test } from 'vitest';
import Page from '../app/(unauthenticated)/sign-in/[[...sign-in]]/page';
test('Sign In Page', () => {
render(<Page />);
expect(
screen.getByRole('heading', {
level: 1,
name: 'Welcome back',
})
).toBeDefined();
});

View File

@@ -1,13 +0,0 @@
import { render, screen } from '@testing-library/react';
import { expect, test } from 'vitest';
import Page from '../app/(unauthenticated)/sign-up/[[...sign-up]]/page';
test('Sign Up Page', () => {
render(<Page />);
expect(
screen.getByRole('heading', {
level: 1,
name: 'Create an account',
})
).toBeDefined();
});

View File

@@ -1,59 +0,0 @@
'use client';
import { useOthers, useSelf } from '@konobangu/collaboration/hooks';
import {
Avatar,
AvatarFallback,
AvatarImage,
} from '@konobangu/design-system/components/ui/avatar';
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from '@konobangu/design-system/components/ui/tooltip';
import { tailwind } from '@konobangu/tailwind-config';
type PresenceAvatarProps = {
info?: Liveblocks['UserMeta']['info'];
};
const PresenceAvatar = ({ info }: PresenceAvatarProps) => (
<Tooltip delayDuration={0}>
<TooltipTrigger>
<Avatar className="h-7 w-7 bg-secondary ring-1 ring-background">
<AvatarImage src={info?.avatar} alt={info?.name} />
<AvatarFallback className="text-xs">
{info?.name?.slice(0, 2)}
</AvatarFallback>
</Avatar>
</TooltipTrigger>
<TooltipContent collisionPadding={4}>
<p>{info?.name ?? 'Unknown'}</p>
</TooltipContent>
</Tooltip>
);
export const AvatarStack = () => {
const others = useOthers();
const self = useSelf();
const hasMoreUsers = others.length > 3;
return (
<div className="-space-x-1 flex items-center px-4">
{others.slice(0, 3).map(({ connectionId, info }) => (
<PresenceAvatar key={connectionId} info={info} />
))}
{hasMoreUsers && (
<PresenceAvatar
info={{
name: `+${others.length - 3}`,
color: tailwind.theme.colors.gray[500],
}}
/>
)}
{self && <PresenceAvatar info={self.info} />}
</div>
);
};

View File

@@ -1,48 +0,0 @@
'use client';
import { getUsers } from '@/app/actions/users/get';
import { searchUsers } from '@/app/actions/users/search';
import { Room } from '@konobangu/collaboration/room';
import type { ReactNode } from 'react';
export const CollaborationProvider = ({
orgId,
children,
}: {
orgId: string;
children: ReactNode;
}) => {
const resolveUsers = async ({ userIds }: { userIds: string[] }) => {
const response = await getUsers(userIds);
if ('error' in response) {
throw new Error('Problem resolving users');
}
return response.data;
};
const resolveMentionSuggestions = async ({ text }: { text: string }) => {
const response = await searchUsers(text);
if ('error' in response) {
throw new Error('Problem resolving mention suggestions');
}
return response.data;
};
return (
<Room
id={`${orgId}:presence`}
authEndpoint="/api/collaboration/auth"
fallback={
<div className="px-3 text-muted-foreground text-xs">Loading...</div>
}
resolveUsers={resolveUsers}
resolveMentionSuggestions={resolveMentionSuggestions}
>
{children}
</Room>
);
};

View File

@@ -1,106 +0,0 @@
'use client';
import { useMyPresence, useOthers } from '@konobangu/collaboration/hooks';
import { useEffect } from 'react';
const Cursor = ({
name,
color,
x,
y,
}: {
name: string | undefined;
color: string;
x: number;
y: number;
}) => (
<div
className="pointer-events-none absolute top-0 left-0 z-[999] select-none transition-transform duration-100"
style={{
transform: `translateX(${x}px) translateY(${y}px)`,
}}
>
<svg
className="absolute top-0 left-0"
width="24"
height="36"
viewBox="0 0 24 36"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<title>Cursor</title>
<path
d="M5.65376 12.3673H5.46026L5.31717 12.4976L0.500002 16.8829L0.500002 1.19841L11.7841 12.3673H5.65376Z"
fill={color}
/>
</svg>
<div
className="absolute top-4 left-1.5 whitespace-nowrap rounded-full px-2 py-0.5 text-white text-xs"
style={{
backgroundColor: color,
}}
>
{name}
</div>
</div>
);
export const Cursors = () => {
/**
* useMyPresence returns the presence of the current user and a function to update it.
* updateMyPresence is different than the setState function returned by the useState hook from React.
* You don't need to pass the full presence object to update it.
* See https://liveblocks.io/docs/api-reference/liveblocks-react#useMyPresence for more information
*/
const [_cursor, updateMyPresence] = useMyPresence();
/**
* Return all the other users in the room and their presence (a cursor position in this case)
*/
const others = useOthers();
useEffect(() => {
const onPointerMove = (event: PointerEvent) => {
// Update the user cursor position on every pointer move
updateMyPresence({
cursor: {
x: Math.round(event.clientX),
y: Math.round(event.clientY),
},
});
};
const onPointerLeave = () => {
// When the pointer goes out, set cursor to null
updateMyPresence({
cursor: null,
});
};
document.body.addEventListener('pointermove', onPointerMove);
document.body.addEventListener('pointerleave', onPointerLeave);
return () => {
document.body.removeEventListener('pointermove', onPointerMove);
document.body.removeEventListener('pointerleave', onPointerLeave);
};
}, [updateMyPresence]);
return others.map(({ connectionId, presence, info }) => {
if (!presence.cursor) {
return null;
}
return (
<Cursor
key={`cursor-${connectionId}`}
// connectionId is an integer that is incremented at every new connections
// Assigning a color with a modulo makes sure that a specific user has the same colors on every clients
color={info.color}
x={presence.cursor.x}
y={presence.cursor.y}
name={info?.name}
/>
);
});
};

View File

@@ -1,43 +0,0 @@
import {
Breadcrumb,
BreadcrumbItem,
BreadcrumbLink,
BreadcrumbList,
BreadcrumbPage,
BreadcrumbSeparator,
} from '@konobangu/design-system/components/ui/breadcrumb';
import { Separator } from '@konobangu/design-system/components/ui/separator';
import { SidebarTrigger } from '@konobangu/design-system/components/ui/sidebar';
import { Fragment, type ReactNode } from 'react';
type HeaderProps = {
pages: string[];
page: string;
children?: ReactNode;
};
export const Header = ({ pages, page, children }: HeaderProps) => (
<header className="flex h-16 shrink-0 items-center justify-between gap-2">
<div className="flex items-center gap-2 px-4">
<SidebarTrigger className="-ml-1" />
<Separator orientation="vertical" className="mr-2 h-4" />
<Breadcrumb>
<BreadcrumbList>
{pages.map((page, index) => (
<Fragment key={page}>
{index > 0 && <BreadcrumbSeparator className="hidden md:block" />}
<BreadcrumbItem className="hidden md:block">
<BreadcrumbLink href="#">{page}</BreadcrumbLink>
</BreadcrumbItem>
</Fragment>
))}
<BreadcrumbSeparator className="hidden md:block" />
<BreadcrumbItem>
<BreadcrumbPage>{page}</BreadcrumbPage>
</BreadcrumbItem>
</BreadcrumbList>
</Breadcrumb>
</div>
{children}
</header>
);

View File

@@ -1,44 +0,0 @@
'use client';
import { analytics } from '@konobangu/analytics/client';
import { useSession } from '@konobangu/auth/client';
import { usePathname, useSearchParams } from 'next/navigation';
import { useEffect, useRef } from 'react';
export const PostHogIdentifier = () => {
const session = useSession();
const user = session?.data?.user;
const identified = useRef(false);
const pathname = usePathname();
const searchParams = useSearchParams();
useEffect(() => {
// Track pageviews
if (pathname && analytics) {
let url = window.origin + pathname;
if (searchParams.toString()) {
url = `${url}?${searchParams.toString()}`;
}
analytics.capture('$pageview', {
$current_url: url,
});
}
}, [pathname, searchParams]);
useEffect(() => {
if (!user || identified.current) {
return;
}
analytics.identify(user.id, {
email: user.email,
name: user.name,
createdAt: user.createdAt,
avatar: user.image,
});
identified.current = true;
}, [user]);
return null;
};

View File

@@ -1,342 +0,0 @@
'use client';
// import { OrganizationSwitcher, UserButton } from '@konobangu/auth/client';
import { ModeToggle } from '@konobangu/design-system/components/mode-toggle';
import {
Collapsible,
CollapsibleContent,
CollapsibleTrigger,
} from '@konobangu/design-system/components/ui/collapsible';
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from '@konobangu/design-system/components/ui/dropdown-menu';
import {
Sidebar,
SidebarContent,
SidebarFooter,
SidebarGroup,
SidebarGroupContent,
SidebarGroupLabel,
SidebarHeader,
SidebarInset,
SidebarMenu,
SidebarMenuAction,
SidebarMenuButton,
SidebarMenuItem,
SidebarMenuSub,
SidebarMenuSubButton,
SidebarMenuSubItem,
useSidebar,
} from '@konobangu/design-system/components/ui/sidebar';
import { cn } from '@konobangu/design-system/lib/utils';
import {
AnchorIcon,
BookOpenIcon,
BotIcon,
ChevronRightIcon,
FolderIcon,
FrameIcon,
LifeBuoyIcon,
MapIcon,
MoreHorizontalIcon,
PieChartIcon,
SendIcon,
Settings2Icon,
ShareIcon,
SquareTerminalIcon,
Trash2Icon,
} from 'lucide-react';
import type { ReactNode } from 'react';
type GlobalSidebarProperties = {
readonly children: ReactNode;
};
const data = {
user: {
name: 'shadcn',
email: 'm@example.com',
avatar: '/avatars/shadcn.jpg',
},
navMain: [
{
title: 'Playground',
url: '#',
icon: SquareTerminalIcon,
isActive: true,
items: [
{
title: 'History',
url: '#',
},
{
title: 'Starred',
url: '#',
},
{
title: 'Settings',
url: '#',
},
],
},
{
title: 'Models',
url: '#',
icon: BotIcon,
items: [
{
title: 'Genesis',
url: '#',
},
{
title: 'Explorer',
url: '#',
},
{
title: 'Quantum',
url: '#',
},
],
},
{
title: 'Documentation',
url: '#',
icon: BookOpenIcon,
items: [
{
title: 'Introduction',
url: '#',
},
{
title: 'Get Started',
url: '#',
},
{
title: 'Tutorials',
url: '#',
},
{
title: 'Changelog',
url: '#',
},
],
},
{
title: 'Settings',
url: '#',
icon: Settings2Icon,
items: [
{
title: 'General',
url: '#',
},
{
title: 'Team',
url: '#',
},
{
title: 'Billing',
url: '#',
},
{
title: 'Limits',
url: '#',
},
],
},
],
navSecondary: [
{
title: 'Webhooks',
url: '/webhooks',
icon: AnchorIcon,
},
{
title: 'Support',
url: '#',
icon: LifeBuoyIcon,
},
{
title: 'Feedback',
url: '#',
icon: SendIcon,
},
],
projects: [
{
name: 'Design Engineering',
url: '#',
icon: FrameIcon,
},
{
name: 'Sales & Marketing',
url: '#',
icon: PieChartIcon,
},
{
name: 'Travel',
url: '#',
icon: MapIcon,
},
],
};
export const GlobalSidebar = ({ children }: GlobalSidebarProperties) => {
const sidebar = useSidebar();
return (
<>
<Sidebar variant="inset">
<SidebarHeader>
<SidebarMenu>
<SidebarMenuItem>
<div
className={cn(
'h-[36px] overflow-hidden transition-all [&>div]:w-full',
sidebar.open ? '' : '-mx-1'
)}
>
{/* <OrganizationSwitcher
hidePersonal
afterSelectOrganizationUrl="/"
/> */}
</div>
</SidebarMenuItem>
</SidebarMenu>
</SidebarHeader>
<SidebarContent>
<SidebarGroup>
<SidebarGroupLabel>Platform</SidebarGroupLabel>
<SidebarMenu>
{data.navMain.map((item) => (
<Collapsible
key={item.title}
asChild
defaultOpen={item.isActive}
>
<SidebarMenuItem>
<SidebarMenuButton asChild tooltip={item.title}>
<a href={item.url}>
<item.icon />
<span>{item.title}</span>
</a>
</SidebarMenuButton>
{item.items?.length ? (
<>
<CollapsibleTrigger asChild>
<SidebarMenuAction className="data-[state=open]:rotate-90">
<ChevronRightIcon />
<span className="sr-only">Toggle</span>
</SidebarMenuAction>
</CollapsibleTrigger>
<CollapsibleContent>
<SidebarMenuSub>
{item.items?.map((subItem) => (
<SidebarMenuSubItem key={subItem.title}>
<SidebarMenuSubButton asChild>
<a href={subItem.url}>
<span>{subItem.title}</span>
</a>
</SidebarMenuSubButton>
</SidebarMenuSubItem>
))}
</SidebarMenuSub>
</CollapsibleContent>
</>
) : null}
</SidebarMenuItem>
</Collapsible>
))}
</SidebarMenu>
</SidebarGroup>
<SidebarGroup className="group-data-[collapsible=icon]:hidden">
<SidebarGroupLabel>Projects</SidebarGroupLabel>
<SidebarMenu>
{data.projects.map((item) => (
<SidebarMenuItem key={item.name}>
<SidebarMenuButton asChild>
<a href={item.url}>
<item.icon />
<span>{item.name}</span>
</a>
</SidebarMenuButton>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<SidebarMenuAction showOnHover>
<MoreHorizontalIcon />
<span className="sr-only">More</span>
</SidebarMenuAction>
</DropdownMenuTrigger>
<DropdownMenuContent
className="w-48"
side="bottom"
align="end"
>
<DropdownMenuItem>
<FolderIcon className="text-muted-foreground" />
<span>View Project</span>
</DropdownMenuItem>
<DropdownMenuItem>
<ShareIcon className="text-muted-foreground" />
<span>Share Project</span>
</DropdownMenuItem>
<DropdownMenuSeparator />
<DropdownMenuItem>
<Trash2Icon className="text-muted-foreground" />
<span>Delete Project</span>
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</SidebarMenuItem>
))}
<SidebarMenuItem>
<SidebarMenuButton>
<MoreHorizontalIcon />
<span>More</span>
</SidebarMenuButton>
</SidebarMenuItem>
</SidebarMenu>
</SidebarGroup>
<SidebarGroup className="mt-auto">
<SidebarGroupContent>
<SidebarMenu>
{data.navSecondary.map((item) => (
<SidebarMenuItem key={item.title}>
<SidebarMenuButton asChild>
<a href={item.url}>
<item.icon />
<span>{item.title}</span>
</a>
</SidebarMenuButton>
</SidebarMenuItem>
))}
</SidebarMenu>
</SidebarGroupContent>
</SidebarGroup>
</SidebarContent>
<SidebarFooter>
<SidebarMenu>
<SidebarMenuItem className="flex items-center gap-2">
{/* <UserButton
showName
appearance={{
elements: {
rootBox: 'flex overflow-hidden w-full',
userButtonBox: 'flex-row-reverse',
userButtonOuterIdentifier: 'truncate pl-0',
},
}}
/> */}
<ModeToggle />
</SidebarMenuItem>
</SidebarMenu>
</SidebarFooter>
</Sidebar>
<SidebarInset>{children}</SidebarInset>
</>
);
};

View File

@@ -1,42 +0,0 @@
import { getSessionFromHeaders } from '@konobangu/auth/server';
import { SidebarProvider } from '@konobangu/design-system/components/ui/sidebar';
import { env } from '@konobangu/env';
import { showBetaFeature } from '@konobangu/feature-flags';
import { secure } from '@konobangu/security';
import { redirect } from 'next/navigation';
import type { ReactNode } from 'react';
import { PostHogIdentifier } from './components/posthog-identifier';
import { GlobalSidebar } from './components/sidebar';
type AppLayoutProperties = {
readonly children: ReactNode;
};
const AppLayout = async ({ children }: AppLayoutProperties) => {
if (env.ARCJET_KEY) {
await secure(['CATEGORY:PREVIEW']);
}
const { user } = await getSessionFromHeaders();
if (!user) {
return redirect('/sign-in'); // from next/navigation
}
const betaFeature = await showBetaFeature();
return (
<SidebarProvider>
<GlobalSidebar>
{betaFeature && (
<div className="m-4 rounded-full bg-success p-1.5 text-center text-sm text-success-foreground">
Beta feature now available
</div>
)}
{children}
</GlobalSidebar>
<PostHogIdentifier />
</SidebarProvider>
);
};
export default AppLayout;

View File

@@ -1,57 +0,0 @@
import { getSessionFromHeaders } from '@konobangu/auth/server';
import { database } from '@konobangu/database';
import { env } from '@konobangu/env';
import type { Metadata } from 'next';
import dynamic from 'next/dynamic';
import { notFound } from 'next/navigation';
import { AvatarStack } from './components/avatar-stack';
import { Cursors } from './components/cursors';
import { Header } from './components/header';
const title = 'Acme Inc';
const description = 'My application.';
const CollaborationProvider = dynamic(() =>
import('./components/collaboration-provider').then(
(mod) => mod.CollaborationProvider
)
);
export const metadata: Metadata = {
title,
description,
};
const App = async () => {
const pages = await database.selectFrom('page').selectAll().execute();
const { orgId } = await getSessionFromHeaders();
if (!orgId) {
notFound();
}
return (
<>
<Header pages={['Building Your Application']} page="Data Fetching">
{env.LIVEBLOCKS_SECRET && (
<CollaborationProvider orgId={orgId}>
<AvatarStack />
<Cursors />
</CollaborationProvider>
)}
</Header>
<div className="flex flex-1 flex-col gap-4 p-4 pt-0">
<div className="grid auto-rows-min gap-4 md:grid-cols-3">
{pages.map((page) => (
<div key={page.id} className="aspect-video rounded-xl bg-muted/50">
{page.name}
</div>
))}
</div>
<div className="min-h-[100vh] flex-1 rounded-xl bg-muted/50 md:min-h-min" />
</div>
</>
);
};
export default App;

View File

@@ -1,29 +0,0 @@
import { webhooks } from '@konobangu/webhooks';
import { notFound } from 'next/navigation';
export const metadata = {
title: 'Webhooks',
description: 'Send webhooks to your users.',
};
const WebhooksPage = async () => {
const response = await webhooks.getAppPortal();
if (!response?.url) {
notFound();
}
return (
<div className="h-full w-full overflow-hidden">
<iframe
title="Webhooks"
src={response.url}
className="h-full w-full border-none"
allow="clipboard-write"
loading="lazy"
/>
</div>
);
};
export default WebhooksPage;

View File

@@ -1,58 +0,0 @@
import { ModeToggle } from '@konobangu/design-system/components/mode-toggle';
import { env } from '@konobangu/env';
import { CommandIcon } from 'lucide-react';
import Link from 'next/link';
import type { ReactNode } from 'react';
type AuthLayoutProps = {
readonly children: ReactNode;
};
const AuthLayout = ({ children }: AuthLayoutProps) => (
<div className="container relative grid h-dvh flex-col items-center justify-center lg:max-w-none lg:grid-cols-2 lg:px-0">
<div className="relative hidden h-full flex-col bg-muted p-10 text-white lg:flex dark:border-r">
<div className="absolute inset-0 bg-zinc-900" />
<div className="relative z-20 flex items-center font-medium text-lg">
<CommandIcon className="mr-2 h-6 w-6" />
Acme Inc
</div>
<div className="absolute top-4 right-4">
<ModeToggle />
</div>
<div className="relative z-20 mt-auto">
<blockquote className="space-y-2">
<p className="text-lg">
&ldquo;This library has saved me countless hours of work and helped
me deliver stunning designs to my clients faster than ever
before.&rdquo;
</p>
<footer className="text-sm">Sofia Davis</footer>
</blockquote>
</div>
</div>
<div className="lg:p-8">
<div className="mx-auto flex w-full max-w-[400px] flex-col justify-center space-y-6">
{children}
<p className="px-8 text-center text-muted-foreground text-sm">
By clicking continue, you agree to our{' '}
<Link
href={new URL('/legal/terms', env.NEXT_PUBLIC_WEB_URL).toString()}
className="underline underline-offset-4 hover:text-primary"
>
Terms of Service
</Link>{' '}
and{' '}
<Link
href={new URL('/legal/privacy', env.NEXT_PUBLIC_WEB_URL).toString()}
className="underline underline-offset-4 hover:text-primary"
>
Privacy Policy
</Link>
.
</p>
</div>
</div>
</div>
);
export default AuthLayout;

View File

@@ -1,23 +0,0 @@
import { createMetadata } from '@konobangu/seo/metadata';
import type { Metadata } from 'next';
import dynamic from 'next/dynamic';
const title = 'Welcome back';
const description = 'Enter your details to sign in.';
const SignIn = dynamic(() =>
import('@konobangu/auth/components/sign-in').then((mod) => mod.SignIn)
);
export const metadata: Metadata = createMetadata({ title, description });
const SignInPage = () => (
<>
<div className="flex flex-col space-y-2 text-center">
<h1 className="font-semibold text-2xl tracking-tight">{title}</h1>
<p className="text-muted-foreground text-sm">{description}</p>
</div>
<SignIn />
</>
);
export default SignInPage;

View File

@@ -1,23 +0,0 @@
import { createMetadata } from '@konobangu/seo/metadata';
import type { Metadata } from 'next';
import dynamic from 'next/dynamic';
const title = 'Create an account';
const description = 'Enter your details to get started.';
const SignUp = dynamic(() =>
import('@konobangu/auth/components/sign-up').then((mod) => mod.SignUp)
);
export const metadata: Metadata = createMetadata({ title, description });
const SignUpPage = () => (
<>
<div className="flex flex-col space-y-2 text-center">
<h1 className="font-semibold text-2xl tracking-tight">{title}</h1>
<p className="text-muted-foreground text-sm">{description}</p>
</div>
<SignUp />
</>
);
export default SignUpPage;

View File

@@ -1,3 +0,0 @@
import { getFlags } from '@konobangu/feature-flags/access';
export const GET = getFlags;

View File

@@ -1,63 +0,0 @@
'use server';
import {
getFullOrganizationFromSession,
getSessionFromHeaders,
} from '@konobangu/auth/server';
import { tailwind } from '@konobangu/tailwind-config';
const colors = [
tailwind.theme.colors.red[500],
tailwind.theme.colors.orange[500],
tailwind.theme.colors.amber[500],
tailwind.theme.colors.yellow[500],
tailwind.theme.colors.lime[500],
tailwind.theme.colors.green[500],
tailwind.theme.colors.emerald[500],
tailwind.theme.colors.teal[500],
tailwind.theme.colors.cyan[500],
tailwind.theme.colors.sky[500],
tailwind.theme.colors.blue[500],
tailwind.theme.colors.indigo[500],
tailwind.theme.colors.violet[500],
tailwind.theme.colors.purple[500],
tailwind.theme.colors.fuchsia[500],
tailwind.theme.colors.pink[500],
tailwind.theme.colors.rose[500],
];
export const getUsers = async (
userIds: string[]
): Promise<
| {
data: Liveblocks['UserMeta']['info'][];
}
| {
error: unknown;
}
> => {
try {
const session = await getSessionFromHeaders();
const { orgId } = session;
if (!orgId) {
throw new Error('Not logged in');
}
const { fullOrganization } = await getFullOrganizationFromSession(session);
const members = fullOrganization?.members || [];
const data: Liveblocks['UserMeta']['info'][] = members
.filter((user) => user?.userId && userIds.includes(user?.userId))
.map((user) => ({
name: user.user.name ?? user.user.email ?? 'Unknown user',
picture: user.user.image,
color: colors[Math.floor(Math.random() * colors.length)],
}));
return { data };
} catch (error) {
return { error };
}
};

View File

@@ -1,50 +0,0 @@
'use server';
import {
getFullOrganizationFromSession,
getSessionFromHeaders,
} from '@konobangu/auth/server';
import Fuse from 'fuse.js';
export const searchUsers = async (
query: string
): Promise<
| {
data: string[];
}
| {
error: unknown;
}
> => {
try {
const session = await getSessionFromHeaders();
const { orgId } = session;
if (!orgId) {
throw new Error('Not logged in');
}
const { fullOrganization } = await getFullOrganizationFromSession(session);
const members = fullOrganization?.members || [];
const users = members.map((user) => ({
id: user.id,
name: user.user.name ?? user.user.email ?? 'Unknown user',
imageUrl: user.user.image,
}));
const fuse = new Fuse(users, {
keys: ['name'],
minMatchCharLength: 1,
threshold: 0.3,
});
const results = fuse.search(query);
const data = results.map((result) => result.item.id);
return { data };
} catch (error) {
return { error };
}
};

View File

@@ -1,42 +0,0 @@
import { getSessionFromHeaders } from '@konobangu/auth/server';
import { authenticate } from '@konobangu/collaboration/auth';
import { tailwind } from '@konobangu/tailwind-config';
const COLORS = [
tailwind.theme.colors.red[500],
tailwind.theme.colors.orange[500],
tailwind.theme.colors.amber[500],
tailwind.theme.colors.yellow[500],
tailwind.theme.colors.lime[500],
tailwind.theme.colors.green[500],
tailwind.theme.colors.emerald[500],
tailwind.theme.colors.teal[500],
tailwind.theme.colors.cyan[500],
tailwind.theme.colors.sky[500],
tailwind.theme.colors.blue[500],
tailwind.theme.colors.indigo[500],
tailwind.theme.colors.violet[500],
tailwind.theme.colors.purple[500],
tailwind.theme.colors.fuchsia[500],
tailwind.theme.colors.pink[500],
tailwind.theme.colors.rose[500],
];
export const POST = async () => {
const session = await getSessionFromHeaders();
const { orgId, user } = session;
if (!user || !orgId) {
return new Response('Unauthorized', { status: 401 });
}
return authenticate({
userId: user.id,
orgId,
userInfo: {
name: user.name ?? user.email ?? undefined,
avatar: user.image ?? undefined,
color: COLORS[Math.floor(Math.random() * COLORS.length)],
},
});
};

Binary file not shown (before: 216 B).

View File

@@ -1,17 +0,0 @@
import { database } from '@konobangu/database';
export const POST = async () => {
const newPage = await database
.insertInto('page')
.values([
{
name: 'cron-temp',
},
])
.returning('id')
.executeTakeFirstOrThrow();
await database.deleteFrom('page').where('id', '=', newPage.id);
return new Response('OK', { status: 200 });
};

View File

@@ -1,29 +0,0 @@
'use client';
import { Button } from '@konobangu/design-system/components/ui/button';
import { fonts } from '@konobangu/design-system/lib/fonts';
import { captureException } from '@sentry/nextjs';
import type NextError from 'next/error';
import { useEffect } from 'react';
type GlobalErrorProperties = {
readonly error: NextError & { digest?: string };
readonly reset: () => void;
};
const GlobalError = ({ error, reset }: GlobalErrorProperties) => {
useEffect(() => {
captureException(error);
}, [error]);
return (
<html lang="en" className={fonts}>
<body>
<h1>Oops, something went wrong</h1>
<Button onClick={() => reset()}>Try again</Button>
</body>
</html>
);
};
export default GlobalError;

View File

@@ -1,3 +0,0 @@
export const runtime = 'edge';
export const GET = (): Response => new Response('OK', { status: 200 });

Binary file not shown (before: 96 B).

View File

@@ -1,18 +0,0 @@
import '@konobangu/design-system/styles/globals.css';
import { DesignSystemProvider } from '@konobangu/design-system';
import { fonts } from '@konobangu/design-system/lib/fonts';
import type { ReactNode } from 'react';
type RootLayoutProperties = {
readonly children: ReactNode;
};
const RootLayout = ({ children }: RootLayoutProperties) => (
<html lang="en" className={fonts} suppressHydrationWarning>
<body>
<DesignSystemProvider>{children}</DesignSystemProvider>
</body>
</html>
);
export default RootLayout;

Binary file not shown (before: 57 KiB).

View File

@@ -1,3 +0,0 @@
import { initializeSentry } from '@konobangu/next-config/instrumentation';
export const register = initializeSentry();

View File

@@ -1 +0,0 @@
export * from '@konobangu/collaboration/config';

View File

@@ -1,22 +0,0 @@
import { authMiddleware } from '@konobangu/auth/middleware';
import {
noseconeConfig,
noseconeMiddleware,
} from '@konobangu/security/middleware';
import { NextRequest } from 'next/server';
const securityHeaders = noseconeMiddleware(noseconeConfig);
export async function middleware (_request: NextRequest) {
const response = await securityHeaders();
return authMiddleware(response as any);
}
export const config = {
matcher: [
// Skip Next.js internals and all static files, unless found in search params
'/((?!_next|[^?]*\\.(?:html?|css|js(?!on)|jpe?g|webp|png|gif|svg|ttf|woff2?|ico|csv|docx?|xlsx?|zip|webmanifest)).*)',
// Always run for API routes
'/(api|trpc)(.*)',
],
};

View File

@@ -1,15 +0,0 @@
import { env } from '@konobangu/env';
import { config, withAnalyzer, withSentry } from '@konobangu/next-config';
import type { NextConfig } from 'next';
let nextConfig: NextConfig = { ...config };
if (env.VERCEL) {
nextConfig = withSentry(nextConfig);
}
if (env.ANALYZE === 'true') {
nextConfig = withAnalyzer(nextConfig);
}
export default nextConfig;

View File

@@ -1,51 +0,0 @@
{
"name": "app",
"private": true,
"scripts": {
"dev": "next dev -p 5000 --turbopack",
"build": "next build",
"start": "next start",
"analyze": "ANALYZE=true pnpm build",
"test": "vitest run",
"clean": "git clean -xdf .cache .turbo dist node_modules",
"typecheck": "tsc --noEmit --emitDeclarationOnly false"
},
"dependencies": {
"@prisma/client": "6.0.1",
"@konobangu/analytics": "workspace:*",
"@konobangu/auth": "workspace:*",
"@konobangu/collaboration": "workspace:*",
"@konobangu/database": "workspace:*",
"@konobangu/migrate": "workspace:*",
"@konobangu/design-system": "workspace:*",
"@konobangu/env": "workspace:*",
"@konobangu/feature-flags": "workspace:*",
"@konobangu/next-config": "workspace:*",
"@konobangu/security": "workspace:*",
"@konobangu/seo": "workspace:*",
"@konobangu/tailwind-config": "workspace:*",
"@konobangu/webhooks": "workspace:*",
"@sentry/nextjs": "^8.43.0",
"fuse.js": "^7.0.0",
"import-in-the-middle": "^1.11.3",
"lucide-react": "^0.468.0",
"next": "^15.1.3",
"next-themes": "^0.4.4",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"require-in-the-middle": "^7.4.0"
},
"devDependencies": {
"@konobangu/testing": "workspace:*",
"@konobangu/typescript-config": "workspace:*",
"@testing-library/dom": "^10.4.0",
"@testing-library/react": "^16.1.0",
"@types/node": "22.10.1",
"@types/react": "19.0.1",
"@types/react-dom": "19.0.2",
"jsdom": "^25.0.1",
"tailwindcss": "^3.4.16",
"typescript": "^5.7.2",
"vitest": "^2.1.8"
}
}

View File

@@ -1 +0,0 @@
export { default } from '@konobangu/design-system/postcss.config.mjs';

View File

@@ -1,34 +0,0 @@
/*
* This file configures the initialization of Sentry on the client.
* The config you add here will be used whenever a users loads a page in their browser.
* https://docs.sentry.io/platforms/javascript/guides/nextjs/
*/
import { init, replayIntegration } from '@sentry/nextjs';
init({
dsn: process.env.NEXT_PUBLIC_SENTRY_DSN,
// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
replaysOnErrorSampleRate: 1,
/*
* This sets the sample rate to be 10%. You may want this to be 100% while
* in development and sample at a lower rate in production
*/
replaysSessionSampleRate: 0.1,
// You can remove this option if you're not planning to use the Sentry Session Replay feature:
integrations: [
replayIntegration({
// Additional Replay configuration goes in here, for example:
maskAllText: true,
blockAllMedia: true,
}),
],
});

View File

@@ -1 +0,0 @@
export { config as default } from '@konobangu/tailwind-config/config';

View File

@@ -1,17 +0,0 @@
{
"extends": "@konobangu/typescript-config/nextjs.json",
"compilerOptions": {
"baseUrl": ".",
"paths": {
"@/*": ["./*"],
"@konobangu/*": ["../../packages/*"]
}
},
"include": [
"next-env.d.ts",
"next.config.ts",
"**/*.ts",
"**/*.tsx",
".next/types/**/*.ts"
]
}

View File

@@ -1,8 +0,0 @@
{
"crons": [
{
"path": "/cron/keep-alive",
"schedule": "0 1 * * *"
}
]
}

View File

@@ -1 +0,0 @@
export { default } from '@konobangu/testing';

View File

@@ -4,8 +4,5 @@
"scripts": { "scripts": {
"dev": "npx --yes mintlify dev --port 5004", "dev": "npx --yes mintlify dev --port 5004",
"lint": "npx --yes mintlify broken-links" "lint": "npx --yes mintlify broken-links"
},
"devDependencies": {
"typescript": "^5.7.2"
} }
} }

View File

@@ -6,19 +6,16 @@
"build": "email build", "build": "email build",
"dev": "email dev --port 5003", "dev": "email dev --port 5003",
"export": "email export", "export": "email export",
"clean": "git clean -xdf .cache .turbo dist node_modules", "clean": "git clean -xdf .cache dist node_modules",
"typecheck": "tsc --noEmit --emitDeclarationOnly false" "typecheck": "tsc --noEmit --emitDeclarationOnly false"
}, },
"dependencies": { "dependencies": {
"@react-email/components": "0.0.31", "@react-email/components": "^0.0.42",
"@konobangu/email": "workspace:*",
"react": "^19.0.0", "react": "^19.0.0",
"react-email": "3.0.4" "react-email": "^4.0.16",
"@konobangu/email": "workspace:*"
}, },
"devDependencies": { "devDependencies": {
"@konobangu/typescript-config": "workspace:*", "@types/react": "19.0.1"
"@types/node": "22.10.1",
"@types/react": "19.0.1",
"typescript": "^5.7.2"
} }
} }

View File

@@ -1,5 +1,13 @@
 {
-  "extends": "@konobangu/typescript-config/nextjs.json",
+  "extends": "../../tsconfig.base.json",
+  "compilerOptions": {
+    "composite": true,
+    "jsx": "react-jsx",
+    "jsxImportSource": "react",
+    "module": "ESNext",
+    "moduleResolution": "bundler"
+  },
+  "references": [{ "path": "../../packages/email" }],
   "include": ["**/*.ts", "**/*.tsx"],
   "exclude": ["node_modules"]
 }

View File

@@ -0,0 +1,11 @@
```x-forwarded.json
{
"X-Forwarded-Host": "konobangu.com",
"X-Forwarded-Proto": "https"
}
```
#^https://konobangu.com/api*** statusCode://500
^https://konobangu.com/api*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/api$1
^https://konobangu.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000/$1 excludeFilter://^https://konobangu.com/api***
^wss://konobangu.com/*** reqHeaders://{x-forwarded.json} ws://127.0.0.1:5000/$1 excludeFilter://^wss://konobangu.com/api

View File

@@ -1,2 +0,0 @@
^https://konobangu.com/*** http://127.0.0.1:5000/$1 excludeFilter://^https://konobangu.com/api/***
^wss://konobangu.com/*** ws://127.0.0.1:5000/$1 ^excludeFilter://^wss://konobangu.com/api/***

View File

@@ -0,0 +1 @@
^https://mikanani.me/*** http://127.0.0.1:5005/$1 excludeFilter://^**/***.svg excludeFilter://^**/***.css excludeFilter://^**/***.js

View File

@@ -1 +0,0 @@
^https://konobangu.com/api/*** http://127.0.0.1:5001/api/$1

View File

@@ -1 +1 @@
{"filesOrder":["webui","recorder"],"selectedList":["webui","recorder"],"disabledDefalutRules":true} {"filesOrder":["konobangu","mikan_doppel"],"selectedList":["konobangu","mikan_doppel"],"disabledDefalutRules":true,"defalutRules":""}

View File

@@ -0,0 +1 @@
{"filesOrder":[]}

apps/proxy/Cargo.toml Normal file
View File

@@ -0,0 +1,19 @@
[package]
name = "proxy"
version = "0.1.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "proxy"
path = "src/lib.rs"
[[bin]]
name = "mikan_doppel"
path = "src/bin/mikan_doppel.rs"
[dependencies]
recorder = { workspace = true }
tokio = { workspace = true }
tracing-subscriber = { workspace = true }
tracing = { workspace = true }

View File

@@ -3,13 +3,13 @@
"version": "0.1.0", "version": "0.1.0",
"private": true, "private": true,
"scripts": { "scripts": {
"start": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF\" whistle run -p 8899 -t 30000 -D . --no-global-plugins", "whistle": "cross-env WHISTLE_MODE=\"prod|capture|keepXFF|x-forwarded-host|x-forwarded-proto\" whistle run -p 8899 -t 30000 -D .",
"dev": "pnpm run start" "mikan_doppel": "cargo run -p proxy --bin mikan_doppel",
"dev": "npm-run-all -p mikan_doppel whistle"
}, },
"keywords": [], "keywords": [],
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
"whistle": "^2.9.61", "whistle": "^2.9.99"
"cross-env": "^7.0.3"
} }
} }

View File

@@ -0,0 +1,22 @@
use std::time::Duration;

use recorder::{errors::RecorderResult, test_utils::mikan::MikanMockServer};
use tracing::Level;

#[allow(unused_variables)]
#[tokio::main]
async fn main() -> RecorderResult<()> {
    tracing_subscriber::fmt()
        .with_max_level(Level::DEBUG)
        .init();

    // Serve a local mock of mikanani.me on port 5005 for development.
    let mut mikan_server = MikanMockServer::new_with_port(5005).await.unwrap();

    // Keep the mock guards alive for the lifetime of the process.
    let resources_mock = mikan_server.mock_resources_with_doppel();
    let login_mock = mikan_server.mock_get_login_page();

    // Park the task so the mock server keeps running until interrupted.
    loop {
        tokio::time::sleep(Duration::from_secs(1)).await;
    }
}

View File

@@ -1,8 +0,0 @@
FROM mcr.microsoft.com/vscode/devcontainers/rust:0-1
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends postgresql-client \
&& cargo install sea-orm-cli cargo-insta \
&& chown -R vscode /usr/local/cargo
COPY .env /.env

View File

@@ -1,7 +0,0 @@
{
"name": "Konobangu Recorder",
"dockerComposeFile": "docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
"forwardPorts": [5001]
}

View File

@@ -1,40 +0,0 @@
version: "3"
services:
app:
build:
context: .
dockerfile: Dockerfile
command: sleep infinity
networks:
- db
- redis
volumes:
- ../..:/workspaces:cached
env_file:
- .env
db:
image: postgres:15.3-alpine
restart: unless-stopped
ports:
- 5432:5432
networks:
- db
volumes:
- postgres-data:/var/lib/postgresql/data
env_file:
- .env
redis:
image: redis:latest
restart: unless-stopped
ports:
- 6379:6379
networks:
- redis
volumes:
postgres-data:
networks:
db:
redis:

apps/recorder/.env Normal file
View File

@@ -0,0 +1,17 @@
HOST="konobangu.com"
DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
STORAGE_DATA_DIR = "./data"
AUTH_TYPE = "basic" # or oidc
BASIC_USER = "konobangu"
BASIC_PASSWORD = "konobangu"
# OIDC_ISSUER="https://auth.logto.io/oidc"
# OIDC_AUDIENCE = "https://konobangu.com/api"
# OIDC_CLIENT_ID = "client_id"
# OIDC_CLIENT_SECRET = "client_secret" # optional
# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# OIDC_EXTRA_CLAIM_KEY = ""
# OIDC_EXTRA_CLAIM_VALUE = ""
# MIKAN_PROXY = ""
# MIKAN_PROXY_AUTH_HEADER = ""
# MIKAN_NO_PROXY = ""
# MIKAN_PROXY_ACCEPT_INVALID_CERTS = "true"

apps/recorder/.env.dev Normal file
View File

@@ -0,0 +1,17 @@
HOST="konobangu.com"
DATABASE_URL = "postgres://konobangu:konobangu@localhost:5432/konobangu"
STORAGE_DATA_DIR = "./data"
AUTH_TYPE = "basic" # or oidc
BASIC_USER = "konobangu"
BASIC_PASSWORD = "konobangu"
# OIDC_ISSUER="https://auth.logto.io/oidc"
# OIDC_AUDIENCE = "https://konobangu.com/api"
# OIDC_CLIENT_ID = "client_id"
# OIDC_CLIENT_SECRET = "client_secret" # optional
# OIDC_EXTRA_SCOPES = "read:konobangu write:konobangu"
# OIDC_EXTRA_CLAIM_KEY = ""
# OIDC_EXTRA_CLAIM_VALUE = ""
MIKAN_PROXY = "http://127.0.0.1:8899"
# MIKAN_PROXY_AUTH_HEADER = ""
# MIKAN_NO_PROXY = ""
MIKAN_PROXY_ACCEPT_INVALID_CERTS = true
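Note the difference from `.env`: `.env.dev` points `MIKAN_PROXY` at the local whistle instance (port 8899 per `apps/proxy/package.json`), so mikan traffic is intercepted and can be served by the `mikan_doppel` fixture server; `MIKAN_PROXY_ACCEPT_INVALID_CERTS` is presumably needed because whistle re-signs HTTPS with its own local CA.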

View File

@@ -15,3 +15,17 @@ Cargo.lock
 # MSVC Windows builds of rustc generate these, which store debugging information
 *.pdb
+
+# Local
+.DS_Store
+*.local
+*.log*
+
+# Dist
+node_modules
+dist/
+
+temp/*
+!temp/.gitkeep
+tests/resources/mikan/classic_episodes/*/*
+!tests/resources/mikan/classic_episodes/parquet/tiny.parquet

View File

@@ -1,9 +1,21 @@
 [package]
 name = "recorder"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
+[features]
+default = ["jxl"]
+playground = ["dep:inquire", "dep:color-eyre", "dep:polars"]
+testcontainers = [
+  "dep:testcontainers",
+  "dep:testcontainers-modules",
+  "dep:testcontainers-ext",
+  "downloader/testcontainers",
+  "testcontainers-modules/postgres",
+]
+jxl = ["dep:jpegxl-rs", "dep:jpegxl-sys"]
+
 [lib]
 name = "recorder"
 path = "src/lib.rs"
@@ -13,94 +25,145 @@ name = "recorder_cli"
path = "src/bin/main.rs" path = "src/bin/main.rs"
required-features = [] required-features = []
[features] [[example]]
default = [] name = "mikan_collect_classic_eps"
testcontainers = [ path = "examples/mikan_collect_classic_eps.rs"
"dep:testcontainers", required-features = ["playground"]
"dep:testcontainers-modules",
"dep:bollard", [[example]]
] name = "mikan_doppel_season_subscription"
path = "examples/mikan_doppel_season_subscription.rs"
required-features = ["playground"]
[[example]]
name = "mikan_doppel_subscriber_subscription"
path = "examples/mikan_doppel_subscriber_subscription.rs"
required-features = ["playground"]
[[example]]
name = "playground"
path = "examples/playground.rs"
required-features = ["playground"]
[dependencies] [dependencies]
loco-rs = { version = "0.14" } downloader = { workspace = true }
serde = { version = "1", features = ["derive"] } util = { workspace = true }
serde_json = "1" util-derive = { workspace = true }
tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] } fetch = { workspace = true }
async-trait = "0.1.83"
tracing = "0.1" serde = { workspace = true }
chrono = "0.4" tokio = { workspace = true }
sea-orm = { version = "1", features = [ serde_json = { workspace = true }
async-trait = { workspace = true }
testcontainers = { workspace = true, optional = true }
testcontainers-modules = { workspace = true, optional = true }
testcontainers-ext = { workspace = true, optional = true, features = [
"tracing",
] }
tracing = { workspace = true }
axum = { workspace = true }
axum-extra = { workspace = true }
snafu = { workspace = true }
itertools = { workspace = true }
url = { workspace = true }
regex = { workspace = true }
lazy_static = { workspace = true }
quirks_path = { workspace = true }
futures = { workspace = true }
bytes = { workspace = true }
serde_with = { workspace = true }
moka = { workspace = true }
chrono = { workspace = true }
tracing-subscriber = { workspace = true }
mockito = { workspace = true }
color-eyre = { workspace = true, optional = true }
inquire = { workspace = true, optional = true }
convert_case = { workspace = true }
image = { workspace = true }
uuid = { workspace = true }
maplit = { workspace = true }
once_cell = { workspace = true }
rand = { workspace = true }
rust_decimal = { workspace = true }
base64 = { workspace = true }
nom = { workspace = true }
percent-encoding = { workspace = true }
num-traits = { workspace = true }
http = { workspace = true }
async-stream = { workspace = true }
serde_variant = { workspace = true }
tracing-appender = { workspace = true }
clap = { workspace = true }
ipnetwork = { workspace = true }
typed-builder = { workspace = true }
nanoid = { workspace = true }
webp = { workspace = true }
sea-orm = { version = "1.1", features = [
"sqlx-sqlite", "sqlx-sqlite",
"sqlx-postgres", "sqlx-postgres",
"runtime-tokio-rustls", "runtime-tokio",
"macros", "macros",
"debug-print", "debug-print",
] } ] }
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] } figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
axum = "0.8"
uuid = { version = "1.6.0", features = ["v4"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
sea-orm-migration = { version = "1", features = ["runtime-tokio-rustls"] }
reqwest = { version = "0.12", features = [
"charset",
"http2",
"json",
"macos-system-configuration",
"rustls-tls",
] }
thiserror = "2"
rss = "2" rss = "2"
bytes = "1.9"
itertools = "0.13.0"
url = "2.5"
fancy-regex = "0.14" fancy-regex = "0.14"
regex = "1.11" lightningcss = "1.0.0-alpha.66"
lazy_static = "1.5"
maplit = "1.0.2"
lightningcss = "1.0.0-alpha.61"
html-escape = "0.2.13" html-escape = "0.2.13"
opendal = { version = "0.51.0", features = ["default", "services-fs"] } opendal = { version = "0.53", features = ["default", "services-fs"] }
zune-image = "0.4.15" scraper = "0.23.1"
once_cell = "1.20.2" async-graphql = { version = "7", features = ["dynamic-schema"] }
reqwest-middleware = "0.4.0" async-graphql-axum = "7"
reqwest-retry = "0.7.0" seaography = { version = "1.1", features = [
reqwest-tracing = "0.5.5" "with-json",
scraper = "0.22.0" "with-chrono",
leaky-bucket = "1.1.2" "with-time",
serde_with = "3" "with-uuid",
jwt-authorizer = "0.15.0" "with-decimal",
futures = "0.3.31" "with-bigdecimal",
librqbit-core = "4" "with-postgres-array",
qbit-rs = { git = "https://github.com/lonelyhentxi/qbit.git", rev = "72d53138ebe", features = [ "with-json-as-scalar",
"default",
"builder",
] } ] }
testcontainers = { version = "0.23.1", features = [
"default",
"properties-config",
"watchdog",
"http_wait",
"reusable-containers",
], optional = true }
testcontainers-modules = { version = "0.11.4", optional = true }
color-eyre = "0.6"
log = "0.4.22"
anyhow = "1.0.95"
bollard = { version = "0.18", optional = true }
async-graphql = { version = "7.0.13", features = [] }
async-graphql-axum = "7.0.13"
fastrand = "2.3.0"
seaography = "1.1.2"
quirks_path = "0.1.0"
base64 = "0.22.1"
tower = "0.5.2" tower = "0.5.2"
axum-extra = "0.10.0" tower-http = { version = "0.6", features = [
"trace",
"catch-panic",
"timeout",
"add-extension",
"cors",
"fs",
"set-header",
"compression-full",
] }
tera = "1.20.0"
openidconnect = { version = "4" }
dotenvy = "0.15.7"
jpegxl-rs = { version = "0.11.2", optional = true }
jpegxl-sys = { version = "0.11.2", optional = true }
apalis = { version = "0.7", features = ["limit", "tracing", "catch-panic"] }
apalis-sql = { version = "0.7", features = ["postgres"] }
cocoon = { version = "0.4.3", features = ["getrandom", "thiserror"] }
reqwest_cookie_store = "0.8.0"
jwtk = "0.4.0"
mime_guess = "2.0.5"
icu_properties = "2.0.1"
icu = "2.0.0"
tracing-tree = "0.4.0"
num_cpus = "1.17.0"
headers-accept = "0.1.4"
polars = { version = "0.49.1", features = [
"parquet",
"lazy",
"diagonal_concat",
], optional = true }
[dev-dependencies] [dev-dependencies]
inquire = { workspace = true }
color-eyre = { workspace = true }
serial_test = "3" serial_test = "3"
loco-rs = { version = "0.14", features = ["testing"] } insta = { version = "1", features = ["redactions", "toml", "filters"] }
insta = { version = "1", features = ["redactions", "yaml", "filters"] } rstest = "0.25"
ctor = "0.4.0"

View File

@@ -1,138 +0,0 @@
# Loco configuration file documentation
# Application logging configuration
logger:
# Enable or disable logging.
enable: true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace: true
# Log level, options: trace, debug, info, warn or error.
level: debug
# Define the logging format. options: compact, pretty or Json
format: compact
# By default the logger only keeps logs that came from your code or from the `loco` framework.
# To see logs from all third-party libraries, uncomment the line below to override the logger filters.
# override_filter: trace
# Web server configuration
server:
# Port on which the server will listen. The server binding is 0.0.0.0:{PORT}.
port: 5001
# The UI hostname or IP address that mailers will point to.
host: http://webui.konobangu.com
# Out of the box middleware configuration. To disable a middleware, change its `enable` field to `false` or comment out the middleware block.
middlewares:
# Enable Etag cache header middleware
etag:
enable: true
# Limits the request payload size. Payloads bigger than this limit will cause the request to be blocked.
limit_payload:
# Enable/Disable the middleware.
enable: true
# The limit size. Can be b, kb, kib, mb, mib, gb, gib.
body_limit: 5mb
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
logger:
# Enable/Disable the middleware.
enable: true
# When your code panics, the request still returns a 500 status code.
catch_panic:
# Enable/Disable the middleware.
enable: true
# Timeout middleware for incoming requests. Requests that take longer than the configured timeout will be cut off and a 408 status code will be returned.
timeout_request:
# Enable/Disable the middleware.
enable: false
# Timeout duration in milliseconds.
timeout: 5000
cors:
enable: true
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://loco.rs
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
fallback:
enable: false
# Worker Configuration
workers:
# specifies the worker mode. Options:
# - BackgroundQueue - Workers operate asynchronously in the background, processing queued tasks.
# - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
# - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
mode: BackgroundQueue
# Mailer Configuration.
mailer:
# SMTP mailer configuration.
smtp:
# Enable/Disable smtp mailer.
enable: true
# SMTP server host, e.g. localhost, smtp.gmail.com
host: '{{ get_env(name="MAILER_HOST", default="localhost") }}'
# SMTP server port
port: 1025
# Use secure connection (SSL/TLS).
secure: false
# auth:
# user:
# password:
# Database Configuration
database:
# Database connection URI
uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging: true
# Set the timeout duration when acquiring a connection.
connect_timeout: 500
# Set the idle duration before closing a connection.
idle_timeout: 500
# Minimum number of connections for a pool.
min_connections: 1
# Maximum number of connections for a pool.
max_connections: 1
# Run migration up when application loaded
auto_migrate: true
# Truncate database when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_truncate: false
# Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_recreate: false
# Redis Configuration
redis:
# Redis connection URI
uri: '{{ get_env(name="REDIS_URL", default="redis://127.0.0.1:6379") }}'
# Dangerously flush all data in Redis on startup. This is a dangerous operation; make sure you only use this flag in dev environments or test mode
dangerously_flush: false
settings:
dal:
data_dir: ./data
mikan:
base_url: "https://mikanani.me/"
http_client:
exponential_backoff_max_retries: 3
leaky_bucket_max_tokens: 2
leaky_bucket_initial_tokens: 0
leaky_bucket_refill_tokens: 1
leaky_bucket_refill_interval: 500
auth:
auth_type: "oidc" # or "basic"
basic_user: "konobangu"
basic_password: "konobangu"
oidc_api_issuer: "https://some-oidc-auth.com/oidc"
oidc_api_audience: "https://konobangu.com/api"
oidc_extra_scopes: "read:konobangu,write:konobangu"
oidc_extra_claim_key: ""
oidc_extra_claim_value: ""

View File

@@ -1,125 +0,0 @@
# Loco configuration file documentation
# Application logging configuration
logger:
# Enable or disable logging.
enable: true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace: true
# Log level, options: trace, debug, info, warn or error.
level: debug
# Define the logging format. options: compact, pretty or Json
format: compact
# By default the logger only keeps logs that came from your code or from the `loco` framework.
# To see logs from all third-party libraries, uncomment the line below to override the logger filters.
# override_filter: trace
# Web server configuration
server:
# Port on which the server will listen. The server binding is 0.0.0.0:{PORT}.
port: 5001
# The UI hostname or IP address that mailers will point to.
host: http://webui.konobangu.com
# Out of the box middleware configuration. To disable a middleware, change its `enable` field to `false` or comment out the middleware block.
middlewares:
# Enable Etag cache header middleware
etag:
enable: true
# Limits the request payload size. Payloads bigger than this limit will cause the request to be blocked.
limit_payload:
# Enable/Disable the middleware.
enable: true
# The limit size. Can be b, kb, kib, mb, mib, gb, gib.
body_limit: 5mb
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
logger:
# Enable/Disable the middleware.
enable: true
# When your code panics, the request still returns a 500 status code.
catch_panic:
# Enable/Disable the middleware.
enable: true
# Timeout middleware for incoming requests. Requests that take longer than the configured timeout will be cut off and a 408 status code will be returned.
timeout_request:
# Enable/Disable the middleware.
enable: false
# Timeout duration in milliseconds.
timeout: 5000
cors:
enable: true
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://loco.rs
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
# Worker Configuration
workers:
# specifies the worker mode. Options:
# - BackgroundQueue - Workers operate asynchronously in the background, processing queued tasks.
# - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
# - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
mode: BackgroundQueue
# Mailer Configuration.
mailer:
# SMTP mailer configuration.
smtp:
# Enable/Disable smtp mailer.
enable: true
# SMTP server host, e.g. localhost, smtp.gmail.com
host: '{{ get_env(name="MAILER_HOST", default="localhost") }}'
# SMTP server port
port: 1025
# Use secure connection (SSL/TLS).
secure: false
# auth:
# user:
# password:
# Database Configuration
database:
# Database connection URI
uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging: true
# Set the timeout duration when acquiring a connection.
connect_timeout: 500
# Set the idle duration before closing a connection.
idle_timeout: 500
# Minimum number of connections for a pool.
min_connections: 1
# Maximum number of connections for a pool.
max_connections: 1
# Run migration up when application loaded
auto_migrate: true
# Truncate database when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_truncate: false
# Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_recreate: false
# Redis Configuration
redis:
# Redis connection URI
uri: '{{ get_env(name="REDIS_URL", default="redis://127.0.0.1:6379") }}'
# Dangerously flush all data in Redis on startup. This is a dangerous operation; make sure you only use this flag in dev environments or test mode
dangerously_flush: false
settings:
dal:
data_dir: ./temp
mikan:
http_client:
exponential_backoff_max_retries: 3
leaky_bucket_max_tokens: 2
leaky_bucket_initial_tokens: 0
leaky_bucket_refill_tokens: 1
leaky_bucket_refill_interval: 500
user_agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0"
base_url: "https://mikanani.me/"

View File

@@ -0,0 +1,584 @@
use std::collections::HashSet;
use chrono::{DateTime, Duration, FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
use fetch::{HttpClientConfig, fetch_html};
use itertools::Itertools;
use lazy_static::lazy_static;
use nom::{
IResult, Parser,
branch::alt,
bytes::complete::{tag, take, take_till1},
character::complete::space1,
combinator::map,
};
use recorder::{
errors::{RecorderError, RecorderResult},
extract::{
html::extract_inner_text_from_element_ref,
mikan::{MikanClient, MikanConfig, MikanEpisodeHash, MikanFansubHash},
},
};
use regex::Regex;
use scraper::{ElementRef, Html, Selector};
use snafu::FromString;
use url::Url;
lazy_static! {
static ref TEST_FOLDER: std::path::PathBuf =
if cfg!(any(test, debug_assertions, feature = "playground")) {
std::path::PathBuf::from(format!(
"{}/tests/resources/mikan/classic_episodes",
env!("CARGO_MANIFEST_DIR")
))
} else {
std::path::PathBuf::from("tests/resources/mikan/classic_episodes")
};
}
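// Pulls the total page count out of the inline bootpag initializer script,
// e.g. $('.classic-view-pagination2').bootpag({ total: 123, ... }) -> 123.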
lazy_static! {
static ref TOTAL_PAGE_REGEX: Regex =
Regex::new(r#"\$\(\'\.classic-view-pagination2\'\)\.bootpag\(\{\s*total:\s*(\d+)"#)
.unwrap();
}
pub struct MikanClassicEpisodeTableRow {
pub id: i32,
pub publish_at: DateTime<Utc>,
pub mikan_fansub_id: Option<String>,
pub fansub_name: Option<String>,
pub mikan_episode_id: String,
pub original_name: String,
pub magnet_link: Option<String>,
pub file_size: Option<String>,
pub torrent_link: Option<String>,
}
impl MikanClassicEpisodeTableRow {
fn timezone() -> FixedOffset {
FixedOffset::east_opt(8 * 3600).unwrap()
}
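// Mikan renders relative dates for recent rows: "今天" (today) and "昨天"
// (yesterday), resolved against the site's UTC+8 timezone.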
fn fixed_date_parser(input: &str) -> IResult<&str, NaiveDate> {
alt((
map(tag("今天"), move |_| {
Utc::now().with_timezone(&Self::timezone()).date_naive()
}),
map(tag("昨天"), move |_| {
Utc::now().with_timezone(&Self::timezone()).date_naive() - Duration::days(1)
}),
))
.parse(input)
}
fn formatted_date_parser(input: &str) -> IResult<&str, NaiveDate> {
let (remain, date_str) = take_till1(|c: char| c.is_whitespace()).parse(input)?;
let date = NaiveDate::parse_from_str(date_str, "%Y/%m/%d").map_err(|_| {
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
})?;
Ok((remain, date))
}
fn date_parser(input: &str) -> IResult<&str, NaiveDate> {
alt((Self::fixed_date_parser, Self::formatted_date_parser)).parse(input)
}
fn time_parser(input: &str) -> IResult<&str, NaiveTime> {
let (remain, time_str) = take(5usize).parse(input)?;
let time = NaiveTime::parse_from_str(time_str, "%H:%M").map_err(|_| {
nom::Err::Error(nom::error::Error::new(input, nom::error::ErrorKind::Verify))
})?;
Ok((remain, time))
}
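// Combines the date and time parsers, e.g. "今天 22:00" or "2013/10/04 22:08",
// interprets the result as UTC+8 local time, and normalizes it to UTC
// (so "2013/10/04 22:08" becomes 2013-10-04T14:08:00Z).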
fn extract_publish_at(text: &str) -> Option<DateTime<Utc>> {
let (_, (date, _, time)) = (Self::date_parser, space1, Self::time_parser)
.parse(text)
.ok()?;
let local_dt = Self::timezone()
.from_local_datetime(&date.and_time(time))
.single()?;
Some(local_dt.with_timezone(&Utc))
}
pub fn from_element_ref(
row: ElementRef<'_>,
rev_id: i32,
idx: i32,
mikan_base_url: &Url,
) -> RecorderResult<Self> {
let publish_at_selector = &Selector::parse("td:nth-of-type(1)").unwrap();
let fansub_selector = &Selector::parse("td:nth-of-type(2) > a").unwrap();
let original_name_selector =
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(1)").unwrap();
let magnet_link_selector =
&Selector::parse("td:nth-of-type(3) > a:nth-of-type(2)").unwrap();
let file_size_selector = &Selector::parse("td:nth-of-type(4)").unwrap();
let torrent_link_selector = &Selector::parse("td:nth-of-type(5) > a").unwrap();
let publish_at = row
.select(publish_at_selector)
.next()
.map(extract_inner_text_from_element_ref)
.and_then(|e| Self::extract_publish_at(&e));
let (mikan_fansub_hash, fansub_name) = row
.select(fansub_selector)
.next()
.and_then(|e| {
e.attr("href")
.and_then(|s| mikan_base_url.join(s).ok())
.and_then(|u| MikanFansubHash::from_homepage_url(&u))
.map(|h| (h, extract_inner_text_from_element_ref(e)))
})
.unzip();
let (mikan_episode_hash, original_name) = row
.select(original_name_selector)
.next()
.and_then(|el| {
el.attr("href")
.and_then(|s| mikan_base_url.join(s).ok())
.and_then(|u| MikanEpisodeHash::from_homepage_url(&u))
.map(|h| (h, extract_inner_text_from_element_ref(el)))
})
.unzip();
let magnet_link = row
.select(magnet_link_selector)
.next()
.and_then(|el| el.attr("data-clipboard-text"));
let file_size = row
.select(file_size_selector)
.next()
.map(extract_inner_text_from_element_ref);
let torrent_link = row
.select(torrent_link_selector)
.next()
.and_then(|el| el.attr("href"));
if let (Some(mikan_episode_hash), Some(original_name), Some(publish_at)) = (
mikan_episode_hash.as_ref(),
original_name.as_ref(),
publish_at.as_ref(),
) {
Ok(Self {
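// Synthesize a stable id from the reversed page index and the in-page row
// index; this assumes fewer than 1000 rows per page.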
id: rev_id * 1000 + idx,
publish_at: *publish_at,
mikan_fansub_id: mikan_fansub_hash.map(|h| h.mikan_fansub_id.clone()),
fansub_name,
mikan_episode_id: mikan_episode_hash.mikan_episode_id.clone(),
original_name: original_name.clone(),
magnet_link: magnet_link.map(|s| s.to_string()),
file_size: file_size.map(|s| s.to_string()),
torrent_link: torrent_link.map(|s| s.to_string()),
})
} else {
let mut missing_fields = vec![];
if mikan_episode_hash.is_none() {
missing_fields.push("mikan_episode_id");
}
if original_name.is_none() {
missing_fields.push("original_name");
}
if publish_at.is_none() {
missing_fields.push("publish_at");
}
Err(RecorderError::without_source(format!(
"Failed to parse episode table row, missing fields: {missing_fields:?}, row \
index: {idx}"
)))
}
}
}
pub struct MikanClassicEpisodeTablePage {
pub page: i32,
pub total: i32,
pub html: String,
pub rows: Vec<MikanClassicEpisodeTableRow>,
}
impl MikanClassicEpisodeTablePage {
pub fn from_html(
html: String,
mikan_base_url: &Url,
page: i32,
updated_info: Option<(i32, i32)>,
) -> RecorderResult<Self> {
let tr_selector = &Selector::parse("tbody tr").unwrap();
let doc = Html::parse_document(&html);
if let Some(mut total) = TOTAL_PAGE_REGEX
.captures(&html)
.and_then(|c| c.get(1))
.and_then(|s| s.as_str().parse::<i32>().ok())
{
if let Some((_, update_total)) = updated_info {
total = update_total;
}
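// Pages are numbered newest-first, so cache files are keyed by rev_id (the
// page counted from the oldest end), which stays stable as new pages appear.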
let rev_id = total - page;
let rows = doc
.select(tr_selector)
.rev()
.enumerate()
.map(|(idx, tr)| {
MikanClassicEpisodeTableRow::from_element_ref(
tr,
rev_id,
idx as i32,
mikan_base_url,
)
})
.collect::<RecorderResult<Vec<_>>>()?;
Ok(Self {
page,
total,
html,
rows,
})
} else {
Err(RecorderError::without_source(
"Failed to parse pagination meta and rows".into(),
))
}
}
pub fn save_to_files(&self) -> RecorderResult<()> {
use polars::prelude::*;
let rev_id = self.total - self.page;
let parquet_path = TEST_FOLDER.join(format!("parquet/rev_{rev_id}.parquet"));
let csv_path = TEST_FOLDER.join(format!("csv/rev_{rev_id}.csv"));
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
std::fs::write(html_path, self.html.clone())?;
let mut id_vec = Vec::new();
let mut publish_at_vec = Vec::new();
let mut mikan_fansub_id_vec = Vec::new();
let mut fansub_name_vec = Vec::new();
let mut mikan_episode_id_vec = Vec::new();
let mut original_name_vec = Vec::new();
let mut magnet_link_vec = Vec::new();
let mut file_size_vec = Vec::new();
let mut torrent_link_vec = Vec::new();
for row in &self.rows {
id_vec.push(row.id);
publish_at_vec.push(row.publish_at.to_rfc3339());
mikan_fansub_id_vec.push(row.mikan_fansub_id.clone());
fansub_name_vec.push(row.fansub_name.clone());
mikan_episode_id_vec.push(row.mikan_episode_id.clone());
original_name_vec.push(row.original_name.clone());
magnet_link_vec.push(row.magnet_link.clone());
file_size_vec.push(row.file_size.clone());
torrent_link_vec.push(row.torrent_link.clone());
}
let df = df! [
"id" => id_vec,
"publish_at_timestamp" => publish_at_vec,
"mikan_fansub_id" => mikan_fansub_id_vec,
"fansub_name" => fansub_name_vec,
"mikan_episode_id" => mikan_episode_id_vec,
"original_name" => original_name_vec,
"magnet_link" => magnet_link_vec,
"file_size" => file_size_vec,
"torrent_link" => torrent_link_vec,
]
.map_err(|e| {
let message = format!("Failed to create DataFrame: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
let mut parquet_file = std::fs::File::create(&parquet_path)?;
ParquetWriter::new(&mut parquet_file)
.finish(&mut df.clone())
.map_err(|e| {
let message = format!("Failed to write parquet file: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
let mut csv_file = std::fs::File::create(&csv_path)?;
CsvWriter::new(&mut csv_file)
.include_header(true)
.with_quote_style(QuoteStyle::Always)
.finish(&mut df.clone())
.map_err(|e| {
let message = format!("Failed to write csv file: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
println!(
"[{}/{}] Saved {} rows to rev_{}.{{parquet,html,csv}}",
self.page,
self.total,
self.rows.len(),
rev_id
);
Ok(())
}
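// Resume support: list the rev ids whose csv exports already exist on disk
// and return only the ones still missing.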
pub fn waiting_rev_ids(total: i32) -> RecorderResult<Vec<i32>> {
let dir = TEST_FOLDER.join("csv");
let files = std::fs::read_dir(dir)?;
let rev_ids = files
.filter_map(|f| f.ok())
.filter_map(|f| {
f.path().file_stem().and_then(|s| {
s.to_str().and_then(|s| {
if s.starts_with("rev_") {
s.replace("rev_", "").parse::<i32>().ok()
} else {
None
}
})
})
})
.collect::<HashSet<_>>();
Ok((0..total)
.filter(|rev_id| !rev_ids.contains(rev_id))
.collect::<Vec<_>>())
}
}
async fn scrape_mikan_classic_episode_table_page(
mikan_client: &MikanClient,
page: i32,
updated_info: Option<(i32, i32)>,
) -> RecorderResult<MikanClassicEpisodeTablePage> {
let mikan_base_url = mikan_client.base_url();
let url = mikan_base_url.join(&format!("/Home/Classic/{page}"))?;
if let Some((rev_id, update_total)) = updated_info.as_ref() {
let html_path = TEST_FOLDER.join(format!("html/rev_{rev_id}.html"));
if html_path.exists() {
let html = std::fs::read_to_string(&html_path)?;
println!("[{page}/{update_total}] html exists, skipping fetch");
return MikanClassicEpisodeTablePage::from_html(
html,
mikan_base_url,
page,
updated_info,
);
}
}
let total = if let Some((_, update_total)) = updated_info.as_ref() {
update_total.to_string()
} else {
"Unknown".to_string()
};
println!("[{page}/{total}] fetching html...");
let html = fetch_html(mikan_client, url).await?;
println!("[{page}/{total}] fetched html done");
std::fs::write(TEST_FOLDER.join("html/temp.html"), html.clone())?;
MikanClassicEpisodeTablePage::from_html(html, mikan_base_url, page, updated_info)
}
async fn scrape_mikan_classic_episode_table_page_from_rev_id(
mikan_client: &MikanClient,
total: i32,
rev_idx: i32,
) -> RecorderResult<MikanClassicEpisodeTablePage> {
let page = total - rev_idx;
scrape_mikan_classic_episode_table_page(mikan_client, page, Some((rev_idx, total))).await
}
async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()> {
use polars::prelude::*;
let dir = TEST_FOLDER.join("parquet");
let files = std::fs::read_dir(dir)?;
let parquet_paths = files
.filter_map(|f| f.ok())
.filter_map(|f| {
let path = f.path();
if let Some(ext) = path.extension()
&& ext == "parquet"
&& path
.file_stem()
.is_some_and(|f| f.to_string_lossy().starts_with("rev_"))
{
Some(path)
} else {
None
}
})
.collect::<Vec<_>>();
if parquet_paths.is_empty() {
return Err(RecorderError::without_source(
"No parquet files found to merge".into(),
));
}
println!("Found {} parquet files to merge", parquet_paths.len());
// Read and merge all the per-revision parquet files
let mut all_dfs = Vec::new();
for path in &parquet_paths {
println!("Reading {path:?}");
let file = std::fs::File::open(path)?;
let df = ParquetReader::new(file).finish().map_err(|e| {
let message = format!("Failed to read parquet file {path:?}: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
all_dfs.push(df);
}
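// Diagonal concat tolerates schema drift between revisions; sort newest-first,
// then keep the first row per (mikan_fansub_id, mikan_episode_id) to dedupe.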
let lazy_frames: Vec<LazyFrame> = all_dfs.into_iter().map(|df| df.lazy()).collect();
let merged_df = concat_lf_diagonal(&lazy_frames, UnionArgs::default())
.map_err(|e| {
let message = format!("Failed to concat DataFrames: {e}");
RecorderError::with_source(Box::new(e), message)
})?
.sort(
["publish_at_timestamp"],
SortMultipleOptions::default().with_order_descending(true),
)
.unique(
Some(vec![
"mikan_fansub_id".to_string(),
"mikan_episode_id".to_string(),
]),
UniqueKeepStrategy::First,
)
.collect()
.map_err(|e| {
let message = format!("Failed to collect lazy DataFrame: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
fn select_columns_and_write(
merged_df: DataFrame,
name: &str,
columns: &[&str],
) -> RecorderResult<()> {
let result_df = merged_df
.lazy()
.sort(["publish_at_timestamp"], SortMultipleOptions::default())
.select(columns.iter().map(|c| col(*c)).collect_vec())
.collect()
.map_err(|e| {
let message = format!("Failed to sort and select columns: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
let output_path = TEST_FOLDER.join(format!("parquet/{name}.parquet"));
let mut output_file = std::fs::File::create(&output_path)?;
ParquetWriter::new(&mut output_file)
.set_parallel(true)
.with_compression(ParquetCompression::Zstd(Some(
ZstdLevel::try_new(22).unwrap(),
)))
.finish(&mut result_df.clone())
.map_err(|e| {
let message = format!("Failed to write merged parquet file: {e}");
RecorderError::with_source(Box::new(e), message)
})?;
println!("Merged {} rows into {output_path:?}", result_df.height());
Ok(())
}
select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
// select_columns_and_write(
// merged_df.clone(),
// "lite",
// &[
// "mikan_fansub_id",
// "fansub_name",
// "mikan_episode_id",
// "original_name",
// ],
// )?;
// select_columns_and_write(
// merged_df,
// "full",
// &[
// "id",
// "publish_at_timestamp",
// "mikan_fansub_id",
// "fansub_name",
// "mikan_episode_id",
// "original_name",
// "magnet_link",
// "file_size",
// "torrent_link",
// ],
// )?;
Ok(())
}
#[tokio::main]
async fn main() -> RecorderResult<()> {
std::fs::create_dir_all(TEST_FOLDER.join("html"))?;
std::fs::create_dir_all(TEST_FOLDER.join("parquet"))?;
std::fs::create_dir_all(TEST_FOLDER.join("csv"))?;
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
http_client: HttpClientConfig {
exponential_backoff_max_retries: Some(3),
leaky_bucket_max_tokens: Some(2),
leaky_bucket_initial_tokens: Some(1),
leaky_bucket_refill_tokens: Some(1),
leaky_bucket_refill_interval: Some(std::time::Duration::from_millis(1000)),
user_agent: Some(
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
.to_string(),
),
..Default::default()
},
base_url: Url::parse("https://mikanani.me")?,
})
.await?;
let first_page_and_pagination_info =
scrape_mikan_classic_episode_table_page(&mikan_scrape_client, 1, None).await?;
let total_page = first_page_and_pagination_info.total;
first_page_and_pagination_info.save_to_files()?;
let next_rev_ids = MikanClassicEpisodeTablePage::waiting_rev_ids(total_page)?;
for todo_rev_id in next_rev_ids {
let page = scrape_mikan_classic_episode_table_page_from_rev_id(
&mikan_scrape_client,
total_page,
todo_rev_id,
)
.await?;
page.save_to_files()?;
}
// Merge all parquet files into the final deduplicated outputs
println!("\nMerging all parquet files...");
merge_mikan_classic_episodes_and_strip_columns().await?;
println!("Merge completed!");
Ok(())
}

View File

@@ -0,0 +1,249 @@
use std::time::Duration;
use color_eyre::{Result, eyre::OptionExt};
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use inquire::{Password, Text, validator::Validation};
use recorder::{
crypto::UserPassCredential,
extract::mikan::{
MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
},
test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
};
use scraper::Html;
use tokio::fs;
use url::Url;
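// MikanDoppelPath (from recorder::test_utils) maps a mikanani.me URL to a
// local fixture file; exists_any() also treats a recorded meta (e.g. a saved
// 404) as present, so failed fetches are not retried on every run.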
#[tokio::main]
async fn main() -> Result<()> {
tracing_subscriber::fmt()
.with_max_level(tracing::Level::INFO)
.init();
std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
http_client: HttpClientConfig {
exponential_backoff_max_retries: Some(3),
leaky_bucket_max_tokens: Some(2),
leaky_bucket_initial_tokens: Some(0),
leaky_bucket_refill_tokens: Some(1),
leaky_bucket_refill_interval: Some(Duration::from_millis(1000)),
user_agent: Some(
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
.to_string(),
),
..Default::default()
},
base_url: Url::parse("https://mikanani.me")?,
})
.await?;
let username_validator = |input: &str| {
if input.trim().is_empty() {
Ok(Validation::Invalid("Username cannot be empty".into()))
} else {
Ok(Validation::Valid)
}
};
let password_validator = |input: &str| {
if input.trim().is_empty() {
Ok(Validation::Invalid("Password cannot be empty".into()))
} else {
Ok(Validation::Valid)
}
};
let username = Text::new("Please enter your mikan username:")
.with_validator(username_validator)
.prompt()?;
let password = Password::new("Please enter your mikan password:")
.without_confirmation()
.with_display_mode(inquire::PasswordDisplayMode::Masked)
.with_validator(password_validator)
.prompt()?;
let mikan_scrape_client = mikan_scrape_client
.fork_with_userpass_credential(UserPassCredential {
username,
password,
user_agent: None,
cookies: None,
})
.await?;
tracing::info!("Checking if logged in...");
if !mikan_scrape_client.has_login().await? {
tracing::info!("Logging in to mikan...");
mikan_scrape_client.login().await?;
tracing::info!("Logged in to mikan");
}
let mikan_base_url = mikan_scrape_client.base_url().clone();
tracing::info!("Scraping season subscription...");
let season_subscription =
fs::read("tests/resources/mikan/BangumiCoverFlow-2025-spring.html").await?;
let html = Html::parse_fragment(String::from_utf8(season_subscription)?.as_str());
let bangumi_index_list =
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment(&html, &mikan_base_url);
for bangumi_index in bangumi_index_list {
let bangumi_meta = {
let bangumi_expand_subscribed_url = build_mikan_bangumi_expand_subscribed_url(
mikan_base_url.clone(),
bangumi_index.mikan_bangumi_id.as_ref(),
);
let bangumi_expand_subscribed_doppel_path =
MikanDoppelPath::new(bangumi_expand_subscribed_url.clone());
tracing::info!(
bangumi_title = bangumi_index.bangumi_title,
"Scraping bangumi expand subscribed..."
);
let bangumi_expand_subscribed_data =
if !bangumi_expand_subscribed_doppel_path.exists_any() {
let bangumi_expand_subscribed_data =
fetch_html(&mikan_scrape_client, bangumi_expand_subscribed_url).await?;
bangumi_expand_subscribed_doppel_path.write(&bangumi_expand_subscribed_data)?;
tracing::info!(
bangumi_title = bangumi_index.bangumi_title,
"Bangumi expand subscribed saved"
);
bangumi_expand_subscribed_data
} else {
tracing::info!(
bangumi_title = bangumi_index.bangumi_title,
"Bangumi expand subscribed already exists"
);
String::from_utf8(bangumi_expand_subscribed_doppel_path.read()?)?
};
let html = Html::parse_fragment(&bangumi_expand_subscribed_data);
extract_mikan_bangumi_meta_from_expand_subscribed_fragment(
&html,
bangumi_index.clone(),
mikan_base_url.clone(),
)
.ok_or_eyre(format!(
"Failed to extract bangumi meta from expand subscribed fragment: {:?}",
bangumi_index.bangumi_title
))
}?;
{
if let Some(poster_url) = bangumi_meta.origin_poster_src.as_ref() {
let poster_doppel_path = MikanDoppelPath::new(poster_url.clone());
tracing::info!(
title = bangumi_meta.bangumi_title,
"Scraping bangumi poster..."
);
if !poster_doppel_path.exists_any() {
let poster_data = fetch_image(&mikan_scrape_client, poster_url.clone()).await?;
poster_doppel_path.write(&poster_data)?;
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi poster saved");
} else {
tracing::info!(
title = bangumi_meta.bangumi_title,
"Bangumi poster already exists"
);
}
}
}
{
let bangumi_homepage_url = bangumi_meta
.bangumi_hash()
.build_homepage_url(mikan_base_url.clone());
let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
tracing::info!(
title = bangumi_meta.bangumi_title,
"Scraping bangumi homepage..."
);
if !bangumi_homepage_doppel_path.exists_any() {
let bangumi_homepage_data =
fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi homepage saved");
} else {
tracing::info!(
title = bangumi_meta.bangumi_title,
"Bangumi homepage already exists"
);
}
}
let rss_items = {
let bangumi_rss_url = bangumi_meta
.bangumi_hash()
.build_rss_url(mikan_base_url.clone());
let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
tracing::info!(
title = bangumi_meta.bangumi_title,
"Scraping bangumi rss..."
);
let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
tracing::info!(title = bangumi_meta.bangumi_title, "Bangumi rss saved");
bangumi_rss_data
} else {
tracing::info!(
title = bangumi_meta.bangumi_title,
"Bangumi rss already exists"
);
String::from_utf8(bangumi_rss_doppel_path.read()?)?
};
let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
rss_items
.into_iter()
.map(MikanRssEpisodeItem::try_from)
.collect::<Result<Vec<_>, _>>()
}?;
for rss_item in rss_items {
{
let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
let episode_homepage_doppel_path =
MikanDoppelPath::new(episode_homepage_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode...");
if !episode_homepage_doppel_path.exists_any() {
let episode_homepage_data =
fetch_html(&mikan_scrape_client, episode_homepage_url).await?;
episode_homepage_doppel_path.write(&episode_homepage_data)?;
tracing::info!(title = rss_item.title, "Episode saved");
} else {
tracing::info!(title = rss_item.title, "Episode already exists");
};
}
{
let episode_torrent_url = rss_item.torrent_link;
let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
if !episode_torrent_doppel_path.exists_any() {
match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
Ok(episode_torrent_data) => {
episode_torrent_doppel_path.write(&episode_torrent_data)?;
tracing::info!(title = rss_item.title, "Episode torrent saved");
}
Err(e) => {
if let FetchError::ReqwestError { source } = &e
&& source
.status()
.is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
{
tracing::warn!(
title = rss_item.title,
"Episode torrent not found, maybe deleted since new version"
);
episode_torrent_doppel_path
.write_meta(MikanDoppelMeta { status: 404 })?;
} else {
Err(e)?;
}
}
}
} else {
tracing::info!(title = rss_item.title, "Episode torrent already exists");
}
}
}
}
tracing::info!("Scraping season subscription done");
Ok(())
}

View File

@@ -0,0 +1,215 @@
use std::time::Duration;
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use recorder::{
errors::RecorderResult,
extract::mikan::{
MikanClient, MikanConfig, MikanRssEpisodeItem,
extract_mikan_episode_meta_from_episode_homepage_html,
},
test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
};
use scraper::Html;
use tokio::fs;
use url::Url;
#[tokio::main]
async fn main() -> RecorderResult<()> {
tracing_subscriber::fmt()
.with_max_level(tracing::Level::INFO)
.init();
std::env::set_current_dir(std::path::Path::new("apps/recorder"))?;
let mikan_scrape_client = MikanClient::from_config(MikanConfig {
http_client: HttpClientConfig {
exponential_backoff_max_retries: Some(3),
leaky_bucket_max_tokens: Some(2),
leaky_bucket_initial_tokens: Some(0),
leaky_bucket_refill_tokens: Some(1),
leaky_bucket_refill_interval: Some(Duration::from_millis(500)),
user_agent: Some(
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
.to_string(),
),
..Default::default()
},
base_url: Url::parse("https://mikanani.me")?,
})
.await?;
let mikan_base_url = mikan_scrape_client.base_url().clone();
tracing::info!("Scraping subscriber subscription...");
let subscriber_subscription =
fs::read("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
let rss_items: Vec<MikanRssEpisodeItem> = channel
.items
.into_iter()
.map(MikanRssEpisodeItem::try_from)
.collect::<Result<Vec<_>, _>>()?;
for rss_item in rss_items {
let episode_homepage_meta = {
tracing::info!(title = rss_item.title, "Scraping episode homepage...");
let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
let episode_homepage_doppel_path = MikanDoppelPath::new(episode_homepage_url.clone());
let episode_homepage_data = if !episode_homepage_doppel_path.exists_any() {
let episode_homepage_data =
fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
episode_homepage_doppel_path.write(&episode_homepage_data)?;
tracing::info!(title = rss_item.title, "Episode homepage saved");
episode_homepage_data
} else {
tracing::info!(title = rss_item.title, "Episode homepage already exists");
String::from_utf8(episode_homepage_doppel_path.read()?)?
};
let html = Html::parse_document(&episode_homepage_data);
extract_mikan_episode_meta_from_episode_homepage_html(
&html,
mikan_base_url.clone(),
episode_homepage_url,
)
}?;
{
let episode_torrent_url = rss_item.torrent_link;
let episode_torrent_doppel_path = MikanDoppelPath::new(episode_torrent_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
if !episode_torrent_doppel_path.exists_any() {
match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
Ok(episode_torrent_data) => {
episode_torrent_doppel_path.write(&episode_torrent_data)?;
tracing::info!(title = rss_item.title, "Episode torrent saved");
}
Err(e) => {
if let FetchError::ReqwestError { source } = &e
&& source
.status()
.is_some_and(|status| status == reqwest::StatusCode::NOT_FOUND)
{
tracing::warn!(
title = rss_item.title,
"Episode torrent not found, maybe deleted since new version"
);
episode_torrent_doppel_path
.write_meta(MikanDoppelMeta { status: 404 })?;
} else {
Err(e)?;
}
}
}
tracing::info!(title = rss_item.title, "Episode torrent saved");
} else {
tracing::info!(title = rss_item.title, "Episode torrent already exists");
}
}
{
if let Some(episode_poster_url) = episode_homepage_meta.origin_poster_src.as_ref() {
let episode_poster_doppel_path = MikanDoppelPath::new(episode_poster_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode poster...");
if !episode_poster_doppel_path.exists_any() {
let episode_poster_data =
fetch_image(&mikan_scrape_client, episode_poster_url.clone()).await?;
episode_poster_doppel_path.write(&episode_poster_data)?;
tracing::info!(title = rss_item.title, "Episode poster saved");
} else {
tracing::info!(title = rss_item.title, "Episode poster already exists");
}
}
}
{
let bangumi_homepage_url = episode_homepage_meta
.bangumi_hash()
.build_homepage_url(mikan_base_url.clone());
let bangumi_homepage_doppel_path = MikanDoppelPath::new(bangumi_homepage_url.clone());
tracing::info!(title = rss_item.title, "Scraping bangumi homepage...");
if !bangumi_homepage_doppel_path.exists_any() {
let bangumi_homepage_data =
fetch_html(&mikan_scrape_client, bangumi_homepage_url).await?;
bangumi_homepage_doppel_path.write(&bangumi_homepage_data)?;
tracing::info!(title = rss_item.title, "Bangumi homepage saved");
} else {
tracing::info!(title = rss_item.title, "Bangumi homepage already exists");
};
}
{
let bangumi_rss_url = episode_homepage_meta
.bangumi_hash()
.build_rss_url(mikan_base_url.clone());
let bangumi_rss_doppel_path = MikanDoppelPath::new(bangumi_rss_url.clone());
tracing::info!(title = rss_item.title, "Scraping bangumi rss...");
let bangumi_rss_data = if !bangumi_rss_doppel_path.exists_any() {
let bangumi_rss_data = fetch_html(&mikan_scrape_client, bangumi_rss_url).await?;
bangumi_rss_doppel_path.write(&bangumi_rss_data)?;
tracing::info!(title = rss_item.title, "Bangumi rss saved");
bangumi_rss_data
} else {
tracing::info!(title = rss_item.title, "Bangumi rss already exists");
String::from_utf8(bangumi_rss_doppel_path.read()?)?
};
let channel = rss::Channel::read_from(bangumi_rss_data.as_bytes())?;
let rss_items: Vec<MikanRssEpisodeItem> = channel
.items
.into_iter()
.map(MikanRssEpisodeItem::try_from)
.collect::<Result<Vec<_>, _>>()?;
for rss_item in rss_items {
{
tracing::info!(title = rss_item.title, "Scraping episode homepage...");
let episode_homepage_url = rss_item.build_homepage_url(mikan_base_url.clone());
let episode_homepage_doppel_path =
MikanDoppelPath::new(episode_homepage_url.clone());
if !episode_homepage_doppel_path.exists_any() {
let episode_homepage_data =
fetch_html(&mikan_scrape_client, episode_homepage_url.clone()).await?;
episode_homepage_doppel_path.write(&episode_homepage_data)?;
tracing::info!(title = rss_item.title, "Episode homepage saved");
} else {
tracing::info!(title = rss_item.title, "Episode homepage already exists");
};
};
{
let episode_torrent_url = rss_item.torrent_link;
let episode_torrent_doppel_path =
MikanDoppelPath::new(episode_torrent_url.clone());
tracing::info!(title = rss_item.title, "Scraping episode torrent...");
if !episode_torrent_doppel_path.exists_any() {
match fetch_bytes(&mikan_scrape_client, episode_torrent_url).await {
Ok(episode_torrent_data) => {
episode_torrent_doppel_path.write(&episode_torrent_data)?;
tracing::info!(title = rss_item.title, "Episode torrent saved");
}
Err(e) => {
if let FetchError::ReqwestError { source } = &e
&& source.status().is_some_and(|status| {
status == reqwest::StatusCode::NOT_FOUND
})
{
tracing::warn!(
title = rss_item.title,
"Episode torrent not found, maybe deleted since new \
version"
);
episode_torrent_doppel_path
.write_meta(MikanDoppelMeta { status: 404 })?;
} else {
Err(e)?;
}
}
}
tracing::info!(title = rss_item.title, "Episode torrent saved");
} else {
tracing::info!(title = rss_item.title, "Episode torrent already exists");
}
}
}
}
}
tracing::info!("Scraping subscriber subscription done");
Ok(())
}

View File

@@ -1,66 +1,33 @@
-#![allow(unused_imports)]
-use color_eyre::eyre::Context;
-use itertools::Itertools;
-use loco_rs::{
-    app::Hooks,
-    boot::{BootResult, StartMode},
-    environment::Environment,
-    prelude::*,
-};
+#![feature(duration_constructors_lite)]
+use std::{sync::Arc, time::Duration};
+
+use apalis_sql::postgres::PostgresStorage;
 use recorder::{
-    app::App,
-    extract::mikan::parse_mikan_rss_items_from_rss_link,
-    migrations::Migrator,
-    models::{
-        subscribers::SEED_SUBSCRIBER,
-        subscriptions::{self, SubscriptionCreateFromRssDto},
+    app::AppContextTrait,
+    errors::RecorderResult,
+    test_utils::{
+        app::TestingAppContext,
+        database::{TestingDatabaseServiceConfig, build_testing_database_service},
     },
 };
-use sea_orm_migration::MigratorTrait;
-
-async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> color_eyre::eyre::Result<()> {
-    let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
-    // let rss_link =
-    //     "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
-    let subscription = if let Some(subscription) = subscriptions::Entity::find()
-        .filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
-        .one(&ctx.db)
-        .await?
-    {
-        subscription
-    } else {
-        subscriptions::Model::add_subscription(
-            ctx,
-            subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
-                rss_link: rss_link.to_string(),
-                display_name: String::from("Mikan Project - 我的番组"),
-                enabled: Some(true),
-            }),
-            1,
-        )
-        .await?
-    };
-    subscription.pull_subscription(ctx).await?;
-    Ok(())
-}
-
-async fn init() -> color_eyre::eyre::Result<AppContext> {
-    color_eyre::install()?;
-    let ctx = loco_rs::cli::playground::<App>().await?;
-    let BootResult {
-        app_context: ctx, ..
-    } = loco_rs::boot::run_app::<App>(&StartMode::ServerOnly, ctx).await?;
-    Migrator::up(&ctx.db, None).await?;
-    Ok(ctx)
-}
-
 #[tokio::main]
-async fn main() -> color_eyre::eyre::Result<()> {
-    let ctx = init().await?;
-    pull_mikan_bangumi_rss(&ctx).await?;
+async fn main() -> RecorderResult<()> {
+    let app_ctx = {
+        let db_service = build_testing_database_service(TestingDatabaseServiceConfig {
+            auto_migrate: false,
+        })
+        .await?;
+        Arc::new(TestingAppContext::builder().db(db_service).build())
+    };
+
+    let db = app_ctx.db();
+
+    PostgresStorage::setup(db.get_postgres_connection_pool()).await?;
+
+    dbg!(db.get_postgres_connection_pool().connect_options());
+
+    tokio::time::sleep(Duration::from_hours(1)).await;
+
     Ok(())
 }

View File

@@ -0,0 +1,114 @@
# Application logging configuration
[logger]
# Enable or disable logging.
enable = true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace = true
# Log level, options: trace, debug, info, warn or error.
level = "debug"
# Define the logging format. options: compact, pretty or Json
format = "compact"
# By default the logger only keeps logs that came from your code or from the `loco` framework.
# To see logs from all third-party libraries, uncomment the line below to override the logger filters.
# override_filter: trace
# Web server configuration
[server]
# Port on which the server will listen. The server binding is 0.0.0.0:{PORT}.
port = 5001
binding = "0.0.0.0"
# The UI hostname or IP address that mailers will point to.
host = '{{ get_env(name="HOST", default="localhost") }}'
# Out of the box middleware configuration. To disable a middleware, change its `enable` field to `false` or comment out the middleware block.
# Enable Etag cache header middleware
[server.middlewares.etag]
enable = true
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
[server.middlewares.request_id]
enable = true
[server.middlewares.logger]
enable = true
# When your code panics, the request still returns a 500 status code.
[server.middlewares.catch_panic]
enable = true
# Timeout middleware for incoming requests. Requests that take longer than the configured timeout will be cut off and a 408 status code will be returned.
[server.middlewares.timeout_request]
enable = false
# Timeout duration in milliseconds.
timeout = 5000
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://konobangu.com
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
[server.middlewares.cors]
enable = true
[server.middlewares.compression]
enable = true
# Database Configuration
[database]
# Database connection URI
uri = '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@localhost:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging = true
# Set the timeout duration when acquiring a connection.
connect_timeout = 500
# Set the idle duration before closing a connection.
idle_timeout = 500
# Minimum number of connections for a pool.
min_connections = 1
# Maximum number of connections for a pool.
max_connections = 10
# Run migrations up when the application loads
auto_migrate = true
[storage]
data_dir = '{{ get_env(name="STORAGE_DATA_DIR", default="./data") }}'
[mikan]
base_url = "https://mikanani.me/"
[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 1
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500
[mikan.http_client.proxy]
server = '{{ get_env(name="MIKAN_PROXY", default = "") }}'
auth_header = '{{ get_env(name="MIKAN_PROXY_AUTH_HEADER", default = "") }}'
no_proxy = '{{ get_env(name="MIKAN_NO_PROXY", default = "") }}'
accept_invalid_certs = '{{ get_env(name="MIKAN_PROXY_ACCEPT_INVALID_CERTS", default = "false") }}'
[auth]
auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
basic_password = '{{ get_env(name="BASIC_PASSWORD", default = "konobangu") }}'
oidc_issuer = '{{ get_env(name="OIDC_ISSUER", default = "") }}'
oidc_audience = '{{ get_env(name="OIDC_AUDIENCE", default = "") }}'
oidc_client_id = '{{ get_env(name="OIDC_CLIENT_ID", default = "") }}'
oidc_client_secret = '{{ get_env(name="OIDC_CLIENT_SECRET", default = "") }}'
oidc_extra_scopes = '{{ get_env(name="OIDC_EXTRA_SCOPES", default = "") }}'
oidc_extra_claim_key = '{{ get_env(name="OIDC_EXTRA_CLAIM_KEY", default = "") }}'
oidc_extra_claim_value = '{{ get_env(name="OIDC_EXTRA_CLAIM_VALUE", default = "") }}'
[graphql]
# depth_limit = inf
# complexity_limit = inf
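The single-quoted values above are Tera-style templates rendered before the TOML is parsed; `get_env` is not a Tera builtin, so the app presumably registers it as a custom function. A minimal sketch of that flow, assuming figment for the final parse (struct and function names here are illustrative, not the repo's actual API):

```rust
// Hypothetical sketch: render a Tera-templated TOML config, then parse it
// with figment. `get_env` is registered as a custom function that falls back
// to `default` when the environment variable is unset.
use std::collections::HashMap;

use figment::{
    Figment,
    providers::{Format, Toml},
};
use serde::Deserialize;
use tera::{Context, Result as TeraResult, Tera, Value};

#[derive(Debug, Deserialize)]
struct ServerConfig {
    port: u16,
    binding: String,
    host: String,
}

// Mirrors the template's get_env(name=..., default=...) call shape.
fn get_env(args: &HashMap<String, Value>) -> TeraResult<Value> {
    let name = args.get("name").and_then(Value::as_str).unwrap_or_default();
    let default = args.get("default").cloned().unwrap_or(Value::Null);
    Ok(std::env::var(name).map(Value::String).unwrap_or(default))
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let template = r#"
port = 5001
binding = "0.0.0.0"
host = '{{ get_env(name="HOST", default="localhost") }}'
"#;
    let mut tera = Tera::default();
    tera.register_function("get_env", get_env);
    let rendered = tera.render_str(template, &Context::new())?;
    let server: ServerConfig = Figment::new().merge(Toml::string(&rendered)).extract()?;
    println!("{server:?}");
    Ok(())
}
```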

View File

@@ -1,130 +0,0 @@
use std::path::Path;
use async_trait::async_trait;
use loco_rs::{
app::{AppContext, Hooks},
boot::{create_app, BootResult, StartMode},
cache,
config::Config,
controller::AppRoutes,
db::truncate_table,
environment::Environment,
prelude::*,
task::Tasks,
Result,
};
use crate::{
auth::service::{AppAuthService, AppAuthServiceInitializer},
controllers::{self},
dal::{AppDalClient, AppDalInitalizer},
extract::mikan::{client::AppMikanClientInitializer, AppMikanClient},
graphql::service::{AppGraphQLService, AppGraphQLServiceInitializer},
migrations::Migrator,
models::subscribers,
workers::subscription_worker::SubscriptionWorker,
};
pub const CONFIG_FOLDER: &str = "LOCO_CONFIG_FOLDER";
pub trait AppContextExt {
fn get_dal_client(&self) -> &AppDalClient {
AppDalClient::app_instance()
}
fn get_mikan_client(&self) -> &AppMikanClient {
AppMikanClient::app_instance()
}
fn get_auth_service(&self) -> &AppAuthService {
AppAuthService::app_instance()
}
fn get_graphql_service(&self) -> &AppGraphQLService {
AppGraphQLService::app_instance()
}
}
impl AppContextExt for AppContext {}
pub struct App;
#[async_trait]
impl Hooks for App {
async fn load_config(env: &Environment) -> Result<Config> {
std::env::var(CONFIG_FOLDER).map_or_else(
|_| {
let monorepo_project_config_dir = Path::new("./apps/recorder/config");
if monorepo_project_config_dir.exists() && monorepo_project_config_dir.is_dir() {
return env.load_from_folder(monorepo_project_config_dir);
}
let current_config_dir = Path::new("./config");
env.load_from_folder(current_config_dir)
},
|config_folder| env.load_from_folder(Path::new(&config_folder)),
)
}
fn app_name() -> &'static str {
env!("CARGO_CRATE_NAME")
}
async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
let initializers: Vec<Box<dyn Initializer>> = vec![
Box::new(AppDalInitalizer),
Box::new(AppMikanClientInitializer),
Box::new(AppGraphQLServiceInitializer),
Box::new(AppAuthServiceInitializer),
];
Ok(initializers)
}
fn app_version() -> String {
format!(
"{} ({})",
env!("CARGO_PKG_VERSION"),
option_env!("BUILD_SHA")
.or(option_env!("GITHUB_SHA"))
.unwrap_or("dev")
)
}
async fn boot(
mode: StartMode,
environment: &Environment,
config: Config,
) -> Result<BootResult> {
create_app::<Self, Migrator>(mode, environment, config).await
}
fn routes(ctx: &AppContext) -> AppRoutes {
AppRoutes::with_default_routes()
.prefix("/api")
.add_route(controllers::auth::routes())
.add_route(controllers::graphql::routes(ctx.clone()))
}
async fn connect_workers(ctx: &AppContext, queue: &Queue) -> Result<()> {
queue.register(SubscriptionWorker::build(ctx)).await?;
Ok(())
}
async fn after_context(ctx: AppContext) -> Result<AppContext> {
Ok(AppContext {
cache: cache::Cache::new(cache::drivers::inmem::new()).into(),
..ctx
})
}
fn register_tasks(_tasks: &mut Tasks) {}
async fn truncate(ctx: &AppContext) -> Result<()> {
truncate_table(&ctx.db, subscribers::Entity).await?;
Ok(())
}
async fn seed(_ctx: &AppContext, _base: &Path) -> Result<()> {
Ok(())
}
}

View File

@@ -0,0 +1,158 @@
use clap::{Parser, command};
use super::{AppContext, core::App, env::Environment};
use crate::{app::config::AppConfig, errors::RecorderResult};
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
pub struct MainCliArgs {
/// Explicit config file path
#[arg(short, long)]
config_file: Option<String>,
/// Explicit dotenv file path
#[arg(short, long)]
dotenv_file: Option<String>,
/// Explicit working dir
#[arg(short, long)]
working_dir: Option<String>,
/// Explicit environment
#[arg(short, long)]
environment: Option<Environment>,
#[arg(long)]
graceful_shutdown: Option<bool>,
}
pub struct AppBuilder {
dotenv_file: Option<String>,
config_file: Option<String>,
working_dir: String,
environment: Environment,
pub graceful_shutdown: bool,
}
impl AppBuilder {
pub async fn from_main_cli(environment: Option<Environment>) -> RecorderResult<Self> {
let args = MainCliArgs::parse();
let environment = environment.unwrap_or_else(|| {
args.environment.unwrap_or({
if cfg!(test) {
Environment::Testing
} else if cfg!(debug_assertions) {
Environment::Development
} else {
Environment::Production
}
})
});
let mut builder = Self::default();
if let Some(working_dir) = args.working_dir {
builder = builder.working_dir(working_dir);
}
if matches!(
&environment,
Environment::Testing | Environment::Development
) {
builder = builder.working_dir_from_manifest_dir();
}
builder = builder
.config_file(args.config_file)
.dotenv_file(args.dotenv_file)
.environment(environment)
.graceful_shutdown(args.graceful_shutdown.unwrap_or(true));
Ok(builder)
}
pub async fn build(self) -> RecorderResult<App> {
self.load_env().await?;
let config = self.load_config().await?;
let app_context =
AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?;
Ok(App {
context: app_context,
builder: self,
})
}
pub async fn load_env(&self) -> RecorderResult<()> {
AppConfig::load_dotenv(
&self.environment,
&self.working_dir,
self.dotenv_file.as_deref(),
)
.await?;
Ok(())
}
pub async fn load_config(&self) -> RecorderResult<AppConfig> {
let config = AppConfig::load_config(
&self.environment,
&self.working_dir,
self.config_file.as_deref(),
)
.await?;
Ok(config)
}
pub fn working_dir(self, working_dir: String) -> Self {
let mut ret = self;
ret.working_dir = working_dir;
ret
}
pub fn environment(self, environment: Environment) -> Self {
let mut ret = self;
ret.environment = environment;
ret
}
pub fn config_file(self, config_file: Option<String>) -> Self {
let mut ret = self;
ret.config_file = config_file;
ret
}
pub fn graceful_shutdown(self, graceful_shutdown: bool) -> Self {
let mut ret = self;
ret.graceful_shutdown = graceful_shutdown;
ret
}
pub fn dotenv_file(self, dotenv_file: Option<String>) -> Self {
let mut ret = self;
ret.dotenv_file = dotenv_file;
ret
}
pub fn working_dir_from_manifest_dir(self) -> Self {
let manifest_dir = if cfg!(debug_assertions) || cfg!(test) {
env!("CARGO_MANIFEST_DIR")
} else {
"./apps/recorder"
};
self.working_dir(manifest_dir.to_string())
}
}
impl Default for AppBuilder {
fn default() -> Self {
Self {
environment: Environment::Production,
dotenv_file: None,
config_file: None,
working_dir: String::from("."),
graceful_shutdown: true,
}
}
}
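
For reference, a hypothetical direct use of the builder (the main.rs shown
further below drives the same path from CLI arguments instead):

use recorder::app::{AppBuilder, Environment};
use recorder::errors::RecorderResult;

async fn run() -> RecorderResult<()> {
    let app = AppBuilder::default()
        .environment(Environment::Development)
        .working_dir_from_manifest_dir()
        .graceful_shutdown(true)
        .build()
        .await?;
    app.serve().await
}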

View File

@@ -0,0 +1,31 @@
[storage]
data_dir = "./data"
[mikan]
base_url = "https://mikanani.me/"
[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 0
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500
[mikan.http_client.proxy]
[mikan.http_client.proxy.headers]
[graphql]
depth_limit = inf
complexity_limit = inf
[cache]
[crypto]
[task]
[message]
[media]
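
The leaky_bucket_* knobs describe token-bucket rate limiting; a minimal sketch
of the same semantics using the leaky-bucket crate as a stand-in (not
necessarily the HTTP client's real implementation):

use std::time::Duration;
use leaky_bucket::RateLimiter;

async fn throttled_request() {
    let limiter = RateLimiter::builder()
        .max(2)                               // leaky_bucket_max_tokens
        .initial(0)                           // leaky_bucket_initial_tokens
        .refill(1)                            // leaky_bucket_refill_tokens
        .interval(Duration::from_millis(500)) // leaky_bucket_refill_interval
        .build();
    limiter.acquire_one().await; // each outgoing request waits for a token
    // ... perform the HTTP request here ...
}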

View File

@@ -0,0 +1,181 @@
use std::{fs, path::Path, str};
use figment::{
Figment, Provider,
providers::{Format, Json, Toml, Yaml},
};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use super::env::Environment;
use crate::{
auth::AuthConfig, cache::CacheConfig, crypto::CryptoConfig, database::DatabaseConfig,
errors::RecorderResult, extract::mikan::MikanConfig, graphql::GraphQLConfig,
logger::LoggerConfig, media::MediaConfig, message::MessageConfig, storage::StorageConfig,
task::TaskConfig, web::WebServerConfig,
};
const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
const CONFIG_ALLOWED_EXTENSIONS: &[&str] = &[".toml", ".json", ".yaml", ".yml"];
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppConfig {
pub server: WebServerConfig,
pub cache: CacheConfig,
pub auth: AuthConfig,
pub storage: StorageConfig,
pub mikan: MikanConfig,
pub crypto: CryptoConfig,
pub graphql: GraphQLConfig,
pub media: MediaConfig,
pub logger: LoggerConfig,
pub database: DatabaseConfig,
pub task: TaskConfig,
pub message: MessageConfig,
}
impl AppConfig {
pub fn config_prefix() -> String {
format!("{}.config", env!("CARGO_PKG_NAME"))
}
pub fn dotenv_prefix() -> String {
String::from(".env")
}
pub fn allowed_extension() -> Vec<String> {
CONFIG_ALLOWED_EXTENSIONS
.iter()
.map(|s| s.to_string())
.collect_vec()
}
pub fn priority_suffix(environment: &Environment) -> Vec<String> {
vec![
format!(".{}.local", environment.full_name()),
format!(".{}.local", environment.short_name()),
String::from(".local"),
environment.full_name().to_string(),
environment.short_name().to_string(),
String::from(""),
]
}
pub fn default_provider() -> impl Provider {
Toml::string(DEFAULT_CONFIG_MIXIN)
}
pub fn merge_provider_from_file(
fig: Figment,
filepath: impl AsRef<Path>,
ext: &str,
) -> RecorderResult<Figment> {
let content = fs::read_to_string(filepath)?;
let rendered = tera::Tera::one_off(
&content,
&tera::Context::from_value(serde_json::json!({}))?,
false,
)?;
Ok(match ext {
".toml" => fig.merge(Toml::string(&rendered)),
".json" => fig.merge(Json::string(&rendered)),
".yaml" | ".yml" => fig.merge(Yaml::string(&rendered)),
_ => unreachable!("unsupported config extension"),
})
}
pub async fn load_dotenv(
environment: &Environment,
working_dir: &str,
dotenv_file: Option<&str>,
) -> RecorderResult<()> {
let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
vec![dotenv_file]
} else {
vec![Some(working_dir)]
};
let priority_suffix = &AppConfig::priority_suffix(environment);
let dotenv_prefix = AppConfig::dotenv_prefix();
let try_filenames = priority_suffix
.iter()
.map(|ps| format!("{}{}", &dotenv_prefix, ps))
.collect_vec();
for try_dotenv_file_or_dir in try_dotenv_file_or_dirs.into_iter().flatten() {
let try_dotenv_file_or_dir_path = Path::new(try_dotenv_file_or_dir);
if try_dotenv_file_or_dir_path.exists() {
if try_dotenv_file_or_dir_path.is_dir() {
for f in try_filenames.iter() {
let p = try_dotenv_file_or_dir_path.join(f);
if p.exists() && p.is_file() {
dotenvy::from_path(p)?;
break;
}
}
} else if try_dotenv_file_or_dir_path.is_file() {
dotenvy::from_path(try_dotenv_file_or_dir_path)?;
break;
}
}
}
Ok(())
}
pub async fn load_config(
environment: &Environment,
working_dir: &str,
config_file: Option<&str>,
) -> RecorderResult<AppConfig> {
let try_config_file_or_dirs = if config_file.is_some() {
vec![config_file]
} else {
vec![Some(working_dir)]
};
let allowed_extensions = &AppConfig::allowed_extension();
let priority_suffix = &AppConfig::priority_suffix(environment);
let convention_prefix = &AppConfig::config_prefix();
let try_filenames = priority_suffix
.iter()
.flat_map(|ps| {
allowed_extensions
.iter()
.map(move |ext| (format!("{convention_prefix}{ps}{ext}"), ext))
})
.collect_vec();
let mut fig = Figment::from(AppConfig::default_provider());
for try_config_file_or_dir in try_config_file_or_dirs.into_iter().flatten() {
let try_config_file_or_dir_path = Path::new(try_config_file_or_dir);
if try_config_file_or_dir_path.exists() {
if try_config_file_or_dir_path.is_dir() {
for (f, ext) in try_filenames.iter() {
let p = try_config_file_or_dir_path.join(f);
if p.exists() && p.is_file() {
fig = AppConfig::merge_provider_from_file(fig, &p, ext)?;
break;
}
}
} else if let Some(ext) = try_config_file_or_dir_path
.extension()
.and_then(|s| s.to_str())
&& try_config_file_or_dir_path.is_file()
{
fig =
AppConfig::merge_provider_from_file(fig, try_config_file_or_dir_path, ext)?;
break;
}
}
}
let app_config: AppConfig = fig.extract()?;
Ok(app_config)
}
}
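
A minimal sketch of the templating step above: Tera's built-in get_env function
is what lets config values such as the database uri pull from environment
variables with a fallback default before the file is parsed.

fn render_config_template(content: &str) -> Result<String, tera::Error> {
    // e.g. content = r#"uri = '{{ get_env(name="DATABASE_URL", default="sqlite://dev.db") }}'"#
    tera::Tera::one_off(
        content,
        &tera::Context::from_value(serde_json::json!({}))?,
        false,
    )
}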

View File

@@ -0,0 +1,147 @@
use std::{fmt::Debug, sync::Arc};
use tokio::sync::OnceCell;
use super::{Environment, config::AppConfig};
use crate::{
auth::AuthService, cache::CacheService, crypto::CryptoService, database::DatabaseService,
errors::RecorderResult, extract::mikan::MikanClient, graphql::GraphQLService,
logger::LoggerService, media::MediaService, message::MessageService, storage::StorageService,
task::TaskService,
};
pub trait AppContextTrait: Send + Sync + Debug {
fn logger(&self) -> &LoggerService;
fn db(&self) -> &DatabaseService;
fn config(&self) -> &AppConfig;
fn cache(&self) -> &CacheService;
fn mikan(&self) -> &MikanClient;
fn auth(&self) -> &AuthService;
fn graphql(&self) -> &GraphQLService;
fn storage(&self) -> &StorageService;
fn working_dir(&self) -> &String;
fn environment(&self) -> &Environment;
fn crypto(&self) -> &CryptoService;
fn task(&self) -> &TaskService;
fn message(&self) -> &MessageService;
fn media(&self) -> &MediaService;
}
pub struct AppContext {
logger: LoggerService,
db: DatabaseService,
config: AppConfig,
cache: CacheService,
mikan: MikanClient,
auth: AuthService,
storage: StorageService,
crypto: CryptoService,
working_dir: String,
environment: Environment,
message: MessageService,
media: MediaService,
task: OnceCell<TaskService>,
graphql: OnceCell<GraphQLService>,
}
impl AppContext {
pub async fn new(
environment: Environment,
config: AppConfig,
working_dir: impl ToString,
) -> RecorderResult<Arc<Self>> {
let config_cloned = config.clone();
let logger = LoggerService::from_config(config.logger).await?;
let cache = CacheService::from_config(config.cache).await?;
let db = DatabaseService::from_config(config.database).await?;
let storage = StorageService::from_config(config.storage).await?;
let message = MessageService::from_config(config.message).await?;
let auth = AuthService::from_conf(config.auth).await?;
let mikan = MikanClient::from_config(config.mikan).await?;
let crypto = CryptoService::from_config(config.crypto).await?;
let media = MediaService::from_config(config.media).await?;
let ctx = Arc::new(AppContext {
config: config_cloned,
environment,
logger,
auth,
cache,
db,
storage,
mikan,
working_dir: working_dir.to_string(),
crypto,
message,
media,
task: OnceCell::new(),
graphql: OnceCell::new(),
});
ctx.task
.get_or_try_init(async || {
TaskService::from_config_and_ctx(config.task, ctx.clone()).await
})
.await?;
ctx.graphql
.get_or_try_init(async || {
GraphQLService::from_config_and_ctx(config.graphql, ctx.clone()).await
})
.await?;
Ok(ctx)
}
}
impl Debug for AppContext {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "AppContext")
}
}
impl AppContextTrait for AppContext {
fn logger(&self) -> &LoggerService {
&self.logger
}
fn db(&self) -> &DatabaseService {
&self.db
}
fn config(&self) -> &AppConfig {
&self.config
}
fn cache(&self) -> &CacheService {
&self.cache
}
fn mikan(&self) -> &MikanClient {
&self.mikan
}
fn auth(&self) -> &AuthService {
&self.auth
}
fn graphql(&self) -> &GraphQLService {
self.graphql.get().expect("graphql should be set")
}
fn storage(&self) -> &StorageService {
&self.storage
}
fn working_dir(&self) -> &String {
&self.working_dir
}
fn environment(&self) -> &Environment {
&self.environment
}
fn crypto(&self) -> &CryptoService {
&self.crypto
}
fn task(&self) -> &TaskService {
self.task.get().expect("task should be set")
}
fn message(&self) -> &MessageService {
&self.message
}
fn media(&self) -> &MediaService {
&self.media
}
}

View File

@@ -0,0 +1,166 @@
use std::{net::SocketAddr, sync::Arc};
use axum::Router;
use tokio::{net::TcpSocket, signal};
use tracing::instrument;
use super::{builder::AppBuilder, context::AppContextTrait};
use crate::{
errors::{RecorderError, RecorderResult},
web::{
controller::{self, core::ControllerTrait},
middleware::default_middleware_stack,
},
};
pub const PROJECT_NAME: &str = "konobangu";
pub struct App {
pub context: Arc<dyn AppContextTrait>,
pub builder: AppBuilder,
}
impl App {
pub fn builder() -> AppBuilder {
AppBuilder::default()
}
#[instrument(err, skip(self))]
pub async fn serve(&self) -> RecorderResult<()> {
let context = &self.context;
let config = context.config();
let listener = {
let addr: SocketAddr =
format!("{}:{}", config.server.binding, config.server.port).parse()?;
let socket = if addr.is_ipv4() {
TcpSocket::new_v4()
} else {
TcpSocket::new_v6()
}?;
socket.set_reuseaddr(true)?;
#[cfg(all(unix, not(target_os = "solaris")))]
if let Err(e) = socket.set_reuseport(true) {
tracing::warn!("Failed to set SO_REUSEPORT: {}", e);
}
socket.bind(addr)?;
socket.listen(1024)
}?;
let mut router = Router::<Arc<dyn AppContextTrait>>::new();
let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
controller::graphql::create(context.clone()),
controller::oidc::create(context.clone()),
controller::metadata::create(context.clone()),
controller::r#static::create(context.clone()),
controller::feeds::create(context.clone()),
)?;
for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
router = c.apply_to(router);
}
let middlewares = default_middleware_stack(context.clone());
for mid in middlewares {
if mid.is_enabled() {
router = mid.apply(router)?;
tracing::info!(name = mid.name(), "+middleware");
}
}
let router = router
.with_state(context.clone())
.into_make_service_with_connect_info::<SocketAddr>();
let task = context.task();
let graceful_shutdown = self.builder.graceful_shutdown;
tokio::try_join!(
async {
let axum_serve = axum::serve(listener, router);
if graceful_shutdown {
axum_serve
.with_graceful_shutdown(async move {
Self::shutdown_signal().await;
tracing::info!("axum shutting down...");
})
.await?;
} else {
axum_serve.await?;
}
Ok::<(), RecorderError>(())
},
async {
{
let monitor = task.setup_monitor().await?;
if graceful_shutdown {
monitor
.run_with_signal(async move {
Self::shutdown_signal().await;
tracing::info!("apalis shutting down...");
Ok(())
})
.await?;
} else {
monitor.run().await?;
}
}
Ok::<(), RecorderError>(())
},
async {
let listener = task.setup_listener().await?;
listener.listen().await?;
Ok::<(), RecorderError>(())
}
)?;
Ok(())
}
async fn shutdown_signal() {
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
#[cfg(all(unix, debug_assertions))]
let quit = async {
signal::unix::signal(signal::unix::SignalKind::quit())
.expect("Failed to install SIGQUIT handler")
.recv()
.await;
println!("Received SIGQUIT");
};
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
#[cfg(not(all(unix, debug_assertions)))]
let quit = std::future::pending::<()>();
tokio::select! {
() = ctrl_c => {},
() = terminate => {},
() = quit => {},
}
}
}

View File

@@ -0,0 +1,35 @@
use clap::ValueEnum;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize, ValueEnum)]
#[serde(rename_all = "snake_case")]
#[value(rename_all = "snake_case")]
pub enum Environment {
#[serde(alias = "dev")]
#[value(alias = "dev")]
Development,
#[serde(alias = "prod")]
#[value(alias = "prod")]
Production,
#[serde(alias = "test")]
#[value(alias = "test")]
Testing,
}
impl Environment {
pub fn full_name(&self) -> &'static str {
match &self {
Self::Development => "development",
Self::Production => "production",
Self::Testing => "testing",
}
}
pub fn short_name(&self) -> &'static str {
match &self {
Self::Development => "dev",
Self::Production => "prod",
Self::Testing => "test",
}
}
}

View File

@@ -0,0 +1,12 @@
pub mod builder;
pub mod config;
pub mod context;
pub mod core;
pub mod env;
pub use core::{App, PROJECT_NAME};
pub use builder::AppBuilder;
pub use config::AppConfig;
pub use context::{AppContext, AppContextTrait};
pub use env::Environment;

View File

@@ -1,14 +1,17 @@
 use async_trait::async_trait;
-use axum::http::request::Parts;
+use axum::http::{HeaderValue, request::Parts};
 use base64::{self, Engine};
-use reqwest::header::AUTHORIZATION;
+use http::header::AUTHORIZATION;
 use super::{
     config::BasicAuthConfig,
     errors::AuthError,
-    service::{AuthService, AuthUserInfo},
+    service::{AuthServiceTrait, AuthUserInfo},
 };
-use crate::models::{auth::AuthType, subscribers::SEED_SUBSCRIBER};
+use crate::{
+    app::{AppContextTrait, PROJECT_NAME},
+    models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
+};
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub struct AuthBasic {
@@ -22,12 +25,12 @@ impl AuthBasic {
         .headers
         .get(AUTHORIZATION)
         .and_then(|s| s.to_str().ok())
-        .ok_or_else(|| AuthError::BasicInvalidCredentials)?;
+        .ok_or(AuthError::BasicInvalidCredentials)?;
     let split = authorization.split_once(' ');
     match split {
-        Some((name, contents)) if name == "Basic" => {
+        Some(("Basic", contents)) => {
             let decoded = base64::engine::general_purpose::STANDARD
                 .decode(contents)
                 .map_err(|_| AuthError::BasicInvalidCredentials)?;
@@ -58,22 +61,35 @@ pub struct BasicAuthService {
 }
 #[async_trait]
-impl AuthService for BasicAuthService {
-    async fn extract_user_info(&self, request: &mut Parts) -> Result<AuthUserInfo, AuthError> {
+impl AuthServiceTrait for BasicAuthService {
+    async fn extract_user_info(
+        &self,
+        ctx: &dyn AppContextTrait,
+        request: &mut Parts,
+    ) -> Result<AuthUserInfo, AuthError> {
         if let Ok(AuthBasic {
             user: found_user,
             password: found_password,
         }) = AuthBasic::decode_request_parts(request)
+            && self.config.user == found_user
+            && self.config.password == found_password.unwrap_or_default()
         {
-            if self.config.user == found_user
-                && self.config.password == found_password.unwrap_or_default()
-            {
-                return Ok(AuthUserInfo {
-                    user_pid: SEED_SUBSCRIBER.to_string(),
-                    auth_type: AuthType::Basic,
-                });
-            }
+            let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
+                .await
+                .map_err(|_| AuthError::FindAuthRecordError)?;
+            return Ok(AuthUserInfo {
+                subscriber_auth,
+                auth_type: AuthType::Basic,
+            });
         }
         Err(AuthError::BasicInvalidCredentials)
     }
+    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
+        Some(HeaderValue::from_str(format!("Basic realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
+    }
+    fn auth_type(&self) -> AuthType {
+        AuthType::Basic
+    }
 }

View File

@@ -1,5 +1,8 @@
-use jwt_authorizer::OneOrArray;
+use std::collections::HashMap;
+use jwtk::OneOrMany;
 use serde::{Deserialize, Serialize};
+use serde_with::serde_as;
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct BasicAuthConfig {
@@ -9,23 +12,26 @@ pub struct BasicAuthConfig {
     pub password: String,
 }
+#[serde_as]
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
 pub struct OidcAuthConfig {
-    #[serde(rename = "oidc_api_issuer")]
+    #[serde(rename = "oidc_issuer")]
     pub issuer: String,
-    #[serde(rename = "oidc_api_audience")]
+    #[serde(rename = "oidc_audience")]
     pub audience: String,
+    #[serde(rename = "oidc_client_id")]
+    pub client_id: String,
+    #[serde(rename = "oidc_client_secret")]
+    pub client_secret: String,
     #[serde(rename = "oidc_extra_scopes")]
-    pub extra_scopes: Option<OneOrArray<String>>,
-    #[serde(rename = "oidc_extra_claim_key")]
-    pub extra_claim_key: Option<String>,
-    #[serde(rename = "oidc_extra_claim_value")]
-    pub extra_claim_value: Option<String>,
+    pub extra_scopes: Option<OneOrMany<String>>,
+    #[serde(rename = "oidc_extra_claims")]
+    pub extra_claims: Option<HashMap<String, Option<String>>>,
 }
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
 #[serde(tag = "auth_type", rename_all = "snake_case")]
-pub enum AppAuthConfig {
+pub enum AuthConfig {
     Basic(BasicAuthConfig),
     Oidc(OidcAuthConfig),
 }

View File

@@ -1,36 +1,142 @@
+use async_graphql::dynamic::ResolverContext;
 use axum::{
+    Json,
     http::StatusCode,
     response::{IntoResponse, Response},
-    Json,
 };
-use thiserror::Error;
+use fetch::HttpClientError;
+use openidconnect::{
+    ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
+    StandardErrorResponse, core::CoreErrorResponseType,
+};
+use serde::{Deserialize, Serialize};
+use snafu::prelude::*;
+use crate::models::auth::AuthType;
-#[derive(Debug, Error)]
+#[derive(Debug, Snafu)]
+#[snafu(visibility(pub(crate)))]
 pub enum AuthError {
-    #[error(transparent)]
-    OidcInitError(#[from] jwt_authorizer::error::InitError),
-    #[error("Invalid credentials")]
+    #[snafu(display("Permission denied"))]
+    PermissionError,
+    #[snafu(display("Not support auth method"))]
+    NotSupportAuthMethod {
+        supported: Vec<AuthType>,
+        current: AuthType,
+    },
+    #[snafu(display("Failed to find auth record"))]
+    FindAuthRecordError,
+    #[snafu(display("Invalid credentials"))]
     BasicInvalidCredentials,
-    #[error(transparent)]
-    OidcJwtAuthError(#[from] jwt_authorizer::AuthError),
-    #[error("Extra scopes {expected} do not match found scopes {found}")]
+    #[snafu(display("Invalid oidc provider meta client error: {source}"))]
+    OidcProviderHttpClientError { source: HttpClientError },
+    #[snafu(transparent)]
+    OidcProviderMetaError {
+        source: openidconnect::DiscoveryError<HttpClientError>,
+    },
+    #[snafu(display("Invalid oidc provider URL: {source}"))]
+    OidcProviderUrlError { source: url::ParseError },
+    #[snafu(display("Invalid oidc redirect URI: {source}"))]
+    OidcRequestRedirectUriError {
+        #[snafu(source)]
+        source: url::ParseError,
+    },
+    #[snafu(display("Oidc request session not found or expired"))]
+    OidcCallbackRecordNotFoundOrExpiredError,
+    #[snafu(display("Invalid oidc request callback nonce"))]
+    OidcInvalidNonceError,
+    #[snafu(display("Invalid oidc request callback state"))]
+    OidcInvalidStateError,
+    #[snafu(display("Invalid oidc request callback code"))]
+    OidcInvalidCodeError,
+    #[snafu(transparent)]
+    OidcCallbackTokenConfigurationError { source: ConfigurationError },
+    #[snafu(transparent)]
+    OidcRequestTokenError {
+        source: RequestTokenError<HttpClientError, StandardErrorResponse<CoreErrorResponseType>>,
+    },
+    #[snafu(display("Invalid oidc id token"))]
+    OidcInvalidIdTokenError,
+    #[snafu(display("Invalid oidc access token"))]
+    OidcInvalidAccessTokenError,
+    #[snafu(transparent)]
+    OidcSignatureVerificationError { source: SignatureVerificationError },
+    #[snafu(transparent)]
+    OidcSigningError { source: SigningError },
+    #[snafu(display("Missing Bearer token"))]
+    OidcMissingBearerToken,
+    #[snafu(transparent)]
+    OidcJwtkError { source: jwtk::Error },
+    #[snafu(display("Extra scopes {expected} do not match found scopes {found}"))]
     OidcExtraScopesMatchError { expected: String, found: String },
-    #[error("Extra claim {key} does not match expected value {expected}, found {found}")]
+    #[snafu(display("Extra claim {key} does not match expected value {expected}, found {found}"))]
     OidcExtraClaimMatchError {
         key: String,
         expected: String,
         found: String,
     },
-    #[error("Extra claim {0} missing")]
-    OidcExtraClaimMissingError(String),
-    #[error("Audience {0} missing")]
-    OidcAudMissingError(String),
-    #[error("Subject missing")]
+    #[snafu(display("Extra claim {claim} missing"))]
+    OidcExtraClaimMissingError { claim: String },
+    #[snafu(display("Audience {aud} missing"))]
+    OidcAudMissingError { aud: String },
+    #[snafu(display("Subject missing"))]
     OidcSubMissingError,
+    #[snafu(display(
+        "GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
+        (if field.is_empty() { "" } else { "." }),
+        (if column.is_empty() { "" } else { "." }),
+        source.message
+    ))]
+    GraphqlDynamicPermissionError {
+        #[snafu(source(false))]
+        source: Box<async_graphql::Error>,
+        field: String,
+        column: String,
+        context_path: String,
+    },
 }
+impl AuthError {
+    pub fn from_graphql_dynamic_subscribe_id_guard(
+        source: async_graphql::Error,
+        context: &ResolverContext,
+        field_name: &str,
+        column_name: &str,
+    ) -> AuthError {
+        AuthError::GraphqlDynamicPermissionError {
+            source: Box::new(source),
+            field: field_name.to_string(),
+            column: column_name.to_string(),
+            context_path: context
+                .ctx
+                .path_node
+                .map(|p| p.to_string_vec().join(""))
+                .unwrap_or_default(),
+        }
+    }
+}
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct AuthErrorResponse {
+    pub success: bool,
+    pub message: String,
+}
+impl From<AuthError> for AuthErrorResponse {
+    fn from(value: AuthError) -> Self {
+        AuthErrorResponse {
+            success: false,
+            message: value.to_string(),
+        }
+    }
+}
 impl IntoResponse for AuthError {
     fn into_response(self) -> Response {
-        (StatusCode::UNAUTHORIZED, Json(self.to_string())).into_response()
+        (
+            StatusCode::UNAUTHORIZED,
+            Json(AuthErrorResponse::from(self)),
+        )
+            .into_response()
     }
 }

View File

@@ -0,0 +1,40 @@
use std::sync::Arc;
use axum::{
extract::{Request, State},
http::header,
middleware::Next,
response::{IntoResponse, Response},
};
use crate::{app::AppContextTrait, auth::AuthServiceTrait};
pub async fn auth_middleware(
State(ctx): State<Arc<dyn AppContextTrait>>,
request: Request,
next: Next,
) -> Response {
let auth_service = ctx.auth();
let (mut parts, body) = request.into_parts();
let mut response = match auth_service
.extract_user_info(ctx.as_ref() as &dyn AppContextTrait, &mut parts)
.await
{
Ok(auth_user_info) => {
let mut request = Request::from_parts(parts, body);
request.extensions_mut().insert(auth_user_info);
next.run(request).await
}
Err(auth_error) => auth_error.into_response(),
};
if let Some(header_value) = auth_service.www_authenticate_header_value() {
response
.headers_mut()
.insert(header::WWW_AUTHENTICATE, header_value);
};
response
}
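
A sketch of how this middleware could be attached to a router via
axum::middleware::from_fn_with_state; the actual wiring lives in the
controller/App::serve code, so the protect helper here is illustrative:

use std::sync::Arc;
use axum::{Router, middleware};

fn protect(
    router: Router<Arc<dyn AppContextTrait>>,
    ctx: Arc<dyn AppContextTrait>,
) -> Router<Arc<dyn AppContextTrait>> {
    router.layer(middleware::from_fn_with_state(ctx, auth_middleware))
}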

View File

@@ -1,9 +1,11 @@
 pub mod basic;
 pub mod config;
 pub mod errors;
+pub mod middleware;
 pub mod oidc;
 pub mod service;
-pub use config::{AppAuthConfig, BasicAuthConfig, OidcAuthConfig};
+pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig};
 pub use errors::AuthError;
-pub use service::{AppAuthService, AuthService, AuthUserInfo};
+pub use middleware::auth_middleware;
+pub use service::{AuthService, AuthServiceTrait, AuthUserInfo};

View File

@@ -1,36 +1,87 @@
-use std::collections::{HashMap, HashSet};
+use std::{
+    collections::{HashMap, HashSet},
+    future::Future,
+    ops::Deref,
+    pin::Pin,
+    sync::Arc,
+};
 use async_trait::async_trait;
-use axum::http::request::Parts;
+use axum::{
+    http,
+    http::{HeaderValue, request::Parts},
+};
+use fetch::{HttpClient, client::HttpClientError};
+use http::header::AUTHORIZATION;
 use itertools::Itertools;
-use jwt_authorizer::{authorizer::Authorizer, NumericDate, OneOrArray};
+use jwtk::jwk::RemoteJwksVerifier;
+use moka::future::Cache;
+use openidconnect::{
+    AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
+    OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse,
+    core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
+};
+use sea_orm::DbErr;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
+use snafu::ResultExt;
+use url::Url;
 use super::{
     config::OidcAuthConfig,
-    errors::AuthError,
-    service::{AuthService, AuthUserInfo},
+    errors::{AuthError, OidcProviderUrlSnafu, OidcRequestRedirectUriSnafu},
+    service::{AuthServiceTrait, AuthUserInfo},
 };
-use crate::models::auth::AuthType;
+use crate::{
+    app::{AppContextTrait, PROJECT_NAME},
+    errors::RecorderError,
+    models::auth::AuthType,
+};
+pub struct OidcHttpClient(pub Arc<HttpClient>);
+impl Deref for OidcHttpClient {
+    type Target = HttpClient;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+impl<'c> openidconnect::AsyncHttpClient<'c> for OidcHttpClient {
+    type Error = HttpClientError;
+    #[cfg(target_arch = "wasm32")]
+    type Future =
+        Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + 'c>>;
+    #[cfg(not(target_arch = "wasm32"))]
+    type Future =
+        Pin<Box<dyn Future<Output = Result<openidconnect::HttpResponse, Self::Error>> + Send + 'c>>;
+    fn call(&'c self, request: openidconnect::HttpRequest) -> Self::Future {
+        Box::pin(async move {
+            let response = self.execute(request.try_into()?).await?;
+            let mut builder = http::Response::builder().status(response.status());
+            #[cfg(not(target_arch = "wasm32"))]
+            {
+                builder = builder.version(response.version());
+            }
+            for (name, value) in response.headers().iter() {
+                builder = builder.header(name, value);
+            }
+            builder
+                .body(response.bytes().await?.to_vec())
+                .map_err(HttpClientError::from)
+        })
+    }
+}
 #[derive(Deserialize, Serialize, Clone, Debug)]
 pub struct OidcAuthClaims {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub iss: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub sub: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub aud: Option<OneOrArray<String>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub exp: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub nbf: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub iat: Option<NumericDate>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub jti: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
     pub scope: Option<String>,
     #[serde(flatten)]
     pub custom: HashMap<String, Value>,
@@ -40,99 +91,274 @@ impl OidcAuthClaims {
     pub fn scopes(&self) -> std::str::Split<'_, char> {
         self.scope.as_deref().unwrap_or_default().split(',')
     }
-    pub fn get_claim(&self, key: &str) -> Option<String> {
-        match key {
-            "iss" => self.iss.clone(),
-            "sub" => self.sub.clone(),
-            "aud" => self.aud.as_ref().map(|s| s.iter().join(",")),
-            "exp" => self.exp.clone().map(|s| s.0.to_string()),
-            "nbf" => self.nbf.clone().map(|s| s.0.to_string()),
-            "iat" => self.iat.clone().map(|s| s.0.to_string()),
-            "jti" => self.jti.clone(),
-            "scope" => self.scope.clone(),
-            key => self.custom.get(key).map(|s| s.to_string()),
-        }
-    }
-    pub fn has_claim(&self, key: &str) -> bool {
-        match key {
-            "iss" => self.iss.is_some(),
-            "sub" => self.sub.is_some(),
-            "aud" => self.aud.is_some(),
-            "exp" => self.exp.is_some(),
-            "nbf" => self.nbf.is_some(),
-            "iat" => self.iat.is_some(),
-            "jti" => self.jti.is_some(),
-            "scope" => self.scope.is_some(),
-            key => self.custom.contains_key(key),
-        }
-    }
-    pub fn contains_audience(&self, aud: &str) -> bool {
-        self.aud
-            .as_ref()
-            .is_some_and(|arr| arr.iter().any(|s| s == aud))
-    }
 }
+#[derive(Debug, Clone, Serialize)]
+pub struct OidcAuthRequest {
+    pub auth_uri: Url,
+    #[serde(skip)]
+    pub redirect_uri: RedirectUrl,
+    #[serde(skip)]
+    pub csrf_token: CsrfToken,
+    #[serde(skip)]
+    pub nonce: Nonce,
+    #[serde(skip)]
+    pub pkce_verifier: Arc<PkceCodeVerifier>,
+}
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct OidcAuthCallbackQuery {
+    pub state: Option<String>,
+    pub code: Option<String>,
+    pub redirect_uri: Option<String>,
+}
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct OidcAuthCallbackPayload {
+    pub access_token: String,
+}
 pub struct OidcAuthService {
     pub config: OidcAuthConfig,
-    pub authorizer: Authorizer<OidcAuthClaims>,
+    pub jwk_verifier: RemoteJwksVerifier,
+    pub oidc_provider_client: Arc<HttpClient>,
+    pub oidc_request_cache: Cache<String, OidcAuthRequest>,
 }
+impl OidcAuthService {
+    pub async fn build_authorization_request(
+        &self,
+        redirect_uri: &str,
+    ) -> Result<OidcAuthRequest, AuthError> {
+        let oidc_provider_client = OidcHttpClient(self.oidc_provider_client.clone());
+        let provider_metadata = CoreProviderMetadata::discover_async(
+            IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
+            &oidc_provider_client,
+        )
+        .await?;
+        let redirect_uri =
+            RedirectUrl::new(redirect_uri.to_string()).context(OidcRequestRedirectUriSnafu)?;
+        let oidc_client = CoreClient::from_provider_metadata(
+            provider_metadata,
+            ClientId::new(self.config.client_id.clone()),
+            Some(ClientSecret::new(self.config.client_secret.clone())),
+        )
+        .set_redirect_uri(redirect_uri.clone());
+        let (pkce_chanllenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256();
+        let mut authorization_request = oidc_client
+            .authorize_url(
+                CoreAuthenticationFlow::AuthorizationCode,
+                CsrfToken::new_random,
+                Nonce::new_random,
+            )
+            .set_pkce_challenge(pkce_chanllenge);
+        {
+            if let Some(scopes) = self.config.extra_scopes.as_ref() {
+                authorization_request = authorization_request.add_scopes(
+                    scopes
+                        .iter()
+                        .map(|s| openidconnect::Scope::new(s.to_string())),
+                )
+            }
+        }
+        let (auth_uri, csrf_token, nonce) = authorization_request.url();
+        Ok(OidcAuthRequest {
+            auth_uri,
+            csrf_token,
+            nonce,
+            pkce_verifier: Arc::new(pkce_verifier),
+            redirect_uri,
+        })
+    }
+    pub async fn store_authorization_request(
+        &self,
+        request: OidcAuthRequest,
+    ) -> Result<(), AuthError> {
+        self.oidc_request_cache
+            .insert(request.csrf_token.secret().to_string(), request)
+            .await;
+        Ok(())
+    }
+    pub async fn load_authorization_request(
+        &self,
+        state: &str,
+    ) -> Result<OidcAuthRequest, AuthError> {
+        let result = self
+            .oidc_request_cache
+            .get(state)
+            .await
+            .ok_or(AuthError::OidcCallbackRecordNotFoundOrExpiredError)?;
+        self.oidc_request_cache.invalidate(state).await;
+        Ok(result)
+    }
+    pub async fn extract_authorization_request_callback(
+        &self,
+        query: OidcAuthCallbackQuery,
+    ) -> Result<OidcAuthCallbackPayload, AuthError> {
+        let oidc_http_client = OidcHttpClient(self.oidc_provider_client.clone());
+        let csrf_token = query.state.ok_or(AuthError::OidcInvalidStateError)?;
+        let code = query.code.ok_or(AuthError::OidcInvalidCodeError)?;
+        let request_cache = self.load_authorization_request(&csrf_token).await?;
+        let provider_metadata = CoreProviderMetadata::discover_async(
+            IssuerUrl::new(self.config.issuer.clone()).context(OidcProviderUrlSnafu)?,
+            &oidc_http_client,
+        )
+        .await?;
+        let oidc_client = CoreClient::from_provider_metadata(
+            provider_metadata,
+            ClientId::new(self.config.client_id.clone()),
+            Some(ClientSecret::new(self.config.client_secret.clone())),
+        )
+        .set_redirect_uri(request_cache.redirect_uri);
+        let pkce_verifier = PkceCodeVerifier::new(request_cache.pkce_verifier.secret().to_string());
+        let token_response = oidc_client
+            .exchange_code(AuthorizationCode::new(code))?
+            .set_pkce_verifier(pkce_verifier)
+            .request_async(&oidc_http_client)
+            .await?;
+        let id_token = token_response
+            .id_token()
+            .ok_or(AuthError::OidcInvalidIdTokenError)?;
+        let id_token_verifier = &oidc_client.id_token_verifier();
+        let claims = id_token
+            .claims(id_token_verifier, &request_cache.nonce)
+            .map_err(|_| AuthError::OidcInvalidNonceError)?;
+        let access_token = token_response.access_token();
+        let actual_access_token_hash = AccessTokenHash::from_token(
+            access_token,
+            id_token.signing_alg()?,
+            id_token.signing_key(id_token_verifier)?,
+        )?;
+        if let Some(expected_access_token_hash) = claims.access_token_hash()
+            && actual_access_token_hash != *expected_access_token_hash
+        {
+            return Err(AuthError::OidcInvalidAccessTokenError);
+        }
+        Ok(OidcAuthCallbackPayload {
+            access_token: access_token.secret().to_string(),
+        })
+    }
+}
 #[async_trait]
-impl AuthService for OidcAuthService {
-    async fn extract_user_info(&self, request: &mut Parts) -> Result<AuthUserInfo, AuthError> {
+impl AuthServiceTrait for OidcAuthService {
+    async fn extract_user_info(
+        &self,
+        ctx: &dyn AppContextTrait,
+        request: &mut Parts,
+    ) -> Result<AuthUserInfo, AuthError> {
         let config = &self.config;
-        let token =
-            self.authorizer
-                .extract_token(&request.headers)
-                .ok_or(AuthError::OidcJwtAuthError(
-                    jwt_authorizer::AuthError::MissingToken(),
-                ))?;
-        let token_data = self.authorizer.check_auth(&token).await?;
-        let claims = token_data.claims;
-        if claims.sub.as_deref().is_none_or(|s| s.trim().is_empty()) {
+        let token = request
+            .headers
+            .get(AUTHORIZATION)
+            .and_then(|authorization| {
+                authorization
+                    .to_str()
+                    .ok()
+                    .and_then(|s| s.strip_prefix("Bearer "))
+            })
+            .ok_or(AuthError::OidcMissingBearerToken)?;
+        let token_data = self.jwk_verifier.verify::<OidcAuthClaims>(token).await?;
+        let claims = token_data.claims();
+        let sub = if let Some(sub) = claims.sub.as_deref() {
+            sub
+        } else {
             return Err(AuthError::OidcSubMissingError);
-        }
-        if !claims.contains_audience(&config.audience) {
-            return Err(AuthError::OidcAudMissingError(config.audience.clone()));
-        }
+        };
+        if !claims.aud.iter().any(|aud| aud == &config.audience) {
+            return Err(AuthError::OidcAudMissingError {
+                aud: config.audience.clone(),
+            });
+        }
+        let extra_claims = &claims.extra;
         if let Some(expected_scopes) = config.extra_scopes.as_ref() {
-            let found_scopes = claims.scopes().collect::<HashSet<_>>();
+            let found_scopes = extra_claims.scopes().collect::<HashSet<_>>();
             if !expected_scopes
                 .iter()
                 .all(|es| found_scopes.contains(es as &str))
             {
                 return Err(AuthError::OidcExtraScopesMatchError {
                     expected: expected_scopes.iter().join(","),
-                    found: claims.scope.unwrap_or_default(),
+                    found: extra_claims
+                        .scope
+                        .as_deref()
+                        .unwrap_or_default()
+                        .to_string(),
                 });
             }
         }
-        if let Some(key) = config.extra_claim_key.as_ref() {
-            if !claims.has_claim(key) {
-                return Err(AuthError::OidcExtraClaimMissingError(key.clone()));
-            }
-            if let Some(value) = config.extra_claim_value.as_ref() {
-                if claims.get_claim(key).is_none_or(|v| &v != value) {
-                    return Err(AuthError::OidcExtraClaimMatchError {
-                        expected: value.clone(),
-                        found: claims.get_claim(key).unwrap_or_default().to_string(),
-                        key: key.clone(),
-                    });
-                }
-            }
-        }
+        if let Some(expected_extra_claims) = config.extra_claims.as_ref() {
+            for (expected_key, expected_value) in expected_extra_claims.iter() {
+                match (extra_claims.custom.get(expected_key), expected_value) {
+                    (found_value, Some(expected_value)) => {
+                        if let Some(Value::String(found_value)) = found_value
+                            && expected_value == found_value
+                        {
+                        } else {
+                            return Err(AuthError::OidcExtraClaimMatchError {
+                                expected: expected_value.clone(),
+                                found: found_value.map(|v| v.to_string()).unwrap_or_default(),
+                                key: expected_key.clone(),
+                            });
+                        }
+                    }
+                    (None, None) => {
+                        return Err(AuthError::OidcExtraClaimMissingError {
+                            claim: expected_key.clone(),
+                        });
+                    }
+                    _ => {}
+                }
+            }
+        }
+        let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
+            Err(RecorderError::DbError {
+                source: DbErr::RecordNotFound(..),
+            }) => crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await,
+            r => r,
+        }
+        .map_err(|e| {
+            tracing::error!("Error finding auth record: {:?}", e);
+            AuthError::FindAuthRecordError
+        })?;
         Ok(AuthUserInfo {
-            user_pid: claims
-                .sub
-                .as_deref()
-                .map(|s| s.trim().to_string())
-                .unwrap_or_else(|| unreachable!("sub should be present and validated")),
+            subscriber_auth,
             auth_type: AuthType::Oidc,
         })
     }
+    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
+        Some(HeaderValue::from_str(format!("Bearer realm=\"{PROJECT_NAME}\"").as_str()).unwrap())
+    }
+    fn auth_type(&self) -> AuthType {
+        AuthType::Oidc
+    }
 }

View File

@@ -1,78 +1,84 @@
+use std::{sync::Arc, time::Duration};
 use async_trait::async_trait;
-use axum::{
-    extract::FromRequestParts,
-    http::request::Parts,
-    response::{IntoResponse as _, Response},
-};
-use jwt_authorizer::{JwtAuthorizer, Validation};
-use loco_rs::app::{AppContext, Initializer};
-use once_cell::sync::OnceCell;
+use axum::http::request::Parts;
+use fetch::{
+    HttpClient, HttpClientConfig,
+    client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
+};
+use http::header::HeaderValue;
+use jwtk::jwk::RemoteJwksVerifier;
+use moka::future::Cache;
+use openidconnect::{IssuerUrl, core::CoreProviderMetadata};
+use snafu::prelude::*;
 use super::{
+    AuthConfig,
     basic::BasicAuthService,
-    errors::AuthError,
-    oidc::{OidcAuthClaims, OidcAuthService},
-    AppAuthConfig,
+    errors::{AuthError, OidcProviderHttpClientSnafu, OidcProviderUrlSnafu},
+    oidc::{OidcAuthService, OidcHttpClient},
 };
-use crate::{app::AppContextExt as _, config::AppConfigExt, models::auth::AuthType};
+use crate::{app::AppContextTrait, models::auth::AuthType};
+#[derive(Clone, Debug)]
 pub struct AuthUserInfo {
-    pub user_pid: String,
+    pub subscriber_auth: crate::models::auth::Model,
     pub auth_type: AuthType,
 }
-impl FromRequestParts<AppContext> for AuthUserInfo {
-    type Rejection = Response;
-    async fn from_request_parts(
-        parts: &mut Parts,
-        state: &AppContext,
-    ) -> Result<Self, Self::Rejection> {
-        let auth_service = state.get_auth_service();
-        auth_service
-            .extract_user_info(parts)
-            .await
-            .map_err(|err| err.into_response())
-    }
-}
 #[async_trait]
-pub trait AuthService {
-    async fn extract_user_info(&self, request: &mut Parts) -> Result<AuthUserInfo, AuthError>;
+pub trait AuthServiceTrait {
+    async fn extract_user_info(
+        &self,
+        ctx: &dyn AppContextTrait,
+        request: &mut Parts,
+    ) -> Result<AuthUserInfo, AuthError>;
+    fn www_authenticate_header_value(&self) -> Option<HeaderValue>;
+    fn auth_type(&self) -> AuthType;
 }
-pub enum AppAuthService {
-    Basic(BasicAuthService),
-    Oidc(OidcAuthService),
+pub enum AuthService {
+    Basic(Box<BasicAuthService>),
+    Oidc(Box<OidcAuthService>),
 }
-static APP_AUTH_SERVICE: OnceCell<AppAuthService> = OnceCell::new();
-impl AppAuthService {
-    pub fn app_instance() -> &'static Self {
-        APP_AUTH_SERVICE
-            .get()
-            .expect("AppAuthService is not initialized")
-    }
-    pub async fn from_conf(config: AppAuthConfig) -> Result<Self, AuthError> {
+impl AuthService {
+    pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> {
         let result = match config {
-            AppAuthConfig::Basic(config) => AppAuthService::Basic(BasicAuthService { config }),
-            AppAuthConfig::Oidc(config) => {
-                let validation = Validation::new()
-                    .iss(&[&config.issuer])
-                    .aud(&[&config.audience]);
-                let jwt_auth = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer)
-                    .validation(validation)
-                    .build()
-                    .await?;
-                AppAuthService::Oidc(OidcAuthService {
+            AuthConfig::Basic(config) => AuthService::Basic(Box::new(BasicAuthService { config })),
+            AuthConfig::Oidc(config) => {
+                let oidc_provider_client = Arc::new(
+                    HttpClient::from_config(HttpClientConfig {
+                        exponential_backoff_max_retries: Some(3),
+                        cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
+                        cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
+                        ..Default::default()
+                    })
+                    .context(OidcProviderHttpClientSnafu)?,
+                );
+                let provider_metadata = {
+                    let client = OidcHttpClient(oidc_provider_client.clone());
+                    let issuer_url =
+                        IssuerUrl::new(config.issuer.clone()).context(OidcProviderUrlSnafu)?;
+                    CoreProviderMetadata::discover_async(issuer_url, &client).await
+                }?;
+                let jwk_verifier = RemoteJwksVerifier::new(
+                    provider_metadata.jwks_uri().to_string().clone(),
+                    None,
+                    Duration::from_secs(300),
+                );
+                AuthService::Oidc(Box::new(OidcAuthService {
                     config,
-                    authorizer: jwt_auth,
-                })
+                    jwk_verifier,
+                    oidc_provider_client,
+                    oidc_request_cache: Cache::builder()
+                        .time_to_live(Duration::from_mins(5))
+                        .name("oidc_request_cache")
+                        .build(),
+                }))
             }
         };
         Ok(result)
@@ -80,32 +86,30 @@ impl AppAuthService {
     }
 }
 #[async_trait]
-impl AuthService for AppAuthService {
-    async fn extract_user_info(&self, request: &mut Parts) -> Result<AuthUserInfo, AuthError> {
+impl AuthServiceTrait for AuthService {
+    #[tracing::instrument(skip(self, ctx, request))]
+    async fn extract_user_info(
+        &self,
+        ctx: &dyn AppContextTrait,
+        request: &mut Parts,
+    ) -> Result<AuthUserInfo, AuthError> {
         match self {
-            AppAuthService::Basic(service) => service.extract_user_info(request).await,
-            AppAuthService::Oidc(service) => service.extract_user_info(request).await,
+            AuthService::Basic(service) => service.extract_user_info(ctx, request).await,
+            AuthService::Oidc(service) => service.extract_user_info(ctx, request).await,
+        }
+    }
+    fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
+        match self {
+            AuthService::Basic(service) => service.www_authenticate_header_value(),
+            AuthService::Oidc(service) => service.www_authenticate_header_value(),
+        }
+    }
+    fn auth_type(&self) -> AuthType {
+        match self {
+            AuthService::Basic(service) => service.auth_type(),
+            AuthService::Oidc(service) => service.auth_type(),
         }
     }
 }
-pub struct AppAuthServiceInitializer;
-#[async_trait]
-impl Initializer for AppAuthServiceInitializer {
-    fn name(&self) -> String {
-        String::from("AppAuthServiceInitializer")
-    }
-    async fn before_run(&self, ctx: &AppContext) -> Result<(), loco_rs::Error> {
-        let auth_conf = ctx.config.get_app_conf()?.auth;
-        let service = AppAuthService::from_conf(auth_conf)
-            .await
-            .map_err(loco_rs::Error::wrap)?;
-        APP_AUTH_SERVICE.get_or_init(|| service);
-        Ok(())
-    }
-}

View File

@@ -1,9 +1,12 @@
-use loco_rs::cli;
-use recorder::{app::App, migrations::Migrator};
+use recorder::{app::AppBuilder, errors::RecorderResult};
 #[tokio::main]
-async fn main() -> color_eyre::eyre::Result<()> {
-    color_eyre::install()?;
-    cli::main::<App, Migrator>().await?;
+async fn main() -> RecorderResult<()> {
+    let builder = AppBuilder::from_main_cli(None).await?;
+    let app = builder.build().await?;
+    app.serve().await?;
     Ok(())
 }

View File

@@ -0,0 +1,16 @@
use recorder::{app::AppBuilder, database::DatabaseService, errors::RecorderResult};
#[tokio::main]
async fn main() -> RecorderResult<()> {
let builder = AppBuilder::from_main_cli(None).await?;
builder.load_env().await?;
let mut database_config = builder.load_config().await?.database;
database_config.auto_migrate = false;
let database_service = DatabaseService::from_config(database_config).await?;
database_service.migrate_down().await?;
Ok(())
}

4
apps/recorder/src/cache/config.rs vendored Normal file
View File

@@ -0,0 +1,4 @@
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CacheConfig {}

5
apps/recorder/src/cache/mod.rs vendored Normal file
View File

@@ -0,0 +1,5 @@
pub mod config;
pub mod service;
pub use config::CacheConfig;
pub use service::CacheService;

10
apps/recorder/src/cache/service.rs vendored Normal file
View File

@@ -0,0 +1,10 @@
use super::CacheConfig;
use crate::errors::RecorderResult;
pub struct CacheService {}
impl CacheService {
pub async fn from_config(_config: CacheConfig) -> RecorderResult<Self> {
Ok(Self {})
}
}

View File

@@ -1,75 +0,0 @@
use figment::{
providers::{Format, Json, Yaml},
Figment,
};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::{
auth::AppAuthConfig, dal::config::AppDalConfig, extract::mikan::AppMikanConfig,
graphql::config::AppGraphQLConfig,
};
const DEFAULT_APP_SETTINGS_MIXIN: &str = include_str!("./settings_mixin.yaml");
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AppConfig {
pub auth: AppAuthConfig,
pub dal: AppDalConfig,
pub mikan: AppMikanConfig,
pub graphql: AppGraphQLConfig,
}
pub fn deserialize_key_path_from_json_value<T: DeserializeOwned>(
value: &serde_json::Value,
key_path: &[&str],
) -> Result<Option<T>, loco_rs::Error> {
let mut stack = vec![("", value)];
for key in key_path {
let current = stack.last().unwrap().1;
if let Some(v) = current.get(key) {
stack.push((key, v));
} else {
return Ok(None);
}
}
let result: T = serde_json::from_value(stack.pop().unwrap().1.clone())?;
Ok(Some(result))
}
pub fn deserialize_key_path_from_app_config<T: DeserializeOwned>(
app_config: &loco_rs::config::Config,
key_path: &[&str],
) -> Result<Option<T>, loco_rs::Error> {
let settings = app_config.settings.as_ref();
if let Some(settings) = settings {
deserialize_key_path_from_json_value(settings, key_path)
} else {
Ok(None)
}
}
pub trait AppConfigExt {
fn get_root_conf(&self) -> &loco_rs::config::Config;
fn get_app_conf(&self) -> loco_rs::Result<AppConfig> {
let settings_str = self
.get_root_conf()
.settings
.as_ref()
.map(serde_json::to_string)
.unwrap_or_else(|| Ok(String::new()))?;
let app_config = Figment::from(Json::string(&settings_str))
.merge(Yaml::string(DEFAULT_APP_SETTINGS_MIXIN))
.extract()
.map_err(loco_rs::Error::wrap)?;
Ok(app_config)
}
}
impl AppConfigExt for loco_rs::config::Config {
fn get_root_conf(&self) -> &loco_rs::config::Config {
self
}
}

View File

@@ -1,15 +0,0 @@
dal:
data_dir: ./data
mikan:
http_client:
exponential_backoff_max_retries: 3
leaky_bucket_max_tokens: 2
leaky_bucket_initial_tokens: 0
leaky_bucket_refill_tokens: 1
leaky_bucket_refill_interval: 500
base_url: "https://mikanani.me/"
graphql:
depth_limit: null
complexity_limit: null

Some files were not shown because too many files have changed in this diff.