Compare commits


No commits in common. "6e4c13661465561c947e0d2438ecab3b8dc685e4" and "6887b2a069e549443da30e997a31db86f5ba6b00" have entirely different histories.

502 changed files with 30253 additions and 13683 deletions

Cargo.lock generated
File diff suppressed because it is too large.

@@ -3,6 +3,9 @@ members = ["apps/recorder"]
 resolver = "2"
 [patch.crates-io]
+# loco-rs = { git = "https://github.com/lonelyhentxi/loco.git", rev = "beb890e" }
+# loco-rs = { git = "https://github.com/loco-rs/loco.git" }
+# loco-rs = { path = "./patches/loco" }
 jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
 # [patch."https://github.com/lonelyhentxi/qbit.git"]

apps/app/.env.development Normal file

@ -0,0 +1,29 @@
# Server
AUTH_TYPE="basic" #
BASIC_USER="konobangu"
BASIC_PASSWORD="konobangu"
OIDC_PROVIDER_ENDPOINT="https://some-oidc-auth.com/oidc/.well-known/openid-configuration"
OIDC_CLIENT_ID=""
OIDC_CLIENT_SECRET=""
OIDC_ISSUER="https://some-oidc-auth.com/oidc"
OIDC_AUDIENCE="https://konobangu.com/api"
OIDC_ICON_URL=""
OIDC_EXTRA_SCOPE_REGEX=""
OIDC_EXTRA_CLAIM_KEY=""
OIDC_EXTRA_CLAIM_VALUE=""
DATABASE_URL="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"
BETTERSTACK_API_KEY=""
BETTERSTACK_URL=""
FLAGS_SECRET=""
ARCJET_KEY=""
SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# Client
NEXT_PUBLIC_APP_URL="http://localhost:5000"
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"
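These variables are consumed through the @konobangu/env package elsewhere in this diff; that package's schema is not shown here, so the following zod sketch of server-side validation is only an assumed shape, not the project's actual implementation:

import { z } from 'zod';

// Assumed schema; names come from the .env above, required/optional choices are guesses.
const serverEnv = z.object({
  AUTH_TYPE: z.enum(['basic', 'oidc']).default('basic'),
  BASIC_USER: z.string().min(1).optional(),
  BASIC_PASSWORD: z.string().min(1).optional(),
  OIDC_PROVIDER_ENDPOINT: z.string().url().optional(),
  OIDC_ISSUER: z.string().url().optional(),
  OIDC_AUDIENCE: z.string().optional(),
  DATABASE_URL: z.string().url(),
  LIVEBLOCKS_SECRET: z.string().optional(),
});

// Fails fast at boot if a required variable is missing or malformed.
export const env = serverEnv.parse(process.env);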

apps/app/.env.example Normal file

@ -0,0 +1,29 @@
# AUTH
AUTH_TYPE="basic"
NEXT_PUBLIC_OIDC_PROVIDER_ENDPOINT="https://some-oidc-auth.com/oidc/.well-known/openid-configuration"
NEXT_PUBLIC_OIDC_CLIENT_ID=""
NEXT_PUBLIC_OIDC_CLIENT_SECRET=""
NEXT_PUBLIC_OIDC_ICON_URL=""
OIDC_ISSUER="https://some-oidc-auth.com/oidc"
OIDC_AUDIENCE="https://konobangu.com/api"
OIDC_EXTRA_SCOPES="" # e.g. "read:konobangu,write:konobangu"
OIDC_EXTRA_CLAIM_KEY=""
OIDC_EXTRA_CLAIM_VALUE=""
# DATABASE
DATABASE_URL="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"
# SERVER MISC
BETTERSTACK_API_KEY=""
BETTERSTACK_URL=""
FLAGS_SECRET=""
ARCJET_KEY=""
SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# WEBUI
NEXT_PUBLIC_APP_URL="http://localhost:5000"
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"

apps/app/.gitignore vendored Normal file

@ -0,0 +1,45 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
.env*.local
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts
# prisma
.env
# react.email
.react-email
# Sentry
.sentryclirc


@ -0,0 +1,13 @@
import { render, screen } from '@testing-library/react';
import { expect, test } from 'vitest';
import Page from '../app/(unauthenticated)/sign-in/[[...sign-in]]/page';
test('Sign In Page', () => {
render(<Page />);
expect(
screen.getByRole('heading', {
level: 1,
name: 'Welcome back',
})
).toBeDefined();
});


@ -0,0 +1,13 @@
import { render, screen } from '@testing-library/react';
import { expect, test } from 'vitest';
import Page from '../app/(unauthenticated)/sign-up/[[...sign-up]]/page';
test('Sign Up Page', () => {
render(<Page />);
expect(
screen.getByRole('heading', {
level: 1,
name: 'Create an account',
})
).toBeDefined();
});


@ -0,0 +1,59 @@
'use client';
import { useOthers, useSelf } from '@konobangu/collaboration/hooks';
import {
Avatar,
AvatarFallback,
AvatarImage,
} from '@konobangu/design-system/components/ui/avatar';
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from '@konobangu/design-system/components/ui/tooltip';
import { tailwind } from '@konobangu/tailwind-config';
type PresenceAvatarProps = {
info?: Liveblocks['UserMeta']['info'];
};
const PresenceAvatar = ({ info }: PresenceAvatarProps) => (
<Tooltip delayDuration={0}>
<TooltipTrigger>
<Avatar className="h-7 w-7 bg-secondary ring-1 ring-background">
<AvatarImage src={info?.avatar} alt={info?.name} />
<AvatarFallback className="text-xs">
{info?.name?.slice(0, 2)}
</AvatarFallback>
</Avatar>
</TooltipTrigger>
<TooltipContent collisionPadding={4}>
<p>{info?.name ?? 'Unknown'}</p>
</TooltipContent>
</Tooltip>
);
export const AvatarStack = () => {
const others = useOthers();
const self = useSelf();
const hasMoreUsers = others.length > 3;
return (
<div className="-space-x-1 flex items-center px-4">
{others.slice(0, 3).map(({ connectionId, info }) => (
<PresenceAvatar key={connectionId} info={info} />
))}
{hasMoreUsers && (
<PresenceAvatar
info={{
name: `+${others.length - 3}`,
color: tailwind.theme.colors.gray[500],
}}
/>
)}
{self && <PresenceAvatar info={self.info} />}
</div>
);
};


@ -0,0 +1,48 @@
'use client';
import { getUsers } from '@/app/actions/users/get';
import { searchUsers } from '@/app/actions/users/search';
import { Room } from '@konobangu/collaboration/room';
import type { ReactNode } from 'react';
export const CollaborationProvider = ({
orgId,
children,
}: {
orgId: string;
children: ReactNode;
}) => {
const resolveUsers = async ({ userIds }: { userIds: string[] }) => {
const response = await getUsers(userIds);
if ('error' in response) {
throw new Error('Problem resolving users');
}
return response.data;
};
const resolveMentionSuggestions = async ({ text }: { text: string }) => {
const response = await searchUsers(text);
if ('error' in response) {
throw new Error('Problem resolving mention suggestions');
}
return response.data;
};
return (
<Room
id={`${orgId}:presence`}
authEndpoint="/api/collaboration/auth"
fallback={
<div className="px-3 text-muted-foreground text-xs">Loading...</div>
}
resolveUsers={resolveUsers}
resolveMentionSuggestions={resolveMentionSuggestions}
>
{children}
</Room>
);
};


@ -0,0 +1,106 @@
'use client';
import { useMyPresence, useOthers } from '@konobangu/collaboration/hooks';
import { useEffect } from 'react';
const Cursor = ({
name,
color,
x,
y,
}: {
name: string | undefined;
color: string;
x: number;
y: number;
}) => (
<div
className="pointer-events-none absolute top-0 left-0 z-[999] select-none transition-transform duration-100"
style={{
transform: `translateX(${x}px) translateY(${y}px)`,
}}
>
<svg
className="absolute top-0 left-0"
width="24"
height="36"
viewBox="0 0 24 36"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<title>Cursor</title>
<path
d="M5.65376 12.3673H5.46026L5.31717 12.4976L0.500002 16.8829L0.500002 1.19841L11.7841 12.3673H5.65376Z"
fill={color}
/>
</svg>
<div
className="absolute top-4 left-1.5 whitespace-nowrap rounded-full px-2 py-0.5 text-white text-xs"
style={{
backgroundColor: color,
}}
>
{name}
</div>
</div>
);
export const Cursors = () => {
/**
* useMyPresence returns the presence of the current user and a function to update it.
* updateMyPresence is different than the setState function returned by the useState hook from React.
* You don't need to pass the full presence object to update it.
* See https://liveblocks.io/docs/api-reference/liveblocks-react#useMyPresence for more information
*/
const [_cursor, updateMyPresence] = useMyPresence();
/**
* Return all the other users in the room and their presence (a cursor position in this case)
*/
const others = useOthers();
useEffect(() => {
const onPointerMove = (event: PointerEvent) => {
// Update the user cursor position on every pointer move
updateMyPresence({
cursor: {
x: Math.round(event.clientX),
y: Math.round(event.clientY),
},
});
};
const onPointerLeave = () => {
// When the pointer goes out, set cursor to null
updateMyPresence({
cursor: null,
});
};
document.body.addEventListener('pointermove', onPointerMove);
document.body.addEventListener('pointerleave', onPointerLeave);
return () => {
document.body.removeEventListener('pointermove', onPointerMove);
document.body.removeEventListener('pointerleave', onPointerLeave);
};
}, [updateMyPresence]);
return others.map(({ connectionId, presence, info }) => {
if (!presence.cursor) {
return null;
}
return (
<Cursor
key={`cursor-${connectionId}`}
// connectionId is an integer that is incremented with every new connection
// info.color is assigned once when the user authenticates, so a given user keeps the same color on every client
color={info.color}
x={presence.cursor.x}
y={presence.cursor.y}
name={info?.name}
/>
);
});
};
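The presence.cursor and info fields used above depend on Liveblocks' global type augmentation. The shared declaration lives in @konobangu/collaboration/config (re-exported by a liveblocks config file later in this diff) and is not part of this diff, so the following TypeScript sketch is only an inferred shape, based on how these components and the /api/collaboration/auth route use it:

// Hypothetical augmentation; the real @konobangu/collaboration/config may differ.
declare global {
  interface Liveblocks {
    // Presence drives the live cursors; null means the pointer left the page.
    Presence: {
      cursor: { x: number; y: number } | null;
    };
    // UserMeta.info backs PresenceAvatar and the cursor name/color labels.
    UserMeta: {
      id: string;
      info: {
        name?: string;
        avatar?: string;
        color: string;
      };
    };
  }
}

export {};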


@ -0,0 +1,43 @@
import {
Breadcrumb,
BreadcrumbItem,
BreadcrumbLink,
BreadcrumbList,
BreadcrumbPage,
BreadcrumbSeparator,
} from '@konobangu/design-system/components/ui/breadcrumb';
import { Separator } from '@konobangu/design-system/components/ui/separator';
import { SidebarTrigger } from '@konobangu/design-system/components/ui/sidebar';
import { Fragment, type ReactNode } from 'react';
type HeaderProps = {
pages: string[];
page: string;
children?: ReactNode;
};
export const Header = ({ pages, page, children }: HeaderProps) => (
<header className="flex h-16 shrink-0 items-center justify-between gap-2">
<div className="flex items-center gap-2 px-4">
<SidebarTrigger className="-ml-1" />
<Separator orientation="vertical" className="mr-2 h-4" />
<Breadcrumb>
<BreadcrumbList>
{pages.map((page, index) => (
<Fragment key={page}>
{index > 0 && <BreadcrumbSeparator className="hidden md:block" />}
<BreadcrumbItem className="hidden md:block">
<BreadcrumbLink href="#">{page}</BreadcrumbLink>
</BreadcrumbItem>
</Fragment>
))}
<BreadcrumbSeparator className="hidden md:block" />
<BreadcrumbItem>
<BreadcrumbPage>{page}</BreadcrumbPage>
</BreadcrumbItem>
</BreadcrumbList>
</Breadcrumb>
</div>
{children}
</header>
);


@ -0,0 +1,44 @@
'use client';
import { analytics } from '@konobangu/analytics/client';
import { useSession } from '@konobangu/auth/client';
import { usePathname, useSearchParams } from 'next/navigation';
import { useEffect, useRef } from 'react';
export const PostHogIdentifier = () => {
const session = useSession();
const user = session?.data?.user;
const identified = useRef(false);
const pathname = usePathname();
const searchParams = useSearchParams();
useEffect(() => {
// Track pageviews
if (pathname && analytics) {
let url = window.origin + pathname;
if (searchParams.toString()) {
url = `${url}?${searchParams.toString()}`;
}
analytics.capture('$pageview', {
$current_url: url,
});
}
}, [pathname, searchParams]);
useEffect(() => {
if (!user || identified.current) {
return;
}
analytics.identify(user.id, {
email: user.email,
name: user.name,
createdAt: user.createdAt,
avatar: user.image,
});
identified.current = true;
}, [user]);
return null;
};


@ -0,0 +1,342 @@
'use client';
// import { OrganizationSwitcher, UserButton } from '@konobangu/auth/client';
import { ModeToggle } from '@konobangu/design-system/components/mode-toggle';
import {
Collapsible,
CollapsibleContent,
CollapsibleTrigger,
} from '@konobangu/design-system/components/ui/collapsible';
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from '@konobangu/design-system/components/ui/dropdown-menu';
import {
Sidebar,
SidebarContent,
SidebarFooter,
SidebarGroup,
SidebarGroupContent,
SidebarGroupLabel,
SidebarHeader,
SidebarInset,
SidebarMenu,
SidebarMenuAction,
SidebarMenuButton,
SidebarMenuItem,
SidebarMenuSub,
SidebarMenuSubButton,
SidebarMenuSubItem,
useSidebar,
} from '@konobangu/design-system/components/ui/sidebar';
import { cn } from '@konobangu/design-system/lib/utils';
import {
AnchorIcon,
BookOpenIcon,
BotIcon,
ChevronRightIcon,
FolderIcon,
FrameIcon,
LifeBuoyIcon,
MapIcon,
MoreHorizontalIcon,
PieChartIcon,
SendIcon,
Settings2Icon,
ShareIcon,
SquareTerminalIcon,
Trash2Icon,
} from 'lucide-react';
import type { ReactNode } from 'react';
type GlobalSidebarProperties = {
readonly children: ReactNode;
};
const data = {
user: {
name: 'shadcn',
email: 'm@example.com',
avatar: '/avatars/shadcn.jpg',
},
navMain: [
{
title: 'Playground',
url: '#',
icon: SquareTerminalIcon,
isActive: true,
items: [
{
title: 'History',
url: '#',
},
{
title: 'Starred',
url: '#',
},
{
title: 'Settings',
url: '#',
},
],
},
{
title: 'Models',
url: '#',
icon: BotIcon,
items: [
{
title: 'Genesis',
url: '#',
},
{
title: 'Explorer',
url: '#',
},
{
title: 'Quantum',
url: '#',
},
],
},
{
title: 'Documentation',
url: '#',
icon: BookOpenIcon,
items: [
{
title: 'Introduction',
url: '#',
},
{
title: 'Get Started',
url: '#',
},
{
title: 'Tutorials',
url: '#',
},
{
title: 'Changelog',
url: '#',
},
],
},
{
title: 'Settings',
url: '#',
icon: Settings2Icon,
items: [
{
title: 'General',
url: '#',
},
{
title: 'Team',
url: '#',
},
{
title: 'Billing',
url: '#',
},
{
title: 'Limits',
url: '#',
},
],
},
],
navSecondary: [
{
title: 'Webhooks',
url: '/webhooks',
icon: AnchorIcon,
},
{
title: 'Support',
url: '#',
icon: LifeBuoyIcon,
},
{
title: 'Feedback',
url: '#',
icon: SendIcon,
},
],
projects: [
{
name: 'Design Engineering',
url: '#',
icon: FrameIcon,
},
{
name: 'Sales & Marketing',
url: '#',
icon: PieChartIcon,
},
{
name: 'Travel',
url: '#',
icon: MapIcon,
},
],
};
export const GlobalSidebar = ({ children }: GlobalSidebarProperties) => {
const sidebar = useSidebar();
return (
<>
<Sidebar variant="inset">
<SidebarHeader>
<SidebarMenu>
<SidebarMenuItem>
<div
className={cn(
'h-[36px] overflow-hidden transition-all [&>div]:w-full',
sidebar.open ? '' : '-mx-1'
)}
>
{/* <OrganizationSwitcher
hidePersonal
afterSelectOrganizationUrl="/"
/> */}
</div>
</SidebarMenuItem>
</SidebarMenu>
</SidebarHeader>
<SidebarContent>
<SidebarGroup>
<SidebarGroupLabel>Platform</SidebarGroupLabel>
<SidebarMenu>
{data.navMain.map((item) => (
<Collapsible
key={item.title}
asChild
defaultOpen={item.isActive}
>
<SidebarMenuItem>
<SidebarMenuButton asChild tooltip={item.title}>
<a href={item.url}>
<item.icon />
<span>{item.title}</span>
</a>
</SidebarMenuButton>
{item.items?.length ? (
<>
<CollapsibleTrigger asChild>
<SidebarMenuAction className="data-[state=open]:rotate-90">
<ChevronRightIcon />
<span className="sr-only">Toggle</span>
</SidebarMenuAction>
</CollapsibleTrigger>
<CollapsibleContent>
<SidebarMenuSub>
{item.items?.map((subItem) => (
<SidebarMenuSubItem key={subItem.title}>
<SidebarMenuSubButton asChild>
<a href={subItem.url}>
<span>{subItem.title}</span>
</a>
</SidebarMenuSubButton>
</SidebarMenuSubItem>
))}
</SidebarMenuSub>
</CollapsibleContent>
</>
) : null}
</SidebarMenuItem>
</Collapsible>
))}
</SidebarMenu>
</SidebarGroup>
<SidebarGroup className="group-data-[collapsible=icon]:hidden">
<SidebarGroupLabel>Projects</SidebarGroupLabel>
<SidebarMenu>
{data.projects.map((item) => (
<SidebarMenuItem key={item.name}>
<SidebarMenuButton asChild>
<a href={item.url}>
<item.icon />
<span>{item.name}</span>
</a>
</SidebarMenuButton>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<SidebarMenuAction showOnHover>
<MoreHorizontalIcon />
<span className="sr-only">More</span>
</SidebarMenuAction>
</DropdownMenuTrigger>
<DropdownMenuContent
className="w-48"
side="bottom"
align="end"
>
<DropdownMenuItem>
<FolderIcon className="text-muted-foreground" />
<span>View Project</span>
</DropdownMenuItem>
<DropdownMenuItem>
<ShareIcon className="text-muted-foreground" />
<span>Share Project</span>
</DropdownMenuItem>
<DropdownMenuSeparator />
<DropdownMenuItem>
<Trash2Icon className="text-muted-foreground" />
<span>Delete Project</span>
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</SidebarMenuItem>
))}
<SidebarMenuItem>
<SidebarMenuButton>
<MoreHorizontalIcon />
<span>More</span>
</SidebarMenuButton>
</SidebarMenuItem>
</SidebarMenu>
</SidebarGroup>
<SidebarGroup className="mt-auto">
<SidebarGroupContent>
<SidebarMenu>
{data.navSecondary.map((item) => (
<SidebarMenuItem key={item.title}>
<SidebarMenuButton asChild>
<a href={item.url}>
<item.icon />
<span>{item.title}</span>
</a>
</SidebarMenuButton>
</SidebarMenuItem>
))}
</SidebarMenu>
</SidebarGroupContent>
</SidebarGroup>
</SidebarContent>
<SidebarFooter>
<SidebarMenu>
<SidebarMenuItem className="flex items-center gap-2">
{/* <UserButton
showName
appearance={{
elements: {
rootBox: 'flex overflow-hidden w-full',
userButtonBox: 'flex-row-reverse',
userButtonOuterIdentifier: 'truncate pl-0',
},
}}
/> */}
<ModeToggle />
</SidebarMenuItem>
</SidebarMenu>
</SidebarFooter>
</Sidebar>
<SidebarInset>{children}</SidebarInset>
</>
);
};


@ -0,0 +1,42 @@
import { getSessionFromHeaders } from '@konobangu/auth/server';
import { SidebarProvider } from '@konobangu/design-system/components/ui/sidebar';
import { env } from '@konobangu/env';
import { showBetaFeature } from '@konobangu/feature-flags';
import { secure } from '@konobangu/security';
import { redirect } from 'next/navigation';
import type { ReactNode } from 'react';
import { PostHogIdentifier } from './components/posthog-identifier';
import { GlobalSidebar } from './components/sidebar';
type AppLayoutProperties = {
readonly children: ReactNode;
};
const AppLayout = async ({ children }: AppLayoutProperties) => {
if (env.ARCJET_KEY) {
await secure(['CATEGORY:PREVIEW']);
}
const { user } = await getSessionFromHeaders();
if (!user) {
return redirect('/sign-in'); // from next/navigation
}
const betaFeature = await showBetaFeature();
return (
<SidebarProvider>
<GlobalSidebar>
{betaFeature && (
<div className="m-4 rounded-full bg-success p-1.5 text-center text-sm text-success-foreground">
Beta feature now available
</div>
)}
{children}
</GlobalSidebar>
<PostHogIdentifier />
</SidebarProvider>
);
};
export default AppLayout;


@ -0,0 +1,57 @@
import { getSessionFromHeaders } from '@konobangu/auth/server';
import { database } from '@konobangu/database';
import { env } from '@konobangu/env';
import type { Metadata } from 'next';
import dynamic from 'next/dynamic';
import { notFound } from 'next/navigation';
import { AvatarStack } from './components/avatar-stack';
import { Cursors } from './components/cursors';
import { Header } from './components/header';
const title = 'Acme Inc';
const description = 'My application.';
const CollaborationProvider = dynamic(() =>
import('./components/collaboration-provider').then(
(mod) => mod.CollaborationProvider
)
);
export const metadata: Metadata = {
title,
description,
};
const App = async () => {
const pages = await database.selectFrom('page').selectAll().execute();
const { orgId } = await getSessionFromHeaders();
if (!orgId) {
notFound();
}
return (
<>
<Header pages={['Building Your Application']} page="Data Fetching">
{env.LIVEBLOCKS_SECRET && (
<CollaborationProvider orgId={orgId}>
<AvatarStack />
<Cursors />
</CollaborationProvider>
)}
</Header>
<div className="flex flex-1 flex-col gap-4 p-4 pt-0">
<div className="grid auto-rows-min gap-4 md:grid-cols-3">
{pages.map((page) => (
<div key={page.id} className="aspect-video rounded-xl bg-muted/50">
{page.name}
</div>
))}
</div>
<div className="min-h-[100vh] flex-1 rounded-xl bg-muted/50 md:min-h-min" />
</div>
</>
);
};
export default App;


@ -0,0 +1,29 @@
import { webhooks } from '@konobangu/webhooks';
import { notFound } from 'next/navigation';
export const metadata = {
title: 'Webhooks',
description: 'Send webhooks to your users.',
};
const WebhooksPage = async () => {
const response = await webhooks.getAppPortal();
if (!response?.url) {
notFound();
}
return (
<div className="h-full w-full overflow-hidden">
<iframe
title="Webhooks"
src={response.url}
className="h-full w-full border-none"
allow="clipboard-write"
loading="lazy"
/>
</div>
);
};
export default WebhooksPage;


@ -0,0 +1,58 @@
import { ModeToggle } from '@konobangu/design-system/components/mode-toggle';
import { env } from '@konobangu/env';
import { CommandIcon } from 'lucide-react';
import Link from 'next/link';
import type { ReactNode } from 'react';
type AuthLayoutProps = {
readonly children: ReactNode;
};
const AuthLayout = ({ children }: AuthLayoutProps) => (
<div className="container relative grid h-dvh flex-col items-center justify-center lg:max-w-none lg:grid-cols-2 lg:px-0">
<div className="relative hidden h-full flex-col bg-muted p-10 text-white lg:flex dark:border-r">
<div className="absolute inset-0 bg-zinc-900" />
<div className="relative z-20 flex items-center font-medium text-lg">
<CommandIcon className="mr-2 h-6 w-6" />
Acme Inc
</div>
<div className="absolute top-4 right-4">
<ModeToggle />
</div>
<div className="relative z-20 mt-auto">
<blockquote className="space-y-2">
<p className="text-lg">
&ldquo;This library has saved me countless hours of work and helped
me deliver stunning designs to my clients faster than ever
before.&rdquo;
</p>
<footer className="text-sm">Sofia Davis</footer>
</blockquote>
</div>
</div>
<div className="lg:p-8">
<div className="mx-auto flex w-full max-w-[400px] flex-col justify-center space-y-6">
{children}
<p className="px-8 text-center text-muted-foreground text-sm">
By clicking continue, you agree to our{' '}
<Link
href={new URL('/legal/terms', env.NEXT_PUBLIC_WEB_URL).toString()}
className="underline underline-offset-4 hover:text-primary"
>
Terms of Service
</Link>{' '}
and{' '}
<Link
href={new URL('/legal/privacy', env.NEXT_PUBLIC_WEB_URL).toString()}
className="underline underline-offset-4 hover:text-primary"
>
Privacy Policy
</Link>
.
</p>
</div>
</div>
</div>
);
export default AuthLayout;


@ -0,0 +1,23 @@
import { createMetadata } from '@konobangu/seo/metadata';
import type { Metadata } from 'next';
import dynamic from 'next/dynamic';
const title = 'Welcome back';
const description = 'Enter your details to sign in.';
const SignIn = dynamic(() =>
import('@konobangu/auth/components/sign-in').then((mod) => mod.SignIn)
);
export const metadata: Metadata = createMetadata({ title, description });
const SignInPage = () => (
<>
<div className="flex flex-col space-y-2 text-center">
<h1 className="font-semibold text-2xl tracking-tight">{title}</h1>
<p className="text-muted-foreground text-sm">{description}</p>
</div>
<SignIn />
</>
);
export default SignInPage;


@ -0,0 +1,23 @@
import { createMetadata } from '@konobangu/seo/metadata';
import type { Metadata } from 'next';
import dynamic from 'next/dynamic';
const title = 'Create an account';
const description = 'Enter your details to get started.';
const SignUp = dynamic(() =>
import('@konobangu/auth/components/sign-up').then((mod) => mod.SignUp)
);
export const metadata: Metadata = createMetadata({ title, description });
const SignUpPage = () => (
<>
<div className="flex flex-col space-y-2 text-center">
<h1 className="font-semibold text-2xl tracking-tight">{title}</h1>
<p className="text-muted-foreground text-sm">{description}</p>
</div>
<SignUp />
</>
);
export default SignUpPage;


@ -0,0 +1,3 @@
import { getFlags } from '@konobangu/feature-flags/access';
export const GET = getFlags;


@ -0,0 +1,63 @@
'use server';
import {
getFullOrganizationFromSession,
getSessionFromHeaders,
} from '@konobangu/auth/server';
import { tailwind } from '@konobangu/tailwind-config';
const colors = [
tailwind.theme.colors.red[500],
tailwind.theme.colors.orange[500],
tailwind.theme.colors.amber[500],
tailwind.theme.colors.yellow[500],
tailwind.theme.colors.lime[500],
tailwind.theme.colors.green[500],
tailwind.theme.colors.emerald[500],
tailwind.theme.colors.teal[500],
tailwind.theme.colors.cyan[500],
tailwind.theme.colors.sky[500],
tailwind.theme.colors.blue[500],
tailwind.theme.colors.indigo[500],
tailwind.theme.colors.violet[500],
tailwind.theme.colors.purple[500],
tailwind.theme.colors.fuchsia[500],
tailwind.theme.colors.pink[500],
tailwind.theme.colors.rose[500],
];
export const getUsers = async (
userIds: string[]
): Promise<
| {
data: Liveblocks['UserMeta']['info'][];
}
| {
error: unknown;
}
> => {
try {
const session = await getSessionFromHeaders();
const { orgId } = session;
if (!orgId) {
throw new Error('Not logged in');
}
const { fullOrganization } = await getFullOrganizationFromSession(session);
const members = fullOrganization?.members || [];
const data: Liveblocks['UserMeta']['info'][] = members
.filter((user) => user?.userId && userIds.includes(user?.userId))
.map((user) => ({
name: user.user.name ?? user.user.email ?? 'Unknown user',
picture: user.user.image,
color: colors[Math.floor(Math.random() * colors.length)],
}));
return { data };
} catch (error) {
return { error };
}
};
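Because the color above is re-rolled on every call, the same member can resolve to a different color each time getUsers runs. If stable colors are wanted, one option is a deterministic pick keyed on the user id; this is a sketch only, reusing the colors array defined above, and colorForUser is a hypothetical helper that is not part of this diff:

// Hash the userId so a given user always maps to the same palette entry,
// across calls and across clients.
const colorForUser = (userId: string): string => {
  let hash = 0;
  for (const char of userId) {
    hash = (hash * 31 + char.charCodeAt(0)) >>> 0;
  }
  return colors[hash % colors.length];
};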


@ -0,0 +1,50 @@
'use server';
import {
getFullOrganizationFromSession,
getSessionFromHeaders,
} from '@konobangu/auth/server';
import Fuse from 'fuse.js';
export const searchUsers = async (
query: string
): Promise<
| {
data: string[];
}
| {
error: unknown;
}
> => {
try {
const session = await getSessionFromHeaders();
const { orgId } = session;
if (!orgId) {
throw new Error('Not logged in');
}
const { fullOrganization } = await getFullOrganizationFromSession(session);
const members = fullOrganization?.members || [];
const users = members.map((user) => ({
id: user.id,
name: user.user.name ?? user.user.email ?? 'Unknown user',
imageUrl: user.user.image,
}));
const fuse = new Fuse(users, {
keys: ['name'],
minMatchCharLength: 1,
threshold: 0.3,
});
const results = fuse.search(query);
const data = results.map((result) => result.item.id);
return { data };
} catch (error) {
return { error };
}
};


@ -0,0 +1,42 @@
import { getSessionFromHeaders } from '@konobangu/auth/server';
import { authenticate } from '@konobangu/collaboration/auth';
import { tailwind } from '@konobangu/tailwind-config';
const COLORS = [
tailwind.theme.colors.red[500],
tailwind.theme.colors.orange[500],
tailwind.theme.colors.amber[500],
tailwind.theme.colors.yellow[500],
tailwind.theme.colors.lime[500],
tailwind.theme.colors.green[500],
tailwind.theme.colors.emerald[500],
tailwind.theme.colors.teal[500],
tailwind.theme.colors.cyan[500],
tailwind.theme.colors.sky[500],
tailwind.theme.colors.blue[500],
tailwind.theme.colors.indigo[500],
tailwind.theme.colors.violet[500],
tailwind.theme.colors.purple[500],
tailwind.theme.colors.fuchsia[500],
tailwind.theme.colors.pink[500],
tailwind.theme.colors.rose[500],
];
export const POST = async () => {
const session = await getSessionFromHeaders();
const { orgId, user } = session;
if (!user || !orgId) {
return new Response('Unauthorized', { status: 401 });
}
return authenticate({
userId: user.id,
orgId,
userInfo: {
name: user.name ?? user.email ?? undefined,
avatar: user.image ?? undefined,
color: COLORS[Math.floor(Math.random() * COLORS.length)],
},
});
};

apps/app/app/apple-icon.png Normal file (binary, 216 B, not shown)


@ -0,0 +1,17 @@
import { database } from '@konobangu/database';
export const POST = async () => {
const newPage = await database
.insertInto('page')
.values([
{
name: 'cron-temp',
},
])
.returning('id')
.executeTakeFirstOrThrow();
// The query builder is lazy here: without .execute() the delete never actually runs.
await database.deleteFrom('page').where('id', '=', newPage.id).execute();
return new Response('OK', { status: 200 });
};


@ -0,0 +1,29 @@
'use client';
import { Button } from '@konobangu/design-system/components/ui/button';
import { fonts } from '@konobangu/design-system/lib/fonts';
import { captureException } from '@sentry/nextjs';
import type NextError from 'next/error';
import { useEffect } from 'react';
type GlobalErrorProperties = {
readonly error: NextError & { digest?: string };
readonly reset: () => void;
};
const GlobalError = ({ error, reset }: GlobalErrorProperties) => {
useEffect(() => {
captureException(error);
}, [error]);
return (
<html lang="en" className={fonts}>
<body>
<h1>Oops, something went wrong</h1>
<Button onClick={() => reset()}>Try again</Button>
</body>
</html>
);
};
export default GlobalError;


@ -0,0 +1,3 @@
export const runtime = 'edge';
export const GET = (): Response => new Response('OK', { status: 200 });

apps/app/app/icon.png Normal file (binary, 96 B, not shown)

apps/app/app/layout.tsx Normal file

@ -0,0 +1,18 @@
import '@konobangu/design-system/styles/globals.css';
import { DesignSystemProvider } from '@konobangu/design-system';
import { fonts } from '@konobangu/design-system/lib/fonts';
import type { ReactNode } from 'react';
type RootLayoutProperties = {
readonly children: ReactNode;
};
const RootLayout = ({ children }: RootLayoutProperties) => (
<html lang="en" className={fonts} suppressHydrationWarning>
<body>
<DesignSystemProvider>{children}</DesignSystemProvider>
</body>
</html>
);
export default RootLayout;

Binary file not shown (57 KiB).


@ -0,0 +1,3 @@
import { initializeSentry } from '@konobangu/next-config/instrumentation';
export const register = initializeSentry();


@ -0,0 +1 @@
export * from '@konobangu/collaboration/config';

apps/app/middleware.ts Normal file

@ -0,0 +1,22 @@
import { authMiddleware } from '@konobangu/auth/middleware';
import {
noseconeConfig,
noseconeMiddleware,
} from '@konobangu/security/middleware';
import type { NextRequest } from 'next/server';
const securityHeaders = noseconeMiddleware(noseconeConfig);
export async function middleware(_request: NextRequest) {
const response = await securityHeaders();
return authMiddleware(response as any);
}
export const config = {
matcher: [
// Skip Next.js internals and all static files, unless found in search params
'/((?!_next|[^?]*\\.(?:html?|css|js(?!on)|jpe?g|webp|png|gif|svg|ttf|woff2?|ico|csv|docx?|xlsx?|zip|webmanifest)).*)',
// Always run for API routes
'/(api|trpc)(.*)',
],
};

apps/app/next.config.ts Normal file

@ -0,0 +1,15 @@
import { env } from '@konobangu/env';
import { config, withAnalyzer, withSentry } from '@konobangu/next-config';
import type { NextConfig } from 'next';
let nextConfig: NextConfig = { ...config };
if (env.VERCEL) {
nextConfig = withSentry(nextConfig);
}
if (env.ANALYZE === 'true') {
nextConfig = withAnalyzer(nextConfig);
}
export default nextConfig;

apps/app/package.json Normal file

@ -0,0 +1,51 @@
{
"name": "app",
"private": true,
"scripts": {
"dev": "next dev -p 5000 --turbopack",
"build": "next build",
"start": "next start",
"analyze": "ANALYZE=true pnpm build",
"test": "vitest run",
"clean": "git clean -xdf .cache .turbo dist node_modules",
"typecheck": "tsc --noEmit --emitDeclarationOnly false"
},
"dependencies": {
"@konobangu/analytics": "workspace:*",
"@konobangu/auth": "workspace:*",
"@konobangu/collaboration": "workspace:*",
"@konobangu/database": "workspace:*",
"@konobangu/design-system": "workspace:*",
"@konobangu/env": "workspace:*",
"@konobangu/feature-flags": "workspace:*",
"@konobangu/migrate": "workspace:*",
"@konobangu/next-config": "workspace:*",
"@konobangu/security": "workspace:*",
"@konobangu/seo": "workspace:*",
"@konobangu/tailwind-config": "workspace:*",
"@konobangu/webhooks": "workspace:*",
"@prisma/client": "6.0.1",
"@sentry/nextjs": "^8.48.0",
"fuse.js": "^7.0.0",
"import-in-the-middle": "^1.12.0",
"lucide-react": "^0.468.0",
"next": "^15.1.4",
"next-themes": "^0.4.4",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"require-in-the-middle": "^7.4.0"
},
"devDependencies": {
"@konobangu/testing": "workspace:*",
"@konobangu/typescript-config": "workspace:*",
"@testing-library/dom": "^10.4.0",
"@testing-library/react": "^16.1.0",
"@types/node": "22.10.1",
"@types/react": "19.0.1",
"@types/react-dom": "19.0.2",
"jsdom": "^25.0.1",
"tailwindcss": "^3.4.17",
"typescript": "^5.7.3",
"vitest": "^2.1.8"
}
}


@ -0,0 +1 @@
export { default } from '@konobangu/design-system/postcss.config.mjs';


@ -0,0 +1,34 @@
/*
* This file configures the initialization of Sentry on the client.
* The config you add here will be used whenever a user loads a page in their browser.
* https://docs.sentry.io/platforms/javascript/guides/nextjs/
*/
import { init, replayIntegration } from '@sentry/nextjs';
init({
dsn: process.env.NEXT_PUBLIC_SENTRY_DSN,
// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
replaysOnErrorSampleRate: 1,
/*
* This sets the sample rate to be 10%. You may want this to be 100% while
* in development and sample at a lower rate in production
*/
replaysSessionSampleRate: 0.1,
// You can remove this option if you're not planning to use the Sentry Session Replay feature:
integrations: [
replayIntegration({
// Additional Replay configuration goes in here, for example:
maskAllText: true,
blockAllMedia: true,
}),
],
});
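As the comments above note, the fixed tracesSampleRate can be replaced with a tracesSampler callback for finer control. A hedged sketch follows; the route filter and rates are illustrative, and the exact samplingContext fields depend on the SDK version:

import { init } from '@sentry/nextjs';

init({
  dsn: process.env.NEXT_PUBLIC_SENTRY_DSN,
  tracesSampler: (samplingContext) => {
    // Drop noisy health-check transactions entirely.
    if (samplingContext.name?.includes('/health')) {
      return 0;
    }
    // Sample everything else at 10% in production, 100% elsewhere.
    return process.env.NODE_ENV === 'production' ? 0.1 : 1;
  },
});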


@ -0,0 +1 @@
export { config as default } from '@konobangu/tailwind-config/config';

apps/app/tsconfig.json Normal file

@ -0,0 +1,17 @@
{
"extends": "@konobangu/typescript-config/nextjs.json",
"compilerOptions": {
"baseUrl": ".",
"paths": {
"@/*": ["./*"],
"@konobangu/*": ["../../packages/*"]
}
},
"include": [
"next-env.d.ts",
"next.config.ts",
"**/*.ts",
"**/*.tsx",
".next/types/**/*.ts"
]
}

apps/app/vercel.json Normal file

@ -0,0 +1,8 @@
{
"crons": [
{
"path": "/cron/keep-alive",
"schedule": "0 1 * * *"
}
]
}


@ -0,0 +1 @@
export { default } from '@konobangu/testing';


@@ -4,5 +4,8 @@
   "scripts": {
     "dev": "npx --yes mintlify dev --port 5004",
     "lint": "npx --yes mintlify broken-links"
+  },
+  "devDependencies": {
+    "typescript": "^5.7.3"
   }
 }


@@ -10,11 +10,15 @@
     "typecheck": "tsc --noEmit --emitDeclarationOnly false"
   },
   "dependencies": {
+    "@konobangu/email": "workspace:*",
     "@react-email/components": "0.0.31",
     "react": "^19.0.0",
     "react-email": "3.0.4"
   },
   "devDependencies": {
-    "@types/react": "19.0.1"
+    "@konobangu/typescript-config": "workspace:*",
+    "@types/node": "22.10.1",
+    "@types/react": "19.0.1",
+    "typescript": "^5.7.3"
   }
 }


@@ -1,9 +1,5 @@
 {
-  "extends": "../../tsconfig.base.json",
-  "compilerOptions": {
-    "composite": true,
-    "jsx": "react-jsx"
-  },
+  "extends": "@konobangu/typescript-config/nextjs.json",
   "include": ["**/*.ts", "**/*.tsx"],
   "exclude": ["node_modules"]
 }


@@ -22,6 +22,7 @@ testcontainers = [
 ]
 [dependencies]
+loco-rs = { version = "0.14" }
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"
 tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
@@ -36,11 +37,12 @@ sea-orm = { version = "1.1", features = [
   "debug-print",
 ] }
 figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
 axum = "0.8"
 uuid = { version = "1.6.0", features = ["v4"] }
 tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
 sea-orm-migration = { version = "1.1", features = ["runtime-tokio-rustls"] }
-reqwest = { version = "0.12", default-features = false, features = [
+reqwest = { version = "0.12", features = [
   "charset",
   "http2",
   "json",
@@ -94,20 +96,11 @@ seaography = { version = "1.1" }
 quirks_path = "0.1.1"
 base64 = "0.22.1"
 tower = "0.5.2"
-axum-extra = "0.10"
-tower-http = { version = "0.6", features = [
-  "trace",
-  "catch-panic",
-  "timeout",
-  "add-extension",
-  "cors",
-  "fs",
-  "set-header",
-  "compression-full",
-] }
+axum-extra = "0.10.0"
+tower-http = "0.6.2"
 serde_yaml = "0.9.34"
 tera = "1.20.0"
-openidconnect = { version = "4", features = ["rustls-tls"] }
+openidconnect = "4"
 http-cache-reqwest = { version = "0.15", features = [
   "manager-cacache",
   "manager-moka",
@@ -124,15 +117,10 @@ nom = "8.0.0"
 secrecy = { version = "0.10.3", features = ["serde"] }
 http = "1.2.0"
 cookie = "0.18.1"
-async-stream = "0.3.6"
-serde_variant = "0.1.3"
-tracing-appender = "0.2.3"
-clap = "4.5.31"
-futures-util = "0.3.31"
-ipnetwork = "0.21.1"
 [dev-dependencies]
 serial_test = "3"
+loco-rs = { version = "0.14", features = ["testing"] }
 insta = { version = "1", features = ["redactions", "yaml", "filters"] }
 mockito = "1.6.1"
 rstest = "0.24.0"


@ -0,0 +1,145 @@
# Loco configuration file documentation
# Application logging configuration
logger:
# Enable or disable logging.
enable: true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace: true
# Log level, options: trace, debug, info, warn or error.
level: debug
# Define the logging format. options: compact, pretty or Json
format: compact
# By default the logger only keeps logs that come from your code or from the `loco` framework.
# Uncomment the line below to override the filters and also see logs from third-party libraries.
# override_filter: trace
# Web server configuration
server:
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
port: 5001
binding: "0.0.0.0"
# The UI hostname or IP address that mailers will point to.
host: '{{ get_env(name="HOST", default="localhost") }}'
# Out-of-the-box middleware configuration. To disable a middleware, set its `enable` field to `false` or comment out its block.
middlewares:
# Enable Etag cache header middleware
etag:
enable: true
# Limits the request payload size. Payloads larger than this limit are rejected.
limit_payload:
# Enable/Disable the middleware.
enable: true
# The size limit. Units: b, kb, kib, mb, mib, gb, gib.
body_limit: 5mb
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
logger:
# Enable/Disable the middleware.
enable: true
# When your code panics, the request still returns a 500 status code.
catch_panic:
# Enable/Disable the middleware.
enable: true
# Timeout middleware for incoming requests. Requests that exceed the configured timeout are cut off and a 408 status code is returned.
timeout_request:
# Enable/Disable the middleware.
enable: false
# Duration time in milliseconds.
timeout: 5000
cors:
enable: true
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://loco.rs
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
fallback:
enable: false
# Worker Configuration
workers:
# specifies the worker mode. Options:
# - BackgroundQueue - Workers operate asynchronously in the background, processing queued tasks.
# - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
# - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
mode: BackgroundAsync
# Mailer Configuration.
mailer:
# SMTP mailer configuration.
smtp:
# Enable/Disable smtp mailer.
enable: true
# SMTP server host, e.g. localhost or smtp.gmail.com
host: '{{ get_env(name="MAILER_HOST", default="localhost") }}'
# SMTP server port
port: 1025
# Use secure connection (SSL/TLS).
secure: false
# auth:
# user:
# password:
# Database Configuration
database:
# Database connection URI
uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@localhost:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging: true
# Set the timeout duration when acquiring a connection.
connect_timeout: 500
# Set the idle duration before closing a connection.
idle_timeout: 500
# Minimum number of connections for a pool.
min_connections: 1
# Maximum number of connections for a pool.
max_connections: 1
# Run migrations up when the application loads
auto_migrate: true
# Truncate the database when the application loads. This is a dangerous operation; make sure you use this flag only in dev environments or test mode.
dangerously_truncate: false
# Recreate the schema when the application loads. This is a dangerous operation; make sure you use this flag only in dev environments or test mode.
dangerously_recreate: false
# Redis Configuration
redis:
# Redis connection URI
uri: '{{ get_env(name="REDIS_URL", default="redis://localhost:6379") }}'
# Dangerously flush all data in Redis on startup. This is a dangerous operation; make sure you use this flag only in dev environments or test mode.
dangerously_flush: false
settings:
dal:
data_dir: '{{ get_env(name="DAL_DATA_DIR", default="./data") }}'
mikan:
base_url: "https://mikanani.me/"
http_client:
exponential_backoff_max_retries: 3
leaky_bucket_max_tokens: 2
leaky_bucket_initial_tokens: 0
leaky_bucket_refill_tokens: 1
leaky_bucket_refill_interval: 500
auth:
auth_type: '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
basic_user: '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
basic_password: '{{ get_env(name="BASIC_PASSWORD", default = "konobangu") }}'
oidc_issuer: '{{ get_env(name="OIDC_ISSUER", default = "") }}'
oidc_audience: '{{ get_env(name="OIDC_AUDIENCE", default = "") }}'
oidc_client_id: '{{ get_env(name="OIDC_CLIENT_ID", default = "") }}'
oidc_client_secret: '{{ get_env(name="OIDC_CLIENT_SECRET", default = "") }}'
oidc_extra_scopes: '{{ get_env(name="OIDC_EXTRA_SCOPES", default = "") }}'
oidc_extra_claim_key: '{{ get_env(name="OIDC_EXTRA_CLAIM_KEY", default = "") }}'
oidc_extra_claim_value: '{{ get_env(name="OIDC_EXTRA_CLAIM_VALUE", default = "") }}'
graphql:
depth_limit: null
complexity_limit: null
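For intuition, the leaky_bucket_* settings above describe a token bucket: the client may burst up to leaky_bucket_max_tokens requests, starts with leaky_bucket_initial_tokens, and regains leaky_bucket_refill_tokens every leaky_bucket_refill_interval milliseconds. A minimal TypeScript illustration of those semantics (not the recorder's actual Rust implementation):

// With the values above (max 2, initial 0, +1 token per 500 ms), at most two
// requests can ever burst and the sustained rate is about two requests per second.
class TokenBucket {
  private tokens: number;
  private lastRefill = Date.now();

  constructor(
    private readonly maxTokens = 2,
    initialTokens = 0,
    private readonly refillTokens = 1,
    private readonly refillIntervalMs = 500
  ) {
    this.tokens = initialTokens;
  }

  tryAcquire(): boolean {
    const elapsed = Date.now() - this.lastRefill;
    const intervals = Math.floor(elapsed / this.refillIntervalMs);
    if (intervals > 0) {
      this.tokens = Math.min(this.maxTokens, this.tokens + intervals * this.refillTokens);
      this.lastRefill += intervals * this.refillIntervalMs;
    }
    if (this.tokens > 0) {
      this.tokens -= 1;
      return true; // ok to send a request now
    }
    return false; // out of tokens; wait for the next refill
  }
}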


@ -0,0 +1,125 @@
# Loco configuration file documentation
# Application logging configuration
logger:
# Enable or disable logging.
enable: true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace: true
# Log level, options: trace, debug, info, warn or error.
level: debug
# Define the logging format. options: compact, pretty or Json
format: compact
# By default the logger only keeps logs that come from your code or from the `loco` framework.
# Uncomment the line below to override the filters and also see logs from third-party libraries.
# override_filter: trace
# Web server configuration
server:
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
port: 5001
# The UI hostname or IP address that mailers will point to.
host: http://webui.konobangu.com
# Out-of-the-box middleware configuration. To disable a middleware, set its `enable` field to `false` or comment out its block.
middlewares:
# Enable Etag cache header middleware
etag:
enable: true
# Limits the request payload size. Payloads larger than this limit are rejected.
limit_payload:
# Enable/Disable the middleware.
enable: true
# The size limit. Units: b, kb, kib, mb, mib, gb, gib.
body_limit: 5mb
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
logger:
# Enable/Disable the middleware.
enable: true
# When your code panics, the request still returns a 500 status code.
catch_panic:
# Enable/Disable the middleware.
enable: true
# Timeout middleware for incoming requests. Requests that exceed the configured timeout are cut off and a 408 status code is returned.
timeout_request:
# Enable/Disable the middleware.
enable: false
# Duration time in milliseconds.
timeout: 5000
cors:
enable: true
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://loco.rs
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
# Worker Configuration
workers:
# specifies the worker mode. Options:
# - BackgroundQueue - Workers operate asynchronously in the background, processing queued tasks.
# - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
# - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
mode: BackgroundQueue
# Mailer Configuration.
mailer:
# SMTP mailer configuration.
smtp:
# Enable/Disable smtp mailer.
enable: true
# SMTP server host, e.g. localhost or smtp.gmail.com
host: '{{ get_env(name="MAILER_HOST", default="localhost") }}'
# SMTP server port
port: 1025
# Use secure connection (SSL/TLS).
secure: false
# auth:
# user:
# password:
# Database Configuration
database:
# Database connection URI
uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging: true
# Set the timeout duration when acquiring a connection.
connect_timeout: 500
# Set the idle duration before closing a connection.
idle_timeout: 500
# Minimum number of connections for a pool.
min_connections: 1
# Maximum number of connections for a pool.
max_connections: 1
# Run migrations up when the application loads
auto_migrate: true
# Truncate the database when the application loads. This is a dangerous operation; make sure you use this flag only in dev environments or test mode.
dangerously_truncate: false
# Recreate the schema when the application loads. This is a dangerous operation; make sure you use this flag only in dev environments or test mode.
dangerously_recreate: false
# Redis Configuration
redis:
# Redis connection URI
uri: '{{ get_env(name="REDIS_URL", default="redis://127.0.0.1:6379") }}'
# Dangerously flush all data in Redis on startup. This is a dangerous operation; make sure you use this flag only in dev environments or test mode.
dangerously_flush: false
settings:
dal:
data_dir: ./temp
mikan:
http_client:
exponential_backoff_max_retries: 3
leaky_bucket_max_tokens: 2
leaky_bucket_initial_tokens: 0
leaky_bucket_refill_tokens: 1
leaky_bucket_refill_interval: 500
user_agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0"
base_url: "https://mikanani.me/"


@@ -1,68 +1,65 @@
-// #![allow(unused_imports)]
-// use color_eyre::eyre::Context;
-// use itertools::Itertools;
-// use loco_rs::{
-//     app::Hooks,
-//     boot::{BootResult, StartMode},
-//     environment::Environment,
-//     prelude::AppContext as LocoContext,
-// };
-// use recorder::{
-//     app::{App1, AppContext},
-//     errors::RResult,
-//     migrations::Migrator,
-//     models::{
-//         subscribers::SEED_SUBSCRIBER,
-//         subscriptions::{self, SubscriptionCreateFromRssDto},
-//     },
-// };
-// use sea_orm::ColumnTrait;
-// use sea_orm_migration::MigratorTrait;
-// async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> RResult<()> {
-//     let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
-//     // let rss_link =
-//     //     "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
-//     let subscription = if let Some(subscription) =
-//         subscriptions::Entity::find()
-//             .filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
-//             .one(&ctx.db)
-//             .await?
-//     {
-//         subscription
-//     } else {
-//         subscriptions::Model::add_subscription(
-//             ctx,
-//             subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
-//                 rss_link: rss_link.to_string(),
-//                 display_name: String::from("Mikan Project - 我的番组"),
-//                 enabled: Some(true),
-//             }),
-//             1,
-//         )
-//         .await?
-//     };
-//     subscription.pull_subscription(ctx).await?;
-//     Ok(())
-// }
-// async fn init() -> RResult<LocoContext> {
-//     let ctx = loco_rs::cli::playground::<App1>().await?;
-//     let BootResult {
-//         app_context: ctx, ..
-//     } = loco_rs::boot::run_app::<App1>(&StartMode::ServerOnly, ctx).await?;
-//     Migrator::up(&ctx.db, None).await?;
-//     Ok(ctx)
-// }
-// #[tokio::main]
-// async fn main() -> color_eyre::eyre::Result<()> {
-//     pull_mikan_bangumi_rss(&ctx).await?;
-//     Ok(())
-// }
-fn main() {}
+#![allow(unused_imports)]
+use color_eyre::eyre::Context;
+use itertools::Itertools;
+use loco_rs::{
+    app::Hooks,
+    boot::{BootResult, StartMode},
+    environment::Environment,
+    prelude::*,
+};
+use recorder::{
+    app::App,
+    migrations::Migrator,
+    models::{
+        subscribers::SEED_SUBSCRIBER,
+        subscriptions::{self, SubscriptionCreateFromRssDto},
+    },
+};
+use sea_orm_migration::MigratorTrait;
+async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> color_eyre::eyre::Result<()> {
+    let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
+    // let rss_link =
+    //     "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
+    let subscription = if let Some(subscription) = subscriptions::Entity::find()
+        .filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
+        .one(&ctx.db)
+        .await?
+    {
+        subscription
+    } else {
+        subscriptions::Model::add_subscription(
+            ctx,
+            subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
+                rss_link: rss_link.to_string(),
+                display_name: String::from("Mikan Project - 我的番组"),
+                enabled: Some(true),
+            }),
+            1,
+        )
+        .await?
+    };
+    subscription.pull_subscription(ctx).await?;
+    Ok(())
+}
+async fn init() -> color_eyre::eyre::Result<AppContext> {
+    color_eyre::install()?;
+    let ctx = loco_rs::cli::playground::<App>().await?;
+    let BootResult {
+        app_context: ctx, ..
+    } = loco_rs::boot::run_app::<App>(&StartMode::ServerOnly, ctx).await?;
+    Migrator::up(&ctx.db, None).await?;
+    Ok(ctx)
+}
+#[tokio::main]
+async fn main() -> color_eyre::eyre::Result<()> {
+    let ctx = init().await?;
+    pull_mikan_bangumi_rss(&ctx).await?;
+    Ok(())
+}


@@ -8,17 +8,30 @@
     "preview": "rsbuild preview"
   },
   "dependencies": {
+    "@abraham/reflection": "^0.12.0",
     "@graphiql/react": "^0.28.2",
     "@graphiql/toolkit": "^0.11.1",
+    "@konobangu/design-system": "workspace:*",
+    "@konobangu/tailwind-config": "workspace:*",
+    "@outposts/injection-js": "^2.5.1",
+    "@tanstack/react-router": "^1.95.6",
+    "@tanstack/router-devtools": "^1.95.6",
     "graphiql": "^3.8.3",
-    "graphql-ws": "^6.0.4",
+    "graphql-ws": "^5.16.2",
     "observable-hooks": "^4.2.4",
+    "oidc-client-rx": "0.1.0-alpha.6",
     "react": "^19.0.0",
-    "react-dom": "^19.0.0"
+    "react-dom": "^19.0.0",
+    "rxjs": "^7.8.1"
   },
   "devDependencies": {
+    "@konobangu/typescript-config": "workspace:*",
+    "@rsbuild/core": "1.1.3",
     "@rsbuild/plugin-react": "^1.1.1",
+    "@tanstack/router-plugin": "^1.95.6",
     "@types/react": "^19.0.7",
-    "@types/react-dom": "^19.0.3"
+    "@types/react-dom": "^19.0.3",
+    "tailwindcss": "^3.4.17",
+    "typescript": "^5.7.3"
   }
 }


@@ -1,5 +1,5 @@
 export default {
   plugins: {
-    'tailwindcss': {},
+    tailwindcss: {},
   },
-};
+}


@ -1,5 +0,0 @@
export default {
plugins: {
'@tailwindcss/postcss': {},
},
};


@ -1,103 +0,0 @@
# Application logging configuration
[logger]
# Enable or disable logging.
enable = true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace = true
# Log level, options: trace, debug, info, warn or error.
level = "debug"
# Define the logging format. options: compact, pretty or Json
format = "compact"
# By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries
# Uncomment the line below to override to see all third party libraries you can enable this config and override the logger filters.
# override_filter: trace
# Web server configuration
[server]
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
port = 5001
binding = "0.0.0.0"
# The UI hostname or IP address that mailers will point to.
host = '{{ get_env(name="HOST", default="localhost") }}'
# Out of the box middleware configuration. to disable middleware you can changed the `enable` field to `false` of comment the middleware block
# Enable Etag cache header middleware
[server.middlewares.etag]
enable = true
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
[server.middleware.request_id]
enable = true
[server.middleware.logger]
enable = true
# when your code is panicked, the request still returns 500 status code.
[server.middleware.catch_panic]
enable = true
# Timeout for incoming requests middleware. requests that take more time from the configuration will cute and 408 status code will returned.
[server.middleware.timeout_request]
enable = false
# Duration time in milliseconds.
timeout = 5000
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://loco.rs
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
[server.middleware.cors]
enable = true
# Database Configuration
[database]
# Database connection URI
uri = '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@localhost:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging = true
# Set the timeout duration when acquiring a connection.
connect_timeout = 500
# Set the idle duration before closing a connection.
idle_timeout = 500
# Minimum number of connections for a pool.
min_connections = 1
# Maximum number of connections for a pool.
max_connections = 10
# Run migration up when application loaded
auto_migrate = true
[storage]
data_dir = '{{ get_env(name="STORAGE_DATA_DIR", default="./data") }}'
[mikan]
base_url = "https://mikanani.me/"
[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 1
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500
[auth]
auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
basic_password = '{{ get_env(name="BASIC_PASSWORD", default = "konobangu") }}'
oidc_issuer = '{{ get_env(name="OIDC_ISSUER", default = "") }}'
oidc_audience = '{{ get_env(name="OIDC_AUDIENCE", default = "") }}'
oidc_client_id = '{{ get_env(name="OIDC_CLIENT_ID", default = "") }}'
oidc_client_secret = '{{ get_env(name="OIDC_CLIENT_SECRET", default = "") }}'
oidc_extra_scopes = '{{ get_env(name="OIDC_EXTRA_SCOPES", default = "") }}'
oidc_extra_claim_key = '{{ get_env(name="OIDC_EXTRA_CLAIM_KEY", default = "") }}'
oidc_extra_claim_value = '{{ get_env(name="OIDC_EXTRA_CLAIM_VALUE", default = "") }}'
[graphql]
# depth_limit = inf
# complexity_limit = inf


@ -1,139 +0,0 @@
use std::sync::Arc;
use clap::{Parser, command};
use super::{AppContext, core::App, env::Environment};
use crate::{app::config::AppConfig, errors::RResult};
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
pub struct MainCliArgs {
/// Explicit config file path
#[arg(short, long)]
config_file: Option<String>,
/// Explicit dotenv file path
#[arg(short, long)]
dotenv_file: Option<String>,
/// Explicit working dir
#[arg(short, long)]
working_dir: Option<String>,
/// Explicit environment
#[arg(short, long)]
environment: Option<Environment>,
}
pub struct AppBuilder {
dotenv_file: Option<String>,
config_file: Option<String>,
working_dir: String,
enviornment: Environment,
}
impl AppBuilder {
pub async fn from_main_cli(environment: Option<Environment>) -> RResult<Self> {
let args = MainCliArgs::parse();
let environment = environment.unwrap_or_else(|| {
args.environment.unwrap_or({
if cfg!(test) {
Environment::Testing
} else if cfg!(debug_assertions) {
Environment::Development
} else {
Environment::Production
}
})
});
let mut builder = Self::default();
if let Some(working_dir) = args.working_dir {
builder = builder.working_dir(working_dir);
}
if matches!(
&environment,
Environment::Testing | Environment::Development
) {
builder = builder.working_dir_from_manifest_dir();
}
builder = builder
.config_file(args.config_file)
.dotenv_file(args.dotenv_file)
.environment(environment);
Ok(builder)
}
pub async fn build(self) -> RResult<App> {
AppConfig::load_dotenv(
&self.environment,
&self.working_dir,
self.dotenv_file.as_deref(),
)
.await?;
let config = AppConfig::load_config(
&self.environment,
&self.working_dir,
self.config_file.as_deref(),
)
.await?;
let app_context = Arc::new(
AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?,
);
Ok(App {
context: app_context,
builder: self,
})
}
pub fn working_dir(self, working_dir: String) -> Self {
let mut ret = self;
ret.working_dir = working_dir;
ret
}
pub fn environment(self, environment: Environment) -> Self {
let mut ret = self;
ret.environment = environment;
ret
}
pub fn config_file(self, config_file: Option<String>) -> Self {
let mut ret = self;
ret.config_file = config_file;
ret
}
pub fn dotenv_file(self, dotenv_file: Option<String>) -> Self {
let mut ret = self;
ret.dotenv_file = dotenv_file;
ret
}
pub fn working_dir_from_manifest_dir(self) -> Self {
let manifest_dir = if cfg!(debug_assertions) || cfg!(test) {
env!("CARGO_MANIFEST_DIR")
} else {
"./apps/recorder"
};
self.working_dir(manifest_dir.to_string())
}
}
impl Default for AppBuilder {
fn default() -> Self {
Self {
environment: Environment::Production,
dotenv_file: None,
config_file: None,
working_dir: String::from("."),
}
}
}
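
As a usage note, the builder can also be driven programmatically instead of through MainCliArgs; a minimal sketch, assuming the items above are in scope and with hypothetical values:

async fn build_app_sketch() -> RResult<App> {
    // Mirrors what from_main_cli assembles from the CLI flags.
    AppBuilder::default()
        .working_dir("./apps/recorder".to_string())
        .environment(Environment::Development)
        .config_file(None)
        .dotenv_file(None)
        .build()
        .await
}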

View File

@ -1,18 +0,0 @@
[storage]
data_dir = "./data"
[mikan]
base_url = "https://mikanani.me/"
[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 0
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500
[graphql]
depth_limit = inf
complexity_limit = inf
[cache]

View File

@ -1,176 +0,0 @@
use std::{fs, path::Path, str};
use figment::{
Figment, Provider,
providers::{Format, Json, Toml, Yaml},
};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use super::env::Environment;
use crate::{
auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::RResult,
extract::mikan::MikanConfig, graphql::GraphQLConfig, logger::LoggerConfig,
storage::StorageConfig, web::WebServerConfig,
};
const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
const CONFIG_ALLOWED_EXTENSIONS: &[&str] = &[".toml", ".json", ".yaml", ".yml"];
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppConfig {
pub server: WebServerConfig,
pub cache: CacheConfig,
pub auth: AuthConfig,
pub storage: StorageConfig,
pub mikan: MikanConfig,
pub graphql: GraphQLConfig,
pub logger: LoggerConfig,
pub database: DatabaseConfig,
}
impl AppConfig {
pub fn config_prefix() -> String {
format!("{}.config", env!("CARGO_PKG_NAME"))
}
pub fn dotenv_prefix() -> String {
String::from(".env")
}
pub fn allowed_extension() -> Vec<String> {
CONFIG_ALLOWED_EXTENSIONS
.iter()
.map(|s| s.to_string())
.collect_vec()
}
pub fn priority_suffix(environment: &Environment) -> Vec<String> {
vec![
format!(".{}.local", environment.full_name()),
format!(".{}.local", environment.short_name()),
String::from(".local"),
environment.full_name().to_string(),
environment.short_name().to_string(),
String::from(""),
]
}
pub fn default_provider() -> impl Provider {
Toml::string(DEFAULT_CONFIG_MIXIN)
}
pub fn merge_provider_from_file(
fig: Figment,
filepath: impl AsRef<Path>,
ext: &str,
) -> RResult<Figment> {
let content = fs::read_to_string(filepath)?;
let rendered = tera::Tera::one_off(
&content,
&tera::Context::from_value(serde_json::json!({}))?,
false,
)?;
Ok(match ext {
".toml" => fig.merge(Toml::string(&rendered)),
".json" => fig.merge(Json::string(&rendered)),
".yaml" | ".yml" => fig.merge(Yaml::string(&rendered)),
_ => unreachable!("unsupported config extension"),
})
}
pub async fn load_dotenv(
environment: &Environment,
working_dir: &str,
dotenv_file: Option<&str>,
) -> RResult<()> {
let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
vec![dotenv_file]
} else {
vec![Some(working_dir)]
};
let priority_suffix = &AppConfig::priority_suffix(environment);
let dotenv_prefix = AppConfig::dotenv_prefix();
let try_filenames = priority_suffix
.iter()
.map(|ps| format!("{}{}", &dotenv_prefix, ps))
.collect_vec();
for try_dotenv_file_or_dir in try_dotenv_file_or_dirs.into_iter().flatten() {
let try_dotenv_file_or_dir_path = Path::new(try_dotenv_file_or_dir);
if try_dotenv_file_or_dir_path.exists() {
if try_dotenv_file_or_dir_path.is_dir() {
for f in try_filenames.iter() {
let p = try_dotenv_file_or_dir_path.join(f);
if p.exists() && p.is_file() {
dotenv::from_path(p)?;
break;
}
}
} else if try_dotenv_file_or_dir_path.is_file() {
dotenv::from_path(try_dotenv_file_or_dir_path)?;
break;
}
}
}
Ok(())
}
pub async fn load_config(
environment: &Environment,
working_dir: &str,
config_file: Option<&str>,
) -> RResult<AppConfig> {
let try_config_file_or_dirs = if config_file.is_some() {
vec![config_file]
} else {
vec![Some(working_dir)]
};
let allowed_extensions = &AppConfig::allowed_extension();
let priority_suffix = &AppConfig::priority_suffix(environment);
let convention_prefix = &AppConfig::config_prefix();
let try_filenames = priority_suffix
.iter()
.flat_map(|ps| {
allowed_extensions
.iter()
.map(move |ext| (format!("{}{}{}", convention_prefix, ps, ext), ext))
})
.collect_vec();
let mut fig = Figment::from(AppConfig::default_provider());
for try_config_file_or_dir in try_config_file_or_dirs.into_iter().flatten() {
let try_config_file_or_dir_path = Path::new(try_config_file_or_dir);
if try_config_file_or_dir_path.exists() {
if try_config_file_or_dir_path.is_dir() {
for (f, ext) in try_filenames.iter() {
let p = try_config_file_or_dir_path.join(f);
if p.exists() && p.is_file() {
fig = AppConfig::merge_provider_from_file(fig, &p, ext)?;
break;
}
}
} else if let Some(ext) = try_config_file_or_dir_path
.extension()
.and_then(|s| s.to_str())
&& try_config_file_or_dir_path.is_file()
{
fig =
AppConfig::merge_provider_from_file(fig, try_config_file_or_dir_path, ext)?;
break;
}
}
}
let app_config: AppConfig = fig.extract()?;
Ok(app_config)
}
}
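
For illustration, a minimal sketch (with a hypothetical file path) of the Figment layering that load_config performs for a single explicit file: the built-in defaults come first, and the merged file overrides them, since later merges win in Figment.

fn load_single_file_sketch() -> RResult<AppConfig> {
    // Defaults as the base layer, then the file merged on top of them.
    let fig = Figment::from(AppConfig::default_provider());
    let fig = AppConfig::merge_provider_from_file(fig, "./recorder.config.toml", ".toml")?;
    Ok(fig.extract()?)
}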

View File

@ -1,50 +0,0 @@
use super::{Environment, config::AppConfig};
use crate::{
auth::AuthService, cache::CacheService, database::DatabaseService, errors::RResult,
extract::mikan::MikanClient, graphql::GraphQLService, logger::LoggerService,
storage::StorageService,
};
pub struct AppContext {
pub logger: LoggerService,
pub db: DatabaseService,
pub config: AppConfig,
pub cache: CacheService,
pub mikan: MikanClient,
pub auth: AuthService,
pub graphql: GraphQLService,
pub storage: StorageService,
pub working_dir: String,
pub environment: Environment,
}
impl AppContext {
pub async fn new(
environment: Environment,
config: AppConfig,
working_dir: impl ToString,
) -> RResult<Self> {
let config_cloned = config.clone();
let logger = LoggerService::from_config(config.logger).await?;
let cache = CacheService::from_config(config.cache).await?;
let db = DatabaseService::from_config(config.database).await?;
let storage = StorageService::from_config(config.storage).await?;
let auth = AuthService::from_conf(config.auth).await?;
let mikan = MikanClient::from_config(config.mikan).await?;
let graphql = GraphQLService::from_config_and_database(config.graphql, db.clone()).await?;
Ok(AppContext {
config: config_cloned,
environment,
logger,
auth,
cache,
db,
storage,
mikan,
working_dir: working_dir.to_string(),
graphql,
})
}
}

View File

@ -1,89 +0,0 @@
use std::{net::SocketAddr, sync::Arc};
use axum::Router;
use futures::try_join;
use tokio::signal;
use super::{builder::AppBuilder, context::AppContext};
use crate::{
errors::RResult,
web::{
controller::{self, core::ControllerTrait},
middleware::default_middleware_stack,
},
};
pub struct App {
pub context: Arc<AppContext>,
pub builder: AppBuilder,
}
impl App {
pub fn builder() -> AppBuilder {
AppBuilder::default()
}
pub async fn serve(&self) -> RResult<()> {
let context = &self.context;
let config = &context.config;
let listener = tokio::net::TcpListener::bind(&format!(
"{}:{}",
config.server.binding, config.server.port
))
.await?;
let mut router = Router::<Arc<AppContext>>::new();
let (graphqlc, oidcc) = try_join!(
controller::graphql::create(context.clone()),
controller::oidc::create(context.clone()),
)?;
for c in [graphqlc, oidcc] {
router = c.apply_to(router);
}
let middlewares = default_middleware_stack(context.clone());
for mid in middlewares {
router = mid.apply(router)?;
tracing::info!(name = mid.name(), "+middleware");
}
let router = router
.with_state(context.clone())
.into_make_service_with_connect_info::<SocketAddr>();
axum::serve(listener, router)
.with_graceful_shutdown(async move {
Self::shutdown_signal().await;
tracing::info!("shutting down...");
})
.await?;
Ok(())
}
async fn shutdown_signal() {
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
tokio::select! {
() = ctrl_c => {},
() = terminate => {},
}
}
}

View File

@ -1,35 +0,0 @@
use clap::ValueEnum;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize, ValueEnum)]
#[serde(rename_all = "snake_case")]
#[value(rename_all = "snake_case")]
pub enum Environment {
#[serde(alias = "dev")]
#[value(alias = "dev")]
Development,
#[serde(alias = "prod")]
#[value(alias = "prod")]
Production,
#[serde(alias = "test")]
#[value(alias = "test")]
Testing,
}
impl Environment {
pub fn full_name(&self) -> &'static str {
match &self {
Self::Development => "development",
Self::Production => "production",
Self::Testing => "testing",
}
}
pub fn short_name(&self) -> &'static str {
match &self {
Self::Development => "dev",
Self::Production => "prod",
Self::Testing => "test",
}
}
}
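
A small sketch of what the serde aliases above provide (serde_json is already a dependency elsewhere in this crate): the full and short spellings deserialize to the same variant.

fn parse_environment_sketch() -> serde_json::Result<()> {
    let short: Environment = serde_json::from_str("\"dev\"")?;
    let full: Environment = serde_json::from_str("\"development\"")?;
    assert!(matches!(short, Environment::Development));
    assert!(matches!(full, Environment::Development));
    Ok(())
}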

View File

@ -0,0 +1,34 @@
use loco_rs::{app::AppContext, environment::Environment};
use crate::{
auth::service::AppAuthService, dal::AppDalClient, extract::mikan::AppMikanClient,
graphql::service::AppGraphQLService,
};
pub trait AppContextExt {
fn get_dal_client(&self) -> &AppDalClient {
AppDalClient::app_instance()
}
fn get_mikan_client(&self) -> &AppMikanClient {
AppMikanClient::app_instance()
}
fn get_auth_service(&self) -> &AppAuthService {
AppAuthService::app_instance()
}
fn get_graphql_service(&self) -> &AppGraphQLService {
AppGraphQLService::app_instance()
}
fn get_node_env(&self) -> Environment {
let node_env = std::env::var("NODE_ENV");
match node_env.as_deref() {
Ok("production") => Environment::Production,
_ => Environment::Development,
}
}
}
impl AppContextExt for AppContext {}
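
A hypothetical usage sketch (assuming the imports above): bringing the extension trait into scope is all a caller needs to reach the process-wide singletons from a plain loco AppContext.

fn mikan_client_of(ctx: &AppContext) -> &AppMikanClient {
    ctx.get_mikan_client()
}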

View File

@ -1,12 +1,208 @@
pub mod builder; pub mod ext;
pub mod config;
pub mod context;
pub mod core;
pub mod env;
pub use core::App; use std::{
fs,
path::{self, Path, PathBuf},
sync::Arc,
};
pub use builder::AppBuilder; use async_trait::async_trait;
pub use config::AppConfig; pub use ext::AppContextExt;
pub use context::AppContext; use itertools::Itertools;
pub use env::Environment; use loco_rs::{
app::{AppContext, Hooks},
boot::{create_app, BootResult, StartMode},
cache,
config::Config,
controller::{middleware, middleware::MiddlewareLayer, AppRoutes},
db::truncate_table,
environment::Environment,
prelude::*,
task::Tasks,
Result,
};
use once_cell::sync::OnceCell;
use crate::{
auth::service::AppAuthServiceInitializer,
controllers::{self},
dal::AppDalInitalizer,
extract::mikan::client::AppMikanClientInitializer,
graphql::service::AppGraphQLServiceInitializer,
migrations::Migrator,
models::subscribers,
workers::subscription_worker::SubscriptionWorker,
};
pub const WORKING_ROOT_VAR_NAME: &str = "WORKING_ROOT";
static APP_WORKING_ROOT: OnceCell<quirks_path::PathBuf> = OnceCell::new();
pub struct App;
impl App {
pub fn set_working_root(path: PathBuf) {
APP_WORKING_ROOT.get_or_init(|| {
quirks_path::PathBuf::from(path.as_os_str().to_string_lossy().to_string())
});
}
pub fn get_working_root() -> &'static quirks_path::Path {
APP_WORKING_ROOT
.get()
.map(|p| p.as_path())
.expect("working root not set")
}
}
#[async_trait]
impl Hooks for App {
fn app_version() -> String {
format!(
"{} ({})",
env!("CARGO_PKG_VERSION"),
option_env!("BUILD_SHA")
.or(option_env!("GITHUB_SHA"))
.unwrap_or("dev")
)
}
fn app_name() -> &'static str {
env!("CARGO_CRATE_NAME")
}
async fn boot(
mode: StartMode,
environment: &Environment,
config: Config,
) -> Result<BootResult> {
create_app::<Self, Migrator>(mode, environment, config).await
}
async fn load_config(env: &Environment) -> Result<Config> {
let working_roots_to_search = [
std::env::var(WORKING_ROOT_VAR_NAME).ok(),
Some(String::from("./apps/recorder")),
Some(String::from(".")),
]
.into_iter()
.flatten()
.collect_vec();
for working_root in working_roots_to_search.iter() {
let working_root = PathBuf::from(working_root);
for env_file in [
working_root.join(format!(".env.{env}.local")),
working_root.join(format!(".env.{env}")),
working_root.join(".env.local"),
working_root.join(".env"),
] {
tracing::info!(env_file =? env_file);
if env_file.exists() && env_file.is_file() {
dotenv::from_path(&env_file).map_err(loco_rs::Error::wrap)?;
tracing::info!("loaded env from {} success.", env_file.to_string_lossy());
}
}
}
for working_root in working_roots_to_search.iter() {
let working_root = PathBuf::from(working_root);
let config_dir = working_root.as_path().join("config");
for config_file in [
config_dir.join(format!("{env}.local.yaml")),
config_dir.join(format!("{env}.yaml")),
] {
if config_file.exists() && config_file.is_file() {
let content = fs::read_to_string(config_file.clone())?;
let rendered = tera::Tera::one_off(
&content,
&tera::Context::from_value(serde_json::json!({}))?,
false,
)?;
App::set_working_root(working_root);
let config_file = &config_file.to_string_lossy();
let res = serde_yaml::from_str(&rendered)
.map_err(|err| loco_rs::Error::YAMLFile(err, config_file.to_string()))?;
tracing::info!("loading config from {} success", config_file);
return Ok(res);
}
}
}
Err(loco_rs::Error::Message(format!(
"no configuration file found in search paths: {}",
working_roots_to_search
.iter()
.flat_map(|p| path::absolute(PathBuf::from(p)))
.map(|p| p.to_string_lossy().to_string())
.join(",")
)))
}
async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
let initializers: Vec<Box<dyn Initializer>> = vec![
Box::new(AppDalInitalizer),
Box::new(AppMikanClientInitializer),
Box::new(AppGraphQLServiceInitializer),
Box::new(AppAuthServiceInitializer),
];
Ok(initializers)
}
fn routes(ctx: &AppContext) -> AppRoutes {
let ctx = Arc::new(ctx.clone());
AppRoutes::with_default_routes()
.prefix("/api")
.add_route(controllers::graphql::routes(ctx.clone()))
}
fn middlewares(ctx: &AppContext) -> Vec<Box<dyn MiddlewareLayer>> {
use loco_rs::controller::middleware::static_assets::{FolderConfig, StaticAssets};
let mut middlewares = middleware::default_middleware_stack(ctx);
middlewares.push(Box::new(StaticAssets {
enable: true,
must_exist: true,
folder: FolderConfig {
uri: String::from("/api/static"),
path: App::get_working_root().join("public").into(),
},
fallback: App::get_working_root()
.join("public/assets/404.html")
.into(),
precompressed: false,
}));
middlewares
}
async fn after_context(ctx: AppContext) -> Result<AppContext> {
Ok(AppContext {
cache: cache::Cache::new(cache::drivers::inmem::new()).into(),
..ctx
})
}
async fn connect_workers(ctx: &AppContext, queue: &Queue) -> Result<()> {
queue.register(SubscriptionWorker::build(ctx)).await?;
Ok(())
}
fn register_tasks(_tasks: &mut Tasks) {}
async fn truncate(ctx: &AppContext) -> Result<()> {
truncate_table(&ctx.db, subscribers::Entity).await?;
Ok(())
}
async fn seed(_ctx: &AppContext, _base: &Path) -> Result<()> {
Ok(())
}
}

View File

@ -1,17 +1,15 @@
use async_trait::async_trait; use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts}; use axum::http::{HeaderValue, request::Parts};
use base64::{self, Engine}; use base64::{self, Engine};
use loco_rs::app::AppContext;
use reqwest::header::AUTHORIZATION; use reqwest::header::AUTHORIZATION;
use super::{ use super::{
config::BasicAuthConfig, config::BasicAuthConfig,
errors::AuthError, errors::AuthError,
service::{AuthServiceTrait, AuthUserInfo}, service::{AuthService, AuthUserInfo},
};
use crate::{
app::AppContext,
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
}; };
use crate::models::{auth::AuthType, subscribers::SEED_SUBSCRIBER};
#[derive(Debug, PartialEq, Eq, Clone)] #[derive(Debug, PartialEq, Eq, Clone)]
pub struct AuthBasic { pub struct AuthBasic {
@ -61,7 +59,7 @@ pub struct BasicAuthService {
} }
#[async_trait] #[async_trait]
impl AuthServiceTrait for BasicAuthService { impl AuthService for BasicAuthService {
async fn extract_user_info( async fn extract_user_info(
&self, &self,
ctx: &AppContext, ctx: &AppContext,
@ -77,7 +75,7 @@ impl AuthServiceTrait for BasicAuthService {
{ {
let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER) let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
.await .await
.map_err(|_| AuthError::FindAuthRecordError)?; .map_err(AuthError::FindAuthRecordError)?;
return Ok(AuthUserInfo { return Ok(AuthUserInfo {
subscriber_auth, subscriber_auth,
auth_type: AuthType::Basic, auth_type: AuthType::Basic,

View File

@ -1,6 +1,6 @@
use jwt_authorizer::OneOrArray; use jwt_authorizer::OneOrArray;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_with::{NoneAsEmptyString, serde_as}; use serde_with::{serde_as, NoneAsEmptyString};
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct BasicAuthConfig { pub struct BasicAuthConfig {
@ -33,7 +33,7 @@ pub struct OidcAuthConfig {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "auth_type", rename_all = "snake_case")] #[serde(tag = "auth_type", rename_all = "snake_case")]
pub enum AuthConfig { pub enum AppAuthConfig {
Basic(BasicAuthConfig), Basic(BasicAuthConfig),
Oidc(OidcAuthConfig), Oidc(OidcAuthConfig),
} }

View File

@ -1,4 +1,9 @@
import { LogLevel, type OpenIdConfiguration } from 'oidc-client-rx'; import { InjectionToken } from '@outposts/injection-js';
import {
type EventTypes,
LogLevel,
type OpenIdConfiguration,
} from 'oidc-client-rx';
export const isBasicAuth = process.env.AUTH_TYPE === 'basic'; export const isBasicAuth = process.env.AUTH_TYPE === 'basic';

View File

@ -6,6 +6,7 @@ use axum::{
http::StatusCode, http::StatusCode,
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use loco_rs::model::ModelError;
use openidconnect::{ use openidconnect::{
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError, ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
StandardErrorResponse, core::CoreErrorResponseType, StandardErrorResponse, core::CoreErrorResponseType,
@ -23,7 +24,7 @@ pub enum AuthError {
current: AuthType, current: AuthType,
}, },
#[error("Failed to find auth record")] #[error("Failed to find auth record")]
FindAuthRecordError, FindAuthRecordError(ModelError),
#[error("Invalid credentials")] #[error("Invalid credentials")]
BasicInvalidCredentials, BasicInvalidCredentials,
#[error(transparent)] #[error(transparent)]

View File

@ -6,15 +6,16 @@ use axum::{
middleware::Next, middleware::Next,
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use loco_rs::prelude::AppContext;
use crate::{app::AppContext, auth::AuthServiceTrait}; use crate::{app::AppContextExt, auth::AuthService};
pub async fn header_www_authenticate_middleware( pub async fn api_auth_middleware(
State(ctx): State<Arc<AppContext>>, State(ctx): State<Arc<AppContext>>,
request: Request, request: Request,
next: Next, next: Next,
) -> Response { ) -> Response {
let auth_service = &ctx.auth; let auth_service = ctx.get_auth_service();
let (mut parts, body) = request.into_parts(); let (mut parts, body) = request.into_parts();

View File

@ -5,7 +5,7 @@ pub mod middleware;
pub mod oidc; pub mod oidc;
pub mod service; pub mod service;
pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig}; pub use config::{AppAuthConfig, BasicAuthConfig, OidcAuthConfig};
pub use errors::AuthError; pub use errors::AuthError;
pub use middleware::header_www_authenticate_middleware; pub use middleware::api_auth_middleware;
pub use service::{AuthService, AuthServiceTrait, AuthUserInfo}; pub use service::{AppAuthService, AuthService, AuthUserInfo};

View File

@ -7,13 +7,13 @@ use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts}; use axum::http::{HeaderValue, request::Parts};
use itertools::Itertools; use itertools::Itertools;
use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer}; use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer};
use loco_rs::{app::AppContext, model::ModelError};
use moka::future::Cache; use moka::future::Cache;
use openidconnect::{ use openidconnect::{
AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce, AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse, OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse,
core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata}, core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
}; };
use sea_orm::DbErr;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use url::Url; use url::Url;
@ -21,9 +21,9 @@ use url::Url;
use super::{ use super::{
config::OidcAuthConfig, config::OidcAuthConfig,
errors::AuthError, errors::AuthError,
service::{AuthServiceTrait, AuthUserInfo}, service::{AuthService, AuthUserInfo},
}; };
use crate::{app::AppContext, errors::RError, fetch::HttpClient, models::auth::AuthType}; use crate::{fetch::HttpClient, models::auth::AuthType};
#[derive(Deserialize, Serialize, Clone, Debug)] #[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OidcAuthClaims { pub struct OidcAuthClaims {
@ -258,7 +258,7 @@ impl OidcAuthService {
} }
#[async_trait] #[async_trait]
impl AuthServiceTrait for OidcAuthService { impl AuthService for OidcAuthService {
async fn extract_user_info( async fn extract_user_info(
&self, &self,
ctx: &AppContext, ctx: &AppContext,
@ -306,12 +306,12 @@ impl AuthServiceTrait for OidcAuthService {
} }
} }
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await { let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(RError::DbError(DbErr::RecordNotFound(..))) => { Err(ModelError::EntityNotFound) => {
crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
} }
r => r, r => r,
} }
.map_err(|_| AuthError::FindAuthRecordError)?; .map_err(AuthError::FindAuthRecordError)?;
Ok(AuthUserInfo { Ok(AuthUserInfo {
subscriber_auth, subscriber_auth,

View File

@ -7,17 +7,20 @@ use axum::{
response::{IntoResponse as _, Response}, response::{IntoResponse as _, Response},
}; };
use jwt_authorizer::{JwtAuthorizer, Validation}; use jwt_authorizer::{JwtAuthorizer, Validation};
use loco_rs::app::{AppContext, Initializer};
use moka::future::Cache; use moka::future::Cache;
use once_cell::sync::OnceCell;
use reqwest::header::HeaderValue; use reqwest::header::HeaderValue;
use super::{ use super::{
AuthConfig, AppAuthConfig,
basic::BasicAuthService, basic::BasicAuthService,
errors::AuthError, errors::AuthError,
oidc::{OidcAuthClaims, OidcAuthService}, oidc::{OidcAuthClaims, OidcAuthService},
}; };
use crate::{ use crate::{
app::AppContext, app::AppContextExt as _,
config::AppConfigExt,
fetch::{ fetch::{
HttpClient, HttpClientConfig, HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig}, client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
@ -38,7 +41,7 @@ impl FromRequestParts<AppContext> for AuthUserInfo {
parts: &mut Parts, parts: &mut Parts,
state: &AppContext, state: &AppContext,
) -> Result<Self, Self::Rejection> { ) -> Result<Self, Self::Rejection> {
let auth_service = &state.auth; let auth_service = state.get_auth_service();
auth_service auth_service
.extract_user_info(state, parts) .extract_user_info(state, parts)
@ -48,7 +51,7 @@ impl FromRequestParts<AppContext> for AuthUserInfo {
} }
#[async_trait] #[async_trait]
pub trait AuthServiceTrait { pub trait AuthService {
async fn extract_user_info( async fn extract_user_info(
&self, &self,
ctx: &AppContext, ctx: &AppContext,
@ -58,16 +61,24 @@ pub trait AuthServiceTrait {
fn auth_type(&self) -> AuthType; fn auth_type(&self) -> AuthType;
} }
pub enum AuthService { pub enum AppAuthService {
Basic(BasicAuthService), Basic(BasicAuthService),
Oidc(OidcAuthService), Oidc(OidcAuthService),
} }
impl AuthService { static APP_AUTH_SERVICE: OnceCell<AppAuthService> = OnceCell::new();
pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> {
impl AppAuthService {
pub fn app_instance() -> &'static Self {
APP_AUTH_SERVICE
.get()
.expect("AppAuthService is not initialized")
}
pub async fn from_conf(config: AppAuthConfig) -> Result<Self, AuthError> {
let result = match config { let result = match config {
AuthConfig::Basic(config) => AuthService::Basic(BasicAuthService { config }), AppAuthConfig::Basic(config) => AppAuthService::Basic(BasicAuthService { config }),
AuthConfig::Oidc(config) => { AppAuthConfig::Oidc(config) => {
let validation = Validation::new() let validation = Validation::new()
.iss(&[&config.issuer]) .iss(&[&config.issuer])
.aud(&[&config.audience]); .aud(&[&config.audience]);
@ -85,7 +96,7 @@ impl AuthService {
.build() .build()
.await?; .await?;
AuthService::Oidc(OidcAuthService { AppAuthService::Oidc(OidcAuthService {
config, config,
api_authorizer, api_authorizer,
oidc_provider_client, oidc_provider_client,
@ -101,29 +112,50 @@ impl AuthService {
} }
#[async_trait] #[async_trait]
impl AuthServiceTrait for AuthService { impl AuthService for AppAuthService {
async fn extract_user_info( async fn extract_user_info(
&self, &self,
ctx: &AppContext, ctx: &AppContext,
request: &mut Parts, request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> { ) -> Result<AuthUserInfo, AuthError> {
match self { match self {
AuthService::Basic(service) => service.extract_user_info(ctx, request).await, AppAuthService::Basic(service) => service.extract_user_info(ctx, request).await,
AuthService::Oidc(service) => service.extract_user_info(ctx, request).await, AppAuthService::Oidc(service) => service.extract_user_info(ctx, request).await,
} }
} }
fn www_authenticate_header_value(&self) -> Option<HeaderValue> { fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
match self { match self {
AuthService::Basic(service) => service.www_authenticate_header_value(), AppAuthService::Basic(service) => service.www_authenticate_header_value(),
AuthService::Oidc(service) => service.www_authenticate_header_value(), AppAuthService::Oidc(service) => service.www_authenticate_header_value(),
} }
} }
fn auth_type(&self) -> AuthType { fn auth_type(&self) -> AuthType {
match self { match self {
AuthService::Basic(service) => service.auth_type(), AppAuthService::Basic(service) => service.auth_type(),
AuthService::Oidc(service) => service.auth_type(), AppAuthService::Oidc(service) => service.auth_type(),
} }
} }
} }
pub struct AppAuthServiceInitializer;
#[async_trait]
impl Initializer for AppAuthServiceInitializer {
fn name(&self) -> String {
String::from("AppAuthServiceInitializer")
}
async fn before_run(&self, ctx: &AppContext) -> Result<(), loco_rs::Error> {
let auth_conf = ctx.config.get_app_conf()?.auth;
let service = AppAuthService::from_conf(auth_conf)
.await
.map_err(loco_rs::Error::wrap)?;
APP_AUTH_SERVICE.get_or_init(|| service);
Ok(())
}
}

View File

@ -1,15 +1,9 @@
use color_eyre::{self, eyre}; use loco_rs::cli;
use recorder::app::AppBuilder; use recorder::{app::App, migrations::Migrator};
#[tokio::main] #[tokio::main]
async fn main() -> eyre::Result<()> { async fn main() -> color_eyre::eyre::Result<()> {
color_eyre::install()?; color_eyre::install()?;
cli::main::<App, Migrator>().await?;
let builder = AppBuilder::from_main_cli(None).await?;
let app = builder.build().await?;
app.serve().await?;
Ok(()) Ok(())
} }

View File

@ -1,4 +0,0 @@
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CacheConfig {}

View File

@ -1,5 +0,0 @@
pub mod config;
pub mod service;
pub use config::CacheConfig;
pub use service::CacheService;

View File

@ -1,10 +0,0 @@
use super::CacheConfig;
use crate::errors::RResult;
pub struct CacheService {}
impl CacheService {
pub async fn from_config(_config: CacheConfig) -> RResult<Self> {
Ok(Self {})
}
}

View File

@ -0,0 +1,74 @@
use figment::{
Figment,
providers::{Format, Json, Yaml},
};
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use crate::{
auth::AppAuthConfig, dal::config::AppDalConfig, errors::RecorderError,
extract::mikan::AppMikanConfig, graphql::config::AppGraphQLConfig,
};
const DEFAULT_APP_SETTINGS_MIXIN: &str = include_str!("./settings_mixin.yaml");
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AppConfig {
pub auth: AppAuthConfig,
pub dal: AppDalConfig,
pub mikan: AppMikanConfig,
pub graphql: AppGraphQLConfig,
}
pub fn deserialize_key_path_from_json_value<T: DeserializeOwned>(
value: &serde_json::Value,
key_path: &[&str],
) -> Result<Option<T>, loco_rs::Error> {
let mut stack = vec![("", value)];
for key in key_path {
let current = stack.last().unwrap().1;
if let Some(v) = current.get(key) {
stack.push((key, v));
} else {
return Ok(None);
}
}
let result: T = serde_json::from_value(stack.pop().unwrap().1.clone())?;
Ok(Some(result))
}
pub fn deserialize_key_path_from_app_config<T: DeserializeOwned>(
app_config: &loco_rs::config::Config,
key_path: &[&str],
) -> Result<Option<T>, loco_rs::Error> {
let settings = app_config.settings.as_ref();
if let Some(settings) = settings {
deserialize_key_path_from_json_value(settings, key_path)
} else {
Ok(None)
}
}
pub trait AppConfigExt {
fn get_root_conf(&self) -> &loco_rs::config::Config;
fn get_app_conf(&self) -> Result<AppConfig, RecorderError> {
let settings_str = self
.get_root_conf()
.settings
.as_ref()
.map(serde_json::to_string)
.unwrap_or_else(|| Ok(String::new()))?;
let app_config = Figment::from(Json::string(&settings_str))
.merge(Yaml::string(DEFAULT_APP_SETTINGS_MIXIN))
.extract()?;
Ok(app_config)
}
}
impl AppConfigExt for loco_rs::config::Config {
fn get_root_conf(&self) -> &loco_rs::config::Config {
self
}
}
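
For illustration, a minimal sketch of how deserialize_key_path_from_json_value walks a nested settings value; the JSON literal is hypothetical.

fn key_path_sketch() -> Result<(), loco_rs::Error> {
    let settings = serde_json::json!({
        "mikan": { "base_url": "https://mikanani.me/" }
    });
    // Walks settings["mikan"]["base_url"] and deserializes the leaf value.
    let base_url: Option<String> =
        deserialize_key_path_from_json_value(&settings, &["mikan", "base_url"])?;
    assert_eq!(base_url.as_deref(), Some("https://mikanani.me/"));
    Ok(())
}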

View File

@ -0,0 +1,15 @@
# dal:
# data_dir: ./data
# mikan:
# http_client:
# exponential_backoff_max_retries: 3
# leaky_bucket_max_tokens: 2
# leaky_bucket_initial_tokens: 0
# leaky_bucket_refill_tokens: 1
# leaky_bucket_refill_interval: 500
# base_url: "https://mikanani.me/"
# graphql:
# depth_limit: null
# complexity_limit: null

View File

@ -2,10 +2,10 @@ import { type Fetcher, createGraphiQLFetcher } from '@graphiql/toolkit';
import { createFileRoute } from '@tanstack/react-router'; import { createFileRoute } from '@tanstack/react-router';
import GraphiQL from 'graphiql'; import GraphiQL from 'graphiql';
import { useMemo } from 'react'; import { useMemo } from 'react';
import { firstValueFrom } from 'rxjs'; import { beforeLoadGuard } from '../../auth/guard';
import { beforeLoadGuard } from '../../../auth/guard';
import { useAuth } from '../../../auth/hooks';
import 'graphiql/graphiql.css'; import 'graphiql/graphiql.css';
import { firstValueFrom } from 'rxjs';
import { useAuth } from '../../auth/hooks';
export const Route = createFileRoute('/graphql/')({ export const Route = createFileRoute('/graphql/')({
component: RouteComponent, component: RouteComponent,
@ -32,5 +32,5 @@ function RouteComponent() {
[oidcSecurityService] [oidcSecurityService]
); );
return <GraphiQL fetcher={fetcher} className="!h-svh" />; return <GraphiQL fetcher={fetcher} className="h-svh" />;
} }

View File

@ -0,0 +1,30 @@
use std::sync::Arc;
use async_graphql_axum::{GraphQLRequest, GraphQLResponse};
use axum::{extract::State, middleware::from_fn_with_state, routing::post, Extension};
use loco_rs::{app::AppContext, prelude::Routes};
use crate::{
app::AppContextExt,
auth::{api_auth_middleware, AuthUserInfo},
};
async fn graphql_handler(
State(ctx): State<AppContext>,
Extension(auth_user_info): Extension<AuthUserInfo>,
req: GraphQLRequest,
) -> GraphQLResponse {
let graphql_service = ctx.get_graphql_service();
let mut req = req.into_inner();
req = req.data(auth_user_info);
graphql_service.schema.execute(req).await.into()
}
pub fn routes(ctx: Arc<AppContext>) -> Routes {
Routes::new().prefix("/graphql").add(
"/",
post(graphql_handler).layer(from_fn_with_state(ctx, api_auth_middleware)),
)
}

View File

@ -0,0 +1,2 @@
pub mod graphql;
pub mod oidc;

View File

@ -1,6 +1,6 @@
import { createFileRoute, redirect } from '@tanstack/react-router'; import { createFileRoute, redirect } from '@tanstack/react-router';
import { EventTypes } from 'oidc-client-rx'; import { EventTypes } from 'oidc-client-rx';
import { useAuth } from '../../../auth/hooks'; import { useAuth } from '../../auth/hooks';
export const Route = createFileRoute('/oidc/callback')({ export const Route = createFileRoute('/oidc/callback')({
component: RouteComponent, component: RouteComponent,

View File

@ -1,32 +1,24 @@
use std::sync::Arc; use std::sync::Arc;
use axum::{ use axum::{extract::Query, http::request::Parts};
Json, Router, use loco_rs::prelude::*;
extract::{Query, State},
http::request::Parts,
routing::get,
};
use super::core::Controller;
use crate::{ use crate::{
app::AppContext, app::AppContextExt,
auth::{ auth::{
AuthError, AuthService, AuthServiceTrait,
oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest}, oidc::{OidcAuthCallbackPayload, OidcAuthCallbackQuery, OidcAuthRequest},
AppAuthService, AuthError, AuthService,
}, },
errors::RResult,
extract::http::ForwardedRelatedInfo, extract::http::ForwardedRelatedInfo,
models::auth::AuthType, models::auth::AuthType,
}; };
pub const CONTROLLER_PREFIX: &str = "/api/oidc";
async fn oidc_callback( async fn oidc_callback(
State(ctx): State<Arc<AppContext>>, State(ctx): State<Arc<AppContext>>,
Query(query): Query<OidcAuthCallbackQuery>, Query(query): Query<OidcAuthCallbackQuery>,
) -> Result<Json<OidcAuthCallbackPayload>, AuthError> { ) -> Result<Json<OidcAuthCallbackPayload>, AuthError> {
let auth_service = &ctx.auth; let auth_service = ctx.get_auth_service();
if let AuthService::Oidc(oidc_auth_service) = auth_service { if let AppAuthService::Oidc(oidc_auth_service) = auth_service {
let response = oidc_auth_service let response = oidc_auth_service
.extract_authorization_request_callback(query) .extract_authorization_request_callback(query)
.await?; .await?;
@ -43,13 +35,13 @@ async fn oidc_auth(
State(ctx): State<Arc<AppContext>>, State(ctx): State<Arc<AppContext>>,
parts: Parts, parts: Parts,
) -> Result<Json<OidcAuthRequest>, AuthError> { ) -> Result<Json<OidcAuthRequest>, AuthError> {
let auth_service = &ctx.auth; let auth_service = ctx.get_auth_service();
if let AuthService::Oidc(oidc_auth_service) = auth_service { if let AppAuthService::Oidc(oidc_auth_service) = auth_service {
let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts) let mut redirect_uri = ForwardedRelatedInfo::from_request_parts(&parts)
.resolved_origin() .resolved_origin()
.ok_or_else(|| AuthError::OidcRequestRedirectUriError(url::ParseError::EmptyHost))?; .ok_or_else(|| AuthError::OidcRequestRedirectUriError(url::ParseError::EmptyHost))?;
redirect_uri.set_path(&format!("{CONTROLLER_PREFIX}/callback")); redirect_uri.set_path("/api/oidc/callback");
let auth_request = oidc_auth_service let auth_request = oidc_auth_service
.build_authorization_request(redirect_uri.as_str()) .build_authorization_request(redirect_uri.as_str())
@ -70,10 +62,9 @@ async fn oidc_auth(
} }
} }
pub async fn create(_context: Arc<AppContext>) -> RResult<Controller> { pub fn routes(state: Arc<AppContext>) -> Routes {
let router = Router::<Arc<AppContext>>::new() Routes::new()
.route("/auth", get(oidc_auth)) .prefix("/oidc")
.route("/callback", get(oidc_callback)); .add("/auth", get(oidc_auth).with_state(state.clone()))
.add("/callback", get(oidc_callback).with_state(state))
Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))
} }

View File

@ -1,22 +1,26 @@
use std::fmt; use std::fmt;
use async_trait::async_trait;
use bytes::Bytes; use bytes::Bytes;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use opendal::{Buffer, Operator, layers::LoggingLayer, services::Fs}; use opendal::{Buffer, Operator, layers::LoggingLayer, services::Fs};
use quirks_path::{Path, PathBuf}; use quirks_path::{Path, PathBuf};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use url::Url; use url::Url;
use uuid::Uuid; use uuid::Uuid;
use super::StorageConfig; use super::AppDalConfig;
use crate::errors::{RError, RResult}; use crate::{app::App, config::AppConfigExt, errors::RecorderError};
// TODO: wait app-context-trait to integrate
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
pub enum StorageContentCategory { pub enum DalContentCategory {
Image, Image,
} }
impl AsRef<str> for StorageContentCategory { impl AsRef<str> for DalContentCategory {
fn as_ref(&self) -> &str { fn as_ref(&self) -> &str {
match self { match self {
Self::Image => "image", Self::Image => "image",
@ -24,12 +28,14 @@ impl AsRef<str> for StorageContentCategory {
} }
} }
pub enum StorageStoredUrl { static APP_DAL_CLIENT: OnceCell<AppDalClient> = OnceCell::new();
pub enum DalStoredUrl {
RelativePath { path: String }, RelativePath { path: String },
Absolute { url: Url }, Absolute { url: Url },
} }
impl AsRef<str> for StorageStoredUrl { impl AsRef<str> for DalStoredUrl {
fn as_ref(&self) -> &str { fn as_ref(&self) -> &str {
match &self { match &self {
Self::Absolute { url } => url.as_str(), Self::Absolute { url } => url.as_str(),
@ -38,22 +44,30 @@ impl AsRef<str> for StorageStoredUrl {
} }
} }
impl fmt::Display for StorageStoredUrl { impl fmt::Display for DalStoredUrl {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.as_ref()) write!(f, "{}", self.as_ref())
} }
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct StorageService { pub struct AppDalClient {
pub data_dir: String, pub data_dir: String,
} }
impl StorageService { impl AppDalClient {
pub async fn from_config(config: StorageConfig) -> RResult<Self> { pub fn new(config: AppDalConfig) -> Self {
Ok(Self { Self {
data_dir: config.data_dir.to_string(), data_dir: App::get_working_root()
}) .join(config.data_dir.as_deref().unwrap_or("./data"))
.to_string(),
}
}
pub fn app_instance() -> &'static AppDalClient {
APP_DAL_CLIENT
.get()
.expect("AppDalClient is not initialized")
} }
pub fn get_fs(&self) -> Fs { pub fn get_fs(&self) -> Fs {
@ -66,14 +80,14 @@ impl StorageService {
pub async fn store_object( pub async fn store_object(
&self, &self,
content_category: StorageContentCategory, content_category: DalContentCategory,
subscriber_id: i32, subscriber_id: i32,
bucket: Option<&str>, bucket: Option<&str>,
filename: &str, filename: &str,
data: Bytes, data: Bytes,
) -> Result<StorageStoredUrl, RError> { ) -> Result<DalStoredUrl, RecorderError> {
match content_category { match content_category {
StorageContentCategory::Image => { DalContentCategory::Image => {
let fullname = [ let fullname = [
&subscriber_id.to_string(), &subscriber_id.to_string(),
content_category.as_ref(), content_category.as_ref(),
@ -95,7 +109,7 @@ impl StorageService {
fs_op.write(fullname.as_str(), data).await?; fs_op.write(fullname.as_str(), data).await?;
Ok(StorageStoredUrl::RelativePath { Ok(DalStoredUrl::RelativePath {
path: fullname.to_string(), path: fullname.to_string(),
}) })
} }
@ -104,13 +118,13 @@ impl StorageService {
pub async fn exists_object( pub async fn exists_object(
&self, &self,
content_category: StorageContentCategory, content_category: DalContentCategory,
subscriber_id: i32, subscriber_id: i32,
bucket: Option<&str>, bucket: Option<&str>,
filename: &str, filename: &str,
) -> Result<Option<StorageStoredUrl>, RError> { ) -> Result<Option<DalStoredUrl>, RecorderError> {
match content_category { match content_category {
StorageContentCategory::Image => { DalContentCategory::Image => {
let fullname = [ let fullname = [
&subscriber_id.to_string(), &subscriber_id.to_string(),
content_category.as_ref(), content_category.as_ref(),
@ -126,7 +140,7 @@ impl StorageService {
.finish(); .finish();
if fs_op.exists(fullname.as_str()).await? { if fs_op.exists(fullname.as_str()).await? {
Ok(Some(StorageStoredUrl::RelativePath { Ok(Some(DalStoredUrl::RelativePath {
path: fullname.to_string(), path: fullname.to_string(),
})) }))
} else { } else {
@ -138,13 +152,13 @@ impl StorageService {
pub async fn load_object( pub async fn load_object(
&self, &self,
content_category: StorageContentCategory, content_category: DalContentCategory,
subscriber_pid: &str, subscriber_pid: &str,
bucket: Option<&str>, bucket: Option<&str>,
filename: &str, filename: &str,
) -> color_eyre::eyre::Result<Buffer> { ) -> color_eyre::eyre::Result<Buffer> {
match content_category { match content_category {
StorageContentCategory::Image => { DalContentCategory::Image => {
let fullname = [ let fullname = [
subscriber_pid, subscriber_pid,
content_category.as_ref(), content_category.as_ref(),
@ -166,3 +180,21 @@ impl StorageService {
} }
} }
} }
pub struct AppDalInitalizer;
#[async_trait]
impl Initializer for AppDalInitalizer {
fn name(&self) -> String {
String::from("AppDalInitalizer")
}
async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
let config = &app_context.config;
let app_dal_conf = config.get_app_conf()?.dal;
APP_DAL_CLIENT.get_or_init(|| AppDalClient::new(app_dal_conf));
Ok(())
}
}

View File

@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct StorageConfig { pub struct AppDalConfig {
pub data_dir: String, pub data_dir: Option<String>,
} }

View File

@ -0,0 +1,4 @@
pub mod client;
pub mod config;
pub use client::{AppDalClient, AppDalInitalizer, DalContentCategory};
pub use config::AppDalConfig;

View File

@ -1,14 +0,0 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct DatabaseConfig {
pub uri: String,
pub enable_logging: bool,
pub min_connections: u32,
pub max_connections: u32,
pub connect_timeout: u64,
pub idle_timeout: u64,
pub acquire_timeout: Option<u64>,
#[serde(default)]
pub auto_migrate: bool,
}

View File

@ -1,5 +0,0 @@
pub mod config;
pub mod service;
pub use config::DatabaseConfig;
pub use service::DatabaseService;

View File

@ -1,97 +0,0 @@
use std::{ops::Deref, time::Duration};
use sea_orm::{
ConnectOptions, ConnectionTrait, Database, DatabaseBackend, DatabaseConnection, DbBackend,
DbErr, ExecResult, QueryResult, Statement,
};
use sea_orm_migration::MigratorTrait;
use super::DatabaseConfig;
use crate::{errors::RResult, migrations::Migrator};
pub struct DatabaseService {
connection: DatabaseConnection,
}
impl DatabaseService {
pub async fn from_config(config: DatabaseConfig) -> RResult<Self> {
let mut opt = ConnectOptions::new(&config.uri);
opt.max_connections(config.max_connections)
.min_connections(config.min_connections)
.connect_timeout(Duration::from_millis(config.connect_timeout))
.idle_timeout(Duration::from_millis(config.idle_timeout))
.sqlx_logging(config.enable_logging);
if let Some(acquire_timeout) = config.acquire_timeout {
opt.acquire_timeout(Duration::from_millis(acquire_timeout));
}
let db = Database::connect(opt).await?;
if db.get_database_backend() == DatabaseBackend::Sqlite {
db.execute(Statement::from_string(
DatabaseBackend::Sqlite,
"
PRAGMA foreign_keys = ON;
PRAGMA journal_mode = WAL;
PRAGMA synchronous = NORMAL;
PRAGMA mmap_size = 134217728;
PRAGMA journal_size_limit = 67108864;
PRAGMA cache_size = 2000;
",
))
.await?;
}
if config.auto_migrate {
Migrator::up(&db, None).await?;
}
Ok(Self { connection: db })
}
}
impl Deref for DatabaseService {
type Target = DatabaseConnection;
fn deref(&self) -> &Self::Target {
&self.connection
}
}
impl AsRef<DatabaseConnection> for DatabaseService {
fn as_ref(&self) -> &DatabaseConnection {
&self.connection
}
}
#[async_trait::async_trait]
impl ConnectionTrait for DatabaseService {
fn get_database_backend(&self) -> DbBackend {
self.deref().get_database_backend()
}
async fn execute(&self, stmt: Statement) -> Result<ExecResult, DbErr> {
self.deref().execute(stmt).await
}
async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> {
self.deref().execute_unprepared(sql).await
}
async fn query_one(&self, stmt: Statement) -> Result<Option<QueryResult>, DbErr> {
self.deref().query_one(stmt).await
}
async fn query_all(&self, stmt: Statement) -> Result<Vec<QueryResult>, DbErr> {
self.deref().query_all(stmt).await
}
fn support_returning(&self) -> bool {
self.deref().support_returning()
}
fn is_mock_connection(&self) -> bool {
self.deref().is_mock_connection()
}
}
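
A usage sketch with hypothetical connection values mirroring the config file earlier in this diff: the service is built from a DatabaseConfig and then used like a plain SeaORM connection through Deref.

async fn connect_sketch() -> RResult<()> {
    let db = DatabaseService::from_config(DatabaseConfig {
        uri: "postgres://konobangu:konobangu@localhost:5432/konobangu".into(),
        enable_logging: true,
        min_connections: 1,
        max_connections: 10,
        connect_timeout: 500,
        idle_timeout: 500,
        acquire_timeout: None,
        auto_migrate: false,
    })
    .await?;
    // DatabaseService derefs to sea_orm::DatabaseConnection, so it can be passed
    // anywhere a &DatabaseConnection is expected.
    let _conn: &sea_orm::DatabaseConnection = &db;
    Ok(())
}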

View File

@ -1,33 +1,11 @@
use std::{borrow::Cow, error::Error as StdError}; use std::{borrow::Cow, error::Error as StdError};
use axum::response::{IntoResponse, Response};
use http::StatusCode;
use thiserror::Error as ThisError; use thiserror::Error as ThisError;
use crate::{auth::AuthError, fetch::HttpClientError}; use crate::fetch::HttpClientError;
#[derive(ThisError, Debug)] #[derive(ThisError, Debug)]
pub enum RError { pub enum RecorderError {
#[error(transparent)]
InvalidMethodError(#[from] http::method::InvalidMethod),
#[error(transparent)]
InvalidHeaderNameError(#[from] http::header::InvalidHeaderName),
#[error(transparent)]
TracingAppenderInitError(#[from] tracing_appender::rolling::InitError),
#[error(transparent)]
GraphQLSchemaError(#[from] async_graphql::dynamic::SchemaError),
#[error(transparent)]
AuthError(#[from] AuthError),
#[error(transparent)]
RSSError(#[from] rss::Error),
#[error(transparent)]
DotEnvError(#[from] dotenv::Error),
#[error(transparent)]
TeraError(#[from] tera::Error),
#[error(transparent)]
IOError(#[from] std::io::Error),
#[error(transparent)]
DbError(#[from] sea_orm::DbErr),
#[error(transparent)] #[error(transparent)]
CookieParseError(#[from] cookie::ParseError), CookieParseError(#[from] cookie::ParseError),
#[error(transparent)] #[error(transparent)]
@ -66,15 +44,9 @@ pub enum RError {
#[source] #[source]
source: Option<Box<dyn StdError + Send + Sync>>, source: Option<Box<dyn StdError + Send + Sync>>,
}, },
#[error("Model Entity {entity} not found")]
ModelEntityNotFound { entity: Cow<'static, str> },
#[error("{0}")]
CustomMessageStr(&'static str),
#[error("{0}")]
CustomMessageString(String),
} }
impl RError { impl RecorderError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self { pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError { Self::MikanMetaMissingFieldError {
field, field,
@ -98,19 +70,10 @@ impl RError {
source: Some(source), source: Some(source),
} }
} }
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
Self::DbError(sea_orm::DbErr::RecordNotFound(detail.to_string()))
}
} }
impl IntoResponse for RError { impl From<RecorderError> for loco_rs::Error {
fn into_response(self) -> Response { fn from(error: RecorderError) -> Self {
match self { Self::wrap(error)
Self::AuthError(auth_error) => auth_error.into_response(),
err => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
} }
} }
}
pub type RResult<T> = Result<T, RError>;

View File

@ -1,15 +1,21 @@
use std::ops::Deref; use std::ops::Deref;
use async_trait::async_trait;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use reqwest_middleware::ClientWithMiddleware; use reqwest_middleware::ClientWithMiddleware;
use secrecy::{ExposeSecret, SecretString}; use secrecy::{ExposeSecret, SecretString};
use url::Url; use url::Url;
use super::MikanConfig; use super::AppMikanConfig;
use crate::{ use crate::{
errors::RError, config::AppConfigExt,
errors::RecorderError,
fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth}, fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth},
}; };
static APP_MIKAN_CLIENT: OnceCell<AppMikanClient> = OnceCell::new();
#[derive(Debug, Default, Clone)] #[derive(Debug, Default, Clone)]
pub struct MikanAuthSecrecy { pub struct MikanAuthSecrecy {
pub cookie: SecretString, pub cookie: SecretString,
@ -17,19 +23,19 @@ pub struct MikanAuthSecrecy {
} }
impl MikanAuthSecrecy { impl MikanAuthSecrecy {
pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RError> { pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RecorderError> {
HttpClientCookiesAuth::from_cookies(self.cookie.expose_secret(), url, self.user_agent) HttpClientCookiesAuth::from_cookies(self.cookie.expose_secret(), url, self.user_agent)
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub struct MikanClient { pub struct AppMikanClient {
http_client: HttpClient, http_client: HttpClient,
base_url: Url, base_url: Url,
} }
impl MikanClient { impl AppMikanClient {
pub async fn from_config(config: MikanConfig) -> Result<Self, RError> { pub fn new(config: AppMikanConfig) -> Result<Self, RecorderError> {
let http_client = HttpClient::from_config(config.http_client)?; let http_client = HttpClient::from_config(config.http_client)?;
let base_url = config.base_url; let base_url = config.base_url;
Ok(Self { Ok(Self {
@ -38,7 +44,7 @@ impl MikanClient {
}) })
} }
pub fn fork_with_auth(&self, secrecy: MikanAuthSecrecy) -> Result<Self, RError> { pub fn fork_with_auth(&self, secrecy: MikanAuthSecrecy) -> Result<Self, RecorderError> {
let cookie_auth = secrecy.into_cookie_auth(&self.base_url)?; let cookie_auth = secrecy.into_cookie_auth(&self.base_url)?;
let fork = self.http_client.fork().attach_secrecy(cookie_auth); let fork = self.http_client.fork().attach_secrecy(cookie_auth);
@ -48,6 +54,12 @@ impl MikanClient {
}) })
} }
pub fn app_instance() -> &'static AppMikanClient {
APP_MIKAN_CLIENT
.get()
.expect("AppMikanClient is not initialized")
}
pub fn base_url(&self) -> &Url { pub fn base_url(&self) -> &Url {
&self.base_url &self.base_url
} }
@ -57,7 +69,7 @@ impl MikanClient {
} }
} }
impl Deref for MikanClient { impl Deref for AppMikanClient {
type Target = ClientWithMiddleware; type Target = ClientWithMiddleware;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
@ -65,4 +77,22 @@ impl Deref for MikanClient {
} }
} }
impl HttpClientTrait for MikanClient {} impl HttpClientTrait for AppMikanClient {}
pub struct AppMikanClientInitializer;
#[async_trait]
impl Initializer for AppMikanClientInitializer {
fn name(&self) -> String {
"AppMikanClientInitializer".to_string()
}
async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
let config = &app_context.config;
let app_mikan_conf = config.get_app_conf()?.mikan;
APP_MIKAN_CLIENT.get_or_try_init(|| AppMikanClient::new(app_mikan_conf))?;
Ok(())
}
}

View File

@ -4,7 +4,7 @@ use url::Url;
use crate::fetch::HttpClientConfig; use crate::fetch::HttpClientConfig;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct MikanConfig { pub struct AppMikanConfig {
pub http_client: HttpClientConfig, pub http_client: HttpClientConfig,
pub base_url: Url, pub base_url: Url,
} }

View File

@ -4,8 +4,8 @@ pub mod constants;
pub mod rss_extract; pub mod rss_extract;
pub mod web_extract; pub mod web_extract;
pub use client::{MikanAuthSecrecy, MikanClient}; pub use client::{AppMikanClient, AppMikanClientInitializer, MikanAuthSecrecy};
pub use config::MikanConfig; pub use config::AppMikanConfig;
pub use constants::MIKAN_BUCKET_KEY; pub use constants::MIKAN_BUCKET_KEY;
pub use rss_extract::{ pub use rss_extract::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanBangumiRssLink, MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanBangumiRssLink,

View File

@ -1,6 +1,7 @@
use std::borrow::Cow;
use chrono::DateTime;
+ use color_eyre::eyre;
use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
@ -8,9 +9,9 @@ use tracing::instrument;
use url::Url;
use crate::{
- errors::{RError, RResult},
+ errors::RecorderError,
extract::mikan::{
- MikanClient,
+ AppMikanClient,
web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
},
fetch::bytes::fetch_bytes,
@ -100,28 +101,28 @@ impl MikanRssChannel {
}
impl TryFrom<rss::Item> for MikanRssItem {
- type Error = RError;
+ type Error = RecorderError;
fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
- let enclosure = item
- .enclosure
- .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure")))?;
+ let enclosure = item.enclosure.ok_or_else(|| {
+ RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure"))
+ })?;
let mime_type = enclosure.mime_type;
if mime_type != BITTORRENT_MIME_TYPE {
- return Err(RError::MimeError {
+ return Err(RecorderError::MimeError {
expected: String::from(BITTORRENT_MIME_TYPE),
found: mime_type.to_string(),
desc: String::from("MikanRssItem"),
});
}
- let title = item
- .title
- .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?;
+ let title = item.title.ok_or_else(|| {
+ RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title"))
+ })?;
let enclosure_url = Url::parse(&enclosure.url).map_err(|inner| {
- RError::from_mikan_rss_invalid_field_and_source(
+ RecorderError::from_mikan_rss_invalid_field_and_source(
Cow::Borrowed("enclosure_url:enclosure.link"),
Box::new(inner),
)
@ -130,12 +131,14 @@ impl TryFrom<rss::Item> for MikanRssItem {
let homepage = item
.link
.and_then(|link| Url::parse(&link).ok())
- .ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link")))?;
+ .ok_or_else(|| {
+ RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link"))
+ })?;
let MikanEpisodeHomepage {
mikan_episode_id, ..
} = extract_mikan_episode_id_from_homepage(&homepage).ok_or_else(|| {
- RError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
+ RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
})?;
Ok(MikanRssItem {
@ -168,7 +171,7 @@ pub fn build_mikan_bangumi_rss_link(
mikan_base_url: impl IntoUrl,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
- ) -> RResult<Url> {
+ ) -> eyre::Result<Url> {
let mut url = mikan_base_url.into_url()?;
url.set_path("/RSS/Bangumi");
url.query_pairs_mut()
@ -183,7 +186,7 @@ pub fn build_mikan_bangumi_rss_link(
pub fn build_mikan_subscriber_aggregation_rss_link(
mikan_base_url: &str,
mikan_aggregation_id: &str,
- ) -> RResult<Url> {
+ ) -> eyre::Result<Url> {
let mut url = Url::parse(mikan_base_url)?;
url.set_path("/RSS/MyBangumi");
url.query_pairs_mut()
@ -223,9 +226,9 @@ pub fn extract_mikan_subscriber_aggregation_id_from_rss_link(
#[instrument(skip_all, fields(channel_rss_link = channel_rss_link.as_str()))]
pub async fn extract_mikan_rss_channel_from_rss_link(
- http_client: &MikanClient,
+ http_client: &AppMikanClient,
channel_rss_link: impl IntoUrl,
- ) -> RResult<MikanRssChannel> {
+ ) -> eyre::Result<MikanRssChannel> {
let bytes = fetch_bytes(http_client, channel_rss_link.as_str()).await?;
let channel = rss::Channel::read_from(&bytes[..])?;
@ -324,9 +327,9 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
},
))
} else {
- Err(RError::MikanRssInvalidFormatError).inspect_err(|error| {
+ Err(RecorderError::MikanRssInvalidFormatError)
+ .inspect_err(|error| {
tracing::warn!(error = %error);
})
+ .map_err(|error| error.into())
}
}
@ -354,7 +359,7 @@ mod tests {
let mikan_base_url = Url::parse(&mikan_server.url())?;
- let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
+ let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
{
let bangumi_rss_url =
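The TryFrom<rss::Item> conversion above is the piece that turns raw feed entries into typed items after rss::Channel::read_from. A minimal, self-contained sketch of the same pattern follows; FeedItem and its string error are illustrative stand-ins rather than the project's types, and only rss crate APIs already visible in this diff are used.

// Sketch: parse an RSS feed and convert each item with TryFrom, mirroring the
// MikanRssItem conversion above. `FeedItem` is an illustrative stand-in type.
use std::convert::TryFrom;

#[derive(Debug)]
struct FeedItem {
    title: String,
    torrent_url: String,
}

impl TryFrom<rss::Item> for FeedItem {
    type Error = String;

    fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
        // Missing fields become conversion errors, as in the diff above.
        let enclosure = item.enclosure.ok_or("missing enclosure")?;
        let title = item.title.ok_or("missing title")?;
        Ok(FeedItem {
            title,
            torrent_url: enclosure.url,
        })
    }
}

fn parse_feed(bytes: &[u8]) -> Result<Vec<FeedItem>, String> {
    let channel = rss::Channel::read_from(bytes).map_err(|e| e.to_string())?;
    // Collecting an iterator of Results into Result<Vec<_>, _> stops at the
    // first failing item and propagates that error.
    channel.items.into_iter().map(FeedItem::try_from).collect()
}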

View File

@ -1,25 +1,24 @@
use std::borrow::Cow;
- use async_stream::try_stream;
use bytes::Bytes;
- use futures::Stream;
use itertools::Itertools;
+ use loco_rs::app::AppContext;
use scraper::{Html, Selector};
use tracing::instrument;
use url::Url;
use super::{
- MIKAN_BUCKET_KEY, MikanBangumiRssLink, MikanClient, extract_mikan_bangumi_id_from_rss_link,
+ AppMikanClient, MIKAN_BUCKET_KEY, MikanBangumiRssLink, extract_mikan_bangumi_id_from_rss_link,
};
use crate::{
- app::AppContext,
+ app::AppContextExt,
- errors::{RError, RResult},
+ dal::DalContentCategory,
+ errors::RecorderError,
extract::{
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str,
},
fetch::{html::fetch_html, image::fetch_image},
- storage::StorageContentCategory,
};
#[derive(Clone, Debug, PartialEq)]
@ -111,9 +110,9 @@ pub fn extract_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeH
}
pub async fn extract_mikan_poster_meta_from_src(
- http_client: &MikanClient,
+ http_client: &AppMikanClient,
origin_poster_src_url: Url,
- ) -> Result<MikanBangumiPosterMeta, RError> {
+ ) -> Result<MikanBangumiPosterMeta, RecorderError> {
let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?;
Ok(MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
@ -126,12 +125,12 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
ctx: &AppContext,
origin_poster_src_url: Url,
subscriber_id: i32,
- ) -> RResult<MikanBangumiPosterMeta> {
+ ) -> Result<MikanBangumiPosterMeta, RecorderError> {
- let dal_client = &ctx.storage;
+ let dal_client = ctx.get_dal_client();
- let mikan_client = &ctx.mikan;
+ let mikan_client = ctx.get_mikan_client();
if let Some(poster_src) = dal_client
.exists_object(
- StorageContentCategory::Image,
+ DalContentCategory::Image,
subscriber_id,
Some(MIKAN_BUCKET_KEY),
&origin_poster_src_url.path().replace("/images/Bangumi/", ""),
@ -149,7 +148,7 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
let poster_str = dal_client
.store_object(
- StorageContentCategory::Image,
+ DalContentCategory::Image,
subscriber_id,
Some(MIKAN_BUCKET_KEY),
&origin_poster_src_url.path().replace("/images/Bangumi/", ""),
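Both hunks above implement a check-then-store flow against object storage: look the poster up with exists_object, and only on a miss fetch it and persist it with store_object. A storage-agnostic sketch of that pattern follows; the in-memory store and its method names are illustrative assumptions, not the project's storage API.

use std::collections::HashMap;

// Stand-in for the real object storage; keys map to stored bytes.
struct InMemoryStore {
    objects: HashMap<String, Vec<u8>>,
}

impl InMemoryStore {
    fn exists_object(&self, key: &str) -> Option<String> {
        self.objects.contains_key(key).then(|| key.to_string())
    }

    fn store_object(&mut self, key: &str, data: Vec<u8>) -> String {
        self.objects.insert(key.to_string(), data);
        key.to_string()
    }
}

// Return the cached object path if present, otherwise fetch and store it.
fn get_or_store(
    store: &mut InMemoryStore,
    key: &str,
    fetch: impl FnOnce() -> Vec<u8>, // stands in for fetch_image(...)
) -> String {
    if let Some(existing) = store.exists_object(key) {
        return existing;
    }
    let data = fetch();
    store.store_object(key, data)
}

fn main() {
    let mut store = InMemoryStore { objects: HashMap::new() };
    // First call fetches and stores; the second call must hit the cache.
    let first = get_or_store(&mut store, "202309/5ce9fed1.jpg", || vec![0u8; 16]);
    let second = get_or_store(&mut store, "202309/5ce9fed1.jpg", || unreachable!());
    assert_eq!(first, second);
}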
@ -166,9 +165,9 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
#[instrument(skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))]
pub async fn extract_mikan_episode_meta_from_episode_homepage(
- http_client: &MikanClient,
+ http_client: &AppMikanClient,
mikan_episode_homepage_url: Url,
- ) -> Result<MikanEpisodeMeta, RError> {
+ ) -> Result<MikanEpisodeMeta, RecorderError> {
let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?;
@ -184,7 +183,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
.select(bangumi_title_selector)
.next()
.map(extract_inner_text_from_element_ref)
- .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
+ .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -199,18 +198,22 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
.and_then(|el| el.value().attr("href"))
.and_then(|s| mikan_episode_homepage_url.join(s).ok())
.and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
- .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
+ .ok_or_else(|| {
+ RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
+ })
.inspect_err(|error| tracing::error!(error = %error))?;
let mikan_fansub_id = mikan_fansub_id
- .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id")))
+ .ok_or_else(|| {
+ RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id"))
+ })
.inspect_err(|error| tracing::error!(error = %error))?;
let episode_title = html
.select(&Selector::parse("title").unwrap())
.next()
.map(extract_inner_text_from_element_ref)
- .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
+ .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -218,7 +221,9 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
let MikanEpisodeHomepage {
mikan_episode_id, ..
} = extract_mikan_episode_id_from_homepage(&mikan_episode_homepage_url)
- .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id")))
+ .ok_or_else(|| {
+ RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
+ })
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -230,7 +235,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
)
.next()
.map(extract_inner_text_from_element_ref)
- .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
+ .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -271,9 +276,9 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
#[instrument(skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))]
pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
- http_client: &MikanClient,
+ http_client: &AppMikanClient,
mikan_bangumi_homepage_url: Url,
- ) -> Result<MikanBangumiMeta, RError> {
+ ) -> Result<MikanBangumiMeta, RecorderError> {
let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?;
let html = Html::parse_document(&content);
@ -287,7 +292,7 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
.select(bangumi_title_selector)
.next()
.map(extract_inner_text_from_element_ref)
- .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
+ .ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.inspect_err(|error| tracing::warn!(error = %error))?;
let mikan_bangumi_id = html
@ -301,7 +306,9 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
mikan_bangumi_id, ..
}| mikan_bangumi_id,
)
- .ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
+ .ok_or_else(|| {
+ RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
+ })
.inspect_err(|error| tracing::error!(error = %error))?;
let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
@ -350,20 +357,14 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
* @logined-required
*/
#[instrument(skip_all, fields(my_bangumi_page_url = my_bangumi_page_url.as_str()))]
- pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
+ pub async fn extract_mikan_bangumis_meta_from_my_bangumi_page(
- http_client: &MikanClient,
+ http_client: &AppMikanClient,
my_bangumi_page_url: Url,
- ) -> impl Stream<Item = Result<MikanBangumiMeta, RError>> {
+ ) -> Result<Vec<MikanBangumiMeta>, RecorderError> {
- try_stream! {
let mikan_base_url = Url::parse(&my_bangumi_page_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, my_bangumi_page_url.clone()).await?;
- let bangumi_container_selector = &Selector::parse(".sk-bangumi .an-ul>li").unwrap();
- let bangumi_info_selector = &Selector::parse(".an-info a.an-text").unwrap();
- let bangumi_poster_selector =
- &Selector::parse("span[data-src][data-bangumiid], span[data-bangumiid][style]")
- .unwrap();
let fansub_container_selector =
&Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap();
let fansub_title_selector = &Selector::parse(".tag-res-name[title]").unwrap();
@ -371,6 +372,12 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
&Selector::parse(".active[data-subtitlegroupid][data-bangumiid]").unwrap();
let bangumi_iters = {
+ let bangumi_container_selector = &Selector::parse(".sk-bangumi .an-ul>li").unwrap();
+ let bangumi_info_selector = &Selector::parse(".an-info a.an-text").unwrap();
+ let bangumi_poster_selector =
+ &Selector::parse("span[data-src][data-bangumiid], span[data-bangumiid][style]")
+ .unwrap();
let html = Html::parse_document(&content);
html.select(bangumi_container_selector)
@ -432,58 +439,52 @@ pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
.collect_vec()
};
+ let mut bangumi_list = vec![];
for (bangumi_title, mikan_bangumi_id, bangumi_expand_info_url, origin_poster_src) in
bangumi_iters
{
- if let Some((fansub_name, mikan_fansub_id)) = {
let bangumi_expand_info_content = fetch_html(http_client, bangumi_expand_info_url).await?;
let bangumi_expand_info_fragment = Html::parse_fragment(&bangumi_expand_info_content);
- bangumi_expand_info_fragment.select(fansub_container_selector).next().and_then(|fansub_info| {
+ for fansub_info in bangumi_expand_info_fragment.select(fansub_container_selector) {
if let (Some(fansub_name), Some(mikan_fansub_id)) = (
fansub_info
.select(fansub_title_selector)
.next()
- .and_then(|ele| ele.attr("title"))
- .map(String::from),
+ .and_then(|ele| ele.attr("title")),
fansub_info
.select(fansub_id_selector)
.next()
- .and_then(|ele| ele.attr("data-subtitlegroupid"))
- .map(String::from)
+ .and_then(|ele| ele.attr("data-subtitlegroupid")),
) {
- Some((fansub_name, mikan_fansub_id))
- } else {
- None
- }
- })
- } {
tracing::trace!(
- fansub_name,
+ fansub_name = &fansub_name,
mikan_fansub_id,
"subscribed fansub extracted"
);
- yield MikanBangumiMeta {
+ bangumi_list.push(MikanBangumiMeta {
homepage: build_mikan_bangumi_homepage(
mikan_base_url.clone(),
- &mikan_bangumi_id,
+ mikan_bangumi_id.as_str(),
- Some(&mikan_fansub_id),
+ Some(mikan_fansub_id),
),
bangumi_title: bangumi_title.to_string(),
mikan_bangumi_id: mikan_bangumi_id.to_string(),
- mikan_fansub_id: Some(mikan_fansub_id),
+ mikan_fansub_id: Some(mikan_fansub_id.to_string()),
- fansub: Some(fansub_name),
+ fansub: Some(fansub_name.to_string()),
origin_poster_src: origin_poster_src.clone(),
- };
+ })
}
}
}
+ Ok(bangumi_list)
}
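The two signatures above differ mainly in how callers consume the result: one variant yields MikanBangumiMeta values incrementally as an impl Stream, the other buffers them and resolves once as a Result<Vec<_>>. Below is a minimal sketch of consuming each shape; Meta is a stand-in type, and the example assumes tokio, futures, and async_stream are available, as the surrounding code suggests.

// Sketch: consuming a stream of results item-by-item vs. awaiting a Vec.
// `Meta` is an illustrative stand-in for MikanBangumiMeta.
use async_stream::try_stream;
use futures::{Stream, TryStreamExt, pin_mut};

#[derive(Debug)]
struct Meta {
    title: String,
}

fn metas_as_stream() -> impl Stream<Item = Result<Meta, std::io::Error>> {
    try_stream! {
        for i in 0..3 {
            // Each yield becomes an Ok item; errors would propagate via `?`.
            yield Meta { title: format!("bangumi-{i}") };
        }
    }
}

async fn metas_as_vec() -> Result<Vec<Meta>, std::io::Error> {
    Ok(vec![Meta { title: "bangumi-0".into() }])
}

#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
    // Stream shape: pin it, then process items as they arrive.
    let stream = metas_as_stream();
    pin_mut!(stream);
    while let Some(meta) = stream.try_next().await? {
        println!("streamed: {}", meta.title);
    }

    // Vec shape: a single await point, everything buffered before return.
    let all = metas_as_vec().await?;
    println!("buffered: {} items", all.len());
    Ok(())
}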
#[cfg(test)]
mod test {
#![allow(unused_variables)]
use color_eyre::eyre;
- use futures::{TryStreamExt, pin_mut};
use http::header;
use rstest::{fixture, rstest};
use secrecy::SecretString;
@ -509,7 +510,7 @@ mod test {
async fn test_extract_mikan_poster_from_src(before_each: ()) -> eyre::Result<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
- let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
+ let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;
@ -540,7 +541,7 @@ mod test {
async fn test_extract_mikan_episode(before_each: ()) -> eyre::Result<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
- let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
+ let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
let episode_homepage_url =
mikan_base_url.join("/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a")?;
@ -582,7 +583,7 @@ mod test {
) -> eyre::Result<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
- let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
+ let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
let bangumi_homepage_url = mikan_base_url.join("/Home/Bangumi/3416#370")?;
@ -625,7 +626,7 @@ mod test {
let my_bangumi_page_url = mikan_base_url.join("/Home/MyBangumi")?;
- let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
+ let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
{
let my_bangumi_without_cookie_mock = mikan_server
@ -638,11 +639,8 @@ mod test {
let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
&mikan_client,
my_bangumi_page_url.clone(),
- );
+ )
+ .await?;
- pin_mut!(bangumi_metas);
- let bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
assert!(bangumi_metas.is_empty());
@ -681,13 +679,11 @@ mod test {
Gecko) Chrome/133.0.0.0 Safari/537.36 Edg/133.0.0.0",
)),
})?;
let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
&mikan_client_with_cookie,
my_bangumi_page_url,
- );
+ )
- pin_mut!(bangumi_metas);
+ .await?;
- let bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
assert!(!bangumi_metas.is_empty());

View File

@ -2,12 +2,12 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
- use crate::errors::RError;
+ use crate::errors::RecorderError;
pub async fn fetch_bytes<T: IntoUrl, H: HttpClientTrait>(
client: &H,
url: T,
- ) -> Result<Bytes, RError> {
+ ) -> Result<Bytes, RecorderError> {
let bytes = client
.get(url)
.send()
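For reference, here is a reqwest-only sketch of a fetch-bytes helper in the same shape as the signature above; the project's HttpClientTrait wrapper and custom error type are omitted, and the error_for_status call is an assumption about reasonable behavior rather than a claim about the real implementation.

use bytes::Bytes;
use reqwest::{Client, IntoUrl};

// Reqwest-only sketch of a generic fetch-bytes helper.
async fn fetch_bytes<T: IntoUrl>(client: &Client, url: T) -> Result<Bytes, reqwest::Error> {
    let bytes = client
        .get(url)
        .send()
        .await?
        .error_for_status()? // turn 4xx/5xx responses into errors
        .bytes()
        .await?;
    Ok(bytes)
}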

View File

@ -3,7 +3,7 @@ use std::{fmt::Debug, ops::Deref, sync::Arc, time::Duration};
use async_trait::async_trait;
use axum::http::{self, Extensions};
use http_cache_reqwest::{
- Cache, CacheManager, CacheMode, HttpCache, HttpCacheOptions, MokaManager,
+ CACacheManager, Cache, CacheManager, CacheMode, HttpCache, HttpCacheOptions, MokaManager,
};
use leaky_bucket::RateLimiter;
use reqwest::{ClientBuilder, Request, Response};
@ -17,7 +17,7 @@ use serde_with::serde_as;
use thiserror::Error;
use super::HttpClientSecrecyDataTrait;
- use crate::fetch::get_random_mobile_ua;
+ use crate::{app::App, fetch::get_random_mobile_ua};
pub struct RateLimiterMiddleware {
rate_limiter: RateLimiter,
@ -40,6 +40,7 @@ impl Middleware for RateLimiterMiddleware {
#[serde(rename_all = "snake_case", tag = "type")]
pub enum HttpClientCacheBackendConfig {
Moka { cache_size: u64 },
+ CACache { cache_path: String },
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
@ -237,6 +238,12 @@ impl HttpClient {
.cache_backend
.as_ref()
.map(|b| match b {
+ HttpClientCacheBackendConfig::CACache { cache_path } => {
+ let path = std::path::PathBuf::from(
+ App::get_working_root().join(cache_path).as_str(),
+ );
+ CacheBackend::new(CACacheManager { path })
+ }
HttpClientCacheBackendConfig::Moka { cache_size } => {
CacheBackend::new(MokaManager {
cache: Arc::new(moka::future::Cache::new(*cache_size)),
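The cache backend above is selected through an internally tagged serde enum (tag = "type", rename_all = "snake_case"). The sketch below uses a simplified copy of that enum to show how such a config value deserializes; the CACache variant is left out here because its serialized tag depends on serde's renaming of consecutive capitals, which is easy to get wrong without checking.

use serde::Deserialize;

// Simplified copy of the tagged enum above, only to show how the `type`
// tag and snake_case renaming play out during deserialization.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case", tag = "type")]
enum CacheBackendConfig {
    Moka { cache_size: u64 },
}

fn main() -> Result<(), serde_json::Error> {
    // The variant is picked by the "type" field; remaining fields fill the variant.
    let parsed: CacheBackendConfig =
        serde_json::from_str(r#"{ "type": "moka", "cache_size": 1024 }"#)?;
    assert_eq!(parsed, CacheBackendConfig::Moka { cache_size: 1024 });
    Ok(())
}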

View File

@ -5,7 +5,7 @@ use reqwest::{ClientBuilder, cookie::Jar};
use secrecy::zeroize::Zeroize;
use url::Url;
- use crate::errors::RError;
+ use crate::errors::RecorderError;
pub trait HttpClientSecrecyDataTrait: Zeroize {
fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
@ -24,7 +24,7 @@ impl HttpClientCookiesAuth {
cookies: &str,
url: &Url,
user_agent: Option<String>,
- ) -> Result<Self, RError> {
+ ) -> Result<Self, RecorderError> {
let cookie_jar = Arc::new(Jar::default());
for cookie in Cookie::split_parse(cookies).try_collect::<Vec<_>>()? {
cookie_jar.add_cookie_str(&cookie.to_string(), url);
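The constructor above splits a raw Cookie header string and loads every cookie into a shared reqwest jar before the client is built. A standalone sketch of that mechanism follows, assuming the cookie crate and reqwest's cookies feature, both implied by the surrounding imports; the wrapping auth type and error handling are omitted, and the cookie values are placeholders.

use std::sync::Arc;

use cookie::Cookie;
use reqwest::{Client, cookie::Jar};
use url::Url;

// Parse a raw `Cookie:` header string and attach the resulting jar to a
// reqwest client, as the constructor above does.
fn client_with_cookies(cookies: &str, url: &Url) -> Result<Client, Box<dyn std::error::Error>> {
    let jar = Arc::new(Jar::default());
    for cookie in Cookie::split_parse(cookies) {
        let cookie = cookie?; // each entry may fail to parse
        jar.add_cookie_str(&cookie.to_string(), url);
    }
    let client = Client::builder().cookie_provider(jar).build()?;
    Ok(client)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let url = Url::parse("https://mikanani.me/")?;
    let _client = client_with_cookies("session=abc; theme=dark", &url)?;
    Ok(())
}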

Some files were not shown because too many files have changed in this diff.