Compare commits

...

6 Commits

502 changed files with 13714 additions and 30284 deletions

1187
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -3,9 +3,6 @@ members = ["apps/recorder"]
resolver = "2"
[patch.crates-io]
# loco-rs = { git = "https://github.com/lonelyhentxi/loco.git", rev = "beb890e" }
# loco-rs = { git = "https://github.com/loco-rs/loco.git" }
# loco-rs = { path = "./patches/loco" }
jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
# [patch."https://github.com/lonelyhentxi/qbit.git"]

View File

@ -1,29 +0,0 @@
# Server
AUTH_TYPE="basic" #
BASIC_USER="konobangu"
BASIC_PASSWORD="konobangu"
OIDC_PROVIDER_ENDPOINT="https://some-oidc-auth.com/oidc/.well-known/openid-configuration"
OIDC_CLIENT_ID=""
OIDC_CLIENT_SECRET=""
OIDC_ISSUER="https://some-oidc-auth.com/oidc"
OIDC_AUDIENCE="https://konobangu.com/api"
OIDC_ICON_URL=""
OIDC_EXTRA_SCOPE_REGEX=""
OIDC_EXTRA_CLAIM_KEY=""
OIDC_EXTRA_CLAIM_VALUE=""
DATABASE_URL="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"
BETTERSTACK_API_KEY=""
BETTERSTACK_URL=""
FLAGS_SECRET=""
ARCJET_KEY=""
SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# Client
NEXT_PUBLIC_APP_URL="http://localhost:5000"
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"

View File

@ -1,29 +0,0 @@
# AUTH
AUTH_TYPE="basic"
NEXT_PUBLIC_OIDC_PROVIDER_ENDPOINT="https://some-oidc-auth.com/oidc/.well-known/openid-configuration"
NEXT_PUBLIC_OIDC_CLIENT_ID=""
NEXT_PUBLIC_OIDC_CLIENT_SECRET=""
NEXT_PUBLIC_OIDC_ICON_URL=""
OIDC_ISSUER="https://some-oidc-auth.com/oidc"
OIDC_AUDIENCE="https://konobangu.com/api"
OIDC_EXTRA_SCOPES="" # 如 "read:konobangu,write:konobangu"
OIDC_EXTRA_CLAIM_KEY=""
OIDC_EXTRA_CLAIM_VALUE=""
# DATABASE
DATABASE_URL="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu"
# SERVER MISC
BETTERSTACK_API_KEY=""
BETTERSTACK_URL=""
FLAGS_SECRET=""
ARCJET_KEY=""
SVIX_TOKEN=""
LIVEBLOCKS_SECRET=""
# WEBUI
NEXT_PUBLIC_APP_URL="http://localhost:5000"
NEXT_PUBLIC_WEB_URL="http://localhost:5001"
NEXT_PUBLIC_DOCS_URL="http://localhost:5004"
NEXT_PUBLIC_VERCEL_PROJECT_PRODUCTION_URL="https://konobangu.com"

45
apps/app/.gitignore vendored
View File

@ -1,45 +0,0 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
.env*.local
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts
# prisma
.env
# react.email
.react-email
# Sentry
.sentryclirc

View File

@ -1,13 +0,0 @@
import { render, screen } from '@testing-library/react';
import { expect, test } from 'vitest';
import Page from '../app/(unauthenticated)/sign-in/[[...sign-in]]/page';
test('Sign In Page', () => {
render(<Page />);
expect(
screen.getByRole('heading', {
level: 1,
name: 'Welcome back',
})
).toBeDefined();
});

View File

@ -1,13 +0,0 @@
import { render, screen } from '@testing-library/react';
import { expect, test } from 'vitest';
import Page from '../app/(unauthenticated)/sign-up/[[...sign-up]]/page';
test('Sign Up Page', () => {
render(<Page />);
expect(
screen.getByRole('heading', {
level: 1,
name: 'Create an account',
})
).toBeDefined();
});

View File

@ -1,59 +0,0 @@
'use client';
import { useOthers, useSelf } from '@konobangu/collaboration/hooks';
import {
Avatar,
AvatarFallback,
AvatarImage,
} from '@konobangu/design-system/components/ui/avatar';
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from '@konobangu/design-system/components/ui/tooltip';
import { tailwind } from '@konobangu/tailwind-config';
type PresenceAvatarProps = {
info?: Liveblocks['UserMeta']['info'];
};
const PresenceAvatar = ({ info }: PresenceAvatarProps) => (
<Tooltip delayDuration={0}>
<TooltipTrigger>
<Avatar className="h-7 w-7 bg-secondary ring-1 ring-background">
<AvatarImage src={info?.avatar} alt={info?.name} />
<AvatarFallback className="text-xs">
{info?.name?.slice(0, 2)}
</AvatarFallback>
</Avatar>
</TooltipTrigger>
<TooltipContent collisionPadding={4}>
<p>{info?.name ?? 'Unknown'}</p>
</TooltipContent>
</Tooltip>
);
export const AvatarStack = () => {
const others = useOthers();
const self = useSelf();
const hasMoreUsers = others.length > 3;
return (
<div className="-space-x-1 flex items-center px-4">
{others.slice(0, 3).map(({ connectionId, info }) => (
<PresenceAvatar key={connectionId} info={info} />
))}
{hasMoreUsers && (
<PresenceAvatar
info={{
name: `+${others.length - 3}`,
color: tailwind.theme.colors.gray[500],
}}
/>
)}
{self && <PresenceAvatar info={self.info} />}
</div>
);
};

View File

@ -1,48 +0,0 @@
'use client';
import { getUsers } from '@/app/actions/users/get';
import { searchUsers } from '@/app/actions/users/search';
import { Room } from '@konobangu/collaboration/room';
import type { ReactNode } from 'react';
export const CollaborationProvider = ({
orgId,
children,
}: {
orgId: string;
children: ReactNode;
}) => {
const resolveUsers = async ({ userIds }: { userIds: string[] }) => {
const response = await getUsers(userIds);
if ('error' in response) {
throw new Error('Problem resolving users');
}
return response.data;
};
const resolveMentionSuggestions = async ({ text }: { text: string }) => {
const response = await searchUsers(text);
if ('error' in response) {
throw new Error('Problem resolving mention suggestions');
}
return response.data;
};
return (
<Room
id={`${orgId}:presence`}
authEndpoint="/api/collaboration/auth"
fallback={
<div className="px-3 text-muted-foreground text-xs">Loading...</div>
}
resolveUsers={resolveUsers}
resolveMentionSuggestions={resolveMentionSuggestions}
>
{children}
</Room>
);
};

View File

@ -1,106 +0,0 @@
'use client';
import { useMyPresence, useOthers } from '@konobangu/collaboration/hooks';
import { useEffect } from 'react';
const Cursor = ({
name,
color,
x,
y,
}: {
name: string | undefined;
color: string;
x: number;
y: number;
}) => (
<div
className="pointer-events-none absolute top-0 left-0 z-[999] select-none transition-transform duration-100"
style={{
transform: `translateX(${x}px) translateY(${y}px)`,
}}
>
<svg
className="absolute top-0 left-0"
width="24"
height="36"
viewBox="0 0 24 36"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<title>Cursor</title>
<path
d="M5.65376 12.3673H5.46026L5.31717 12.4976L0.500002 16.8829L0.500002 1.19841L11.7841 12.3673H5.65376Z"
fill={color}
/>
</svg>
<div
className="absolute top-4 left-1.5 whitespace-nowrap rounded-full px-2 py-0.5 text-white text-xs"
style={{
backgroundColor: color,
}}
>
{name}
</div>
</div>
);
export const Cursors = () => {
/**
* useMyPresence returns the presence of the current user and a function to update it.
* updateMyPresence is different than the setState function returned by the useState hook from React.
* You don't need to pass the full presence object to update it.
* See https://liveblocks.io/docs/api-reference/liveblocks-react#useMyPresence for more information
*/
const [_cursor, updateMyPresence] = useMyPresence();
/**
* Return all the other users in the room and their presence (a cursor position in this case)
*/
const others = useOthers();
useEffect(() => {
const onPointerMove = (event: PointerEvent) => {
// Update the user cursor position on every pointer move
updateMyPresence({
cursor: {
x: Math.round(event.clientX),
y: Math.round(event.clientY),
},
});
};
const onPointerLeave = () => {
// When the pointer goes out, set cursor to null
updateMyPresence({
cursor: null,
});
};
document.body.addEventListener('pointermove', onPointerMove);
document.body.addEventListener('pointerleave', onPointerLeave);
return () => {
document.body.removeEventListener('pointermove', onPointerMove);
document.body.removeEventListener('pointerleave', onPointerLeave);
};
}, [updateMyPresence]);
return others.map(({ connectionId, presence, info }) => {
if (!presence.cursor) {
return null;
}
return (
<Cursor
key={`cursor-${connectionId}`}
// connectionId is an integer that is incremented at every new connections
// Assigning a color with a modulo makes sure that a specific user has the same colors on every clients
color={info.color}
x={presence.cursor.x}
y={presence.cursor.y}
name={info?.name}
/>
);
});
};

View File

@ -1,43 +0,0 @@
import {
Breadcrumb,
BreadcrumbItem,
BreadcrumbLink,
BreadcrumbList,
BreadcrumbPage,
BreadcrumbSeparator,
} from '@konobangu/design-system/components/ui/breadcrumb';
import { Separator } from '@konobangu/design-system/components/ui/separator';
import { SidebarTrigger } from '@konobangu/design-system/components/ui/sidebar';
import { Fragment, type ReactNode } from 'react';
type HeaderProps = {
pages: string[];
page: string;
children?: ReactNode;
};
export const Header = ({ pages, page, children }: HeaderProps) => (
<header className="flex h-16 shrink-0 items-center justify-between gap-2">
<div className="flex items-center gap-2 px-4">
<SidebarTrigger className="-ml-1" />
<Separator orientation="vertical" className="mr-2 h-4" />
<Breadcrumb>
<BreadcrumbList>
{pages.map((page, index) => (
<Fragment key={page}>
{index > 0 && <BreadcrumbSeparator className="hidden md:block" />}
<BreadcrumbItem className="hidden md:block">
<BreadcrumbLink href="#">{page}</BreadcrumbLink>
</BreadcrumbItem>
</Fragment>
))}
<BreadcrumbSeparator className="hidden md:block" />
<BreadcrumbItem>
<BreadcrumbPage>{page}</BreadcrumbPage>
</BreadcrumbItem>
</BreadcrumbList>
</Breadcrumb>
</div>
{children}
</header>
);

View File

@ -1,44 +0,0 @@
'use client';
import { analytics } from '@konobangu/analytics/client';
import { useSession } from '@konobangu/auth/client';
import { usePathname, useSearchParams } from 'next/navigation';
import { useEffect, useRef } from 'react';
export const PostHogIdentifier = () => {
const session = useSession();
const user = session?.data?.user;
const identified = useRef(false);
const pathname = usePathname();
const searchParams = useSearchParams();
useEffect(() => {
// Track pageviews
if (pathname && analytics) {
let url = window.origin + pathname;
if (searchParams.toString()) {
url = `${url}?${searchParams.toString()}`;
}
analytics.capture('$pageview', {
$current_url: url,
});
}
}, [pathname, searchParams]);
useEffect(() => {
if (!user || identified.current) {
return;
}
analytics.identify(user.id, {
email: user.email,
name: user.name,
createdAt: user.createdAt,
avatar: user.image,
});
identified.current = true;
}, [user]);
return null;
};

View File

@ -1,342 +0,0 @@
'use client';
// import { OrganizationSwitcher, UserButton } from '@konobangu/auth/client';
import { ModeToggle } from '@konobangu/design-system/components/mode-toggle';
import {
Collapsible,
CollapsibleContent,
CollapsibleTrigger,
} from '@konobangu/design-system/components/ui/collapsible';
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from '@konobangu/design-system/components/ui/dropdown-menu';
import {
Sidebar,
SidebarContent,
SidebarFooter,
SidebarGroup,
SidebarGroupContent,
SidebarGroupLabel,
SidebarHeader,
SidebarInset,
SidebarMenu,
SidebarMenuAction,
SidebarMenuButton,
SidebarMenuItem,
SidebarMenuSub,
SidebarMenuSubButton,
SidebarMenuSubItem,
useSidebar,
} from '@konobangu/design-system/components/ui/sidebar';
import { cn } from '@konobangu/design-system/lib/utils';
import {
AnchorIcon,
BookOpenIcon,
BotIcon,
ChevronRightIcon,
FolderIcon,
FrameIcon,
LifeBuoyIcon,
MapIcon,
MoreHorizontalIcon,
PieChartIcon,
SendIcon,
Settings2Icon,
ShareIcon,
SquareTerminalIcon,
Trash2Icon,
} from 'lucide-react';
import type { ReactNode } from 'react';
type GlobalSidebarProperties = {
readonly children: ReactNode;
};
const data = {
user: {
name: 'shadcn',
email: 'm@example.com',
avatar: '/avatars/shadcn.jpg',
},
navMain: [
{
title: 'Playground',
url: '#',
icon: SquareTerminalIcon,
isActive: true,
items: [
{
title: 'History',
url: '#',
},
{
title: 'Starred',
url: '#',
},
{
title: 'Settings',
url: '#',
},
],
},
{
title: 'Models',
url: '#',
icon: BotIcon,
items: [
{
title: 'Genesis',
url: '#',
},
{
title: 'Explorer',
url: '#',
},
{
title: 'Quantum',
url: '#',
},
],
},
{
title: 'Documentation',
url: '#',
icon: BookOpenIcon,
items: [
{
title: 'Introduction',
url: '#',
},
{
title: 'Get Started',
url: '#',
},
{
title: 'Tutorials',
url: '#',
},
{
title: 'Changelog',
url: '#',
},
],
},
{
title: 'Settings',
url: '#',
icon: Settings2Icon,
items: [
{
title: 'General',
url: '#',
},
{
title: 'Team',
url: '#',
},
{
title: 'Billing',
url: '#',
},
{
title: 'Limits',
url: '#',
},
],
},
],
navSecondary: [
{
title: 'Webhooks',
url: '/webhooks',
icon: AnchorIcon,
},
{
title: 'Support',
url: '#',
icon: LifeBuoyIcon,
},
{
title: 'Feedback',
url: '#',
icon: SendIcon,
},
],
projects: [
{
name: 'Design Engineering',
url: '#',
icon: FrameIcon,
},
{
name: 'Sales & Marketing',
url: '#',
icon: PieChartIcon,
},
{
name: 'Travel',
url: '#',
icon: MapIcon,
},
],
};
export const GlobalSidebar = ({ children }: GlobalSidebarProperties) => {
const sidebar = useSidebar();
return (
<>
<Sidebar variant="inset">
<SidebarHeader>
<SidebarMenu>
<SidebarMenuItem>
<div
className={cn(
'h-[36px] overflow-hidden transition-all [&>div]:w-full',
sidebar.open ? '' : '-mx-1'
)}
>
{/* <OrganizationSwitcher
hidePersonal
afterSelectOrganizationUrl="/"
/> */}
</div>
</SidebarMenuItem>
</SidebarMenu>
</SidebarHeader>
<SidebarContent>
<SidebarGroup>
<SidebarGroupLabel>Platform</SidebarGroupLabel>
<SidebarMenu>
{data.navMain.map((item) => (
<Collapsible
key={item.title}
asChild
defaultOpen={item.isActive}
>
<SidebarMenuItem>
<SidebarMenuButton asChild tooltip={item.title}>
<a href={item.url}>
<item.icon />
<span>{item.title}</span>
</a>
</SidebarMenuButton>
{item.items?.length ? (
<>
<CollapsibleTrigger asChild>
<SidebarMenuAction className="data-[state=open]:rotate-90">
<ChevronRightIcon />
<span className="sr-only">Toggle</span>
</SidebarMenuAction>
</CollapsibleTrigger>
<CollapsibleContent>
<SidebarMenuSub>
{item.items?.map((subItem) => (
<SidebarMenuSubItem key={subItem.title}>
<SidebarMenuSubButton asChild>
<a href={subItem.url}>
<span>{subItem.title}</span>
</a>
</SidebarMenuSubButton>
</SidebarMenuSubItem>
))}
</SidebarMenuSub>
</CollapsibleContent>
</>
) : null}
</SidebarMenuItem>
</Collapsible>
))}
</SidebarMenu>
</SidebarGroup>
<SidebarGroup className="group-data-[collapsible=icon]:hidden">
<SidebarGroupLabel>Projects</SidebarGroupLabel>
<SidebarMenu>
{data.projects.map((item) => (
<SidebarMenuItem key={item.name}>
<SidebarMenuButton asChild>
<a href={item.url}>
<item.icon />
<span>{item.name}</span>
</a>
</SidebarMenuButton>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<SidebarMenuAction showOnHover>
<MoreHorizontalIcon />
<span className="sr-only">More</span>
</SidebarMenuAction>
</DropdownMenuTrigger>
<DropdownMenuContent
className="w-48"
side="bottom"
align="end"
>
<DropdownMenuItem>
<FolderIcon className="text-muted-foreground" />
<span>View Project</span>
</DropdownMenuItem>
<DropdownMenuItem>
<ShareIcon className="text-muted-foreground" />
<span>Share Project</span>
</DropdownMenuItem>
<DropdownMenuSeparator />
<DropdownMenuItem>
<Trash2Icon className="text-muted-foreground" />
<span>Delete Project</span>
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</SidebarMenuItem>
))}
<SidebarMenuItem>
<SidebarMenuButton>
<MoreHorizontalIcon />
<span>More</span>
</SidebarMenuButton>
</SidebarMenuItem>
</SidebarMenu>
</SidebarGroup>
<SidebarGroup className="mt-auto">
<SidebarGroupContent>
<SidebarMenu>
{data.navSecondary.map((item) => (
<SidebarMenuItem key={item.title}>
<SidebarMenuButton asChild>
<a href={item.url}>
<item.icon />
<span>{item.title}</span>
</a>
</SidebarMenuButton>
</SidebarMenuItem>
))}
</SidebarMenu>
</SidebarGroupContent>
</SidebarGroup>
</SidebarContent>
<SidebarFooter>
<SidebarMenu>
<SidebarMenuItem className="flex items-center gap-2">
{/* <UserButton
showName
appearance={{
elements: {
rootBox: 'flex overflow-hidden w-full',
userButtonBox: 'flex-row-reverse',
userButtonOuterIdentifier: 'truncate pl-0',
},
}}
/> */}
<ModeToggle />
</SidebarMenuItem>
</SidebarMenu>
</SidebarFooter>
</Sidebar>
<SidebarInset>{children}</SidebarInset>
</>
);
};

View File

@ -1,42 +0,0 @@
import { getSessionFromHeaders } from '@konobangu/auth/server';
import { SidebarProvider } from '@konobangu/design-system/components/ui/sidebar';
import { env } from '@konobangu/env';
import { showBetaFeature } from '@konobangu/feature-flags';
import { secure } from '@konobangu/security';
import { redirect } from 'next/navigation';
import type { ReactNode } from 'react';
import { PostHogIdentifier } from './components/posthog-identifier';
import { GlobalSidebar } from './components/sidebar';
type AppLayoutProperties = {
readonly children: ReactNode;
};
const AppLayout = async ({ children }: AppLayoutProperties) => {
if (env.ARCJET_KEY) {
await secure(['CATEGORY:PREVIEW']);
}
const { user } = await getSessionFromHeaders();
if (!user) {
return redirect('/sign-in'); // from next/navigation
}
const betaFeature = await showBetaFeature();
return (
<SidebarProvider>
<GlobalSidebar>
{betaFeature && (
<div className="m-4 rounded-full bg-success p-1.5 text-center text-sm text-success-foreground">
Beta feature now available
</div>
)}
{children}
</GlobalSidebar>
<PostHogIdentifier />
</SidebarProvider>
);
};
export default AppLayout;

View File

@ -1,57 +0,0 @@
import { getSessionFromHeaders } from '@konobangu/auth/server';
import { database } from '@konobangu/database';
import { env } from '@konobangu/env';
import type { Metadata } from 'next';
import dynamic from 'next/dynamic';
import { notFound } from 'next/navigation';
import { AvatarStack } from './components/avatar-stack';
import { Cursors } from './components/cursors';
import { Header } from './components/header';
const title = 'Acme Inc';
const description = 'My application.';
const CollaborationProvider = dynamic(() =>
import('./components/collaboration-provider').then(
(mod) => mod.CollaborationProvider
)
);
export const metadata: Metadata = {
title,
description,
};
const App = async () => {
const pages = await database.selectFrom('page').selectAll().execute();
const { orgId } = await getSessionFromHeaders();
if (!orgId) {
notFound();
}
return (
<>
<Header pages={['Building Your Application']} page="Data Fetching">
{env.LIVEBLOCKS_SECRET && (
<CollaborationProvider orgId={orgId}>
<AvatarStack />
<Cursors />
</CollaborationProvider>
)}
</Header>
<div className="flex flex-1 flex-col gap-4 p-4 pt-0">
<div className="grid auto-rows-min gap-4 md:grid-cols-3">
{pages.map((page) => (
<div key={page.id} className="aspect-video rounded-xl bg-muted/50">
{page.name}
</div>
))}
</div>
<div className="min-h-[100vh] flex-1 rounded-xl bg-muted/50 md:min-h-min" />
</div>
</>
);
};
export default App;

View File

@ -1,29 +0,0 @@
import { webhooks } from '@konobangu/webhooks';
import { notFound } from 'next/navigation';
export const metadata = {
title: 'Webhooks',
description: 'Send webhooks to your users.',
};
const WebhooksPage = async () => {
const response = await webhooks.getAppPortal();
if (!response?.url) {
notFound();
}
return (
<div className="h-full w-full overflow-hidden">
<iframe
title="Webhooks"
src={response.url}
className="h-full w-full border-none"
allow="clipboard-write"
loading="lazy"
/>
</div>
);
};
export default WebhooksPage;

View File

@ -1,58 +0,0 @@
import { ModeToggle } from '@konobangu/design-system/components/mode-toggle';
import { env } from '@konobangu/env';
import { CommandIcon } from 'lucide-react';
import Link from 'next/link';
import type { ReactNode } from 'react';
type AuthLayoutProps = {
readonly children: ReactNode;
};
const AuthLayout = ({ children }: AuthLayoutProps) => (
<div className="container relative grid h-dvh flex-col items-center justify-center lg:max-w-none lg:grid-cols-2 lg:px-0">
<div className="relative hidden h-full flex-col bg-muted p-10 text-white lg:flex dark:border-r">
<div className="absolute inset-0 bg-zinc-900" />
<div className="relative z-20 flex items-center font-medium text-lg">
<CommandIcon className="mr-2 h-6 w-6" />
Acme Inc
</div>
<div className="absolute top-4 right-4">
<ModeToggle />
</div>
<div className="relative z-20 mt-auto">
<blockquote className="space-y-2">
<p className="text-lg">
&ldquo;This library has saved me countless hours of work and helped
me deliver stunning designs to my clients faster than ever
before.&rdquo;
</p>
<footer className="text-sm">Sofia Davis</footer>
</blockquote>
</div>
</div>
<div className="lg:p-8">
<div className="mx-auto flex w-full max-w-[400px] flex-col justify-center space-y-6">
{children}
<p className="px-8 text-center text-muted-foreground text-sm">
By clicking continue, you agree to our{' '}
<Link
href={new URL('/legal/terms', env.NEXT_PUBLIC_WEB_URL).toString()}
className="underline underline-offset-4 hover:text-primary"
>
Terms of Service
</Link>{' '}
and{' '}
<Link
href={new URL('/legal/privacy', env.NEXT_PUBLIC_WEB_URL).toString()}
className="underline underline-offset-4 hover:text-primary"
>
Privacy Policy
</Link>
.
</p>
</div>
</div>
</div>
);
export default AuthLayout;

View File

@ -1,23 +0,0 @@
import { createMetadata } from '@konobangu/seo/metadata';
import type { Metadata } from 'next';
import dynamic from 'next/dynamic';
const title = 'Welcome back';
const description = 'Enter your details to sign in.';
const SignIn = dynamic(() =>
import('@konobangu/auth/components/sign-in').then((mod) => mod.SignIn)
);
export const metadata: Metadata = createMetadata({ title, description });
const SignInPage = () => (
<>
<div className="flex flex-col space-y-2 text-center">
<h1 className="font-semibold text-2xl tracking-tight">{title}</h1>
<p className="text-muted-foreground text-sm">{description}</p>
</div>
<SignIn />
</>
);
export default SignInPage;

View File

@ -1,23 +0,0 @@
import { createMetadata } from '@konobangu/seo/metadata';
import type { Metadata } from 'next';
import dynamic from 'next/dynamic';
const title = 'Create an account';
const description = 'Enter your details to get started.';
const SignUp = dynamic(() =>
import('@konobangu/auth/components/sign-up').then((mod) => mod.SignUp)
);
export const metadata: Metadata = createMetadata({ title, description });
const SignUpPage = () => (
<>
<div className="flex flex-col space-y-2 text-center">
<h1 className="font-semibold text-2xl tracking-tight">{title}</h1>
<p className="text-muted-foreground text-sm">{description}</p>
</div>
<SignUp />
</>
);
export default SignUpPage;

View File

@ -1,3 +0,0 @@
import { getFlags } from '@konobangu/feature-flags/access';
export const GET = getFlags;

View File

@ -1,63 +0,0 @@
'use server';
import {
getFullOrganizationFromSession,
getSessionFromHeaders,
} from '@konobangu/auth/server';
import { tailwind } from '@konobangu/tailwind-config';
const colors = [
tailwind.theme.colors.red[500],
tailwind.theme.colors.orange[500],
tailwind.theme.colors.amber[500],
tailwind.theme.colors.yellow[500],
tailwind.theme.colors.lime[500],
tailwind.theme.colors.green[500],
tailwind.theme.colors.emerald[500],
tailwind.theme.colors.teal[500],
tailwind.theme.colors.cyan[500],
tailwind.theme.colors.sky[500],
tailwind.theme.colors.blue[500],
tailwind.theme.colors.indigo[500],
tailwind.theme.colors.violet[500],
tailwind.theme.colors.purple[500],
tailwind.theme.colors.fuchsia[500],
tailwind.theme.colors.pink[500],
tailwind.theme.colors.rose[500],
];
export const getUsers = async (
userIds: string[]
): Promise<
| {
data: Liveblocks['UserMeta']['info'][];
}
| {
error: unknown;
}
> => {
try {
const session = await getSessionFromHeaders();
const { orgId } = session;
if (!orgId) {
throw new Error('Not logged in');
}
const { fullOrganization } = await getFullOrganizationFromSession(session);
const members = fullOrganization?.members || [];
const data: Liveblocks['UserMeta']['info'][] = members
.filter((user) => user?.userId && userIds.includes(user?.userId))
.map((user) => ({
name: user.user.name ?? user.user.email ?? 'Unknown user',
picture: user.user.image,
color: colors[Math.floor(Math.random() * colors.length)],
}));
return { data };
} catch (error) {
return { error };
}
};

View File

@ -1,50 +0,0 @@
'use server';
import {
getFullOrganizationFromSession,
getSessionFromHeaders,
} from '@konobangu/auth/server';
import Fuse from 'fuse.js';
export const searchUsers = async (
query: string
): Promise<
| {
data: string[];
}
| {
error: unknown;
}
> => {
try {
const session = await getSessionFromHeaders();
const { orgId } = session;
if (!orgId) {
throw new Error('Not logged in');
}
const { fullOrganization } = await getFullOrganizationFromSession(session);
const members = fullOrganization?.members || [];
const users = members.map((user) => ({
id: user.id,
name: user.user.name ?? user.user.email ?? 'Unknown user',
imageUrl: user.user.image,
}));
const fuse = new Fuse(users, {
keys: ['name'],
minMatchCharLength: 1,
threshold: 0.3,
});
const results = fuse.search(query);
const data = results.map((result) => result.item.id);
return { data };
} catch (error) {
return { error };
}
};

View File

@ -1,42 +0,0 @@
import { getSessionFromHeaders } from '@konobangu/auth/server';
import { authenticate } from '@konobangu/collaboration/auth';
import { tailwind } from '@konobangu/tailwind-config';
const COLORS = [
tailwind.theme.colors.red[500],
tailwind.theme.colors.orange[500],
tailwind.theme.colors.amber[500],
tailwind.theme.colors.yellow[500],
tailwind.theme.colors.lime[500],
tailwind.theme.colors.green[500],
tailwind.theme.colors.emerald[500],
tailwind.theme.colors.teal[500],
tailwind.theme.colors.cyan[500],
tailwind.theme.colors.sky[500],
tailwind.theme.colors.blue[500],
tailwind.theme.colors.indigo[500],
tailwind.theme.colors.violet[500],
tailwind.theme.colors.purple[500],
tailwind.theme.colors.fuchsia[500],
tailwind.theme.colors.pink[500],
tailwind.theme.colors.rose[500],
];
export const POST = async () => {
const session = await getSessionFromHeaders();
const { orgId, user } = session;
if (!user || !orgId) {
return new Response('Unauthorized', { status: 401 });
}
return authenticate({
userId: user.id,
orgId,
userInfo: {
name: user.name ?? user.email ?? undefined,
avatar: user.image ?? undefined,
color: COLORS[Math.floor(Math.random() * COLORS.length)],
},
});
};

Binary file not shown.

Before

Width:  |  Height:  |  Size: 216 B

View File

@ -1,17 +0,0 @@
import { database } from '@konobangu/database';
export const POST = async () => {
const newPage = await database
.insertInto('page')
.values([
{
name: 'cron-temp',
},
])
.returning('id')
.executeTakeFirstOrThrow();
await database.deleteFrom('page').where('id', '=', newPage.id);
return new Response('OK', { status: 200 });
};

View File

@ -1,29 +0,0 @@
'use client';
import { Button } from '@konobangu/design-system/components/ui/button';
import { fonts } from '@konobangu/design-system/lib/fonts';
import { captureException } from '@sentry/nextjs';
import type NextError from 'next/error';
import { useEffect } from 'react';
type GlobalErrorProperties = {
readonly error: NextError & { digest?: string };
readonly reset: () => void;
};
const GlobalError = ({ error, reset }: GlobalErrorProperties) => {
useEffect(() => {
captureException(error);
}, [error]);
return (
<html lang="en" className={fonts}>
<body>
<h1>Oops, something went wrong</h1>
<Button onClick={() => reset()}>Try again</Button>
</body>
</html>
);
};
export default GlobalError;

View File

@ -1,3 +0,0 @@
export const runtime = 'edge';
export const GET = (): Response => new Response('OK', { status: 200 });

Binary file not shown.

Before

Width:  |  Height:  |  Size: 96 B

View File

@ -1,18 +0,0 @@
import '@konobangu/design-system/styles/globals.css';
import { DesignSystemProvider } from '@konobangu/design-system';
import { fonts } from '@konobangu/design-system/lib/fonts';
import type { ReactNode } from 'react';
type RootLayoutProperties = {
readonly children: ReactNode;
};
const RootLayout = ({ children }: RootLayoutProperties) => (
<html lang="en" className={fonts} suppressHydrationWarning>
<body>
<DesignSystemProvider>{children}</DesignSystemProvider>
</body>
</html>
);
export default RootLayout;

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

View File

@ -1,3 +0,0 @@
import { initializeSentry } from '@konobangu/next-config/instrumentation';
export const register = initializeSentry();

View File

@ -1 +0,0 @@
export * from '@konobangu/collaboration/config';

View File

@ -1,22 +0,0 @@
import { authMiddleware } from '@konobangu/auth/middleware';
import {
noseconeConfig,
noseconeMiddleware,
} from '@konobangu/security/middleware';
import type { NextRequest } from 'next/server';
const securityHeaders = noseconeMiddleware(noseconeConfig);
export async function middleware(_request: NextRequest) {
const response = await securityHeaders();
return authMiddleware(response as any);
}
export const config = {
matcher: [
// Skip Next.js internals and all static files, unless found in search params
'/((?!_next|[^?]*\\.(?:html?|css|js(?!on)|jpe?g|webp|png|gif|svg|ttf|woff2?|ico|csv|docx?|xlsx?|zip|webmanifest)).*)',
// Always run for API routes
'/(api|trpc)(.*)',
],
};

View File

@ -1,15 +0,0 @@
import { env } from '@konobangu/env';
import { config, withAnalyzer, withSentry } from '@konobangu/next-config';
import type { NextConfig } from 'next';
let nextConfig: NextConfig = { ...config };
if (env.VERCEL) {
nextConfig = withSentry(nextConfig);
}
if (env.ANALYZE === 'true') {
nextConfig = withAnalyzer(nextConfig);
}
export default nextConfig;

View File

@ -1,51 +0,0 @@
{
"name": "app",
"private": true,
"scripts": {
"dev": "next dev -p 5000 --turbopack",
"build": "next build",
"start": "next start",
"analyze": "ANALYZE=true pnpm build",
"test": "vitest run",
"clean": "git clean -xdf .cache .turbo dist node_modules",
"typecheck": "tsc --noEmit --emitDeclarationOnly false"
},
"dependencies": {
"@konobangu/analytics": "workspace:*",
"@konobangu/auth": "workspace:*",
"@konobangu/collaboration": "workspace:*",
"@konobangu/database": "workspace:*",
"@konobangu/design-system": "workspace:*",
"@konobangu/env": "workspace:*",
"@konobangu/feature-flags": "workspace:*",
"@konobangu/migrate": "workspace:*",
"@konobangu/next-config": "workspace:*",
"@konobangu/security": "workspace:*",
"@konobangu/seo": "workspace:*",
"@konobangu/tailwind-config": "workspace:*",
"@konobangu/webhooks": "workspace:*",
"@prisma/client": "6.0.1",
"@sentry/nextjs": "^8.48.0",
"fuse.js": "^7.0.0",
"import-in-the-middle": "^1.12.0",
"lucide-react": "^0.468.0",
"next": "^15.1.4",
"next-themes": "^0.4.4",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"require-in-the-middle": "^7.4.0"
},
"devDependencies": {
"@konobangu/testing": "workspace:*",
"@konobangu/typescript-config": "workspace:*",
"@testing-library/dom": "^10.4.0",
"@testing-library/react": "^16.1.0",
"@types/node": "22.10.1",
"@types/react": "19.0.1",
"@types/react-dom": "19.0.2",
"jsdom": "^25.0.1",
"tailwindcss": "^3.4.17",
"typescript": "^5.7.3",
"vitest": "^2.1.8"
}
}

View File

@ -1 +0,0 @@
export { default } from '@konobangu/design-system/postcss.config.mjs';

View File

@ -1,34 +0,0 @@
/*
* This file configures the initialization of Sentry on the client.
* The config you add here will be used whenever a users loads a page in their browser.
* https://docs.sentry.io/platforms/javascript/guides/nextjs/
*/
import { init, replayIntegration } from '@sentry/nextjs';
init({
dsn: process.env.NEXT_PUBLIC_SENTRY_DSN,
// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
replaysOnErrorSampleRate: 1,
/*
* This sets the sample rate to be 10%. You may want this to be 100% while
* in development and sample at a lower rate in production
*/
replaysSessionSampleRate: 0.1,
// You can remove this option if you're not planning to use the Sentry Session Replay feature:
integrations: [
replayIntegration({
// Additional Replay configuration goes in here, for example:
maskAllText: true,
blockAllMedia: true,
}),
],
});

View File

@ -1 +0,0 @@
export { config as default } from '@konobangu/tailwind-config/config';

View File

@ -1,17 +0,0 @@
{
"extends": "@konobangu/typescript-config/nextjs.json",
"compilerOptions": {
"baseUrl": ".",
"paths": {
"@/*": ["./*"],
"@konobangu/*": ["../../packages/*"]
}
},
"include": [
"next-env.d.ts",
"next.config.ts",
"**/*.ts",
"**/*.tsx",
".next/types/**/*.ts"
]
}

View File

@ -1,8 +0,0 @@
{
"crons": [
{
"path": "/cron/keep-alive",
"schedule": "0 1 * * *"
}
]
}

View File

@ -1 +0,0 @@
export { default } from '@konobangu/testing';

View File

@ -4,8 +4,5 @@
"scripts": {
"dev": "npx --yes mintlify dev --port 5004",
"lint": "npx --yes mintlify broken-links"
},
"devDependencies": {
"typescript": "^5.7.3"
}
}

View File

@ -10,15 +10,11 @@
"typecheck": "tsc --noEmit --emitDeclarationOnly false"
},
"dependencies": {
"@konobangu/email": "workspace:*",
"@react-email/components": "0.0.31",
"react": "^19.0.0",
"react-email": "3.0.4"
},
"devDependencies": {
"@konobangu/typescript-config": "workspace:*",
"@types/node": "22.10.1",
"@types/react": "19.0.1",
"typescript": "^5.7.3"
"@types/react": "19.0.1"
}
}

View File

@ -1,5 +1,9 @@
{
"extends": "@konobangu/typescript-config/nextjs.json",
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"composite": true,
"jsx": "react-jsx"
},
"include": ["**/*.ts", "**/*.tsx"],
"exclude": ["node_modules"]
}

View File

@ -22,7 +22,6 @@ testcontainers = [
]
[dependencies]
loco-rs = { version = "0.14" }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
@ -37,12 +36,11 @@ sea-orm = { version = "1.1", features = [
"debug-print",
] }
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
axum = "0.8"
uuid = { version = "1.6.0", features = ["v4"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio-rustls"] }
reqwest = { version = "0.12", features = [
reqwest = { version = "0.12", default-features = false, features = [
"charset",
"http2",
"json",
@ -96,11 +94,20 @@ seaography = { version = "1.1" }
quirks_path = "0.1.1"
base64 = "0.22.1"
tower = "0.5.2"
axum-extra = "0.10.0"
tower-http = "0.6.2"
axum-extra = "0.10"
tower-http = { version = "0.6", features = [
"trace",
"catch-panic",
"timeout",
"add-extension",
"cors",
"fs",
"set-header",
"compression-full",
] }
serde_yaml = "0.9.34"
tera = "1.20.0"
openidconnect = "4"
openidconnect = { version = "4", features = ["rustls-tls"] }
http-cache-reqwest = { version = "0.15", features = [
"manager-cacache",
"manager-moka",
@ -117,10 +124,15 @@ nom = "8.0.0"
secrecy = { version = "0.10.3", features = ["serde"] }
http = "1.2.0"
cookie = "0.18.1"
async-stream = "0.3.6"
serde_variant = "0.1.3"
tracing-appender = "0.2.3"
clap = "4.5.31"
futures-util = "0.3.31"
ipnetwork = "0.21.1"
[dev-dependencies]
serial_test = "3"
loco-rs = { version = "0.14", features = ["testing"] }
insta = { version = "1", features = ["redactions", "yaml", "filters"] }
mockito = "1.6.1"
rstest = "0.24.0"

View File

@ -1,145 +0,0 @@
# Loco configuration file documentation
# Application logging configuration
logger:
# Enable or disable logging.
enable: true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace: true
# Log level, options: trace, debug, info, warn or error.
level: debug
# Define the logging format. options: compact, pretty or Json
format: compact
# By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries
# Uncomment the line below to override to see all third party libraries you can enable this config and override the logger filters.
# override_filter: trace
# Web server configuration
server:
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
port: 5001
binding: "0.0.0.0"
# The UI hostname or IP address that mailers will point to.
host: '{{ get_env(name="HOST", default="localhost") }}'
# Out-of-the-box middleware configuration. To disable a middleware, change its `enable` field to `false` or comment out the middleware block.
middlewares:
# Enable Etag cache header middleware
etag:
enable: true
# Allows to limit the payload size request. payload that bigger than this file will blocked the request.
limit_payload:
# Enable/Disable the middleware.
enable: true
# the limit size. can be b,kb,kib,mb,mib,gb,gib
body_limit: 5mb
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
logger:
# Enable/Disable the middleware.
enable: true
# when your code is panicked, the request still returns 500 status code.
catch_panic:
# Enable/Disable the middleware.
enable: true
# Timeout middleware for incoming requests. Requests that take longer than the configured duration are cut off and a 408 status code is returned.
timeout_request:
# Enable/Disable the middleware.
enable: false
# Duration time in milliseconds.
timeout: 5000
cors:
enable: true
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://loco.rs
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
fallback:
enable: false
# Worker Configuration
workers:
# specifies the worker mode. Options:
# - BackgroundQueue - Workers operate asynchronously in the background, processing queued.
# - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
# - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
mode: BackgroundAsync
# Mailer Configuration.
mailer:
# SMTP mailer configuration.
smtp:
# Enable/Disable smtp mailer.
enable: true
# SMTP server host. e.x localhost, smtp.gmail.com
host: '{{ get_env(name="MAILER_HOST", default="localhost") }}'
# SMTP server port
port: 1025
# Use secure connection (SSL/TLS).
secure: false
# auth:
# user:
# password:
# Database Configuration
database:
# Database connection URI
uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@localhost:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging: true
# Set the timeout duration when acquiring a connection.
connect_timeout: 500
# Set the idle duration before closing a connection.
idle_timeout: 500
# Minimum number of connections for a pool.
min_connections: 1
# Maximum number of connections for a pool.
max_connections: 1
# Run migration up when application loaded
auto_migrate: true
# Truncate database when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_truncate: false
# Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_recreate: false
# Redis Configuration
redis:
# Redis connection URI
uri: '{{ get_env(name="REDIS_URL", default="redis://localhost:6379") }}'
# Dangerously flush all data in Redis on startup. dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_flush: false
settings:
dal:
data_dir: '{{ get_env(name="DAL_DATA_DIR", default="./data") }}'
mikan:
base_url: "https://mikanani.me/"
http_client:
exponential_backoff_max_retries: 3
leaky_bucket_max_tokens: 2
leaky_bucket_initial_tokens: 0
leaky_bucket_refill_tokens: 1
leaky_bucket_refill_interval: 500
auth:
auth_type: '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
basic_user: '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
basic_password: '{{ get_env(name="BASIC_PASSWORD", default = "konobangu") }}'
oidc_issuer: '{{ get_env(name="OIDC_ISSUER", default = "") }}'
oidc_audience: '{{ get_env(name="OIDC_AUDIENCE", default = "") }}'
oidc_client_id: '{{ get_env(name="OIDC_CLIENT_ID", default = "") }}'
oidc_client_secret: '{{ get_env(name="OIDC_CLIENT_SECRET", default = "") }}'
oidc_extra_scopes: '{{ get_env(name="OIDC_EXTRA_SCOPES", default = "") }}'
oidc_extra_claim_key: '{{ get_env(name="OIDC_EXTRA_CLAIM_KEY", default = "") }}'
oidc_extra_claim_value: '{{ get_env(name="OIDC_EXTRA_CLAIM_VALUE", default = "") }}'
graphql:
depth_limit: null
complexity_limit: null

View File

@ -1,125 +0,0 @@
# Loco configuration file documentation
# Application logging configuration
logger:
# Enable or disable logging.
enable: true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace: true
# Log level, options: trace, debug, info, warn or error.
level: debug
# Define the logging format. options: compact, pretty or Json
format: compact
# By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries
# Uncomment the line below to override to see all third party libraries you can enable this config and override the logger filters.
# override_filter: trace
# Web server configuration
server:
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
port: 5001
# The UI hostname or IP address that mailers will point to.
host: http://webui.konobangu.com
# Out-of-the-box middleware configuration. To disable a middleware, change its `enable` field to `false` or comment out the middleware block.
middlewares:
# Enable Etag cache header middleware
etag:
enable: true
# Allows to limit the payload size request. payload that bigger than this file will blocked the request.
limit_payload:
# Enable/Disable the middleware.
enable: true
# the limit size. can be b,kb,kib,mb,mib,gb,gib
body_limit: 5mb
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
logger:
# Enable/Disable the middleware.
enable: true
# when your code is panicked, the request still returns 500 status code.
catch_panic:
# Enable/Disable the middleware.
enable: true
# Timeout middleware for incoming requests. Requests that take longer than the configured duration are cut off and a 408 status code is returned.
timeout_request:
# Enable/Disable the middleware.
enable: false
# Duration time in milliseconds.
timeout: 5000
cors:
enable: true
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://loco.rs
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
# Worker Configuration
workers:
# specifies the worker mode. Options:
# - BackgroundQueue - Workers operate asynchronously in the background, processing queued.
# - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
# - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
mode: BackgroundQueue
# Mailer Configuration.
mailer:
# SMTP mailer configuration.
smtp:
# Enable/Disable smtp mailer.
enable: true
# SMTP server host. e.x localhost, smtp.gmail.com
host: '{{ get_env(name="MAILER_HOST", default="localhost") }}'
# SMTP server port
port: 1025
# Use secure connection (SSL/TLS).
secure: false
# auth:
# user:
# password:
# Database Configuration
database:
# Database connection URI
uri: '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@127.0.0.1:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging: true
# Set the timeout duration when acquiring a connection.
connect_timeout: 500
# Set the idle duration before closing a connection.
idle_timeout: 500
# Minimum number of connections for a pool.
min_connections: 1
# Maximum number of connections for a pool.
max_connections: 1
# Run migration up when application loaded
auto_migrate: true
# Truncate database when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_truncate: false
# Recreating schema when application loaded. This is a dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_recreate: false
# Redis Configuration
redis:
# Redis connection URI
uri: '{{ get_env(name="REDIS_URL", default="redis://127.0.0.1:6379") }}'
# Dangerously flush all data in Redis on startup. dangerous operation, make sure that you using this flag only on dev environments or test mode
dangerously_flush: false
settings:
dal:
data_dir: ./temp
mikan:
http_client:
exponential_backoff_max_retries: 3
leaky_bucket_max_tokens: 2
leaky_bucket_initial_tokens: 0
leaky_bucket_refill_tokens: 1
leaky_bucket_refill_interval: 500
user_agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0"
base_url: "https://mikanani.me/"

View File

@ -1,65 +1,68 @@
#![allow(unused_imports)]
use color_eyre::eyre::Context;
use itertools::Itertools;
use loco_rs::{
app::Hooks,
boot::{BootResult, StartMode},
environment::Environment,
prelude::*,
};
use recorder::{
app::App,
migrations::Migrator,
models::{
subscribers::SEED_SUBSCRIBER,
subscriptions::{self, SubscriptionCreateFromRssDto},
},
};
use sea_orm_migration::MigratorTrait;
// #![allow(unused_imports)]
// use color_eyre::eyre::Context;
// use itertools::Itertools;
// use loco_rs::{
// app::Hooks,
// boot::{BootResult, StartMode},
// environment::Environment,
// prelude::AppContext as LocoContext,
// };
// use recorder::{
// app::{App1, AppContext},
// errors::RResult,
// migrations::Migrator,
// models::{
// subscribers::SEED_SUBSCRIBER,
// subscriptions::{self, SubscriptionCreateFromRssDto},
// },
// };
// use sea_orm::ColumnTrait;
// use sea_orm_migration::MigratorTrait;
async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> color_eyre::eyre::Result<()> {
let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
// async fn pull_mikan_bangumi_rss(ctx: &AppContext) -> RResult<()> {
// let rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=3416&subgroupid=370";
// let rss_link =
// "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
let subscription = if let Some(subscription) = subscriptions::Entity::find()
.filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
.one(&ctx.db)
.await?
{
subscription
} else {
subscriptions::Model::add_subscription(
ctx,
subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
rss_link: rss_link.to_string(),
display_name: String::from("Mikan Project - 我的番组"),
enabled: Some(true),
}),
1,
)
.await?
};
// // let rss_link =
// // "https://mikanani.me/RSS/MyBangumi?token=FE9tccsML2nBPUUqpCuJW2uJZydAXCntHJ7RpD9LDP8%3d";
// let subscription = if let Some(subscription) =
// subscriptions::Entity::find()
// .filter(subscriptions::Column::SourceUrl.eq(String::from(rss_link)))
// .one(&ctx.db)
// .await?
// {
// subscription
// } else {
// subscriptions::Model::add_subscription(
// ctx,
//
// subscriptions::SubscriptionCreateDto::Mikan(SubscriptionCreateFromRssDto {
// rss_link: rss_link.to_string(),
// display_name: String::from("Mikan Project - 我的番组"),
// enabled: Some(true),
// }),
// 1,
// )
// .await?
// };
subscription.pull_subscription(ctx).await?;
// subscription.pull_subscription(ctx).await?;
Ok(())
}
// Ok(())
// }
async fn init() -> color_eyre::eyre::Result<AppContext> {
color_eyre::install()?;
let ctx = loco_rs::cli::playground::<App>().await?;
let BootResult {
app_context: ctx, ..
} = loco_rs::boot::run_app::<App>(&StartMode::ServerOnly, ctx).await?;
Migrator::up(&ctx.db, None).await?;
Ok(ctx)
}
// async fn init() -> RResult<LocoContext> {
// let ctx = loco_rs::cli::playground::<App1>().await?;
// let BootResult {
// app_context: ctx, ..
// } = loco_rs::boot::run_app::<App1>(&StartMode::ServerOnly, ctx).await?;
// Migrator::up(&ctx.db, None).await?;
// Ok(ctx)
// }
#[tokio::main]
async fn main() -> color_eyre::eyre::Result<()> {
let ctx = init().await?;
pull_mikan_bangumi_rss(&ctx).await?;
// #[tokio::main]
// async fn main() -> color_eyre::eyre::Result<()> {
// pull_mikan_bangumi_rss(&ctx).await?;
Ok(())
}
// Ok(())
// }
fn main() {}

View File

@ -8,30 +8,17 @@
"preview": "rsbuild preview"
},
"dependencies": {
"@abraham/reflection": "^0.12.0",
"@graphiql/react": "^0.28.2",
"@graphiql/toolkit": "^0.11.1",
"@konobangu/design-system": "workspace:*",
"@konobangu/tailwind-config": "workspace:*",
"@outposts/injection-js": "^2.5.1",
"@tanstack/react-router": "^1.95.6",
"@tanstack/router-devtools": "^1.95.6",
"graphiql": "^3.8.3",
"graphql-ws": "^5.16.2",
"graphql-ws": "^6.0.4",
"observable-hooks": "^4.2.4",
"oidc-client-rx": "0.1.0-alpha.6",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"rxjs": "^7.8.1"
"react-dom": "^19.0.0"
},
"devDependencies": {
"@konobangu/typescript-config": "workspace:*",
"@rsbuild/core": "1.1.3",
"@rsbuild/plugin-react": "^1.1.1",
"@tanstack/router-plugin": "^1.95.6",
"@types/react": "^19.0.7",
"@types/react-dom": "^19.0.3",
"tailwindcss": "^3.4.17",
"typescript": "^5.7.3"
"@types/react-dom": "^19.0.3"
}
}

View File

@ -0,0 +1,5 @@
export default {
plugins: {
'@tailwindcss/postcss': {},
},
};

View File

@ -0,0 +1,103 @@
# Application logging configuration
[logger]
# Enable or disable logging.
enable = true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace = true
# Log level, options: trace, debug, info, warn or error.
level = "debug"
# Define the logging format. options: compact, pretty or Json
format = "compact"
# By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries
# Uncomment the line below to override to see all third party libraries you can enable this config and override the logger filters.
# override_filter: trace
# Web server configuration
[server]
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
port = 5001
binding = "0.0.0.0"
# The UI hostname or IP address that mailers will point to.
host = '{{ get_env(name="HOST", default="localhost") }}'
# Out-of-the-box middleware configuration. To disable a middleware, change its `enable` field to `false` or comment out the middleware block.
# Enable Etag cache header middleware
[server.middlewares.etag]
enable = true
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
[server.middleware.request_id]
enable = true
[server.middleware.logger]
enable = true
# when your code is panicked, the request still returns 500 status code.
[server.middleware.catch_panic]
enable = true
# Timeout middleware for incoming requests. Requests that take longer than the configured duration are cut off and a 408 status code is returned.
[server.middleware.timeout_request]
enable = false
# Duration time in milliseconds.
timeout = 5000
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://loco.rs
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
[server.middleware.cors]
enable = true
# Database Configuration
[database]
# Database connection URI
uri = '{{ get_env(name="DATABASE_URL", default="postgres://konobangu:konobangu@localhost:5432/konobangu") }}'
# When enabled, the sql query will be logged.
enable_logging = true
# Set the timeout duration when acquiring a connection.
connect_timeout = 500
# Set the idle duration before closing a connection.
idle_timeout = 500
# Minimum number of connections for a pool.
min_connections = 1
# Maximum number of connections for a pool.
max_connections = 10
# Run migration up when application loaded
auto_migrate = true
[storage]
data_dir = '{{ get_env(name="STORAGE_DATA_DIR", default="./data") }}'
[mikan]
base_url = "https://mikanani.me/"
[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 1
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500
[auth]
auth_type = '{{ get_env(name="AUTH_TYPE", default = "basic") }}'
basic_user = '{{ get_env(name="BASIC_USER", default = "konobangu") }}'
basic_password = '{{ get_env(name="BASIC_PASSWORD", default = "konobangu") }}'
oidc_issuer = '{{ get_env(name="OIDC_ISSUER", default = "") }}'
oidc_audience = '{{ get_env(name="OIDC_AUDIENCE", default = "") }}'
oidc_client_id = '{{ get_env(name="OIDC_CLIENT_ID", default = "") }}'
oidc_client_secret = '{{ get_env(name="OIDC_CLIENT_SECRET", default = "") }}'
oidc_extra_scopes = '{{ get_env(name="OIDC_EXTRA_SCOPES", default = "") }}'
oidc_extra_claim_key = '{{ get_env(name="OIDC_EXTRA_CLAIM_KEY", default = "") }}'
oidc_extra_claim_value = '{{ get_env(name="OIDC_EXTRA_CLAIM_VALUE", default = "") }}'
[graphql]
# depth_limit = inf
# complexity_limit = inf

View File

@ -0,0 +1,139 @@
use std::sync::Arc;
use clap::{Parser, command};
use super::{AppContext, core::App, env::Environment};
use crate::{app::config::AppConfig, errors::RResult};
/// Command-line arguments accepted by the main binary.
///
/// Each option overrides the corresponding discovery/default logic in
/// `AppBuilder::from_main_cli`.
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
pub struct MainCliArgs {
    /// Explicit config file path
    #[arg(short, long)]
    config_file: Option<String>,
    /// Explicit dotenv file path
    #[arg(short, long)]
    dotenv_file: Option<String>,
    /// Explicit working dir
    #[arg(short, long)]
    working_dir: Option<String>,
    /// Explicit environment
    #[arg(short, long)]
    environment: Option<Environment>,
}
/// Builder that resolves the environment, working directory, dotenv file
/// and config file before constructing the [`App`].
pub struct AppBuilder {
    dotenv_file: Option<String>,
    config_file: Option<String>,
    working_dir: String,
    environment: Environment,
}

impl AppBuilder {
    /// Construct a builder from the process CLI arguments.
    ///
    /// Environment resolution order: explicit `environment` argument, then
    /// the `--environment` CLI flag, then a compile-time default
    /// (test -> Testing, debug -> Development, otherwise Production).
    ///
    /// # Errors
    /// Currently infallible in practice; returns `RResult` for forward
    /// compatibility with fallible argument handling.
    pub async fn from_main_cli(environment: Option<Environment>) -> RResult<Self> {
        let args = MainCliArgs::parse();

        let environment = environment.unwrap_or_else(|| {
            args.environment.unwrap_or({
                if cfg!(test) {
                    Environment::Testing
                } else if cfg!(debug_assertions) {
                    Environment::Development
                } else {
                    Environment::Production
                }
            })
        });

        let mut builder = Self::default();

        if let Some(working_dir) = args.working_dir {
            // An explicit --working-dir always wins.
            // (Previously the dev/test manifest-dir fallback below
            // unconditionally overwrote it.)
            builder = builder.working_dir(working_dir);
        } else if matches!(
            &environment,
            Environment::Testing | Environment::Development
        ) {
            // In dev/test, default to the crate manifest dir so relative
            // paths resolve regardless of the invocation cwd.
            builder = builder.working_dir_from_manifest_dir();
        }

        builder = builder
            .config_file(args.config_file)
            .dotenv_file(args.dotenv_file)
            .environment(environment);

        Ok(builder)
    }

    /// Load dotenv and config for the chosen environment/working dir, then
    /// construct the [`App`] with a freshly built [`AppContext`].
    ///
    /// # Errors
    /// Propagates dotenv/config loading errors and context construction
    /// errors.
    pub async fn build(self) -> RResult<App> {
        AppConfig::load_dotenv(
            &self.environment,
            &self.working_dir,
            self.dotenv_file.as_deref(),
        )
        .await?;

        let config = AppConfig::load_config(
            &self.environment,
            &self.working_dir,
            self.config_file.as_deref(),
        )
        .await?;

        let app_context = Arc::new(
            AppContext::new(self.environment.clone(), config, self.working_dir.clone()).await?,
        );

        Ok(App {
            context: app_context,
            builder: self,
        })
    }

    /// Override the working directory.
    pub fn working_dir(mut self, working_dir: String) -> Self {
        self.working_dir = working_dir;
        self
    }

    /// Override the environment.
    pub fn environment(mut self, environment: Environment) -> Self {
        self.environment = environment;
        self
    }

    /// Override the config file path (`None` keeps directory discovery).
    pub fn config_file(mut self, config_file: Option<String>) -> Self {
        self.config_file = config_file;
        self
    }

    /// Override the dotenv file path (`None` keeps directory discovery).
    pub fn dotenv_file(mut self, dotenv_file: Option<String>) -> Self {
        self.dotenv_file = dotenv_file;
        self
    }

    /// Point the working dir at the crate manifest dir in debug/test
    /// builds, or at the conventional `./apps/recorder` path otherwise.
    pub fn working_dir_from_manifest_dir(self) -> Self {
        let manifest_dir = if cfg!(debug_assertions) || cfg!(test) {
            env!("CARGO_MANIFEST_DIR")
        } else {
            "./apps/recorder"
        };
        self.working_dir(manifest_dir.to_string())
    }
}

impl Default for AppBuilder {
    fn default() -> Self {
        Self {
            environment: Environment::Production,
            dotenv_file: None,
            config_file: None,
            working_dir: String::from("."),
        }
    }
}

View File

@ -0,0 +1,18 @@
[storage]
data_dir = "./data"
[mikan]
base_url = "https://mikanani.me/"
[mikan.http_client]
exponential_backoff_max_retries = 3
leaky_bucket_max_tokens = 2
leaky_bucket_initial_tokens = 0
leaky_bucket_refill_tokens = 1
leaky_bucket_refill_interval = 500
[graphql]
depth_limit = inf
complexity_limit = inf
[cache]

View File

@ -0,0 +1,176 @@
use std::{fs, path::Path, str};
use figment::{
Figment, Provider,
providers::{Format, Json, Toml, Yaml},
};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use super::env::Environment;
use crate::{
auth::AuthConfig, cache::CacheConfig, database::DatabaseConfig, errors::RResult,
extract::mikan::MikanConfig, graphql::GraphQLConfig, logger::LoggerConfig,
storage::StorageConfig, web::WebServerConfig,
};
const DEFAULT_CONFIG_MIXIN: &str = include_str!("./default_mixin.toml");
const CONFIG_ALLOWED_EXTENSIONS: &[&str] = &[".toml", ".json", ".yaml", ".yml"];
/// Top-level application configuration, extracted from the merged figment
/// providers (compiled-in default mixin plus discovered config files).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppConfig {
    pub server: WebServerConfig,
    pub cache: CacheConfig,
    pub auth: AuthConfig,
    pub storage: StorageConfig,
    pub mikan: MikanConfig,
    pub graphql: GraphQLConfig,
    pub logger: LoggerConfig,
    pub database: DatabaseConfig,
}
impl AppConfig {
    /// Filename stem for config files, e.g. `<crate>.config`.
    pub fn config_prefix() -> String {
        format!("{}.config", env!("CARGO_PKG_NAME"))
    }

    /// Filename stem for dotenv files.
    pub fn dotenv_prefix() -> String {
        String::from(".env")
    }

    /// Allowed config file extensions, in dotted form (e.g. `".toml"`).
    pub fn allowed_extension() -> Vec<String> {
        CONFIG_ALLOWED_EXTENSIONS
            .iter()
            .map(|s| s.to_string())
            .collect_vec()
    }

    /// Filename suffixes tried in priority order, most specific first:
    /// `.{env}.local`, `.{short}.local`, `.local`, `{env}`, `{short}`, `""`.
    pub fn priority_suffix(environment: &Environment) -> Vec<String> {
        vec![
            format!(".{}.local", environment.full_name()),
            format!(".{}.local", environment.short_name()),
            String::from(".local"),
            environment.full_name().to_string(),
            environment.short_name().to_string(),
            String::from(""),
        ]
    }

    /// Baseline provider holding the compiled-in default mixin.
    pub fn default_provider() -> impl Provider {
        Toml::string(DEFAULT_CONFIG_MIXIN)
    }

    /// Render `filepath` through a one-off tera pass (so `{{ get_env(...) }}`
    /// templates in config files are expanded) and merge it into `fig`.
    ///
    /// `ext` must be one of `CONFIG_ALLOWED_EXTENSIONS` (dotted form);
    /// callers are expected to validate before calling.
    ///
    /// # Errors
    /// I/O, template-rendering, or figment merge errors.
    pub fn merge_provider_from_file(
        fig: Figment,
        filepath: impl AsRef<Path>,
        ext: &str,
    ) -> RResult<Figment> {
        let content = fs::read_to_string(filepath)?;

        let rendered = tera::Tera::one_off(
            &content,
            &tera::Context::from_value(serde_json::json!({}))?,
            false,
        )?;

        Ok(match ext {
            ".toml" => fig.merge(Toml::string(&rendered)),
            ".json" => fig.merge(Json::string(&rendered)),
            ".yaml" | ".yml" => fig.merge(Yaml::string(&rendered)),
            _ => unreachable!("unsupported config extension"),
        })
    }

    /// Locate and load the first matching dotenv file.
    ///
    /// If `dotenv_file` is given it is used directly; otherwise
    /// `working_dir` is scanned for `.env{suffix}` files in priority order
    /// and the first existing file wins.
    ///
    /// # Errors
    /// Propagates `dotenv::from_path` failures.
    pub async fn load_dotenv(
        environment: &Environment,
        working_dir: &str,
        dotenv_file: Option<&str>,
    ) -> RResult<()> {
        let try_dotenv_file_or_dirs = if dotenv_file.is_some() {
            vec![dotenv_file]
        } else {
            vec![Some(working_dir)]
        };

        let priority_suffix = &AppConfig::priority_suffix(environment);
        let dotenv_prefix = AppConfig::dotenv_prefix();
        let try_filenames = priority_suffix
            .iter()
            .map(|ps| format!("{}{}", &dotenv_prefix, ps))
            .collect_vec();

        for try_dotenv_file_or_dir in try_dotenv_file_or_dirs.into_iter().flatten() {
            let try_dotenv_file_or_dir_path = Path::new(try_dotenv_file_or_dir);
            if try_dotenv_file_or_dir_path.exists() {
                if try_dotenv_file_or_dir_path.is_dir() {
                    for f in try_filenames.iter() {
                        let p = try_dotenv_file_or_dir_path.join(f);
                        if p.exists() && p.is_file() {
                            dotenv::from_path(p)?;
                            break;
                        }
                    }
                } else if try_dotenv_file_or_dir_path.is_file() {
                    dotenv::from_path(try_dotenv_file_or_dir_path)?;
                    break;
                }
            }
        }

        Ok(())
    }

    /// Build the final `AppConfig` by merging the default mixin with the
    /// first matching config file.
    ///
    /// If `config_file` is given it is used directly; otherwise
    /// `working_dir` is scanned for `{prefix}{suffix}{ext}` candidates in
    /// priority order.
    ///
    /// # Errors
    /// File/render/merge errors, or figment extraction errors when the
    /// merged config does not deserialize into `AppConfig`.
    pub async fn load_config(
        environment: &Environment,
        working_dir: &str,
        config_file: Option<&str>,
    ) -> RResult<AppConfig> {
        let try_config_file_or_dirs = if config_file.is_some() {
            vec![config_file]
        } else {
            vec![Some(working_dir)]
        };

        let allowed_extensions = &AppConfig::allowed_extension();
        let priority_suffix = &AppConfig::priority_suffix(environment);
        let convention_prefix = &AppConfig::config_prefix();

        let try_filenames = priority_suffix
            .iter()
            .flat_map(|ps| {
                allowed_extensions
                    .iter()
                    .map(move |ext| (format!("{}{}{}", convention_prefix, ps, ext), ext))
            })
            .collect_vec();

        let mut fig = Figment::from(AppConfig::default_provider());

        for try_config_file_or_dir in try_config_file_or_dirs.into_iter().flatten() {
            let try_config_file_or_dir_path = Path::new(try_config_file_or_dir);
            if try_config_file_or_dir_path.exists() {
                if try_config_file_or_dir_path.is_dir() {
                    for (f, ext) in try_filenames.iter() {
                        let p = try_config_file_or_dir_path.join(f);
                        if p.exists() && p.is_file() {
                            fig = AppConfig::merge_provider_from_file(fig, &p, ext)?;
                            break;
                        }
                    }
                } else if let Some(ext) = try_config_file_or_dir_path
                    .extension()
                    .and_then(|s| s.to_str())
                    && try_config_file_or_dir_path.is_file()
                {
                    // BUGFIX: `Path::extension()` yields e.g. "toml" without
                    // the leading dot, while `merge_provider_from_file`
                    // matches only the dotted forms — the unnormalized value
                    // previously hit `unreachable!` for every explicit config
                    // file. Normalize and validate before merging.
                    let ext = format!(".{}", ext.to_lowercase());
                    if allowed_extensions.contains(&ext) {
                        fig = AppConfig::merge_provider_from_file(
                            fig,
                            try_config_file_or_dir_path,
                            &ext,
                        )?;
                        break;
                    }
                }
            }
        }

        let app_config: AppConfig = fig.extract()?;

        Ok(app_config)
    }
}

View File

@ -0,0 +1,50 @@
use super::{Environment, config::AppConfig};
use crate::{
auth::AuthService, cache::CacheService, database::DatabaseService, errors::RResult,
extract::mikan::MikanClient, graphql::GraphQLService, logger::LoggerService,
storage::StorageService,
};
/// Shared application state: every long-lived service plus the resolved
/// configuration, environment and working directory. Built once by
/// [`AppContext::new`] and shared behind an `Arc` by the app.
pub struct AppContext {
    pub logger: LoggerService,
    pub db: DatabaseService,
    // Full copy of the config the services were built from.
    pub config: AppConfig,
    pub cache: CacheService,
    pub mikan: MikanClient,
    pub auth: AuthService,
    pub graphql: GraphQLService,
    pub storage: StorageService,
    // Working directory the app was built with.
    pub working_dir: String,
    pub environment: Environment,
}
impl AppContext {
    /// Construct every service from its config section and assemble the
    /// shared context.
    ///
    /// # Errors
    /// Returns the first service-construction error encountered.
    pub async fn new(
        environment: Environment,
        config: AppConfig,
        working_dir: impl ToString,
    ) -> RResult<Self> {
        // The `from_config` calls below consume their config sections, so
        // keep a full copy to store on the context.
        let config_cloned = config.clone();

        // Services are initialized one at a time, logger first — NOTE(review):
        // the sequence may matter for logging during startup; confirm before
        // parallelizing.
        let logger = LoggerService::from_config(config.logger).await?;
        let cache = CacheService::from_config(config.cache).await?;
        let db = DatabaseService::from_config(config.database).await?;
        let storage = StorageService::from_config(config.storage).await?;
        let auth = AuthService::from_conf(config.auth).await?;
        let mikan = MikanClient::from_config(config.mikan).await?;
        let graphql = GraphQLService::from_config_and_database(config.graphql, db.clone()).await?;

        Ok(AppContext {
            config: config_cloned,
            environment,
            logger,
            auth,
            cache,
            db,
            storage,
            mikan,
            working_dir: working_dir.to_string(),
            graphql,
        })
    }
}

View File

@ -0,0 +1,89 @@
use std::{net::SocketAddr, sync::Arc};
use axum::Router;
use futures::try_join;
use tokio::signal;
use super::{builder::AppBuilder, context::AppContext};
use crate::{
errors::RResult,
web::{
controller::{self, core::ControllerTrait},
middleware::default_middleware_stack,
},
};
/// The running application: the shared context plus the builder it was
/// constructed from.
pub struct App {
    pub context: Arc<AppContext>,
    pub builder: AppBuilder,
}
impl App {
    /// Entry point for configuring a new app.
    pub fn builder() -> AppBuilder {
        AppBuilder::default()
    }

    /// Bind the configured address, assemble controllers and middleware,
    /// and serve HTTP until a shutdown signal arrives.
    ///
    /// # Errors
    /// Bind failures, controller construction errors, middleware
    /// application errors, or errors from `axum::serve`.
    pub async fn serve(&self) -> RResult<()> {
        let context = &self.context;
        let config = &context.config;
        let listener = tokio::net::TcpListener::bind(&format!(
            "{}:{}",
            config.server.binding, config.server.port
        ))
        .await?;

        let mut router = Router::<Arc<AppContext>>::new();

        // Build the graphql and oidc controllers concurrently, then mount
        // each onto the router.
        let (graphqlc, oidcc) = try_join!(
            controller::graphql::create(context.clone()),
            controller::oidc::create(context.clone()),
        )?;

        for c in [graphqlc, oidcc] {
            router = c.apply_to(router);
        }

        // Middlewares are applied in stack order; each logs its name.
        let middlewares = default_middleware_stack(context.clone());
        for mid in middlewares {
            router = mid.apply(router)?;
            tracing::info!(name = mid.name(), "+middleware");
        }

        let router = router
            .with_state(context.clone())
            .into_make_service_with_connect_info::<SocketAddr>();

        // Serve until Ctrl-C (or SIGTERM on unix) triggers graceful shutdown.
        axum::serve(listener, router)
            .with_graceful_shutdown(async move {
                Self::shutdown_signal().await;
                tracing::info!("shutting down...");
            })
            .await?;

        Ok(())
    }

    /// Resolve when either Ctrl-C or (on unix) SIGTERM is received.
    async fn shutdown_signal() {
        let ctrl_c = async {
            signal::ctrl_c()
                .await
                .expect("failed to install Ctrl+C handler");
        };

        #[cfg(unix)]
        let terminate = async {
            signal::unix::signal(signal::unix::SignalKind::terminate())
                .expect("failed to install signal handler")
                .recv()
                .await;
        };

        // On non-unix targets there is no SIGTERM; wait only on Ctrl-C.
        #[cfg(not(unix))]
        let terminate = std::future::pending::<()>();

        tokio::select! {
            () = ctrl_c => {},
            () = terminate => {},
        }
    }
}

View File

@ -0,0 +1,35 @@
use clap::ValueEnum;
use serde::{Deserialize, Serialize};
/// Runtime environment of the application.
///
/// Usable both as a serde value and as a clap CLI argument; both forms use
/// snake_case and accept the short aliases "dev", "prod" and "test".
#[derive(Debug, Clone, Serialize, Deserialize, ValueEnum)]
#[serde(rename_all = "snake_case")]
#[value(rename_all = "snake_case")]
pub enum Environment {
    #[serde(alias = "dev")]
    #[value(alias = "dev")]
    Development,
    #[serde(alias = "prod")]
    #[value(alias = "prod")]
    Production,
    #[serde(alias = "test")]
    #[value(alias = "test")]
    Testing,
}
impl Environment {
    /// Long-form name of the environment ("development", "production",
    /// "testing").
    pub fn full_name(&self) -> &'static str {
        // `match self` instead of `match &self`: `self` is already a
        // reference, so the extra borrow was a needless double-reference.
        match self {
            Self::Development => "development",
            Self::Production => "production",
            Self::Testing => "testing",
        }
    }

    /// Abbreviated name, mirroring the serde/clap aliases ("dev", "prod",
    /// "test").
    pub fn short_name(&self) -> &'static str {
        match self {
            Self::Development => "dev",
            Self::Production => "prod",
            Self::Testing => "test",
        }
    }
}

View File

@ -1,34 +0,0 @@
use loco_rs::{app::AppContext, environment::Environment};
use crate::{
auth::service::AppAuthService, dal::AppDalClient, extract::mikan::AppMikanClient,
graphql::service::AppGraphQLService,
};
/// Convenience accessors hung off loco's `AppContext`.
///
/// Each getter returns a process-wide singleton via `app_instance()`; those
/// singletons are backed by `OnceCell`s that `expect(...)` on access, so
/// calling a getter before the matching initializer has run will panic.
pub trait AppContextExt {
    fn get_dal_client(&self) -> &AppDalClient {
        AppDalClient::app_instance()
    }

    fn get_mikan_client(&self) -> &AppMikanClient {
        AppMikanClient::app_instance()
    }

    fn get_auth_service(&self) -> &AppAuthService {
        AppAuthService::app_instance()
    }

    fn get_graphql_service(&self) -> &AppGraphQLService {
        AppGraphQLService::app_instance()
    }

    /// Map `NODE_ENV` onto loco's `Environment`: exactly "production"
    /// selects `Production`; anything else — including an unset variable —
    /// falls back to `Development`.
    fn get_node_env(&self) -> Environment {
        let node_env = std::env::var("NODE_ENV");
        match node_env.as_deref() {
            Ok("production") => Environment::Production,
            _ => Environment::Development,
        }
    }
}

impl AppContextExt for AppContext {}

View File

@ -1,208 +1,12 @@
pub mod ext;
pub mod builder;
pub mod config;
pub mod context;
pub mod core;
pub mod env;
use std::{
fs,
path::{self, Path, PathBuf},
sync::Arc,
};
pub use core::App;
use async_trait::async_trait;
pub use ext::AppContextExt;
use itertools::Itertools;
use loco_rs::{
app::{AppContext, Hooks},
boot::{create_app, BootResult, StartMode},
cache,
config::Config,
controller::{middleware, middleware::MiddlewareLayer, AppRoutes},
db::truncate_table,
environment::Environment,
prelude::*,
task::Tasks,
Result,
};
use once_cell::sync::OnceCell;
use crate::{
auth::service::AppAuthServiceInitializer,
controllers::{self},
dal::AppDalInitalizer,
extract::mikan::client::AppMikanClientInitializer,
graphql::service::AppGraphQLServiceInitializer,
migrations::Migrator,
models::subscribers,
workers::subscription_worker::SubscriptionWorker,
};
/// Environment variable that can override the working-root search path.
pub const WORKING_ROOT_VAR_NAME: &str = "WORKING_ROOT";

// Written once during config loading (see `load_config`), read-only after.
static APP_WORKING_ROOT: OnceCell<quirks_path::PathBuf> = OnceCell::new();

pub struct App;

impl App {
    /// Record the application working root.
    ///
    /// Only the FIRST call has any effect: `get_or_init` silently ignores
    /// the value on subsequent calls.
    pub fn set_working_root(path: PathBuf) {
        APP_WORKING_ROOT.get_or_init(|| {
            quirks_path::PathBuf::from(path.as_os_str().to_string_lossy().to_string())
        });
    }

    /// Fetch the working root; panics if `set_working_root` has not run yet.
    pub fn get_working_root() -> &'static quirks_path::Path {
        APP_WORKING_ROOT
            .get()
            .map(|p| p.as_path())
            .expect("working root not set")
    }
}
#[async_trait]
impl Hooks for App {
    /// Version string: crate version plus the build commit sha when
    /// available (`BUILD_SHA` first, then `GITHUB_SHA`), else "dev".
    fn app_version() -> String {
        format!(
            "{} ({})",
            env!("CARGO_PKG_VERSION"),
            option_env!("BUILD_SHA")
                .or(option_env!("GITHUB_SHA"))
                .unwrap_or("dev")
        )
    }

    fn app_name() -> &'static str {
        env!("CARGO_CRATE_NAME")
    }

    /// Standard loco boot, wiring in this crate's `Migrator`.
    async fn boot(
        mode: StartMode,
        environment: &Environment,
        config: Config,
    ) -> Result<BootResult> {
        create_app::<Self, Migrator>(mode, environment, config).await
    }

    /// Locate and load the application configuration.
    ///
    /// Search roots, in priority order: `$WORKING_ROOT` (if set), then
    /// `./apps/recorder`, then `.`. A first pass loads dotenv files from
    /// every root; a second pass returns the first matching YAML config,
    /// rendered through tera. The root that supplied the config also becomes
    /// the process working root (`App::set_working_root`).
    async fn load_config(env: &Environment) -> Result<Config> {
        let working_roots_to_search = [
            std::env::var(WORKING_ROOT_VAR_NAME).ok(),
            Some(String::from("./apps/recorder")),
            Some(String::from(".")),
        ]
        .into_iter()
        .flatten()
        .collect_vec();

        // Pass 1: load every dotenv file found, most specific name first.
        for working_root in working_roots_to_search.iter() {
            let working_root = PathBuf::from(working_root);
            for env_file in [
                working_root.join(format!(".env.{env}.local")),
                working_root.join(format!(".env.{env}")),
                working_root.join(".env.local"),
                working_root.join(".env"),
            ] {
                tracing::info!(env_file =? env_file);
                if env_file.exists() && env_file.is_file() {
                    dotenv::from_path(&env_file).map_err(loco_rs::Error::wrap)?;
                    tracing::info!("loaded env from {} success.", env_file.to_string_lossy());
                }
            }
        }

        // Pass 2: return the first `{env}.local.yaml` / `{env}.yaml` found.
        for working_root in working_roots_to_search.iter() {
            let working_root = PathBuf::from(working_root);
            let config_dir = working_root.as_path().join("config");
            for config_file in [
                config_dir.join(format!("{env}.local.yaml")),
                config_dir.join(format!("{env}.yaml")),
            ] {
                if config_file.exists() && config_file.is_file() {
                    let content = fs::read_to_string(config_file.clone())?;
                    // Render the YAML through tera with an empty context,
                    // so template expressions in the config are expanded.
                    let rendered = tera::Tera::one_off(
                        &content,
                        &tera::Context::from_value(serde_json::json!({}))?,
                        false,
                    )?;
                    App::set_working_root(working_root);
                    let config_file = &config_file.to_string_lossy();
                    let res = serde_yaml::from_str(&rendered)
                        .map_err(|err| loco_rs::Error::YAMLFile(err, config_file.to_string()))?;
                    tracing::info!("loading config from {} success", config_file);
                    return Ok(res);
                }
            }
        }

        // No config anywhere: report every absolute path that was searched.
        Err(loco_rs::Error::Message(format!(
            "no configuration file found in search paths: {}",
            working_roots_to_search
                .iter()
                .flat_map(|p| path::absolute(PathBuf::from(p)))
                .map(|p| p.to_string_lossy().to_string())
                .join(",")
        )))
    }

    /// Register the per-subsystem initializers (DAL, mikan client, GraphQL,
    /// auth) that populate the process-wide singletons before the app runs.
    async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
        let initializers: Vec<Box<dyn Initializer>> = vec![
            Box::new(AppDalInitalizer),
            Box::new(AppMikanClientInitializer),
            Box::new(AppGraphQLServiceInitializer),
            Box::new(AppAuthServiceInitializer),
        ];
        Ok(initializers)
    }

    /// All routes are mounted under `/api`; the GraphQL controller is added
    /// on top of loco's default routes.
    fn routes(ctx: &AppContext) -> AppRoutes {
        let ctx = Arc::new(ctx.clone());
        AppRoutes::with_default_routes()
            .prefix("/api")
            .add_route(controllers::graphql::routes(ctx.clone()))
    }

    /// Default middleware stack plus static-asset serving: files from
    /// `<working_root>/public` under `/api/static`, with the bundled 404
    /// page as fallback.
    fn middlewares(ctx: &AppContext) -> Vec<Box<dyn MiddlewareLayer>> {
        use loco_rs::controller::middleware::static_assets::{FolderConfig, StaticAssets};
        let mut middlewares = middleware::default_middleware_stack(ctx);
        middlewares.push(Box::new(StaticAssets {
            enable: true,
            must_exist: true,
            folder: FolderConfig {
                uri: String::from("/api/static"),
                path: App::get_working_root().join("public").into(),
            },
            fallback: App::get_working_root()
                .join("public/assets/404.html")
                .into(),
            precompressed: false,
        }));
        middlewares
    }

    /// Swap the context's cache for an in-memory driver, keeping everything
    /// else unchanged.
    async fn after_context(ctx: AppContext) -> Result<AppContext> {
        Ok(AppContext {
            cache: cache::Cache::new(cache::drivers::inmem::new()).into(),
            ..ctx
        })
    }

    async fn connect_workers(ctx: &AppContext, queue: &Queue) -> Result<()> {
        queue.register(SubscriptionWorker::build(ctx)).await?;
        Ok(())
    }

    // No scheduled tasks are registered.
    fn register_tasks(_tasks: &mut Tasks) {}

    /// Test-support hook: wipe the subscribers table.
    async fn truncate(ctx: &AppContext) -> Result<()> {
        truncate_table(&ctx.db, subscribers::Entity).await?;
        Ok(())
    }

    // Seeding is a no-op.
    async fn seed(_ctx: &AppContext, _base: &Path) -> Result<()> {
        Ok(())
    }
}
pub use builder::AppBuilder;
pub use config::AppConfig;
pub use context::AppContext;
pub use env::Environment;

View File

@ -1,15 +1,17 @@
use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts};
use base64::{self, Engine};
use loco_rs::app::AppContext;
use reqwest::header::AUTHORIZATION;
use super::{
config::BasicAuthConfig,
errors::AuthError,
service::{AuthService, AuthUserInfo},
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{
app::AppContext,
models::{auth::AuthType, subscribers::SEED_SUBSCRIBER},
};
use crate::models::{auth::AuthType, subscribers::SEED_SUBSCRIBER};
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct AuthBasic {
@ -59,7 +61,7 @@ pub struct BasicAuthService {
}
#[async_trait]
impl AuthService for BasicAuthService {
impl AuthServiceTrait for BasicAuthService {
async fn extract_user_info(
&self,
ctx: &AppContext,
@ -75,7 +77,7 @@ impl AuthService for BasicAuthService {
{
let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
.await
.map_err(AuthError::FindAuthRecordError)?;
.map_err(|_| AuthError::FindAuthRecordError)?;
return Ok(AuthUserInfo {
subscriber_auth,
auth_type: AuthType::Basic,

View File

@ -1,6 +1,6 @@
use jwt_authorizer::OneOrArray;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, NoneAsEmptyString};
use serde_with::{NoneAsEmptyString, serde_as};
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct BasicAuthConfig {
@ -33,7 +33,7 @@ pub struct OidcAuthConfig {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "auth_type", rename_all = "snake_case")]
pub enum AppAuthConfig {
pub enum AuthConfig {
Basic(BasicAuthConfig),
Oidc(OidcAuthConfig),
}

View File

@ -1,9 +1,4 @@
import { InjectionToken } from '@outposts/injection-js';
import {
type EventTypes,
LogLevel,
type OpenIdConfiguration,
} from 'oidc-client-rx';
import { LogLevel, type OpenIdConfiguration } from 'oidc-client-rx';
export const isBasicAuth = process.env.AUTH_TYPE === 'basic';

View File

@ -6,7 +6,6 @@ use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
use loco_rs::model::ModelError;
use openidconnect::{
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
StandardErrorResponse, core::CoreErrorResponseType,
@ -24,7 +23,7 @@ pub enum AuthError {
current: AuthType,
},
#[error("Failed to find auth record")]
FindAuthRecordError(ModelError),
FindAuthRecordError,
#[error("Invalid credentials")]
BasicInvalidCredentials,
#[error(transparent)]

View File

@ -6,16 +6,15 @@ use axum::{
middleware::Next,
response::{IntoResponse, Response},
};
use loco_rs::prelude::AppContext;
use crate::{app::AppContextExt, auth::AuthService};
use crate::{app::AppContext, auth::AuthServiceTrait};
pub async fn api_auth_middleware(
pub async fn header_www_authenticate_middleware(
State(ctx): State<Arc<AppContext>>,
request: Request,
next: Next,
) -> Response {
let auth_service = ctx.get_auth_service();
let auth_service = &ctx.auth;
let (mut parts, body) = request.into_parts();

View File

@ -5,7 +5,7 @@ pub mod middleware;
pub mod oidc;
pub mod service;
pub use config::{AppAuthConfig, BasicAuthConfig, OidcAuthConfig};
pub use config::{AuthConfig, BasicAuthConfig, OidcAuthConfig};
pub use errors::AuthError;
pub use middleware::api_auth_middleware;
pub use service::{AppAuthService, AuthService, AuthUserInfo};
pub use middleware::header_www_authenticate_middleware;
pub use service::{AuthService, AuthServiceTrait, AuthUserInfo};

View File

@ -7,13 +7,13 @@ use async_trait::async_trait;
use axum::http::{HeaderValue, request::Parts};
use itertools::Itertools;
use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer};
use loco_rs::{app::AppContext, model::ModelError};
use moka::future::Cache;
use openidconnect::{
AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse,
core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
};
use sea_orm::DbErr;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use url::Url;
@ -21,9 +21,9 @@ use url::Url;
use super::{
config::OidcAuthConfig,
errors::AuthError,
service::{AuthService, AuthUserInfo},
service::{AuthServiceTrait, AuthUserInfo},
};
use crate::{fetch::HttpClient, models::auth::AuthType};
use crate::{app::AppContext, errors::RError, fetch::HttpClient, models::auth::AuthType};
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OidcAuthClaims {
@ -258,7 +258,7 @@ impl OidcAuthService {
}
#[async_trait]
impl AuthService for OidcAuthService {
impl AuthServiceTrait for OidcAuthService {
async fn extract_user_info(
&self,
ctx: &AppContext,
@ -306,12 +306,12 @@ impl AuthService for OidcAuthService {
}
}
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(ModelError::EntityNotFound) => {
Err(RError::DbError(DbErr::RecordNotFound(..))) => {
crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
}
r => r,
}
.map_err(AuthError::FindAuthRecordError)?;
.map_err(|_| AuthError::FindAuthRecordError)?;
Ok(AuthUserInfo {
subscriber_auth,

View File

@ -7,20 +7,17 @@ use axum::{
response::{IntoResponse as _, Response},
};
use jwt_authorizer::{JwtAuthorizer, Validation};
use loco_rs::app::{AppContext, Initializer};
use moka::future::Cache;
use once_cell::sync::OnceCell;
use reqwest::header::HeaderValue;
use super::{
AppAuthConfig,
AuthConfig,
basic::BasicAuthService,
errors::AuthError,
oidc::{OidcAuthClaims, OidcAuthService},
};
use crate::{
app::AppContextExt as _,
config::AppConfigExt,
app::AppContext,
fetch::{
HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
@ -41,7 +38,7 @@ impl FromRequestParts<AppContext> for AuthUserInfo {
parts: &mut Parts,
state: &AppContext,
) -> Result<Self, Self::Rejection> {
let auth_service = state.get_auth_service();
let auth_service = &state.auth;
auth_service
.extract_user_info(state, parts)
@ -51,7 +48,7 @@ impl FromRequestParts<AppContext> for AuthUserInfo {
}
#[async_trait]
pub trait AuthService {
pub trait AuthServiceTrait {
async fn extract_user_info(
&self,
ctx: &AppContext,
@ -61,24 +58,16 @@ pub trait AuthService {
fn auth_type(&self) -> AuthType;
}
pub enum AppAuthService {
pub enum AuthService {
Basic(BasicAuthService),
Oidc(OidcAuthService),
}
static APP_AUTH_SERVICE: OnceCell<AppAuthService> = OnceCell::new();
impl AppAuthService {
pub fn app_instance() -> &'static Self {
APP_AUTH_SERVICE
.get()
.expect("AppAuthService is not initialized")
}
pub async fn from_conf(config: AppAuthConfig) -> Result<Self, AuthError> {
impl AuthService {
pub async fn from_conf(config: AuthConfig) -> Result<Self, AuthError> {
let result = match config {
AppAuthConfig::Basic(config) => AppAuthService::Basic(BasicAuthService { config }),
AppAuthConfig::Oidc(config) => {
AuthConfig::Basic(config) => AuthService::Basic(BasicAuthService { config }),
AuthConfig::Oidc(config) => {
let validation = Validation::new()
.iss(&[&config.issuer])
.aud(&[&config.audience]);
@ -96,7 +85,7 @@ impl AppAuthService {
.build()
.await?;
AppAuthService::Oidc(OidcAuthService {
AuthService::Oidc(OidcAuthService {
config,
api_authorizer,
oidc_provider_client,
@ -112,50 +101,29 @@ impl AppAuthService {
}
#[async_trait]
impl AuthService for AppAuthService {
impl AuthServiceTrait for AuthService {
async fn extract_user_info(
&self,
ctx: &AppContext,
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> {
match self {
AppAuthService::Basic(service) => service.extract_user_info(ctx, request).await,
AppAuthService::Oidc(service) => service.extract_user_info(ctx, request).await,
AuthService::Basic(service) => service.extract_user_info(ctx, request).await,
AuthService::Oidc(service) => service.extract_user_info(ctx, request).await,
}
}
fn www_authenticate_header_value(&self) -> Option<HeaderValue> {
match self {
AppAuthService::Basic(service) => service.www_authenticate_header_value(),
AppAuthService::Oidc(service) => service.www_authenticate_header_value(),
AuthService::Basic(service) => service.www_authenticate_header_value(),
AuthService::Oidc(service) => service.www_authenticate_header_value(),
}
}
fn auth_type(&self) -> AuthType {
match self {
AppAuthService::Basic(service) => service.auth_type(),
AppAuthService::Oidc(service) => service.auth_type(),
AuthService::Basic(service) => service.auth_type(),
AuthService::Oidc(service) => service.auth_type(),
}
}
}
pub struct AppAuthServiceInitializer;
#[async_trait]
impl Initializer for AppAuthServiceInitializer {
fn name(&self) -> String {
String::from("AppAuthServiceInitializer")
}
async fn before_run(&self, ctx: &AppContext) -> Result<(), loco_rs::Error> {
let auth_conf = ctx.config.get_app_conf()?.auth;
let service = AppAuthService::from_conf(auth_conf)
.await
.map_err(loco_rs::Error::wrap)?;
APP_AUTH_SERVICE.get_or_init(|| service);
Ok(())
}
}

View File

@ -1,9 +1,15 @@
use loco_rs::cli;
use recorder::{app::App, migrations::Migrator};
use color_eyre::{self, eyre};
use recorder::app::AppBuilder;
#[tokio::main]
async fn main() -> color_eyre::eyre::Result<()> {
async fn main() -> eyre::Result<()> {
color_eyre::install()?;
cli::main::<App, Migrator>().await?;
let builder = AppBuilder::from_main_cli(None).await?;
let app = builder.build().await?;
app.serve().await?;
Ok(())
}

4
apps/recorder/src/cache/config.rs vendored Normal file
View File

@ -0,0 +1,4 @@
use serde::{Deserialize, Serialize};
/// Configuration for the cache layer. Currently empty: `CacheService` is a
/// placeholder and no backend options exist yet.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CacheConfig {}

5
apps/recorder/src/cache/mod.rs vendored Normal file
View File

@ -0,0 +1,5 @@
pub mod config;
pub mod service;
pub use config::CacheConfig;
pub use service::CacheService;

10
apps/recorder/src/cache/service.rs vendored Normal file
View File

@ -0,0 +1,10 @@
use super::CacheConfig;
use crate::errors::RResult;
/// Cache layer handle. Currently a stateless placeholder: no backend is
/// wired up yet.
pub struct CacheService {}

impl CacheService {
    /// Build the cache service from configuration.
    ///
    /// NOTE(review): `_config` is intentionally unused and construction
    /// cannot fail today; the `RResult` return leaves room for a real
    /// backend later.
    pub async fn from_config(_config: CacheConfig) -> RResult<Self> {
        Ok(Self {})
    }
}

View File

@ -1,74 +0,0 @@
use figment::{
Figment,
providers::{Format, Json, Yaml},
};
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use crate::{
auth::AppAuthConfig, dal::config::AppDalConfig, errors::RecorderError,
extract::mikan::AppMikanConfig, graphql::config::AppGraphQLConfig,
};
// Default settings merged with the user-provided `settings` section in
// `AppConfigExt::get_app_conf`. The bundled file currently contains only
// commented-out examples.
const DEFAULT_APP_SETTINGS_MIXIN: &str = include_str!("./settings_mixin.yaml");

/// Application-specific configuration extracted from loco's `settings`.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AppConfig {
    // Authentication backend selection (basic or OIDC).
    pub auth: AppAuthConfig,
    // Data-access-layer (storage) settings.
    pub dal: AppDalConfig,
    // mikanani.me client settings (HTTP client + base URL).
    pub mikan: AppMikanConfig,
    // GraphQL schema limits and options.
    pub graphql: AppGraphQLConfig,
}
/// Walk `key_path` into `value` and deserialize the reached subtree as `T`.
///
/// Returns `Ok(None)` when any key along the path is missing; an empty
/// `key_path` deserializes `value` itself. Errors only come from the final
/// `serde_json::from_value` conversion.
pub fn deserialize_key_path_from_json_value<T: DeserializeOwned>(
    value: &serde_json::Value,
    key_path: &[&str],
) -> Result<Option<T>, loco_rs::Error> {
    // A single cursor reference replaces the original Vec-based stack: only
    // the deepest node was ever read, so the intermediate entries (and the
    // `unwrap()`s needed to reach them) were pure overhead.
    let mut current = value;
    for key in key_path {
        match current.get(key) {
            Some(next) => current = next,
            None => return Ok(None),
        }
    }
    let result: T = serde_json::from_value(current.clone())?;
    Ok(Some(result))
}
/// Look up `key_path` inside the loco config's optional `settings` section.
///
/// Returns `Ok(None)` when the config has no `settings` at all, otherwise
/// delegates to `deserialize_key_path_from_json_value`.
pub fn deserialize_key_path_from_app_config<T: DeserializeOwned>(
    app_config: &loco_rs::config::Config,
    key_path: &[&str],
) -> Result<Option<T>, loco_rs::Error> {
    match app_config.settings.as_ref() {
        Some(settings) => deserialize_key_path_from_json_value(settings, key_path),
        None => Ok(None),
    }
}
/// Extension trait for extracting the strongly-typed `AppConfig` out of a
/// loco configuration.
pub trait AppConfigExt {
    fn get_root_conf(&self) -> &loco_rs::config::Config;

    /// Combine the config's `settings` JSON with the bundled YAML mixin and
    /// deserialize the result as `AppConfig`.
    ///
    /// NOTE(review): figment's `merge` gives precedence to the LATER
    /// provider, i.e. the mixin would override user settings here — if the
    /// mixin is meant as a fallback, `join` may be intended. Harmless today
    /// since the bundled mixin is entirely commented out; confirm intent.
    fn get_app_conf(&self) -> Result<AppConfig, RecorderError> {
        // Missing `settings` is treated as an empty JSON document.
        let settings_str = self
            .get_root_conf()
            .settings
            .as_ref()
            .map(serde_json::to_string)
            .unwrap_or_else(|| Ok(String::new()))?;

        let app_config = Figment::from(Json::string(&settings_str))
            .merge(Yaml::string(DEFAULT_APP_SETTINGS_MIXIN))
            .extract()?;
        Ok(app_config)
    }
}

impl AppConfigExt for loco_rs::config::Config {
    fn get_root_conf(&self) -> &loco_rs::config::Config {
        self
    }
}

View File

@ -1,15 +0,0 @@
# dal:
# data_dir: ./data
# mikan:
# http_client:
# exponential_backoff_max_retries: 3
# leaky_bucket_max_tokens: 2
# leaky_bucket_initial_tokens: 0
# leaky_bucket_refill_tokens: 1
# leaky_bucket_refill_interval: 500
# base_url: "https://mikanani.me/"
# graphql:
# depth_limit: null
# complexity_limit: null

View File

@ -1,30 +0,0 @@
use std::sync::Arc;
use async_graphql_axum::{GraphQLRequest, GraphQLResponse};
use axum::{extract::State, middleware::from_fn_with_state, routing::post, Extension};
use loco_rs::{app::AppContext, prelude::Routes};
use crate::{
app::AppContextExt,
auth::{api_auth_middleware, AuthUserInfo},
};
async fn graphql_handler(
State(ctx): State<AppContext>,
Extension(auth_user_info): Extension<AuthUserInfo>,
req: GraphQLRequest,
) -> GraphQLResponse {
let graphql_service = ctx.get_graphql_service();
let mut req = req.into_inner();
req = req.data(auth_user_info);
graphql_service.schema.execute(req).await.into()
}
pub fn routes(ctx: Arc<AppContext>) -> Routes {
Routes::new().prefix("/graphql").add(
"/",
post(graphql_handler).layer(from_fn_with_state(ctx, api_auth_middleware)),
)
}

View File

@ -1,2 +0,0 @@
pub mod graphql;
pub mod oidc;

View File

@ -1,4 +0,0 @@
pub mod client;
pub mod config;
pub use client::{AppDalClient, AppDalInitalizer, DalContentCategory};
pub use config::AppDalConfig;

View File

@ -0,0 +1,14 @@
use serde::{Deserialize, Serialize};
/// Settings for the sea-orm connection pool (see `DatabaseService::from_config`).
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct DatabaseConfig {
    // Connection string passed to `ConnectOptions::new`.
    pub uri: String,
    // Enable sqlx statement logging.
    pub enable_logging: bool,
    // Minimum number of pooled connections.
    pub min_connections: u32,
    // Maximum number of pooled connections.
    pub max_connections: u32,
    // Connect timeout, in milliseconds.
    pub connect_timeout: u64,
    // Idle timeout, in milliseconds.
    pub idle_timeout: u64,
    // Optional acquire timeout, in milliseconds; pool default when `None`.
    pub acquire_timeout: Option<u64>,
    // Run pending migrations on startup; `false` when omitted.
    #[serde(default)]
    pub auto_migrate: bool,
}

View File

@ -0,0 +1,5 @@
pub mod config;
pub mod service;
pub use config::DatabaseConfig;
pub use service::DatabaseService;

View File

@ -0,0 +1,97 @@
use std::{ops::Deref, time::Duration};
use sea_orm::{
ConnectOptions, ConnectionTrait, Database, DatabaseBackend, DatabaseConnection, DbBackend,
DbErr, ExecResult, QueryResult, Statement,
};
use sea_orm_migration::MigratorTrait;
use super::DatabaseConfig;
use crate::{errors::RResult, migrations::Migrator};
/// Thin wrapper around a sea-orm `DatabaseConnection`; the `Deref`, `AsRef`
/// and `ConnectionTrait` impls below make it usable anywhere a plain
/// connection is expected.
pub struct DatabaseService {
    connection: DatabaseConnection,
}
impl DatabaseService {
    /// Open a connection pool from `config`, apply SQLite tuning pragmas
    /// when the backend is SQLite, and optionally run pending migrations.
    pub async fn from_config(config: DatabaseConfig) -> RResult<Self> {
        // All timeout values in the config are in milliseconds.
        let mut options = ConnectOptions::new(&config.uri);
        options
            .max_connections(config.max_connections)
            .min_connections(config.min_connections)
            .connect_timeout(Duration::from_millis(config.connect_timeout))
            .idle_timeout(Duration::from_millis(config.idle_timeout))
            .sqlx_logging(config.enable_logging);
        if let Some(timeout) = config.acquire_timeout {
            options.acquire_timeout(Duration::from_millis(timeout));
        }

        let connection = Database::connect(options).await?;

        // SQLite needs per-connection pragmas for FK enforcement and
        // reasonable WAL-mode performance.
        if connection.get_database_backend() == DatabaseBackend::Sqlite {
            let pragmas = "
            PRAGMA foreign_keys = ON;
            PRAGMA journal_mode = WAL;
            PRAGMA synchronous = NORMAL;
            PRAGMA mmap_size = 134217728;
            PRAGMA journal_size_limit = 67108864;
            PRAGMA cache_size = 2000;
            ";
            connection
                .execute(Statement::from_string(DatabaseBackend::Sqlite, pragmas))
                .await?;
        }

        if config.auto_migrate {
            Migrator::up(&connection, None).await?;
        }

        Ok(Self { connection })
    }
}
// Let callers invoke `DatabaseConnection` methods directly on the service.
impl Deref for DatabaseService {
    type Target = DatabaseConnection;

    fn deref(&self) -> &Self::Target {
        &self.connection
    }
}

impl AsRef<DatabaseConnection> for DatabaseService {
    fn as_ref(&self) -> &DatabaseConnection {
        &self.connection
    }
}
// Forward the sea-orm connection behaviour straight to the wrapped
// `DatabaseConnection`, so `DatabaseService` can be passed anywhere a
// `ConnectionTrait` is required.
#[async_trait::async_trait]
impl ConnectionTrait for DatabaseService {
    fn get_database_backend(&self) -> DbBackend {
        self.connection.get_database_backend()
    }

    async fn execute(&self, stmt: Statement) -> Result<ExecResult, DbErr> {
        self.connection.execute(stmt).await
    }

    async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> {
        self.connection.execute_unprepared(sql).await
    }

    async fn query_one(&self, stmt: Statement) -> Result<Option<QueryResult>, DbErr> {
        self.connection.query_one(stmt).await
    }

    async fn query_all(&self, stmt: Statement) -> Result<Vec<QueryResult>, DbErr> {
        self.connection.query_all(stmt).await
    }

    fn support_returning(&self) -> bool {
        self.connection.support_returning()
    }

    fn is_mock_connection(&self) -> bool {
        self.connection.is_mock_connection()
    }
}

View File

@ -1,11 +1,33 @@
use std::{borrow::Cow, error::Error as StdError};
use axum::response::{IntoResponse, Response};
use http::StatusCode;
use thiserror::Error as ThisError;
use crate::fetch::HttpClientError;
use crate::{auth::AuthError, fetch::HttpClientError};
#[derive(ThisError, Debug)]
pub enum RecorderError {
pub enum RError {
#[error(transparent)]
InvalidMethodError(#[from] http::method::InvalidMethod),
#[error(transparent)]
InvalidHeaderNameError(#[from] http::header::InvalidHeaderName),
#[error(transparent)]
TracingAppenderInitError(#[from] tracing_appender::rolling::InitError),
#[error(transparent)]
GraphQLSchemaError(#[from] async_graphql::dynamic::SchemaError),
#[error(transparent)]
AuthError(#[from] AuthError),
#[error(transparent)]
RSSError(#[from] rss::Error),
#[error(transparent)]
DotEnvError(#[from] dotenv::Error),
#[error(transparent)]
TeraError(#[from] tera::Error),
#[error(transparent)]
IOError(#[from] std::io::Error),
#[error(transparent)]
DbError(#[from] sea_orm::DbErr),
#[error(transparent)]
CookieParseError(#[from] cookie::ParseError),
#[error(transparent)]
@ -44,9 +66,15 @@ pub enum RecorderError {
#[source]
source: Option<Box<dyn StdError + Send + Sync>>,
},
#[error("Model Entity {entity} not found")]
ModelEntityNotFound { entity: Cow<'static, str> },
#[error("{0}")]
CustomMessageStr(&'static str),
#[error("{0}")]
CustomMessageString(String),
}
impl RecorderError {
impl RError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
@ -70,10 +98,19 @@ impl RecorderError {
source: Some(source),
}
}
}
impl From<RecorderError> for loco_rs::Error {
fn from(error: RecorderError) -> Self {
Self::wrap(error)
pub fn from_db_record_not_found<T: ToString>(detail: T) -> Self {
Self::DbError(sea_orm::DbErr::RecordNotFound(detail.to_string()))
}
}
impl IntoResponse for RError {
fn into_response(self) -> Response {
match self {
Self::AuthError(auth_error) => auth_error.into_response(),
err => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
}
}
}
pub type RResult<T> = Result<T, RError>;

View File

@ -1,21 +1,15 @@
use std::ops::Deref;
use async_trait::async_trait;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use reqwest_middleware::ClientWithMiddleware;
use secrecy::{ExposeSecret, SecretString};
use url::Url;
use super::AppMikanConfig;
use super::MikanConfig;
use crate::{
config::AppConfigExt,
errors::RecorderError,
errors::RError,
fetch::{HttpClient, HttpClientTrait, client::HttpClientCookiesAuth},
};
static APP_MIKAN_CLIENT: OnceCell<AppMikanClient> = OnceCell::new();
#[derive(Debug, Default, Clone)]
pub struct MikanAuthSecrecy {
pub cookie: SecretString,
@ -23,19 +17,19 @@ pub struct MikanAuthSecrecy {
}
impl MikanAuthSecrecy {
pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RecorderError> {
pub fn into_cookie_auth(self, url: &Url) -> Result<HttpClientCookiesAuth, RError> {
HttpClientCookiesAuth::from_cookies(self.cookie.expose_secret(), url, self.user_agent)
}
}
#[derive(Debug)]
pub struct AppMikanClient {
pub struct MikanClient {
http_client: HttpClient,
base_url: Url,
}
impl AppMikanClient {
pub fn new(config: AppMikanConfig) -> Result<Self, RecorderError> {
impl MikanClient {
pub async fn from_config(config: MikanConfig) -> Result<Self, RError> {
let http_client = HttpClient::from_config(config.http_client)?;
let base_url = config.base_url;
Ok(Self {
@ -44,7 +38,7 @@ impl AppMikanClient {
})
}
pub fn fork_with_auth(&self, secrecy: MikanAuthSecrecy) -> Result<Self, RecorderError> {
pub fn fork_with_auth(&self, secrecy: MikanAuthSecrecy) -> Result<Self, RError> {
let cookie_auth = secrecy.into_cookie_auth(&self.base_url)?;
let fork = self.http_client.fork().attach_secrecy(cookie_auth);
@ -54,12 +48,6 @@ impl AppMikanClient {
})
}
pub fn app_instance() -> &'static AppMikanClient {
APP_MIKAN_CLIENT
.get()
.expect("AppMikanClient is not initialized")
}
pub fn base_url(&self) -> &Url {
&self.base_url
}
@ -69,7 +57,7 @@ impl AppMikanClient {
}
}
impl Deref for AppMikanClient {
impl Deref for MikanClient {
type Target = ClientWithMiddleware;
fn deref(&self) -> &Self::Target {
@ -77,22 +65,4 @@ impl Deref for AppMikanClient {
}
}
impl HttpClientTrait for AppMikanClient {}
pub struct AppMikanClientInitializer;
#[async_trait]
impl Initializer for AppMikanClientInitializer {
fn name(&self) -> String {
"AppMikanClientInitializer".to_string()
}
async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
let config = &app_context.config;
let app_mikan_conf = config.get_app_conf()?.mikan;
APP_MIKAN_CLIENT.get_or_try_init(|| AppMikanClient::new(app_mikan_conf))?;
Ok(())
}
}
impl HttpClientTrait for MikanClient {}

View File

@ -4,7 +4,7 @@ use url::Url;
use crate::fetch::HttpClientConfig;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct AppMikanConfig {
pub struct MikanConfig {
pub http_client: HttpClientConfig,
pub base_url: Url,
}

View File

@ -4,8 +4,8 @@ pub mod constants;
pub mod rss_extract;
pub mod web_extract;
pub use client::{AppMikanClient, AppMikanClientInitializer, MikanAuthSecrecy};
pub use config::AppMikanConfig;
pub use client::{MikanAuthSecrecy, MikanClient};
pub use config::MikanConfig;
pub use constants::MIKAN_BUCKET_KEY;
pub use rss_extract::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanBangumiRssLink,

View File

@ -1,7 +1,6 @@
use std::borrow::Cow;
use chrono::DateTime;
use color_eyre::eyre;
use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
@ -9,9 +8,9 @@ use tracing::instrument;
use url::Url;
use crate::{
errors::RecorderError,
errors::{RError, RResult},
extract::mikan::{
AppMikanClient,
MikanClient,
web_extract::{MikanEpisodeHomepage, extract_mikan_episode_id_from_homepage},
},
fetch::bytes::fetch_bytes,
@ -101,28 +100,28 @@ impl MikanRssChannel {
}
impl TryFrom<rss::Item> for MikanRssItem {
type Error = RecorderError;
type Error = RError;
fn try_from(item: rss::Item) -> Result<Self, Self::Error> {
let enclosure = item.enclosure.ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure"))
})?;
let enclosure = item
.enclosure
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("enclosure")))?;
let mime_type = enclosure.mime_type;
if mime_type != BITTORRENT_MIME_TYPE {
return Err(RecorderError::MimeError {
return Err(RError::MimeError {
expected: String::from(BITTORRENT_MIME_TYPE),
found: mime_type.to_string(),
desc: String::from("MikanRssItem"),
});
}
let title = item.title.ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title"))
})?;
let title = item
.title
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("title:title")))?;
let enclosure_url = Url::parse(&enclosure.url).map_err(|inner| {
RecorderError::from_mikan_rss_invalid_field_and_source(
RError::from_mikan_rss_invalid_field_and_source(
Cow::Borrowed("enclosure_url:enclosure.link"),
Box::new(inner),
)
@ -131,14 +130,12 @@ impl TryFrom<rss::Item> for MikanRssItem {
let homepage = item
.link
.and_then(|link| Url::parse(&link).ok())
.ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link"))
})?;
.ok_or_else(|| RError::from_mikan_rss_invalid_field(Cow::Borrowed("homepage:link")))?;
let MikanEpisodeHomepage {
mikan_episode_id, ..
} = extract_mikan_episode_id_from_homepage(&homepage).ok_or_else(|| {
RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
RError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
})?;
Ok(MikanRssItem {
@ -171,7 +168,7 @@ pub fn build_mikan_bangumi_rss_link(
mikan_base_url: impl IntoUrl,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> eyre::Result<Url> {
) -> RResult<Url> {
let mut url = mikan_base_url.into_url()?;
url.set_path("/RSS/Bangumi");
url.query_pairs_mut()
@ -186,7 +183,7 @@ pub fn build_mikan_bangumi_rss_link(
pub fn build_mikan_subscriber_aggregation_rss_link(
mikan_base_url: &str,
mikan_aggregation_id: &str,
) -> eyre::Result<Url> {
) -> RResult<Url> {
let mut url = Url::parse(mikan_base_url)?;
url.set_path("/RSS/MyBangumi");
url.query_pairs_mut()
@ -226,9 +223,9 @@ pub fn extract_mikan_subscriber_aggregation_id_from_rss_link(
#[instrument(skip_all, fields(channel_rss_link = channel_rss_link.as_str()))]
pub async fn extract_mikan_rss_channel_from_rss_link(
http_client: &AppMikanClient,
http_client: &MikanClient,
channel_rss_link: impl IntoUrl,
) -> eyre::Result<MikanRssChannel> {
) -> RResult<MikanRssChannel> {
let bytes = fetch_bytes(http_client, channel_rss_link.as_str()).await?;
let channel = rss::Channel::read_from(&bytes[..])?;
@ -327,11 +324,9 @@ pub async fn extract_mikan_rss_channel_from_rss_link(
},
))
} else {
Err(RecorderError::MikanRssInvalidFormatError)
.inspect_err(|error| {
tracing::warn!(error = %error);
})
.map_err(|error| error.into())
Err(RError::MikanRssInvalidFormatError).inspect_err(|error| {
tracing::warn!(error = %error);
})
}
}
@ -359,7 +354,7 @@ mod tests {
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
{
let bangumi_rss_url =

View File

@ -1,24 +1,25 @@
use std::borrow::Cow;
use async_stream::try_stream;
use bytes::Bytes;
use futures::Stream;
use itertools::Itertools;
use loco_rs::app::AppContext;
use scraper::{Html, Selector};
use tracing::instrument;
use url::Url;
use super::{
AppMikanClient, MIKAN_BUCKET_KEY, MikanBangumiRssLink, extract_mikan_bangumi_id_from_rss_link,
MIKAN_BUCKET_KEY, MikanBangumiRssLink, MikanClient, extract_mikan_bangumi_id_from_rss_link,
};
use crate::{
app::AppContextExt,
dal::DalContentCategory,
errors::RecorderError,
app::AppContext,
errors::{RError, RResult},
extract::{
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str,
},
fetch::{html::fetch_html, image::fetch_image},
storage::StorageContentCategory,
};
#[derive(Clone, Debug, PartialEq)]
@ -110,9 +111,9 @@ pub fn extract_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeH
}
pub async fn extract_mikan_poster_meta_from_src(
http_client: &AppMikanClient,
http_client: &MikanClient,
origin_poster_src_url: Url,
) -> Result<MikanBangumiPosterMeta, RecorderError> {
) -> Result<MikanBangumiPosterMeta, RError> {
let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?;
Ok(MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
@ -125,12 +126,12 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
ctx: &AppContext,
origin_poster_src_url: Url,
subscriber_id: i32,
) -> Result<MikanBangumiPosterMeta, RecorderError> {
let dal_client = ctx.get_dal_client();
let mikan_client = ctx.get_mikan_client();
) -> RResult<MikanBangumiPosterMeta> {
let dal_client = &ctx.storage;
let mikan_client = &ctx.mikan;
if let Some(poster_src) = dal_client
.exists_object(
DalContentCategory::Image,
StorageContentCategory::Image,
subscriber_id,
Some(MIKAN_BUCKET_KEY),
&origin_poster_src_url.path().replace("/images/Bangumi/", ""),
@ -148,7 +149,7 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
let poster_str = dal_client
.store_object(
DalContentCategory::Image,
StorageContentCategory::Image,
subscriber_id,
Some(MIKAN_BUCKET_KEY),
&origin_poster_src_url.path().replace("/images/Bangumi/", ""),
@ -165,9 +166,9 @@ pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
#[instrument(skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))]
pub async fn extract_mikan_episode_meta_from_episode_homepage(
http_client: &AppMikanClient,
http_client: &MikanClient,
mikan_episode_homepage_url: Url,
) -> Result<MikanEpisodeMeta, RecorderError> {
) -> Result<MikanEpisodeMeta, RError> {
let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?;
@ -183,7 +184,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
.select(bangumi_title_selector)
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -198,22 +199,18 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
.and_then(|el| el.value().attr("href"))
.and_then(|s| mikan_episode_homepage_url.join(s).ok())
.and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
})
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
.inspect_err(|error| tracing::error!(error = %error))?;
let mikan_fansub_id = mikan_fansub_id
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id"))
})
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id")))
.inspect_err(|error| tracing::error!(error = %error))?;
let episode_title = html
.select(&Selector::parse("title").unwrap())
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -221,9 +218,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
let MikanEpisodeHomepage {
mikan_episode_id, ..
} = extract_mikan_episode_id_from_homepage(&mikan_episode_homepage_url)
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
})
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -235,7 +230,7 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
)
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
.inspect_err(|error| {
tracing::warn!(error = %error);
})?;
@ -276,9 +271,9 @@ pub async fn extract_mikan_episode_meta_from_episode_homepage(
#[instrument(skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))]
pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
http_client: &AppMikanClient,
http_client: &MikanClient,
mikan_bangumi_homepage_url: Url,
) -> Result<MikanBangumiMeta, RecorderError> {
) -> Result<MikanBangumiMeta, RError> {
let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?;
let html = Html::parse_document(&content);
@ -292,7 +287,7 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
.select(bangumi_title_selector)
.next()
.map(extract_inner_text_from_element_ref)
.ok_or_else(|| RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
.inspect_err(|error| tracing::warn!(error = %error))?;
let mikan_bangumi_id = html
@ -306,9 +301,7 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
mikan_bangumi_id, ..
}| mikan_bangumi_id,
)
.ok_or_else(|| {
RecorderError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
})
.ok_or_else(|| RError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id")))
.inspect_err(|error| tracing::error!(error = %error))?;
let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
@ -357,134 +350,140 @@ pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
* @logined-required
*/
#[instrument(skip_all, fields(my_bangumi_page_url = my_bangumi_page_url.as_str()))]
pub async fn extract_mikan_bangumis_meta_from_my_bangumi_page(
http_client: &AppMikanClient,
pub fn extract_mikan_bangumis_meta_from_my_bangumi_page(
http_client: &MikanClient,
my_bangumi_page_url: Url,
) -> Result<Vec<MikanBangumiMeta>, RecorderError> {
let mikan_base_url = Url::parse(&my_bangumi_page_url.origin().unicode_serialization())?;
) -> impl Stream<Item = Result<MikanBangumiMeta, RError>> {
try_stream! {
let mikan_base_url = Url::parse(&my_bangumi_page_url.origin().unicode_serialization())?;
let content = fetch_html(http_client, my_bangumi_page_url.clone()).await?;
let content = fetch_html(http_client, my_bangumi_page_url.clone()).await?;
let fansub_container_selector =
&Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap();
let fansub_title_selector = &Selector::parse(".tag-res-name[title]").unwrap();
let fansub_id_selector =
&Selector::parse(".active[data-subtitlegroupid][data-bangumiid]").unwrap();
let bangumi_iters = {
let bangumi_container_selector = &Selector::parse(".sk-bangumi .an-ul>li").unwrap();
let bangumi_info_selector = &Selector::parse(".an-info a.an-text").unwrap();
let bangumi_poster_selector =
&Selector::parse("span[data-src][data-bangumiid], span[data-bangumiid][style]")
.unwrap();
let html = Html::parse_document(&content);
let fansub_container_selector =
&Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap();
let fansub_title_selector = &Selector::parse(".tag-res-name[title]").unwrap();
let fansub_id_selector =
&Selector::parse(".active[data-subtitlegroupid][data-bangumiid]").unwrap();
html.select(bangumi_container_selector)
.filter_map(|bangumi_elem| {
let title_and_href_elem = bangumi_elem.select(bangumi_info_selector).next();
let poster_elem = bangumi_elem.select(bangumi_poster_selector).next();
if let (Some(bangumi_home_page_url), Some(bangumi_title)) = (
title_and_href_elem.and_then(|elem| elem.attr("href")),
title_and_href_elem.and_then(|elem| elem.attr("title")),
) {
let origin_poster_src = poster_elem.and_then(|ele| {
ele.attr("data-src")
.and_then(|data_src| {
extract_image_src_from_str(data_src, &mikan_base_url)
})
.or_else(|| {
ele.attr("style").and_then(|style| {
extract_background_image_src_from_style_attr(
style,
&mikan_base_url,
)
let bangumi_iters = {
let html = Html::parse_document(&content);
html.select(bangumi_container_selector)
.filter_map(|bangumi_elem| {
let title_and_href_elem = bangumi_elem.select(bangumi_info_selector).next();
let poster_elem = bangumi_elem.select(bangumi_poster_selector).next();
if let (Some(bangumi_home_page_url), Some(bangumi_title)) = (
title_and_href_elem.and_then(|elem| elem.attr("href")),
title_and_href_elem.and_then(|elem| elem.attr("title")),
) {
let origin_poster_src = poster_elem.and_then(|ele| {
ele.attr("data-src")
.and_then(|data_src| {
extract_image_src_from_str(data_src, &mikan_base_url)
})
})
});
let bangumi_title = bangumi_title.to_string();
let bangumi_home_page_url =
my_bangumi_page_url.join(bangumi_home_page_url).ok()?;
let MikanBangumiHomepage {
mikan_bangumi_id, ..
} = extract_mikan_bangumi_id_from_homepage(&bangumi_home_page_url)?;
if let Some(origin_poster_src) = origin_poster_src.as_ref() {
tracing::trace!(
origin_poster_src = origin_poster_src.as_str(),
.or_else(|| {
ele.attr("style").and_then(|style| {
extract_background_image_src_from_style_attr(
style,
&mikan_base_url,
)
})
})
});
let bangumi_title = bangumi_title.to_string();
let bangumi_home_page_url =
my_bangumi_page_url.join(bangumi_home_page_url).ok()?;
let MikanBangumiHomepage {
mikan_bangumi_id, ..
} = extract_mikan_bangumi_id_from_homepage(&bangumi_home_page_url)?;
if let Some(origin_poster_src) = origin_poster_src.as_ref() {
tracing::trace!(
origin_poster_src = origin_poster_src.as_str(),
bangumi_title,
mikan_bangumi_id,
"bangumi info extracted"
);
} else {
tracing::warn!(
bangumi_title,
mikan_bangumi_id,
"bangumi info extracted, but failed to extract poster_src"
);
}
let bangumi_expand_info_url = build_mikan_bangumi_expand_info_url(
mikan_base_url.clone(),
&mikan_bangumi_id,
);
Some((
bangumi_title,
mikan_bangumi_id,
"bangumi info extracted"
);
bangumi_expand_info_url,
origin_poster_src,
))
} else {
tracing::warn!(
bangumi_title,
mikan_bangumi_id,
"bangumi info extracted, but failed to extract poster_src"
);
None
}
let bangumi_expand_info_url = build_mikan_bangumi_expand_info_url(
mikan_base_url.clone(),
&mikan_bangumi_id,
);
Some((
bangumi_title,
mikan_bangumi_id,
bangumi_expand_info_url,
origin_poster_src,
))
} else {
None
}
})
.collect_vec()
};
})
.collect_vec()
};
let mut bangumi_list = vec![];
for (bangumi_title, mikan_bangumi_id, bangumi_expand_info_url, origin_poster_src) in
for (bangumi_title, mikan_bangumi_id, bangumi_expand_info_url, origin_poster_src) in
bangumi_iters
{
let bangumi_expand_info_content = fetch_html(http_client, bangumi_expand_info_url).await?;
let bangumi_expand_info_fragment = Html::parse_fragment(&bangumi_expand_info_content);
for fansub_info in bangumi_expand_info_fragment.select(fansub_container_selector) {
if let (Some(fansub_name), Some(mikan_fansub_id)) = (
fansub_info
.select(fansub_title_selector)
.next()
.and_then(|ele| ele.attr("title")),
fansub_info
.select(fansub_id_selector)
.next()
.and_then(|ele| ele.attr("data-subtitlegroupid")),
) {
{
if let Some((fansub_name, mikan_fansub_id)) = {
let bangumi_expand_info_content = fetch_html(http_client, bangumi_expand_info_url).await?;
let bangumi_expand_info_fragment = Html::parse_fragment(&bangumi_expand_info_content);
bangumi_expand_info_fragment.select(fansub_container_selector).next().and_then(|fansub_info| {
if let (Some(fansub_name), Some(mikan_fansub_id)) = (
fansub_info
.select(fansub_title_selector)
.next()
.and_then(|ele| ele.attr("title"))
.map(String::from),
fansub_info
.select(fansub_id_selector)
.next()
.and_then(|ele| ele.attr("data-subtitlegroupid"))
.map(String::from)
) {
Some((fansub_name, mikan_fansub_id))
} else {
None
}
})
} {
tracing::trace!(
fansub_name = &fansub_name,
fansub_name,
mikan_fansub_id,
"subscribed fansub extracted"
);
bangumi_list.push(MikanBangumiMeta {
yield MikanBangumiMeta {
homepage: build_mikan_bangumi_homepage(
mikan_base_url.clone(),
mikan_bangumi_id.as_str(),
Some(mikan_fansub_id),
&mikan_bangumi_id,
Some(&mikan_fansub_id),
),
bangumi_title: bangumi_title.to_string(),
mikan_bangumi_id: mikan_bangumi_id.to_string(),
mikan_fansub_id: Some(mikan_fansub_id.to_string()),
fansub: Some(fansub_name.to_string()),
mikan_fansub_id: Some(mikan_fansub_id),
fansub: Some(fansub_name),
origin_poster_src: origin_poster_src.clone(),
})
};
}
}
}
Ok(bangumi_list)
}
#[cfg(test)]
mod test {
#![allow(unused_variables)]
use color_eyre::eyre;
use futures::{TryStreamExt, pin_mut};
use http::header;
use rstest::{fixture, rstest};
use secrecy::SecretString;
@ -510,7 +509,7 @@ mod test {
async fn test_extract_mikan_poster_from_src(before_each: ()) -> eyre::Result<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;
@ -541,7 +540,7 @@ mod test {
async fn test_extract_mikan_episode(before_each: ()) -> eyre::Result<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
let episode_homepage_url =
mikan_base_url.join("/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a")?;
@ -583,7 +582,7 @@ mod test {
) -> eyre::Result<()> {
let mut mikan_server = mockito::Server::new_async().await;
let mikan_base_url = Url::parse(&mikan_server.url())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
let bangumi_homepage_url = mikan_base_url.join("/Home/Bangumi/3416#370")?;
@ -626,7 +625,7 @@ mod test {
let my_bangumi_page_url = mikan_base_url.join("/Home/MyBangumi")?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
let mikan_client = build_testing_mikan_client(mikan_base_url.clone()).await?;
{
let my_bangumi_without_cookie_mock = mikan_server
@ -639,8 +638,11 @@ mod test {
let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
&mikan_client,
my_bangumi_page_url.clone(),
)
.await?;
);
pin_mut!(bangumi_metas);
let bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
assert!(bangumi_metas.is_empty());
@ -679,11 +681,13 @@ mod test {
Gecko) Chrome/133.0.0.0 Safari/537.36 Edg/133.0.0.0",
)),
})?;
let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
&mikan_client_with_cookie,
my_bangumi_page_url,
)
.await?;
);
pin_mut!(bangumi_metas);
let bangumi_metas = bangumi_metas.try_collect::<Vec<_>>().await?;
assert!(!bangumi_metas.is_empty());

View File

@ -2,12 +2,12 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
use crate::errors::RecorderError;
use crate::errors::RError;
pub async fn fetch_bytes<T: IntoUrl, H: HttpClientTrait>(
client: &H,
url: T,
) -> Result<Bytes, RecorderError> {
) -> Result<Bytes, RError> {
let bytes = client
.get(url)
.send()

View File

@ -3,7 +3,7 @@ use std::{fmt::Debug, ops::Deref, sync::Arc, time::Duration};
use async_trait::async_trait;
use axum::http::{self, Extensions};
use http_cache_reqwest::{
CACacheManager, Cache, CacheManager, CacheMode, HttpCache, HttpCacheOptions, MokaManager,
Cache, CacheManager, CacheMode, HttpCache, HttpCacheOptions, MokaManager,
};
use leaky_bucket::RateLimiter;
use reqwest::{ClientBuilder, Request, Response};
@ -17,7 +17,7 @@ use serde_with::serde_as;
use thiserror::Error;
use super::HttpClientSecrecyDataTrait;
use crate::{app::App, fetch::get_random_mobile_ua};
use crate::fetch::get_random_mobile_ua;
pub struct RateLimiterMiddleware {
rate_limiter: RateLimiter,
@ -40,7 +40,6 @@ impl Middleware for RateLimiterMiddleware {
#[serde(rename_all = "snake_case", tag = "type")]
pub enum HttpClientCacheBackendConfig {
Moka { cache_size: u64 },
CACache { cache_path: String },
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
@ -238,12 +237,6 @@ impl HttpClient {
.cache_backend
.as_ref()
.map(|b| match b {
HttpClientCacheBackendConfig::CACache { cache_path } => {
let path = std::path::PathBuf::from(
App::get_working_root().join(cache_path).as_str(),
);
CacheBackend::new(CACacheManager { path })
}
HttpClientCacheBackendConfig::Moka { cache_size } => {
CacheBackend::new(MokaManager {
cache: Arc::new(moka::future::Cache::new(*cache_size)),

View File

@ -5,7 +5,7 @@ use reqwest::{ClientBuilder, cookie::Jar};
use secrecy::zeroize::Zeroize;
use url::Url;
use crate::errors::RecorderError;
use crate::errors::RError;
pub trait HttpClientSecrecyDataTrait: Zeroize {
fn attach_secrecy_to_client(&self, client_builder: ClientBuilder) -> ClientBuilder {
@ -24,7 +24,7 @@ impl HttpClientCookiesAuth {
cookies: &str,
url: &Url,
user_agent: Option<String>,
) -> Result<Self, RecorderError> {
) -> Result<Self, RError> {
let cookie_jar = Arc::new(Jar::default());
for cookie in Cookie::split_parse(cookies).try_collect::<Vec<_>>()? {
cookie_jar.add_cookie_str(&cookie.to_string(), url);

View File

@ -1,12 +1,12 @@
use reqwest::IntoUrl;
use super::client::HttpClientTrait;
use crate::errors::RecorderError;
use crate::errors::RError;
pub async fn fetch_html<T: IntoUrl, H: HttpClientTrait>(
client: &H,
url: T,
) -> Result<String, RecorderError> {
) -> Result<String, RError> {
let content = client
.get(url)
.send()

View File

@ -2,11 +2,11 @@ use bytes::Bytes;
use reqwest::IntoUrl;
use super::{bytes::fetch_bytes, client::HttpClientTrait};
use crate::errors::RecorderError;
use crate::errors::RError;
pub async fn fetch_image<T: IntoUrl, H: HttpClientTrait>(
client: &H,
url: T,
) -> Result<Bytes, RecorderError> {
) -> Result<Bytes, RError> {
fetch_bytes(client, url).await
}

View File

@ -1,7 +1,75 @@
use serde::{Deserialize, Serialize};
use core::f64;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct AppGraphQLConfig {
pub depth_limit: Option<usize>,
pub complexity_limit: Option<usize>,
use serde::{
Deserialize, Deserializer, Serialize, Serializer,
de::{self, Unexpected},
};
#[derive(Debug, Clone, Copy)]
pub struct OnlyInfOrNaN(f64);
impl OnlyInfOrNaN {
pub fn inf() -> Self {
OnlyInfOrNaN(f64::INFINITY)
}
pub fn nan() -> Self {
OnlyInfOrNaN(f64::NAN)
}
}
impl From<OnlyInfOrNaN> for Option<usize> {
fn from(_: OnlyInfOrNaN) -> Self {
None
}
}
impl Serialize for OnlyInfOrNaN {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_f64(self.0)
}
}
impl<'de> Deserialize<'de> for OnlyInfOrNaN {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let value = f64::deserialize(deserializer)?;
if value.is_nan() {
Ok(Self::nan())
} else if value.is_infinite() {
Ok(Self::inf())
} else {
Err(de::Error::invalid_value(
Unexpected::Float(value),
&"a NaN or a Inf",
))
}
}
}
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GraphQLLimitNum {
Num(usize),
Adhoc(OnlyInfOrNaN),
}
impl From<GraphQLLimitNum> for Option<usize> {
fn from(value: GraphQLLimitNum) -> Self {
match value {
GraphQLLimitNum::Adhoc(v) => v.into(),
GraphQLLimitNum::Num(v) => Some(v),
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GraphQLConfig {
pub depth_limit: Option<GraphQLLimitNum>,
pub complexity_limit: Option<GraphQLLimitNum>,
}

View File

@ -1,8 +1,10 @@
pub mod config;
pub mod filter;
pub mod guard;
pub mod schema_root;
pub mod service;
pub mod util;
pub mod filter;
pub use config::GraphQLConfig;
pub use schema_root::schema;
pub use service::GraphQLService;

View File

@ -1,51 +1,24 @@
use async_graphql::dynamic::{Schema, SchemaError};
use async_trait::async_trait;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use async_graphql::dynamic::Schema;
use sea_orm::DatabaseConnection;
use super::{config::AppGraphQLConfig, schema_root};
use crate::config::AppConfigExt;
static APP_GRAPHQL_SERVICE: OnceCell<AppGraphQLService> = OnceCell::new();
use super::{config::GraphQLConfig, schema_root};
use crate::errors::RResult;
#[derive(Debug)]
pub struct AppGraphQLService {
pub struct GraphQLService {
pub schema: Schema,
}
impl AppGraphQLService {
pub fn new(config: AppGraphQLConfig, db: DatabaseConnection) -> Result<Self, SchemaError> {
let schema = schema_root::schema(db, config.depth_limit, config.complexity_limit)?;
impl GraphQLService {
pub async fn from_config_and_database(
config: GraphQLConfig,
db: DatabaseConnection,
) -> RResult<Self> {
let schema = schema_root::schema(
db,
config.depth_limit.and_then(|l| l.into()),
config.complexity_limit.and_then(|l| l.into()),
)?;
Ok(Self { schema })
}
pub fn app_instance() -> &'static Self {
APP_GRAPHQL_SERVICE
.get()
.expect("AppGraphQLService is not initialized")
}
}
#[derive(Debug, Clone)]
pub struct AppGraphQLServiceInitializer;
#[async_trait]
impl Initializer for AppGraphQLServiceInitializer {
fn name(&self) -> String {
String::from("AppGraphQLServiceInitializer")
}
async fn before_run(&self, app_context: &AppContext) -> loco_rs::Result<()> {
APP_GRAPHQL_SERVICE.get_or_try_init(|| {
let config = app_context
.config
.get_app_conf()
.map_err(loco_rs::Error::wrap)?
.graphql;
let db = &app_context.db;
AppGraphQLService::new(config, db.clone()).map_err(loco_rs::Error::wrap)
})?;
Ok(())
}
}

View File

@ -3,23 +3,26 @@
assert_matches,
unboxed_closures,
impl_trait_in_bindings,
iterator_try_collect
iterator_try_collect,
async_fn_traits,
let_chains
)]
pub mod app;
pub mod auth;
pub mod config;
pub mod controllers;
pub mod dal;
pub mod cache;
pub mod database;
pub mod errors;
pub mod extract;
pub mod fetch;
pub mod graphql;
pub mod logger;
pub mod migrations;
pub mod models;
pub mod storage;
pub mod sync;
pub mod tasks;
#[cfg(test)]
pub mod test_utils;
pub mod views;
pub mod workers;
pub mod utils;
pub mod web;

View File

@ -0,0 +1,38 @@
use serde::{Deserialize, Serialize};
use super::{
LogRotation,
core::{LogFormat, LogLevel},
};
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct LoggerConfig {
pub enable: bool,
#[serde(default)]
pub pretty_backtrace: bool,
pub level: LogLevel,
pub format: LogFormat,
pub filter: Option<String>,
pub override_filter: Option<String>,
pub file_appender: Option<LoggerFileAppender>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct LoggerFileAppender {
pub enable: bool,
#[serde(default)]
pub non_blocking: bool,
pub level: LogLevel,
pub format: LogFormat,
pub rotation: LogRotation,
pub dir: Option<String>,
pub filename_prefix: Option<String>,
pub filename_suffix: Option<String>,
pub max_log_files: usize,
}

View File

@ -0,0 +1,49 @@
use serde::{Deserialize, Serialize};
use serde_variant::to_variant_name;
#[derive(Debug, Default, Clone, Deserialize, Serialize)]
pub enum LogLevel {
#[serde(rename = "off")]
Off,
#[serde(rename = "trace")]
Trace,
#[serde(rename = "debug")]
Debug,
#[serde(rename = "info")]
#[default]
Info,
#[serde(rename = "warn")]
Warn,
#[serde(rename = "error")]
Error,
}
impl std::fmt::Display for LogLevel {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
to_variant_name(self).expect("only enum supported").fmt(f)
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize)]
pub enum LogFormat {
#[serde(rename = "compact")]
#[default]
Compact,
#[serde(rename = "pretty")]
Pretty,
#[serde(rename = "json")]
Json,
}
#[derive(Debug, Default, Clone, Deserialize, Serialize)]
pub enum LogRotation {
#[serde(rename = "minutely")]
Minutely,
#[serde(rename = "hourly")]
#[default]
Hourly,
#[serde(rename = "daily")]
Daily,
#[serde(rename = "never")]
Never,
}

View File

@ -0,0 +1,8 @@
pub mod config;
pub mod core;
pub mod service;
pub use core::{LogFormat, LogLevel, LogRotation};
pub use config::{LoggerConfig, LoggerFileAppender};
pub use service::LoggerService;

Some files were not shown because too many files have changed in this diff Show More