Compare commits

...

5 Commits

54 changed files with 10357 additions and 778 deletions

92
Cargo.lock generated
View File

@ -225,8 +225,9 @@ dependencies = [
[[package]]
name = "async-graphql"
version = "7.0.13"
source = "git+https://github.com/aumetra/async-graphql.git?rev=690ece7#690ece7cd408e28bfaf0c434fdd4c46ef1a78ef2"
version = "7.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfff2b17d272a5e3e201feda444e2c24b011fa722951268d1bd8b9b5bc6dc449"
dependencies = [
"async-graphql-derive",
"async-graphql-parser",
@ -261,8 +262,9 @@ dependencies = [
[[package]]
name = "async-graphql-axum"
version = "7.0.13"
source = "git+https://github.com/aumetra/async-graphql.git?rev=690ece7#690ece7cd408e28bfaf0c434fdd4c46ef1a78ef2"
version = "7.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bf2882c816094fef6e39d381b8e9b710e5943e7bdef5198496441d5083164fa"
dependencies = [
"async-graphql",
"axum",
@ -277,8 +279,9 @@ dependencies = [
[[package]]
name = "async-graphql-derive"
version = "7.0.13"
source = "git+https://github.com/aumetra/async-graphql.git?rev=690ece7#690ece7cd408e28bfaf0c434fdd4c46ef1a78ef2"
version = "7.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8e5d0c6697def2f79ccbd972fb106b633173a6066e430b480e1ff9376a7561a"
dependencies = [
"Inflector",
"async-graphql-parser",
@ -293,8 +296,9 @@ dependencies = [
[[package]]
name = "async-graphql-parser"
version = "7.0.13"
source = "git+https://github.com/aumetra/async-graphql.git?rev=690ece7#690ece7cd408e28bfaf0c434fdd4c46ef1a78ef2"
version = "7.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8531ee6d292c26df31c18c565ff22371e7bdfffe7f5e62b69537db0b8fd554dc"
dependencies = [
"async-graphql-value",
"pest",
@ -304,8 +308,9 @@ dependencies = [
[[package]]
name = "async-graphql-value"
version = "7.0.13"
source = "git+https://github.com/aumetra/async-graphql.git?rev=690ece7#690ece7cd408e28bfaf0c434fdd4c46ef1a78ef2"
version = "7.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "741110dda927420a28fbc1c310543d3416f789a6ba96859c2c265843a0a96887"
dependencies = [
"bytes",
"indexmap 2.7.1",
@ -3829,6 +3834,30 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "mockito"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "652cd6d169a36eaf9d1e6bce1a221130439a966d7f27858af66a33a66e9c4ee2"
dependencies = [
"assert-json-diff",
"bytes",
"colored",
"futures-util",
"http",
"http-body",
"http-body-util",
"hyper",
"hyper-util",
"log",
"rand 0.8.5",
"regex",
"serde_json",
"serde_urlencoded",
"similar",
"tokio",
]
[[package]]
name = "mod_use"
version = "0.2.3"
@ -5007,6 +5036,7 @@ dependencies = [
"loco-rs",
"log",
"maplit",
"mockito",
"moka",
"nom 8.0.0",
"once_cell",
@ -5020,6 +5050,7 @@ dependencies = [
"reqwest-retry",
"reqwest-tracing",
"rss",
"rstest",
"scraper 0.22.0",
"sea-orm",
"sea-orm-migration",
@ -5157,6 +5188,12 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "relative-path"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2"
[[package]]
name = "rend"
version = "0.4.2"
@ -5406,6 +5443,36 @@ dependencies = [
"quick-xml 0.37.2",
]
[[package]]
name = "rstest"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03e905296805ab93e13c1ec3a03f4b6c4f35e9498a3d5fa96dc626d22c03cd89"
dependencies = [
"futures-timer",
"futures-util",
"rstest_macros",
"rustc_version",
]
[[package]]
name = "rstest_macros"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef0053bbffce09062bee4bcc499b0fbe7a57b879f1efe088d6d8d4c7adcdef9b"
dependencies = [
"cfg-if",
"glob",
"proc-macro-crate",
"proc-macro2",
"quote",
"regex",
"relative-path",
"rustc_version",
"syn 2.0.98",
"unicode-ident",
]
[[package]]
name = "rust-multipart-rfc7578_2"
version = "0.7.0"
@ -6801,8 +6868,9 @@ dependencies = [
[[package]]
name = "testcontainers"
version = "0.23.1"
source = "git+https://github.com/testcontainers/testcontainers-rs.git?rev=af21727#af2172714bbb79c6ce648b699135922f85cafc0c"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59a4f01f39bb10fc2a5ab23eb0d888b1e2bb168c157f61a1b98e6c501c639c74"
dependencies = [
"async-trait",
"bollard",

View File

@ -3,12 +3,9 @@ members = ["apps/recorder"]
resolver = "2"
[patch.crates-io]
testcontainers = { git = "https://github.com/testcontainers/testcontainers-rs.git", rev = "af21727" }
# loco-rs = { git = "https://github.com/lonelyhentxi/loco.git", rev = "beb890e" }
# loco-rs = { git = "https://github.com/loco-rs/loco.git" }
# loco-rs = { path = "./patches/loco" }
async-graphql = { git = "https://github.com/aumetra/async-graphql.git", rev = "690ece7" }
async-graphql-axum = { git = "https://github.com/aumetra/async-graphql.git", rev = "690ece7" }
jwt-authorizer = { git = "https://github.com/blablacio/jwt-authorizer.git", rev = "e956774" }
# [patch."https://github.com/lonelyhentxi/qbit.git"]

View File

@ -29,7 +29,7 @@ tokio = { version = "1.42", features = ["macros", "fs", "rt-multi-thread"] }
async-trait = "0.1.83"
tracing = "0.1"
chrono = "0.4"
sea-orm = { version = "1", features = [
sea-orm = { version = "1.1", features = [
"sqlx-sqlite",
"sqlx-postgres",
"runtime-tokio-rustls",
@ -41,7 +41,7 @@ figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
axum = "0.8"
uuid = { version = "1.6.0", features = ["v4"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
sea-orm-migration = { version = "1", features = ["runtime-tokio-rustls"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio-rustls"] }
reqwest = { version = "0.12", features = [
"charset",
"http2",
@ -76,7 +76,7 @@ qbit-rs = { git = "https://github.com/lonelyhentxi/qbit.git", rev = "72d53138ebe
"default",
"builder",
] }
testcontainers = { version = "0.23.1", features = [
testcontainers = { version = "0.23.3", features = [
"default",
"properties-config",
"watchdog",
@ -88,10 +88,10 @@ color-eyre = "0.6"
log = "0.4.22"
anyhow = "1.0.95"
bollard = { version = "0.18", optional = true }
async-graphql = { version = "7.0.13", features = [] }
async-graphql-axum = "7.0.13"
async-graphql = { version = "7.0.15", features = [] }
async-graphql-axum = "7.0.15"
fastrand = "2.3.0"
seaography = "1.1.2"
seaography = { version = "1.1" }
quirks_path = "0.1.1"
base64 = "0.22.1"
tower = "0.5.2"
@ -99,7 +99,7 @@ axum-extra = "0.10.0"
tower-http = "0.6.2"
serde_yaml = "0.9.34"
tera = "1.20.0"
openidconnect = "4.0.0-rc.1"
openidconnect = "4"
http-cache-reqwest = { version = "0.15", features = [
"manager-cacache",
"manager-moka",
@ -118,3 +118,5 @@ nom = "8.0.0"
serial_test = "3"
loco-rs = { version = "0.14", features = ["testing"] }
insta = { version = "1", features = ["redactions", "yaml", "filters"] }
mockito = "1.6.1"
rstest = "0.24.0"

View File

@ -1,6 +1,7 @@
use async_trait::async_trait;
use axum::http::{request::Parts, HeaderValue};
use axum::http::{HeaderValue, request::Parts};
use base64::{self, Engine};
use loco_rs::app::AppContext;
use reqwest::header::AUTHORIZATION;
use super::{
@ -59,7 +60,11 @@ pub struct BasicAuthService {
#[async_trait]
impl AuthService for BasicAuthService {
async fn extract_user_info(&self, request: &mut Parts) -> Result<AuthUserInfo, AuthError> {
async fn extract_user_info(
&self,
ctx: &AppContext,
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> {
if let Ok(AuthBasic {
user: found_user,
password: found_password,
@ -68,8 +73,11 @@ impl AuthService for BasicAuthService {
if self.config.user == found_user
&& self.config.password == found_password.unwrap_or_default()
{
let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER)
.await
.map_err(AuthError::FindAuthRecordError)?;
return Ok(AuthUserInfo {
user_pid: SEED_SUBSCRIBER.to_string(),
subscriber_auth,
auth_type: AuthType::Basic,
});
}

View File

@ -1,11 +1,15 @@
use std::fmt;
use async_graphql::dynamic::ResolverContext;
use axum::{
Json,
http::StatusCode,
response::{IntoResponse, Response},
Json,
};
use loco_rs::model::ModelError;
use openidconnect::{
core::CoreErrorResponseType, ConfigurationError, RequestTokenError, SignatureVerificationError,
SigningError, StandardErrorResponse,
ConfigurationError, RequestTokenError, SignatureVerificationError, SigningError,
StandardErrorResponse, core::CoreErrorResponseType,
};
use serde::{Deserialize, Serialize};
use thiserror::Error;
@ -19,6 +23,8 @@ pub enum AuthError {
supported: Vec<AuthType>,
current: AuthType,
},
#[error("Failed to find auth record")]
FindAuthRecordError(ModelError),
#[error("Invalid credentials")]
BasicInvalidCredentials,
#[error(transparent)]
@ -69,6 +75,49 @@ pub enum AuthError {
OidcAudMissingError(String),
#[error("Subject missing")]
OidcSubMissingError,
#[error(fmt = display_graphql_permission_error)]
GraphQLPermissionError {
inner_error: async_graphql::Error,
field: String,
column: String,
context_path: String,
},
}
impl AuthError {
pub fn from_graphql_subscribe_id_guard(
inner_error: async_graphql::Error,
context: &ResolverContext,
field_name: &str,
column_name: &str,
) -> AuthError {
AuthError::GraphQLPermissionError {
inner_error,
field: field_name.to_string(),
column: column_name.to_string(),
context_path: context
.ctx
.path_node
.map(|p| p.to_string_vec().join(""))
.unwrap_or_default(),
}
}
}
fn display_graphql_permission_error(
inner_error: &async_graphql::Error,
field: &String,
column: &String,
context_path: &String,
formatter: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(
formatter,
"GraphQL permission denied since {context_path}{}{field}{}{column}: {}",
(if field.is_empty() { "" } else { "." }),
(if column.is_empty() { "" } else { "." }),
inner_error.message
)
}
#[derive(Clone, Debug, Serialize, Deserialize)]

View File

@ -19,7 +19,7 @@ pub async fn api_auth_middleware(
let (mut parts, body) = request.into_parts();
let mut response = match auth_service.extract_user_info(&mut parts).await {
let mut response = match auth_service.extract_user_info(&ctx, &mut parts).await {
Ok(auth_user_info) => {
let mut request = Request::from_parts(parts, body);
request.extensions_mut().insert(auth_user_info);

View File

@ -4,14 +4,15 @@ use std::{
};
use async_trait::async_trait;
use axum::http::{request::Parts, HeaderValue};
use axum::http::{HeaderValue, request::Parts};
use itertools::Itertools;
use jwt_authorizer::{authorizer::Authorizer, NumericDate, OneOrArray};
use jwt_authorizer::{NumericDate, OneOrArray, authorizer::Authorizer};
use loco_rs::{app::AppContext, model::ModelError};
use moka::future::Cache;
use openidconnect::{
core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
AccessTokenHash, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, TokenResponse,
core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata},
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
@ -258,7 +259,11 @@ impl OidcAuthService {
#[async_trait]
impl AuthService for OidcAuthService {
async fn extract_user_info(&self, request: &mut Parts) -> Result<AuthUserInfo, AuthError> {
async fn extract_user_info(
&self,
ctx: &AppContext,
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> {
let config = &self.config;
let token = self.api_authorizer.extract_token(&request.headers).ok_or(
AuthError::OidcJwtAuthError(jwt_authorizer::AuthError::MissingToken()),
@ -266,9 +271,11 @@ impl AuthService for OidcAuthService {
let token_data = self.api_authorizer.check_auth(&token).await?;
let claims = token_data.claims;
if claims.sub.as_deref().is_none_or(|s| s.trim().is_empty()) {
let sub = if let Some(sub) = claims.sub.as_deref() {
sub
} else {
return Err(AuthError::OidcSubMissingError);
}
};
if !claims.contains_audience(&config.audience) {
return Err(AuthError::OidcAudMissingError(config.audience.clone()));
}
@ -298,12 +305,16 @@ impl AuthService for OidcAuthService {
}
}
}
let subscriber_auth = match crate::models::auth::Model::find_by_pid(ctx, sub).await {
Err(ModelError::EntityNotFound) => {
crate::models::auth::Model::create_from_oidc(ctx, sub.to_string()).await
}
r => r,
}
.map_err(AuthError::FindAuthRecordError)?;
Ok(AuthUserInfo {
user_pid: claims
.sub
.as_deref()
.map(|s| s.trim().to_string())
.unwrap_or_else(|| unreachable!("sub should be present and validated")),
subscriber_auth,
auth_type: AuthType::Oidc,
})
}

View File

@ -13,24 +13,24 @@ use once_cell::sync::OnceCell;
use reqwest::header::HeaderValue;
use super::{
AppAuthConfig,
basic::BasicAuthService,
errors::AuthError,
oidc::{OidcAuthClaims, OidcAuthService},
AppAuthConfig,
};
use crate::{
app::AppContextExt as _,
config::AppConfigExt,
fetch::{
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
HttpClient, HttpClientConfig,
client::{HttpClientCacheBackendConfig, HttpClientCachePresetConfig},
},
models::auth::AuthType,
};
#[derive(Clone, Debug)]
pub struct AuthUserInfo {
pub user_pid: String,
pub subscriber_auth: crate::models::auth::Model,
pub auth_type: AuthType,
}
@ -44,7 +44,7 @@ impl FromRequestParts<AppContext> for AuthUserInfo {
let auth_service = state.get_auth_service();
auth_service
.extract_user_info(parts)
.extract_user_info(state, parts)
.await
.map_err(|err| err.into_response())
}
@ -52,7 +52,11 @@ impl FromRequestParts<AppContext> for AuthUserInfo {
#[async_trait]
pub trait AuthService {
async fn extract_user_info(&self, request: &mut Parts) -> Result<AuthUserInfo, AuthError>;
async fn extract_user_info(
&self,
ctx: &AppContext,
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError>;
fn www_authenticate_header_value(&self) -> Option<HeaderValue>;
fn auth_type(&self) -> AuthType;
}
@ -79,21 +83,23 @@ impl AppAuthService {
.iss(&[&config.issuer])
.aud(&[&config.audience]);
let jwt_auth = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer)
let oidc_provider_client = HttpClient::from_config(HttpClientConfig {
exponential_backoff_max_retries: Some(3),
cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
..Default::default()
})
.map_err(AuthError::OidcProviderHttpClientError)?;
let api_authorizer = JwtAuthorizer::<OidcAuthClaims>::from_oidc(&config.issuer)
.validation(validation)
.build()
.await?;
AppAuthService::Oidc(OidcAuthService {
config,
api_authorizer: jwt_auth,
oidc_provider_client: HttpClient::from_config(HttpClientConfig {
exponential_backoff_max_retries: Some(3),
cache_backend: Some(HttpClientCacheBackendConfig::Moka { cache_size: 1 }),
cache_preset: Some(HttpClientCachePresetConfig::RFC7234),
..Default::default()
})
.map_err(AuthError::OidcProviderHttpClientError)?,
api_authorizer,
oidc_provider_client,
oidc_request_cache: Cache::builder()
.time_to_live(Duration::from_mins(5))
.name("oidc_request_cache")
@ -107,10 +113,14 @@ impl AppAuthService {
#[async_trait]
impl AuthService for AppAuthService {
async fn extract_user_info(&self, request: &mut Parts) -> Result<AuthUserInfo, AuthError> {
async fn extract_user_info(
&self,
ctx: &AppContext,
request: &mut Parts,
) -> Result<AuthUserInfo, AuthError> {
match self {
AppAuthService::Basic(service) => service.extract_user_info(request).await,
AppAuthService::Oidc(service) => service.extract_user_info(request).await,
AppAuthService::Basic(service) => service.extract_user_info(ctx, request).await,
AppAuthService::Oidc(service) => service.extract_user_info(ctx, request).await,
}
}

View File

@ -4,7 +4,7 @@ use async_trait::async_trait;
use bytes::Bytes;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use opendal::{layers::LoggingLayer, services::Fs, Buffer, Operator};
use opendal::{Buffer, Operator, layers::LoggingLayer, services::Fs};
use quirks_path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use url::Url;
@ -81,7 +81,7 @@ impl AppDalClient {
pub async fn store_object(
&self,
content_category: DalContentCategory,
subscriber_pid: &str,
subscriber_id: i32,
bucket: Option<&str>,
filename: &str,
data: Bytes,
@ -89,7 +89,7 @@ impl AppDalClient {
match content_category {
DalContentCategory::Image => {
let fullname = [
subscriber_pid,
&subscriber_id.to_string(),
content_category.as_ref(),
bucket.unwrap_or_default(),
filename,
@ -119,14 +119,14 @@ impl AppDalClient {
pub async fn exists_object(
&self,
content_category: DalContentCategory,
subscriber_pid: &str,
subscriber_id: i32,
bucket: Option<&str>,
filename: &str,
) -> color_eyre::eyre::Result<Option<DalStoredUrl>> {
match content_category {
DalContentCategory::Image => {
let fullname = [
subscriber_pid,
&subscriber_id.to_string(),
content_category.as_ref(),
bucket.unwrap_or_default(),
filename,

View File

@ -1,3 +1,5 @@
use std::{borrow::Cow, error::Error as StdError};
use thiserror::Error;
#[derive(Error, Debug)]
@ -16,4 +18,19 @@ pub enum ExtractError {
MikanRssFormatError { url: String },
#[error("Parse mikan rss item format error, {reason}")]
MikanRssItemFormatError { reason: String },
#[error("Missing field {field} in extracting meta")]
MikanMetaMissingFieldError {
field: Cow<'static, str>,
#[source]
source: Option<Box<dyn StdError + Send + Sync>>,
},
}
impl ExtractError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
source: None,
}
}
}

View File

@ -1,3 +1,11 @@
pub mod styles;
pub use styles::parse_style_attr;
use html_escape::decode_html_entities;
use itertools::Itertools;
use scraper::ElementRef;
pub use styles::{extract_background_image_src_from_style_attr, extract_style_from_attr};
pub fn extract_inner_text_from_element_ref(el: ElementRef<'_>) -> String {
let raw_text = el.text().collect_vec().join(",");
decode_html_entities(&raw_text).trim().to_string()
}

View File

@ -1,6 +1,45 @@
use lightningcss::declaration::DeclarationBlock;
use lightningcss::{
declaration::DeclarationBlock, properties::Property, values::image::Image as CSSImage,
};
use url::Url;
pub fn parse_style_attr(style_attr: &str) -> Option<DeclarationBlock> {
use crate::extract::media::extract_image_src_from_str;
pub fn extract_style_from_attr(style_attr: &str) -> Option<DeclarationBlock> {
let result = DeclarationBlock::parse_string(style_attr, Default::default()).ok()?;
Some(result)
}
pub fn extract_background_image_src_from_style_attr(
style_attr: &str,
base_url: &Url,
) -> Option<Url> {
extract_style_from_attr(style_attr).and_then(|style| {
style.iter().find_map(|(prop, _)| {
match prop {
Property::BackgroundImage(images) => {
for img in images {
if let CSSImage::Url(path) = img {
if let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
{
return Some(url);
}
}
}
}
Property::Background(backgrounds) => {
for bg in backgrounds {
if let CSSImage::Url(path) = &bg.image {
if let Some(url) = extract_image_src_from_str(path.url.trim(), base_url)
{
return Some(url);
}
}
}
}
_ => {}
}
None
})
})
}

View File

@ -0,0 +1,8 @@
use url::Url;
pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
let mut image_url = base_url.join(image_src).ok()?;
image_url.set_query(None);
image_url.set_fragment(None);
Some(image_url)
}

View File

@ -3,15 +3,17 @@ use std::ops::Deref;
use async_trait::async_trait;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use url::Url;
use super::AppMikanConfig;
use crate::{config::AppConfigExt, fetch::HttpClient};
static APP_MIKAN_CLIENT: OnceCell<AppMikanClient> = OnceCell::new();
#[derive(Debug)]
pub struct AppMikanClient {
http_client: HttpClient,
base_url: String,
base_url: Url,
}
impl AppMikanClient {
@ -31,7 +33,7 @@ impl AppMikanClient {
.expect("AppMikanClient is not initialized")
}
pub fn base_url(&self) -> &str {
pub fn base_url(&self) -> &Url {
&self.base_url
}
}

View File

@ -1,9 +1,10 @@
use serde::{Deserialize, Serialize};
use url::Url;
use crate::fetch::HttpClientConfig;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct AppMikanConfig {
pub http_client: HttpClientConfig,
pub base_url: String,
pub base_url: Url,
}

View File

@ -1,22 +1,22 @@
pub mod client;
pub mod config;
pub mod constants;
pub mod rss_parser;
pub mod web_parser;
pub mod rss_extract;
pub mod web_extract;
pub use client::{AppMikanClient, AppMikanClientInitializer};
pub use config::AppMikanConfig;
pub use constants::MIKAN_BUCKET_KEY;
pub use rss_parser::{
build_mikan_bangumi_rss_link, build_mikan_subscriber_aggregation_rss_link,
parse_mikan_bangumi_id_from_rss_link, parse_mikan_rss_channel_from_rss_link,
parse_mikan_rss_items_from_rss_link, parse_mikan_subscriber_aggregation_id_from_rss_link,
pub use rss_extract::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanBangumiRssLink,
MikanRssChannel, MikanRssItem, MikanSubscriberAggregationRssChannel,
MikanSubscriberAggregationRssLink,
MikanSubscriberAggregationRssLink, build_mikan_bangumi_rss_link,
build_mikan_subscriber_aggregation_rss_link, extract_mikan_bangumi_id_from_rss_link,
extract_mikan_subscriber_aggregation_id_from_rss_link, parse_mikan_rss_channel_from_rss_link,
parse_mikan_rss_items_from_rss_link,
};
pub use web_parser::{
build_mikan_bangumi_homepage, build_mikan_episode_homepage,
parse_mikan_bangumi_meta_from_mikan_homepage, parse_mikan_episode_meta_from_mikan_homepage,
MikanBangumiMeta, MikanEpisodeMeta,
pub use web_extract::{
MikanBangumiMeta, MikanEpisodeMeta, build_mikan_bangumi_homepage, build_mikan_episode_homepage,
extract_mikan_bangumi_meta_from_bangumi_homepage,
extract_mikan_episode_meta_from_episode_homepage,
};

View File

@ -1,6 +1,7 @@
use std::ops::Deref;
use chrono::DateTime;
use color_eyre::eyre;
use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
@ -10,8 +11,8 @@ use crate::{
extract::{
errors::ExtractError,
mikan::{
web_parser::{parse_mikan_episode_id_from_homepage, MikanEpisodeHomepage},
AppMikanClient,
web_extract::{MikanEpisodeHomepage, parse_mikan_episode_id_from_homepage},
},
},
fetch::bytes::fetch_bytes,
@ -163,11 +164,11 @@ pub struct MikanSubscriberAggregationRssLink {
}
pub fn build_mikan_bangumi_rss_link(
mikan_base_url: &str,
mikan_base_url: impl IntoUrl,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> color_eyre::eyre::Result<Url> {
let mut url = Url::parse(mikan_base_url)?;
) -> eyre::Result<Url> {
let mut url = mikan_base_url.into_url()?;
url.set_path("/RSS/Bangumi");
url.query_pairs_mut()
.append_pair("bangumiId", mikan_bangumi_id);
@ -181,7 +182,7 @@ pub fn build_mikan_bangumi_rss_link(
pub fn build_mikan_subscriber_aggregation_rss_link(
mikan_base_url: &str,
mikan_aggregation_id: &str,
) -> color_eyre::eyre::Result<Url> {
) -> eyre::Result<Url> {
let mut url = Url::parse(mikan_base_url)?;
url.set_path("/RSS/MyBangumi");
url.query_pairs_mut()
@ -189,7 +190,7 @@ pub fn build_mikan_subscriber_aggregation_rss_link(
Ok(url)
}
pub fn parse_mikan_bangumi_id_from_rss_link(url: &Url) -> Option<MikanBangumiRssLink> {
pub fn extract_mikan_bangumi_id_from_rss_link(url: &Url) -> Option<MikanBangumiRssLink> {
if url.path() == "/RSS/Bangumi" {
url.query_pairs()
.find(|(k, _)| k == "bangumiId")
@ -205,7 +206,7 @@ pub fn parse_mikan_bangumi_id_from_rss_link(url: &Url) -> Option<MikanBangumiRss
}
}
pub fn parse_mikan_subscriber_aggregation_id_from_rss_link(
pub fn extract_mikan_subscriber_aggregation_id_from_rss_link(
url: &Url,
) -> Option<MikanSubscriberAggregationRssLink> {
if url.path() == "/RSS/MyBangumi" {
@ -222,7 +223,7 @@ pub fn parse_mikan_subscriber_aggregation_id_from_rss_link(
pub async fn parse_mikan_rss_items_from_rss_link(
client: Option<&AppMikanClient>,
url: impl IntoUrl,
) -> color_eyre::eyre::Result<Vec<MikanRssItem>> {
) -> eyre::Result<Vec<MikanRssItem>> {
let channel = parse_mikan_rss_channel_from_rss_link(client, url).await?;
Ok(channel.into_items())
@ -231,7 +232,7 @@ pub async fn parse_mikan_rss_items_from_rss_link(
pub async fn parse_mikan_rss_channel_from_rss_link(
client: Option<&AppMikanClient>,
url: impl IntoUrl,
) -> color_eyre::eyre::Result<MikanRssChannel> {
) -> eyre::Result<MikanRssChannel> {
let http_client = client.map(|s| s.deref());
let bytes = fetch_bytes(http_client, url.as_str()).await?;
@ -242,7 +243,7 @@ pub async fn parse_mikan_rss_channel_from_rss_link(
if let Some(MikanBangumiRssLink {
mikan_bangumi_id,
mikan_fansub_id,
}) = parse_mikan_bangumi_id_from_rss_link(&channel_link)
}) = extract_mikan_bangumi_id_from_rss_link(&channel_link)
{
let channel_name = channel.title().replace("Mikan Project - ", "");
@ -274,7 +275,7 @@ pub async fn parse_mikan_rss_channel_from_rss_link(
} else if let Some(MikanSubscriberAggregationRssLink {
mikan_aggregation_id,
..
}) = parse_mikan_subscriber_aggregation_id_from_rss_link(&channel_link)
}) = extract_mikan_subscriber_aggregation_id_from_rss_link(&channel_link)
{
let items = channel
.items
@ -304,8 +305,8 @@ mod tests {
use crate::{
extract::mikan::{
parse_mikan_rss_channel_from_rss_link, MikanBangumiAggregationRssChannel,
MikanBangumiRssChannel, MikanRssChannel,
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
parse_mikan_rss_channel_from_rss_link,
},
sync::core::BITTORRENT_MIME_TYPE,
};
@ -333,10 +334,12 @@ mod tests {
assert_eq!(first_sub_item.mime, BITTORRENT_MIME_TYPE);
assert!(&first_sub_item
.homepage
.as_str()
.starts_with("https://mikanani.me/Home/Episode"));
assert!(
&first_sub_item
.homepage
.as_str()
.starts_with("https://mikanani.me/Home/Episode")
);
let name = first_sub_item.title.as_str();
assert!(name.contains("葬送的芙莉莲"));

View File

@ -0,0 +1,644 @@
use std::{borrow::Cow, ops::Deref};
use bytes::Bytes;
use color_eyre::eyre;
use loco_rs::app::AppContext;
use reqwest::IntoUrl;
use scraper::{Html, Selector};
use tracing::instrument;
use url::Url;
use super::{
AppMikanClient, MIKAN_BUCKET_KEY, MikanBangumiRssLink, extract_mikan_bangumi_id_from_rss_link,
};
use crate::{
app::AppContextExt,
dal::DalContentCategory,
extract::{
errors::ExtractError,
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str,
},
fetch::{html::fetch_html, image::fetch_image},
};
#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeMeta {
pub homepage: Url,
pub origin_poster_src: Option<Url>,
pub bangumi_title: String,
pub episode_title: String,
pub fansub: String,
pub mikan_bangumi_id: String,
pub mikan_fansub_id: String,
pub mikan_episode_id: String,
}
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiMeta {
pub homepage: Url,
pub origin_poster_src: Option<Url>,
pub bangumi_title: String,
pub mikan_bangumi_id: String,
pub mikan_fansub_id: Option<String>,
pub fansub: Option<String>,
}
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiPosterMeta {
pub origin_poster_src: Url,
pub poster_data: Option<Bytes>,
pub poster_src: Option<String>,
}
#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeHomepage {
pub mikan_episode_id: String,
}
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiHomepage {
pub mikan_bangumi_id: String,
pub mikan_fansub_id: Option<String>,
}
pub fn build_mikan_bangumi_homepage(
mikan_base_url: impl IntoUrl,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> eyre::Result<Url> {
let mut url = mikan_base_url.into_url()?;
url.set_path(&format!("/Home/Bangumi/{mikan_bangumi_id}"));
url.set_fragment(mikan_fansub_id);
Ok(url)
}
pub fn build_mikan_episode_homepage(
mikan_base_url: impl IntoUrl,
mikan_episode_id: &str,
) -> eyre::Result<Url> {
let mut url = mikan_base_url.into_url()?;
url.set_path(&format!("/Home/Episode/{mikan_episode_id}"));
Ok(url)
}
pub fn build_mikan_bangumi_expand_info_url(
mikan_base_url: impl IntoUrl,
mikan_bangumi_id: &str,
) -> eyre::Result<Url> {
let mut url = mikan_base_url.into_url()?;
url.set_path("/ExpandBangumi");
url.query_pairs_mut()
.append_pair("bangumiId", mikan_bangumi_id)
.append_pair("showSubscribed", "true");
Ok(url)
}
pub fn parse_mikan_bangumi_id_from_homepage(url: &Url) -> Option<MikanBangumiHomepage> {
if url.path().starts_with("/Home/Bangumi/") {
let mikan_bangumi_id = url.path().replace("/Home/Bangumi/", "");
Some(MikanBangumiHomepage {
mikan_bangumi_id,
mikan_fansub_id: url.fragment().map(String::from),
})
} else {
None
}
}
pub fn parse_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeHomepage> {
if url.path().starts_with("/Home/Episode/") {
let mikan_episode_id = url.path().replace("/Home/Episode/", "");
Some(MikanEpisodeHomepage { mikan_episode_id })
} else {
None
}
}
pub async fn extract_mikan_poster_meta_from_src(
client: Option<&AppMikanClient>,
origin_poster_src_url: Url,
) -> eyre::Result<MikanBangumiPosterMeta> {
let http_client = client.map(|s| s.deref());
let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?;
Ok(MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
poster_data: Some(poster_data),
poster_src: None,
})
}
pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
ctx: &AppContext,
origin_poster_src_url: Url,
subscriber_id: i32,
) -> eyre::Result<MikanBangumiPosterMeta> {
let dal_client = ctx.get_dal_client();
let mikan_client = ctx.get_mikan_client();
if let Some(poster_src) = dal_client
.exists_object(
DalContentCategory::Image,
subscriber_id,
Some(MIKAN_BUCKET_KEY),
&origin_poster_src_url.path().replace("/images/Bangumi/", ""),
)
.await?
{
return Ok(MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
poster_data: None,
poster_src: Some(poster_src.to_string()),
});
}
let poster_data =
fetch_image(Some(mikan_client.deref()), origin_poster_src_url.clone()).await?;
let poster_str = dal_client
.store_object(
DalContentCategory::Image,
subscriber_id,
Some(MIKAN_BUCKET_KEY),
&origin_poster_src_url.path().replace("/images/Bangumi/", ""),
poster_data.clone(),
)
.await?;
Ok(MikanBangumiPosterMeta {
origin_poster_src: origin_poster_src_url,
poster_data: Some(poster_data),
poster_src: Some(poster_str.to_string()),
})
}
/// Fetch a Mikan episode homepage and scrape its metadata.
///
/// Collects the bangumi title, episode title, fansub name, the
/// bangumi / fansub / episode ids and, when available, the poster URL.
///
/// # Errors
/// Fails when the page cannot be fetched or when any required field is
/// missing (reported via `ExtractError::from_mikan_meta_missing_field`).
#[instrument(skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))]
pub async fn extract_mikan_episode_meta_from_episode_homepage(
    client: Option<&AppMikanClient>,
    mikan_episode_homepage_url: Url,
) -> eyre::Result<MikanEpisodeMeta> {
    let http_client = client.map(|s| s.deref());
    // Scheme + host of the episode page; used to resolve relative links.
    let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?;
    let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?;
    let html = Html::parse_document(&content);
    let bangumi_title_selector =
        &Selector::parse(".bangumi-title > a[href^='/Home/Bangumi/']").unwrap();
    let mikan_bangumi_id_selector =
        &Selector::parse(".bangumi-title > a.mikan-rss[data-original-title='RSS']").unwrap();
    let bangumi_poster_selector = &Selector::parse(".bangumi-poster").unwrap();
    let bangumi_title = html
        .select(bangumi_title_selector)
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
    // The RSS anchor's href encodes the bangumi id and (optionally) the
    // fansub id; resolve it against the page URL before parsing.
    let MikanBangumiRssLink {
        mikan_bangumi_id,
        mikan_fansub_id,
        ..
    } = html
        .select(mikan_bangumi_id_selector)
        .next()
        .and_then(|el| el.value().attr("href"))
        .and_then(|s| mikan_episode_homepage_url.join(s).ok())
        .and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .ok_or_else(|| {
            ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
        })
        .inspect_err(|error| tracing::error!(error = %error))?;
    // Episode pages are fansub-specific, so a missing fansub id is an error.
    let mikan_fansub_id = mikan_fansub_id
        .ok_or_else(|| {
            ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id"))
        })
        .inspect_err(|error| tracing::error!(error = %error))?;
    let episode_title = html
        .select(&Selector::parse("title").unwrap())
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
    let MikanEpisodeHomepage {
        mikan_episode_id, ..
    } = parse_mikan_episode_id_from_homepage(&mikan_episode_homepage_url)
        .ok_or_else(|| {
            ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
        })
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
    let fansub_name = html
        .select(
            &Selector::parse(".bangumi-info a.magnet-link-wrap[href^='/Home/PublishGroup/']")
                .unwrap(),
        )
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
    // The poster URL may live either in `data-src` (lazy loading) or in the
    // inline `style` attribute's background-image.
    let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
        el.value()
            .attr("data-src")
            .and_then(|data_src| extract_image_src_from_str(data_src, &mikan_base_url))
            .or_else(|| {
                el.value().attr("style").and_then(|style| {
                    extract_background_image_src_from_style_attr(style, &mikan_base_url)
                })
            })
    });
    tracing::trace!(
        bangumi_title,
        mikan_bangumi_id,
        episode_title,
        mikan_episode_id,
        origin_poster_src = origin_poster_src.as_ref().map(|url| url.as_str()),
        fansub_name,
        mikan_fansub_id,
        "mikan episode meta extracted"
    );
    Ok(MikanEpisodeMeta {
        mikan_bangumi_id,
        mikan_fansub_id,
        bangumi_title,
        episode_title,
        homepage: mikan_episode_homepage_url,
        origin_poster_src,
        fansub: fansub_name,
        mikan_episode_id,
    })
}
/// Fetch a Mikan bangumi homepage and scrape its metadata.
///
/// The URL fragment (e.g. `#370`) selects a fansub; when present, the
/// matching fansub name is looked up on the page and returned together
/// with its id, otherwise both are `None`.
#[instrument(skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))]
pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
    client: Option<&AppMikanClient>,
    mikan_bangumi_homepage_url: Url,
) -> eyre::Result<MikanBangumiMeta> {
    let http_client = client.map(|s| s.deref());
    // Scheme + host of the bangumi page; used to resolve relative links.
    let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?;
    let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?;
    let html = Html::parse_document(&content);
    let bangumi_title_selector = &Selector::parse(".bangumi-title").unwrap();
    let mikan_bangumi_id_selector =
        &Selector::parse(".bangumi-title > .mikan-rss[data-original-title='RSS']").unwrap();
    let bangumi_poster_selector = &Selector::parse(".bangumi-poster").unwrap();
    let bangumi_title = html
        .select(bangumi_title_selector)
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
        .inspect_err(|error| tracing::warn!(error = %error))?;
    // The RSS link encodes the bangumi id; only the id part is used here.
    let mikan_bangumi_id = html
        .select(mikan_bangumi_id_selector)
        .next()
        .and_then(|el| el.value().attr("href"))
        .and_then(|s| mikan_bangumi_homepage_url.join(s).ok())
        .and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .map(
            |MikanBangumiRssLink {
                 mikan_bangumi_id, ..
             }| mikan_bangumi_id,
        )
        .ok_or_else(|| {
            ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
        })
        .inspect_err(|error| tracing::error!(error = %error))?;
    // Poster URL from `data-src` or the inline style's background-image.
    let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
        el.value()
            .attr("data-src")
            .and_then(|data_src| extract_image_src_from_str(data_src, &mikan_base_url))
            .or_else(|| {
                el.value().attr("style").and_then(|style| {
                    extract_background_image_src_from_style_attr(style, &mikan_base_url)
                })
            })
    });
    // Resolve the (fansub id, fansub name) pair from the URL fragment, if any.
    let (mikan_fansub_id, fansub_name) = mikan_bangumi_homepage_url
        .fragment()
        .and_then(|id| {
            html.select(
                &Selector::parse(&format!("a.subgroup-name[data-anchor='#{}']", id)).unwrap(),
            )
            .next()
            .map(extract_inner_text_from_element_ref)
            .map(|fansub_name| (id.to_string(), fansub_name))
        })
        .unzip();
    tracing::trace!(
        bangumi_title,
        mikan_bangumi_id,
        origin_poster_src = origin_poster_src.as_ref().map(|url| url.as_str()),
        fansub_name,
        mikan_fansub_id,
        "mikan bangumi meta extracted"
    );
    Ok(MikanBangumiMeta {
        homepage: mikan_bangumi_homepage_url,
        bangumi_title,
        origin_poster_src,
        mikan_bangumi_id,
        fansub: fansub_name,
        mikan_fansub_id,
    })
}
/// Scrape the user's "My Bangumi" page for every subscribed bangumi and the
/// fansubs subscribed under each of them.
///
/// Requires a logged-in Mikan session, since the page is account-specific.
/// For each bangumi found, an extra `ExpandBangumi` request is issued to
/// enumerate the subscribed fansubs, so this makes 1 + N HTTP requests.
#[instrument(skip_all, fields(my_bangumi_page_url = my_bangumi_page_url.as_str()))]
pub async fn extract_mikan_bangumis_meta_from_my_bangumi_page(
    client: Option<&AppMikanClient>,
    my_bangumi_page_url: Url,
) -> eyre::Result<Vec<MikanBangumiMeta>> {
    let http_client = client.map(|c| c.deref());
    let mikan_base_url = Url::parse(&my_bangumi_page_url.origin().unicode_serialization())?;
    let content = fetch_html(http_client, my_bangumi_page_url.clone()).await?;
    let bangumi_container_selector = &Selector::parse(".sk-bangumi .an-ul>li").unwrap();
    let bangumi_info_selector = &Selector::parse(".an-info a.an-text").unwrap();
    // Poster element: either lazy-loaded (`data-src`) or styled inline.
    let bangumi_poster_selector =
        &Selector::parse("span[data-src][data-bangumiid], span[data-bangumiid][style]").unwrap();
    // Only fansub groups the user is subscribed to (`.js-subscribed`).
    let fansub_container_selector =
        &Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap();
    let fansub_title_selector = &Selector::parse(".tag-res-name[title]").unwrap();
    let fansub_id_selector =
        &Selector::parse(".active[data-subtitlegroupid][data-bangumiid]").unwrap();
    let html = Html::parse_document(&content);
    let mut bangumi_list = vec![];
    for bangumi_elem in html.select(bangumi_container_selector) {
        let title_and_href_elem = bangumi_elem.select(bangumi_info_selector).next();
        let poster_elem = bangumi_elem.select(bangumi_poster_selector).next();
        // Entries without both an href and a title are silently skipped.
        if let (Some(bangumi_home_page_url), Some(bangumi_title)) = (
            title_and_href_elem.and_then(|elem| elem.attr("href")),
            title_and_href_elem.and_then(|elem| elem.attr("title")),
        ) {
            let origin_poster_src = poster_elem.and_then(|ele| {
                ele.attr("data-src")
                    .and_then(|data_src| extract_image_src_from_str(data_src, &mikan_base_url))
                    .or_else(|| {
                        ele.attr("style").and_then(|style| {
                            extract_background_image_src_from_style_attr(style, &mikan_base_url)
                        })
                    })
            });
            let bangumi_home_page_url = my_bangumi_page_url.join(bangumi_home_page_url)?;
            if let Some(MikanBangumiHomepage {
                ref mikan_bangumi_id,
                ..
            }) = parse_mikan_bangumi_id_from_homepage(&bangumi_home_page_url)
            {
                if let Some(origin_poster_src) = origin_poster_src.as_ref() {
                    tracing::trace!(
                        origin_poster_src = origin_poster_src.as_str(),
                        bangumi_title,
                        mikan_bangumi_id,
                        "bangumi info extracted"
                    );
                } else {
                    // A missing poster is tolerated; only warn.
                    tracing::warn!(
                        bangumi_title,
                        mikan_bangumi_id,
                        "bangumi info extracted, but failed to extract poster_src"
                    );
                }
                // Fetch the expandable fansub fragment for this bangumi.
                let bangumi_expand_info_url =
                    build_mikan_bangumi_expand_info_url(mikan_base_url.clone(), mikan_bangumi_id)?;
                let bangumi_expand_info_content =
                    fetch_html(http_client, bangumi_expand_info_url).await?;
                let bangumi_expand_info_fragment =
                    Html::parse_fragment(&bangumi_expand_info_content);
                for fansub_info in bangumi_expand_info_fragment.select(fansub_container_selector) {
                    if let (Some(fansub_name), Some(mikan_fansub_id)) = (
                        fansub_info
                            .select(fansub_title_selector)
                            .next()
                            .and_then(|ele| ele.attr("title")),
                        fansub_info
                            .select(fansub_id_selector)
                            .next()
                            .and_then(|ele| ele.attr("data-subtitlegroupid")),
                    ) {
                        tracing::trace!(
                            fansub_name = &fansub_name,
                            mikan_fansub_id,
                            "subscribed fansub extracted"
                        );
                        // One result entry per (bangumi, subscribed fansub) pair.
                        bangumi_list.push(MikanBangumiMeta {
                            homepage: build_mikan_bangumi_homepage(
                                mikan_base_url.clone(),
                                mikan_bangumi_id.as_str(),
                                Some(mikan_fansub_id),
                            )?,
                            bangumi_title: bangumi_title.to_string(),
                            mikan_bangumi_id: mikan_bangumi_id.to_string(),
                            mikan_fansub_id: Some(mikan_fansub_id.to_string()),
                            fansub: Some(fansub_name.to_string()),
                            origin_poster_src: origin_poster_src.clone(),
                        })
                    }
                }
            }
        }
    }
    Ok(bangumi_list)
}
#[cfg(test)]
mod test {
    #![allow(unused_variables)]
    use color_eyre::eyre;
    use rstest::{fixture, rstest};
    use tracing::Level;
    use url::Url;
    use zune_image::{codecs::ImageFormat, image::Image};
    use super::*;
    use crate::{
        extract::mikan::web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page,
        test_utils::{mikan::build_testing_mikan_client, tracing::init_testing_tracing},
    };
    // rstest fixture: install the testing tracing subscriber before each test.
    #[fixture]
    fn before_each() {
        init_testing_tracing(Level::INFO);
    }
    // Poster download against a local mockito server serving a fixture JPEG.
    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_poster_from_src(before_each: ()) -> eyre::Result<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
        let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;
        let bangumi_poster_mock = mikan_server
            .mock("GET", bangumi_poster_url.path())
            .with_body_from_file("tests/resources/mikan/Bangumi-202309-5ce9fed1.jpg")
            .create_async()
            .await;
        let bgm_poster =
            extract_mikan_poster_meta_from_src(Some(&mikan_client), bangumi_poster_url).await?;
        // NOTE(review): mockito only verifies hit counts via `assert()` /
        // `assert_async()`; `.expect(1)` alone never fails the test — confirm intent.
        bangumi_poster_mock.expect(1);
        let u8_data = bgm_poster.poster_data.expect("should have poster data");
        let image = Image::read(u8_data.to_vec(), Default::default());
        assert!(
            image.is_ok_and(|img| img
                .metadata()
                .get_image_format()
                .is_some_and(|fmt| matches!(fmt, ImageFormat::JPEG))),
            "should start with valid jpeg data magic number"
        );
        Ok(())
    }
    // Episode-page scraping against a recorded HTML fixture.
    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_episode(before_each: ()) -> eyre::Result<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
        let episode_homepage_url =
            mikan_base_url.join("/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a")?;
        let episode_homepage_mock = mikan_server
            .mock("GET", episode_homepage_url.path())
            .with_body_from_file(
                "tests/resources/mikan/Episode-475184dce83ea2b82902592a5ac3343f6d54b36a.htm",
            )
            .create_async()
            .await;
        let ep_meta = extract_mikan_episode_meta_from_episode_homepage(
            Some(&mikan_client),
            episode_homepage_url.clone(),
        )
        .await?;
        assert_eq!(ep_meta.homepage, episode_homepage_url);
        assert_eq!(ep_meta.bangumi_title, "葬送的芙莉莲");
        assert_eq!(
            ep_meta
                .origin_poster_src
                .as_ref()
                .map(|s| s.path().to_string()),
            Some(String::from("/images/Bangumi/202309/5ce9fed1.jpg"))
        );
        assert_eq!(ep_meta.fansub, "LoliHouse");
        assert_eq!(ep_meta.mikan_fansub_id, "370");
        assert_eq!(ep_meta.mikan_bangumi_id, "3141");
        Ok(())
    }
    // Bangumi-page scraping against a recorded HTML fixture.
    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(
        before_each: (),
    ) -> eyre::Result<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
        let bangumi_homepage_url = mikan_base_url.join("/Home/Bangumi/3416#370")?;
        let bangumi_homepage_mock = mikan_server
            .mock("GET", bangumi_homepage_url.path())
            .with_body_from_file("tests/resources/mikan/Bangumi-3416-370.htm")
            .create_async()
            .await;
        let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
            Some(&mikan_client),
            bangumi_homepage_url.clone(),
        )
        .await?;
        assert_eq!(bgm_meta.homepage, bangumi_homepage_url);
        assert_eq!(bgm_meta.bangumi_title, "叹气的亡灵想隐退");
        assert_eq!(
            bgm_meta
                .origin_poster_src
                .as_ref()
                .map(|s| s.path().to_string()),
            Some(String::from("/images/Bangumi/202410/480ef127.jpg"))
        );
        assert_eq!(bgm_meta.fansub, Some(String::from("LoliHouse")));
        assert_eq!(bgm_meta.mikan_fansub_id, Some(String::from("370")));
        assert_eq!(bgm_meta.mikan_bangumi_id, "3416");
        Ok(())
    }
    // "My Bangumi" scraping; the ExpandBangumi endpoint is mocked for any query.
    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(
        before_each: (),
    ) -> eyre::Result<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
        let my_bangumi_page_url = mikan_base_url.join("/Home/MyBangumi")?;
        let mock_my_bangumi = mikan_server
            .mock("GET", my_bangumi_page_url.path())
            .with_body_from_file("tests/resources/mikan/MyBangumi.htm")
            .create_async()
            .await;
        let mock_expand_bangumi = mikan_server
            .mock("GET", "/ExpandBangumi")
            .match_query(mockito::Matcher::Any)
            .with_body_from_file("tests/resources/mikan/ExpandBangumi.htm")
            .create_async()
            .await;
        let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
            Some(&mikan_client),
            my_bangumi_page_url,
        )
        .await?;
        assert!(!bangumi_metas.is_empty());
        assert!(bangumi_metas[0].origin_poster_src.is_some());
        mock_my_bangumi.expect(1);
        mock_expand_bangumi.expect(bangumi_metas.len());
        Ok(())
    }
}

View File

@ -1,494 +0,0 @@
use std::ops::Deref;
use bytes::Bytes;
use color_eyre::eyre::ContextCompat;
use html_escape::decode_html_entities;
use itertools::Itertools;
use lazy_static::lazy_static;
use lightningcss::{properties::Property, values::image::Image as CSSImage};
use loco_rs::app::AppContext;
use regex::Regex;
use scraper::Html;
use url::Url;
use super::{
parse_mikan_bangumi_id_from_rss_link, AppMikanClient, MikanBangumiRssLink, MIKAN_BUCKET_KEY,
};
use crate::{
app::AppContextExt,
dal::DalContentCategory,
extract::html::parse_style_attr,
fetch::{html::fetch_html, image::fetch_image},
models::subscribers,
};
/// Metadata scraped from a Mikan episode homepage
/// (`/Home/Episode/{mikan_episode_id}`).
#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeMeta {
    /// The episode homepage this metadata was scraped from.
    pub homepage: Url,
    /// Absolute poster URL, when the page exposes one.
    pub origin_poster_src: Option<Url>,
    pub bangumi_title: String,
    pub episode_title: String,
    pub fansub: String,
    pub mikan_bangumi_id: String,
    pub mikan_fansub_id: String,
    pub mikan_episode_id: String,
}
/// Metadata scraped from a Mikan bangumi homepage
/// (`/Home/Bangumi/{mikan_bangumi_id}`, optionally with a `#fansub` fragment).
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiMeta {
    pub homepage: Url,
    /// Absolute poster URL, when the page exposes one.
    pub origin_poster_src: Option<Url>,
    pub bangumi_title: String,
    pub mikan_bangumi_id: String,
    /// Present only when the homepage URL carried a matching fansub fragment.
    pub mikan_fansub_id: Option<String>,
    pub fansub: Option<String>,
    /// All `(fansub_id, fansub_name)` pairs found on the page.
    pub mikan_fansub_candidates: Vec<(String, String)>,
}
/// A bangumi poster: its source URL plus, depending on how it was obtained,
/// the raw bytes (`poster_data`) and/or the storage-layer path (`poster_src`).
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiPosterMeta {
    pub origin_poster_src: Url,
    pub poster_data: Option<Bytes>,
    pub poster_src: Option<String>,
}
/// The episode id parsed out of an episode homepage URL.
#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeHomepage {
    pub mikan_episode_id: String,
}
lazy_static! {
    // Matches CJK season markers such as "第二季" in bangumi titles.
    // NOTE(review): not referenced in this file's visible code — confirm it
    // is used elsewhere before removing.
    static ref MIKAN_TITLE_SEASON: Regex = Regex::new("第.*季").unwrap();
}
/// Build the canonical homepage URL for a bangumi, e.g.
/// `https://mikanani.me/Home/Bangumi/3416#370`.
///
/// Returns an error when `mikan_base_url` is not a valid URL.
pub fn build_mikan_bangumi_homepage(
    mikan_base_url: &str,
    mikan_bangumi_id: &str,
    mikan_fansub_id: Option<&str>,
) -> color_eyre::eyre::Result<Url> {
    let mut homepage = Url::parse(mikan_base_url)?;
    let bangumi_path = format!("/Home/Bangumi/{mikan_bangumi_id}");
    homepage.set_path(&bangumi_path);
    // A fansub id, when present, rides in the URL fragment.
    homepage.set_fragment(mikan_fansub_id);
    Ok(homepage)
}
/// Build the canonical homepage URL for an episode, e.g.
/// `https://mikanani.me/Home/Episode/{mikan_episode_id}`.
///
/// Returns an error when `mikan_base_url` is not a valid URL.
pub fn build_mikan_episode_homepage(
    mikan_base_url: &str,
    mikan_episode_id: &str,
) -> color_eyre::eyre::Result<Url> {
    let mut homepage = Url::parse(mikan_base_url)?;
    let episode_path = format!("/Home/Episode/{mikan_episode_id}");
    homepage.set_path(&episode_path);
    Ok(homepage)
}
/// Extract the `mikan_episode_id` from an episode homepage URL
/// (path shape: `/Home/Episode/{mikan_episode_id}`).
///
/// Returns `None` when the URL path is not an episode homepage.
pub fn parse_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeHomepage> {
    // `strip_prefix` removes exactly one leading occurrence of the marker,
    // unlike the previous `replace`, which would also rewrite any later
    // occurrence of "/Home/Episode/" inside the id itself.
    url.path()
        .strip_prefix("/Home/Episode/")
        .map(|mikan_episode_id| MikanEpisodeHomepage {
            mikan_episode_id: mikan_episode_id.to_string(),
        })
}
/// Download the poster pointed to by `origin_poster_src` and wrap it in a
/// `MikanBangumiPosterMeta`. No storage layer is involved, so `poster_src`
/// is always `None`.
pub async fn parse_mikan_bangumi_poster_from_origin_poster_src(
    client: Option<&AppMikanClient>,
    origin_poster_src: Url,
) -> color_eyre::eyre::Result<MikanBangumiPosterMeta> {
    let image_bytes = fetch_image(client.map(|s| s.deref()), origin_poster_src.clone()).await?;
    Ok(MikanBangumiPosterMeta {
        origin_poster_src,
        poster_data: Some(image_bytes),
        poster_src: None,
    })
}
/// Like `parse_mikan_bangumi_poster_from_origin_poster_src`, but backed by
/// the DAL object store: returns the already-stored object when present,
/// otherwise downloads the poster and stores it under the subscriber's
/// Mikan bucket.
///
/// On a cache hit `poster_data` is `None`; on a miss it carries the freshly
/// downloaded bytes. `poster_src` is always set on success.
pub async fn parse_mikan_bangumi_poster_from_origin_poster_src_with_cache(
    ctx: &AppContext,
    origin_poster_src: Url,
    subscriber_id: i32,
) -> color_eyre::eyre::Result<MikanBangumiPosterMeta> {
    let dal_client = ctx.get_dal_client();
    let mikan_client = ctx.get_mikan_client();
    let subscriber_pid = &subscribers::Model::find_pid_by_id_with_cache(ctx, subscriber_id).await?;
    // Object key: the poster path with the common "/images/Bangumi/" prefix removed.
    if let Some(poster_src) = dal_client
        .exists_object(
            DalContentCategory::Image,
            subscriber_pid,
            Some(MIKAN_BUCKET_KEY),
            &origin_poster_src.path().replace("/images/Bangumi/", ""),
        )
        .await?
    {
        // Cache hit: skip the download and return only the stored path.
        return Ok(MikanBangumiPosterMeta {
            origin_poster_src,
            poster_data: None,
            poster_src: Some(poster_src.to_string()),
        });
    }
    let poster_data = fetch_image(Some(mikan_client.deref()), origin_poster_src.clone()).await?;
    let poster_str = dal_client
        .store_object(
            DalContentCategory::Image,
            subscriber_pid,
            Some(MIKAN_BUCKET_KEY),
            &origin_poster_src.path().replace("/images/Bangumi/", ""),
            poster_data.clone(),
        )
        .await?;
    Ok(MikanBangumiPosterMeta {
        origin_poster_src,
        poster_data: Some(poster_data),
        poster_src: Some(poster_str.to_string()),
    })
}
/// Fetch a Mikan bangumi homepage and scrape its metadata, including every
/// fansub candidate on the page.
///
/// The URL fragment, when present, selects the active fansub out of the
/// scraped candidates.
pub async fn parse_mikan_bangumi_meta_from_mikan_homepage(
    client: Option<&AppMikanClient>,
    url: Url,
) -> color_eyre::eyre::Result<MikanBangumiMeta> {
    let http_client = client.map(|s| s.deref());
    // Origin (scheme + host) used to absolutize the poster URL.
    let url_host = url.origin().unicode_serialization();
    let content = fetch_html(http_client, url.as_str()).await?;
    let html = Html::parse_document(&content);
    // Collect all (fansub_id, fansub_name) pairs from the subgroup headers;
    // the name is stitched from the preceding text node plus the anchor text.
    let bangumi_fansubs = html
        .select(&scraper::Selector::parse(".subgroup-text").unwrap())
        .filter_map(|el| {
            if let (Some(fansub_id), Some(fansub_name)) = (
                el.value()
                    .attr("id")
                    .map(|s| decode_html_entities(s).trim().to_string()),
                el.select(&scraper::Selector::parse("a:nth-child(1)").unwrap())
                    .next()
                    .map(|child| {
                        let mut s = String::from(
                            child
                                .prev_sibling()
                                .and_then(|t| t.value().as_text())
                                .map(|s| s.trim())
                                .unwrap_or_default(),
                        );
                        s.extend(child.text());
                        decode_html_entities(&s).trim().to_string()
                    }),
            ) {
                Some((fansub_id, fansub_name))
            } else {
                None
            }
        })
        .collect_vec();
    // The URL fragment (e.g. "#370") picks one candidate as the active fansub.
    let fansub_info = url.fragment().and_then(|b| {
        bangumi_fansubs
            .iter()
            .find_map(|(id, name)| if id == b { Some((id, name)) } else { None })
    });
    let bangumi_title = html
        .select(&scraper::Selector::parse(".bangumi-title").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .trim()
                .to_string()
        })
        .and_then(|title| if title.is_empty() { None } else { Some(title) })
        .wrap_err_with(|| {
            // TODO: replace this ad-hoc message with a typed extract error
            format!("Missing mikan bangumi official title for {}", url)
        })?;
    // The bangumi id is carried by the RSS link next to the title.
    let MikanBangumiRssLink {
        mikan_bangumi_id, ..
    } = html
        .select(&scraper::Selector::parse(".bangumi-title > .mikan-rss").unwrap())
        .next()
        .and_then(|el| el.value().attr("href"))
        .as_ref()
        .and_then(|s| url.join(s).ok())
        .and_then(|rss_link_url| parse_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .wrap_err_with(|| {
            // TODO: replace this ad-hoc message with a typed extract error
            format!("Missing mikan bangumi rss link or error format for {}", url)
        })?;
    // The poster URL is embedded in the CSS `style` attribute as a
    // background(-image) property; the query string is dropped afterwards.
    let origin_poster_src = html
        .select(&scraper::Selector::parse(".bangumi-poster").unwrap())
        .next()
        .and_then(|el| el.value().attr("style"))
        .as_ref()
        .and_then(|s| parse_style_attr(s))
        .and_then(|style| {
            style.iter().find_map(|(prop, _)| {
                match prop {
                    Property::BackgroundImage(images) => {
                        for img in images {
                            if let CSSImage::Url(path) = img {
                                if let Ok(url) =
                                    Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                                {
                                    return Some(url);
                                }
                            }
                        }
                    }
                    Property::Background(backgrounds) => {
                        for bg in backgrounds {
                            if let CSSImage::Url(path) = &bg.image {
                                if let Ok(url) =
                                    Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                                {
                                    return Some(url);
                                }
                            }
                        }
                    }
                    _ => {}
                }
                None
            })
        })
        .map(|mut origin_poster_src| {
            origin_poster_src.set_query(None);
            origin_poster_src
        });
    Ok(MikanBangumiMeta {
        homepage: url,
        bangumi_title,
        origin_poster_src,
        mikan_bangumi_id,
        fansub: fansub_info.map(|s| s.1.to_string()),
        mikan_fansub_id: fansub_info.map(|s| s.0.to_string()),
        mikan_fansub_candidates: bangumi_fansubs.clone(),
    })
}
/// Fetch a Mikan episode homepage and scrape its metadata (titles, fansub,
/// ids and poster URL).
pub async fn parse_mikan_episode_meta_from_mikan_homepage(
    client: Option<&AppMikanClient>,
    url: Url,
) -> color_eyre::eyre::Result<MikanEpisodeMeta> {
    let http_client = client.map(|s| s.deref());
    // Origin (scheme + host) used to absolutize the poster URL.
    let url_host = url.origin().unicode_serialization();
    let content = fetch_html(http_client, url.as_str()).await?;
    let html = Html::parse_document(&content);
    let bangumi_title = html
        .select(&scraper::Selector::parse(".bangumi-title").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .trim()
                .to_string()
        })
        .and_then(|title| if title.is_empty() { None } else { Some(title) })
        .wrap_err_with(|| {
            // TODO: replace this ad-hoc message with a typed extract error
            format!("Missing mikan bangumi official title for {}", url)
        })?;
    // The <title> tag carries the episode name plus a site suffix to strip.
    let episode_title = html
        .select(&scraper::Selector::parse("title").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .replace(" - Mikan Project", "")
                .trim()
                .to_string()
        })
        .and_then(|title| if title.is_empty() { None } else { Some(title) })
        .wrap_err_with(|| {
            // TODO: replace this ad-hoc message with a typed extract error
            format!("Missing mikan episode official title for {}", url)
        })?;
    // Both ids come from the bangumi RSS link; the fansub id is mandatory
    // on episode pages, so its absence fails the whole extraction.
    let (mikan_bangumi_id, mikan_fansub_id) = html
        .select(&scraper::Selector::parse(".bangumi-title > .mikan-rss").unwrap())
        .next()
        .and_then(|el| el.value().attr("href"))
        .as_ref()
        .and_then(|s| url.join(s).ok())
        .and_then(|rss_link_url| parse_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .and_then(
            |MikanBangumiRssLink {
                 mikan_bangumi_id,
                 mikan_fansub_id,
                 ..
             }| {
                mikan_fansub_id.map(|mikan_fansub_id| (mikan_bangumi_id, mikan_fansub_id))
            },
        )
        .wrap_err_with(|| {
            // TODO: replace this ad-hoc message with a typed extract error
            format!("Missing mikan bangumi rss link or error format for {}", url)
        })?;
    let fansub = html
        .select(&scraper::Selector::parse(".bangumi-info>.magnet-link-wrap").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .trim()
                .to_string()
        })
        .wrap_err_with(|| {
            // TODO: replace this ad-hoc message with a typed extract error
            format!("Missing mikan bangumi fansub name for {}", url)
        })?;
    // The poster URL is embedded in the CSS `style` attribute as a
    // background(-image) property; the query string is dropped afterwards.
    let origin_poster_src = html
        .select(&scraper::Selector::parse(".bangumi-poster").unwrap())
        .next()
        .and_then(|el| el.value().attr("style"))
        .as_ref()
        .and_then(|s| parse_style_attr(s))
        .and_then(|style| {
            style.iter().find_map(|(prop, _)| {
                match prop {
                    Property::BackgroundImage(images) => {
                        for img in images {
                            if let CSSImage::Url(path) = img {
                                if let Ok(url) =
                                    Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                                {
                                    return Some(url);
                                }
                            }
                        }
                    }
                    Property::Background(backgrounds) => {
                        for bg in backgrounds {
                            if let CSSImage::Url(path) = &bg.image {
                                if let Ok(url) =
                                    Url::parse(&url_host).and_then(|s| s.join(path.url.trim()))
                                {
                                    return Some(url);
                                }
                            }
                        }
                    }
                    _ => {}
                }
                None
            })
        })
        .map(|mut origin_poster_src| {
            origin_poster_src.set_query(None);
            origin_poster_src
        });
    let MikanEpisodeHomepage {
        mikan_episode_id, ..
    } = parse_mikan_episode_id_from_homepage(&url)
        .wrap_err_with(|| format!("Failed to extract mikan_episode_id from {}", &url))?;
    Ok(MikanEpisodeMeta {
        mikan_bangumi_id,
        mikan_fansub_id,
        bangumi_title,
        episode_title,
        homepage: url,
        origin_poster_src,
        fansub,
        mikan_episode_id,
    })
}
pub async fn parse_mikan_bangumis_from_user_home(_client: Option<&AppMikanClient>, _url: Url) {}
#[cfg(test)]
mod test {
    // `assert_matches` is a nightly-only std feature.
    use std::assert_matches::assert_matches;
    use url::Url;
    use zune_image::{codecs::ImageFormat, image::Image};
    use super::{
        parse_mikan_bangumi_meta_from_mikan_homepage,
        parse_mikan_bangumi_poster_from_origin_poster_src,
        parse_mikan_episode_meta_from_mikan_homepage,
    };
    // NOTE(review): these tests hit the live mikanani.me site; they depend
    // on network access and on the remote pages not changing.
    #[tokio::test]
    async fn test_parse_mikan_episode() {
        let test_fn = async || -> color_eyre::eyre::Result<()> {
            let url_str =
                "https://mikanani.me/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a";
            let url = Url::parse(url_str)?;
            let ep_meta = parse_mikan_episode_meta_from_mikan_homepage(None, url.clone()).await?;
            assert_eq!(ep_meta.homepage, url);
            assert_eq!(ep_meta.bangumi_title, "葬送的芙莉莲");
            assert_eq!(
                ep_meta.origin_poster_src,
                Some(Url::parse(
                    "https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
                )?)
            );
            assert_eq!(ep_meta.fansub, "LoliHouse");
            assert_eq!(ep_meta.mikan_fansub_id, "370");
            assert_eq!(ep_meta.mikan_bangumi_id, "3141");
            assert_matches!(ep_meta.origin_poster_src, Some(..));
            // Also exercise the poster download path using the scraped URL.
            let bgm_poster = parse_mikan_bangumi_poster_from_origin_poster_src(
                None,
                ep_meta.origin_poster_src.unwrap(),
            )
            .await?;
            let u8_data = bgm_poster.poster_data.expect("should have poster data");
            let image = Image::read(u8_data.to_vec(), Default::default());
            assert!(
                image.is_ok_and(|img| img
                    .metadata()
                    .get_image_format()
                    .is_some_and(|fmt| matches!(fmt, ImageFormat::JPEG))),
                "should start with valid jpeg data magic number"
            );
            Ok(())
        };
        test_fn().await.expect("test parse mikan failed");
    }
    #[tokio::test]
    async fn test_parse_mikan_bangumi() {
        let test_fn = async || -> color_eyre::eyre::Result<()> {
            let url_str = "https://mikanani.me/Home/Bangumi/3416#370";
            let url = Url::parse(url_str)?;
            let bgm_meta = parse_mikan_bangumi_meta_from_mikan_homepage(None, url.clone()).await?;
            assert_eq!(bgm_meta.homepage, url);
            assert_eq!(bgm_meta.bangumi_title, "叹气的亡灵想隐退");
            assert_eq!(
                bgm_meta.origin_poster_src,
                Some(Url::parse(
                    "https://mikanani.me/images/Bangumi/202410/480ef127.jpg"
                )?)
            );
            assert_eq!(bgm_meta.fansub, Some(String::from("LoliHouse")));
            assert_eq!(bgm_meta.mikan_fansub_id, Some(String::from("370")));
            assert_eq!(bgm_meta.mikan_bangumi_id, "3416");
            assert_eq!(
                bgm_meta.homepage.as_str(),
                "https://mikanani.me/Home/Bangumi/3416#370"
            );
            assert_eq!(bgm_meta.mikan_fansub_candidates.len(), 6);
            Ok(())
        };
        test_fn().await.expect("test parse mikan failed");
    }
}

View File

@ -2,6 +2,7 @@ pub mod defs;
pub mod errors;
pub mod html;
pub mod http;
pub mod media;
pub mod mikan;
pub mod rawname;
pub mod torrent;

View File

@ -1,4 +1,4 @@
use std::{ops::Deref, sync::Arc, time::Duration};
use std::{fmt::Debug, ops::Deref, sync::Arc, time::Duration};
use async_trait::async_trait;
use axum::http::{self, Extensions};
@ -11,7 +11,7 @@ use reqwest::{ClientBuilder, Request, Response};
use reqwest_middleware::{
ClientBuilder as ClientWithMiddlewareBuilder, ClientWithMiddleware, Next,
};
use reqwest_retry::{policies::ExponentialBackoff, RetryTransientMiddleware};
use reqwest_retry::{RetryTransientMiddleware, policies::ExponentialBackoff};
use reqwest_tracing::TracingMiddleware;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
@ -101,6 +101,14 @@ pub struct HttpClient {
pub config: HttpClientConfig,
}
// Manual `Debug` impl: prints only the `config` field; any other fields of
// `HttpClient` are intentionally omitted from the output.
impl Debug for HttpClient {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("HttpClient")
            .field("config", &self.config)
            .finish()
    }
}
impl From<HttpClient> for ClientWithMiddleware {
fn from(val: HttpClient) -> Self {
val.client

View File

@ -0,0 +1,46 @@
use async_graphql::dynamic::{ObjectAccessor, TypeRef};
use maplit::btreeset;
use once_cell::sync::OnceCell;
use sea_orm::{ColumnTrait, Condition, EntityTrait, Value};
use seaography::{BuilderContext, FilterInfo, FilterOperation, SeaResult};
/// Lazily-initialised metadata for the custom `SubscriberIdFilterInput`
/// GraphQL input type (equality-only filtering on an integer column).
pub static SUBSCRIBER_ID_FILTER_INFO: OnceCell<FilterInfo> = OnceCell::new();
/// Registers the subscriber-id filter metadata. Idempotent: repeated calls
/// are no-ops thanks to `OnceCell::get_or_init`.
pub fn init_custom_filter_info() {
    SUBSCRIBER_ID_FILTER_INFO.get_or_init(|| FilterInfo {
        type_name: String::from("SubscriberIdFilterInput"),
        base_type: TypeRef::INT.into(),
        supported_operations: btreeset! { FilterOperation::Equals },
    });
}
/// A seaography condition builder: folds GraphQL filter input into a
/// sea-orm `Condition`.
pub type FnFilterCondition =
    Box<dyn Fn(Condition, &ObjectAccessor) -> SeaResult<Condition> + Send + Sync>;
/// Build a seaography filter-condition function that restricts `column` to
/// equality against the value supplied under the filter's `eq` key.
///
/// The returned closure panics if `init_custom_filter_info` has not been
/// called first (`SUBSCRIBER_ID_FILTER_INFO` is unwrapped at call time).
pub fn subscriber_id_condition_function<T>(
    _context: &BuilderContext,
    column: &T::Column,
) -> FnFilterCondition
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let column = *column;
    Box::new(move |mut condition, filter| {
        let subscriber_id_filter_info = SUBSCRIBER_ID_FILTER_INFO.get().unwrap();
        let operations = &subscriber_id_filter_info.supported_operations;
        for operation in operations {
            match operation {
                FilterOperation::Equals => {
                    // "eq" is optional in the input; the GraphQL i64 is
                    // narrowed (checked) into the i32 column type.
                    if let Some(value) = filter.get("eq") {
                        let value: i32 = value.i64()?.try_into()?;
                        let value = Value::Int(Some(value));
                        condition = condition.add(column.eq(value));
                    }
                }
                // The registered operation set contains only `Equals`.
                _ => unreachable!("unreachable filter operation for subscriber_id"),
            }
        }
        Ok(condition)
    })
}

View File

@ -0,0 +1,249 @@
use std::sync::Arc;
use async_graphql::dynamic::{ResolverContext, ValueAccessor};
use sea_orm::EntityTrait;
use seaography::{BuilderContext, FnGuard, GuardAction};
use super::util::get_entity_key;
use crate::{
auth::{AuthError, AuthUserInfo},
graphql::util::get_column_key,
};
fn guard_data_object_accessor_with_subscriber_id(
value: ValueAccessor<'_>,
column_name: &str,
subscriber_id: i32,
) -> async_graphql::Result<()> {
let obj = value.object()?;
let subscriber_id_value = obj.try_get(column_name)?;
let id = subscriber_id_value.i64()?;
if id == subscriber_id as i64 {
Ok(())
} else {
Err(async_graphql::Error::new("subscriber not match"))
}
}
/// Like `guard_data_object_accessor_with_subscriber_id`, but tolerant: a
/// null value or a missing `column_name` field is accepted (no claim made);
/// only an explicitly mismatching id is rejected.
fn guard_data_object_accessor_with_optional_subscriber_id(
    value: ValueAccessor<'_>,
    column_name: &str,
    subscriber_id: i32,
) -> async_graphql::Result<()> {
    if value.is_null() {
        return Ok(());
    }
    match value.object()?.get(column_name) {
        None => Ok(()),
        Some(field_value) => {
            if field_value.i64()? == subscriber_id as i64 {
                Ok(())
            } else {
                Err(async_graphql::Error::new("subscriber not match"))
            }
        }
    }
}
/// Require the filter object to contain `column_name: { eq: <subscriber_id> }`,
/// i.e. the caller may only address rows belonging to themselves. Every level
/// of the shape is mandatory (`try_get`).
fn guard_filter_object_accessor_with_subscriber_id(
    value: ValueAccessor<'_>,
    column_name: &str,
    subscriber_id: i32,
) -> async_graphql::Result<()> {
    let filter_object = value.object()?;
    let column_filter = filter_object.try_get(column_name)?.object()?;
    let eq_value = column_filter.try_get("eq")?.i64()?;
    if eq_value == subscriber_id as i64 {
        Ok(())
    } else {
        Err(async_graphql::Error::new("subscriber not match"))
    }
}
pub fn guard_entity_with_subscriber_id<T>(context: &BuilderContext, column: &T::Column) -> FnGuard
where
T: EntityTrait,
<T as EntityTrait>::Model: Sync,
{
let entity_key = get_entity_key::<T>(context);
let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
let column_key = get_column_key::<T>(context, column);
let column_name = Arc::new(context.entity_object.column_name.as_ref()(
&entity_key,
&column_key,
));
let entity_create_one_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name, context.entity_create_one_mutation.mutation_suffix
));
let entity_create_one_mutation_data_field_name =
Arc::new(context.entity_create_one_mutation.data_field.clone());
let entity_create_batch_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name,
context.entity_create_batch_mutation.mutation_suffix.clone()
));
let entity_create_batch_mutation_data_field_name =
Arc::new(context.entity_create_batch_mutation.data_field.clone());
let entity_delete_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name,
context.entity_delete_mutation.mutation_suffix.clone()
));
let entity_delete_mutation_filter_field_name =
Arc::new(context.entity_delete_mutation.filter_field.clone());
let entity_update_mutation_field_name = Arc::new(format!(
"{}{}",
entity_name, context.entity_update_mutation.mutation_suffix
));
let entity_update_mutation_filter_field_name =
Arc::new(context.entity_update_mutation.filter_field.clone());
let entity_update_mutation_data_field_name =
Arc::new(context.entity_update_mutation.data_field.clone());
let entity_query_field_name = Arc::new(entity_name);
let entity_query_filter_field_name = Arc::new(context.entity_query_field.filters.clone());
Box::new(move |context: &ResolverContext| -> GuardAction {
match context.ctx.data::<AuthUserInfo>() {
Ok(user_info) => {
let subscriber_id = user_info.subscriber_auth.subscriber_id;
let validation_result = match context.field().name() {
field if field == entity_create_one_mutation_field_name.as_str() => context
.args
.try_get(&entity_create_one_mutation_data_field_name)
.and_then(|data_value| {
guard_data_object_accessor_with_subscriber_id(
data_value,
&column_name,
subscriber_id,
)
})
.map_err(|inner_error| {
AuthError::from_graphql_subscribe_id_guard(
inner_error,
context,
&entity_create_one_mutation_data_field_name,
&column_name,
)
}),
field if field == entity_create_batch_mutation_field_name.as_str() => context
.args
.try_get(&entity_create_batch_mutation_data_field_name)
.and_then(|data_value| {
data_value.list().and_then(|data_list| {
data_list.iter().try_for_each(|data_item_value| {
guard_data_object_accessor_with_subscriber_id(
data_item_value,
&column_name,
subscriber_id,
)
})
})
})
.map_err(|inner_error| {
AuthError::from_graphql_subscribe_id_guard(
inner_error,
context,
&entity_create_batch_mutation_data_field_name,
&column_name,
)
}),
field if field == entity_delete_mutation_field_name.as_str() => context
.args
.try_get(&entity_delete_mutation_filter_field_name)
.and_then(|filter_value| {
guard_filter_object_accessor_with_subscriber_id(
filter_value,
&column_name,
subscriber_id,
)
})
.map_err(|inner_error| {
AuthError::from_graphql_subscribe_id_guard(
inner_error,
context,
&entity_delete_mutation_filter_field_name,
&column_name,
)
}),
field if field == entity_update_mutation_field_name.as_str() => context
.args
.try_get(&entity_update_mutation_filter_field_name)
.and_then(|filter_value| {
guard_filter_object_accessor_with_subscriber_id(
filter_value,
&column_name,
subscriber_id,
)
})
.map_err(|inner_error| {
AuthError::from_graphql_subscribe_id_guard(
inner_error,
context,
&entity_update_mutation_filter_field_name,
&column_name,
)
})
.and_then(|_| {
match context.args.get(&entity_update_mutation_data_field_name) {
Some(data_value) => {
guard_data_object_accessor_with_optional_subscriber_id(
data_value,
&column_name,
subscriber_id,
)
.map_err(|inner_error| {
AuthError::from_graphql_subscribe_id_guard(
inner_error,
context,
&entity_update_mutation_data_field_name,
&column_name,
)
})
}
None => Ok(()),
}
}),
field if field == entity_query_field_name.as_str() => context
.args
.try_get(&entity_query_filter_field_name)
.and_then(|filter_value| {
guard_filter_object_accessor_with_subscriber_id(
filter_value,
&column_name,
subscriber_id,
)
})
.map_err(|inner_error| {
AuthError::from_graphql_subscribe_id_guard(
inner_error,
context,
&entity_query_filter_field_name,
&column_name,
)
}),
field => Err(AuthError::from_graphql_subscribe_id_guard(
async_graphql::Error::new("unsupport graphql field"),
context,
field,
"",
)),
};
match validation_result {
Ok(_) => GuardAction::Allow,
Err(err) => GuardAction::Block(Some(err.to_string())),
}
}
Err(err) => GuardAction::Block(Some(err.message)),
}
})
}

View File

@ -1,5 +1,8 @@
pub mod config;
pub mod query_root;
pub mod guard;
pub mod schema_root;
pub mod service;
pub mod util;
pub mod filter;
pub use query_root::schema;
pub use schema_root::schema;

View File

@ -1,56 +0,0 @@
use async_graphql::dynamic::*;
use sea_orm::DatabaseConnection;
use seaography::{Builder, BuilderContext};
// Shared builder context for schema construction; currently all seaography
// defaults (no guards, filters, or name overrides).
lazy_static::lazy_static! { static ref CONTEXT : BuilderContext = {
BuilderContext {
..Default::default()
}
}; }
/// Builds the dynamic GraphQL schema over the application's SeaORM models.
///
/// * `database` - connection stored in the schema's data registry for resolvers.
/// * `depth` - optional query depth limit (unlimited when `None`).
/// * `complexity` - optional query complexity limit (unlimited when `None`).
///
/// Returns the finished schema, logging any build error before returning it.
pub fn schema(
    database: DatabaseConnection,
    depth: Option<usize>,
    complexity: Option<usize>,
) -> Result<Schema, SchemaError> {
    use crate::models::*;
    let mut builder = Builder::new(&CONTEXT, database.clone());
    // Register query/mutation roots for every entity module.
    seaography::register_entities!(
        builder,
        [
            bangumi,
            downloaders,
            downloads,
            episodes,
            subscribers,
            subscription_bangumi,
            subscription_episode,
            subscriptions
        ]
    );
    {
        // Expose the model enums as GraphQL enumeration types.
        builder.register_enumeration::<downloads::DownloadStatus>();
        builder.register_enumeration::<subscriptions::SubscriptionCategory>();
        builder.register_enumeration::<downloaders::DownloaderCategory>();
        builder.register_enumeration::<downloads::DownloadMime>();
    }
    let schema = builder.schema_builder();
    // Apply each optional limit only when configured.
    let schema = if let Some(depth) = depth {
        schema.limit_depth(depth)
    } else {
        schema
    };
    let schema = if let Some(complexity) = complexity {
        schema.limit_complexity(complexity)
    } else {
        schema
    };
    schema
        .data(database)
        .finish()
        .inspect_err(|e| tracing::error!(e = ?e))
}

View File

@ -0,0 +1,159 @@
use async_graphql::dynamic::*;
use once_cell::sync::OnceCell;
use sea_orm::{DatabaseConnection, EntityTrait, Iterable};
use seaography::{Builder, BuilderContext, FilterType, FilterTypesMapHelper};
use super::{
filter::{SUBSCRIBER_ID_FILTER_INFO, subscriber_id_condition_function},
util::{get_entity_column_key, get_entity_key},
};
use crate::graphql::{filter::init_custom_filter_info, guard::guard_entity_with_subscriber_id};
static CONTEXT: OnceCell<BuilderContext> = OnceCell::new();
/// Overrides the generated filter input type for one column of entity `T`
/// by writing into the context's overwrite table. Passing `None` removes
/// the column from the generated filter input entirely.
fn restrict_filter_input_for_entity<T>(
    context: &mut BuilderContext,
    column: &T::Column,
    filter_type: Option<FilterType>,
) where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let column_key = get_entity_column_key::<T>(context, column);
    context
        .filter_types
        .overwrites
        .insert(column_key, filter_type);
}
/// Wires subscriber-ownership enforcement into the builder context for
/// entity `T`: installs an entity guard, and forces the subscriber-id
/// `column` to use the custom subscriber-id filter type together with its
/// condition function.
fn restrict_subscriber_for_entity<T>(context: &mut BuilderContext, column: &T::Column)
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let column_key = get_entity_column_key::<T>(context, column);
    // Guard every query/mutation on this entity by the caller's subscriber id.
    let guard = guard_entity_with_subscriber_id::<T>(context, column);
    context.guards.entity_guards.insert(entity_key, guard);
    // Replace the column's generated filter input with the custom
    // subscriber-id filter type (must already be initialized).
    let custom_filter = FilterType::Custom(
        SUBSCRIBER_ID_FILTER_INFO.get().unwrap().type_name.clone(),
    );
    context
        .filter_types
        .overwrites
        .insert(column_key.clone(), Some(custom_filter));
    // Resolve that filter through the subscriber-id condition function.
    let condition = subscriber_id_condition_function::<T>(context, column);
    context
        .filter_types
        .condition_functions
        .insert(column_key, condition);
}
/// Builds the dynamic GraphQL schema with per-subscriber access control.
///
/// On first call, initializes a process-wide `BuilderContext` that guards
/// every entity by its subscriber-id column (for `subscribers` itself, by
/// `Id`) and hides all other `subscribers` columns from filter inputs.
///
/// * `database` - connection stored in the schema's data registry.
/// * `depth` / `complexity` - optional query limits (unlimited when `None`).
pub fn schema(
    database: DatabaseConnection,
    depth: Option<usize>,
    complexity: Option<usize>,
) -> Result<Schema, SchemaError> {
    use crate::models::*;
    // Must run before the context below references SUBSCRIBER_ID_FILTER_INFO.
    init_custom_filter_info();
    // Build the guarded context once; later calls reuse the cached value.
    let context = CONTEXT.get_or_init(|| {
        let mut context = BuilderContext::default();
        restrict_subscriber_for_entity::<bangumi::Entity>(
            &mut context,
            &bangumi::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<downloaders::Entity>(
            &mut context,
            &downloaders::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<downloads::Entity>(
            &mut context,
            &downloads::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<episodes::Entity>(
            &mut context,
            &episodes::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<subscriptions::Entity>(
            &mut context,
            &subscriptions::Column::SubscriberId,
        );
        // The subscribers table is owned by its own primary key.
        restrict_subscriber_for_entity::<subscribers::Entity>(
            &mut context,
            &subscribers::Column::Id,
        );
        restrict_subscriber_for_entity::<subscription_bangumi::Entity>(
            &mut context,
            &subscription_bangumi::Column::SubscriberId,
        );
        restrict_subscriber_for_entity::<subscription_episode::Entity>(
            &mut context,
            &subscription_episode::Column::SubscriberId,
        );
        // Disable filtering on every other subscribers column so callers
        // cannot probe other subscribers' data.
        for column in subscribers::Column::iter() {
            if !matches!(column, subscribers::Column::Id) {
                restrict_filter_input_for_entity::<subscribers::Entity>(
                    &mut context,
                    &column,
                    None,
                );
            }
        }
        context
    });
    let mut builder = Builder::new(context, database.clone());
    {
        // Register the custom subscriber-id filter input type with the schema.
        let filter_types_map_helper = FilterTypesMapHelper { context };
        builder.schema = builder.schema.register(
            filter_types_map_helper.generate_filter_input(SUBSCRIBER_ID_FILTER_INFO.get().unwrap()),
        );
    }
    {
        // `subscribers` is registered manually (not via the macro below) so
        // its relations and dataloaders are set up explicitly.
        builder.register_entity::<subscribers::Entity>(
            <subscribers::RelatedEntity as sea_orm::Iterable>::iter()
                .map(|rel| seaography::RelationBuilder::get_relation(&rel, builder.context))
                .collect(),
        );
        builder = builder.register_entity_dataloader_one_to_one(subscribers::Entity, tokio::spawn);
        builder = builder.register_entity_dataloader_one_to_many(subscribers::Entity, tokio::spawn);
    }
    // Register the remaining entity modules (note: no `subscribers` here).
    seaography::register_entities!(
        builder,
        [
            bangumi,
            downloaders,
            downloads,
            episodes,
            subscription_bangumi,
            subscription_episode,
            subscriptions
        ]
    );
    {
        // Expose the model enums as GraphQL enumeration types.
        builder.register_enumeration::<downloads::DownloadStatus>();
        builder.register_enumeration::<subscriptions::SubscriptionCategory>();
        builder.register_enumeration::<downloaders::DownloaderCategory>();
        builder.register_enumeration::<downloads::DownloadMime>();
    }
    let schema = builder.schema_builder();
    // Apply each optional limit only when configured.
    let schema = if let Some(depth) = depth {
        schema.limit_depth(depth)
    } else {
        schema
    };
    let schema = if let Some(complexity) = complexity {
        schema.limit_complexity(complexity)
    } else {
        schema
    };
    schema
        .data(database)
        // .extension(GraphqlAuthExtension)
        .finish()
        .inspect_err(|e| tracing::error!(e = ?e))
}

View File

@ -4,7 +4,7 @@ use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use sea_orm::DatabaseConnection;
use super::{config::AppGraphQLConfig, query_root};
use super::{config::AppGraphQLConfig, schema_root};
use crate::config::AppConfigExt;
static APP_GRAPHQL_SERVICE: OnceCell<AppGraphQLService> = OnceCell::new();
@ -16,7 +16,7 @@ pub struct AppGraphQLService {
impl AppGraphQLService {
pub fn new(config: AppGraphQLConfig, db: DatabaseConnection) -> Result<Self, SchemaError> {
let schema = query_root::schema(db, config.depth_limit, config.complexity_limit)?;
let schema = schema_root::schema(db, config.depth_limit, config.complexity_limit)?;
Ok(Self { schema })
}

View File

@ -0,0 +1,30 @@
use sea_orm::{EntityName, EntityTrait, IdenStatic};
use seaography::BuilderContext;
/// Returns the GraphQL entity key for `T`, derived from its table name
/// through the context's entity type-name mapping.
pub fn get_entity_key<T>(context: &BuilderContext) -> String
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let to_type_name = context.entity_object.type_name.as_ref();
    to_type_name(<T as EntityName>::table_name(&T::default()))
}
/// Returns the GraphQL column key of `column` within entity `T`, using the
/// context's column-name mapping.
pub fn get_column_key<T>(context: &BuilderContext, column: &T::Column) -> String
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity_key = get_entity_key::<T>(context);
    let to_column_name = context.entity_object.column_name.as_ref();
    to_column_name(&entity_key, column.as_str())
}
/// Returns the combined `<entity>.<column>` key used to index per-column
/// builder-context maps (filter overwrites, condition functions, ...).
pub fn get_entity_column_key<T>(context: &BuilderContext, column: &T::Column) -> String
where
    T: EntityTrait,
    <T as EntityTrait>::Model: Sync,
{
    let entity = get_entity_key::<T>(context);
    let column_name = get_column_key::<T>(context, column);
    format!("{}.{}", entity, column_name)
}

View File

@ -1,4 +1,9 @@
#![feature(duration_constructors, assert_matches, unboxed_closures)]
#![feature(
duration_constructors,
assert_matches,
unboxed_closures,
impl_trait_in_bindings
)]
pub mod app;
pub mod auth;

View File

@ -16,7 +16,6 @@ pub enum GeneralIds {
pub enum Subscribers {
Table,
Id,
Pid,
DisplayName,
DownloaderId,
BangumiConf,
@ -58,6 +57,7 @@ pub enum Bangumi {
pub enum SubscriptionBangumi {
Table,
Id,
SubscriberId,
SubscriptionId,
BangumiId,
}
@ -90,6 +90,7 @@ pub enum Episodes {
pub enum SubscriptionEpisode {
Table,
Id,
SubscriberId,
SubscriptionId,
EpisodeId,
}
@ -130,7 +131,6 @@ pub enum Auth {
Id,
Pid,
SubscriberId,
AvatarUrl,
AuthType,
}

View File

@ -24,7 +24,6 @@ impl MigrationTrait for Migration {
.create_table(
table_auto(Subscribers::Table)
.col(pk_auto(Subscribers::Id))
.col(string_len_uniq(Subscribers::Pid, 64))
.col(string(Subscribers::DisplayName))
.col(json_binary_null(Subscribers::BangumiConf))
.to_owned(),
@ -42,8 +41,8 @@ impl MigrationTrait for Migration {
.exec_stmt(
Query::insert()
.into_table(Subscribers::Table)
.columns([Subscribers::Pid, Subscribers::DisplayName])
.values_panic([SEED_SUBSCRIBER.into(), SEED_SUBSCRIBER.into()])
.columns([Subscribers::DisplayName])
.values_panic([SEED_SUBSCRIBER.into()])
.to_owned(),
)
.await?;
@ -159,6 +158,7 @@ impl MigrationTrait for Migration {
.create_table(
table_auto(SubscriptionBangumi::Table)
.col(pk_auto(SubscriptionBangumi::Id))
.col(integer(SubscriptionBangumi::SubscriberId))
.col(integer(SubscriptionBangumi::SubscriptionId))
.col(integer(SubscriptionBangumi::BangumiId))
.foreign_key(
@ -193,6 +193,17 @@ impl MigrationTrait for Migration {
)
.await?;
manager
.create_index(
Index::create()
.if_not_exists()
.name("index_subscription_bangumi_subscriber_id")
.table(SubscriptionBangumi::Table)
.col(SubscriptionBangumi::SubscriberId)
.to_owned(),
)
.await?;
manager
.create_table(
table_auto(Episodes::Table)
@ -268,6 +279,7 @@ impl MigrationTrait for Migration {
.col(pk_auto(SubscriptionEpisode::Id))
.col(integer(SubscriptionEpisode::SubscriptionId))
.col(integer(SubscriptionEpisode::EpisodeId))
.col(integer(SubscriptionEpisode::SubscriberId))
.foreign_key(
ForeignKey::create()
.name("fk_subscription_episode_subscription_id")
@ -300,10 +312,31 @@ impl MigrationTrait for Migration {
)
.await?;
manager
.create_index(
Index::create()
.if_not_exists()
.name("index_subscription_episode_subscriber_id")
.table(SubscriptionEpisode::Table)
.col(SubscriptionEpisode::SubscriberId)
.to_owned(),
)
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_index(
Index::drop()
.if_exists()
.name("index_subscription_episode_subscriber_id")
.table(SubscriptionBangumi::Table)
.to_owned(),
)
.await?;
manager
.drop_table(Table::drop().table(SubscriptionEpisode::Table).to_owned())
.await?;
@ -316,6 +349,16 @@ impl MigrationTrait for Migration {
.drop_table(Table::drop().table(Episodes::Table).to_owned())
.await?;
manager
.drop_index(
Index::drop()
.if_exists()
.name("index_subscription_bangumi_subscriber_id")
.table(SubscriptionBangumi::Table)
.to_owned(),
)
.await?;
manager
.drop_table(Table::drop().table(SubscriptionBangumi::Table).to_owned())
.await?;

View File

@ -2,13 +2,13 @@ use sea_orm_migration::{prelude::*, schema::*};
use crate::{
migrations::defs::{CustomSchemaManagerExt, Downloaders, GeneralIds, Subscribers},
models::{downloaders::DownloaderCategoryEnum, prelude::DownloaderCategory},
models::downloaders::{DownloaderCategory, DownloaderCategoryEnum},
};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait]
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
create_postgres_enum_for_active_enum!(

View File

@ -34,7 +34,6 @@ impl MigrationTrait for Migration {
AuthTypeEnum,
AuthType::iden_values(),
))
.col(string_null(Auth::AvatarUrl))
.col(integer(Auth::SubscriberId))
.foreign_key(
ForeignKey::create()
@ -66,6 +65,20 @@ impl MigrationTrait for Migration {
.create_postgres_auto_update_ts_trigger_for_col(Auth::Table, GeneralIds::UpdatedAt)
.await?;
let seed_subscriber_id = manager
.get_connection()
.query_one(
manager.get_database_backend().build(
Query::select()
.column(Subscribers::Id)
.from(Subscribers::Table)
.limit(1),
),
)
.await?
.ok_or_else(|| DbErr::RecordNotFound(String::from("seed subscriber not found")))?
.try_get_by_index::<i32>(0)?;
manager
.exec_stmt(
Query::insert()
@ -74,7 +87,7 @@ impl MigrationTrait for Migration {
.values_panic([
SEED_SUBSCRIBER.into(),
SimpleExpr::from(AuthType::Basic).as_enum(AuthTypeEnum),
1.into(),
seed_subscriber_id.into(),
])
.to_owned(),
)

View File

@ -5,6 +5,7 @@ pub use sea_orm_migration::prelude::*;
pub mod defs;
pub mod m20220101_000001_init;
pub mod m20240224_082543_add_downloads;
pub mod m20240225_060853_subscriber_add_downloader;
pub mod m20241231_000001_auth;
pub struct Migrator;
@ -15,6 +16,7 @@ impl MigratorTrait for Migrator {
vec![
Box::new(m20220101_000001_init::Migration),
Box::new(m20240224_082543_add_downloads::Migration),
Box::new(m20240225_060853_subscriber_add_downloader::Migration),
Box::new(m20241231_000001_auth::Migration),
]
}

View File

@ -1,7 +1,13 @@
use async_trait::async_trait;
use sea_orm::entity::prelude::*;
use loco_rs::{
app::AppContext,
model::{ModelError, ModelResult},
};
use sea_orm::{Set, TransactionTrait, entity::prelude::*};
use serde::{Deserialize, Serialize};
use super::subscribers::{self, SEED_SUBSCRIBER};
#[derive(
Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, DeriveDisplay, Serialize, Deserialize,
)]
@ -17,14 +23,16 @@ pub enum AuthType {
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, DeriveEntityModel)]
#[sea_orm(table_name = "auth")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTime,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
#[sea_orm(unique)]
pub pid: String,
pub subscriber_id: i32,
pub auth_type: AuthType,
pub avatar_url: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@ -47,3 +55,52 @@ impl Related<super::subscribers::Entity> for Entity {
#[async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl Model {
    /// Looks up an auth record by its provider-assigned `pid`.
    ///
    /// Returns `ModelError::EntityNotFound` when no row matches.
    pub async fn find_by_pid(ctx: &AppContext, pid: &str) -> ModelResult<Self> {
        let db = &ctx.db;
        let subscriber_auth = Entity::find()
            .filter(Column::Pid.eq(pid))
            .one(db)
            .await?
            .ok_or_else(|| ModelError::EntityNotFound)?;
        Ok(subscriber_auth)
    }
    /// Creates an OIDC auth record for subject `sub`.
    ///
    /// Reuses the seed (basic-auth) subscriber when one exists; otherwise a
    /// fresh subscriber row is created. All writes happen inside a single
    /// transaction that is committed before returning.
    pub async fn create_from_oidc(ctx: &AppContext, sub: String) -> ModelResult<Self> {
        let db = &ctx.db;
        let txn = db.begin().await?;
        // Prefer attaching the OIDC identity to the seed subscriber.
        let subscriber_id = if let Some(seed_subscriber_id) = Entity::find()
            .filter(
                Column::AuthType
                    .eq(AuthType::Basic)
                    .and(Column::Pid.eq(SEED_SUBSCRIBER)),
            )
            .one(&txn)
            .await?
            .map(|m| m.subscriber_id)
        {
            seed_subscriber_id
        } else {
            let new_subscriber = subscribers::ActiveModel {
                ..Default::default()
            };
            let new_subscriber: subscribers::Model = new_subscriber.save(&txn).await?.try_into()?;
            new_subscriber.id
        };
        let new_item = ActiveModel {
            pid: Set(sub),
            auth_type: Set(AuthType::Oidc),
            subscriber_id: Set(subscriber_id),
            ..Default::default()
        };
        let new_item: Model = new_item.save(&txn).await?.try_into()?;
        // Bug fix: the transaction was previously dropped without commit,
        // which rolls back every insert above in SeaORM.
        txn.commit().await?;
        Ok(new_item)
    }
}

View File

@ -1,7 +1,7 @@
use async_graphql::SimpleObject;
use async_trait::async_trait;
use loco_rs::app::AppContext;
use sea_orm::{entity::prelude::*, sea_query::OnConflict, ActiveValue, FromJsonQueryResult};
use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::OnConflict};
use serde::{Deserialize, Serialize};
use super::subscription_bangumi;
@ -9,7 +9,6 @@ use super::subscription_bangumi;
#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
)]
#[graphql(name = "BangumiFilter")]
pub struct BangumiFilter {
pub name: Option<Vec<String>>,
pub group: Option<Vec<String>>,
@ -18,7 +17,6 @@ pub struct BangumiFilter {
#[derive(
Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult, SimpleObject,
)]
#[graphql(name = "BangumiExtra")]
pub struct BangumiExtra {
pub name_zh: Option<String>,
pub s_name_zh: Option<String>,
@ -30,14 +28,14 @@ pub struct BangumiExtra {
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
#[sea_orm(table_name = "bangumi")]
#[graphql(name = "Bangumi")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTime,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub mikan_bangumi_id: Option<String>,
#[graphql(default_with = "default_subscriber_id")]
pub subscriber_id: i32,
pub display_name: String,
pub raw_name: String,

View File

@ -22,9 +22,9 @@ pub enum DownloaderCategory {
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloaders")]
pub struct Model {
#[sea_orm(column_type = "Timestamp")]
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTime,
#[sea_orm(column_type = "Timestamp")]
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,

View File

@ -38,7 +38,9 @@ pub enum DownloadMime {
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "downloads")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTime,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,

View File

@ -2,14 +2,14 @@ use std::sync::Arc;
use async_trait::async_trait;
use loco_rs::app::AppContext;
use sea_orm::{entity::prelude::*, sea_query::OnConflict, ActiveValue, FromJsonQueryResult};
use sea_orm::{ActiveValue, FromJsonQueryResult, entity::prelude::*, sea_query::OnConflict};
use serde::{Deserialize, Serialize};
use super::{bangumi, query::InsertManyReturningExt, subscription_episode};
use crate::{
app::AppContextExt,
extract::{
mikan::{build_mikan_episode_homepage, MikanEpisodeMeta},
mikan::{MikanEpisodeMeta, build_mikan_episode_homepage},
rawname::parse_episode_meta_from_raw_name,
},
};
@ -27,7 +27,9 @@ pub struct EpisodeExtra {
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "episodes")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTime,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
@ -135,6 +137,7 @@ pub struct MikanEpsiodeCreation {
impl Model {
pub async fn add_episodes(
ctx: &AppContext,
subscriber_id: i32,
subscription_id: i32,
creations: impl IntoIterator<Item = MikanEpsiodeCreation>,
) -> color_eyre::eyre::Result<()> {
@ -162,6 +165,7 @@ impl Model {
let insert_subscription_episode_links = inserted_episodes.into_iter().map(|episode_id| {
subscription_episode::ActiveModel::from_subscription_and_episode(
subscriber_id,
subscription_id,
episode_id,
)
@ -197,7 +201,7 @@ impl ActiveModel {
.ok()
.unwrap_or_default();
let homepage = build_mikan_episode_homepage(
ctx.get_mikan_client().base_url(),
ctx.get_mikan_client().base_url().clone(),
&item.mikan_episode_id,
)?;

View File

@ -4,7 +4,7 @@ use loco_rs::{
app::AppContext,
model::{ModelError, ModelResult},
};
use sea_orm::{entity::prelude::*, ActiveValue, FromJsonQueryResult, TransactionTrait};
use sea_orm::{ActiveValue, FromJsonQueryResult, TransactionTrait, entity::prelude::*};
use serde::{Deserialize, Serialize};
pub const SEED_SUBSCRIBER: &str = "konobangu";
@ -16,15 +16,15 @@ pub struct SubscriberBangumiConfig {
pub leading_group_tag: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject)]
#[sea_orm(table_name = "subscribers")]
pub struct Model {
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTime,
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
#[sea_orm(unique)]
pub pid: String,
pub display_name: String,
pub bangumi_conf: Option<SubscriberBangumiConfig>,
}
@ -91,59 +91,22 @@ pub struct SubscriberIdParams {
}
#[async_trait]
impl ActiveModelBehavior for ActiveModel {
async fn before_save<C>(self, _db: &C, insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
if insert {
let mut this = self;
this.pid = ActiveValue::Set(Uuid::new_v4().to_string());
Ok(this)
} else {
Ok(self)
}
}
}
impl ActiveModelBehavior for ActiveModel {}
impl Model {
pub async fn find_by_pid(ctx: &AppContext, pid: &str) -> ModelResult<Self> {
let db = &ctx.db;
let parse_uuid = Uuid::parse_str(pid).map_err(|e| ModelError::Any(e.into()))?;
let subscriber = Entity::find()
.filter(Column::Pid.eq(parse_uuid))
.one(db)
.await?;
subscriber.ok_or_else(|| ModelError::EntityNotFound)
pub async fn find_seed_subscriber_id(ctx: &AppContext) -> ModelResult<i32> {
let subscriber_auth = crate::models::auth::Model::find_by_pid(ctx, SEED_SUBSCRIBER).await?;
Ok(subscriber_auth.subscriber_id)
}
pub async fn find_by_id(ctx: &AppContext, id: i32) -> ModelResult<Self> {
let db = &ctx.db;
let subscriber = Entity::find_by_id(id).one(db).await?;
subscriber.ok_or_else(|| ModelError::EntityNotFound)
}
pub async fn find_pid_by_id_with_cache(
ctx: &AppContext,
id: i32,
) -> color_eyre::eyre::Result<String> {
let db = &ctx.db;
let cache = &ctx.cache;
let pid = cache
.get_or_insert(&format!("subscriber-id2pid::{}", id), async {
let subscriber = Entity::find_by_id(id)
.one(db)
.await?
.ok_or_else(|| loco_rs::Error::string(&format!("No such pid for id {}", id)))?;
Ok(subscriber.pid)
})
.await?;
Ok(pid)
}
pub async fn find_root(ctx: &AppContext) -> ModelResult<Self> {
Self::find_by_pid(ctx, SEED_SUBSCRIBER).await
let subscriber = Entity::find_by_id(id)
.one(db)
.await?
.ok_or_else(|| ModelError::EntityNotFound)?;
Ok(subscriber)
}
pub async fn create_root(ctx: &AppContext) -> ModelResult<Self> {
@ -152,7 +115,6 @@ impl Model {
let user = ActiveModel {
display_name: ActiveValue::set(SEED_SUBSCRIBER.to_string()),
pid: ActiveValue::set(SEED_SUBSCRIBER.to_string()),
..Default::default()
}
.insert(&txn)

View File

@ -1,5 +1,5 @@
use async_trait::async_trait;
use sea_orm::{entity::prelude::*, ActiveValue};
use sea_orm::{ActiveValue, entity::prelude::*};
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
@ -7,6 +7,7 @@ use serde::{Deserialize, Serialize};
pub struct Model {
#[sea_orm(primary_key)]
pub id: i32,
pub subscriber_id: i32,
pub subscription_id: i32,
pub bangumi_id: i32,
}
@ -55,8 +56,13 @@ pub enum RelatedEntity {
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {
pub fn from_subscription_and_bangumi(subscription_id: i32, bangumi_id: i32) -> Self {
pub fn from_subscription_and_bangumi(
subscriber_id: i32,
subscription_id: i32,
bangumi_id: i32,
) -> Self {
Self {
subscriber_id: ActiveValue::Set(subscriber_id),
subscription_id: ActiveValue::Set(subscription_id),
bangumi_id: ActiveValue::Set(bangumi_id),
..Default::default()

View File

@ -1,5 +1,5 @@
use async_trait::async_trait;
use sea_orm::{entity::prelude::*, ActiveValue};
use sea_orm::{ActiveValue, entity::prelude::*};
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
@ -7,6 +7,7 @@ use serde::{Deserialize, Serialize};
pub struct Model {
#[sea_orm(primary_key)]
pub id: i32,
pub subscriber_id: i32,
pub subscription_id: i32,
pub episode_id: i32,
}
@ -55,8 +56,13 @@ pub enum RelatedEntity {
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {
pub fn from_subscription_and_episode(subscription_id: i32, episode_id: i32) -> Self {
pub fn from_subscription_and_episode(
subscriber_id: i32,
subscription_id: i32,
episode_id: i32,
) -> Self {
Self {
subscriber_id: ActiveValue::Set(subscriber_id),
subscription_id: ActiveValue::Set(subscription_id),
episode_id: ActiveValue::Set(episode_id),
..Default::default()

View File

@ -3,7 +3,7 @@ use std::{collections::HashSet, sync::Arc};
use async_trait::async_trait;
use itertools::Itertools;
use loco_rs::app::AppContext;
use sea_orm::{entity::prelude::*, ActiveValue};
use sea_orm::{ActiveValue, entity::prelude::*};
use serde::{Deserialize, Serialize};
use super::{bangumi, episodes, query::filter_values_in};
@ -12,11 +12,11 @@ use crate::{
extract::{
mikan::{
build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link,
parse_mikan_bangumi_meta_from_mikan_homepage,
parse_mikan_episode_meta_from_mikan_homepage, parse_mikan_rss_channel_from_rss_link,
web_parser::{
parse_mikan_bangumi_poster_from_origin_poster_src_with_cache,
MikanBangumiPosterMeta,
extract_mikan_bangumi_meta_from_bangumi_homepage,
extract_mikan_episode_meta_from_episode_homepage,
parse_mikan_rss_channel_from_rss_link,
web_extract::{
MikanBangumiPosterMeta, extract_mikan_bangumi_poster_meta_from_src_with_cache,
},
},
rawname::extract_season_from_title_body,
@ -43,9 +43,9 @@ pub enum SubscriptionCategory {
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
#[sea_orm(column_type = "Timestamp")]
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub created_at: DateTime,
#[sea_orm(column_type = "Timestamp")]
#[sea_orm(default_expr = "Expr::current_timestamp()")]
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
@ -256,7 +256,7 @@ impl Model {
let mut new_metas = vec![];
for new_rss_item in new_rss_items.iter() {
new_metas.push(
parse_mikan_episode_meta_from_mikan_homepage(
extract_mikan_episode_meta_from_episode_homepage(
Some(mikan_client),
new_rss_item.homepage.clone(),
)
@ -272,12 +272,12 @@ impl Model {
{
let mikan_base_url = ctx.get_mikan_client().base_url();
let bgm_homepage = build_mikan_bangumi_homepage(
mikan_base_url,
mikan_base_url.clone(),
&mikan_bangumi_id,
Some(&mikan_fansub_id),
)?;
let bgm_rss_link = build_mikan_bangumi_rss_link(
mikan_base_url,
mikan_base_url.clone(),
&mikan_bangumi_id,
Some(&mikan_fansub_id),
)?;
@ -289,7 +289,7 @@ impl Model {
mikan_bangumi_id.to_string(),
mikan_fansub_id.to_string(),
async |am| -> color_eyre::eyre::Result<()> {
let bgm_meta = parse_mikan_bangumi_meta_from_mikan_homepage(
let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
Some(mikan_client),
bgm_homepage.clone(),
)
@ -306,9 +306,9 @@ impl Model {
am.fansub = ActiveValue::Set(bgm_meta.fansub);
if let Some(origin_poster_src) = bgm_meta.origin_poster_src {
if let MikanBangumiPosterMeta {
poster_src: Some(poster_src),
..
} = parse_mikan_bangumi_poster_from_origin_poster_src_with_cache(
poster_src: Some(poster_src),
..
} = extract_mikan_bangumi_poster_meta_from_src_with_cache(
ctx,
origin_poster_src,
self.subscriber_id,
@ -318,13 +318,14 @@ impl Model {
am.poster_link = ActiveValue::Set(Some(poster_src))
}
}
Ok(())
Ok(())
},
)
.await?,
);
episodes::Model::add_episodes(
ctx,
self.subscriber_id,
self.id,
new_ep_metas.into_iter().map(|item| MikanEpsiodeCreation {
episode: item,

View File

@ -0,0 +1,17 @@
use color_eyre::eyre;
use reqwest::IntoUrl;
use crate::{
extract::mikan::{AppMikanClient, AppMikanConfig},
fetch::HttpClientConfig,
};
pub fn build_testing_mikan_client(base_mikan_url: impl IntoUrl) -> eyre::Result<AppMikanClient> {
let mikan_client = AppMikanClient::new(AppMikanConfig {
http_client: HttpClientConfig {
..Default::default()
},
base_url: base_mikan_url.into_url()?,
})?;
Ok(mikan_client)
}

View File

@ -1,2 +1,4 @@
pub mod mikan;
#[cfg(feature = "testcontainers")]
pub mod testcontainers;
pub mod tracing;

View File

@ -0,0 +1,12 @@
use tracing::Level;
use tracing_subscriber::EnvFilter;
/// Installs a fmt tracing subscriber for tests, filtered so that only this
/// crate's events at `level` or above are emitted.
pub fn init_testing_tracing(level: Level) {
    let directive = format!(
        "{}[]={}",
        env!("CARGO_PKG_NAME"),
        level.as_str().to_lowercase()
    );
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::new(directive))
        .init();
}

View File

@ -3,17 +3,11 @@ use serde::{Deserialize, Serialize};
use crate::models::subscribers;
#[derive(Debug, Deserialize, Serialize)]
pub struct CurrentResponse {
pub pid: String,
pub display_name: String,
}
pub struct CurrentResponse {}
impl CurrentResponse {
#[must_use]
pub fn new(user: &subscribers::Model) -> Self {
Self {
pid: user.pid.to_string(),
display_name: user.display_name.to_string(),
}
pub fn new(_user: &subscribers::Model) -> Self {
Self {}
}
}

View File

@ -1,16 +1,16 @@
use insta::assert_debug_snapshot;
use loco_rs::testing;
use recorder::{app::App, models::subscribers::Model};
// use insta::assert_debug_snapshot;
// use loco_rs::testing;
// use recorder::{app::App, models::subscribers::Model};
use serial_test::serial;
macro_rules! configure_insta {
($($expr:expr),*) => {
let mut settings = insta::Settings::clone_current();
settings.set_prepend_module_to_snapshot(false);
settings.set_snapshot_suffix("users");
let _guard = settings.bind_to_scope();
};
}
// macro_rules! configure_insta {
// ($($expr:expr),*) => {
// let mut settings = insta::Settings::clone_current();
// settings.set_prepend_module_to_snapshot(false);
// settings.set_snapshot_suffix("users");
// let _guard = settings.bind_to_scope();
// };
// }
#[tokio::test]
#[serial]

View File

@ -4,25 +4,23 @@ use loco_rs::testing;
use recorder::app::App;
use serial_test::serial;
// TODO: see how to dedup / extract this to app-local test utils
// not to framework, because that would require a runtime dep on insta
// macro_rules! configure_insta {
// ($($expr:expr),*) => {
// let mut settings = insta::Settings::clone_current();
// settings.set_prepend_module_to_snapshot(false);
// settings.set_snapshot_suffix("user_request");
// let _guard = settings.bind_to_scope();
// };
// }
macro_rules! configure_insta {
($($expr:expr),*) => {
let mut settings = insta::Settings::clone_current();
settings.set_prepend_module_to_snapshot(false);
settings.set_snapshot_suffix("user_request");
let _guard = settings.bind_to_scope();
};
}
#[tokio::test]
#[serial]
async fn can_get_current_user() {
// configure_insta!();
//
configure_insta!();
// testing::request::<App, _, _>(|request, _ctx| async move {
// let response = request.get("/api/user/current").await;
//
// with_settings!({
// filters => testing::cleanup_user_model()
// }, {

Binary file not shown.

After

Width:  |  Height:  |  Size: 240 KiB

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,473 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="keywords" content="新番,动漫,动漫下載,新番下载,animation,bangumi,动画,蜜柑计划,Mikan Project" />
<meta name="description" content="蜜柑计划:新一代的动漫下载站" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- 若用户有Google Chrome Frame,那么ie浏览时让IE使用chrome内核 -->
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" />
<!-- 若是双核浏览器,默认webkit渲染(chrome) -->
<meta name="renderer" content="webkit">
<title>[&#x55B5;&#x840C;&#x5976;&#x8336;&#x5C4B;&amp;LoliHouse] &#x846C;&#x9001;&#x7684;&#x8299;&#x8389;&#x83B2; /
Sousou no Frieren - 23 [WebRip 1080p HEVC-10bit AAC][&#x7B80;&#x7E41;&#x65E5;&#x5185;&#x5C01;&#x5B57;&#x5E55;] -
Mikan Project</title>
<!-- here put import css lib -->
<link rel="stylesheet"
href="/lib/bootstrap/dist/css/bootstrap.min.css?v=7s5uDGW3AHqw6xtJmNNtr-OBRJUlgkNJEo78P4b0yRw" />
<link rel="stylesheet"
href="/lib/font-awesome/css/font-awesome.min.css?v=3dkvEK0WLHRJ7_Csr0BZjAWxERc5WH7bdeUya2aXxdU" />
<link rel="stylesheet" href="/css/thirdparty.min.css?v=c2SZy6n-55iljz60XCAALXejEZvjc43kgwamU5DAYUU" />
<link rel="stylesheet" href="/css/animate.min.css?v=w_eXqGX0NdMPQ0LZNhdQ8B-DQMYAxelvLoIP39dzmus" />
<link rel="stylesheet" href="/css/mikan.min.css?v=aupBMgBgKRB5chTb5fl8lvHpN3OqX67_gKg3lXZewRw" />
<script src="/lib/jquery/dist/jquery.min.js?v=BbhdlvQf_xTY9gja0Dq3HiwQF8LaCRTXxZKRutelT44"></script>
<script src="/lib/bootstrap/dist/js/bootstrap.min.js?v=KXn5puMvxCw-dAYznun-drMdG1IFl3agK0p_pqT9KAo"></script>
<script src="/js/thirdparty.min.js?v=NsK_w5fw7Nm4ZPm4eZDgsivasZNgT6ArhIjmj-bRnR0"></script>
<script src="/js/darkreader.min.js?v=Lr_8XODLEDSPtT6LqaeLKzREs4jocJUzV8HvQPItIic"></script>
<script src="/js/ScrollMagic.min.js?v=1xuIM3UJWEZX_wWN9zrA8W7CWukfsMaEqb759CeHo3U"></script>
<script src="/js/jquery.ScrollMagic.min.js?v=SyygQh9gWWfvyS13QwI0SKGAQyHDachlaigiK4X59iw"></script>
<link rel="icon" href="/images/favicon.ico?v=2" />
<link rel="apple-touch-icon" href="\Images\apple-touch-icon.png">
<link rel="apple-touch-icon" sizes="152x152" href="\Images\apple-touch-icon-152x152.png">
<link rel="apple-touch-icon" sizes="180x180" href="\Images\apple-touch-icon-180x180.png">
<link rel="apple-touch-icon" sizes="144x144" href="\Images\apple-touch-icon-144x144.png">
<script>
(function (i, s, o, g, r, a, m) {
i['GoogleAnalyticsObject'] = r; i[r] = i[r] || function () {
(i[r].q = i[r].q || []).push(arguments)
}, i[r].l = 1 * new Date(); a = s.createElement(o),
m = s.getElementsByTagName(o)[0]; a.async = 1; a.src = g; m.parentNode.insertBefore(a, m)
})(window, document, 'script', '//www.google-analytics.com/analytics.js', 'ga');
ga('create', 'UA-8911610-8', 'auto');
ga('send', 'pageview');
</script>
</head>
<body class="main">
<div id="sk-header" class="hidden-xs hidden-sm">
<div id="sk-top-nav" class="container">
<a id="logo" href="/" style="width:205px;"><img id="mikan-pic" src="/images/mikan-pic.png" /><img
src="/images/mikan-text.svg" style="height:30px;" /></a>
<div id="nav-list">
<ul class="list-inline nav-ul">
<li class="">
<div class="sk-col"><a href="/"><i class="fa fa-home fa-lg"></i>主页</a></div>
</li>
<li class="">
<div class="sk-col"><a href="/Home/MyBangumi"><i class="fa fa-rss fa-lg"></i>订阅</a></div>
</li>
<li class="">
<div class="sk-col"><a href="/Home/Classic"><i class="fa fa-slack fa-lg"></i>列表</a></div>
</li>
<li class="">
<div class="sk-col"><a href="/Home/Publish"><i class="fa fa-pencil-square-o fa-lg"></i>发布</a>
</div>
</li>
</ul>
</div>
<div class="search-form">
<form method="get" action="/Home/Search">
<div class="form-group has-feedback">
<label for="search" class="sr-only">搜索</label>
<input type="text" class="form-control input-sm" name="searchstr" id="header-search"
placeholder="搜索">
<span class="glyphicon glyphicon-search form-control-feedback"></span>
</div>
</form>
</div>
<section id="login">
<div id="user-welcome" class="hidden-sm hidden-xs">
<div id="user-name">
<div class="text-right">testuser</div>
<div class="w-other-c text-right"><a href="/Account/Manage" style="color: #47c1c5;">账号设置</a>
</div>
</div>
<div id="head-cir">
<a href="/Account/Manage"><img id="head-pic" src="/images/Avatar/none.gif" /></a>
</div>
<form action="/Account/Logout" id="logoutForm" method="post"> <a
href="javascript:document.getElementById('logoutForm').submit()" id="user-logout"><img
src="/images/logout_normal.png" />&nbsp;&nbsp;退出</a>
<input name="__RequestVerificationToken" type="hidden"
value="CfDJ8MyNMqFNaC9JmJW13PvY-93KUsLhEERSkyq42lFSAJwcYqoOWYU2p5zHR6sNxZiW0yVxlm0_ZDBSsO96b-i4SCtXHaFUIq9Q2wyrW1FMKQWBOnqxa2mjUZ0E45xioOsLwst5PrezZO-Y5VKiPkrlDdAyLHyTwQ3HZeoHrqpQ88tG1lroVWSKgS4nRtTSthj-gg" />
</form>
</div>
<div class="hidden-lg hidden-md">
<div class="m-tool-title">
Mikan Project
</div>
<div style="text-align: center;margin-top: 2rem;" class="m-head-cir">
<img class="m-head-pic" src="/images/Avatar/none.gif" />
</div>
<div id="user-name">
<div class="m-head-welcometext">testuser 欢迎回来!</div>
</div>
<div class="m-head-welcometext">
<form action="/Account/Logout" id="mobileLogoutForm" method="post"> <a
href="javascript:document.getElementById('logoutForm').submit()" id="user-logout"><img
src="/images/logout_normal.png" />&nbsp;&nbsp;退出登录</a>
<input name="__RequestVerificationToken" type="hidden"
value="CfDJ8MyNMqFNaC9JmJW13PvY-93KUsLhEERSkyq42lFSAJwcYqoOWYU2p5zHR6sNxZiW0yVxlm0_ZDBSsO96b-i4SCtXHaFUIq9Q2wyrW1FMKQWBOnqxa2mjUZ0E45xioOsLwst5PrezZO-Y5VKiPkrlDdAyLHyTwQ3HZeoHrqpQ88tG1lroVWSKgS4nRtTSthj-gg" />
</form>
</div>
</div>
<style>
.num-node {
display: none;
}
</style>
<script>
AdvancedSubscriptionEnabled = true;
</script>
</section>
</div>
<div class="ribbon">
<span class="ribbon-color1"></span>
<span class="ribbon-color2"></span>
<span class="ribbon-color3"></span>
<span class="ribbon-color4"></span>
<span class="ribbon-color5"></span>
<span class="ribbon-color6"></span>
<span class="ribbon-color7"></span>
</div>
</div>
<div class="m-home-nav hidden-lg hidden-md" id="sk-mobile-header">
<div class="m-home-tool-left clickable" data-toggle="modal" data-target="#modal-nav">
<i class="fa fa-bars" aria-hidden="true"></i>
</div>
<div class="m-home-tool-left"></div>
<div style="text-align: center; height:100%;flex:1;">
<a href="/" style="text-decoration:none">
<img src="/images/mikan-pic.png" style="height: 3rem;margin-top: 0.5rem;">
<img src="/images/mikan-text.png" style="height: 1.5rem;margin-top: 0.5rem;">
</a>
</div>
<div class="m-home-tool-right clickable" data-toggle="modal" data-target="#modal-login">
<i class="fa fa-user" aria-hidden="true" style="margin-right: 1rem;"></i>
</div>
<div class="m-home-tool-right clickable" onclick="ShowNavSearch()">
<i class="fa fa-search" aria-hidden="true"></i>
</div>
</div>
<div class="m-nav-search" style="width: 100%;">
<div style="flex: 1;">
<form method="get" action="/Home/Search">
<div class="input-group">
<span class="input-group-addon" id="sizing-addon1" style="border: none;background-color: white;">
<i class="fa fa-search" aria-hidden="true"></i>
</span>
<input type="text" class="form-control" placeholder="搜索" name="searchstr"
aria-describedby="sizing-addon1" style="border: none;font-size:16px;">
</div>
</form>
</div>
<div style="width: 4rem;" onclick="HideNavSearch()">
<span style="font-size: 1.25rem;">取消</span>
</div>
</div>
<meta name="robots" content="noindex">
<div id="sk-container" class="container">
<div class="pull-left leftbar-container">
<img src="/images/subscribed-badge.svg" class="subscribed-badge" style="" />
<div class="bangumi-poster div-hover"
style="background-image: url('/images/Bangumi/202309/5ce9fed1.jpg?width=400&height=560&format=webp');"
onclick="window.open('/Home/Bangumi/3141#370', '_blank');"></div>
<p class="bangumi-title"><a target="_blank" class="w-other-c" style="color:#555"
href="/Home/Bangumi/3141#370">&#x846C;&#x9001;&#x7684;&#x8299;&#x8389;&#x83B2;</a> <a
href="/RSS/Bangumi?bangumiId=3141&subgroupid=370" class="mikan-rss" data-placement="bottom"
data-toggle="tooltip" data-original-title="RSS" target="_blank"><i class="fa fa-rss-square"></i></a>
</p>
<p class="bangumi-info">
字幕组:<a class="magnet-link-wrap" href="/Home/PublishGroup/223" target="_blank">LoliHouse</a>
</p>
<p class="bangumi-info">发布日期2024/02/22 19:14</p>
<p class="bangumi-info">文件大小573.95 MB</p>
<div id="leftbar-nav-anchor"></div>
<div class="leftbar-nav">
<a class="btn episode-btn"
href="/Download/20240222/475184dce83ea2b82902592a5ac3343f6d54b36a.torrent">下载种子</a>
<a class="btn episode-btn"
href="magnet:?xt=urn:btih:475184dce83ea2b82902592a5ac3343f6d54b36a&amp;tr=http%3a%2f%2ft.nyaatracker.com%2fannounce&amp;tr=http%3a%2f%2ftracker.kamigami.org%3a2710%2fannounce&amp;tr=http%3a%2f%2fshare.camoe.cn%3a8080%2fannounce&amp;tr=http%3a%2f%2fopentracker.acgnx.se%2fannounce&amp;tr=http%3a%2f%2fanidex.moe%3a6969%2fannounce&amp;tr=http%3a%2f%2ft.acg.rip%3a6699%2fannounce&amp;tr=https%3a%2f%2ftr.bangumi.moe%3a9696%2fannounce&amp;tr=udp%3a%2f%2ftr.bangumi.moe%3a6969%2fannounce&amp;tr=http%3a%2f%2fopen.acgtracker.com%3a1096%2fannounce&amp;tr=udp%3a%2f%2ftracker.opentrackr.org%3a1337%2fannounce">磁力链接</a>
<a class="btn episode-btn"
href="https://mypikpak.com/drive/url-checker?url=magnet:?xt.1=urn:btih:475184dce83ea2b82902592a5ac3343f6d54b36a">在线播放</a>
<button class="btn episode-btn js-subscribe_bangumi_page active" data-bangumiid="3141"
data-subtitlegroupid="370">取消番组订阅</button>
</div>
</div>
<div class="central-container" style="min-height: 532px;">
<div class="episode-header" style="">
<p class="episode-title">[&#x55B5;&#x840C;&#x5976;&#x8336;&#x5C4B;&amp;LoliHouse]
&#x846C;&#x9001;&#x7684;&#x8299;&#x8389;&#x83B2; / Sousou no Frieren - 23 [WebRip 1080p HEVC-10bit
AAC][&#x7B80;&#x7E41;&#x65E5;&#x5185;&#x5C01;&#x5B57;&#x5E55;] [573.95 MB]</p>
</div>
<br />
<div style="padding-bottom:20px" class="episode-desc">
<div style="margin-top: -10px; margin-bottom: 10px;">
<div style="width:100%; margin-right: auto; margin-left: auto;" class="hidden-xs hidden-sm">
<a href="https://shop119340084.taobao.com/?mm_sycmid=1_150417_dba461f2e2f73a9ea2a8fa11f33a1aee"
onclick="ga('send', 'event', 'sswj_lg', 'clicked', 'ad');">
<img src="/images/SSWJ/sswj6_lg.jpg"
style='height: 100%; width: 100%; object-fit: contain' />
</a>
</div>
<div style="width:100%; margin-right: auto; margin-left: auto;" class="hidden-lg hidden-md">
<a href="https://m.tb.cn/h.g0X5kru9wgYTRsp?mm_sycmid=1_150416_5914d148315f48d5297c751b84bac595"
onclick="ga('send', 'event', 'sswj_sm', 'clicked', 'ad');">
<img src="/images/SSWJ/sswj6_sm.jpg"
style='height: 100%; width: 100%; object-fit: contain' />
</a>
</div>
</div>
<p><img src="https://s2.loli.net/2023/10/04/2YE8DWOANHUxJKf.png" style="width:800px;height:1131px"></p>
<p><strong>葬送的芙莉莲 / Sousou no Frieren<br></strong></p>
<p><strong>字幕:喵萌奶茶屋<br></strong></p>
<p><strong>脚本TauZolver<br></strong></p>
<p><strong>压制Kotachi<br></strong></p>
<p><strong>本片与喵萌奶茶屋合作,感谢字幕组的辛勤劳动。<br></strong></p>
<hr>
<p><strong>为了顺利地观看我们的作品,推荐大家使用以下播放器:</strong></p>
<p><strong>Windows<a href="https://mpv.io/" target="_blank">mpv</a><a
href="https://vcb-s.com/archives/7594" target="_blank">教程</a></strong></p>
<p><strong>macOS<a href="https://iina.io/" target="_blank">IINA</a></strong></p>
<p><strong>iOS/Android</strong><a href="https://www.videolan.org/vlc/" target="_blank"><strong>VLC
media player<br></strong></a></p>
<hr>
<p><strong><a
href="https://share.dmhy.org/topics/view/599634_LoliHouse_LoliHouse_5th_Anniversary_Announcement.html">点击查看LoliHouse五周年纪念公告附往年全部礼包</a><br></strong>
</p>
<hr><strong>人人为我,我为人人,为了各位观众能快速下载,请使用 uTorrent / qBittorrent 等正规 BT 软件下载,并保持开机上传,谢谢~</strong>
</div>
</div>
<a href="#0" class="cd-top cd-top-btn">Top</a>
</div>
<style>
img {
max-width: 937px;
}
.episode-title {
color: #555;
font-size: 12px;
font-weight: bold;
margin-bottom: 8px;
margin-top: 10px;
}
.episode-header {
max-height: 45px;
border-bottom: 2px solid #e8e8e8;
}
.episode-btn {
background-color: #c8d2d7;
border-radius: 3px;
color: #fff;
font-weight: bold;
height: 35px;
margin-top: 10px;
width: 100%;
}
.episode-btn:hover {
background-color: #61ccd1;
color: #fff;
}
.div-hover {
cursor: pointer;
}
</style>
<div class="modal modal-fullscreen fade" id="modal-nav" tabindex="-1" role="dialog" aria-labelledby="myModalLabel"
aria-hidden="true" style="background-color:#3bc0c3;">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-body" style="margin: auto;width:100%;">
<div class="m-tool">
<span class="m-close clickable"><i class="fa fa-times" aria-hidden="true" data-toggle="modal"
data-target="#modal-nav"></i></span>
<div class="m-tool-toolbar">
<img src="/images/mikan-pic.png" style="width: 3rem;">
<img src="/images/mikan-text.png" style="width: 7rem;">
</div>
<div class="m-tool-list">
<ul>
<li><a href="/" class="link">主页</a></li>
<li class="m-tool-search-change"><a href="/Home/MyBangumi" class="link">订阅</a></li>
<li onclick="tool.clickSearch()" class="m-tool-search-change">
<i class="fa fa-search" aria-hidden="true"></i>&nbsp;&nbsp;搜索站内
</li>
<li class="m-tool-search-input">
<form method="get" action="/Home/Search">
<div style="display: flex;height: 100%;">
<input type="text" class="form-control" name="searchstr"
style="font-size:16px;" />
<span style="width: 5rem;" onclick="tool.resetSearch()">取消</span>
</div>
</form>
</li>
</ul>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="modal modal-fullscreen fade" id="modal-login" tabindex="-1" role="dialog" aria-labelledby="myModalLabel"
aria-hidden="true" style="background-color:#edf1f2;">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-body" style="margin: auto;width:100%;height:85vh;">
<div class="m-login">
<span class="m-left clickable"><i class="fa fa-angle-left" aria-hidden="true"
data-toggle="modal" data-target="#modal-login"></i></span>
<section id="login">
<div id="user-welcome" class="hidden-sm hidden-xs">
<div id="user-name">
<div class="text-right">testuser</div>
<div class="w-other-c text-right"><a href="/Account/Manage"
style="color: #47c1c5;">账号设置</a></div>
</div>
<div id="head-cir">
<a href="/Account/Manage"><img id="head-pic" src="/images/Avatar/none.gif" /></a>
</div>
<form action="/Account/Logout" id="logoutForm" method="post"> <a
href="javascript:document.getElementById('logoutForm').submit()"
id="user-logout"><img src="/images/logout_normal.png" />&nbsp;&nbsp;退出</a>
<input name="__RequestVerificationToken" type="hidden"
value="CfDJ8MyNMqFNaC9JmJW13PvY-93KUsLhEERSkyq42lFSAJwcYqoOWYU2p5zHR6sNxZiW0yVxlm0_ZDBSsO96b-i4SCtXHaFUIq9Q2wyrW1FMKQWBOnqxa2mjUZ0E45xioOsLwst5PrezZO-Y5VKiPkrlDdAyLHyTwQ3HZeoHrqpQ88tG1lroVWSKgS4nRtTSthj-gg" />
</form>
</div>
<div class="hidden-lg hidden-md">
<div class="m-tool-title">
Mikan Project
</div>
<div style="text-align: center;margin-top: 2rem;" class="m-head-cir">
<img class="m-head-pic" src="/images/Avatar/none.gif" />
</div>
<div id="user-name">
<div class="m-head-welcometext">testuser 欢迎回来!</div>
</div>
<div class="m-head-welcometext">
<form action="/Account/Logout" id="mobileLogoutForm" method="post"> <a
href="javascript:document.getElementById('logoutForm').submit()"
id="user-logout"><img src="/images/logout_normal.png" />&nbsp;&nbsp;退出登录</a>
<input name="__RequestVerificationToken" type="hidden"
value="CfDJ8MyNMqFNaC9JmJW13PvY-93KUsLhEERSkyq42lFSAJwcYqoOWYU2p5zHR6sNxZiW0yVxlm0_ZDBSsO96b-i4SCtXHaFUIq9Q2wyrW1FMKQWBOnqxa2mjUZ0E45xioOsLwst5PrezZO-Y5VKiPkrlDdAyLHyTwQ3HZeoHrqpQ88tG1lroVWSKgS4nRtTSthj-gg" />
</form>
</div>
</div>
<style>
.num-node {
display: none;
}
</style>
<script>
AdvancedSubscriptionEnabled = true;
</script>
</section>
</div>
</div>
</div>
</div>
</div>
<footer class="footer hidden-xs hidden-sm">
<div id="sk-footer" class="container text-center">
<div>Powered by Mikan Project <a href="/Home/Contact" target="_blank">联系我们</a></div>
<div>Cooperate by PlaymateCat@Lisa</div>
</div>
</footer>
<script>
var tool = {};
(function () {
var inputPEl = $('.m-tool-search-input');
var inputEl = inputPEl.find('input');
var changeEl = $('.m-tool-search-change');
inputPEl.hide();
tool.clickSearch = clickSearch;
tool.resetSearch = resetSearch;
function clickSearch() {
changeEl.hide();
inputPEl.show();
inputEl.focus();
}
function resetSearch() {
changeEl.show();
inputPEl.hide();
inputEl.val('');
}
})();
</script>
<script>
var pageUtil;
(function () {
pageUtil = {
isMobile: isMobile
};
function isMobile() {
var check = false;
(function (a) {
if (/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i.test(a) || /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-/i.test(a.substr(0, 4))) check = true;
})(navigator.userAgent || navigator.vendor || window.opera);
return check;
}
})();
//detect if page is mobile
if (pageUtil.isMobile()) {
document.getElementsByTagName('html')[0].style['font-size'] = window.innerWidth / 32 + 'px';
}
</script>
</body>
<!-- here put your own javascript -->
<script src="/js/mikan.min.js?v=7USd_hfRE7KH46vQBdF29boa3ENWKMVFRTyD9a8XEDg"></script>
</html>

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff