fix: repair mikan web extractors

This commit is contained in:
master 2025-02-25 01:02:38 +08:00
parent 09565bd827
commit 5bc5d98823
26 changed files with 9537 additions and 659 deletions

62
Cargo.lock generated
View File

@ -3834,6 +3834,30 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "mockito"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "652cd6d169a36eaf9d1e6bce1a221130439a966d7f27858af66a33a66e9c4ee2"
dependencies = [
"assert-json-diff",
"bytes",
"colored",
"futures-util",
"http",
"http-body",
"http-body-util",
"hyper",
"hyper-util",
"log",
"rand 0.8.5",
"regex",
"serde_json",
"serde_urlencoded",
"similar",
"tokio",
]
[[package]]
name = "mod_use"
version = "0.2.3"
@ -5012,6 +5036,7 @@ dependencies = [
"loco-rs",
"log",
"maplit",
"mockito",
"moka",
"nom 8.0.0",
"once_cell",
@ -5025,6 +5050,7 @@ dependencies = [
"reqwest-retry",
"reqwest-tracing",
"rss",
"rstest",
"scraper 0.22.0",
"sea-orm",
"sea-orm-migration",
@ -5162,6 +5188,12 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "relative-path"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2"
[[package]]
name = "rend"
version = "0.4.2"
@ -5411,6 +5443,36 @@ dependencies = [
"quick-xml 0.37.2",
]
[[package]]
name = "rstest"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03e905296805ab93e13c1ec3a03f4b6c4f35e9498a3d5fa96dc626d22c03cd89"
dependencies = [
"futures-timer",
"futures-util",
"rstest_macros",
"rustc_version",
]
[[package]]
name = "rstest_macros"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef0053bbffce09062bee4bcc499b0fbe7a57b879f1efe088d6d8d4c7adcdef9b"
dependencies = [
"cfg-if",
"glob",
"proc-macro-crate",
"proc-macro2",
"quote",
"regex",
"relative-path",
"rustc_version",
"syn 2.0.98",
"unicode-ident",
]
[[package]]
name = "rust-multipart-rfc7578_2"
version = "0.7.0"

View File

@ -118,3 +118,5 @@ nom = "8.0.0"
serial_test = "3"
loco-rs = { version = "0.14", features = ["testing"] }
insta = { version = "1", features = ["redactions", "yaml", "filters"] }
mockito = "1.6.1"
rstest = "0.24.0"

View File

@ -1,3 +1,5 @@
use std::{borrow::Cow, error::Error as StdError};
use thiserror::Error;
#[derive(Error, Debug)]
@ -16,4 +18,19 @@ pub enum ExtractError {
MikanRssFormatError { url: String },
#[error("Parse mikan rss item format error, {reason}")]
MikanRssItemFormatError { reason: String },
#[error("Missing field {field} in extracting meta")]
MikanMetaMissingFieldError {
field: Cow<'static, str>,
#[source]
source: Option<Box<dyn StdError + Send + Sync>>,
},
}
impl ExtractError {
pub fn from_mikan_meta_missing_field(field: Cow<'static, str>) -> Self {
Self::MikanMetaMissingFieldError {
field,
source: None,
}
}
}

View File

@ -1,3 +1,11 @@
pub mod styles;
pub use styles::parse_style_attr;
use html_escape::decode_html_entities;
use itertools::Itertools;
use scraper::ElementRef;
pub use styles::{extract_background_image_src_from_style_attr, extract_style_from_attr};
/// Collect the element's descendant text nodes (comma-joined), decode HTML
/// entities, and trim surrounding whitespace.
pub fn extract_inner_text_from_element_ref(el: ElementRef<'_>) -> String {
    let joined: String = el.text().collect::<Vec<_>>().join(",");
    decode_html_entities(joined.as_str()).trim().to_string()
}

View File

@ -1,6 +1,45 @@
use lightningcss::declaration::DeclarationBlock;
use lightningcss::{
declaration::DeclarationBlock, properties::Property, values::image::Image as CSSImage,
};
use url::Url;
pub fn parse_style_attr(style_attr: &str) -> Option<DeclarationBlock> {
use crate::extract::media::extract_image_src_from_str;
/// Parse an inline `style="..."` attribute into a CSS declaration block;
/// returns `None` on any parse failure.
pub fn extract_style_from_attr(style_attr: &str) -> Option<DeclarationBlock> {
    DeclarationBlock::parse_string(style_attr, Default::default()).ok()
}
/// Scan a `style` attribute for the first `background-image` /
/// `background` URL and resolve it against `base_url`.
///
/// Returns `None` when the style cannot be parsed or no image URL is
/// present.
pub fn extract_background_image_src_from_style_attr(
    style_attr: &str,
    base_url: &Url,
) -> Option<Url> {
    let style = extract_style_from_attr(style_attr)?;
    style.iter().find_map(|(property, _)| {
        // Gather the candidate CSS image values carried by this property.
        let candidates: Vec<_> = match property {
            Property::BackgroundImage(images) => images.iter().collect(),
            Property::Background(backgrounds) => {
                backgrounds.iter().map(|bg| &bg.image).collect()
            }
            _ => Vec::new(),
        };
        // First candidate that is a url() and resolves wins.
        candidates.into_iter().find_map(|img| {
            if let CSSImage::Url(path) = img {
                extract_image_src_from_str(path.url.trim(), base_url)
            } else {
                None
            }
        })
    })
}

View File

@ -0,0 +1,8 @@
use url::Url;
/// Resolve `image_src` against `base_url` and strip the query string and
/// fragment, yielding a canonical image URL.
pub fn extract_image_src_from_str(image_src: &str, base_url: &Url) -> Option<Url> {
    base_url.join(image_src).ok().map(|mut resolved| {
        resolved.set_query(None);
        resolved.set_fragment(None);
        resolved
    })
}

View File

@ -3,15 +3,17 @@ use std::ops::Deref;
use async_trait::async_trait;
use loco_rs::app::{AppContext, Initializer};
use once_cell::sync::OnceCell;
use url::Url;
use super::AppMikanConfig;
use crate::{config::AppConfigExt, fetch::HttpClient};
static APP_MIKAN_CLIENT: OnceCell<AppMikanClient> = OnceCell::new();
#[derive(Debug)]
pub struct AppMikanClient {
http_client: HttpClient,
base_url: String,
base_url: Url,
}
impl AppMikanClient {
@ -31,7 +33,7 @@ impl AppMikanClient {
.expect("AppMikanClient is not initialized")
}
pub fn base_url(&self) -> &str {
pub fn base_url(&self) -> &Url {
&self.base_url
}
}

View File

@ -1,9 +1,10 @@
use serde::{Deserialize, Serialize};
use url::Url;
use crate::fetch::HttpClientConfig;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct AppMikanConfig {
pub http_client: HttpClientConfig,
pub base_url: String,
pub base_url: Url,
}

View File

@ -1,22 +1,22 @@
pub mod client;
pub mod config;
pub mod constants;
pub mod rss_parser;
pub mod web_parser;
pub mod rss_extract;
pub mod web_extract;
pub use client::{AppMikanClient, AppMikanClientInitializer};
pub use config::AppMikanConfig;
pub use constants::MIKAN_BUCKET_KEY;
pub use rss_parser::{
build_mikan_bangumi_rss_link, build_mikan_subscriber_aggregation_rss_link,
parse_mikan_bangumi_id_from_rss_link, parse_mikan_rss_channel_from_rss_link,
parse_mikan_rss_items_from_rss_link, parse_mikan_subscriber_aggregation_id_from_rss_link,
pub use rss_extract::{
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanBangumiRssLink,
MikanRssChannel, MikanRssItem, MikanSubscriberAggregationRssChannel,
MikanSubscriberAggregationRssLink,
MikanSubscriberAggregationRssLink, build_mikan_bangumi_rss_link,
build_mikan_subscriber_aggregation_rss_link, extract_mikan_bangumi_id_from_rss_link,
extract_mikan_subscriber_aggregation_id_from_rss_link, parse_mikan_rss_channel_from_rss_link,
parse_mikan_rss_items_from_rss_link,
};
pub use web_parser::{
build_mikan_bangumi_homepage, build_mikan_episode_homepage,
parse_mikan_bangumi_meta_from_mikan_homepage, parse_mikan_episode_meta_from_mikan_homepage,
MikanBangumiMeta, MikanEpisodeMeta,
pub use web_extract::{
MikanBangumiMeta, MikanEpisodeMeta, build_mikan_bangumi_homepage, build_mikan_episode_homepage,
extract_mikan_bangumi_meta_from_bangumi_homepage,
extract_mikan_episode_meta_from_episode_homepage,
};

View File

@ -1,6 +1,7 @@
use std::ops::Deref;
use chrono::DateTime;
use color_eyre::eyre;
use itertools::Itertools;
use reqwest::IntoUrl;
use serde::{Deserialize, Serialize};
@ -10,8 +11,8 @@ use crate::{
extract::{
errors::ExtractError,
mikan::{
web_parser::{parse_mikan_episode_id_from_homepage, MikanEpisodeHomepage},
AppMikanClient,
web_extract::{MikanEpisodeHomepage, parse_mikan_episode_id_from_homepage},
},
},
fetch::bytes::fetch_bytes,
@ -163,11 +164,11 @@ pub struct MikanSubscriberAggregationRssLink {
}
pub fn build_mikan_bangumi_rss_link(
mikan_base_url: &str,
mikan_base_url: impl IntoUrl,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> color_eyre::eyre::Result<Url> {
let mut url = Url::parse(mikan_base_url)?;
) -> eyre::Result<Url> {
let mut url = mikan_base_url.into_url()?;
url.set_path("/RSS/Bangumi");
url.query_pairs_mut()
.append_pair("bangumiId", mikan_bangumi_id);
@ -181,7 +182,7 @@ pub fn build_mikan_bangumi_rss_link(
pub fn build_mikan_subscriber_aggregation_rss_link(
mikan_base_url: &str,
mikan_aggregation_id: &str,
) -> color_eyre::eyre::Result<Url> {
) -> eyre::Result<Url> {
let mut url = Url::parse(mikan_base_url)?;
url.set_path("/RSS/MyBangumi");
url.query_pairs_mut()
@ -189,7 +190,7 @@ pub fn build_mikan_subscriber_aggregation_rss_link(
Ok(url)
}
pub fn parse_mikan_bangumi_id_from_rss_link(url: &Url) -> Option<MikanBangumiRssLink> {
pub fn extract_mikan_bangumi_id_from_rss_link(url: &Url) -> Option<MikanBangumiRssLink> {
if url.path() == "/RSS/Bangumi" {
url.query_pairs()
.find(|(k, _)| k == "bangumiId")
@ -205,7 +206,7 @@ pub fn parse_mikan_bangumi_id_from_rss_link(url: &Url) -> Option<MikanBangumiRss
}
}
pub fn parse_mikan_subscriber_aggregation_id_from_rss_link(
pub fn extract_mikan_subscriber_aggregation_id_from_rss_link(
url: &Url,
) -> Option<MikanSubscriberAggregationRssLink> {
if url.path() == "/RSS/MyBangumi" {
@ -222,7 +223,7 @@ pub fn parse_mikan_subscriber_aggregation_id_from_rss_link(
pub async fn parse_mikan_rss_items_from_rss_link(
client: Option<&AppMikanClient>,
url: impl IntoUrl,
) -> color_eyre::eyre::Result<Vec<MikanRssItem>> {
) -> eyre::Result<Vec<MikanRssItem>> {
let channel = parse_mikan_rss_channel_from_rss_link(client, url).await?;
Ok(channel.into_items())
@ -231,7 +232,7 @@ pub async fn parse_mikan_rss_items_from_rss_link(
pub async fn parse_mikan_rss_channel_from_rss_link(
client: Option<&AppMikanClient>,
url: impl IntoUrl,
) -> color_eyre::eyre::Result<MikanRssChannel> {
) -> eyre::Result<MikanRssChannel> {
let http_client = client.map(|s| s.deref());
let bytes = fetch_bytes(http_client, url.as_str()).await?;
@ -242,7 +243,7 @@ pub async fn parse_mikan_rss_channel_from_rss_link(
if let Some(MikanBangumiRssLink {
mikan_bangumi_id,
mikan_fansub_id,
}) = parse_mikan_bangumi_id_from_rss_link(&channel_link)
}) = extract_mikan_bangumi_id_from_rss_link(&channel_link)
{
let channel_name = channel.title().replace("Mikan Project - ", "");
@ -274,7 +275,7 @@ pub async fn parse_mikan_rss_channel_from_rss_link(
} else if let Some(MikanSubscriberAggregationRssLink {
mikan_aggregation_id,
..
}) = parse_mikan_subscriber_aggregation_id_from_rss_link(&channel_link)
}) = extract_mikan_subscriber_aggregation_id_from_rss_link(&channel_link)
{
let items = channel
.items
@ -304,8 +305,8 @@ mod tests {
use crate::{
extract::mikan::{
parse_mikan_rss_channel_from_rss_link, MikanBangumiAggregationRssChannel,
MikanBangumiRssChannel, MikanRssChannel,
MikanBangumiAggregationRssChannel, MikanBangumiRssChannel, MikanRssChannel,
parse_mikan_rss_channel_from_rss_link,
},
sync::core::BITTORRENT_MIME_TYPE,
};
@ -333,10 +334,12 @@ mod tests {
assert_eq!(first_sub_item.mime, BITTORRENT_MIME_TYPE);
assert!(&first_sub_item
.homepage
.as_str()
.starts_with("https://mikanani.me/Home/Episode"));
assert!(
&first_sub_item
.homepage
.as_str()
.starts_with("https://mikanani.me/Home/Episode")
);
let name = first_sub_item.title.as_str();
assert!(name.contains("葬送的芙莉莲"));

View File

@ -0,0 +1,644 @@
use std::{borrow::Cow, ops::Deref};
use bytes::Bytes;
use color_eyre::eyre;
use loco_rs::app::AppContext;
use reqwest::IntoUrl;
use scraper::{Html, Selector};
use tracing::instrument;
use url::Url;
use super::{
AppMikanClient, MIKAN_BUCKET_KEY, MikanBangumiRssLink, extract_mikan_bangumi_id_from_rss_link,
};
use crate::{
app::AppContextExt,
dal::DalContentCategory,
extract::{
errors::ExtractError,
html::{extract_background_image_src_from_style_attr, extract_inner_text_from_element_ref},
media::extract_image_src_from_str,
},
fetch::{html::fetch_html, image::fetch_image},
};
/// Metadata scraped from a Mikan episode homepage.
#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeMeta {
    /// Episode homepage URL this meta was extracted from.
    pub homepage: Url,
    /// Poster image URL as found on the page, if any.
    pub origin_poster_src: Option<Url>,
    pub bangumi_title: String,
    pub episode_title: String,
    /// Fansub (subtitle group) display name.
    pub fansub: String,
    pub mikan_bangumi_id: String,
    pub mikan_fansub_id: String,
    pub mikan_episode_id: String,
}
/// Metadata scraped from a Mikan bangumi homepage or the "My Bangumi" page.
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiMeta {
    pub homepage: Url,
    /// Poster image URL as found on the page, if any.
    pub origin_poster_src: Option<Url>,
    pub bangumi_title: String,
    pub mikan_bangumi_id: String,
    /// Present only when a specific fansub is selected/known for this bangumi.
    pub mikan_fansub_id: Option<String>,
    pub fansub: Option<String>,
}
/// A poster image, either freshly downloaded (`poster_data`) or already
/// present in object storage (`poster_src`).
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiPosterMeta {
    pub origin_poster_src: Url,
    /// Raw image bytes; `None` on a storage cache hit.
    pub poster_data: Option<Bytes>,
    /// Storage-side location; `None` when the poster was not persisted.
    pub poster_src: Option<String>,
}
/// Identifier parsed out of a `/Home/Episode/{id}` URL.
#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeHomepage {
    pub mikan_episode_id: String,
}
/// Identifiers parsed out of a `/Home/Bangumi/{id}#{fansub_id}` URL.
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiHomepage {
    pub mikan_bangumi_id: String,
    /// Taken from the URL fragment when present.
    pub mikan_fansub_id: Option<String>,
}
/// Build `{base}/Home/Bangumi/{bangumi_id}`, optionally carrying the fansub
/// id as a `#fragment`.
pub fn build_mikan_bangumi_homepage(
    mikan_base_url: impl IntoUrl,
    mikan_bangumi_id: &str,
    mikan_fansub_id: Option<&str>,
) -> eyre::Result<Url> {
    let mut homepage = mikan_base_url.into_url()?;
    let path = format!("/Home/Bangumi/{mikan_bangumi_id}");
    homepage.set_path(path.as_str());
    homepage.set_fragment(mikan_fansub_id);
    Ok(homepage)
}
/// Build `{base}/Home/Episode/{episode_id}`.
pub fn build_mikan_episode_homepage(
    mikan_base_url: impl IntoUrl,
    mikan_episode_id: &str,
) -> eyre::Result<Url> {
    let mut homepage = mikan_base_url.into_url()?;
    let path = format!("/Home/Episode/{mikan_episode_id}");
    homepage.set_path(path.as_str());
    Ok(homepage)
}
/// Build `{base}/ExpandBangumi?bangumiId={id}&showSubscribed=true`, the
/// endpoint that lists a bangumi's subscribed fansubs.
pub fn build_mikan_bangumi_expand_info_url(
    mikan_base_url: impl IntoUrl,
    mikan_bangumi_id: &str,
) -> eyre::Result<Url> {
    let mut expand_url = mikan_base_url.into_url()?;
    expand_url.set_path("/ExpandBangumi");
    {
        // Scope the mutable query-pairs borrow.
        let mut pairs = expand_url.query_pairs_mut();
        pairs.append_pair("bangumiId", mikan_bangumi_id);
        pairs.append_pair("showSubscribed", "true");
    }
    Ok(expand_url)
}
/// Extract the mikan bangumi id (and the optional fansub id carried in the
/// URL fragment) out of a `/Home/Bangumi/{id}` homepage URL.
///
/// Returns `None` when the URL does not point at a bangumi homepage.
pub fn parse_mikan_bangumi_id_from_homepage(url: &Url) -> Option<MikanBangumiHomepage> {
    // `strip_prefix` removes only the leading path segment; the previous
    // `replace("/Home/Bangumi/", "")` would also have rewritten any later
    // occurrence of that substring inside the remaining path.
    let mikan_bangumi_id = url.path().strip_prefix("/Home/Bangumi/")?;
    Some(MikanBangumiHomepage {
        mikan_bangumi_id: mikan_bangumi_id.to_string(),
        mikan_fansub_id: url.fragment().map(String::from),
    })
}
/// Extract the mikan episode id out of a `/Home/Episode/{id}` homepage URL.
///
/// Returns `None` when the URL does not point at an episode homepage.
pub fn parse_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeHomepage> {
    // `strip_prefix` removes only the leading path segment; `replace` would
    // also rewrite any later occurrence of the substring inside the id.
    let mikan_episode_id = url.path().strip_prefix("/Home/Episode/")?;
    Some(MikanEpisodeHomepage {
        mikan_episode_id: mikan_episode_id.to_string(),
    })
}
/// Download the poster image at `origin_poster_src_url` and wrap it in a
/// `MikanBangumiPosterMeta`; no storage-side `poster_src` is produced here.
pub async fn extract_mikan_poster_meta_from_src(
    client: Option<&AppMikanClient>,
    origin_poster_src_url: Url,
) -> eyre::Result<MikanBangumiPosterMeta> {
    let http_client = client.map(|mikan| mikan.deref());
    let poster_data = fetch_image(http_client, origin_poster_src_url.clone()).await?;
    let meta = MikanBangumiPosterMeta {
        origin_poster_src: origin_poster_src_url,
        poster_data: Some(poster_data),
        poster_src: None,
    };
    Ok(meta)
}
/// Like [`extract_mikan_poster_meta_from_src`], but backed by the DAL
/// object store: returns the cached object when present, otherwise
/// downloads the poster and persists it.
pub async fn extract_mikan_bangumi_poster_meta_from_src_with_cache(
    ctx: &AppContext,
    origin_poster_src_url: Url,
    subscriber_id: i32,
) -> eyre::Result<MikanBangumiPosterMeta> {
    let dal_client = ctx.get_dal_client();
    let mikan_client = ctx.get_mikan_client();
    // Storage key = poster path relative to the Mikan image root; compute
    // it once instead of re-deriving it for both the lookup and the store.
    let storage_path = origin_poster_src_url.path().replace("/images/Bangumi/", "");
    if let Some(poster_src) = dal_client
        .exists_object(
            DalContentCategory::Image,
            subscriber_id,
            Some(MIKAN_BUCKET_KEY),
            &storage_path,
        )
        .await?
    {
        // Cache hit: skip the download, return the stored location only.
        return Ok(MikanBangumiPosterMeta {
            origin_poster_src: origin_poster_src_url,
            poster_data: None,
            poster_src: Some(poster_src.to_string()),
        });
    }
    let poster_data =
        fetch_image(Some(mikan_client.deref()), origin_poster_src_url.clone()).await?;
    let poster_str = dal_client
        .store_object(
            DalContentCategory::Image,
            subscriber_id,
            Some(MIKAN_BUCKET_KEY),
            &storage_path,
            poster_data.clone(),
        )
        .await?;
    Ok(MikanBangumiPosterMeta {
        origin_poster_src: origin_poster_src_url,
        poster_data: Some(poster_data),
        poster_src: Some(poster_str.to_string()),
    })
}
/// Fetch a Mikan episode homepage and scrape the episode metadata out of
/// its HTML.
///
/// Required fields (bangumi title, RSS-derived bangumi/fansub ids, episode
/// title, episode id, fansub name) yield a `MikanMetaMissingFieldError`
/// when absent; the poster URL is optional.
#[instrument(skip_all, fields(mikan_episode_homepage_url = mikan_episode_homepage_url.as_str()))]
pub async fn extract_mikan_episode_meta_from_episode_homepage(
    client: Option<&AppMikanClient>,
    mikan_episode_homepage_url: Url,
) -> eyre::Result<MikanEpisodeMeta> {
    let http_client = client.map(|s| s.deref());
    // scheme+host of the homepage, used to resolve relative image sources.
    let mikan_base_url = Url::parse(&mikan_episode_homepage_url.origin().unicode_serialization())?;
    let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?;
    let html = Html::parse_document(&content);
    let bangumi_title_selector =
        &Selector::parse(".bangumi-title > a[href^='/Home/Bangumi/']").unwrap();
    // The page's RSS anchor; its href is parsed as an RSS link below to
    // recover the bangumi/fansub ids.
    let mikan_bangumi_id_selector =
        &Selector::parse(".bangumi-title > a.mikan-rss[data-original-title='RSS']").unwrap();
    let bangumi_poster_selector = &Selector::parse(".bangumi-poster").unwrap();
    let bangumi_title = html
        .select(bangumi_title_selector)
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
    // Resolve the RSS href relative to the page, then pull both ids out of it.
    let MikanBangumiRssLink {
        mikan_bangumi_id,
        mikan_fansub_id,
        ..
    } = html
        .select(mikan_bangumi_id_selector)
        .next()
        .and_then(|el| el.value().attr("href"))
        .and_then(|s| mikan_episode_homepage_url.join(s).ok())
        .and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .ok_or_else(|| {
            ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
        })
        .inspect_err(|error| tracing::error!(error = %error))?;
    // On an episode page the fansub id is mandatory.
    let mikan_fansub_id = mikan_fansub_id
        .ok_or_else(|| {
            ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_fansub_id"))
        })
        .inspect_err(|error| tracing::error!(error = %error))?;
    // The document <title> doubles as the episode title.
    let episode_title = html
        .select(&Selector::parse("title").unwrap())
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("episode_title")))
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
    // The episode id comes from the URL path, not from the document.
    let MikanEpisodeHomepage {
        mikan_episode_id, ..
    } = parse_mikan_episode_id_from_homepage(&mikan_episode_homepage_url)
        .ok_or_else(|| {
            ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_episode_id"))
        })
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
    let fansub_name = html
        .select(
            &Selector::parse(".bangumi-info a.magnet-link-wrap[href^='/Home/PublishGroup/']")
                .unwrap(),
        )
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("fansub_name")))
        .inspect_err(|error| {
            tracing::warn!(error = %error);
        })?;
    // Poster: prefer the lazy-load `data-src` attribute, fall back to the
    // inline `style` background image.
    let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
        el.value()
            .attr("data-src")
            .and_then(|data_src| extract_image_src_from_str(data_src, &mikan_base_url))
            .or_else(|| {
                el.value().attr("style").and_then(|style| {
                    extract_background_image_src_from_style_attr(style, &mikan_base_url)
                })
            })
    });
    tracing::trace!(
        bangumi_title,
        mikan_bangumi_id,
        episode_title,
        mikan_episode_id,
        origin_poster_src = origin_poster_src.as_ref().map(|url| url.as_str()),
        fansub_name,
        mikan_fansub_id,
        "mikan episode meta extracted"
    );
    Ok(MikanEpisodeMeta {
        mikan_bangumi_id,
        mikan_fansub_id,
        bangumi_title,
        episode_title,
        homepage: mikan_episode_homepage_url,
        origin_poster_src,
        fansub: fansub_name,
        mikan_episode_id,
    })
}
/// Fetch a Mikan bangumi homepage and scrape bangumi-level metadata.
///
/// The fansub id/name pair is optional: it is resolved only when the URL
/// carries a `#fragment` that matches a subgroup anchor on the page.
#[instrument(skip_all, fields(mikan_bangumi_homepage_url = mikan_bangumi_homepage_url.as_str()))]
pub async fn extract_mikan_bangumi_meta_from_bangumi_homepage(
    client: Option<&AppMikanClient>,
    mikan_bangumi_homepage_url: Url,
) -> eyre::Result<MikanBangumiMeta> {
    let http_client = client.map(|s| s.deref());
    // scheme+host of the homepage, used to resolve relative image sources.
    let mikan_base_url = Url::parse(&mikan_bangumi_homepage_url.origin().unicode_serialization())?;
    let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?;
    let html = Html::parse_document(&content);
    let bangumi_title_selector = &Selector::parse(".bangumi-title").unwrap();
    let mikan_bangumi_id_selector =
        &Selector::parse(".bangumi-title > .mikan-rss[data-original-title='RSS']").unwrap();
    let bangumi_poster_selector = &Selector::parse(".bangumi-poster").unwrap();
    let bangumi_title = html
        .select(bangumi_title_selector)
        .next()
        .map(extract_inner_text_from_element_ref)
        .ok_or_else(|| ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("bangumi_title")))
        .inspect_err(|error| tracing::warn!(error = %error))?;
    // Bangumi id is recovered from the page's RSS link.
    let mikan_bangumi_id = html
        .select(mikan_bangumi_id_selector)
        .next()
        .and_then(|el| el.value().attr("href"))
        .and_then(|s| mikan_bangumi_homepage_url.join(s).ok())
        .and_then(|rss_link_url| extract_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .map(
            |MikanBangumiRssLink {
                 mikan_bangumi_id, ..
             }| mikan_bangumi_id,
        )
        .ok_or_else(|| {
            ExtractError::from_mikan_meta_missing_field(Cow::Borrowed("mikan_bangumi_id"))
        })
        .inspect_err(|error| tracing::error!(error = %error))?;
    // Poster: prefer `data-src`, fall back to the inline style background.
    let origin_poster_src = html.select(bangumi_poster_selector).next().and_then(|el| {
        el.value()
            .attr("data-src")
            .and_then(|data_src| extract_image_src_from_str(data_src, &mikan_base_url))
            .or_else(|| {
                el.value().attr("style").and_then(|style| {
                    extract_background_image_src_from_style_attr(style, &mikan_base_url)
                })
            })
    });
    // Optional fansub: the URL fragment (e.g. `#370`) selects a subgroup
    // anchor whose inner text is the fansub display name; `unzip` turns
    // `Option<(id, name)>` into `(Option<id>, Option<name>)`.
    let (mikan_fansub_id, fansub_name) = mikan_bangumi_homepage_url
        .fragment()
        .and_then(|id| {
            html.select(
                &Selector::parse(&format!("a.subgroup-name[data-anchor='#{}']", id)).unwrap(),
            )
            .next()
            .map(extract_inner_text_from_element_ref)
            .map(|fansub_name| (id.to_string(), fansub_name))
        })
        .unzip();
    tracing::trace!(
        bangumi_title,
        mikan_bangumi_id,
        origin_poster_src = origin_poster_src.as_ref().map(|url| url.as_str()),
        fansub_name,
        mikan_fansub_id,
        "mikan bangumi meta extracted"
    );
    Ok(MikanBangumiMeta {
        homepage: mikan_bangumi_homepage_url,
        bangumi_title,
        origin_poster_src,
        mikan_bangumi_id,
        fansub: fansub_name,
        mikan_fansub_id,
    })
}
/// Scrape all subscribed bangumi (with their subscribed fansubs) from the
/// "My Bangumi" page, producing one `MikanBangumiMeta` per
/// (bangumi, fansub) pair.
///
/// Login required — presumably via session cookies on the supplied client;
/// TODO confirm against the login flow.
///
/// For every bangumi found on the page this issues one extra request to
/// the `/ExpandBangumi` endpoint to enumerate its subscribed fansubs.
#[instrument(skip_all, fields(my_bangumi_page_url = my_bangumi_page_url.as_str()))]
pub async fn extract_mikan_bangumis_meta_from_my_bangumi_page(
    client: Option<&AppMikanClient>,
    my_bangumi_page_url: Url,
) -> eyre::Result<Vec<MikanBangumiMeta>> {
    let http_client = client.map(|c| c.deref());
    // scheme+host, used to resolve relative image sources and build URLs.
    let mikan_base_url = Url::parse(&my_bangumi_page_url.origin().unicode_serialization())?;
    let content = fetch_html(http_client, my_bangumi_page_url.clone()).await?;
    // Selectors over the bangumi grid on the "My Bangumi" page ...
    let bangumi_container_selector = &Selector::parse(".sk-bangumi .an-ul>li").unwrap();
    let bangumi_info_selector = &Selector::parse(".an-info a.an-text").unwrap();
    let bangumi_poster_selector =
        &Selector::parse("span[data-src][data-bangumiid], span[data-bangumiid][style]").unwrap();
    // ... and over the /ExpandBangumi fragment fetched per bangumi.
    let fansub_container_selector =
        &Selector::parse(".js-expand_bangumi-subgroup.js-subscribed").unwrap();
    let fansub_title_selector = &Selector::parse(".tag-res-name[title]").unwrap();
    let fansub_id_selector =
        &Selector::parse(".active[data-subtitlegroupid][data-bangumiid]").unwrap();
    let html = Html::parse_document(&content);
    let mut bangumi_list = vec![];
    for bangumi_elem in html.select(bangumi_container_selector) {
        let title_and_href_elem = bangumi_elem.select(bangumi_info_selector).next();
        let poster_elem = bangumi_elem.select(bangumi_poster_selector).next();
        // Both href and title must be present; entries missing either are
        // skipped silently.
        if let (Some(bangumi_home_page_url), Some(bangumi_title)) = (
            title_and_href_elem.and_then(|elem| elem.attr("href")),
            title_and_href_elem.and_then(|elem| elem.attr("title")),
        ) {
            // Poster: prefer `data-src`, fall back to the inline style.
            let origin_poster_src = poster_elem.and_then(|ele| {
                ele.attr("data-src")
                    .and_then(|data_src| extract_image_src_from_str(data_src, &mikan_base_url))
                    .or_else(|| {
                        ele.attr("style").and_then(|style| {
                            extract_background_image_src_from_style_attr(style, &mikan_base_url)
                        })
                    })
            });
            let bangumi_home_page_url = my_bangumi_page_url.join(bangumi_home_page_url)?;
            if let Some(MikanBangumiHomepage {
                ref mikan_bangumi_id,
                ..
            }) = parse_mikan_bangumi_id_from_homepage(&bangumi_home_page_url)
            {
                if let Some(origin_poster_src) = origin_poster_src.as_ref() {
                    tracing::trace!(
                        origin_poster_src = origin_poster_src.as_str(),
                        bangumi_title,
                        mikan_bangumi_id,
                        "bangumi info extracted"
                    );
                } else {
                    // Missing poster is non-fatal; log and continue.
                    tracing::warn!(
                        bangumi_title,
                        mikan_bangumi_id,
                        "bangumi info extracted, but failed to extract poster_src"
                    );
                }
                // One extra request per bangumi to list subscribed fansubs.
                let bangumi_expand_info_url =
                    build_mikan_bangumi_expand_info_url(mikan_base_url.clone(), mikan_bangumi_id)?;
                let bangumi_expand_info_content =
                    fetch_html(http_client, bangumi_expand_info_url).await?;
                let bangumi_expand_info_fragment =
                    Html::parse_fragment(&bangumi_expand_info_content);
                for fansub_info in bangumi_expand_info_fragment.select(fansub_container_selector) {
                    if let (Some(fansub_name), Some(mikan_fansub_id)) = (
                        fansub_info
                            .select(fansub_title_selector)
                            .next()
                            .and_then(|ele| ele.attr("title")),
                        fansub_info
                            .select(fansub_id_selector)
                            .next()
                            .and_then(|ele| ele.attr("data-subtitlegroupid")),
                    ) {
                        tracing::trace!(
                            fansub_name = &fansub_name,
                            mikan_fansub_id,
                            "subscribed fansub extracted"
                        );
                        // One result entry per (bangumi, fansub) pair.
                        bangumi_list.push(MikanBangumiMeta {
                            homepage: build_mikan_bangumi_homepage(
                                mikan_base_url.clone(),
                                mikan_bangumi_id.as_str(),
                                Some(mikan_fansub_id),
                            )?,
                            bangumi_title: bangumi_title.to_string(),
                            mikan_bangumi_id: mikan_bangumi_id.to_string(),
                            mikan_fansub_id: Some(mikan_fansub_id.to_string()),
                            fansub: Some(fansub_name.to_string()),
                            origin_poster_src: origin_poster_src.clone(),
                        })
                    }
                }
            }
        }
    }
    Ok(bangumi_list)
}
#[cfg(test)]
mod test {
    #![allow(unused_variables)]
    use color_eyre::eyre;
    use rstest::{fixture, rstest};
    use tracing::Level;
    use url::Url;
    use zune_image::{codecs::ImageFormat, image::Image};

    use super::*;
    use crate::{
        extract::mikan::web_extract::extract_mikan_bangumis_meta_from_my_bangumi_page,
        test_utils::{mikan::build_testing_mikan_client, tracing::init_testing_tracing},
    };

    // rstest fixture: installs the test tracing subscriber before each case.
    #[fixture]
    fn before_each() {
        init_testing_tracing(Level::INFO);
    }

    // Poster download through a mocked server yields valid JPEG bytes.
    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_poster_from_src(before_each: ()) -> eyre::Result<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
        let bangumi_poster_url = mikan_base_url.join("/images/Bangumi/202309/5ce9fed1.jpg")?;
        let bangumi_poster_mock = mikan_server
            .mock("GET", bangumi_poster_url.path())
            .with_body_from_file("tests/resources/mikan/Bangumi-202309-5ce9fed1.jpg")
            .create_async()
            .await;
        let bgm_poster =
            extract_mikan_poster_meta_from_src(Some(&mikan_client), bangumi_poster_url).await?;
        // NOTE(review): calling `expect(n)` after `create_async` only records
        // the expectation; without a following `assert()`/`assert_async()`
        // the hit count is never actually verified — confirm intent.
        bangumi_poster_mock.expect(1);
        let u8_data = bgm_poster.poster_data.expect("should have poster data");
        let image = Image::read(u8_data.to_vec(), Default::default());
        assert!(
            image.is_ok_and(|img| img
                .metadata()
                .get_image_format()
                .is_some_and(|fmt| matches!(fmt, ImageFormat::JPEG))),
            "should start with valid jpeg data magic number"
        );
        Ok(())
    }

    // Episode page scrape returns the expected titles, fansub and ids.
    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_episode(before_each: ()) -> eyre::Result<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
        let episode_homepage_url =
            mikan_base_url.join("/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a")?;
        let episode_homepage_mock = mikan_server
            .mock("GET", episode_homepage_url.path())
            .with_body_from_file(
                "tests/resources/mikan/Episode-475184dce83ea2b82902592a5ac3343f6d54b36a.htm",
            )
            .create_async()
            .await;
        let ep_meta = extract_mikan_episode_meta_from_episode_homepage(
            Some(&mikan_client),
            episode_homepage_url.clone(),
        )
        .await?;
        assert_eq!(ep_meta.homepage, episode_homepage_url);
        assert_eq!(ep_meta.bangumi_title, "葬送的芙莉莲");
        assert_eq!(
            ep_meta
                .origin_poster_src
                .as_ref()
                .map(|s| s.path().to_string()),
            Some(String::from("/images/Bangumi/202309/5ce9fed1.jpg"))
        );
        assert_eq!(ep_meta.fansub, "LoliHouse");
        assert_eq!(ep_meta.mikan_fansub_id, "370");
        assert_eq!(ep_meta.mikan_bangumi_id, "3141");
        Ok(())
    }

    // Bangumi page scrape resolves the fansub from the URL `#370` fragment.
    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_bangumi_meta_from_bangumi_homepage(
        before_each: (),
    ) -> eyre::Result<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
        let bangumi_homepage_url = mikan_base_url.join("/Home/Bangumi/3416#370")?;
        let bangumi_homepage_mock = mikan_server
            .mock("GET", bangumi_homepage_url.path())
            .with_body_from_file("tests/resources/mikan/Bangumi-3416-370.htm")
            .create_async()
            .await;
        let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
            Some(&mikan_client),
            bangumi_homepage_url.clone(),
        )
        .await?;
        assert_eq!(bgm_meta.homepage, bangumi_homepage_url);
        assert_eq!(bgm_meta.bangumi_title, "叹气的亡灵想隐退");
        assert_eq!(
            bgm_meta
                .origin_poster_src
                .as_ref()
                .map(|s| s.path().to_string()),
            Some(String::from("/images/Bangumi/202410/480ef127.jpg"))
        );
        assert_eq!(bgm_meta.fansub, Some(String::from("LoliHouse")));
        assert_eq!(bgm_meta.mikan_fansub_id, Some(String::from("370")));
        assert_eq!(bgm_meta.mikan_bangumi_id, "3416");
        Ok(())
    }

    // "My Bangumi" scrape walks the page plus one /ExpandBangumi call per
    // bangumi and yields at least one meta with a poster.
    #[rstest]
    #[tokio::test]
    async fn test_extract_mikan_bangumis_meta_from_my_bangumi_page(
        before_each: (),
    ) -> eyre::Result<()> {
        let mut mikan_server = mockito::Server::new_async().await;
        let mikan_base_url = Url::parse(&mikan_server.url())?;
        let mikan_client = build_testing_mikan_client(mikan_base_url.clone())?;
        let my_bangumi_page_url = mikan_base_url.join("/Home/MyBangumi")?;
        let mock_my_bangumi = mikan_server
            .mock("GET", my_bangumi_page_url.path())
            .with_body_from_file("tests/resources/mikan/MyBangumi.htm")
            .create_async()
            .await;
        // Any query is accepted: each bangumi hits /ExpandBangumi with a
        // different bangumiId.
        let mock_expand_bangumi = mikan_server
            .mock("GET", "/ExpandBangumi")
            .match_query(mockito::Matcher::Any)
            .with_body_from_file("tests/resources/mikan/ExpandBangumi.htm")
            .create_async()
            .await;
        let bangumi_metas = extract_mikan_bangumis_meta_from_my_bangumi_page(
            Some(&mikan_client),
            my_bangumi_page_url,
        )
        .await?;
        assert!(!bangumi_metas.is_empty());
        assert!(bangumi_metas[0].origin_poster_src.is_some());
        // NOTE(review): see above — `expect` without a later `assert` does
        // not verify these hit counts.
        mock_my_bangumi.expect(1);
        mock_expand_bangumi.expect(bangumi_metas.len());
        Ok(())
    }
}

View File

@ -1,595 +0,0 @@
use std::ops::Deref;
use bytes::Bytes;
use color_eyre::eyre::{self, ContextCompat};
use html_escape::decode_html_entities;
use itertools::Itertools;
use lazy_static::lazy_static;
use lightningcss::{properties::Property, values::image::Image as CSSImage};
use loco_rs::app::AppContext;
use regex::Regex;
use reqwest::IntoUrl;
use scraper::Html;
use url::Url;
use super::{
AppMikanClient, MIKAN_BUCKET_KEY, MikanBangumiRssLink, parse_mikan_bangumi_id_from_rss_link,
};
use crate::{
app::AppContextExt,
dal::DalContentCategory,
extract::html::parse_style_attr,
fetch::{html::fetch_html, image::fetch_image},
};
#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeMeta {
pub homepage: Url,
pub origin_poster_src: Option<Url>,
pub bangumi_title: String,
pub episode_title: String,
pub fansub: String,
pub mikan_bangumi_id: String,
pub mikan_fansub_id: String,
pub mikan_episode_id: String,
}
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiMeta {
pub homepage: Url,
pub origin_poster_src: Option<Url>,
pub bangumi_title: String,
pub mikan_bangumi_id: String,
pub mikan_fansub_id: Option<String>,
pub fansub: Option<String>,
}
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiPosterMeta {
pub origin_poster_src: Url,
pub poster_data: Option<Bytes>,
pub poster_src: Option<String>,
}
#[derive(Clone, Debug, PartialEq)]
pub struct MikanEpisodeHomepage {
pub mikan_episode_id: String,
}
#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiHomepage {
pub mikan_bangumi_id: String,
pub mikan_fansub_id: Option<String>,
}
lazy_static! {
static ref MIKAN_TITLE_SEASON: Regex = Regex::new("第.*季").unwrap();
}
pub fn build_mikan_bangumi_homepage(
mikan_base_url: impl IntoUrl,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> eyre::Result<Url> {
let mut url = mikan_base_url.into_url()?;
url.set_path(&format!("/Home/Bangumi/{mikan_bangumi_id}"));
url.set_fragment(mikan_fansub_id);
Ok(url)
}
pub fn build_mikan_episode_homepage(
mikan_base_url: impl IntoUrl,
mikan_episode_id: &str,
) -> eyre::Result<Url> {
let mut url = mikan_base_url.into_url()?;
url.set_path(&format!("/Home/Episode/{mikan_episode_id}"));
Ok(url)
}
/// Build the `/ExpandBangumi` endpoint URL that returns the expanded
/// fansub/subscription fragment for one bangumi.
pub fn build_mikan_bangumi_expand_info_url(
    mikan_base_url: impl IntoUrl,
    mikan_bangumi_id: &str,
) -> eyre::Result<Url> {
    let mut expand_url = mikan_base_url.into_url()?;
    expand_url.set_path("/ExpandBangumi");
    {
        // Scope the mutable query builder so the URL can be returned after.
        let mut query = expand_url.query_pairs_mut();
        query.append_pair("bangumiId", mikan_bangumi_id);
        query.append_pair("showSubscribed", "true");
    }
    Ok(expand_url)
}
/// Parse a bangumi homepage URL (`/Home/Bangumi/{id}[#fansub]`) into its id
/// and optional fansub fragment. Returns `None` for any other URL.
pub fn parse_mikan_bangumi_id_from_homepage(url: &Url) -> Option<MikanBangumiHomepage> {
    // `strip_prefix` checks and removes the leading prefix in one step.
    // The previous `starts_with` + `replace` pair would also strip any
    // *later* occurrence of "/Home/Bangumi/" inside the id segment.
    let mikan_bangumi_id = url.path().strip_prefix("/Home/Bangumi/")?.to_string();
    Some(MikanBangumiHomepage {
        mikan_bangumi_id,
        mikan_fansub_id: url.fragment().map(String::from),
    })
}
/// Parse an episode homepage URL (`/Home/Episode/{id}`) into its episode id.
/// Returns `None` for any other URL.
pub fn parse_mikan_episode_id_from_homepage(url: &Url) -> Option<MikanEpisodeHomepage> {
    // `strip_prefix` checks and removes the leading prefix in one step,
    // unlike the old `starts_with` + `replace` pair which replaced every
    // occurrence of the pattern in the path.
    let mikan_episode_id = url.path().strip_prefix("/Home/Episode/")?.to_string();
    Some(MikanEpisodeHomepage { mikan_episode_id })
}
/// Fetch the poster bytes from their origin URL (no caching/persistence).
///
/// `poster_src` is always `None` here; the cached variant fills it in.
pub async fn parse_mikan_bangumi_poster_from_origin_poster_src(
    client: Option<&AppMikanClient>,
    origin_poster_src_url: Url,
) -> eyre::Result<MikanBangumiPosterMeta> {
    let fetch_client = client.map(|mikan| mikan.deref());
    let image_bytes = fetch_image(fetch_client, origin_poster_src_url.clone()).await?;
    Ok(MikanBangumiPosterMeta {
        origin_poster_src: origin_poster_src_url,
        poster_data: Some(image_bytes),
        poster_src: None,
    })
}
/// Fetch a poster through the DAL cache: return the stored location when the
/// object already exists, otherwise download, persist, and return both the
/// bytes and the new storage location.
pub async fn parse_mikan_bangumi_poster_from_origin_poster_src_with_cache(
    ctx: &AppContext,
    origin_poster_src_url: Url,
    subscriber_id: i32,
) -> eyre::Result<MikanBangumiPosterMeta> {
    let dal_client = ctx.get_dal_client();
    let mikan_client = ctx.get_mikan_client();
    // Storage key: the poster path relative to Mikan's image root.
    let storage_path = origin_poster_src_url.path().replace("/images/Bangumi/", "");

    // Fast path: already cached — no network fetch, no bytes returned.
    if let Some(poster_src) = dal_client
        .exists_object(
            DalContentCategory::Image,
            subscriber_id,
            Some(MIKAN_BUCKET_KEY),
            &storage_path,
        )
        .await?
    {
        return Ok(MikanBangumiPosterMeta {
            origin_poster_src: origin_poster_src_url,
            poster_data: None,
            poster_src: Some(poster_src.to_string()),
        });
    }

    // Slow path: download the image, then persist it for next time.
    let poster_data =
        fetch_image(Some(mikan_client.deref()), origin_poster_src_url.clone()).await?;
    let stored_location = dal_client
        .store_object(
            DalContentCategory::Image,
            subscriber_id,
            Some(MIKAN_BUCKET_KEY),
            &storage_path,
            poster_data.clone(),
        )
        .await?;
    Ok(MikanBangumiPosterMeta {
        origin_poster_src: origin_poster_src_url,
        poster_data: Some(poster_data),
        poster_src: Some(stored_location.to_string()),
    })
}
/// Extract the poster URL from an inline `style` attribute by scanning its
/// `background-image` / `background` properties for a CSS `url(...)`, joined
/// against the Mikan base URL. Query and fragment are stripped so the result
/// is stable across the `?width=...&format=...` variants Mikan serves.
pub fn parse_mikan_origin_poster_src_from_style_attr(
    mikan_base_url: impl IntoUrl,
    style_attr: &str,
) -> Option<Url> {
    let base_url = mikan_base_url.into_url().ok()?;
    let style = parse_style_attr(style_attr)?;
    let mut poster_url = style.iter().find_map(|(prop, _)| match prop {
        Property::BackgroundImage(images) => images.iter().find_map(|img| {
            if let CSSImage::Url(path) = img {
                base_url.join(path.url.trim()).ok()
            } else {
                None
            }
        }),
        Property::Background(backgrounds) => backgrounds.iter().find_map(|bg| {
            if let CSSImage::Url(path) = &bg.image {
                base_url.join(path.url.trim()).ok()
            } else {
                None
            }
        }),
        _ => None,
    })?;
    poster_url.set_query(None);
    poster_url.set_fragment(None);
    Some(poster_url)
}
/// Extract bangumi metadata (title, poster, ids, selected fansub) from a
/// Mikan bangumi homepage (`/Home/Bangumi/{id}[#fansub]`).
///
/// The URL fragment, when present, selects one of the fansubs listed on the
/// page; its id and display name are resolved against the page's
/// `.subgroup-text` entries.
///
/// # Errors
/// Fails when the page lacks a bangumi title or a parseable RSS link, or on
/// fetch failure.
pub async fn parse_mikan_bangumi_meta_from_mikan_homepage(
    client: Option<&AppMikanClient>,
    mikan_bangumi_homepage_url: Url,
) -> eyre::Result<MikanBangumiMeta> {
    let http_client = client.map(|s| s.deref());
    let mikan_base_url = mikan_bangumi_homepage_url.origin().unicode_serialization();
    let content = fetch_html(http_client, mikan_bangumi_homepage_url.as_str()).await?;
    let html = Html::parse_document(&content);
    // Collect (fansub_id, fansub_name) pairs from every `.subgroup-text` node.
    let bangumi_fansubs = html
        .select(&scraper::Selector::parse(".subgroup-text").unwrap())
        .filter_map(|el| {
            if let (Some(fansub_id), Some(fansub_name)) = (
                el.value()
                    .attr("id")
                    .map(|s| decode_html_entities(s).trim().to_string()),
                el.select(&scraper::Selector::parse("a:nth-child(1)").unwrap())
                    .next()
                    .map(|child| {
                        // The display name may be split between a leading text
                        // node and the first <a>; join both pieces.
                        let mut s = String::from(
                            child
                                .prev_sibling()
                                .and_then(|t| t.value().as_text())
                                .map(|s| s.trim())
                                .unwrap_or_default(),
                        );
                        s.extend(child.text());
                        decode_html_entities(&s).trim().to_string()
                    }),
            ) {
                Some((fansub_id, fansub_name))
            } else {
                None
            }
        })
        .collect_vec();
    // The URL fragment (e.g. `#370`) selects the active fansub, if any.
    let fansub_info = mikan_bangumi_homepage_url.fragment().and_then(|b| {
        bangumi_fansubs
            .iter()
            .find_map(|(id, name)| if id == b { Some((id, name)) } else { None })
    });
    let bangumi_title = html
        .select(&scraper::Selector::parse(".bangumi-title").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .trim()
                .to_string()
        })
        // An empty title is treated the same as a missing one.
        .and_then(|title| if title.is_empty() { None } else { Some(title) })
        .wrap_err_with(|| {
            // todo: error handler
            format!(
                "Missing mikan bangumi official title for {}",
                mikan_bangumi_homepage_url
            )
        })?;
    // The canonical bangumi id comes from the RSS link beside the title.
    let MikanBangumiRssLink {
        mikan_bangumi_id, ..
    } = html
        .select(&scraper::Selector::parse(".bangumi-title > .mikan-rss").unwrap())
        .next()
        .and_then(|el| el.value().attr("href"))
        .as_ref()
        .and_then(|s| mikan_bangumi_homepage_url.join(s).ok())
        .and_then(|rss_link_url| parse_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .wrap_err_with(|| {
            // todo: error handler
            format!(
                "Missing mikan bangumi rss link or error format for {}",
                mikan_bangumi_homepage_url
            )
        })?;
    // The poster URL lives in the poster div's inline `background-image` style.
    let origin_poster_src = html
        .select(&scraper::Selector::parse(".bangumi-poster").unwrap())
        .next()
        .and_then(|el| el.value().attr("style"))
        .and_then(|style_attr| {
            parse_mikan_origin_poster_src_from_style_attr(&mikan_base_url, style_attr)
        });
    Ok(MikanBangumiMeta {
        homepage: mikan_bangumi_homepage_url,
        bangumi_title,
        origin_poster_src,
        mikan_bangumi_id,
        fansub: fansub_info.map(|s| s.1.to_string()),
        mikan_fansub_id: fansub_info.map(|s| s.0.to_string()),
    })
}
/// Extract episode metadata (titles, poster, fansub, ids) from a Mikan
/// episode homepage (`/Home/Episode/{id}`).
///
/// The bangumi/fansub ids are recovered from the page's RSS link; the
/// episode id is taken from the homepage URL path itself.
///
/// # Errors
/// Fails when the page lacks a bangumi title, episode title, parseable RSS
/// link, or fansub name, or on fetch failure.
pub async fn parse_mikan_episode_meta_from_mikan_homepage(
    client: Option<&AppMikanClient>,
    mikan_episode_homepage_url: Url,
) -> eyre::Result<MikanEpisodeMeta> {
    let http_client = client.map(|s| s.deref());
    let mikan_base_url = mikan_episode_homepage_url.origin().unicode_serialization();
    let content = fetch_html(http_client, mikan_episode_homepage_url.as_str()).await?;
    let html = Html::parse_document(&content);
    let bangumi_title = html
        .select(&scraper::Selector::parse(".bangumi-title").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .trim()
                .to_string()
        })
        // An empty title is treated the same as a missing one.
        .and_then(|title| if title.is_empty() { None } else { Some(title) })
        .wrap_err_with(|| {
            // todo: error handler
            format!(
                "Missing mikan bangumi official title for {}",
                mikan_episode_homepage_url
            )
        })?;
    // Episode title comes from the document <title>, minus the site suffix.
    let episode_title = html
        .select(&scraper::Selector::parse("title").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .replace(" - Mikan Project", "")
                .trim()
                .to_string()
        })
        .and_then(|title| if title.is_empty() { None } else { Some(title) })
        .wrap_err_with(|| {
            // todo: error handler
            format!(
                "Missing mikan episode official title for {}",
                mikan_episode_homepage_url
            )
        })?;
    // Both ids are encoded in the RSS link; a link without a fansub id is
    // rejected, since episode pages always belong to a specific fansub.
    let (mikan_bangumi_id, mikan_fansub_id) = html
        .select(&scraper::Selector::parse(".bangumi-title > .mikan-rss").unwrap())
        .next()
        .and_then(|el| el.value().attr("href"))
        .as_ref()
        .and_then(|s| mikan_episode_homepage_url.join(s).ok())
        .and_then(|rss_link_url| parse_mikan_bangumi_id_from_rss_link(&rss_link_url))
        .and_then(
            |MikanBangumiRssLink {
                 mikan_bangumi_id,
                 mikan_fansub_id,
                 ..
             }| {
                mikan_fansub_id.map(|mikan_fansub_id| (mikan_bangumi_id, mikan_fansub_id))
            },
        )
        .wrap_err_with(|| {
            // todo: error handler
            format!(
                "Missing mikan bangumi rss link or error format for {}",
                mikan_episode_homepage_url
            )
        })?;
    let fansub = html
        .select(&scraper::Selector::parse(".bangumi-info>.magnet-link-wrap").unwrap())
        .next()
        .map(|el| {
            decode_html_entities(&el.text().collect::<String>())
                .trim()
                .to_string()
        })
        .wrap_err_with(|| {
            // todo: error handler
            format!(
                "Missing mikan bangumi fansub name for {}",
                mikan_episode_homepage_url
            )
        })?;
    // The poster URL lives in the poster div's inline `background-image` style.
    let origin_poster_src = html
        .select(&scraper::Selector::parse(".bangumi-poster").unwrap())
        .next()
        .and_then(|el| el.value().attr("style"))
        .and_then(|s| parse_mikan_origin_poster_src_from_style_attr(mikan_base_url, s));
    // The episode id is the URL path segment, not something on the page.
    let MikanEpisodeHomepage {
        mikan_episode_id, ..
    } = parse_mikan_episode_id_from_homepage(&mikan_episode_homepage_url).wrap_err_with(|| {
        format!(
            "Failed to extract mikan_episode_id from {}",
            &mikan_episode_homepage_url
        )
    })?;
    Ok(MikanEpisodeMeta {
        mikan_bangumi_id,
        mikan_fansub_id,
        bangumi_title,
        episode_title,
        homepage: mikan_episode_homepage_url,
        origin_poster_src,
        fansub,
        mikan_episode_id,
    })
}
/// Extract every subscribed bangumi (with each subscribed fansub) from the
/// "My Bangumi" page.
///
/// Login required: the page only renders subscription state for an
/// authenticated session, so `client` must carry valid credentials.
///
/// For each bangumi found on the page, the `/ExpandBangumi` fragment is
/// fetched to enumerate the fansubs the user subscribed to; one
/// [`MikanBangumiMeta`] is emitted per (bangumi, subscribed fansub) pair.
pub async fn parse_mikan_bangumis_meta_from_my_bangumi_page(
    client: Option<&AppMikanClient>,
    my_bangumi_page_url: Url,
) -> eyre::Result<Vec<MikanBangumiMeta>> {
    let http_client = client.map(|c| c.deref());
    let mikan_base_url = my_bangumi_page_url.origin().unicode_serialization();
    let content = fetch_html(http_client, my_bangumi_page_url.clone()).await?;
    let html = Html::parse_document(&content);
    let mut bangumi_list = vec![];
    for bangumi_elem in
        html.select(&scraper::Selector::parse(".sk-bangumi .an-info a.an-text").unwrap())
    {
        if let (Some(bangumi_home_page_url), Some(bangumi_title)) =
            (bangumi_elem.attr("href"), bangumi_elem.attr("title"))
        {
            // The poster is the immediately preceding sibling element; its
            // inline style carries the poster URL.
            let origin_poster_src = bangumi_elem
                .prev_sibling()
                .and_then(|ele| ele.value().as_element())
                .and_then(|ele| ele.attr("style"))
                .and_then(|style_attr| {
                    parse_mikan_origin_poster_src_from_style_attr(
                        mikan_base_url.clone(),
                        style_attr,
                    )
                });
            let bangumi_home_page_url = my_bangumi_page_url.join(bangumi_home_page_url)?;
            if let Some(MikanBangumiHomepage {
                ref mikan_bangumi_id,
                ..
            }) = parse_mikan_bangumi_id_from_homepage(&bangumi_home_page_url)
            {
                // Subscribed fansubs are not in the page itself; fetch the
                // expand-bangumi fragment for each bangumi.
                let bangumi_expand_info_url =
                    build_mikan_bangumi_expand_info_url(mikan_base_url.clone(), mikan_bangumi_id)?;
                let bangumi_expand_info_content =
                    fetch_html(http_client, bangumi_expand_info_url).await?;
                let bangumi_expand_info_fragment =
                    Html::parse_fragment(&bangumi_expand_info_content);
                // BUGFIX: the selector previously read
                // "js-expand_bangumi-subgroup.js-subscribed" (no leading `.`),
                // which matches a nonexistent *tag* named
                // `js-expand_bangumi-subgroup` — so no subscribed fansub was
                // ever found. Both parts are CSS classes.
                for fansub_info in bangumi_expand_info_fragment.select(
                    &scraper::Selector::parse(".js-expand_bangumi-subgroup.js-subscribed")
                        .unwrap(),
                ) {
                    if let (Some(fansub_name), Some(mikan_fansub_id)) = (
                        fansub_info
                            .select(&scraper::Selector::parse(".tag-res-name[title]").unwrap())
                            .next()
                            .and_then(|ele| ele.attr("title")),
                        fansub_info
                            .select(
                                &scraper::Selector::parse(
                                    ".active[data-subtitlegroupid][data-bangumiid]",
                                )
                                .unwrap(),
                            )
                            .next()
                            .and_then(|ele| ele.attr("data-subtitlegroupid")),
                    ) {
                        bangumi_list.push(MikanBangumiMeta {
                            homepage: build_mikan_bangumi_homepage(
                                mikan_base_url.clone(),
                                mikan_bangumi_id.as_str(),
                                Some(mikan_fansub_id),
                            )?,
                            bangumi_title: bangumi_title.to_string(),
                            mikan_bangumi_id: mikan_bangumi_id.to_string(),
                            mikan_fansub_id: Some(mikan_fansub_id.to_string()),
                            fansub: Some(fansub_name.to_string()),
                            origin_poster_src: origin_poster_src.clone(),
                        })
                    }
                }
            }
        }
    }
    Ok(bangumi_list)
}
#[cfg(test)]
mod test {
    use std::assert_matches::assert_matches;

    use color_eyre::eyre;
    use url::Url;
    use zune_image::{codecs::ImageFormat, image::Image};

    use super::{
        parse_mikan_bangumi_meta_from_mikan_homepage,
        parse_mikan_bangumi_poster_from_origin_poster_src,
        parse_mikan_episode_meta_from_mikan_homepage,
    };

    // NOTE(review): these tests hit the live mikanani.me site, so they
    // require network access and will break if the referenced pages change.
    // Consider replacing with an HTTP mock + recorded fixtures.
    #[tokio::test]
    async fn test_parse_mikan_episode() {
        // Inner async closure so `?` can be used; failures surface via expect.
        let test_fn = async || -> eyre::Result<()> {
            let url_str =
                "https://mikanani.me/Home/Episode/475184dce83ea2b82902592a5ac3343f6d54b36a";
            let url = Url::parse(url_str)?;

            let ep_meta = parse_mikan_episode_meta_from_mikan_homepage(None, url.clone()).await?;

            assert_eq!(ep_meta.homepage, url);
            assert_eq!(ep_meta.bangumi_title, "葬送的芙莉莲");
            assert_eq!(
                ep_meta.origin_poster_src,
                Some(Url::parse(
                    "https://mikanani.me/images/Bangumi/202309/5ce9fed1.jpg"
                )?)
            );
            assert_eq!(ep_meta.fansub, "LoliHouse");
            assert_eq!(ep_meta.mikan_fansub_id, "370");
            assert_eq!(ep_meta.mikan_bangumi_id, "3141");
            assert_matches!(ep_meta.origin_poster_src, Some(..));

            // Also verify the poster bytes decode as a real JPEG image.
            let bgm_poster = parse_mikan_bangumi_poster_from_origin_poster_src(
                None,
                ep_meta.origin_poster_src.unwrap(),
            )
            .await?;
            let u8_data = bgm_poster.poster_data.expect("should have poster data");
            let image = Image::read(u8_data.to_vec(), Default::default());
            assert!(
                image.is_ok_and(|img| img
                    .metadata()
                    .get_image_format()
                    .is_some_and(|fmt| matches!(fmt, ImageFormat::JPEG))),
                "should start with valid jpeg data magic number"
            );
            Ok(())
        };

        test_fn().await.expect("test parse mikan failed");
    }

    #[tokio::test]
    async fn test_parse_mikan_bangumi() {
        let test_fn = async || -> eyre::Result<()> {
            // The `#370` fragment selects the LoliHouse fansub on this page.
            let url_str = "https://mikanani.me/Home/Bangumi/3416#370";
            let url = Url::parse(url_str)?;

            let bgm_meta = parse_mikan_bangumi_meta_from_mikan_homepage(None, url.clone()).await?;

            assert_eq!(bgm_meta.homepage, url);
            assert_eq!(bgm_meta.bangumi_title, "叹气的亡灵想隐退");
            assert_eq!(
                bgm_meta.origin_poster_src,
                Some(Url::parse(
                    "https://mikanani.me/images/Bangumi/202410/480ef127.jpg"
                )?)
            );
            assert_eq!(bgm_meta.fansub, Some(String::from("LoliHouse")));
            assert_eq!(bgm_meta.mikan_fansub_id, Some(String::from("370")));
            assert_eq!(bgm_meta.mikan_bangumi_id, "3416");
            assert_eq!(
                bgm_meta.homepage.as_str(),
                "https://mikanani.me/Home/Bangumi/3416#370"
            );
            Ok(())
        };

        test_fn().await.expect("test parse mikan failed");
    }
}

View File

@ -2,6 +2,7 @@ pub mod defs;
pub mod errors;
pub mod html;
pub mod http;
pub mod media;
pub mod mikan;
pub mod rawname;
pub mod torrent;

View File

@ -1,4 +1,4 @@
use std::{ops::Deref, sync::Arc, time::Duration};
use std::{fmt::Debug, ops::Deref, sync::Arc, time::Duration};
use async_trait::async_trait;
use axum::http::{self, Extensions};
@ -11,7 +11,7 @@ use reqwest::{ClientBuilder, Request, Response};
use reqwest_middleware::{
ClientBuilder as ClientWithMiddlewareBuilder, ClientWithMiddleware, Next,
};
use reqwest_retry::{policies::ExponentialBackoff, RetryTransientMiddleware};
use reqwest_retry::{RetryTransientMiddleware, policies::ExponentialBackoff};
use reqwest_tracing::TracingMiddleware;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
@ -101,6 +101,14 @@ pub struct HttpClient {
pub config: HttpClientConfig,
}
impl Debug for HttpClient {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("HttpClient")
.field("config", &self.config)
.finish()
}
}
impl From<HttpClient> for ClientWithMiddleware {
fn from(val: HttpClient) -> Self {
val.client

View File

@ -1,4 +1,9 @@
#![feature(duration_constructors, assert_matches, unboxed_closures)]
#![feature(
duration_constructors,
assert_matches,
unboxed_closures,
impl_trait_in_bindings
)]
pub mod app;
pub mod auth;

View File

@ -201,7 +201,7 @@ impl ActiveModel {
.ok()
.unwrap_or_default();
let homepage = build_mikan_episode_homepage(
ctx.get_mikan_client().base_url(),
ctx.get_mikan_client().base_url().clone(),
&item.mikan_episode_id,
)?;

View File

@ -12,11 +12,11 @@ use crate::{
extract::{
mikan::{
build_mikan_bangumi_homepage, build_mikan_bangumi_rss_link,
parse_mikan_bangumi_meta_from_mikan_homepage,
parse_mikan_episode_meta_from_mikan_homepage, parse_mikan_rss_channel_from_rss_link,
web_parser::{
MikanBangumiPosterMeta,
parse_mikan_bangumi_poster_from_origin_poster_src_with_cache,
extract_mikan_bangumi_meta_from_bangumi_homepage,
extract_mikan_episode_meta_from_episode_homepage,
parse_mikan_rss_channel_from_rss_link,
web_extract::{
MikanBangumiPosterMeta, extract_mikan_bangumi_poster_meta_from_src_with_cache,
},
},
rawname::extract_season_from_title_body,
@ -256,7 +256,7 @@ impl Model {
let mut new_metas = vec![];
for new_rss_item in new_rss_items.iter() {
new_metas.push(
parse_mikan_episode_meta_from_mikan_homepage(
extract_mikan_episode_meta_from_episode_homepage(
Some(mikan_client),
new_rss_item.homepage.clone(),
)
@ -272,12 +272,12 @@ impl Model {
{
let mikan_base_url = ctx.get_mikan_client().base_url();
let bgm_homepage = build_mikan_bangumi_homepage(
mikan_base_url,
mikan_base_url.clone(),
&mikan_bangumi_id,
Some(&mikan_fansub_id),
)?;
let bgm_rss_link = build_mikan_bangumi_rss_link(
mikan_base_url,
mikan_base_url.clone(),
&mikan_bangumi_id,
Some(&mikan_fansub_id),
)?;
@ -289,7 +289,7 @@ impl Model {
mikan_bangumi_id.to_string(),
mikan_fansub_id.to_string(),
async |am| -> color_eyre::eyre::Result<()> {
let bgm_meta = parse_mikan_bangumi_meta_from_mikan_homepage(
let bgm_meta = extract_mikan_bangumi_meta_from_bangumi_homepage(
Some(mikan_client),
bgm_homepage.clone(),
)
@ -306,9 +306,9 @@ impl Model {
am.fansub = ActiveValue::Set(bgm_meta.fansub);
if let Some(origin_poster_src) = bgm_meta.origin_poster_src {
if let MikanBangumiPosterMeta {
poster_src: Some(poster_src),
..
} = parse_mikan_bangumi_poster_from_origin_poster_src_with_cache(
poster_src: Some(poster_src),
..
} = extract_mikan_bangumi_poster_meta_from_src_with_cache(
ctx,
origin_poster_src,
self.subscriber_id,
@ -318,7 +318,7 @@ impl Model {
am.poster_link = ActiveValue::Set(Some(poster_src))
}
}
Ok(())
Ok(())
},
)
.await?,

View File

@ -0,0 +1,17 @@
use color_eyre::eyre;
use reqwest::IntoUrl;
use crate::{
extract::mikan::{AppMikanClient, AppMikanConfig},
fetch::HttpClientConfig,
};
pub fn build_testing_mikan_client(base_mikan_url: impl IntoUrl) -> eyre::Result<AppMikanClient> {
let mikan_client = AppMikanClient::new(AppMikanConfig {
http_client: HttpClientConfig {
..Default::default()
},
base_url: base_mikan_url.into_url()?,
})?;
Ok(mikan_client)
}

View File

@ -1,2 +1,4 @@
pub mod mikan;
#[cfg(feature = "testcontainers")]
pub mod testcontainers;
pub mod tracing;

View File

@ -0,0 +1,12 @@
use tracing::Level;
use tracing_subscriber::EnvFilter;
pub fn init_testing_tracing(level: Level) {
let crate_name = env!("CARGO_PKG_NAME");
let filter = EnvFilter::new(format!(
"{}[]={}",
crate_name,
level.as_str().to_lowercase()
));
tracing_subscriber::fmt().with_env_filter(filter).init();
}

View File

@ -4,25 +4,23 @@ use loco_rs::testing;
use recorder::app::App;
use serial_test::serial;
// TODO: see how to dedup / extract this to app-local test utils
// not to framework, because that would require a runtime dep on insta
// macro_rules! configure_insta {
// ($($expr:expr),*) => {
// let mut settings = insta::Settings::clone_current();
// settings.set_prepend_module_to_snapshot(false);
// settings.set_snapshot_suffix("user_request");
// let _guard = settings.bind_to_scope();
// };
// }
macro_rules! configure_insta {
($($expr:expr),*) => {
let mut settings = insta::Settings::clone_current();
settings.set_prepend_module_to_snapshot(false);
settings.set_snapshot_suffix("user_request");
let _guard = settings.bind_to_scope();
};
}
#[tokio::test]
#[serial]
async fn can_get_current_user() {
// configure_insta!();
//
configure_insta!();
// testing::request::<App, _, _>(|request, _ctx| async move {
// let response = request.get("/api/user/current").await;
//
// with_settings!({
// filters => testing::cleanup_user_model()
// }, {

Binary file not shown.

After

Width:  |  Height:  |  Size: 240 KiB

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,473 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="keywords" content="新番,动漫,动漫下載,新番下载,animation,bangumi,动画,蜜柑计划,Mikan Project" />
<meta name="description" content="蜜柑计划:新一代的动漫下载站" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- 若用户有Google Chrome Frame,那么ie浏览时让IE使用chrome内核 -->
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" />
<!-- 若是双核浏览器,默认webkit渲染(chrome) -->
<meta name="renderer" content="webkit">
<title>[&#x55B5;&#x840C;&#x5976;&#x8336;&#x5C4B;&amp;LoliHouse] &#x846C;&#x9001;&#x7684;&#x8299;&#x8389;&#x83B2; /
Sousou no Frieren - 23 [WebRip 1080p HEVC-10bit AAC][&#x7B80;&#x7E41;&#x65E5;&#x5185;&#x5C01;&#x5B57;&#x5E55;] -
Mikan Project</title>
<!-- here put import css lib -->
<link rel="stylesheet"
href="/lib/bootstrap/dist/css/bootstrap.min.css?v=7s5uDGW3AHqw6xtJmNNtr-OBRJUlgkNJEo78P4b0yRw" />
<link rel="stylesheet"
href="/lib/font-awesome/css/font-awesome.min.css?v=3dkvEK0WLHRJ7_Csr0BZjAWxERc5WH7bdeUya2aXxdU" />
<link rel="stylesheet" href="/css/thirdparty.min.css?v=c2SZy6n-55iljz60XCAALXejEZvjc43kgwamU5DAYUU" />
<link rel="stylesheet" href="/css/animate.min.css?v=w_eXqGX0NdMPQ0LZNhdQ8B-DQMYAxelvLoIP39dzmus" />
<link rel="stylesheet" href="/css/mikan.min.css?v=aupBMgBgKRB5chTb5fl8lvHpN3OqX67_gKg3lXZewRw" />
<script src="/lib/jquery/dist/jquery.min.js?v=BbhdlvQf_xTY9gja0Dq3HiwQF8LaCRTXxZKRutelT44"></script>
<script src="/lib/bootstrap/dist/js/bootstrap.min.js?v=KXn5puMvxCw-dAYznun-drMdG1IFl3agK0p_pqT9KAo"></script>
<script src="/js/thirdparty.min.js?v=NsK_w5fw7Nm4ZPm4eZDgsivasZNgT6ArhIjmj-bRnR0"></script>
<script src="/js/darkreader.min.js?v=Lr_8XODLEDSPtT6LqaeLKzREs4jocJUzV8HvQPItIic"></script>
<script src="/js/ScrollMagic.min.js?v=1xuIM3UJWEZX_wWN9zrA8W7CWukfsMaEqb759CeHo3U"></script>
<script src="/js/jquery.ScrollMagic.min.js?v=SyygQh9gWWfvyS13QwI0SKGAQyHDachlaigiK4X59iw"></script>
<link rel="icon" href="/images/favicon.ico?v=2" />
<link rel="apple-touch-icon" href="\Images\apple-touch-icon.png">
<link rel="apple-touch-icon" sizes="152x152" href="\Images\apple-touch-icon-152x152.png">
<link rel="apple-touch-icon" sizes="180x180" href="\Images\apple-touch-icon-180x180.png">
<link rel="apple-touch-icon" sizes="144x144" href="\Images\apple-touch-icon-144x144.png">
<script>
(function (i, s, o, g, r, a, m) {
i['GoogleAnalyticsObject'] = r; i[r] = i[r] || function () {
(i[r].q = i[r].q || []).push(arguments)
}, i[r].l = 1 * new Date(); a = s.createElement(o),
m = s.getElementsByTagName(o)[0]; a.async = 1; a.src = g; m.parentNode.insertBefore(a, m)
})(window, document, 'script', '//www.google-analytics.com/analytics.js', 'ga');
ga('create', 'UA-8911610-8', 'auto');
ga('send', 'pageview');
</script>
</head>
<body class="main">
<div id="sk-header" class="hidden-xs hidden-sm">
<div id="sk-top-nav" class="container">
<a id="logo" href="/" style="width:205px;"><img id="mikan-pic" src="/images/mikan-pic.png" /><img
src="/images/mikan-text.svg" style="height:30px;" /></a>
<div id="nav-list">
<ul class="list-inline nav-ul">
<li class="">
<div class="sk-col"><a href="/"><i class="fa fa-home fa-lg"></i>主页</a></div>
</li>
<li class="">
<div class="sk-col"><a href="/Home/MyBangumi"><i class="fa fa-rss fa-lg"></i>订阅</a></div>
</li>
<li class="">
<div class="sk-col"><a href="/Home/Classic"><i class="fa fa-slack fa-lg"></i>列表</a></div>
</li>
<li class="">
<div class="sk-col"><a href="/Home/Publish"><i class="fa fa-pencil-square-o fa-lg"></i>发布</a>
</div>
</li>
</ul>
</div>
<div class="search-form">
<form method="get" action="/Home/Search">
<div class="form-group has-feedback">
<label for="search" class="sr-only">搜索</label>
<input type="text" class="form-control input-sm" name="searchstr" id="header-search"
placeholder="搜索">
<span class="glyphicon glyphicon-search form-control-feedback"></span>
</div>
</form>
</div>
<section id="login">
<div id="user-welcome" class="hidden-sm hidden-xs">
<div id="user-name">
<div class="text-right">testuser</div>
<div class="w-other-c text-right"><a href="/Account/Manage" style="color: #47c1c5;">账号设置</a>
</div>
</div>
<div id="head-cir">
<a href="/Account/Manage"><img id="head-pic" src="/images/Avatar/none.gif" /></a>
</div>
<form action="/Account/Logout" id="logoutForm" method="post"> <a
href="javascript:document.getElementById('logoutForm').submit()" id="user-logout"><img
src="/images/logout_normal.png" />&nbsp;&nbsp;退出</a>
<input name="__RequestVerificationToken" type="hidden"
value="CfDJ8MyNMqFNaC9JmJW13PvY-93KUsLhEERSkyq42lFSAJwcYqoOWYU2p5zHR6sNxZiW0yVxlm0_ZDBSsO96b-i4SCtXHaFUIq9Q2wyrW1FMKQWBOnqxa2mjUZ0E45xioOsLwst5PrezZO-Y5VKiPkrlDdAyLHyTwQ3HZeoHrqpQ88tG1lroVWSKgS4nRtTSthj-gg" />
</form>
</div>
<div class="hidden-lg hidden-md">
<div class="m-tool-title">
Mikan Project
</div>
<div style="text-align: center;margin-top: 2rem;" class="m-head-cir">
<img class="m-head-pic" src="/images/Avatar/none.gif" />
</div>
<div id="user-name">
<div class="m-head-welcometext">testuser 欢迎回来!</div>
</div>
<div class="m-head-welcometext">
<form action="/Account/Logout" id="mobileLogoutForm" method="post"> <a
href="javascript:document.getElementById('logoutForm').submit()" id="user-logout"><img
src="/images/logout_normal.png" />&nbsp;&nbsp;退出登录</a>
<input name="__RequestVerificationToken" type="hidden"
value="CfDJ8MyNMqFNaC9JmJW13PvY-93KUsLhEERSkyq42lFSAJwcYqoOWYU2p5zHR6sNxZiW0yVxlm0_ZDBSsO96b-i4SCtXHaFUIq9Q2wyrW1FMKQWBOnqxa2mjUZ0E45xioOsLwst5PrezZO-Y5VKiPkrlDdAyLHyTwQ3HZeoHrqpQ88tG1lroVWSKgS4nRtTSthj-gg" />
</form>
</div>
</div>
<style>
.num-node {
display: none;
}
</style>
<script>
AdvancedSubscriptionEnabled = true;
</script>
</section>
</div>
<div class="ribbon">
<span class="ribbon-color1"></span>
<span class="ribbon-color2"></span>
<span class="ribbon-color3"></span>
<span class="ribbon-color4"></span>
<span class="ribbon-color5"></span>
<span class="ribbon-color6"></span>
<span class="ribbon-color7"></span>
</div>
</div>
<div class="m-home-nav hidden-lg hidden-md" id="sk-mobile-header">
<div class="m-home-tool-left clickable" data-toggle="modal" data-target="#modal-nav">
<i class="fa fa-bars" aria-hidden="true"></i>
</div>
<div class="m-home-tool-left"></div>
<div style="text-align: center; height:100%;flex:1;">
<a href="/" style="text-decoration:none">
<img src="/images/mikan-pic.png" style="height: 3rem;margin-top: 0.5rem;">
<img src="/images/mikan-text.png" style="height: 1.5rem;margin-top: 0.5rem;">
</a>
</div>
<div class="m-home-tool-right clickable" data-toggle="modal" data-target="#modal-login">
<i class="fa fa-user" aria-hidden="true" style="margin-right: 1rem;"></i>
</div>
<div class="m-home-tool-right clickable" onclick="ShowNavSearch()">
<i class="fa fa-search" aria-hidden="true"></i>
</div>
</div>
<div class="m-nav-search" style="width: 100%;">
<div style="flex: 1;">
<form method="get" action="/Home/Search">
<div class="input-group">
<span class="input-group-addon" id="sizing-addon1" style="border: none;background-color: white;">
<i class="fa fa-search" aria-hidden="true"></i>
</span>
<input type="text" class="form-control" placeholder="搜索" name="searchstr"
aria-describedby="sizing-addon1" style="border: none;font-size:16px;">
</div>
</form>
</div>
<div style="width: 4rem;" onclick="HideNavSearch()">
<span style="font-size: 1.25rem;">取消</span>
</div>
</div>
<meta name="robots" content="noindex">
<div id="sk-container" class="container">
<div class="pull-left leftbar-container">
<img src="/images/subscribed-badge.svg" class="subscribed-badge" style="" />
<div class="bangumi-poster div-hover"
style="background-image: url('/images/Bangumi/202309/5ce9fed1.jpg?width=400&height=560&format=webp');"
onclick="window.open('/Home/Bangumi/3141#370', '_blank');"></div>
<p class="bangumi-title"><a target="_blank" class="w-other-c" style="color:#555"
href="/Home/Bangumi/3141#370">&#x846C;&#x9001;&#x7684;&#x8299;&#x8389;&#x83B2;</a> <a
href="/RSS/Bangumi?bangumiId=3141&subgroupid=370" class="mikan-rss" data-placement="bottom"
data-toggle="tooltip" data-original-title="RSS" target="_blank"><i class="fa fa-rss-square"></i></a>
</p>
<p class="bangumi-info">
字幕组:<a class="magnet-link-wrap" href="/Home/PublishGroup/223" target="_blank">LoliHouse</a>
</p>
<p class="bangumi-info">发布日期2024/02/22 19:14</p>
<p class="bangumi-info">文件大小573.95 MB</p>
<div id="leftbar-nav-anchor"></div>
<div class="leftbar-nav">
<a class="btn episode-btn"
href="/Download/20240222/475184dce83ea2b82902592a5ac3343f6d54b36a.torrent">下载种子</a>
<a class="btn episode-btn"
href="magnet:?xt=urn:btih:475184dce83ea2b82902592a5ac3343f6d54b36a&amp;tr=http%3a%2f%2ft.nyaatracker.com%2fannounce&amp;tr=http%3a%2f%2ftracker.kamigami.org%3a2710%2fannounce&amp;tr=http%3a%2f%2fshare.camoe.cn%3a8080%2fannounce&amp;tr=http%3a%2f%2fopentracker.acgnx.se%2fannounce&amp;tr=http%3a%2f%2fanidex.moe%3a6969%2fannounce&amp;tr=http%3a%2f%2ft.acg.rip%3a6699%2fannounce&amp;tr=https%3a%2f%2ftr.bangumi.moe%3a9696%2fannounce&amp;tr=udp%3a%2f%2ftr.bangumi.moe%3a6969%2fannounce&amp;tr=http%3a%2f%2fopen.acgtracker.com%3a1096%2fannounce&amp;tr=udp%3a%2f%2ftracker.opentrackr.org%3a1337%2fannounce">磁力链接</a>
<a class="btn episode-btn"
href="https://mypikpak.com/drive/url-checker?url=magnet:?xt.1=urn:btih:475184dce83ea2b82902592a5ac3343f6d54b36a">在线播放</a>
<button class="btn episode-btn js-subscribe_bangumi_page active" data-bangumiid="3141"
data-subtitlegroupid="370">取消番组订阅</button>
</div>
</div>
<div class="central-container" style="min-height: 532px;">
<div class="episode-header" style="">
<p class="episode-title">[&#x55B5;&#x840C;&#x5976;&#x8336;&#x5C4B;&amp;LoliHouse]
&#x846C;&#x9001;&#x7684;&#x8299;&#x8389;&#x83B2; / Sousou no Frieren - 23 [WebRip 1080p HEVC-10bit
AAC][&#x7B80;&#x7E41;&#x65E5;&#x5185;&#x5C01;&#x5B57;&#x5E55;] [573.95 MB]</p>
</div>
<br />
<div style="padding-bottom:20px" class="episode-desc">
<div style="margin-top: -10px; margin-bottom: 10px;">
<div style="width:100%; margin-right: auto; margin-left: auto;" class="hidden-xs hidden-sm">
<a href="https://shop119340084.taobao.com/?mm_sycmid=1_150417_dba461f2e2f73a9ea2a8fa11f33a1aee"
onclick="ga('send', 'event', 'sswj_lg', 'clicked', 'ad');">
<img src="/images/SSWJ/sswj6_lg.jpg"
style='height: 100%; width: 100%; object-fit: contain' />
</a>
</div>
<div style="width:100%; margin-right: auto; margin-left: auto;" class="hidden-lg hidden-md">
<a href="https://m.tb.cn/h.g0X5kru9wgYTRsp?mm_sycmid=1_150416_5914d148315f48d5297c751b84bac595"
onclick="ga('send', 'event', 'sswj_sm', 'clicked', 'ad');">
<img src="/images/SSWJ/sswj6_sm.jpg"
style='height: 100%; width: 100%; object-fit: contain' />
</a>
</div>
</div>
<p><img src="https://s2.loli.net/2023/10/04/2YE8DWOANHUxJKf.png" style="width:800px;height:1131px"></p>
<p><strong>葬送的芙莉莲 / Sousou no Frieren<br></strong></p>
<p><strong>字幕:喵萌奶茶屋<br></strong></p>
<p><strong>脚本TauZolver<br></strong></p>
<p><strong>压制Kotachi<br></strong></p>
<p><strong>本片与喵萌奶茶屋合作,感谢字幕组的辛勤劳动。<br></strong></p>
<hr>
<p><strong>为了顺利地观看我们的作品,推荐大家使用以下播放器:</strong></p>
<p><strong>Windows<a href="https://mpv.io/" target="_blank">mpv</a><a
href="https://vcb-s.com/archives/7594" target="_blank">教程</a></strong></p>
<p><strong>macOS<a href="https://iina.io/" target="_blank">IINA</a></strong></p>
<p><strong>iOS/Android</strong><a href="https://www.videolan.org/vlc/" target="_blank"><strong>VLC
media player<br></strong></a></p>
<hr>
<p><strong><a
href="https://share.dmhy.org/topics/view/599634_LoliHouse_LoliHouse_5th_Anniversary_Announcement.html">点击查看LoliHouse五周年纪念公告附往年全部礼包</a><br></strong>
</p>
<hr><strong>人人为我,我为人人,为了各位观众能快速下载,请使用 uTorrent / qBittorrent 等正规 BT 软件下载,并保持开机上传,谢谢~</strong>
</div>
</div>
<a href="#0" class="cd-top cd-top-btn">Top</a>
</div>
<style>
img {
max-width: 937px;
}
.episode-title {
color: #555;
font-size: 12px;
font-weight: bold;
margin-bottom: 8px;
margin-top: 10px;
}
.episode-header {
max-height: 45px;
border-bottom: 2px solid #e8e8e8;
}
.episode-btn {
background-color: #c8d2d7;
border-radius: 3px;
color: #fff;
font-weight: bold;
height: 35px;
margin-top: 10px;
width: 100%;
}
.episode-btn:hover {
background-color: #61ccd1;
color: #fff;
}
.div-hover {
cursor: pointer;
}
</style>
<div class="modal modal-fullscreen fade" id="modal-nav" tabindex="-1" role="dialog" aria-labelledby="myModalLabel"
aria-hidden="true" style="background-color:#3bc0c3;">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-body" style="margin: auto;width:100%;">
<div class="m-tool">
<span class="m-close clickable"><i class="fa fa-times" aria-hidden="true" data-toggle="modal"
data-target="#modal-nav"></i></span>
<div class="m-tool-toolbar">
<img src="/images/mikan-pic.png" style="width: 3rem;">
<img src="/images/mikan-text.png" style="width: 7rem;">
</div>
<div class="m-tool-list">
<ul>
<li><a href="/" class="link">主页</a></li>
<li class="m-tool-search-change"><a href="/Home/MyBangumi" class="link">订阅</a></li>
<li onclick="tool.clickSearch()" class="m-tool-search-change">
<i class="fa fa-search" aria-hidden="true"></i>&nbsp;&nbsp;搜索站内
</li>
<li class="m-tool-search-input">
<form method="get" action="/Home/Search">
<div style="display: flex;height: 100%;">
<input type="text" class="form-control" name="searchstr"
style="font-size:16px;" />
<span style="width: 5rem;" onclick="tool.resetSearch()">取消</span>
</div>
</form>
</li>
</ul>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="modal modal-fullscreen fade" id="modal-login" tabindex="-1" role="dialog" aria-labelledby="myModalLabel"
aria-hidden="true" style="background-color:#edf1f2;">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-body" style="margin: auto;width:100%;height:85vh;">
<div class="m-login">
<span class="m-left clickable"><i class="fa fa-angle-left" aria-hidden="true"
data-toggle="modal" data-target="#modal-login"></i></span>
<section id="login">
<div id="user-welcome" class="hidden-sm hidden-xs">
<div id="user-name">
<div class="text-right">testuser</div>
<div class="w-other-c text-right"><a href="/Account/Manage"
style="color: #47c1c5;">账号设置</a></div>
</div>
<div id="head-cir">
<a href="/Account/Manage"><img id="head-pic" src="/images/Avatar/none.gif" /></a>
</div>
<form action="/Account/Logout" id="logoutForm" method="post"> <a
href="javascript:document.getElementById('logoutForm').submit()"
id="user-logout"><img src="/images/logout_normal.png" />&nbsp;&nbsp;退出</a>
<input name="__RequestVerificationToken" type="hidden"
value="CfDJ8MyNMqFNaC9JmJW13PvY-93KUsLhEERSkyq42lFSAJwcYqoOWYU2p5zHR6sNxZiW0yVxlm0_ZDBSsO96b-i4SCtXHaFUIq9Q2wyrW1FMKQWBOnqxa2mjUZ0E45xioOsLwst5PrezZO-Y5VKiPkrlDdAyLHyTwQ3HZeoHrqpQ88tG1lroVWSKgS4nRtTSthj-gg" />
</form>
</div>
<div class="hidden-lg hidden-md">
<div class="m-tool-title">
Mikan Project
</div>
<div style="text-align: center;margin-top: 2rem;" class="m-head-cir">
<img class="m-head-pic" src="/images/Avatar/none.gif" />
</div>
<div id="user-name">
<div class="m-head-welcometext">testuser 欢迎回来!</div>
</div>
<div class="m-head-welcometext">
<form action="/Account/Logout" id="mobileLogoutForm" method="post"> <a
href="javascript:document.getElementById('logoutForm').submit()"
id="user-logout"><img src="/images/logout_normal.png" />&nbsp;&nbsp;退出登录</a>
<input name="__RequestVerificationToken" type="hidden"
value="CfDJ8MyNMqFNaC9JmJW13PvY-93KUsLhEERSkyq42lFSAJwcYqoOWYU2p5zHR6sNxZiW0yVxlm0_ZDBSsO96b-i4SCtXHaFUIq9Q2wyrW1FMKQWBOnqxa2mjUZ0E45xioOsLwst5PrezZO-Y5VKiPkrlDdAyLHyTwQ3HZeoHrqpQ88tG1lroVWSKgS4nRtTSthj-gg" />
</form>
</div>
</div>
<style>
.num-node {
display: none;
}
</style>
<script>
AdvancedSubscriptionEnabled = true;
</script>
</section>
</div>
</div>
</div>
</div>
</div>
<footer class="footer hidden-xs hidden-sm">
<div id="sk-footer" class="container text-center">
<div>Powered by Mikan Project <a href="/Home/Contact" target="_blank">联系我们</a></div>
<div>Cooperate by PlaymateCat@Lisa</div>
</div>
</footer>
    <script>
        // Mobile search-box toggle helpers, exposed on the global `tool` object.
        // Relies on jQuery (`$`) having been loaded earlier in the page.
        var tool = {};
        (function () {
            var inputPEl = $('.m-tool-search-input');
            var inputEl = inputPEl.find('input');
            var changeEl = $('.m-tool-search-change');
            // The search input starts hidden; only the toggle element is visible.
            inputPEl.hide();
            tool.clickSearch = clickSearch;
            tool.resetSearch = resetSearch;
            // Swap the toggle for the search input and focus it for typing.
            function clickSearch() {
                changeEl.hide();
                inputPEl.show();
                inputEl.focus();
            }
            // Restore the toggle, hide the input and clear any entered text.
            function resetSearch() {
                changeEl.show();
                inputPEl.hide();
                inputEl.val('');
            }
        })();
    </script>
    <script>
        // User-agent based mobile detection, exposed as `pageUtil.isMobile`.
        var pageUtil;
        (function () {
            pageUtil = {
                isMobile: isMobile
            };
            // Returns true when the UA string matches a known mobile-device pattern
            // (the classic detectmobilebrowsers.com regex pair; presumably copied
            // verbatim from upstream — preserved byte-for-byte in this fixture).
            function isMobile() {
                var check = false;
                (function (a) {
                    if (/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i.test(a) || /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-/i.test(a.substr(0, 4))) check = true;
                })(navigator.userAgent || navigator.vendor || window.opera); // window.opera covers legacy Opera browsers
                return check;
            }
        })();
        // On mobile, scale the root font-size to 1/32 of the viewport width so
        // rem-based layout tracks the screen size.
        if (pageUtil.isMobile()) {
            document.getElementsByTagName('html')[0].style['font-size'] = window.innerWidth / 32 + 'px';
        }
    </script>
</body>
<!-- here put your own javascript -->
<script src="/js/mikan.min.js?v=7USd_hfRE7KH46vQBdF29boa3ENWKMVFRTyD9a8XEDg"></script>
</html>

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff