fix: fix feed rss
This commit is contained in:
parent 9fd3ae6563
commit 571caf50ff
Cargo.lock (generated): 3 changes
@@ -552,6 +552,7 @@ dependencies = [
"diligent-date-parser",
"never",
"quick-xml",
"serde",
]

[[package]]
@@ -6784,6 +6785,7 @@ dependencies = [
"openidconnect",
"percent-encoding",
"polars",
"quick-xml",
"quirks_path",
"rand 0.9.1",
"regex",
@@ -7216,6 +7218,7 @@ dependencies = [
"derive_builder",
"never",
"quick-xml",
"serde",
]

[[package]]
@@ -108,7 +108,7 @@ sea-orm = { version = "1.1", features = [
] }
figment = { version = "0.10", features = ["toml", "json", "env", "yaml"] }
sea-orm-migration = { version = "1.1", features = ["runtime-tokio"] }
rss = "2"
rss = { version = "2", features = ["builders", "with-serde"] }
fancy-regex = "0.14"
lightningcss = "1.0.0-alpha.66"
html-escape = "0.2.13"
@@ -159,6 +159,11 @@ polars = { version = "0.49.1", features = [
"lazy",
"diagonal_concat",
], optional = true }
quick-xml = { version = "0.37.5", features = [
"serialize",
"serde-types",
"serde",
] }

[dev-dependencies]
inquire = { workspace = true }
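Note: the quick-xml "serialize"/"serde" features enabled here are what back the new MikanRssRoot parsing introduced later in this commit. A minimal standalone sketch of that serde mapping, with illustrative struct names and sample XML (not taken from the repo): attributes bind via the "@" rename prefix and repeated <item> elements collect into a Vec.

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Enclosure {
    #[serde(rename = "@url")]
    url: String,
    #[serde(rename = "@type")]
    r#type: String,
}

#[derive(Debug, Deserialize)]
struct Item {
    title: String,
    link: String,
    enclosure: Enclosure,
}

#[derive(Debug, Deserialize)]
struct Channel {
    #[serde(rename = "item", default)]
    items: Vec<Item>,
}

#[derive(Debug, Deserialize)]
struct Rss {
    channel: Channel,
}

fn main() -> Result<(), quick_xml::DeError> {
    let xml = r#"<rss><channel><item>
        <title>Example</title>
        <link>https://example.com/Home/Episode/xxx</link>
        <enclosure url="https://example.com/a.torrent" type="application/x-bittorrent" length="1"/>
    </item></channel></rss>"#;
    // "@"-prefixed renames map XML attributes; child elements map by field name.
    let rss: Rss = quick_xml::de::from_str(xml)?;
    assert_eq!(rss.channel.items.len(), 1);
    Ok(())
}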
@@ -1,4 +1,4 @@
use std::time::Duration;
use std::{str::FromStr, time::Duration};

use color_eyre::{Result, eyre::OptionExt};
use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
@@ -6,7 +6,8 @@ use inquire::{Password, Text, validator::Validation};
use recorder::{
crypto::UserPassCredential,
extract::mikan::{
MikanClient, MikanConfig, MikanRssEpisodeItem, build_mikan_bangumi_expand_subscribed_url,
MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
build_mikan_bangumi_expand_subscribed_url,
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
},
@@ -190,10 +191,10 @@ async fn main() -> Result<()> {
);
String::from_utf8(bangumi_rss_doppel_path.read()?)?
};
let rss_items = rss::Channel::read_from(bangumi_rss_data.as_bytes())?.items;
let rss_items = MikanRssRoot::from_str(&bangumi_rss_data)?.channel.items;
rss_items
.into_iter()
.map(MikanRssEpisodeItem::try_from)
.map(MikanRssItemMeta::try_from)
.collect::<Result<Vec<_>, _>>()
}?;
for rss_item in rss_items {
@@ -1,10 +1,10 @@
use std::time::Duration;
use std::{str::FromStr, time::Duration};

use fetch::{FetchError, HttpClientConfig, fetch_bytes, fetch_html, fetch_image, reqwest};
use recorder::{
errors::RecorderResult,
extract::mikan::{
MikanClient, MikanConfig, MikanRssEpisodeItem,
MikanClient, MikanConfig, MikanRssItemMeta, MikanRssRoot,
extract_mikan_episode_meta_from_episode_homepage_html,
},
test_utils::mikan::{MikanDoppelMeta, MikanDoppelPath},
@@ -41,12 +41,12 @@ async fn main() -> RecorderResult<()> {
let mikan_base_url = mikan_scrape_client.base_url().clone();
tracing::info!("Scraping subscriber subscription...");
let subscriber_subscription =
fs::read("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
let channel = rss::Channel::read_from(&subscriber_subscription[..])?;
let rss_items: Vec<MikanRssEpisodeItem> = channel
fs::read_to_string("tests/resources/mikan/doppel/RSS/MyBangumi-token%3Dtest.html").await?;
let channel = MikanRssRoot::from_str(&subscriber_subscription)?.channel;
let rss_items: Vec<MikanRssItemMeta> = channel
.items
.into_iter()
.map(MikanRssEpisodeItem::try_from)
.map(MikanRssItemMeta::try_from)
.collect::<Result<Vec<_>, _>>()?;
for rss_item in rss_items {
let episode_homepage_meta = {
@@ -150,11 +150,11 @@ async fn main() -> RecorderResult<()> {
String::from_utf8(bangumi_rss_doppel_path.read()?)?
};

let channel = rss::Channel::read_from(bangumi_rss_data.as_bytes())?;
let rss_items: Vec<MikanRssEpisodeItem> = channel
let rss_items: Vec<MikanRssItemMeta> = MikanRssRoot::from_str(&bangumi_rss_data)?
.channel
.items
.into_iter()
.map(MikanRssEpisodeItem::try_from)
.map(MikanRssItemMeta::try_from)
.collect::<Result<Vec<_>, _>>()?;
for rss_item in rss_items {
{
@@ -49,6 +49,8 @@ pub enum RecorderError {
InvalidMethodError,
#[snafu(display("Invalid header value"))]
InvalidHeaderValueError,
#[snafu(transparent)]
QuickXmlDeserializeError { source: quick_xml::DeError },
#[snafu(display("Invalid header name"))]
InvalidHeaderNameError,
#[snafu(display("Missing origin (protocol or host) in headers and forwarded info"))]
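Note: the transparent variant added above is what lets the new MikanRssRoot::from_str (below) propagate quick-xml failures with a plain `?`. A reduced sketch of that mechanism, using a stand-in two-variant enum rather than the full RecorderError:

use serde::Deserialize;
use snafu::Snafu;

// Stand-in for the real RecorderError; only the new variant is reproduced here.
#[derive(Debug, Snafu)]
enum RecorderError {
    #[snafu(transparent)]
    QuickXmlDeserializeError { source: quick_xml::DeError },
}

#[derive(Debug, Deserialize)]
struct Channel {
    title: String,
}

// `?` converts quick_xml::DeError into RecorderError via the transparent variant.
fn parse_channel(xml: &str) -> Result<Channel, RecorderError> {
    Ok(quick_xml::de::from_str(xml)?)
}

fn main() {
    assert!(parse_channel("<channel><title>t</title></channel>").is_ok());
    assert!(parse_channel("<channel>").is_err());
}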
@@ -2,6 +2,7 @@ mod client;
mod config;
mod constants;
mod credential;
mod rss;
mod subscription;
mod web;

@@ -18,17 +19,19 @@ pub use constants::{
MIKAN_UNKNOWN_FANSUB_NAME, MIKAN_YEAR_QUERY_KEY,
};
pub use credential::MikanCredentialForm;
pub use rss::{
MikanRssChannel, MikanRssItem, MikanRssItemMeta, MikanRssItemTorrentExtension, MikanRssRoot,
build_mikan_bangumi_subscription_rss_url, build_mikan_subscriber_subscription_rss_url,
};
pub use subscription::{
MikanBangumiSubscription, MikanSeasonSubscription, MikanSubscriberSubscription,
};
pub use web::{
MikanBangumiHash, MikanBangumiIndexHash, MikanBangumiIndexMeta, MikanBangumiMeta,
MikanBangumiPosterMeta, MikanEpisodeHash, MikanEpisodeMeta, MikanFansubHash,
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_expand_subscribed_url,
build_mikan_bangumi_homepage_url, build_mikan_bangumi_subscription_rss_url,
MikanSeasonFlowUrlMeta, MikanSeasonStr, MikanSubscriberSubscriptionUrlMeta,
build_mikan_bangumi_expand_subscribed_url, build_mikan_bangumi_homepage_url,
build_mikan_episode_homepage_url, build_mikan_season_flow_url,
build_mikan_subscriber_subscription_rss_url,
extract_mikan_bangumi_index_meta_list_from_season_flow_fragment,
extract_mikan_bangumi_meta_from_expand_subscribed_fragment,
extract_mikan_episode_meta_from_episode_homepage_html,
apps/recorder/src/extract/mikan/rss.rs (new normal file, 215 lines)
@@ -0,0 +1,215 @@
use std::{borrow::Cow, str::FromStr};

use chrono::{DateTime, Utc};
use downloader::bittorrent::defs::BITTORRENT_MIME_TYPE;
use serde::{Deserialize, Serialize};
use url::Url;

use crate::{
    errors::{RecorderResult, app_error::RecorderError},
    extract::{
        bittorrent::EpisodeEnclosureMeta,
        mikan::{
            MIKAN_BANGUMI_ID_QUERY_KEY, MIKAN_BANGUMI_RSS_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
            MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
            MikanEpisodeHash, build_mikan_episode_homepage_url,
        },
    },
};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssItemEnclosure {
    #[serde(rename = "@type")]
    pub r#type: String,
    #[serde(rename = "@length")]
    pub length: i64,
    #[serde(rename = "@url")]
    pub url: String,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct MikanRssItemTorrentExtension {
    pub pub_date: String,
    pub content_length: i64,
    pub link: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssItem {
    pub torrent: MikanRssItemTorrentExtension,
    pub link: String,
    pub title: String,
    pub enclosure: MikanRssItemEnclosure,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssChannel {
    #[serde(rename = "item", default)]
    pub items: Vec<MikanRssItem>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MikanRssRoot {
    pub channel: MikanRssChannel,
}

impl FromStr for MikanRssRoot {
    type Err = RecorderError;
    fn from_str(source: &str) -> RecorderResult<Self> {
        let me = quick_xml::de::from_str(source)?;
        Ok(me)
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanRssItemMeta {
    pub title: String,
    pub torrent_link: Url,
    pub content_length: i64,
    pub mime: String,
    pub pub_date: Option<DateTime<Utc>>,
    pub mikan_episode_id: String,
    pub magnet_link: Option<String>,
}

impl MikanRssItemMeta {
    pub fn build_homepage_url(&self, mikan_base_url: Url) -> Url {
        build_mikan_episode_homepage_url(mikan_base_url, &self.mikan_episode_id)
    }

    pub fn parse_pub_date(pub_date: &str) -> chrono::ParseResult<DateTime<Utc>> {
        DateTime::parse_from_rfc2822(pub_date)
            .or_else(|_| DateTime::parse_from_rfc3339(pub_date))
            .or_else(|_| DateTime::parse_from_rfc3339(&format!("{pub_date}+08:00")))
            .map(|s| s.with_timezone(&Utc))
    }
}

impl TryFrom<MikanRssItem> for MikanRssItemMeta {
    type Error = RecorderError;

    fn try_from(item: MikanRssItem) -> Result<Self, Self::Error> {
        let torrent = item.torrent;

        let enclosure = item.enclosure;

        let mime_type = enclosure.r#type;
        if mime_type != BITTORRENT_MIME_TYPE {
            return Err(RecorderError::MimeError {
                expected: String::from(BITTORRENT_MIME_TYPE),
                found: mime_type.to_string(),
                desc: String::from("MikanRssItem"),
            });
        }

        let title = item.title;

        let enclosure_url = Url::parse(&enclosure.url).map_err(|err| {
            RecorderError::from_mikan_rss_invalid_field_and_source(
                "enclosure_url:enclosure.link".into(),
                err,
            )
        })?;

        let homepage = Url::parse(&item.link).map_err(|err| {
            RecorderError::from_mikan_rss_invalid_field_and_source(
                "enclosure_url:enclosure.link".into(),
                err,
            )
        })?;

        let MikanEpisodeHash {
            mikan_episode_id, ..
        } = MikanEpisodeHash::from_homepage_url(&homepage).ok_or_else(|| {
            RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
        })?;

        Ok(MikanRssItemMeta {
            title,
            torrent_link: enclosure_url,
            content_length: enclosure.length,
            mime: mime_type,
            pub_date: Self::parse_pub_date(&torrent.pub_date).ok(),
            mikan_episode_id,
            magnet_link: None,
        })
    }
}

impl From<MikanRssItemMeta> for EpisodeEnclosureMeta {
    fn from(item: MikanRssItemMeta) -> Self {
        Self {
            magnet_link: item.magnet_link,
            torrent_link: Some(item.torrent_link.to_string()),
            pub_date: item.pub_date,
            content_length: Some(item.content_length),
        }
    }
}

pub fn build_mikan_subscriber_subscription_rss_url(
    mikan_base_url: Url,
    mikan_subscription_token: &str,
) -> Url {
    let mut url = mikan_base_url;
    url.set_path(MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH);
    url.query_pairs_mut().append_pair(
        MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
        mikan_subscription_token,
    );
    url
}

pub fn build_mikan_bangumi_subscription_rss_url(
    mikan_base_url: Url,
    mikan_bangumi_id: &str,
    mikan_fansub_id: Option<&str>,
) -> Url {
    let mut url = mikan_base_url;
    url.set_path(MIKAN_BANGUMI_RSS_PATH);
    url.query_pairs_mut()
        .append_pair(MIKAN_BANGUMI_ID_QUERY_KEY, mikan_bangumi_id);
    if let Some(mikan_fansub_id) = mikan_fansub_id {
        url.query_pairs_mut()
            .append_pair(MIKAN_FANSUB_ID_QUERY_KEY, mikan_fansub_id);
    };
    url
}

#[cfg(test)]
mod test {
    #![allow(unused_variables)]
    use std::fs;

    use rstest::{fixture, rstest};
    use tracing::Level;

    use super::*;
    use crate::{errors::RecorderResult, test_utils::tracing::try_init_testing_tracing};

    #[fixture]
    fn before_each() {
        try_init_testing_tracing(Level::DEBUG);
    }

    #[rstest]
    #[test]
    fn test_mikan_rss_episode_item_try_from_rss_item(before_each: ()) -> RecorderResult<()> {
        let rss_str = fs::read_to_string(
            "tests/resources/mikan/doppel/RSS/Bangumi-bangumiId%3D3288%26subgroupid%3D370.html",
        )?;

        let mut channel = MikanRssRoot::from_str(&rss_str)?.channel;

        assert!(!channel.items.is_empty());

        let item = channel.items.pop().unwrap();

        let episode_item = MikanRssItemMeta::try_from(item.clone())?;

        assert!(episode_item.pub_date.is_some());

        Ok(())
    }
}
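Note on parse_pub_date above: the fallback chain first tries RFC 2822, then RFC 3339, and finally retries offset-less timestamps with "+08:00" appended (presumably matching Mikan's local time) before normalizing to UTC. The same chain as a standalone sketch with made-up inputs:

use chrono::{DateTime, Utc};

// Same fallback chain as MikanRssItemMeta::parse_pub_date.
fn parse_pub_date(pub_date: &str) -> chrono::ParseResult<DateTime<Utc>> {
    DateTime::parse_from_rfc2822(pub_date)
        .or_else(|_| DateTime::parse_from_rfc3339(pub_date))
        .or_else(|_| DateTime::parse_from_rfc3339(&format!("{pub_date}+08:00")))
        .map(|dt| dt.with_timezone(&Utc))
}

fn main() {
    // RFC 2822 and RFC 3339 inputs parse directly.
    assert!(parse_pub_date("Wed, 01 Jan 2025 12:00:00 +0800").is_ok());
    assert!(parse_pub_date("2025-01-01T12:00:00+08:00").is_ok());
    // An offset-less timestamp only succeeds via the +08:00 fallback.
    assert!(parse_pub_date("2025-01-01T12:00:00").is_ok());
}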
@@ -1,12 +1,13 @@
use std::{
collections::{HashMap, HashSet},
fmt::Debug,
str::FromStr,
sync::Arc,
};

use async_graphql::{InputObject, SimpleObject};
use async_stream::try_stream;
use fetch::fetch_bytes;
use fetch::fetch_html;
use futures::{Stream, TryStreamExt, pin_mut, try_join};
use maplit::hashmap;
use sea_orm::{
@@ -24,8 +25,8 @@ use crate::{
bittorrent::EpisodeEnclosureMeta,
mikan::{
MikanBangumiHash, MikanBangumiMeta, MikanEpisodeHash, MikanEpisodeMeta,
MikanRssEpisodeItem, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionRssUrlMeta, build_mikan_bangumi_subscription_rss_url,
MikanRssItemMeta, MikanRssRoot, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionUrlMeta, build_mikan_bangumi_subscription_rss_url,
build_mikan_season_flow_url, build_mikan_subscriber_subscription_rss_url,
scrape_mikan_episode_meta_from_episode_homepage_url,
},
@@ -39,7 +40,7 @@ use crate::{
#[tracing::instrument(err, skip(ctx, rss_item_list))]
async fn sync_mikan_feeds_from_rss_item_list(
ctx: &dyn AppContextTrait,
rss_item_list: Vec<MikanRssEpisodeItem>,
rss_item_list: Vec<MikanRssItemMeta>,
subscriber_id: i32,
subscription_id: i32,
) -> RecorderResult<()> {
@@ -202,7 +203,7 @@ impl SubscriptionTrait for MikanSubscriberSubscription {
fn try_from_model(model: &subscriptions::Model) -> RecorderResult<Self> {
let source_url = Url::parse(&model.source_url)?;

let meta = MikanSubscriberSubscriptionRssUrlMeta::from_rss_url(&source_url)
let meta = MikanSubscriberSubscriptionUrlMeta::from_rss_url(&source_url)
.with_whatever_context::<_, String, RecorderError>(|| {
format!(
"MikanSubscriberSubscription should extract mikan_subscription_token from \
@@ -224,19 +225,19 @@ impl MikanSubscriberSubscription {
async fn get_rss_item_list_from_source_url(
&self,
ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
) -> RecorderResult<Vec<MikanRssItemMeta>> {
let mikan_base_url = ctx.mikan().base_url().clone();
let rss_url = build_mikan_subscriber_subscription_rss_url(
mikan_base_url.clone(),
&self.mikan_subscription_token,
);
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
let html = fetch_html(ctx.mikan(), rss_url).await?;

let channel = rss::Channel::read_from(&bytes[..])?;
let channel = MikanRssRoot::from_str(&html)?.channel;

let mut result = vec![];
for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssEpisodeItem::try_from(item)
let item = MikanRssItemMeta::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}")
})?;
@@ -249,7 +250,7 @@ impl MikanSubscriberSubscription {
async fn get_rss_item_list_from_subsribed_url_rss_link(
&self,
ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
) -> RecorderResult<Vec<MikanRssItemMeta>> {
let subscribed_bangumi_list =
bangumi::Model::get_subsribed_bangumi_list_from_subscription(ctx, self.subscription_id)
.await?;
@@ -264,12 +265,12 @@ impl MikanSubscriberSubscription {
self.subscription_id, subscribed_bangumi.display_name
)
})?;
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
let html = fetch_html(ctx.mikan(), rss_url).await?;

let channel = rss::Channel::read_from(&bytes[..])?;
let channel = MikanRssRoot::from_str(&html)?.channel;

for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssEpisodeItem::try_from(item)
let item = MikanRssItemMeta::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}")
})?;
@@ -406,7 +407,7 @@ impl MikanSeasonSubscription {
fn get_rss_item_stream_from_subsribed_url_rss_link(
&self,
ctx: &dyn AppContextTrait,
) -> impl Stream<Item = RecorderResult<Vec<MikanRssEpisodeItem>>> {
) -> impl Stream<Item = RecorderResult<Vec<MikanRssItemMeta>>> {
try_stream! {

let db = ctx.db();
@@ -433,14 +434,14 @@ impl MikanSeasonSubscription {
self.subscription_id, subscribed_bangumi.display_name
)
})?;
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
let html = fetch_html(ctx.mikan(), rss_url).await?;

let channel = rss::Channel::read_from(&bytes[..])?;
let channel = MikanRssRoot::from_str(&html)?.channel;

let mut rss_item_list = vec![];

for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssEpisodeItem::try_from(item)
let item = MikanRssItemMeta::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}")
})?;
@@ -519,20 +520,20 @@ impl MikanBangumiSubscription {
async fn get_rss_item_list_from_source_url(
&self,
ctx: &dyn AppContextTrait,
) -> RecorderResult<Vec<MikanRssEpisodeItem>> {
) -> RecorderResult<Vec<MikanRssItemMeta>> {
let mikan_base_url = ctx.mikan().base_url().clone();
let rss_url = build_mikan_bangumi_subscription_rss_url(
mikan_base_url.clone(),
&self.mikan_bangumi_id,
Some(&self.mikan_fansub_id),
);
let bytes = fetch_bytes(ctx.mikan(), rss_url).await?;
let html = fetch_html(ctx.mikan(), rss_url).await?;

let channel = rss::Channel::read_from(&bytes[..])?;
let channel = MikanRssRoot::from_str(&html)?.channel;

let mut result = vec![];
for (idx, item) in channel.items.into_iter().enumerate() {
let item = MikanRssEpisodeItem::try_from(item)
let item = MikanRssItemMeta::try_from(item)
.with_whatever_context::<_, String, RecorderError>(|_| {
format!("failed to extract rss item at idx {idx}")
})?;
@@ -556,7 +557,7 @@ mod tests {
errors::RecorderResult,
extract::mikan::{
MikanBangumiHash, MikanSeasonFlowUrlMeta, MikanSeasonStr,
MikanSubscriberSubscriptionRssUrlMeta,
MikanSubscriberSubscriptionUrlMeta,
},
models::{
bangumi, episodes,
@@ -677,7 +678,7 @@ mod tests {
subscriber_id: ActiveValue::Set(subscriber_id),
category: ActiveValue::Set(subscriptions::SubscriptionCategory::MikanSubscriber),
source_url: ActiveValue::Set(
MikanSubscriberSubscriptionRssUrlMeta {
MikanSubscriberSubscriptionUrlMeta {
mikan_subscription_token: "test".into(),
}
.build_rss_url(mikan_server.base_url().clone())
@@ -26,7 +26,8 @@ use crate::{
MIKAN_EPISODE_HOMEPAGE_PATH, MIKAN_FANSUB_HOMEPAGE_PATH, MIKAN_FANSUB_ID_QUERY_KEY,
MIKAN_POSTER_BUCKET_KEY, MIKAN_SEASON_FLOW_PAGE_PATH, MIKAN_SEASON_STR_QUERY_KEY,
MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH, MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
MIKAN_YEAR_QUERY_KEY, MikanClient,
MIKAN_YEAR_QUERY_KEY, MikanClient, build_mikan_bangumi_subscription_rss_url,
build_mikan_subscriber_subscription_rss_url,
},
},
media::{
@@ -139,16 +140,16 @@ impl From<MikanRssEpisodeItem> for EpisodeEnclosureMeta {
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MikanSubscriberSubscriptionRssUrlMeta {
pub struct MikanSubscriberSubscriptionUrlMeta {
pub mikan_subscription_token: String,
}

impl MikanSubscriberSubscriptionRssUrlMeta {
impl MikanSubscriberSubscriptionUrlMeta {
pub fn from_rss_url(url: &Url) -> Option<Self> {
if url.path() == MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH {
url.query_pairs()
.find(|(k, _)| k == MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY)
.map(|(_, v)| MikanSubscriberSubscriptionRssUrlMeta {
.map(|(_, v)| MikanSubscriberSubscriptionUrlMeta {
mikan_subscription_token: v.to_string(),
})
} else {
@@ -161,19 +162,6 @@ impl MikanSubscriberSubscriptionRssUrlMeta {
}
}

pub fn build_mikan_subscriber_subscription_rss_url(
mikan_base_url: Url,
mikan_subscription_token: &str,
) -> Url {
let mut url = mikan_base_url;
url.set_path(MIKAN_SUBSCRIBER_SUBSCRIPTION_RSS_PATH);
url.query_pairs_mut().append_pair(
MIKAN_SUBSCRIBER_SUBSCRIPTION_TOKEN_QUERY_KEY,
mikan_subscription_token,
);
url
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Eq)]
pub struct MikanBangumiIndexMeta {
pub homepage: Url,
@@ -289,22 +277,6 @@ pub struct MikanBangumiPosterMeta {
pub poster_src: Option<String>,
}

pub fn build_mikan_bangumi_subscription_rss_url(
mikan_base_url: Url,
mikan_bangumi_id: &str,
mikan_fansub_id: Option<&str>,
) -> Url {
let mut url = mikan_base_url;
url.set_path(MIKAN_BANGUMI_RSS_PATH);
url.query_pairs_mut()
.append_pair(MIKAN_BANGUMI_ID_QUERY_KEY, mikan_bangumi_id);
if let Some(mikan_fansub_id) = mikan_fansub_id {
url.query_pairs_mut()
.append_pair(MIKAN_FANSUB_ID_QUERY_KEY, mikan_fansub_id);
};
url
}

#[derive(Clone, Debug, PartialEq)]
pub struct MikanBangumiIndexHash {
pub mikan_bangumi_id: String,
@@ -24,6 +24,7 @@ pub trait RssFeedItemTrait: Sized {
-> Option<Cow<'_, str>>;
fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>>;
fn get_enclosure_content_length(&self) -> Option<i64>;
fn get_xmlns(&self) -> Cow<'_, str>;
fn into_item(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Item> {
let enclosure_mime_type =
self.get_enclosure_mime()
@@ -53,21 +54,30 @@ pub trait RssFeedItemTrait: Sized {

let mut extensions = ExtensionMap::default();
if enclosure_mime_type == BITTORRENT_MIME_TYPE {
extensions.insert("torrent".to_string(), {
let mut map = btreemap! {
let xmlns = self.get_xmlns();

let torrent_extension = ExtensionBuilder::default()
.name("torrent")
.attrs(btreemap! {
"xmlns".to_string() => xmlns.to_string()
})
.children({
let mut m = btreemap! {
"link".to_string() => vec![
ExtensionBuilder::default().name(
"link"
).value(enclosure_link.to_string()).build()
ExtensionBuilder::default()
.name("link")
.value(link.to_string())
.build()
],
"contentLength".to_string() => vec![
ExtensionBuilder::default().name(
"contentLength"
).value(enclosure_content_length.to_string()).build()
],
ExtensionBuilder::default()
.name("contentLength")
.value(enclosure_content_length.to_string())
.build()
]
};
if let Some(pub_date) = enclosure_pub_date {
map.insert(
m.insert(
"pubDate".to_string(),
vec![
ExtensionBuilder::default()
@@ -76,9 +86,17 @@ pub trait RssFeedItemTrait: Sized {
.build(),
],
);
}
map
});
};
m
})
.build();

extensions.insert(
"".to_string(),
btreemap! {
"torrent".to_string() => vec![torrent_extension]
},
);
};

let enclosure = EnclosureBuilder::default()
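Note: the rebuilt block above attaches a namespaced <torrent> extension to each feed item via the rss crate's builders. A trimmed standalone sketch of the same builder calls (assumed crates: rss with the "builders" feature and maplit; the URL and length values are placeholders, the xmlns mirrors the Mikan value used in this commit):

use maplit::btreemap;
use rss::extension::ExtensionBuilder;

fn main() {
    let torrent_extension = ExtensionBuilder::default()
        .name("torrent")
        .attrs(btreemap! {
            "xmlns".to_string() => "https://mikanani.me/0.1/".to_string(),
        })
        .children(btreemap! {
            "link".to_string() => vec![
                ExtensionBuilder::default()
                    .name("link")
                    .value("https://example.com/a.torrent".to_string())
                    .build(),
            ],
            "contentLength".to_string() => vec![
                ExtensionBuilder::default()
                    .name("contentLength")
                    .value("123456".to_string())
                    .build(),
            ],
        })
        .build();

    // The extension is then stored in the item's ExtensionMap under the empty
    // prefix, keyed as "torrent", as in the trait implementation above.
    println!("{torrent_extension:?}");
}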
@@ -42,6 +42,12 @@ impl RssFeedItemTrait for episodes::Model {
Cow::Owned(format!("{PROJECT_NAME}:episode:{}", self.id))
}

fn get_xmlns(&self) -> Cow<'_, str> {
match self.episode_type {
episodes::EpisodeType::Mikan => Cow::Borrowed("https://mikanani.me/0.1/"),
}
}

fn get_title(&self) -> Cow<'_, str> {
Cow::Borrowed(&self.display_name)
}