feat: basic support rss
@@ -500,31 +500,31 @@ async fn merge_mikan_classic_episodes_and_strip_columns() -> RecorderResult<()>
     }

     select_columns_and_write(merged_df.clone(), "tiny", &["fansub_name", "original_name"])?;
-    select_columns_and_write(
-        merged_df.clone(),
-        "lite",
-        &[
-            "mikan_fansub_id",
-            "fansub_name",
-            "mikan_episode_id",
-            "original_name",
-        ],
-    )?;
-    select_columns_and_write(
-        merged_df,
-        "full",
-        &[
-            "id",
-            "publish_at_timestamp",
-            "mikan_fansub_id",
-            "fansub_name",
-            "mikan_episode_id",
-            "original_name",
-            "magnet_link",
-            "file_size",
-            "torrent_link",
-        ],
-    )?;
+    // select_columns_and_write(
+    //     merged_df.clone(),
+    //     "lite",
+    //     &[
+    //         "mikan_fansub_id",
+    //         "fansub_name",
+    //         "mikan_episode_id",
+    //         "original_name",
+    //     ],
+    // )?;
+    // select_columns_and_write(
+    //     merged_df,
+    //     "full",
+    //     &[
+    //         "id",
+    //         "publish_at_timestamp",
+    //         "mikan_fansub_id",
+    //         "fansub_name",
+    //         "mikan_episode_id",
+    //         "original_name",
+    //         "magnet_link",
+    //         "file_size",
+    //         "torrent_link",
+    //     ],
+    // )?;

     Ok(())
 }
@@ -53,14 +53,15 @@ impl App {

         let mut router = Router::<Arc<dyn AppContextTrait>>::new();

-        let (graphql_c, oidc_c, metadata_c, static_c) = futures::try_join!(
+        let (graphql_c, oidc_c, metadata_c, static_c, feeds_c) = futures::try_join!(
             controller::graphql::create(context.clone()),
             controller::oidc::create(context.clone()),
             controller::metadata::create(context.clone()),
             controller::r#static::create(context.clone()),
+            controller::feeds::create(context.clone()),
         )?;

-        for c in [graphql_c, oidc_c, metadata_c, static_c] {
+        for c in [graphql_c, oidc_c, metadata_c, static_c, feeds_c] {
             router = c.apply_to(router);
         }

@@ -39,7 +39,7 @@ pub struct EpisodeEnclosureMeta {
     pub magnet_link: Option<String>,
     pub torrent_link: Option<String>,
     pub pub_date: Option<DateTime<Utc>>,
-    pub content_length: Option<u64>,
+    pub content_length: Option<i64>,
 }

 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
@@ -41,7 +41,7 @@ use crate::{
 pub struct MikanRssEpisodeItem {
     pub title: String,
     pub torrent_link: Url,
-    pub content_length: Option<u64>,
+    pub content_length: Option<i64>,
     pub mime: String,
     pub pub_date: Option<DateTime<Utc>>,
     pub mikan_episode_id: String,
@@ -95,15 +95,32 @@ impl TryFrom<rss::Item> for MikanRssEpisodeItem {
                 RecorderError::from_mikan_rss_invalid_field(Cow::Borrowed("mikan_episode_id"))
             })?;

+        let pub_date = item
+            .extensions
+            .get("torrent")
+            .and_then(|t| t.get("pubDate"))
+            .and_then(|e| e.first())
+            .and_then(|e| e.value.as_deref());
+
         Ok(MikanRssEpisodeItem {
             title,
             torrent_link: enclosure_url,
             content_length: enclosure.length.parse().ok(),
             mime: mime_type,
-            pub_date: item.pub_date.and_then(|s| {
-                DateTime::parse_from_rfc2822(&s)
+            pub_date: pub_date.and_then(|s| {
+                DateTime::parse_from_rfc2822(s)
                     .ok()
                     .map(|s| s.with_timezone(&Utc))
+                    .or_else(|| {
+                        DateTime::parse_from_rfc3339(s)
+                            .ok()
+                            .map(|s| s.with_timezone(&Utc))
+                    })
+                    .or_else(|| {
+                        DateTime::parse_from_rfc3339(&format!("{s}+08:00"))
+                            .ok()
+                            .map(|s| s.with_timezone(&Utc))
+                    })
             }),
             mikan_episode_id,
             magnet_link: None,
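The layered date handling above reads as: RFC 2822 first, then RFC 3339, then RFC 3339 with an assumed `+08:00` offset appended for torrent pubDate values that carry no timezone. A minimal standalone sketch of that chain (chrono only; the sample strings are hypothetical):

```rust
use chrono::{DateTime, Utc};

// Mirrors the fallback chain in the hunk above; the +08:00 suffix is the
// commit's assumption for timezone-less Mikan torrent pubDate strings.
fn parse_pub_date(s: &str) -> Option<DateTime<Utc>> {
    DateTime::parse_from_rfc2822(s)
        .ok()
        .map(|d| d.with_timezone(&Utc))
        .or_else(|| {
            DateTime::parse_from_rfc3339(s)
                .ok()
                .map(|d| d.with_timezone(&Utc))
        })
        .or_else(|| {
            DateTime::parse_from_rfc3339(&format!("{s}+08:00"))
                .ok()
                .map(|d| d.with_timezone(&Utc))
        })
}

fn main() {
    // Hypothetical inputs covering the three branches.
    assert!(parse_pub_date("Tue, 01 Jul 2025 20:30:00 +0800").is_some()); // RFC 2822
    assert!(parse_pub_date("2025-07-01T20:30:00+08:00").is_some()); // RFC 3339
    assert!(parse_pub_date("2025-07-01T20:30:00").is_some()); // offset-less, assumed +08:00
}
```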
@@ -1,9 +1,50 @@
-use seaography::{Builder as SeaographyBuilder, BuilderContext};
+use std::sync::Arc;

-use crate::{graphql::domains::subscribers::restrict_subscriber_for_entity, models::feeds};
+use async_graphql::dynamic::ResolverContext;
+use sea_orm::Value as SeaValue;
+use seaography::{Builder as SeaographyBuilder, BuilderContext, SeaResult};
+
+use crate::{
+    graphql::{
+        domains::subscribers::restrict_subscriber_for_entity,
+        infra::util::{get_entity_column_key, get_entity_key},
+    },
+    models::feeds,
+};

 pub fn register_feeds_to_schema_context(context: &mut BuilderContext) {
     restrict_subscriber_for_entity::<feeds::Entity>(context, &feeds::Column::SubscriberId);
+    {
+        let entity_column_key =
+            get_entity_column_key::<feeds::Entity>(context, &feeds::Column::Token);
+        let entity_key = get_entity_key::<feeds::Entity>(context);
+        let entity_name = context.entity_query_field.type_name.as_ref()(&entity_key);
+        let entity_create_one_mutation_field_name = Arc::new(format!(
+            "{}{}",
+            entity_name, context.entity_create_one_mutation.mutation_suffix
+        ));
+        let entity_create_batch_mutation_field_name = Arc::new(format!(
+            "{}{}",
+            entity_name,
+            context.entity_create_batch_mutation.mutation_suffix.clone()
+        ));
+
+        context.types.input_none_conversions.insert(
+            entity_column_key,
+            Box::new(
+                move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
+                    let field_name = context.field().name();
+                    if field_name == entity_create_one_mutation_field_name.as_str()
+                        || field_name == entity_create_batch_mutation_field_name.as_str()
+                    {
+                        Ok(Some(SeaValue::String(Some(Box::new(nanoid::nanoid!())))))
+                    } else {
+                        Ok(None)
+                    }
+                },
+            ),
+        );
+    }
 }

 pub fn register_feeds_to_schema_builder(mut builder: SeaographyBuilder) -> SeaographyBuilder {
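The input_none_conversions hook registered above only fires for the feeds create-one and create-batch mutations; its effect is simply "no token supplied, generate one". A stripped-down sketch of that behaviour outside seaography (assuming the nanoid crate, as used in the diff; the helper name is hypothetical):

```rust
// Hypothetical helper isolating the default applied by the hook above:
// an omitted token is replaced with a fresh nanoid, an explicit one is kept.
fn feed_token_or_default(provided: Option<String>) -> String {
    provided.unwrap_or_else(|| nanoid::nanoid!())
}

fn main() {
    assert_eq!(feed_token_or_default(None).len(), 21); // nanoid's default length
    assert_eq!(feed_token_or_default(Some("my-token".into())), "my-token");
}
```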
@@ -267,7 +267,6 @@ where
             Box::new(
                 move |context: &ResolverContext| -> SeaResult<Option<SeaValue>> {
                     let field_name = context.field().name();
-                    tracing::warn!("field_name: {:?}", field_name);
                     if field_name == entity_create_one_mutation_field_name.as_str()
                         || field_name == entity_create_batch_mutation_field_name.as_str()
                     {
@@ -79,6 +79,9 @@ pub enum Episodes {
     BangumiId,
     SubscriberId,
     DownloadId,
+    /**
+     * @deprecated
+     */
     SavePath,
     Resolution,
     Season,
@@ -95,8 +95,8 @@ impl MigrationTrait for Migration {
                         DownloadMimeEnum,
                         DownloadMime::iden_values(),
                     ))
-                    .col(big_unsigned(Downloads::AllSize))
-                    .col(big_unsigned(Downloads::CurrSize))
+                    .col(big_integer(Downloads::AllSize))
+                    .col(big_integer(Downloads::CurrSize))
                     .col(text(Downloads::Url))
                     .col(text_null(Downloads::Homepage))
                     .col(text_null(Downloads::SavePath))
@@ -1,8 +1,5 @@
 use async_trait::async_trait;
-use sea_orm_migration::{
-    prelude::*,
-    schema::{enumeration, integer_null, pk_auto, text},
-};
+use sea_orm_migration::{prelude::*, schema::*};

 use crate::{
     migrations::defs::{
@@ -1,10 +1,5 @@
 use async_trait::async_trait;
-use sea_orm_migration::{
-    prelude::*,
-    schema::{
-        enumeration, enumeration_null, integer_null, text_null, timestamp_with_time_zone_null,
-    },
-};
+use sea_orm_migration::{prelude::*, schema::*};

 use crate::{
     migrations::defs::{Bangumi, CustomSchemaManagerExt, Episodes},
@@ -85,7 +80,10 @@ impl MigrationTrait for Migration {
                     .add_column_if_not_exists(timestamp_with_time_zone_null(
                         Episodes::EnclosurePubDate,
                     ))
-                    .add_column_if_not_exists(integer_null(Episodes::EnclosureContentLength))
+                    .add_column_if_not_exists(big_integer_null(
+                        Episodes::EnclosureContentLength,
+                    ))
+                    .drop_column(Episodes::SavePath)
                     .to_owned(),
             )
             .await?;
@@ -17,7 +17,7 @@ use crate::{
             MikanBangumiHash, MikanBangumiMeta, build_mikan_bangumi_subscription_rss_url,
             scrape_mikan_poster_meta_from_image_url,
         },
-        origin::{OriginCompTrait, SeasonComp},
+        origin::{BangumiComps, OriginCompTrait},
     },
 };

@@ -129,11 +129,12 @@ impl ActiveModel {
     ) -> RecorderResult<Self> {
         let mikan_client = ctx.mikan();
         let mikan_base_url = mikan_client.base_url();
-        let season_comp = SeasonComp::parse_comp(&meta.bangumi_title)
+        let season_comp = BangumiComps::parse_comp(&meta.bangumi_title)
             .ok()
-            .map(|(_, s)| s);
+            .map(|(_, s)| s)
+            .and_then(|s| s.season);
         let season_index = season_comp.as_ref().map(|s| s.num).unwrap_or(1);
-        let season_raw = season_comp.map(|s| s.source.into_owned());
+        let season_raw = season_comp.map(|s| s.source.to_string());

         let rss_url = build_mikan_bangumi_subscription_rss_url(
             mikan_base_url.clone(),
@@ -162,6 +163,7 @@ impl ActiveModel {
             origin_poster_link: ActiveValue::Set(meta.origin_poster_src.map(|src| src.to_string())),
             homepage: ActiveValue::Set(Some(meta.homepage.to_string())),
+            rss_link: ActiveValue::Set(Some(rss_url.to_string())),
             bangumi_type: ActiveValue::Set(BangumiType::Mikan),
             ..Default::default()
         })
     }
@@ -52,8 +52,8 @@ pub struct Model {
     pub status: DownloadStatus,
     pub mime: DownloadMime,
     pub url: String,
-    pub all_size: Option<u64>,
-    pub curr_size: Option<u64>,
+    pub all_size: Option<i64>,
+    pub curr_size: Option<i64>,
     pub homepage: Option<String>,
     pub save_path: Option<String>,
 }
@@ -36,7 +36,7 @@ pub struct Model {
     pub enclosure_torrent_link: Option<String>,
     pub enclosure_magnet_link: Option<String>,
     pub enclosure_pub_date: Option<DateTimeUtc>,
-    pub enclosure_content_length: Option<u64>,
+    pub enclosure_content_length: Option<i64>,
     pub episode_type: EpisodeType,
     pub origin_name: String,
     pub display_name: String,
@@ -166,6 +166,7 @@ impl ActiveModel {
             enclosure_magnet_link: ActiveValue::Set(enclosure_meta.magnet_link),
             enclosure_pub_date: ActiveValue::Set(enclosure_meta.pub_date),
+            enclosure_content_length: ActiveValue::Set(enclosure_meta.content_length),
             episode_type: ActiveValue::Set(EpisodeType::Mikan),
             ..Default::default()
         };

@@ -39,7 +39,9 @@ pub enum FeedSource {
 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, DeriveEntityModel)]
 #[sea_orm(table_name = "feeds")]
 pub struct Model {
+    #[sea_orm(default_expr = "Expr::current_timestamp()")]
     pub created_at: DateTimeUtc,
+    #[sea_orm(default_expr = "Expr::current_timestamp()")]
     pub updated_at: DateTimeUtc,
     #[sea_orm(primary_key)]
     pub id: i32,
@@ -1,5 +1,5 @@
 use rss::Channel;
-use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
+use sea_orm::{ColumnTrait, EntityTrait, JoinType, QueryFilter, QuerySelect, RelationTrait};
 use url::Url;

 use crate::{
@@ -8,7 +8,7 @@ use crate::{
     models::{
         episodes,
         feeds::{self, FeedSource, RssFeedTrait, SubscriptionEpisodesFeed},
-        subscriptions,
+        subscription_episode, subscriptions,
     },
 };

@@ -22,19 +22,30 @@ impl Feed {
             FeedSource::SubscriptionEpisode => {
                 let db = ctx.db();
                 let (subscription, episodes) = if let Some(subscription_id) = m.subscription_id
-                    && let Some((subscription, episodes)) = subscriptions::Entity::find()
+                    && let Some(subscription) = subscriptions::Entity::find()
                         .filter(subscriptions::Column::Id.eq(subscription_id))
-                        .find_with_related(episodes::Entity)
-                        .all(db)
+                        .one(db)
                         .await?
-                        .pop()
                 {
+                    let episodes = episodes::Entity::find()
+                        .join(
+                            JoinType::InnerJoin,
+                            episodes::Relation::SubscriptionEpisode.def(),
+                        )
+                        .join(
+                            JoinType::InnerJoin,
+                            subscription_episode::Relation::Subscription.def(),
+                        )
+                        .filter(subscriptions::Column::Id.eq(subscription_id))
+                        .all(db)
+                        .await?;
                     (subscription, episodes)
                 } else {
                     return Err(RecorderError::ModelEntityNotFound {
                         entity: "Subscription".into(),
                     });
                 };

                 Ok(Feed::SubscritpionEpisodes(
                     SubscriptionEpisodesFeed::from_model(m, subscription, episodes),
                 ))
@@ -23,7 +23,7 @@ pub trait RssFeedItemTrait: Sized {
     fn get_enclosure_link(&self, ctx: &dyn AppContextTrait, api_base: &Url)
     -> Option<Cow<'_, str>>;
     fn get_enclosure_pub_date(&self) -> Option<DateTime<Utc>>;
-    fn get_enclosure_content_length(&self) -> Option<u64>;
+    fn get_enclosure_content_length(&self) -> Option<i64>;
     fn into_item(self, ctx: &dyn AppContextTrait, api_base: &Url) -> RecorderResult<Item> {
         let enclosure_mime_type =
             self.get_enclosure_mime()
@@ -43,12 +43,7 @@ pub trait RssFeedItemTrait: Sized {
                     source: None.into(),
                 }
             })?;
-        let enclosure_pub_date = self.get_enclosure_pub_date().ok_or_else(|| {
-            RecorderError::MikanRssInvalidFieldError {
-                field: "enclosure_pub_date".into(),
-                source: None.into(),
-            }
-        })?;
+        let enclosure_pub_date = self.get_enclosure_pub_date();
         let link = self.get_link(ctx, api_base).ok_or_else(|| {
             RecorderError::MikanRssInvalidFieldError {
                 field: "link".into(),
@@ -58,9 +53,8 @@ pub trait RssFeedItemTrait: Sized {

         let mut extensions = ExtensionMap::default();
         if enclosure_mime_type == BITTORRENT_MIME_TYPE {
-            extensions.insert(
-                "torrent".to_string(),
-                btreemap! {
+            extensions.insert("torrent".to_string(), {
+                let mut map = btreemap! {
                     "link".to_string() => vec![
                         ExtensionBuilder::default().name(
                             "link"
@@ -71,13 +65,20 @@ pub trait RssFeedItemTrait: Sized {
                             "contentLength"
                         ).value(enclosure_content_length.to_string()).build()
                     ],
-                    "pubDate".to_string() => vec![
-                        ExtensionBuilder::default().name(
-                            "pubDate"
-                        ).value(enclosure_pub_date.to_rfc3339()).build()
-                    ],
-                },
-            );
+                };
+                if let Some(pub_date) = enclosure_pub_date {
+                    map.insert(
+                        "pubDate".to_string(),
+                        vec![
+                            ExtensionBuilder::default()
+                                .name("pubDate")
+                                .value(pub_date.to_rfc3339())
+                                .build(),
+                        ],
+                    );
+                }
+                map
+            });
         };

         let enclosure = EnclosureBuilder::default()
@@ -97,7 +98,6 @@ pub trait RssFeedItemTrait: Sized {
             .description(self.get_description().to_string())
             .link(link.to_string())
             .enclosure(enclosure)
-            .pub_date(enclosure_pub_date.to_rfc3339())
             .extensions(extensions)
             .build();

@@ -74,7 +74,7 @@ impl RssFeedItemTrait for episodes::Model {
         self.enclosure_pub_date
     }

-    fn get_enclosure_content_length(&self) -> Option<u64> {
+    fn get_enclosure_content_length(&self) -> Option<i64> {
         self.enclosure_content_length
     }
 }
@@ -84,8 +84,8 @@ impl RssFeedTrait for SubscriptionEpisodesFeed {

     fn get_description(&self) -> Cow<'_, str> {
         Cow::Owned(format!(
-            "{PROJECT_NAME} - episodes of subscription \"{}\"",
-            self.subscription.display_name
+            "{PROJECT_NAME} - episodes of subscription {}",
+            self.subscription.id
         ))
     }

@@ -41,6 +41,10 @@ pub enum Relation {
     Auth,
     #[sea_orm(has_many = "super::credential_3rd::Entity")]
     Credential3rd,
+    #[sea_orm(has_many = "super::feeds::Entity")]
+    Feed,
+    #[sea_orm(has_many = "super::subscriber_tasks::Entity")]
+    SubscriberTask,
 }

 impl Related<super::subscriptions::Entity> for Entity {
@@ -79,6 +83,18 @@ impl Related<super::credential_3rd::Entity> for Entity {
     }
 }

+impl Related<super::feeds::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Feed.def()
+    }
+}
+
+impl Related<super::subscriber_tasks::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::SubscriberTask.def()
+    }
+}
+
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
 pub enum RelatedEntity {
     #[sea_orm(entity = "super::subscriptions::Entity")]
@@ -91,6 +107,10 @@
     Episode,
     #[sea_orm(entity = "super::credential_3rd::Entity")]
     Credential3rd,
+    #[sea_orm(entity = "super::feeds::Entity")]
+    Feed,
+    #[sea_orm(entity = "super::subscriber_tasks::Entity")]
+    SubscriberTask,
 }

 #[derive(Debug, Deserialize, Serialize)]
@@ -59,6 +59,8 @@ pub enum Relation {
         on_delete = "SetNull"
     )]
     Credential3rd,
+    #[sea_orm(has_many = "super::feeds::Entity")]
+    Feed,
 }

 impl Related<super::subscribers::Entity> for Entity {
@@ -93,6 +95,12 @@ impl Related<super::bangumi::Entity> for Entity {
     }
 }

+impl Related<super::feeds::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Feed.def()
+    }
+}
+
 impl Related<super::episodes::Entity> for Entity {
     fn to() -> RelationDef {
         super::subscription_episode::Relation::Episode.def()
@@ -127,6 +135,8 @@
     SubscriptionBangumi,
     #[sea_orm(entity = "super::credential_3rd::Entity")]
     Credential3rd,
+    #[sea_orm(entity = "super::feeds::Entity")]
+    Feed,
 }

 #[async_trait]
@@ -1,7 +1,7 @@
 use std::sync::Arc;

 use axum::{
-    Extension, Router,
+    Router,
     extract::{Path, State},
     response::IntoResponse,
     routing::get,
@@ -21,21 +21,22 @@ pub const CONTROLLER_PREFIX: &str = "/api/feeds";
 async fn rss_handler(
     State(ctx): State<Arc<dyn AppContextTrait>>,
     Path(token): Path<String>,
-    forwarded_info: Extension<ForwardedRelatedInfo>,
+    forwarded_info: ForwardedRelatedInfo,
 ) -> RecorderResult<impl IntoResponse> {
     let api_base = forwarded_info
         .resolved_origin()
         .ok_or(RecorderError::MissingOriginError)?;
     let channel = feeds::Model::find_rss_feed_by_token(ctx.as_ref(), &token, &api_base).await?;

     Ok((
         StatusCode::OK,
-        [("Content-Type", "application/rss+xml")],
+        [("Content-Type", "application/xml; charset=utf-8")],
         channel.to_string(),
     ))
 }

 pub async fn create(_ctx: Arc<dyn AppContextTrait>) -> RecorderResult<Controller> {
-    let router = Router::<Arc<dyn AppContextTrait>>::new().route("rss/{token}", get(rss_handler));
+    let router = Router::<Arc<dyn AppContextTrait>>::new().route("/rss/{token}", get(rss_handler));

     Ok(Controller::from_prefix(CONTROLLER_PREFIX, router))
 }
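Two details in the controller hunk above are easy to miss: axum route paths must begin with a leading `/` (hence `rss/{token}` becoming `/rss/{token}`), and the handler now takes ForwardedRelatedInfo directly instead of an Extension wrapper. A minimal sketch of the routing side only, with a hypothetical handler (assuming axum 0.8's `{param}` capture syntax and a tokio runtime):

```rust
use axum::{Router, extract::Path, routing::get};

// Hypothetical stand-in for rss_handler; only the path capture is shown here.
async fn rss_stub(Path(token): Path<String>) -> String {
    format!("feed for token {token}")
}

fn build_router() -> Router {
    // Nested under a prefix such as "/api/feeds", the effective route
    // becomes "/api/feeds/rss/{token}".
    Router::new().route("/rss/{token}", get(rss_stub))
}

#[tokio::main]
async fn main() {
    let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap();
    axum::serve(listener, build_router()).await.unwrap();
}
```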
@@ -1,7 +1,7 @@
 use std::sync::Arc;

 use axum::{
-    Extension, Json, Router,
+    Json, Router,
     extract::{Query, State},
     routing::get,
 };
@@ -42,7 +42,7 @@ async fn oidc_callback(

 async fn oidc_auth(
     State(ctx): State<Arc<dyn AppContextTrait>>,
-    forwarded_info: Extension<ForwardedRelatedInfo>,
+    forwarded_info: ForwardedRelatedInfo,
 ) -> Result<Json<OidcAuthRequest>, AuthError> {
     let auth_service = ctx.auth();
     if let AuthService::Oidc(oidc_auth_service) = auth_service {