diff --git a/src/config/mod.rs b/src/config/mod.rs index 7390299..1149979 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -6,20 +6,42 @@ use std::{ time, }; +use chrono::{DateTime, Duration, Utc}; use lemmy_api_common::{ community::{ListCommunities, ListCommunitiesResponse}, post::CreatePost, sensitive::Sensitive, }; use lemmy_db_schema::{ - newtypes::{CommunityId, LanguageId, PostId}, + newtypes::{CommunityId, LanguageId}, ListingType, }; use serde_derive::{Deserialize, Serialize}; use tokio::time::sleep; use url::Url; -use crate::CLIENT; +use crate::{ + feeds::{FeedChapterData, FeedSeriesData, FeedSetting, FeedVolumeData}, + PostQueueMetadata, CLIENT, +}; + +macro_rules! api_url { + () => { + "https://labs.j-novel.club/app/v1/".to_string() + }; +} + +macro_rules! chapter_url { + () => { + "https://j-novel.club/read/".to_string() + }; +} + +macro_rules! volume_url_base { + () => { + "https://j-novel.club/series/".to_string() + }; +} macro_rules! pub_struct { ($name:ident {$($field:ident: $t:ty,)*}) => { @@ -49,6 +71,19 @@ impl Secrets { return config_parse; } + + pub(crate) fn load(&mut self) { + let file_contents = match fs::read_to_string("secrets.json") { + Ok(data) => data, + Err(e) => panic!("ERROR: secrets.json could not be read:\n\n{:#?}", e), + }; + let config_parse: Secrets = match serde_json::from_str(&file_contents) { + Ok(data) => data, + Err(e) => panic!("ERROR: secrets.json could not be parsed:\n\n{:#?}", e), + }; + + *self = config_parse; + } } #[derive(Serialize, Deserialize, Clone, PartialEq)] @@ -76,11 +111,14 @@ pub(crate) struct RedditLogin { } // Config structs -pub_struct!(Config { - instance: String, - reddit_config: RedditConfig, - feeds: Vec<FeedSetting>, -}); +#[derive(Serialize, Deserialize, Clone, PartialEq)] +pub(crate) struct Config { + pub(crate) instance: String, + pub(crate) reddit_config: RedditConfig, + pub(crate) feeds: Vec<FeedSetting>, + pub(crate) uptime_kuma: Option<String>, + pub(crate) config_reload: Option<usize>, +} impl Config { pub(crate) fn init() 
-> Config { @@ -106,63 +144,163 @@ impl Config { Err(e) => panic!("ERROR: config.json could not be parsed:\n\n{:#?}", e), }; - self.feeds = config_parse.feeds; - self.instance = config_parse.instance; - self.reddit_config = config_parse.reddit_config; + *self = config_parse; } - pub(crate) async fn check_feeds( - &mut self, - post_history: &mut Vec<PrevPost>, - community_ids: &CommunitiesVector, - auth: &Sensitive<String>, - ) -> Result<Vec<(CreatePost, (Option<usize>, usize, String))>, Box<dyn Error>> { - let mut post_queue: Vec<(CreatePost, (Option<usize>, usize, String))> = vec![]; - - let mut i = 0; - while i < self.feeds.len() { - let feed = &self.feeds[i]; - + #[warn(unused_results)] + pub(crate) async fn fetch_infos(&self) -> Result<(), Box<dyn Error>> { + // Get Series Data + for feed in &self.feeds { let res = CLIENT - .get(feed.feed_url.clone()) + .get(api_url!() + "series/" + &feed.series_slug + "?format=json") .send() .await? .text() .await?; - let data: FeedData = serde_json::from_str(&res).unwrap(); + let data: FeedSeriesData = serde_json::from_str(&res).unwrap(); + println!("{:#?}", data); + } + return Ok(()); + } - let mut prev_post_idx: Option<usize> = None; - let mut do_post = true; - post_history.iter().enumerate().for_each(|(idx, post)| { - if &post.last_post_url == &data.items[0].url { - do_post = false; - } else if &post.title == &data.title { - prev_post_idx = Some(idx); - } - }); + #[warn(unused_results)] + pub(crate) async fn check_feeds( + &self, + post_history: &Vec<PrevPost>, + community_ids: &CommunitiesVector, + auth: &Sensitive<String>, + ) -> Result<Vec<(CreatePost, PostQueueMetadata)>, Box<dyn Error>> { + let mut post_queue: Vec<(CreatePost, PostQueueMetadata)> = vec![]; - if do_post { - let item = &data.items[0]; - let new_post = CreatePost { - name: item.title.clone(), - community_id: community_ids.find(&feed.communities.chapter), - url: Some(Url::parse(&item.url).unwrap()), - body: Some( - "[Reddit](https://reddit.com/r/HonzukinoGekokujou)\n\n[Discord](https://discord.com/invite/fGefmzu)".into(), - ), - honeypot: None, - nsfw: Some(false), - language_id: Some(LanguageId(37)), // 
TODO get this id once every few hours per API request, the ordering of IDs suggests that the EN Id might change in the future - auth: auth.clone(), + for feed in &self.feeds { + let mut history_data: &PrevPost = &PrevPost { + id: feed.id, + last_volume_slug: None, + last_volume_time: None, + last_part_slug: None, + last_part_time: None, }; - - let prev_data = (prev_post_idx, feed.id, data.title); - - post_queue.push((new_post, prev_data)); + for post in post_history { + if post.id == feed.id { + history_data = post; + break; + } } - sleep(time::Duration::from_millis(100)).await; // Should prevent dos-ing J-Novel servers - i += 1; + + // Check for Volume Release + let res = CLIENT + .get(api_url!() + "series/" + &feed.series_slug + "/volumes?format=json") + .send() + .await? + .text() + .await?; + + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] + struct FeedVolumeReturn { + volumes: Vec<FeedVolumeData>, + } + let mut data: FeedVolumeReturn = serde_json::from_str(&res).unwrap(); + + let current_volume_slug = data.volumes[&data.volumes.len() - 1].slug.clone(); + + let now = Utc::now(); + data.volumes.reverse(); + for volume in data.volumes { + let published = DateTime::parse_from_rfc3339(&volume.publishing).unwrap(); + // Get First Volume that has valid Release Data + if now >= published { + if Some(volume.slug.clone()) != history_data.last_volume_slug { + println!( + "Should Post for {} Volume {}", + feed.series_slug, volume.slug + ); + if let Some(volume_community) = &feed.communities.volume { + let mut post_url = Url::parse(&(volume_url_base!() + &feed.series_slug))?; + post_url.set_fragment(Some(&("volume-".to_string() + &volume.number.to_string()))); + + let new_post = CreatePost { + name: volume.title.clone(), + community_id: community_ids.find(&volume_community), + url: Some(post_url), + body: Some( + "[Reddit](https://reddit.com/r/HonzukinoGekokujou)\n\n\ + [Discord](https://discord.com/invite/fGefmzu)\n\n\ + 
[#ascendanceofabookworm](https://mastodon.social/tags/ascendanceofabookworm)\n\n\ + [#honzuki](https://mastodon.social/tags/honzuki)\n\n\ + [#本好きの下剋上](https://mastodon.social/tags/%E6%9C%AC%E5%A5%BD%E3%81%8D%E3%81%AE%E4%B8%8B%E5%89%8B%E4%B8%8A)" + .into(), + ), + honeypot: None, + nsfw: Some(false), + language_id: Some(LanguageId(37)), // TODO get this id once every few hours per API request, the ordering of IDs suggests that the EN Id might change in the future + auth: auth.clone(), + }; + println!("{:?}", new_post.url); + post_queue.push(( + new_post, + PostQueueMetadata { + series: feed.series_slug.clone(), + part: None, + volume: Some(volume.slug), + }, + )); + } + } + break; + } + } + + if let Some(chapter_community) = &feed.communities.chapter { + // Check for Part Release + let res = CLIENT + .get(api_url!() + "volumes/" + &current_volume_slug + "/parts?format=json") + .send() + .await? + .text() + .await?; + + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] + struct FeedChapterReturn { + parts: Vec<FeedChapterData>, + } + let mut data: FeedChapterReturn = serde_json::from_str(&res).unwrap(); + + data.parts.reverse(); + for part in data.parts { + if Some(part.slug.clone()) != history_data.last_part_slug { + println!("Should Post for {} Part {}", feed.series_slug, part.slug); + + let new_post = CreatePost { + name: part.title.clone(), + community_id: community_ids.find(&chapter_community), + url: Some(Url::parse(&(chapter_url!() + &part.slug)).unwrap()), + body: Some( + "[Reddit](https://reddit.com/r/HonzukinoGekokujou)\n\n\ + [Discord](https://discord.com/invite/fGefmzu)\n\n\ + [#ascendanceofabookworm](https://mastodon.social/tags/ascendanceofabookworm)\n\n\ + [#honzuki](https://mastodon.social/tags/honzuki)\n\n\ + [#本好きの下剋上](https://mastodon.social/tags/%E6%9C%AC%E5%A5%BD%E3%81%8D%E3%81%AE%E4%B8%8B%E5%89%8B%E4%B8%8A)" + .into(), + ), + honeypot: None, + nsfw: Some(false), + language_id: Some(LanguageId(37)), // TODO get this id once every few hours per API request, the 
ordering of IDs suggests that the EN Id might change in the future + auth: auth.clone(), + }; + post_queue.push(( + new_post, + PostQueueMetadata { + series: feed.series_slug.clone(), + part: Some(part.slug), + volume: None, + }, + )); + } + break; + } + } + sleep(Duration::milliseconds(100).to_std().unwrap()).await; // Should prevent dos-ing J-Novel servers } return Ok(post_queue); @@ -174,18 +312,7 @@ pub_struct!(RedditConfig { subreddit: String, }); -pub_struct!(FeedSetting { - id: usize, - feed_url: String, - communities: FeedCommunities, - reddit: FeedRedditSettings, -}); - -pub_struct!(FeedCommunities { - chapter: LemmyCommunities, - volume: LemmyCommunities, -}); - +//noinspection ALL #[derive(Serialize, Deserialize, Clone, PartialEq, strum_macros::Display)] #[allow(non_camel_case_types)] pub(crate) enum LemmyCommunities { @@ -196,22 +323,18 @@ pub(crate) enum LemmyCommunities { metadiscussions, } -pub_struct!(FeedRedditSettings { - enabled: bool, - flair: String, -}); - // Posts structs pub_struct!(PrevPost { id: usize, - post_id: PostId, - title: String, - last_post_url: String, + last_volume_slug: Option<String>, + last_volume_time: Option<String>, + last_part_slug: Option<String>, + last_part_time: Option<String>, }); impl PrevPost { pub(crate) fn load() -> Vec<PrevPost> { - let history; + let mut history; if Path::new("posts.json").exists() { let file_contents = match fs::read_to_string("posts.json") { @@ -233,6 +356,8 @@ impl PrevPost { history = [].to_vec() } + history.sort_by(|a, b| a.id.cmp(&b.id)); + return history; } @@ -250,27 +375,6 @@ impl PrevPost { } } -// RSS Feed Structs -pub_struct!(FeedData { - version: String, - title: String, - home_page_url: String, - description: String, - author: FeedAuthor, - items: Vec<FeedEntry>, -}); - -pub_struct!(FeedAuthor { name: String, }); - -pub_struct!(FeedEntry { - id: String, - url: String, - title: String, - summary: String, - image: Option<String>, - date_published: String, -}); - // Bot Helper Structs pub_struct!(CommunitiesVector { ids: Vec<(CommunityId, 
String)>, @@ -282,11 +386,7 @@ impl CommunitiesVector { } #[warn(unused_results)] - pub(crate) async fn load( - &mut self, - auth: &Sensitive<String>, - base: &String, - ) -> Result<(), Box<dyn Error>> { + pub(crate) async fn load(&mut self, auth: &Sensitive<String>, base: &String) -> Result<(), Box<dyn Error>> { let params = ListCommunities { auth: Some(auth.clone()), type_: Some(ListingType::Local), diff --git a/src/feeds/mod.rs b/src/feeds/mod.rs new file mode 100644 index 0000000..713c98d --- /dev/null +++ b/src/feeds/mod.rs @@ -0,0 +1,85 @@ +use serde_derive::{Deserialize, Serialize}; + +use crate::config::LemmyCommunities; + +macro_rules! api_url { + () => { + "https://labs.j-novel.club/app/v1/".to_string() + }; +} + +#[derive(Serialize, Deserialize, Clone, PartialEq)] +pub(crate) struct FeedSetting { + pub(crate) id: usize, + pub(crate) series_slug: String, + pub(crate) communities: FeedCommunities, + pub(crate) reddit: Option<FeedRedditSettings>, +} + +#[derive(Serialize, Deserialize, Clone, PartialEq)] +pub(crate) struct FeedCommunities { + pub(crate) chapter: Option<LemmyCommunities>, + pub(crate) volume: Option<LemmyCommunities>, +} + +#[derive(Serialize, Deserialize, Clone, PartialEq)] +pub(crate) struct FeedRedditSettings { + pub(crate) enabled: bool, + pub(crate) flair: String, +} + +// RSS Feed Structs +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +pub(crate) struct FeedData { + pub(crate) version: String, + pub(crate) title: String, + pub(crate) home_page_url: String, + pub(crate) description: String, + pub(crate) author: FeedAuthor, + pub(crate) items: Vec<FeedEntry>, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +pub(crate) struct FeedSeriesData { + pub(crate) title: String, + pub(crate) slug: String, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +pub(crate) struct FeedVolumeData { + pub(crate) title: String, + pub(crate) slug: String, + pub(crate) number: u8, + pub(crate) publishing: String, + pub(crate) cover: FeedCoverData, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] 
+pub(crate) struct FeedChapterData { + pub(crate) title: String, + pub(crate) slug: String, + pub(crate) cover: FeedCoverData, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +pub(crate) struct FeedCoverData { + #[serde(alias = "coverUrl")] + pub(crate) cover: String, + #[serde(alias = "thumbnailUrl")] + pub(crate) thumbnail: String, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +pub(crate) struct FeedAuthor { + pub(crate) name: String, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +pub(crate) struct FeedEntry { + pub(crate) id: String, + pub(crate) url: String, + pub(crate) title: String, + pub(crate) summary: String, + pub(crate) image: Option<String>, + pub(crate) date_published: String, +}