Split Config into Config and Feeds Modules

This commit is contained in:
Neshura 2023-09-18 21:00:55 +02:00
parent 1a2a489e48
commit 594d3e08fe
Signed by: Neshura
GPG key ID: B6983AAA6B9A7A6C
2 changed files with 284 additions and 99 deletions

View file

@ -6,20 +6,42 @@ use std::{
time, time,
}; };
use chrono::{DateTime, Duration, Utc};
use lemmy_api_common::{ use lemmy_api_common::{
community::{ListCommunities, ListCommunitiesResponse}, community::{ListCommunities, ListCommunitiesResponse},
post::CreatePost, post::CreatePost,
sensitive::Sensitive, sensitive::Sensitive,
}; };
use lemmy_db_schema::{ use lemmy_db_schema::{
newtypes::{CommunityId, LanguageId, PostId}, newtypes::{CommunityId, LanguageId},
ListingType, ListingType,
}; };
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use tokio::time::sleep; use tokio::time::sleep;
use url::Url; use url::Url;
use crate::CLIENT; use crate::{
feeds::{FeedChapterData, FeedSeriesData, FeedSetting, FeedVolumeData},
PostQueueMetadata, CLIENT,
};
macro_rules! api_url {
() => {
"https://labs.j-novel.club/app/v1/".to_string()
};
}
macro_rules! chapter_url {
() => {
"https://j-novel.club/read/".to_string()
};
}
macro_rules! volume_url_base {
() => {
"https://j-novel.club/series/".to_string()
};
}
macro_rules! pub_struct { macro_rules! pub_struct {
($name:ident {$($field:ident: $t:ty,)*}) => { ($name:ident {$($field:ident: $t:ty,)*}) => {
@ -49,6 +71,19 @@ impl Secrets {
return config_parse; return config_parse;
} }
pub(crate) fn load(&mut self) {
let file_contents = match fs::read_to_string("secrets.json") {
Ok(data) => data,
Err(e) => panic!("ERROR: secrets.json could not be read:\n\n{:#?}", e),
};
let config_parse: Secrets = match serde_json::from_str(&file_contents) {
Ok(data) => data,
Err(e) => panic!("ERROR: secrets.json could not be parsed:\n\n{:#?}", e),
};
*self = config_parse;
}
} }
#[derive(Serialize, Deserialize, Clone, PartialEq)] #[derive(Serialize, Deserialize, Clone, PartialEq)]
@ -76,11 +111,14 @@ pub(crate) struct RedditLogin {
} }
// Config structs // Config structs
pub_struct!(Config { #[derive(Serialize, Deserialize, Clone, PartialEq)]
instance: String, pub(crate) struct Config {
reddit_config: RedditConfig, pub(crate) instance: String,
feeds: Vec<FeedSetting>, pub(crate) reddit_config: RedditConfig,
}); pub(crate) feeds: Vec<FeedSetting>,
pub(crate) uptime_kuma: Option<String>,
pub(crate) config_reload: Option<usize>,
}
impl Config { impl Config {
pub(crate) fn init() -> Config { pub(crate) fn init() -> Config {
@ -106,63 +144,163 @@ impl Config {
Err(e) => panic!("ERROR: config.json could not be parsed:\n\n{:#?}", e), Err(e) => panic!("ERROR: config.json could not be parsed:\n\n{:#?}", e),
}; };
self.feeds = config_parse.feeds; *self = config_parse;
self.instance = config_parse.instance;
self.reddit_config = config_parse.reddit_config;
} }
pub(crate) async fn check_feeds( #[warn(unused_results)]
&mut self, pub(crate) async fn fetch_infos(&self) -> Result<(), Box<dyn Error>> {
post_history: &mut Vec<PrevPost>, // Get Series Data
community_ids: &CommunitiesVector, for feed in &self.feeds {
auth: &Sensitive<String>,
) -> Result<Vec<(CreatePost, (Option<usize>, usize, String))>, Box<dyn Error>> {
let mut post_queue: Vec<(CreatePost, (Option<usize>, usize, String))> = vec![];
let mut i = 0;
while i < self.feeds.len() {
let feed = &self.feeds[i];
let res = CLIENT let res = CLIENT
.get(feed.feed_url.clone()) .get(api_url!() + "series/" + &feed.series_slug + "?format=json")
.send() .send()
.await? .await?
.text() .text()
.await?; .await?;
let data: FeedData = serde_json::from_str(&res).unwrap(); let data: FeedSeriesData = serde_json::from_str(&res).unwrap();
println!("{:#?}", data);
}
return Ok(());
}
let mut prev_post_idx: Option<usize> = None; #[warn(unused_results)]
let mut do_post = true; pub(crate) async fn check_feeds(
post_history.iter().enumerate().for_each(|(idx, post)| { &self,
if &post.last_post_url == &data.items[0].url { post_history: &Vec<PrevPost>,
do_post = false; community_ids: &CommunitiesVector,
} else if &post.title == &data.title { auth: &Sensitive<String>,
prev_post_idx = Some(idx); ) -> Result<Vec<(CreatePost, PostQueueMetadata)>, Box<dyn Error>> {
} let mut post_queue: Vec<(CreatePost, PostQueueMetadata)> = vec![];
});
if do_post { for feed in &self.feeds {
let item = &data.items[0]; let mut history_data: &PrevPost = &PrevPost {
let new_post = CreatePost { id: feed.id,
name: item.title.clone(), last_volume_slug: None,
community_id: community_ids.find(&feed.communities.chapter), last_volume_time: None,
url: Some(Url::parse(&item.url).unwrap()), last_part_slug: None,
body: Some( last_part_time: None,
"[Reddit](https://reddit.com/r/HonzukinoGekokujou)\n\n[Discord](https://discord.com/invite/fGefmzu)".into(),
),
honeypot: None,
nsfw: Some(false),
language_id: Some(LanguageId(37)), // TODO get this id once every few hours per API request, the ordering of IDs suggests that the EN Id might change in the future
auth: auth.clone(),
}; };
for post in post_history {
let prev_data = (prev_post_idx, feed.id, data.title); if post.id == feed.id {
history_data = post;
post_queue.push((new_post, prev_data)); break;
}
} }
sleep(time::Duration::from_millis(100)).await; // Should prevent dos-ing J-Novel servers
i += 1; // Check for Volume Release
let res = CLIENT
.get(api_url!() + "series/" + &feed.series_slug + "/volumes?format=json")
.send()
.await?
.text()
.await?;
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
struct FeedVolumeReturn {
volumes: Vec<FeedVolumeData>,
}
let mut data: FeedVolumeReturn = serde_json::from_str(&res).unwrap();
let current_volume_slug = data.volumes[&data.volumes.len() - 1].slug.clone();
let now = Utc::now();
data.volumes.reverse();
for volume in data.volumes {
let published = DateTime::parse_from_rfc3339(&volume.publishing).unwrap();
// Get First Volume that has valid Release Data
if now >= published {
if Some(volume.slug.clone()) != history_data.last_volume_slug {
println!(
"Should Post for {} Volume {}",
feed.series_slug, volume.slug
);
if let Some(volume_community) = &feed.communities.volume {
let mut post_url = Url::parse(&(volume_url_base!() + &feed.series_slug))?;
post_url.set_fragment(Some(&("volume-".to_string() + &volume.number.to_string())));
let new_post = CreatePost {
name: volume.title.clone(),
community_id: community_ids.find(&volume_community),
url: Some(post_url),
body: Some(
"[Reddit](https://reddit.com/r/HonzukinoGekokujou)\n\n\
[Discord](https://discord.com/invite/fGefmzu)\n\n\
[#ascendanceofabookworm](https://mastodon.social/tags/ascendanceofabookworm)\n\n\
[#honzuki](https://mastodon.social/tags/honzuki)\n\n\
[#](https://mastodon.social/tags/%E6%9C%AC%E5%A5%BD%E3%81%8D%E3%81%AE%E4%B8%8B%E5%89%8B%E4%B8%8A)"
.into(),
),
honeypot: None,
nsfw: Some(false),
language_id: Some(LanguageId(37)), // TODO get this id once every few hours per API request, the ordering of IDs suggests that the EN Id might change in the future
auth: auth.clone(),
};
println!("{:?}", new_post.url);
post_queue.push((
new_post,
PostQueueMetadata {
series: feed.series_slug.clone(),
part: None,
volume: Some(volume.slug),
},
));
}
}
break;
}
}
if let Some(chapter_community) = &feed.communities.chapter {
// Check for Part Release
let res = CLIENT
.get(api_url!() + "volumes/" + &current_volume_slug + "/parts?format=json")
.send()
.await?
.text()
.await?;
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
struct FeedChapterReturn {
parts: Vec<FeedChapterData>,
}
let mut data: FeedChapterReturn = serde_json::from_str(&res).unwrap();
data.parts.reverse();
for part in data.parts {
if Some(part.slug.clone()) != history_data.last_part_slug {
println!("Should Post for {} Part {}", feed.series_slug, part.slug);
let new_post = CreatePost {
name: part.title.clone(),
community_id: community_ids.find(&chapter_community),
url: Some(Url::parse(&(chapter_url!() + &part.slug)).unwrap()),
body: Some(
"[Reddit](https://reddit.com/r/HonzukinoGekokujou)\n\n\
[Discord](https://discord.com/invite/fGefmzu)\n\n\
[#ascendanceofabookworm](https://mastodon.social/tags/ascendanceofabookworm)\n\n\
[#honzuki](https://mastodon.social/tags/honzuki)\n\n\
[#](https://mastodon.social/tags/%E6%9C%AC%E5%A5%BD%E3%81%8D%E3%81%AE%E4%B8%8B%E5%89%8B%E4%B8%8A)"
.into(),
),
honeypot: None,
nsfw: Some(false),
language_id: Some(LanguageId(37)), // TODO get this id once every few hours per API request, the ordering of IDs suggests that the EN Id might change in the future
auth: auth.clone(),
};
post_queue.push((
new_post,
PostQueueMetadata {
series: feed.series_slug.clone(),
part: Some(part.slug),
volume: None,
},
));
}
break;
}
}
sleep(Duration::milliseconds(100).to_std().unwrap()).await; // Should prevent dos-ing J-Novel servers
} }
return Ok(post_queue); return Ok(post_queue);
@ -174,18 +312,7 @@ pub_struct!(RedditConfig {
subreddit: String, subreddit: String,
}); });
pub_struct!(FeedSetting { //noinspection ALL
id: usize,
feed_url: String,
communities: FeedCommunities,
reddit: FeedRedditSettings,
});
pub_struct!(FeedCommunities {
chapter: LemmyCommunities,
volume: LemmyCommunities,
});
#[derive(Serialize, Deserialize, Clone, PartialEq, strum_macros::Display)] #[derive(Serialize, Deserialize, Clone, PartialEq, strum_macros::Display)]
#[allow(non_camel_case_types)] #[allow(non_camel_case_types)]
pub(crate) enum LemmyCommunities { pub(crate) enum LemmyCommunities {
@ -196,22 +323,18 @@ pub(crate) enum LemmyCommunities {
metadiscussions, metadiscussions,
} }
pub_struct!(FeedRedditSettings {
enabled: bool,
flair: String,
});
// Posts structs // Posts structs
pub_struct!(PrevPost { pub_struct!(PrevPost {
id: usize, id: usize,
post_id: PostId, last_volume_slug: Option<String>,
title: String, last_volume_time: Option<String>,
last_post_url: String, last_part_slug: Option<String>,
last_part_time: Option<String>,
}); });
impl PrevPost { impl PrevPost {
pub(crate) fn load() -> Vec<PrevPost> { pub(crate) fn load() -> Vec<PrevPost> {
let history; let mut history;
if Path::new("posts.json").exists() { if Path::new("posts.json").exists() {
let file_contents = match fs::read_to_string("posts.json") { let file_contents = match fs::read_to_string("posts.json") {
@ -233,6 +356,8 @@ impl PrevPost {
history = [].to_vec() history = [].to_vec()
} }
history.sort_by(|a, b| a.id.cmp(&b.id));
return history; return history;
} }
@ -250,27 +375,6 @@ impl PrevPost {
} }
} }
// RSS Feed Structs
pub_struct!(FeedData {
version: String,
title: String,
home_page_url: String,
description: String,
author: FeedAuthor,
items: Vec<FeedEntry>,
});
pub_struct!(FeedAuthor { name: String, });
pub_struct!(FeedEntry {
id: String,
url: String,
title: String,
summary: String,
image: Option<String>,
date_published: String,
});
// Bot Helper Structs // Bot Helper Structs
pub_struct!(CommunitiesVector { pub_struct!(CommunitiesVector {
ids: Vec<(CommunityId, String)>, ids: Vec<(CommunityId, String)>,
@ -282,11 +386,7 @@ impl CommunitiesVector {
} }
#[warn(unused_results)] #[warn(unused_results)]
pub(crate) async fn load( pub(crate) async fn load(&mut self, auth: &Sensitive<String>, base: &String) -> Result<(), Box<dyn Error>> {
&mut self,
auth: &Sensitive<String>,
base: &String,
) -> Result<(), Box<dyn Error>> {
let params = ListCommunities { let params = ListCommunities {
auth: Some(auth.clone()), auth: Some(auth.clone()),
type_: Some(ListingType::Local), type_: Some(ListingType::Local),

85
src/feeds/mod.rs Normal file
View file

@ -0,0 +1,85 @@
use serde_derive::{Deserialize, Serialize};
use crate::config::LemmyCommunities;
/// Expands to the J-Novel Club REST API base URL as an owned `String`,
/// ready for `+`-concatenation with endpoint path segments.
/// NOTE(review): this duplicates the `api_url!` macro in the config module —
/// consider hoisting to a shared location; confirm before deduplicating.
macro_rules! api_url {
    () => {
        String::from("https://labs.j-novel.club/app/v1/")
    };
}
/// One configured feed: ties a J-Novel Club series to the Lemmy communities
/// (and optional Reddit settings) it should be cross-posted to.
#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct FeedSetting {
    /// Stable identifier for this feed; used to match against stored
    /// post history entries (presumably `PrevPost.id` — confirm in config module).
    pub(crate) id: usize,
    /// Series slug as used in J-Novel Club API paths (`series/{slug}`).
    pub(crate) series_slug: String,
    /// Target Lemmy communities for chapter and volume announcements.
    pub(crate) communities: FeedCommunities,
    /// Optional Reddit cross-posting settings; `None` disables Reddit for this feed.
    pub(crate) reddit: Option<FeedRedditSettings>,
}
/// Per-feed Lemmy community targets. Each target is optional: a `None`
/// entry means that release type is not announced for the feed.
#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct FeedCommunities {
    /// Community for individual chapter/part releases, if any.
    pub(crate) chapter: Option<LemmyCommunities>,
    /// Community for full volume releases, if any.
    pub(crate) volume: Option<LemmyCommunities>,
}
/// Reddit cross-posting configuration for a feed.
#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct FeedRedditSettings {
    /// Whether Reddit posting is active for this feed.
    pub(crate) enabled: bool,
    /// Flair text to apply to submissions.
    pub(crate) flair: String,
}
// RSS Feed Structs
/// Top-level JSON-feed document (RSS-style).
/// NOTE(review): appears to be a leftover from the previous RSS-based
/// implementation — the config module seems to import only the series/volume/
/// chapter structs below. Confirm it is still referenced before removing.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub(crate) struct FeedData {
    /// Feed format version string.
    pub(crate) version: String,
    /// Feed title.
    pub(crate) title: String,
    /// URL of the site the feed belongs to.
    pub(crate) home_page_url: String,
    /// Human-readable feed description.
    pub(crate) description: String,
    /// Feed author metadata.
    pub(crate) author: FeedAuthor,
    /// Individual feed entries, newest ordering as delivered by the source.
    pub(crate) items: Vec<FeedEntry>,
}
/// Series metadata returned by the J-Novel Club `series/{slug}` endpoint.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub(crate) struct FeedSeriesData {
    /// Display title of the series.
    pub(crate) title: String,
    /// URL slug identifying the series in API paths.
    pub(crate) slug: String,
}
/// One volume entry from the `series/{slug}/volumes` endpoint.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub(crate) struct FeedVolumeData {
    /// Display title of the volume.
    pub(crate) title: String,
    /// URL slug identifying the volume in API paths.
    pub(crate) slug: String,
    /// Volume number within the series; used to build the `#volume-N`
    /// URL fragment when posting. u8 assumes no series exceeds 255 volumes.
    pub(crate) number: u8,
    /// Publication timestamp as an RFC 3339 string (parsed with
    /// `DateTime::parse_from_rfc3339` by the consumer).
    pub(crate) publishing: String,
    /// Cover image URLs for this volume.
    pub(crate) cover: FeedCoverData,
}
/// One chapter/part entry from the `volumes/{slug}/parts` endpoint.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub(crate) struct FeedChapterData {
    /// Display title of the part.
    pub(crate) title: String,
    /// URL slug identifying the part; appended to the reader base URL for posts.
    pub(crate) slug: String,
    /// Cover image URLs for this part.
    pub(crate) cover: FeedCoverData,
}
/// Cover image URL pair as delivered by the API.
/// The API uses camelCase keys; `#[serde(alias = …)]` accepts those on
/// deserialization while the snake-case field names are still used when
/// serializing (and are also accepted as input).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub(crate) struct FeedCoverData {
    /// Full-size cover image URL (API key: `coverUrl`).
    #[serde(alias = "coverUrl")]
    pub(crate) cover: String,
    /// Thumbnail image URL (API key: `thumbnailUrl`).
    #[serde(alias = "thumbnailUrl")]
    pub(crate) thumbnail: String,
}
/// Author metadata of an RSS-style feed.
/// NOTE(review): likely legacy alongside `FeedData` — confirm usage before removing.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub(crate) struct FeedAuthor {
    /// Author display name.
    pub(crate) name: String,
}
/// Single entry of an RSS-style feed.
/// NOTE(review): likely legacy alongside `FeedData` — confirm usage before removing.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub(crate) struct FeedEntry {
    /// Unique entry identifier from the feed source.
    pub(crate) id: String,
    /// Link target of the entry.
    pub(crate) url: String,
    /// Entry headline.
    pub(crate) title: String,
    /// Short description/body text.
    pub(crate) summary: String,
    /// Optional image URL; not every entry carries one.
    pub(crate) image: Option<String>,
    /// Publication date as provided by the feed (string, format not validated here).
    pub(crate) date_published: String,
}