use std::{
    fs::{self, OpenOptions},
    io::Write,
    path::Path,
    thread::sleep,
    time,
};

use lemmy_api_common::{
    community::{ListCommunities, ListCommunitiesResponse},
    post::CreatePost,
    sensitive::Sensitive,
};
use lemmy_db_schema::{
    newtypes::{CommunityId, LanguageId},
    ListingType,
};
use serde_derive::{Deserialize, Serialize};
use url::Url;

use crate::CLIENT;

// Helper macro: declares a struct whose fields are all `pub(crate)` and which
// derives the serde and comparison traits used throughout this module.
macro_rules! pub_struct {
    ($name:ident {$($field:ident: $t:ty,)*}) => {
        #[derive(Serialize, Deserialize, Clone, PartialEq)]
        pub(crate) struct $name {
            $(pub(crate) $field: $t),*
        }
    }
}

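// For example, `pub_struct!(Point { x: i32, y: i32, })` expands to:
//
//     #[derive(Serialize, Deserialize, Clone, PartialEq)]
//     pub(crate) struct Point {
//         pub(crate) x: i32,
//         pub(crate) y: i32,
//     }
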
// Secrets structs
pub_struct!(Secrets {
    lemmy: LemmyLogin,
    reddit: RedditLogin,
});

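// `secrets.json` must deserialize into `Secrets`. An illustrative layout
// (all values are placeholders), inferred from the struct definitions here:
//
//     {
//       "lemmy": { "username": "bot_account", "password": "hunter2" },
//       "reddit": {
//         "app_id": "...",
//         "app_secret": "...",
//         "refresh_token": "...",
//         "redirect_uri": "..."
//       }
//     }
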
impl Secrets {
    /// Reads and parses `secrets.json`, panicking if the file is missing or invalid.
    pub(crate) fn init() -> Secrets {
        let file_contents = match fs::read_to_string("secrets.json") {
            Ok(data) => data,
            Err(e) => panic!("ERROR: secrets.json could not be read:\n\n{:#?}", e),
        };
        let config_parse: Secrets = match serde_json::from_str(&file_contents) {
            Ok(data) => data,
            Err(e) => panic!("ERROR: secrets.json could not be parsed:\n\n{:#?}", e),
        };

        config_parse
    }
}

#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct LemmyLogin {
    pub(crate) username: String,
    password: String,
}

impl LemmyLogin {
    /// Returns the username wrapped in `Sensitive` so it is redacted from debug output.
    pub(crate) fn get_username(&self) -> Sensitive<String> {
        Sensitive::new(self.username.clone())
    }

    pub(crate) fn get_password(&self) -> Sensitive<String> {
        Sensitive::new(self.password.clone())
    }
}

#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct RedditLogin {
    pub(crate) app_id: String,
    app_secret: String,
    refresh_token: String,
    redirect_uri: String,
}

// Config structs
pub_struct!(Config {
    instance: String,
    reddit_config: RedditConfig,
    feeds: Vec<FeedSetting>,
});

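// `config.json` must deserialize into `Config`. An illustrative layout
// (all values are placeholders), inferred from the struct definitions in this file:
//
//     {
//       "instance": "https://lemmy.example.com",
//       "reddit_config": { "user_agent": "...", "subreddit": "..." },
//       "feeds": [
//         {
//           "id": 0,
//           "feed_url": "https://example.com/feed.json",
//           "communities": { "chapter": "aobprepub", "volume": "aoblightnovel" },
//           "reddit": { "enabled": false, "flair": "..." }
//         }
//       ]
//     }
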
impl Config {
    /// Reads and parses `config.json`, panicking if the file is missing or invalid.
    pub(crate) fn init() -> Config {
        let file_contents = match fs::read_to_string("config.json") {
            Ok(data) => data,
            Err(e) => panic!("ERROR: config.json could not be read:\n\n{:#?}", e),
        };
        let config_parse: Config = match serde_json::from_str(&file_contents) {
            Ok(data) => data,
            Err(e) => panic!("ERROR: config.json could not be parsed:\n\n{:#?}", e),
        };

        config_parse
    }

    /// Re-reads `config.json` and replaces the current settings in place.
    pub(crate) fn load(&mut self) {
        *self = Config::init();
    }

    /// Fetches every configured feed, queues a `CreatePost` for each feed whose
    /// newest item has not been posted yet, and updates the post history.
    pub(crate) fn check_feeds(
        &mut self,
        post_history: &mut Vec<PrevPost>,
        community_ids: &CommunitiesVector,
        auth: &Sensitive<String>,
    ) -> Vec<CreatePost> {
        let mut post_queue: Vec<CreatePost> = vec![];

        self.feeds.iter().for_each(|feed| {
            let res = CLIENT
                .get(feed.feed_url.clone())
                .send()
                .unwrap()
                .text()
                .unwrap();
            let data: FeedData = serde_json::from_str(&res).unwrap();

            // Skip feeds that currently have no items instead of panicking on
            // an out-of-bounds index.
            let item = match data.items.first() {
                Some(item) => item,
                None => return,
            };

            let mut prev_post_idx: Option<usize> = None;
            let mut do_post = true;
            post_history.iter().enumerate().for_each(|(idx, post)| {
                if post.last_post_url == item.url {
                    // The newest item has already been posted.
                    do_post = false;
                } else if post.title == data.title {
                    // Known feed with a new item; remember which history entry to update.
                    prev_post_idx = Some(idx);
                }
            });

            if do_post {
                let new_post = CreatePost {
                    name: item.title.clone(),
                    community_id: community_ids.find(&feed.communities.chapter),
                    url: Some(Url::parse(&item.url).unwrap()),
                    body: Some(
                        "[Reddit](https://reddit.com)\n\n[Discord](https://discord.com)".into(),
                    ),
                    honeypot: None,
                    nsfw: Some(false),
                    language_id: Some(LanguageId(37)), // TODO fetch this ID every few hours via the API; the ordering of IDs suggests the EN ID might change in the future
                    auth: auth.clone(),
                };
                post_queue.push(new_post);

                match prev_post_idx {
                    Some(idx) => {
                        post_history[idx].title = data.title.clone();
                        post_history[idx].last_post_url = item.url.clone();
                    }
                    None => post_history.push(PrevPost {
                        id: feed.id,
                        title: data.title.clone(),
                        last_post_url: item.url.clone(),
                    }),
                }
            }
            sleep(time::Duration::from_millis(100)); // Throttle requests to avoid DoS-ing the J-Novel servers.
        });

        PrevPost::save(post_history);
        post_queue
    }
}

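// A plausible call sequence from the bot's main loop (a sketch; obtaining
// `auth` via the Lemmy login endpoint happens outside this module):
//
//     let mut config = Config::init();
//     let mut post_history = PrevPost::load();
//     let mut communities = CommunitiesVector::new();
//     communities.load(&auth, &config.instance);
//     let post_queue = config.check_feeds(&mut post_history, &communities, &auth);
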
pub_struct!(RedditConfig {
    user_agent: String,
    subreddit: String,
});

pub_struct!(FeedSetting {
    id: usize,
    feed_url: String,
    communities: FeedCommunities,
    reddit: FeedRedditSettings,
});

pub_struct!(FeedCommunities {
    chapter: LemmyCommunities,
    volume: LemmyCommunities,
});

#[derive(Serialize, Deserialize, Clone, PartialEq, strum_macros::Display)]
#[allow(non_camel_case_types)]
pub(crate) enum LemmyCommunities {
    aobwebnovel,
    aobprepub,
    aoblightnovel,
    aobmanga,
    aobanime,
}

pub_struct!(FeedRedditSettings {
    enabled: bool,
    flair: String,
});

// Posts structs
pub_struct!(PrevPost {
    id: usize,
    title: String,
    last_post_url: String,
});

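// `posts.json` stores the serialized history as a JSON array, e.g. (illustrative):
//
//     [
//       { "id": 0, "title": "Example Series", "last_post_url": "https://example.com/part-1" }
//     ]
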
impl PrevPost {
    /// Loads the post history from `posts.json`, creating an empty file (and
    /// returning an empty history) if it does not exist yet.
    pub(crate) fn load() -> Vec<PrevPost> {
        let history;

        if Path::new("posts.json").exists() {
            let file_contents = match fs::read_to_string("posts.json") {
                Ok(data) => data,
                Err(e) => panic!("ERROR: posts.json could not be read:\n\n{:#?}", e),
            };

            if !file_contents.is_empty() {
                let history_parse: Vec<PrevPost> = match serde_json::from_str(&file_contents) {
                    Ok(data) => data,
                    Err(e) => panic!("ERROR: posts.json could not be parsed:\n\n{:#?}", e),
                };
                history = history_parse;
            } else {
                history = Vec::new();
            }
        } else {
            let _ = fs::File::create("posts.json");
            history = Vec::new();
        }

        history
    }

    /// Writes the post history to `posts.json` as pretty-printed JSON.
    pub(crate) fn save(data: &Vec<PrevPost>) {
        // `truncate(true)` ensures a shorter history does not leave stale JSON
        // behind from a previous, longer write.
        let mut file = OpenOptions::new()
            .write(true)
            .create(true)
            .truncate(true)
            .open("posts.json")
            .unwrap();

        let json_data = serde_json::to_string_pretty(&data).unwrap();

        write!(&mut file, "{}", json_data).unwrap();
    }
}

// Feed structs, modeled on the JSON Feed format
pub_struct!(FeedData {
    version: String,
    title: String,
    home_page_url: String,
    description: String,
    author: FeedAuthor,
    items: Vec<FeedEntry>,
});

pub_struct!(FeedAuthor {
    name: String,
});

pub_struct!(FeedEntry {
    id: String,
    url: String,
    title: String,
    summary: String,
    image: Option<String>,
    date_published: String,
});

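// A minimal feed document matching these structs (illustrative values; the
// field set follows the JSON Feed format):
//
//     {
//       "version": "https://jsonfeed.org/version/1",
//       "title": "Example Series",
//       "home_page_url": "https://example.com",
//       "description": "...",
//       "author": { "name": "Example Author" },
//       "items": [
//         {
//           "id": "1",
//           "url": "https://example.com/part-1",
//           "title": "Part 1",
//           "summary": "...",
//           "image": null,
//           "date_published": "2023-01-01T00:00:00Z"
//         }
//       ]
//     }
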
// Bot Helper Structs
pub_struct!(CommunitiesVector {
    ids: Vec<(CommunityId, String)>,
});

impl CommunitiesVector {
    pub(crate) fn new() -> CommunitiesVector {
        CommunitiesVector { ids: vec![] }
    }

    /// Fetches the list of local communities from the Lemmy instance and
    /// caches their `(id, name)` pairs.
    pub(crate) fn load(&mut self, auth: &Sensitive<String>, base: &String) {
        let params = ListCommunities {
            auth: Some(auth.clone()),
            type_: Some(ListingType::Local),
            ..Default::default()
        };

        let res = CLIENT
            .get(format!("{}/api/v3/community/list", base))
            .query(&params)
            .send()
            .unwrap()
            .text()
            .unwrap();

        let site_data: ListCommunitiesResponse = serde_json::from_str(&res).unwrap();

        self.ids = site_data
            .communities
            .iter()
            .map(|entry| (entry.community.id, entry.community.name.clone()))
            .collect();
    }

    /// Resolves a community name to its `CommunityId`, falling back to
    /// `CommunityId(0)` if the name is unknown.
    pub(crate) fn find(&self, name: &LemmyCommunities) -> CommunityId {
        let name = name.to_string();
        let mut ret_id = CommunityId(0);

        self.ids.iter().for_each(|(id, id_name)| {
            if &name == id_name {
                ret_id = *id;
            }
        });

        ret_id
    }
}