Remove unnecessary thread structure

Neshura 2024-05-06 20:53:38 +02:00
parent 36b59240d9
commit 962d90fe1d
Signed by: Neshura
GPG key ID: B6983AAA6B9A7A6C
2 changed files with 92 additions and 208 deletions
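In short: the removed (-) code kept all mutable bot state in a SharedData struct behind Arc&lt;RwLock&lt;...&gt;&gt; so main() could hand a clone to a spawned, restartable bot task, while the added (+) code lets run() own plain locals and pass &Config / &mut SeriesHistory directly into idle() and handle_series(). A minimal sketch of the two shapes (simplified stand-in types, not the bot's real Config, SeriesHistory, or Lemmy client):

// Sketch only: stand-ins for the bot's types, assuming the tokio dependency
// already used by this repository.
use std::sync::Arc;
use tokio::sync::RwLock;

#[derive(Clone, Default)]
struct Config {
    config_reload_seconds: u64,
}

#[derive(Clone, Default)]
struct SeriesHistory;

// Before: state shared through a lock so another task could hold a handle to it.
#[derive(Clone, Default)]
struct SharedData {
    config: Config,
    post_history: SeriesHistory,
}

async fn run_shared(data: Arc<RwLock<SharedData>>) {
    // every read of the config goes through a lock guard
    let read = data.read().await;
    let _interval = read.config.config_reload_seconds;
}

// After: the task owns its state and lends out only what each helper needs.
async fn handle(config: &Config, _post_history: &mut SeriesHistory) {
    let _interval = config.config_reload_seconds;
}

async fn run_owned() {
    let config = Config::default();
    let mut post_history = SeriesHistory::default();
    handle(&config, &mut post_history).await;
}

#[tokio::main]
async fn main() {
    run_shared(Arc::new(RwLock::new(SharedData::default()))).await;
    run_owned().await;
}

With the separately spawned task gone, nothing else needs to observe the bot's state, so the lock and the read()/write() calls drop out; restarting after a crash is left to whatever supervises the process, as the deleted loop in main.rs previously did in-process.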

View file

@@ -1,15 +1,12 @@
-use crate::config::{Config, PostBody, SeriesConfig};
+use crate::{config::{Config, PostBody, SeriesConfig}, fetchers::{jnovel}, lemmy};
 use crate::fetchers::jnovel::JPostInfo;
 use crate::lemmy::{Lemmy, PostInfo};
 use crate::post_history::SeriesHistory;
-use crate::{fetchers::{jnovel}, lemmy, write_error, write_info, write_warn, SharedData};
-use chrono::{DateTime, Duration, Utc};
+use chrono::{DateTime, Duration, Timelike, Utc};
 use lemmy_api_common::post::CreatePost;
 use lemmy_db_schema::newtypes::{CommunityId, LanguageId};
 use lemmy_db_schema::PostFeatureType;
 use std::collections::HashMap;
-use std::sync::Arc;
-use tokio::sync::RwLock;
 use tokio::time::sleep;
 use crate::fetchers::Fetcher;
 use systemd_journal_logger::connected_to_journal;
@@ -40,22 +37,18 @@ macro_rules! error {
         }
     };
 }

-pub(crate) async fn run(data: Arc<RwLock<SharedData>>) {
+pub(crate) async fn run() {
     let mut last_reload: DateTime<Utc>;
     let mut lemmy: Lemmy;
     let mut login_error: bool;
-    let mut communities;
-    {
-        let mut write = data.write().await;
-        // Errors during bot init are likely unrecoverable and therefore should panic the bot
-        // Does not really matter since the bot will get restarted anyway but this way the uptime url logs a downtime
-        write.config = Config::load();
+    let mut communities: HashMap<String, CommunityId>;
+    let mut post_history: SeriesHistory;
+    let mut start: DateTime<Utc>;
+    let mut config: Config = Config::load();
     last_reload = Utc::now();
-    }
-    {
-        let read = data.read().await;
-        lemmy = match lemmy::login(&read.config).await {
+    lemmy = match lemmy::login(&config).await {
         Ok(data) => data,
         Err(_) => panic!(),
     };
@@ -65,32 +58,24 @@ macro_rules! error {
         Ok(data) => data,
         Err(_) => panic!(),
     };
-    }
-    {
-        let mut write = data.write().await;
-        write.start = Utc::now();
-    }
+    start = Utc::now();

     let info_msg = "Bot init successful, starting normal operations".to_owned();
     info!(info_msg);

     loop {
-        idle(&data).await;
-        {
-            let mut write = data.write().await;
-            write.start = Utc::now();
+        idle(&start, &config).await;
+        start = Utc::now();
+        // replace with watcher
+        if start - last_reload >= Duration::seconds(config.config_reload_seconds as i64) {
+            config = Config::load();
             let message = "Config reloaded".to_owned();
             info!(message);
-            write.config = Config::load();
-            let message = "Config reloaded".to_owned();
-            write_info(message);
-        }
         }

-        {
+        if login_error {
             let info_msg = "Login invalid, refreshing session";
             info!(info_msg);
             lemmy = match lemmy::login(&config).await {
@@ -99,35 +84,25 @@ macro_rules! error {
             };
             login_error = false;
         }
-        }

-        {
-            let read = data.read().await;
-            if read.start - last_reload >= Duration::seconds(read.config.config_reload_seconds as i64) {
+        if start - last_reload >= Duration::seconds(config.config_reload_seconds as i64) {
             communities = match lemmy.get_communities().await {
                 Ok(data) => data,
                 Err(_) => {
                     login_error = true;
                     continue;
                 }
             };
             let message = "Communities reloaded".to_owned();
             info!(message);
             last_reload = Utc::now();
-                write_info(message);
-                last_reload = Utc::now();
-            }
         }

-        {
-            let mut write = data.write().await;
-            write.post_history = SeriesHistory::load_history();
-        }
-        {
-            let read = data.read().await;
-            let series = read.config.series.clone();
-            drop(read);
+        post_history = SeriesHistory::load_history();
+        let series = config.series.clone();
         for series in series {
-            if handle_series(&series, &communities, &lemmy, &data)
+            if handle_series(&series, &communities, &lemmy, &config, &mut post_history)
                 .await
                 .is_err()
             {
@@ -135,20 +110,18 @@ macro_rules! error {
                 continue;
             };
         }
-        }

-        idle(&data).await;
+        idle(&start, &config).await;
     }
 }

-async fn idle(data: &Arc<RwLock<SharedData>>) {
-    let read = data.read().await;
+async fn idle(start: &DateTime<Utc>, config: &Config) {
     let mut sleep_duration = Duration::seconds(30);

-    if Utc::now() - read.start > sleep_duration {
+    if Utc::now() - start > sleep_duration {
         sleep_duration = Duration::seconds(60);
     }

-    if let Some(status_url) = read.config.status_post_url.clone() {
+    if let Some(status_url) = config.status_post_url.clone() {
         match reqwest::get(status_url).await {
             Ok(_) => {}
             Err(e) => {
@@ -158,12 +131,12 @@ async fn idle(data: &Arc<RwLock<SharedData>>) {
         }
     };

-    while Utc::now() - read.start < sleep_duration {
+    while Utc::now() - start < sleep_duration {
         sleep(Duration::milliseconds(100).to_std().unwrap()).await;
     }
 }

-async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, CommunityId>, lemmy: &Lemmy, data: &Arc<RwLock<SharedData>>) -> Result<(), ()> {
+async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, CommunityId>, lemmy: &Lemmy, config: &Config, post_history: &mut SeriesHistory ) -> Result<(), ()> {
     let jnc = jnovel::JFetcherOptions::new(series.slug.clone(), series.parted);
     let post_list = match jnc.check_feed().await {
         Ok(data) => data,
@@ -176,20 +149,16 @@ async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, Comm
     }

     for post_info in post_list.clone().iter() {
-        // todo .clone() likely not needed
         let post_part_info = post_info.get_part_info();
         let post_lemmy_info = post_info.get_info();

-        {
-            let read = data.read().await;
-            if read.post_history.check_for_post(
+        if post_history.check_for_post(
             series.slug.as_str(),
             post_part_info.as_string().as_str(),
             post_lemmy_info.title.as_str(),
         ) {
             continue;
         }
-        }

         let post_series_config = match post_info {
             JPostInfo::Chapter { .. } => &series.prepub_community,
@@ -224,20 +193,17 @@ async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, Comm
         info!(info);
         let post_id = lemmy.post(post_data).await?;

-        {
-            let read = data.read().await;
         if post_series_config.pin_settings.pin_new_post_community
-            && !read
-                .config
+            && config
                 .protected_communities
                 .contains(&post_series_config.name)
         {
             let info = format!(
                 "Pinning '{}' to {}",
                 post_lemmy_info.title,
                 post_series_config.name.as_str()
             );
-            write_info(info);
+            info!(info);
             let pinned_posts = lemmy.get_community_pinned(community_id).await?;
             if !pinned_posts.is_empty() {
                 let community_pinned_post = &pinned_posts[0];
@@ -246,8 +212,7 @@ async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, Comm
                     .await?;
             }
             lemmy.pin(post_id, PostFeatureType::Community).await?;
-        } else if read
-            .config
+        } else if config
             .protected_communities
             .contains(&post_series_config.name)
         {
@@ -255,20 +220,16 @@ async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, Comm
                 "Community '{}' for Series '{}' is protected. Is this intended?",
                 &post_series_config.name, series.slug
             );
-            write_warn(message);
-            }
             warn!(message);
         }

-        let read = data.read().await;
         if post_series_config.pin_settings.pin_new_post_local {
             let info = format!("Pinning '{}' to Instance", post_lemmy_info.title);
             info!(info);
             let pinned_posts = lemmy.get_local_pinned().await?;
             if !pinned_posts.is_empty() {
                 for pinned_post in pinned_posts {
-                    if read
-                        .config
+                    if config
                         .protected_communities
                         .contains(&pinned_post.community.name)
                     {
@@ -285,9 +246,8 @@ async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, Comm
             lemmy.pin(post_id, PostFeatureType::Local).await?;
         }

-        let mut series_history = read.post_history.get_series(series.slug.as_str());
+        let mut series_history = post_history.get_series(series.slug.as_str());
         let mut part_history = series_history.get_part(post_part_info.as_string().as_str());
-        drop(read);

         match post_info {
             JPostInfo::Chapter { .. } => part_history.chapter = post_info.get_info().title,
@@ -295,11 +255,9 @@ async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, Comm
         }
         series_history.set_part(post_part_info.as_string().as_str(), part_history);

-        let mut write = data.write().await;
-        write
-            .post_history
+        post_history
             .set_series(series.slug.as_str(), series_history);
-        write.post_history.save_history();
+        post_history.save_history();
     }
     Ok(())
 }

View file

@@ -1,15 +1,9 @@
-use crate::config::Config;
-use crate::post_history::SeriesHistory;
-use chrono::{DateTime, Duration, Utc};
-use log::{error, info, warn, LevelFilter};
+use chrono::{Duration};
+use log::{LevelFilter};
 use once_cell::sync::Lazy;
 use reqwest::Client;
-use std::collections::HashMap;
 use std::fmt::Debug;
-use std::sync::Arc;
-use systemd_journal_logger::{connected_to_journal, JournalLog};
-use tokio::sync::RwLock;
-use tokio::time::sleep;
+use systemd_journal_logger::{JournalLog};

 mod bot;
 mod config;
@@ -17,27 +11,6 @@ mod lemmy;
 mod post_history;
 mod fetchers;

-pub(crate) fn write_error(err_msg: String) {
-    match connected_to_journal() {
-        true => error!("[ERROR] {err_msg}"),
-        false => println!("[ERROR] {err_msg}"),
-    }
-}
-
-pub(crate) fn write_warn(warn_msg: String) {
-    match connected_to_journal() {
-        true => warn!("[WARN] {warn_msg}"),
-        false => println!("[WARN] {warn_msg}"),
-    }
-}
-
-pub(crate) fn write_info(info_msg: String) {
-    match connected_to_journal() {
-        true => info!("[INFO] {info_msg}"),
-        false => println!("[INFO] {info_msg}"),
-    }
-}
-
 pub static HTTP_CLIENT: Lazy<Client> = Lazy::new(|| {
     Client::builder()
         .timeout(Duration::seconds(30).to_std().unwrap())
@@ -46,25 +19,6 @@ pub static HTTP_CLIENT: Lazy<Client> = Lazy::new(|| {
         .expect("build client")
 });

-#[derive(Clone, Debug)]
-pub(crate) struct SharedData {
-    config: Config,
-    post_history: SeriesHistory,
-    start: DateTime<Utc>,
-}
-
-impl SharedData {
-    pub(crate) fn new() -> Self {
-        SharedData {
-            config: Config::default(),
-            post_history: SeriesHistory {
-                series: HashMap::new(),
-            },
-            start: Utc::now(),
-        }
-    }
-}
-
 #[tokio::main]
 async fn main() {
     JournalLog::new()
@@ -72,33 +26,5 @@ async fn main() {
         .install()
         .expect("Systemd-Logger crate error");
     log::set_max_level(LevelFilter::Info);
-
-    let mut data = SharedData::new();
-    loop {
-        let write_data = Arc::new(RwLock::new(data.clone()));
-        //let read_data = write_data.clone();
-        let persistent_data = write_data.clone();
-
-        let bot_thread = tokio::spawn(async move { bot::run(write_data).await });
-        let _ = bot_thread.await;
-        data = persistent_data.read().await.clone();
-
-        {
-            let err_msg = "Bot crashed due to unknown Error, restarting thread after wait...";
-            match connected_to_journal() {
-                true => error!("[ERROR] {err_msg}"),
-                false => println!("[ERROR] {err_msg}"),
-            }
-        }
-
-        sleep(
-            Duration::seconds(5)
-                .to_std()
-                .expect("Conversion should always work since static"),
-        )
-        .await;
-    }
+    bot::run().await;
 }