Partially adapt bot module to changes due to fetchers modularization

Neshura 2024-01-08 21:07:17 +01:00
parent e5862ba0ec
commit b9a26a7b1c
Signed by: Neshura
GPG key ID: B6983AAA6B9A7A6C


@@ -1,8 +1,8 @@
 use crate::config::{Config, PostBody, SeriesConfig};
-use crate::jnovel::PostInfo;
-use crate::lemmy::Lemmy;
+use crate::fetchers::jnovel::JPostInfo;
+use crate::lemmy::{Lemmy, PostInfo};
 use crate::post_history::SeriesHistory;
-use crate::{jnovel, lemmy, write_error, write_info, write_warn, SharedData};
+use crate::{fetchers::{jnovel}, lemmy, write_error, write_info, write_warn, SharedData};
 use chrono::{DateTime, Duration, Utc};
 use lemmy_api_common::post::CreatePost;
 use lemmy_db_schema::newtypes::{CommunityId, LanguageId};
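
The import changes above track the module split: the site-agnostic `PostInfo` now comes from `crate::lemmy`, while the J-Novel-specific type is renamed to `JPostInfo` and lives under `crate::fetchers::jnovel`. A minimal sketch of a layout consistent with the new paths (the actual definitions are not part of this diff):

```rust
// Sketch only: module layout implied by the new imports above.
pub mod lemmy {
    pub struct Lemmy; // Lemmy API client

    #[derive(Clone)]
    pub struct PostInfo {
        pub title: String, // further fields (url, body, ...) are assumptions
    }
}

pub mod fetchers {
    pub trait Fetcher {} // shared fetcher interface, sketched below

    pub mod jnovel {
        pub struct JFetcherOptions; // per-series J-Novel fetcher, see below
        pub enum JPostInfo {}       // J-Novel post wrapper, outlined at the end
    }
}
```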
@@ -11,6 +11,7 @@ use std::collections::HashMap;
 use std::sync::Arc;
 use tokio::sync::RwLock;
 use tokio::time::sleep;
+use crate::fetchers::Fetcher;
 
 pub(crate) async fn run(data: Arc<RwLock<SharedData>>) {
     let mut last_reload: DateTime<Utc>;
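
The newly imported `crate::fetchers::Fetcher` trait is what the bot later calls `check_feed()` through. Its definition is not shown in this diff; a plausible sketch, assuming the `async_trait` crate and an error type the caller simply discards:

```rust
use async_trait::async_trait;

// Plausible shape of the shared Fetcher trait; only check_feed() is evidenced
// by this diff (it is awaited, and its error is thrown away by the caller).
#[async_trait]
pub trait Fetcher {
    /// Concrete post type produced by this fetcher, e.g. JPostInfo for J-Novel.
    type Return;

    /// Poll the upstream feed and return any posts that should be published.
    async fn check_feed(&self) -> Result<Vec<Self::Return>, ()>;
}
```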
@@ -137,15 +138,16 @@ async fn idle(data: &Arc<RwLock<SharedData>>) {
 }
 
 async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, CommunityId>, lemmy: &Lemmy, data: &Arc<RwLock<SharedData>>) -> Result<(), ()> {
-    let mut post_list = match jnovel::check_feed(series.slug.as_str(), series.parted).await {
+    let jnc = jnovel::JFetcherOptions::new(series.slug.clone(), series.parted);
+    let post_list = match jnc.check_feed().await {
         Ok(data) => data,
         Err(_) => return Err(()),
     };
 
-    for (index, post_info) in post_list.clone().iter().enumerate() {
+    for post_info in post_list.clone().iter() {
         // todo .clone() likely not needed
         let post_part_info = post_info.get_part_info();
-        let post_lemmy_info = post_info.get_lemmy_info();
+        let post_lemmy_info = post_info.get_info();
 
         {
             let read = data.read().await;
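
The call site now constructs a per-series fetcher and polls it through `check_feed()`, replacing the old free function `jnovel::check_feed(slug, parted)`. A sketch of the fetcher-side counterpart under the trait assumed above; field names and the error type are guesses:

```rust
// Hypothetical counterpart in crate::fetchers::jnovel; the real struct may
// differ in fields, visibility, and error handling.
pub struct JFetcherOptions {
    series_slug: String,
    series_has_parts: bool,
}

impl JFetcherOptions {
    pub fn new(series_slug: String, series_has_parts: bool) -> Self {
        JFetcherOptions { series_slug, series_has_parts }
    }
}

#[async_trait]
impl Fetcher for JFetcherOptions {
    type Return = JPostInfo;

    async fn check_feed(&self) -> Result<Vec<JPostInfo>, ()> {
        // Query the J-Novel Club feed for self.series_slug, honouring
        // self.series_has_parts, and map the results into JPostInfo values.
        todo!()
    }
}
```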
@@ -159,8 +161,8 @@ async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, Comm
         }
 
         let post_series_config = match post_info {
-            PostInfo::Chapter { .. } => &series.prepub_community,
-            PostInfo::Volume { .. } => &series.volume_community,
+            JPostInfo::Chapter { .. } => &series.prepub_community,
+            JPostInfo::Volume { .. } => &series.volume_community,
         };
 
         let community_id = *communities
@@ -256,8 +258,8 @@ async fn handle_series(series: &SeriesConfig, communities: &HashMap<String, Comm
         drop(read);
 
         match post_info {
-            PostInfo::Chapter { .. } => part_history.chapter = post_info.get_lemmy_info().title,
-            PostInfo::Volume { .. } => part_history.volume = post_info.get_lemmy_info().title,
+            JPostInfo::Chapter { .. } => part_history.chapter = post_info.get_info().title,
+            JPostInfo::Volume { .. } => part_history.volume = post_info.get_info().title,
         }
 
         series_history.set_part(post_part_info.as_string().as_str(), part_history);
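
For reference, `JPostInfo` is matched on as an enum with struct-like `Chapter` and `Volume` variants, and its `get_info()` accessor (replacing the old `get_lemmy_info()`) returns the generic `lemmy::PostInfo`, which at least carries a `title`. A rough outline consistent with this usage; the variant fields and the `PartInfo` type are assumptions:

```rust
// Rough outline of JPostInfo as used in this file; field names and the part
// type are not shown in the diff and are assumed here.
#[derive(Clone)]
pub struct PartInfo(String); // exposes as_string() in the real code

impl PartInfo {
    pub fn as_string(&self) -> String {
        self.0.clone()
    }
}

#[derive(Clone)]
pub enum JPostInfo {
    Chapter { info: PostInfo, part: PartInfo },
    Volume { info: PostInfo, part: PartInfo },
}

impl JPostInfo {
    /// Replaces the old get_lemmy_info(): data used to create the Lemmy post.
    pub fn get_info(&self) -> PostInfo {
        match self {
            JPostInfo::Chapter { info, .. } | JPostInfo::Volume { info, .. } => info.clone(),
        }
    }

    pub fn get_part_info(&self) -> PartInfo {
        match self {
            JPostInfo::Chapter { part, .. } | JPostInfo::Volume { part, .. } => part.clone(),
        }
    }
}
```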