// git-blame artifact (not code): 2023-08-31 23:36:37 +02:00
use std ::{
error ::Error ,
fs ::{ self , OpenOptions } ,
io ::Write ,
path ::Path ,
time ,
} ;
use lemmy_api_common ::{
community ::{ ListCommunities , ListCommunitiesResponse } ,
post ::CreatePost ,
sensitive ::Sensitive ,
} ;
use lemmy_db_schema ::{
newtypes ::{ CommunityId , LanguageId , PostId } ,
ListingType ,
} ;
// git-blame artifact (not code): 2023-06-19 00:26:50 +02:00
use serde_derive ::{ Deserialize , Serialize } ;
// git-blame artifact (not code): 2023-09-01 00:40:59 +02:00
use tokio ::time ::sleep ;
// git-blame artifact (not code): 2023-06-22 22:08:10 +02:00
use url ::Url ;
// git-blame artifact (not code): 2023-08-31 23:36:37 +02:00
use crate ::CLIENT ;
// git-blame artifact (not code): 2023-06-19 00:26:50 +02:00
// Declares a `pub(crate)` struct with the crate's standard derive set
// (Serialize, Deserialize, Clone, PartialEq) so the config/secrets/feed
// data structures below don't repeat the boilerplate.
//
// Every field becomes `pub(crate)` as well. Note the matcher requires a
// trailing comma after each field.
macro_rules! pub_struct {
    ($name:ident { $($field:ident: $t:ty,)* }) => {
        #[derive(Serialize, Deserialize, Clone, PartialEq)]
        pub(crate) struct $name {
            // Re-emit each field with `pub(crate)` visibility.
            $(pub(crate) $field: $t),*
        }
    }
}
// Secrets structs
// Top-level layout of secrets.json: credentials for both services.
pub_struct!(Secrets {
    lemmy: LemmyLogin,
    reddit: RedditLogin,
});
impl Secrets {
2023-06-22 22:08:10 +02:00
pub ( crate ) fn init ( ) -> Secrets {
2023-06-19 00:26:50 +02:00
let file_contents = match fs ::read_to_string ( " secrets.json " ) {
Ok ( data ) = > data ,
Err ( e ) = > panic! ( " ERROR: secrets.json could not be read: \n \n {:#?} " , e ) ,
} ;
let config_parse : Secrets = match serde_json ::from_str ( & file_contents ) {
Ok ( data ) = > data ,
Err ( e ) = > panic! ( " ERROR: secrets.json could not be parsed: \n \n {:#?} " , e ) ,
} ;
return config_parse ;
}
}
// git-blame artifact (not code): 2023-06-19 19:21:28 +02:00
// Lemmy credentials as stored in secrets.json. The password field is
// private to this module; access goes through `get_password()`.
#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct LemmyLogin {
    pub(crate) username: String,
    password: String,
}
impl LemmyLogin {
    // Shared wrapper: copy a credential into a redaction-safe container.
    fn wrap(value: &str) -> Sensitive<String> {
        Sensitive::new(value.to_owned())
    }

    /// Returns the login username wrapped in [`Sensitive`].
    pub(crate) fn get_username(&self) -> Sensitive<String> {
        Self::wrap(&self.username)
    }

    /// Returns the login password wrapped in [`Sensitive`].
    pub(crate) fn get_password(&self) -> Sensitive<String> {
        Self::wrap(&self.password)
    }
}
// git-blame artifact (not code): 2023-06-19 19:21:28 +02:00
// Reddit OAuth application credentials from secrets.json. Only the app
// id is exposed to the rest of the crate; the other fields are module-private.
#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct RedditLogin {
    pub(crate) app_id: String,
    app_secret: String,
    refresh_token: String,
    redirect_uri: String,
}
// Config structs
// Top-level layout of config.json: the Lemmy instance, Reddit posting
// options, and the list of feeds to poll.
pub_struct!(Config {
    instance: String,
    reddit_config: RedditConfig,
    feeds: Vec<FeedSetting>,
});
impl Config {
2023-06-22 22:08:10 +02:00
pub ( crate ) fn init ( ) -> Config {
2023-06-19 00:26:50 +02:00
let file_contents = match fs ::read_to_string ( " config.json " ) {
Ok ( data ) = > data ,
Err ( e ) = > panic! ( " ERROR: config.json could not be read: \n \n {:#?} " , e ) ,
} ;
let config_parse : Config = match serde_json ::from_str ( & file_contents ) {
Ok ( data ) = > data ,
Err ( e ) = > panic! ( " ERROR: config.json could not be parsed: \n \n {:#?} " , e ) ,
} ;
return config_parse ;
}
2023-06-22 22:08:10 +02:00
pub ( crate ) fn load ( & mut self ) {
let file_contents = match fs ::read_to_string ( " config.json " ) {
Ok ( data ) = > data ,
Err ( e ) = > panic! ( " ERROR: config.json could not be read: \n \n {:#?} " , e ) ,
} ;
let config_parse : Config = match serde_json ::from_str ( & file_contents ) {
Ok ( data ) = > data ,
Err ( e ) = > panic! ( " ERROR: config.json could not be parsed: \n \n {:#?} " , e ) ,
} ;
self . feeds = config_parse . feeds ;
self . instance = config_parse . instance ;
self . reddit_config = config_parse . reddit_config ;
}
2023-08-31 23:36:37 +02:00
pub ( crate ) async fn check_feeds (
& mut self ,
post_history : & mut Vec < PrevPost > ,
community_ids : & CommunitiesVector ,
auth : & Sensitive < String > ,
) -> Result < Vec < ( CreatePost , ( Option < usize > , usize , String ) ) > , Box < dyn Error > > {
2023-08-03 23:34:23 +02:00
let mut post_queue : Vec < ( CreatePost , ( Option < usize > , usize , String ) ) > = vec! [ ] ;
2023-06-22 22:08:10 +02:00
2023-08-31 23:36:37 +02:00
let mut i = 0 ;
while i < self . feeds . len ( ) {
let feed = & self . feeds [ i ] ;
2023-08-03 21:38:22 +02:00
let res = CLIENT
2023-06-22 22:08:10 +02:00
. get ( feed . feed_url . clone ( ) )
2023-08-31 23:36:37 +02:00
. send ( )
. await ?
. text ( )
. await ? ;
2023-07-30 21:10:36 +02:00
2023-06-22 22:08:10 +02:00
let data : FeedData = serde_json ::from_str ( & res ) . unwrap ( ) ;
let mut prev_post_idx : Option < usize > = None ;
let mut do_post = true ;
2023-08-31 23:36:37 +02:00
post_history . iter ( ) . enumerate ( ) . for_each ( | ( idx , post ) | {
if & post . last_post_url = = & data . items [ 0 ] . url {
do_post = false ;
} else if & post . title = = & data . title {
prev_post_idx = Some ( idx ) ;
}
} ) ;
2023-06-22 22:08:10 +02:00
if do_post {
let item = & data . items [ 0 ] ;
let new_post = CreatePost {
2023-08-31 23:36:37 +02:00
name : item . title . clone ( ) ,
community_id : community_ids . find ( & feed . communities . chapter ) ,
url : Some ( Url ::parse ( & item . url ) . unwrap ( ) ) ,
body : Some (
" [Reddit](https://reddit.com/r/HonzukinoGekokujou) \n \n [Discord](https://discord.com/invite/fGefmzu) " . into ( ) ,
) ,
honeypot : None ,
nsfw : Some ( false ) ,
language_id : Some ( LanguageId ( 37 ) ) , // TODO get this id once every few hours per API request, the ordering of IDs suggests that the EN Id might change in the future
auth : auth . clone ( ) ,
} ;
2023-08-03 23:34:23 +02:00
2023-08-31 23:36:37 +02:00
let prev_data = ( prev_post_idx , feed . id , data . title ) ;
2023-08-03 23:34:23 +02:00
post_queue . push ( ( new_post , prev_data ) ) ;
2023-06-22 22:08:10 +02:00
}
2023-09-01 00:40:59 +02:00
sleep ( time ::Duration ::from_millis ( 100 ) ) . await ; // Should prevent dos-ing J-Novel servers
2023-08-31 23:36:37 +02:00
i + = 1 ;
2023-07-31 19:50:22 +02:00
}
2023-06-22 22:08:10 +02:00
2023-07-30 21:10:36 +02:00
return Ok ( post_queue ) ;
2023-06-22 22:08:10 +02:00
}
2023-06-19 00:26:50 +02:00
}
// Reddit-related settings from config.json.
pub_struct!(RedditConfig {
    user_agent: String,
    subreddit: String,
});
// One feed to poll: its URL, the Lemmy communities to post into, and
// per-feed Reddit options. `id` is stored alongside each queued post
// (see `Config::check_feeds`) — presumably it matches `PrevPost::id`;
// TODO confirm against the history-updating caller.
pub_struct!(FeedSetting {
    id: usize,
    feed_url: String,
    communities: FeedCommunities,
    reddit: FeedRedditSettings,
});
// Target communities for a feed, split by release type.
pub_struct!(FeedCommunities {
    chapter: LemmyCommunities,
    volume: LemmyCommunities,
});
// git-blame artifact (not code): 2023-06-19 19:21:28 +02:00
// Communities the bot may post to. Variant names are lowercase because
// their `Display` output (via strum) is compared against community names
// returned by the API in `CommunitiesVector::find`.
#[derive(Serialize, Deserialize, Clone, PartialEq, strum_macros::Display)]
#[allow(non_camel_case_types)]
pub(crate) enum LemmyCommunities {
    aobwebnovel,
    aobprepub,
    aoblightnovel,
    aobmanga,
    metadiscussions,
}
// Per-feed Reddit mirroring options.
pub_struct!(FeedRedditSettings {
    enabled: bool,
    flair: String,
});
// Posts structs
// One entry of posts.json: the last post made for a feed, used by
// `Config::check_feeds` to decide whether a feed item is new
// (compared by `last_post_url` and `title`).
pub_struct!(PrevPost {
    id: usize,
    post_id: PostId,
    title: String,
    last_post_url: String,
});
impl PrevPost {
pub ( crate ) fn load ( ) -> Vec < PrevPost > {
let history ;
2023-08-31 23:36:37 +02:00
2023-06-19 00:26:50 +02:00
if Path ::new ( " posts.json " ) . exists ( ) {
let file_contents = match fs ::read_to_string ( " posts.json " ) {
Ok ( data ) = > data ,
2023-08-03 23:34:23 +02:00
Err ( e ) = > panic! ( " ERROR: posts.json could not be read: \n \n {:#?} " , e ) ,
2023-06-19 00:26:50 +02:00
} ;
if file_contents . len ( ) > 0 {
let history_parse : Vec < PrevPost > = match serde_json ::from_str ( & file_contents ) {
Ok ( data ) = > data ,
2023-08-03 23:34:23 +02:00
Err ( e ) = > panic! ( " ERROR: posts.json could not be parsed: \n \n {:#?} " , e ) ,
2023-06-19 00:26:50 +02:00
} ;
history = history_parse ;
2023-08-31 23:36:37 +02:00
} else {
2023-06-19 00:26:50 +02:00
history = [ ] . to_vec ( )
}
2023-08-31 23:36:37 +02:00
} else {
2023-06-19 00:26:50 +02:00
let _ = fs ::File ::create ( " posts.json " ) ;
history = [ ] . to_vec ( )
}
return history ;
}
pub ( crate ) fn save ( data : & Vec < PrevPost > ) {
2023-08-31 23:36:37 +02:00
let mut file = OpenOptions ::new ( )
. read ( true )
. write ( true )
. create ( true )
. open ( " posts.json " )
. unwrap ( ) ;
2023-06-19 00:26:50 +02:00
let json_data = serde_json ::to_string_pretty ( & data ) . unwrap ( ) ;
write! ( & mut file , " {} " , json_data ) . unwrap ( ) ;
}
}
// RSS Feed Structs
// Shape of the JSON document returned by a feed URL (only the fields
// the bot reads). Items are expected newest-first — `check_feeds` only
// looks at `items[0]`; TODO confirm against the feed source.
pub_struct!(FeedData {
    version: String,
    title: String,
    home_page_url: String,
    description: String,
    author: FeedAuthor,
    items: Vec<FeedEntry>,
});
// git-blame artifact (not code): 2023-08-31 23:36:37 +02:00
// Author object of a feed document.
pub_struct!(FeedAuthor { name: String, });
// git-blame artifact (not code): 2023-06-19 00:26:50 +02:00
// One item of a feed document; `url` and `title` become the Lemmy post.
pub_struct!(FeedEntry {
    id: String,
    url: String,
    title: String,
    summary: String,
    image: Option<String>,
    date_published: String,
});
// git-blame artifact (not code): 2023-08-31 23:36:37 +02:00
// git-blame artifact (not code): 2023-06-22 22:08:10 +02:00
// Bot Helper Structs
// Cached (id, name) pairs of the instance's local communities, fetched
// once via the API and used to resolve community names to ids.
pub_struct!(CommunitiesVector {
    ids: Vec<(CommunityId, String)>,
});
impl CommunitiesVector {
pub ( crate ) fn new ( ) -> CommunitiesVector {
2023-08-31 23:36:37 +02:00
CommunitiesVector { ids : vec ! [ ] }
2023-06-22 22:08:10 +02:00
}
2023-07-30 21:10:36 +02:00
#[ warn(unused_results) ]
2023-08-31 23:36:37 +02:00
pub ( crate ) async fn load (
& mut self ,
auth : & Sensitive < String > ,
base : & String ,
) -> Result < ( ) , Box < dyn Error > > {
2023-06-22 22:08:10 +02:00
let params = ListCommunities {
auth : Some ( auth . clone ( ) ) ,
2023-07-10 23:40:31 +02:00
type_ : Some ( ListingType ::Local ) ,
2023-06-22 22:08:10 +02:00
.. Default ::default ( )
} ;
2023-08-03 21:38:22 +02:00
let res = CLIENT
2023-06-22 22:08:10 +02:00
. get ( base . clone ( ) + " /api/v3/community/list " )
. query ( & params )
2023-08-31 23:36:37 +02:00
. send ( )
. await ?
. text ( )
. await ? ;
2023-06-22 22:08:10 +02:00
let site_data : ListCommunitiesResponse = serde_json ::from_str ( & res ) . unwrap ( ) ;
let mut ids = [ ] . to_vec ( ) ;
site_data . communities . iter ( ) . for_each ( | entry | {
let new_id = ( entry . community . id , entry . community . name . clone ( ) ) ;
ids . push ( new_id ) ;
} ) ;
self . ids = ids ;
2023-07-31 19:50:22 +02:00
return Ok ( ( ) ) ;
2023-06-22 22:08:10 +02:00
}
pub ( crate ) fn find ( & self , name : & LemmyCommunities ) -> CommunityId {
let mut ret_id = CommunityId ( 0 ) ;
self . ids . iter ( ) . for_each ( | id | {
let id_name = & id . 1 ;
if & name . to_string ( ) = = id_name {
ret_id = id . 0 ;
}
} ) ;
2023-08-31 23:36:37 +02:00
return ret_id ;
2023-06-22 22:08:10 +02:00
}
}