use std::{
    error::Error,
    fs::{self, OpenOptions},
    io::Write,
    path::Path,
};
use chrono::{DateTime, Duration, Utc};
use lemmy_api_common::{
    community::{ListCommunities, ListCommunitiesResponse},
    post::CreatePost,
    sensitive::Sensitive,
};
use lemmy_db_schema::{
    newtypes::{CommunityId, LanguageId},
    ListingType,
};
use serde_derive::{Deserialize, Serialize};
use tokio::time::sleep;
use url::Url;
use crate::{
    feeds::{FeedChapterData, FeedSeriesData, FeedSetting, FeedVolumeData},
    PostQueueMetadata, CLIENT,
};
macro_rules! api_url {
    () => {
        "https://labs.j-novel.club/app/v1/".to_string()
    };
}

macro_rules! chapter_url {
    () => {
        "https://j-novel.club/read/".to_string()
    };
}

macro_rules! volume_url_base {
    () => {
        "https://j-novel.club/series/".to_string()
    };
}
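
// These URL macros expand to owned `String`s so call sites can build URLs by
// simple concatenation, e.g. `api_url!() + "series/" + &feed.series_slug`.
// Plain `&'static str` constants would also work if the call sites used
// `format!` instead.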
macro_rules! pub_struct {
    ($name:ident { $($field:ident: $t:ty,)* }) => {
        #[derive(Serialize, Deserialize, Clone, PartialEq)]
        pub(crate) struct $name {
            $(pub(crate) $field: $t),*
        }
    };
}
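
// `pub_struct!` avoids repeating the derives and `pub(crate)` markers. For
// example, `pub_struct!(Secrets { lemmy: LemmyLogin, reddit: RedditLogin, });`
// below expands to a `pub(crate) struct Secrets` whose fields are all
// `pub(crate)` and which derives Serialize, Deserialize, Clone, and PartialEq.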
// Secrets structs
pub_struct!(Secrets {
    lemmy: LemmyLogin,
    reddit: RedditLogin,
});
impl Secrets {
    pub(crate) fn init() -> Secrets {
        let file_contents = match fs::read_to_string("secrets.json") {
            Ok(data) => data,
            Err(e) => panic!("ERROR: secrets.json could not be read:\n\n{:#?}", e),
        };

        let config_parse: Secrets = match serde_json::from_str(&file_contents) {
            Ok(data) => data,
            Err(e) => panic!("ERROR: secrets.json could not be parsed:\n\n{:#?}", e),
        };

        return config_parse;
    }

    pub(crate) fn load(&mut self) {
        let file_contents = match fs::read_to_string("secrets.json") {
            Ok(data) => data,
            Err(e) => panic!("ERROR: secrets.json could not be read:\n\n{:#?}", e),
        };

        let config_parse: Secrets = match serde_json::from_str(&file_contents) {
            Ok(data) => data,
            Err(e) => panic!("ERROR: secrets.json could not be parsed:\n\n{:#?}", e),
        };

        *self = config_parse;
    }
}

#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct LemmyLogin {
    pub(crate) username: String,
    password: String,
}
impl LemmyLogin {
    pub(crate) fn get_username(&self) -> Sensitive<String> {
        return Sensitive::new(self.username.clone());
    }

    pub(crate) fn get_password(&self) -> Sensitive<String> {
        return Sensitive::new(self.password.clone());
    }
}

#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct RedditLogin {
    pub(crate) app_id: String,
    app_secret: String,
    refresh_token: String,
    redirect_uri: String,
}
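
// A minimal sketch of the expected `secrets.json`, inferred from the structs
// above (placeholder values):
// {
//   "lemmy": { "username": "...", "password": "..." },
//   "reddit": {
//     "app_id": "...",
//     "app_secret": "...",
//     "refresh_token": "...",
//     "redirect_uri": "..."
//   }
// }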
// Config structs
#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct Config {
    pub(crate) instance: String,
    pub(crate) reddit_config: RedditConfig,
    pub(crate) feeds: Vec<FeedSetting>,
    pub(crate) uptime_kuma: Option<String>,
    pub(crate) config_reload: Option<usize>,
}
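
// A minimal sketch of the expected `config.json` (illustrative values; the
// shape of each `FeedSetting` entry is defined in `crate::feeds`):
// {
//   "instance": "https://lemmy.example.com",
//   "reddit_config": { "user_agent": "...", "subreddit": "..." },
//   "feeds": [ ... ],
//   "uptime_kuma": null,
//   "config_reload": 60
// }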
impl Config {
    pub(crate) fn init() -> Config {
        let file_contents = match fs::read_to_string("config.json") {
            Ok(data) => data,
            Err(e) => panic!("ERROR: config.json could not be read:\n\n{:#?}", e),
        };

        let config_parse: Config = match serde_json::from_str(&file_contents) {
            Ok(data) => data,
            Err(e) => panic!("ERROR: config.json could not be parsed:\n\n{:#?}", e),
        };

        return config_parse;
    }

    pub(crate) fn load(&mut self) {
        let file_contents = match fs::read_to_string("config.json") {
            Ok(data) => data,
            Err(e) => panic!("ERROR: config.json could not be read:\n\n{:#?}", e),
        };

        let config_parse: Config = match serde_json::from_str(&file_contents) {
            Ok(data) => data,
            Err(e) => panic!("ERROR: config.json could not be parsed:\n\n{:#?}", e),
        };

        *self = config_parse;
    }

    #[warn(unused_results)]
    pub(crate) async fn fetch_infos(&self) -> Result<(), Box<dyn Error>> {
        // Fetch the series data for every configured feed
        for feed in &self.feeds {
            let res = CLIENT
                .get(api_url!() + "series/" + &feed.series_slug + "?format=json")
                .send()
                .await?
                .text()
                .await?;

            // Parse to validate the response shape; the data itself is unused here
            let _data: FeedSeriesData = serde_json::from_str(&res)?;
        }

        return Ok(());
    }
    #[warn(unused_results)]
    pub(crate) async fn check_feeds(
        &self,
        post_history: &Vec<PrevPost>,
        community_ids: &CommunitiesVector,
        auth: &Sensitive<String>,
    ) -> Result<Vec<(CreatePost, PostQueueMetadata)>, Box<dyn Error>> {
        let mut post_queue: Vec<(CreatePost, PostQueueMetadata)> = vec![];

        for feed in &self.feeds {
            // Find this feed's post history, falling back to an empty record
            let mut history_data: &PrevPost = &PrevPost {
                id: feed.id,
                last_volume_slug: None,
                last_volume_time: None,
                last_part_slug: None,
                last_part_time: None,
            };
            for post in post_history {
                if post.id == feed.id {
                    history_data = post;
                    break;
                }
            }

            // Check for Volume Release
            let res = CLIENT
                .get(api_url!() + "series/" + &feed.series_slug + "/volumes?format=json")
                .send()
                .await?
                .text()
                .await?;

            #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
            struct FeedVolumeReturn {
                volumes: Vec<FeedVolumeData>,
            }
            let mut data: FeedVolumeReturn = serde_json::from_str(&res)?;

            // Assumes the API returns at least one volume per series
            let current_volume_slug = data.volumes[data.volumes.len() - 1].slug.clone();

            let now = Utc::now();
            data.volumes.reverse(); // newest volume first
            for volume in data.volumes {
                let published = DateTime::parse_from_rfc3339(&volume.publishing)?;
                // Post the first (i.e. newest) volume whose release date has already passed
                if now >= published {
                    if Some(volume.slug.clone()) != history_data.last_volume_slug {
                        if let Some(volume_community) = &feed.communities.volume {
                            let mut post_url =
                                Url::parse(&(volume_url_base!() + &feed.series_slug))?;
                            post_url.set_fragment(Some(
                                &("volume-".to_string() + &volume.number.to_string()),
                            ));

                            let new_post = CreatePost {
                                name: volume.title.clone(),
                                community_id: community_ids.find(volume_community),
                                url: Some(post_url),
                                body: Some(
                                    "[Reddit](https://reddit.com/r/HonzukinoGekokujou)\n\n\
                                    [Discord](https://discord.com/invite/fGefmzu)\n\n\
                                    [#ascendanceofabookworm](https://mastodon.social/tags/ascendanceofabookworm)\n\n\
                                    [#honzuki](https://mastodon.social/tags/honzuki)\n\n\
                                    [#本好きの下剋上](https://mastodon.social/tags/%E6%9C%AC%E5%A5%BD%E3%81%8D%E3%81%AE%E4%B8%8B%E5%89%8B%E4%B8%8A)"
                                        .into(),
                                ),
                                honeypot: None,
                                nsfw: Some(false),
                                // TODO: fetch this ID every few hours via the API; the ordering
                                // of IDs suggests the EN ID might change in the future
                                language_id: Some(LanguageId(37)),
                                auth: auth.clone(),
                            };

                            post_queue.push((
                                new_post,
                                PostQueueMetadata {
                                    id: feed.id,
                                    series: feed.series_slug.clone(),
                                    part: None,
                                    volume: Some(volume.slug),
                                },
                            ));
                        }
                    }
                    break;
                }
            }

            if let Some(chapter_community) = &feed.communities.chapter {
                // Check for Part Release
                let res = CLIENT
                    .get(api_url!() + "volumes/" + &current_volume_slug + "/parts?format=json")
                    .send()
                    .await?
                    .text()
                    .await?;

                #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
                struct FeedChapterReturn {
                    parts: Vec<FeedChapterData>,
                }
                let mut data: FeedChapterReturn = serde_json::from_str(&res)?;

                data.parts.reverse(); // newest part first
                for part in data.parts {
                    if Some(part.slug.clone()) != history_data.last_part_slug {
                        let new_post = CreatePost {
                            name: part.title.clone(),
                            community_id: community_ids.find(chapter_community),
                            url: Some(Url::parse(&(chapter_url!() + &part.slug))?),
                            body: Some(
                                "[Reddit](https://reddit.com/r/HonzukinoGekokujou)\n\n\
                                [Discord](https://discord.com/invite/fGefmzu)\n\n\
                                [#ascendanceofabookworm](https://mastodon.social/tags/ascendanceofabookworm)\n\n\
                                [#honzuki](https://mastodon.social/tags/honzuki)\n\n\
                                [#本好きの下剋上](https://mastodon.social/tags/%E6%9C%AC%E5%A5%BD%E3%81%8D%E3%81%AE%E4%B8%8B%E5%89%8B%E4%B8%8A)"
                                    .into(),
                            ),
                            honeypot: None,
                            nsfw: Some(false),
                            // TODO: fetch this ID every few hours via the API; the ordering
                            // of IDs suggests the EN ID might change in the future
                            language_id: Some(LanguageId(37)),
                            auth: auth.clone(),
                        };

                        post_queue.push((
                            new_post,
                            PostQueueMetadata {
                                id: feed.id,
                                series: feed.series_slug.clone(),
                                part: Some(part.slug),
                                volume: None,
                            },
                        ));
                    }
                    break;
                }
            }

            // Rate limit between feeds to avoid hammering the J-Novel servers
            sleep(Duration::milliseconds(100).to_std().unwrap()).await;
        }

        return Ok(post_queue);
    }
}

pub_struct!(RedditConfig {
    user_agent: String,
    subreddit: String,
});
//noinspection ALL
#[derive(Serialize, Deserialize, Clone, PartialEq, strum_macros::Display)]
#[allow(non_camel_case_types)]
pub(crate) enum LemmyCommunities {
    aobwebnovel,
    aobprepub,
    aoblightnovel,
    aobmanga,
    metadiscussions,
}
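
// The variant names intentionally match the Lemmy community names verbatim
// (hence the `allow(non_camel_case_types)`): `CommunitiesVector::find` below
// compares the strum-generated `to_string()` output against the community
// names returned by the API.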
// Posts structs
pub_struct!(PrevPost {
    id: usize,
    last_volume_slug: Option<String>,
    last_volume_time: Option<String>,
    last_part_slug: Option<String>,
    last_part_time: Option<String>,
});

impl PrevPost {
    pub(crate) fn load() -> Vec<PrevPost> {
        let mut history;

        if Path::new("posts.json").exists() {
            let file_contents = match fs::read_to_string("posts.json") {
                Ok(data) => data,
                Err(e) => panic!("ERROR: posts.json could not be read:\n\n{:#?}", e),
            };

            if !file_contents.is_empty() {
                let history_parse: Vec<PrevPost> = match serde_json::from_str(&file_contents) {
                    Ok(data) => data,
                    Err(e) => panic!("ERROR: posts.json could not be parsed:\n\n{:#?}", e),
                };
                history = history_parse;
            } else {
                history = Vec::new();
            }
        } else {
            let _ = fs::File::create("posts.json");
            history = Vec::new();
        }

        history.sort_by(|a, b| a.id.cmp(&b.id));

        return history;
    }

    pub(crate) fn save(data: &Vec<PrevPost>) {
        let mut file = OpenOptions::new()
            .read(true)
            .write(true)
            .create(true)
            // Truncate so leftover bytes from a longer previous file can't corrupt the JSON
            .truncate(true)
            .open("posts.json")
            .unwrap();

        let json_data = serde_json::to_string_pretty(&data).unwrap();
        write!(&mut file, "{}", json_data).unwrap();
    }
}
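
// A minimal sketch of the resulting `posts.json` (illustrative values):
// [
//   {
//     "id": 0,
//     "last_volume_slug": "some-volume-slug",
//     "last_volume_time": null,
//     "last_part_slug": "some-part-slug",
//     "last_part_time": null
//   }
// ]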
// Bot Helper Structs
pub_struct!(CommunitiesVector {
    ids: Vec<(CommunityId, String)>,
});

impl CommunitiesVector {
    pub(crate) fn new() -> CommunitiesVector {
        CommunitiesVector { ids: vec![] }
    }

    #[warn(unused_results)]
    pub(crate) async fn load(
        &mut self,
        auth: &Sensitive<String>,
        base: &String,
    ) -> Result<(), Box<dyn Error>> {
        let params = ListCommunities {
            auth: Some(auth.clone()),
            type_: Some(ListingType::Local),
            ..Default::default()
        };

        let res = CLIENT
            .get(base.clone() + "/api/v3/community/list")
            .query(&params)
            .send()
            .await?
            .text()
            .await?;

        let site_data: ListCommunitiesResponse = serde_json::from_str(&res)?;

        // Cache (id, name) pairs for lookup in `find`
        let mut ids = vec![];
        site_data.communities.iter().for_each(|entry| {
            let new_id = (entry.community.id, entry.community.name.clone());
            ids.push(new_id);
        });
        self.ids = ids;

        return Ok(());
    }

    pub(crate) fn find(&self, name: &LemmyCommunities) -> CommunityId {
        // Note: falls back to CommunityId(0) if no community matches the name
        let mut ret_id = CommunityId(0);

        self.ids.iter().for_each(|id| {
            let id_name = &id.1;
            if &name.to_string() == id_name {
                ret_id = id.0;
            }
        });

        return ret_id;
    }
}
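
// A rough sketch of how these pieces fit together, as the signatures above
// suggest (the actual main loop lives elsewhere in the crate; `auth` stands
// in for a session token obtained via `LemmyLogin`):
//
//   let config = Config::init();
//   let mut communities = CommunitiesVector::new();
//   communities.load(&auth, &config.instance).await?;
//   let history = PrevPost::load();
//   let queue = config.check_feeds(&history, &communities, &auth).await?;
//   // ...submit each queued post, update the history, then PrevPost::save(&history)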