use std::{
    error::Error,
    fs::{self, OpenOptions},
    io::Write,
    path::Path,
    time::Duration,
};

use chrono::{DateTime, Utc};
use lemmy_api_common::{
    community::{ListCommunities, ListCommunitiesResponse},
    post::CreatePost,
    sensitive::Sensitive,
};
use lemmy_db_schema::{
    newtypes::{CommunityId, LanguageId},
    ListingType,
};
use serde_derive::{Deserialize, Serialize};
use tokio::time::sleep;
use url::Url;

use crate::{
    feeds::{FeedChapterData, FeedSeriesData, FeedSetting, FeedVolumeData},
    PostQueueMetadata, CLIENT,
};

/// Base URL of the J-Novel Club REST API; endpoint paths are appended to it.
macro_rules! api_url {
    () => {
        "https://labs.j-novel.club/app/v1/".to_string()
    };
}

/// Base URL of the J-Novel Club part reader; a part slug is appended to it.
macro_rules! chapter_url {
    () => {
        "https://j-novel.club/read/".to_string()
    };
}

/// Base URL of a J-Novel Club series page; a series slug (plus a volume fragment) is appended.
macro_rules! volume_url_base {
    () => {
        "https://j-novel.club/series/".to_string()
    };
}

/// Generates a `pub(crate)` struct whose fields are all `pub(crate)` and which
/// derives the serde, `Clone`, and `PartialEq` impls shared by the structs below.
macro_rules! pub_struct {
    ($name:ident {$($field:ident: $t:ty,)*}) => {
        #[derive(Serialize, Deserialize, Clone, PartialEq)]
        pub(crate) struct $name {
            $(pub(crate) $field: $t),*
        }
    }
}
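
// For reference, `pub_struct!(Example { field: String, })` expands to roughly:
//
//     #[derive(Serialize, Deserialize, Clone, PartialEq)]
//     pub(crate) struct Example {
//         pub(crate) field: String,
//     }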

// Secrets structs
pub_struct!(Secrets {
    lemmy: LemmyLogin,
    reddit: RedditLogin,
});

impl Secrets {
    pub(crate) fn init() -> Secrets {
        let file_contents = match fs::read_to_string("secrets.json") {
            Ok(data) => data,
            Err(e) => panic!("ERROR: secrets.json could not be read:\n\n{:#?}", e),
        };

        match serde_json::from_str(&file_contents) {
            Ok(data) => data,
            Err(e) => panic!("ERROR: secrets.json could not be parsed:\n\n{:#?}", e),
        }
    }

    /// Re-reads `secrets.json` and replaces the current values in place.
    pub(crate) fn load(&mut self) {
        *self = Secrets::init();
    }
}

#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct LemmyLogin {
    pub(crate) username: String,
    password: String,
}

impl LemmyLogin {
    pub(crate) fn get_username(&self) -> Sensitive<String> {
        Sensitive::new(self.username.clone())
    }

    pub(crate) fn get_password(&self) -> Sensitive<String> {
        Sensitive::new(self.password.clone())
    }
}

#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct RedditLogin {
    pub(crate) app_id: String,
    app_secret: String,
    refresh_token: String,
    redirect_uri: String,
}

// Config structs
#[derive(Serialize, Deserialize, Clone, PartialEq)]
pub(crate) struct Config {
    pub(crate) instance: String,
    pub(crate) reddit_config: RedditConfig,
    pub(crate) feeds: Vec<FeedSetting>,
    pub(crate) uptime_kuma: Option<String>,
    pub(crate) config_reload: Option<usize>,
}
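
// An illustrative `config.json`; the `feeds` entries follow `FeedSetting` from
// `crate::feeds` and are elided here, all other values are placeholders:
//
//     {
//         "instance": "https://lemmy.example.org",
//         "reddit_config": { "user_agent": "...", "subreddit": "..." },
//         "feeds": [ ... ],
//         "uptime_kuma": null,
//         "config_reload": 24
//     }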

impl Config {
    pub(crate) fn init() -> Config {
        let file_contents = match fs::read_to_string("config.json") {
            Ok(data) => data,
            Err(e) => panic!("ERROR: config.json could not be read:\n\n{:#?}", e),
        };

        match serde_json::from_str(&file_contents) {
            Ok(data) => data,
            Err(e) => panic!("ERROR: config.json could not be parsed:\n\n{:#?}", e),
        }
    }

    /// Re-reads `config.json` and replaces the current values in place.
    pub(crate) fn load(&mut self) {
        *self = Config::init();
    }

    #[warn(unused_results)]
    pub(crate) async fn fetch_infos(&self) -> Result<(), Box<dyn Error>> {
        // Fetch the series metadata for every configured feed. The parsed value is
        // currently unused beyond confirming that the response deserializes.
        for feed in &self.feeds {
            let res = CLIENT
                .get(api_url!() + "series/" + &feed.series_slug + "?format=json")
                .send()
                .await?
                .text()
                .await?;

            let _data: FeedSeriesData = serde_json::from_str(&res)?;
        }
        Ok(())
    }

    #[warn(unused_results)]
    pub(crate) async fn check_feeds(
        &self,
        post_history: &[PrevPost],
        community_ids: &CommunitiesVector,
        auth: &Sensitive<String>,
    ) -> Result<Vec<(CreatePost, PostQueueMetadata)>, Box<dyn Error>> {
        let mut post_queue: Vec<(CreatePost, PostQueueMetadata)> = vec![];

        for feed in &self.feeds {
            // Fall back to an empty history entry if this feed has never posted before.
            let default_history = PrevPost {
                id: feed.id,
                last_volume_slug: None,
                last_volume_time: None,
                last_part_slug: None,
                last_part_time: None,
            };
            let history_data: &PrevPost = post_history
                .iter()
                .find(|post| post.id == feed.id)
                .unwrap_or(&default_history);

            // Check for Volume Release
            let res = CLIENT
                .get(api_url!() + "series/" + &feed.series_slug + "/volumes?format=json")
                .send()
                .await?
                .text()
                .await?;

            #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
            struct FeedVolumeReturn {
                volumes: Vec<FeedVolumeData>,
            }
            let data: FeedVolumeReturn = serde_json::from_str(&res)?;

            let current_volume_slug = data
                .volumes
                .last()
                .expect("series returned no volumes")
                .slug
                .clone();

            let now = Utc::now();
            // Walk the volumes newest-first and handle the first one that is already published.
            for volume in data.volumes.into_iter().rev() {
                let published = DateTime::parse_from_rfc3339(&volume.publishing)?;
                if now >= published {
                    if Some(volume.slug.clone()) != history_data.last_volume_slug {
                        if let Some(volume_community) = &feed.communities.volume {
                            let mut post_url = Url::parse(&(volume_url_base!() + &feed.series_slug))?;
                            post_url.set_fragment(Some(&("volume-".to_string() + &volume.number.to_string())));

                            let new_post = CreatePost {
                                name: volume.title.clone(),
                                community_id: community_ids.find(volume_community),
                                url: Some(post_url),
                                body: Some(
                                    "[Reddit](https://reddit.com/r/HonzukinoGekokujou)\n\n\
                                     [Discord](https://discord.com/invite/fGefmzu)\n\n\
                                     [#ascendanceofabookworm](https://mastodon.social/tags/ascendanceofabookworm)\n\n\
                                     [#honzuki](https://mastodon.social/tags/honzuki)\n\n\
                                     [#本好きの下剋上](https://mastodon.social/tags/%E6%9C%AC%E5%A5%BD%E3%81%8D%E3%81%AE%E4%B8%8B%E5%89%8B%E4%B8%8A)"
                                        .into(),
                                ),
                                honeypot: None,
                                nsfw: Some(false),
                                language_id: Some(LanguageId(37)), // TODO: fetch this ID periodically via the API; the ID ordering suggests the English ID may change
                                auth: auth.clone(),
                            };
                            post_queue.push((
                                new_post,
                                PostQueueMetadata {
                                    id: feed.id,
                                    series: feed.series_slug.clone(),
                                    part: None,
                                    volume: Some(volume.slug),
                                },
                            ));
                        }
                    }
                    break;
                }
            }

            if let Some(chapter_community) = &feed.communities.chapter {
                // Check for Part Release
                let res = CLIENT
                    .get(api_url!() + "volumes/" + &current_volume_slug + "/parts?format=json")
                    .send()
                    .await?
                    .text()
                    .await?;

                #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
                struct FeedChapterReturn {
                    parts: Vec<FeedChapterData>,
                }
                let mut data: FeedChapterReturn = serde_json::from_str(&res)?;

                // Only the newest part can be a fresh release, so only the last entry is checked.
                if let Some(part) = data.parts.pop() {
                    if Some(part.slug.clone()) != history_data.last_part_slug {
                        let new_post = CreatePost {
                            name: part.title.clone(),
                            community_id: community_ids.find(chapter_community),
                            url: Some(Url::parse(&(chapter_url!() + &part.slug))?),
                            body: Some(
                                "[Reddit](https://reddit.com/r/HonzukinoGekokujou)\n\n\
                                 [Discord](https://discord.com/invite/fGefmzu)\n\n\
                                 [#ascendanceofabookworm](https://mastodon.social/tags/ascendanceofabookworm)\n\n\
                                 [#honzuki](https://mastodon.social/tags/honzuki)\n\n\
                                 [#本好きの下剋上](https://mastodon.social/tags/%E6%9C%AC%E5%A5%BD%E3%81%8D%E3%81%AE%E4%B8%8B%E5%89%8B%E4%B8%8A)"
                                    .into(),
                            ),
                            honeypot: None,
                            nsfw: Some(false),
                            language_id: Some(LanguageId(37)), // TODO: fetch this ID periodically via the API; the ID ordering suggests the English ID may change
                            auth: auth.clone(),
                        };
                        post_queue.push((
                            new_post,
                            PostQueueMetadata {
                                id: feed.id,
                                series: feed.series_slug.clone(),
                                part: Some(part.slug),
                                volume: None,
                            },
                        ));
                    }
                }
            }
            sleep(Duration::from_millis(100)).await; // Brief pause between feeds to avoid hammering J-Novel Club's servers
        }

        Ok(post_queue)
    }
}

pub_struct!(RedditConfig {
    user_agent: String,
    subreddit: String,
});

// Communities the bot posts to. Variant names must match the community names on
// the Lemmy instance exactly, which is why they are lowercase.
#[derive(Serialize, Deserialize, Clone, PartialEq, strum_macros::Display)]
#[allow(non_camel_case_types)]
pub(crate) enum LemmyCommunities {
    aobwebnovel,
    aobprepub,
    aoblightnovel,
    aobmanga,
    metadiscussions,
}
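
// `strum_macros::Display` renders the bare variant name, e.g.
// `LemmyCommunities::aobprepub.to_string() == "aobprepub"`, which is what
// `CommunitiesVector::find` matches against community names below.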

// Posts structs
pub_struct!(PrevPost {
    id: usize,
    last_volume_slug: Option<String>,
    last_volume_time: Option<String>,
    last_part_slug: Option<String>,
    last_part_time: Option<String>,
});
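
// An illustrative `posts.json` entry (values are placeholders; the timestamps
// are stored as plain strings):
//
//     [
//         {
//             "id": 0,
//             "last_volume_slug": "...",
//             "last_volume_time": "...",
//             "last_part_slug": "...",
//             "last_part_time": "..."
//         }
//     ]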

impl PrevPost {
    pub(crate) fn load() -> Vec<PrevPost> {
        let mut history: Vec<PrevPost> = if Path::new("posts.json").exists() {
            let file_contents = match fs::read_to_string("posts.json") {
                Ok(data) => data,
                Err(e) => panic!("ERROR: posts.json could not be read:\n\n{:#?}", e),
            };

            if file_contents.is_empty() {
                // An existing but empty file means nothing has been posted yet.
                Vec::new()
            } else {
                match serde_json::from_str(&file_contents) {
                    Ok(data) => data,
                    Err(e) => panic!("ERROR: posts.json could not be parsed:\n\n{:#?}", e),
                }
            }
        } else {
            // Create the file so the first `save` call has something to open.
            let _ = fs::File::create("posts.json");
            Vec::new()
        };

        history.sort_by(|a, b| a.id.cmp(&b.id));

        history
    }

    pub(crate) fn save(data: &[PrevPost]) {
        let mut file = OpenOptions::new()
            .write(true)
            .create(true)
            .truncate(true) // Truncate, otherwise a shorter write would leave stale JSON behind
            .open("posts.json")
            .unwrap();

        let json_data = serde_json::to_string_pretty(&data).unwrap();

        write!(&mut file, "{}", json_data).unwrap();
    }
}

// Bot Helper Structs

// Caches `(id, name)` pairs of the communities on the configured Lemmy instance.
pub_struct!(CommunitiesVector {
    ids: Vec<(CommunityId, String)>,
});

impl CommunitiesVector {
    pub(crate) fn new() -> CommunitiesVector {
        CommunitiesVector { ids: vec![] }
    }

    #[warn(unused_results)]
    pub(crate) async fn load(&mut self, auth: &Sensitive<String>, base: &str) -> Result<(), Box<dyn Error>> {
        let params = ListCommunities {
            auth: Some(auth.clone()),
            type_: Some(ListingType::Local),
            ..Default::default()
        };

        let res = CLIENT
            .get(base.to_owned() + "/api/v3/community/list")
            .query(&params)
            .send()
            .await?
            .text()
            .await?;

        let site_data: ListCommunitiesResponse = serde_json::from_str(&res)?;

        self.ids = site_data
            .communities
            .iter()
            .map(|entry| (entry.community.id, entry.community.name.clone()))
            .collect();
        Ok(())
    }

    /// Looks up a community's ID by name, falling back to `CommunityId(0)` when
    /// no community on the instance matches.
    pub(crate) fn find(&self, name: &LemmyCommunities) -> CommunityId {
        let name = name.to_string();
        self.ids
            .iter()
            .find(|(_, community_name)| *community_name == name)
            .map(|(id, _)| *id)
            .unwrap_or(CommunityId(0))
    }
}
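
// Typical usage (sketch; `auth` comes from a Lemmy login and `config` is the
// parsed `Config` above):
//
//     let mut communities = CommunitiesVector::new();
//     communities.load(&auth, &config.instance).await?;
//     let community_id = communities.find(&LemmyCommunities::aobprepub);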