Initial Code Commit
parent 323fd01d0a
commit dc67df51ce
8 changed files with 1436 additions and 0 deletions

.gitignore (new file, vendored, 2 lines)
@@ -0,0 +1,2 @@
/target
.env

.idea/.gitignore (new file, vendored, 5 lines)
@@ -0,0 +1,5 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/

.idea/material_theme_project_new.xml (new file, 10 lines)
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="MaterialThemeProjectNewConfig">
    <option name="metadata">
      <MTProjectMetadataState>
        <option name="userId" value="35e9767a:19031e016a2:-7f1b" />
      </MTProjectMetadataState>
    </option>
  </component>
</project>

.idea/misc.xml (new file, 47 lines)
@@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectInspectionProfilesVisibleTreeState">
    <entry key="Project Default">
      <profile-state>
        <expanded-state>
          <State>
            <id>AccessibilityHTML</id>
          </State>
          <State>
            <id>Code style issuesJavaScript and TypeScript</id>
          </State>
          <State>
            <id>ES2015 migration aidsJavaScript and TypeScript</id>
          </State>
          <State>
            <id>GitHub actions</id>
          </State>
          <State>
            <id>HTML</id>
          </State>
          <State>
            <id>JavaScript and TypeScript</id>
          </State>
          <State>
            <id>RegExp</id>
          </State>
          <State>
            <id>Rust</id>
          </State>
          <State>
            <id>Switch statement issuesJavaScript and TypeScript</id>
          </State>
          <State>
            <id>TypeScriptJavaScript and TypeScript</id>
          </State>
          <State>
            <id>XML</id>
          </State>
          <State>
            <id>XSLT</id>
          </State>
        </expanded-state>
      </profile-state>
    </entry>
  </component>
</project>

Cargo.lock (new file, generated, 1026 lines)
File diff suppressed because it is too large.

Cargo.toml (new file, 16 lines)
@@ -0,0 +1,16 @@
[package]
name = "kavita-web-manager-api"
version = "0.1.0"
edition = "2021"

[dependencies]
axum = "0.7"
chrono = "0.4"
dotenv = "0.15"
futures = "0.3"
log = "0.4"
parking_lot = "0.12"
systemd-journal-logger = "2.1"
tokio = { version = "1.39", features = ["full"] }
tokio-util = { version = "0.7", features = ["io"] }
urlencoding = "2.1"

src/logging.rs (new file, 114 lines)
@@ -0,0 +1,114 @@
use std::collections::{HashMap, VecDeque};
use chrono::{DateTime, Utc};
use log::Level;
use systemd_journal_logger::connected_to_journal;

#[derive(Clone)]
pub struct Logging {
    mem_log: HashMap<Level, VecDeque<LogEvent>>,
    mem_log_lengths: HashMap<Level, i8>,
}

impl Logging {
    pub fn new(log_lengths: Option<HashMap<Level, i8>>) -> Self {
        let mut mem_log = HashMap::new();
        let mut mem_log_lengths = HashMap::new();

        Level::iter().for_each(|level| {
            let mem_vec = VecDeque::new();
            mem_log.insert(level, mem_vec);

            let length = match level {
                Level::Error => -1,
                Level::Warn => 40,
                Level::Info => 20,
                Level::Debug => 10,
                Level::Trace => 5,
            };
            mem_log_lengths.insert(level, length);
        });

        if let Some(lengths) = log_lengths {
            lengths.iter().for_each(|(level, length)| {
                mem_log_lengths.insert(*level, *length);
            });
        }

        Self {
            mem_log,
            mem_log_lengths
        }
    }

    pub fn debug(&mut self, msg: String) {
        let msg = format!("[DEBUG] {msg}");
        match connected_to_journal() {
            true => log::debug!("{msg}"),
            false => println!("{msg}"),
        }
        self.mem_log(Level::Debug, Some(msg));
    }

    pub fn info(&mut self, msg: String) {
        let msg = format!("[INFO] {msg}");
        match connected_to_journal() {
            true => log::info!("{msg}"),
            false => println!("{msg}"),
        }
        self.mem_log(Level::Info, Some(msg));
    }

    pub fn warn(&mut self, msg: String) {
        let msg = format!("[WARN] {msg}");
        match connected_to_journal() {
            true => log::warn!("{msg}"),
            false => println!("{msg}"),
        }
        self.mem_log(Level::Warn, Some(msg));
    }

    pub fn error(&mut self, msg: String) {
        let msg = format!("[ERROR] {msg}");
        match connected_to_journal() {
            true => log::error!("{msg}"),
            false => eprintln!("{msg}"),
        }
        self.mem_log(Level::Error, Some(msg));
    }

    pub fn get_mem_log(&mut self, level: Level) -> VecDeque<LogEvent> {
        self.mem_log(level, None)
    }

    fn mem_log(&mut self, level: Level, msg: Option<String>) -> VecDeque<LogEvent> {
        let max_len = self.mem_log_lengths.get(&level).expect("All Levels Should Have Lengths Initialized");
        let list = self.mem_log.get_mut(&level).expect("All Levels Should Have Empty Lists Initialized");

        if let Some(msg) = msg {
            let now = Utc::now();
            let log_event = LogEvent::new(now, msg);
            list.push_back(log_event);
            if *max_len != -1 {
                while list.len() > *max_len as usize {
                    list.pop_front();
                }
            }
        }

        list.clone()
    }
}

#[derive(Clone)]
pub(crate) struct LogEvent {
    pub date: DateTime<Utc>,
    pub text: String,
}

impl LogEvent {
    pub fn new(time: DateTime<Utc>, message: String) -> Self {
        Self {
            date: time,
            text: message
        }
    }
}
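
For reference, a minimal usage sketch of the Logging API introduced above (not part of the commit; it assumes a caller inside the same crate, e.g. from main.rs where `mod logging;` is declared):

use log::Level;
use crate::logging::Logging;

fn logging_demo() {
    // None keeps the default per-level buffer lengths (Error unbounded, Warn 40, Info 20, ...).
    let mut log = Logging::new(None);
    log.info("service started".to_owned());   // goes to journald when available, else stdout
    log.warn("low disk space".to_owned());

    // Read back the in-memory buffer for a given level.
    for event in log.get_mem_log(Level::Info) {
        println!("{} {}", event.date, event.text);
    }
}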

src/main.rs (new file, 216 lines)
@@ -0,0 +1,216 @@
mod logging;

use std::collections::HashMap;
use std::path::{Path, PathBuf};
use axum::body::BodyDataStream;
use axum::extract::{Request, State};
use axum::http::StatusCode;
use axum::Router;
use axum::routing::post;
use dotenv::{dotenv, var};
use futures::TryStreamExt;
use log::LevelFilter;
use systemd_journal_logger::JournalLog;
use tokio::fs::File;
use tokio::{fs, io};
use tokio::io::BufWriter;
use tokio_util::io::StreamReader;
use urlencoding::decode;
use crate::logging::Logging;

#[derive(Clone)]
struct App {
    log: Logging,
    directories: HashMap<String, String>
}

impl App {
    pub fn init_directories(&mut self) {
        let root_dir = match var("ROOT_DIRECTORY") {
            Ok(dir) => {
                self.log.info(format!("ROOT_DIRECTORY set to '{dir}'"));
                dir
            }
            Err(e) => {
                self.log.error(format!("ROOT_DIRECTORY not set: {e}. Aborting."));
                panic!("ROOT_DIRECTORY not set: {e}. Aborting.");
            }
        };

        let novel_dir = match var("NOVEL_DIRECTORY") {
            Ok(dir) => {
                self.log.info(format!("NOVEL_DIRECTORY set to '{root_dir}/{dir}'"));
                format!("{root_dir}/{dir}")
            }
            Err(e) => {
                self.log.error(format!("NOVEL_DIRECTORY not set: {e}. Defaulting to '{root_dir}/novels'."));
                format!("{root_dir}/novels")
            }
        };

        self.directories.insert("Novel".to_owned(), novel_dir);

        let manga_dir = match var("MANGA_DIRECTORY") {
            Ok(dir) => {
                self.log.info(format!("MANGA_DIRECTORY set to '{root_dir}/{dir}'"));
                format!("{root_dir}/{dir}")
            }
            Err(e) => {
                self.log.error(format!("MANGA_DIRECTORY not set: {e}. Defaulting to '{root_dir}/manga'."));
                format!("{root_dir}/manga")
            }
        };

        self.directories.insert("Manga".to_owned(), manga_dir);

        let hentai_dir = match var("HENTAI_DIRECTORY") {
            Ok(dir) => {
                self.log.info(format!("HENTAI_DIRECTORY set to '{root_dir}/{dir}'"));
                format!("{root_dir}/{dir}")
            }
            Err(e) => {
                self.log.error(format!("HENTAI_DIRECTORY not set: {e}. Defaulting to '{root_dir}/hentai'."));
                format!("{root_dir}/hentai")
            }
        };

        self.directories.insert("Hentai".to_owned(), hentai_dir);
    }
}

#[tokio::main]
async fn main() {
    dotenv().expect("Failed to init dotenv");

    JournalLog::new()
        .expect("Systemd-Logger crate error")
        .install()
        .expect("Systemd-Logger crate error");

    match var("LOG_LEVEL") {
        Ok(level) => {
            match level.as_str() {
                "debug" => log::set_max_level(LevelFilter::Debug),
                "info" => log::set_max_level(LevelFilter::Info),
                _ => log::set_max_level(LevelFilter::Info),
            }
        }
        _ => log::set_max_level(LevelFilter::Info),
    }

    let mut app = App {
        log: Logging::new(None),
        directories: HashMap::new(),
    };

    app.init_directories();

    let api = Router::new()
        .route("/upload", post(|State(mut state): State<App>, request: Request| async move {
            upload_file(&mut state, request).await;
        }))
        .with_state(app);

    let listener = tokio::net::TcpListener::bind("[::]:3000").await.unwrap();
    axum::serve(listener, api).await.unwrap();
}

#[derive(Debug)]
struct FilePath {
    format: String,
    series: String,
    volume: String,
    extension: String
}

impl FilePath {
    fn new() -> Self {
        Self {
            format: "".to_owned(),
            series: "".to_owned(),
            volume: "".to_owned(),
            extension: "".to_owned()
        }
    }

    fn check_valid(&self) -> bool {
        if self.format == "" || self.series == "" || self.volume == "" || self.extension == "" {
            return false
        }
        return true
    }

    fn to_pathbuf(&self) -> PathBuf {
        Path::new(format!("{}/{}/{}/{}.{}", self.format, self.series, self.volume, self.volume, self.extension).as_str()).to_path_buf()
    }
}

async fn upload_file(state: &mut App, request: Request) {
    let params_raw: Vec<&str> = request.uri().query().unwrap().split('&').collect();
    let mut file = FilePath::new();
    params_raw.iter().for_each(|param| {
        let split: Vec<&str> = param.split('=').collect();
        state.log.info(format!("Parsing Parameter Key-Value Pair '{param}'"));
        match split[0] {
            "format" => {
                file.format.clone_from(state.directories.get(split[1]).expect("Assume Valid Format Was Provided"));
            },
            "series" => {
                file.series = decode(split[1]).expect("UTF-8").to_string();
            },
            "volume" => {
                file.volume = decode(split[1]).expect("UTF-8").to_string();
            },
            k => {
                state.log.warn(format!("Parameter {k} is not known and will be ignored"));
            }
        }
    });

    let content_type = request.headers().get("Content-Type").expect("Content Type Should Have Been provided").to_str().expect("Content Type Should Be String");

    file.extension = match content_type {
        "application/epub+zip" => "epub".to_owned(),
        "application/comic+zip" => "cbz".to_owned(),
        "application/pdf" => "pdf".to_owned(),
        ct => {
            state.log.error(format!("Invalid Content Type '{ct}' Provided, Aborting"));
            panic!("Invalid Content Type '{ct}'")
        }
    };

    println!("{:#?}", file);

    if !file.check_valid() {
        //return Err((StatusCode::BAD_REQUEST, "Format not specified".to_owned()));
    }

    let pathbuf = file.to_pathbuf();
    state.log.info(format!("File Path '{}'", pathbuf.clone().display()));

    let file_stream = request.into_body().into_data_stream();
    if let Err(e) = stream_to_file(&pathbuf, file_stream).await {
        state.log.error(format!("{}: {}", e.0, e.1));
    };
}

async fn stream_to_file(path: &PathBuf, stream: BodyDataStream) -> Result<(), (StatusCode, String)>
{
    if !Path::exists(path.parent().unwrap()) {
        fs::create_dir_all(path.parent().unwrap()).await.expect("Unable to Create Path");
    }

    async {
        let body_with_io_error = stream.map_err(|err| io::Error::new(io::ErrorKind::Other, err));
        let body_reader = StreamReader::new(body_with_io_error);
        futures::pin_mut!(body_reader);

        let mut file = BufWriter::new(File::create(path).await?);

        io::copy(&mut body_reader, &mut file).await?;

        Ok::<_, io::Error>(())
    }
    .await
    .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))
}
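
For illustration, a hedged client sketch for the /upload route above (not part of this commit; it assumes the `reqwest` crate, a hypothetical local file name, and the port 3000 bound in main()):

// Upload an EPUB into the "Novel" format directory.
// The query keys match what upload_file() parses: format, series, volume.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let bytes = tokio::fs::read("example.epub").await?; // hypothetical local file
    let status = reqwest::Client::new()
        .post("http://localhost:3000/upload?format=Novel&series=Example%20Series&volume=Volume%2001")
        .header("Content-Type", "application/epub+zip") // maps to the "epub" extension in upload_file()
        .body(bytes)
        .send()
        .await?
        .status();
    println!("upload returned {status}");
    Ok(())
}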