Compare commits

...

3 commits

SHA1        Message                    Date
34883f2d5c  Add Debian Configuration   2024-08-07 18:32:38 +02:00
            (Run Tests on Code / run-tests (push): successful in 28s)
ad53bc9ee3  Forgejo Actions            2024-08-07 18:32:37 +02:00
dc67df51ce  Initial Code Commit        2024-08-07 18:32:37 +02:00
12 changed files with 1714 additions and 0 deletions


@@ -0,0 +1,146 @@
name: 'Build and release binary file and packages'
author: 'Neshura'

on:
  push:
    tags:
      - '[0-9]+.[0-9]+.[0-9]+'
      - '[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'

jobs:
  test:
    runs-on: docker
    container: forgejo.neshweb.net/ci-docker-images/rust-node:latest
    steps:
      -
        name: Add Clippy
        run: rustup component add clippy
      -
        name: Checking Out Repository Code
        uses: https://code.forgejo.org/actions/checkout@v3
      -
        name: Set Up Cargo Cache
        uses: actions/cache@v3
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      -
        name: Run Clippy
        run: cargo clippy
      -
        name: Check if Version in Cargo.toml matches Tag
        run: |
          VERSION=$(cat Cargo.toml | grep -E "(^|\|)version =" | cut -f2- -d= | tr -d \" | tr -d " ")
          if test $VERSION != "${{ github.ref_name }}"; then
            echo "Expected Version is: '${{ github.ref_name }}' actual Version is: '$VERSION'";
            exit 1
          else
            echo "Version is: '$VERSION'";
          fi
  build:
    needs: test
    if: success()
    runs-on: docker
    container: forgejo.neshweb.net/ci-docker-images/rust-node:latest
    steps:
      -
        name: Checking Out Repository Code
        uses: https://code.forgejo.org/actions/checkout@v3
      -
        name: Prepare build environment
        run: mkdir dist
      -
        name: Compiling To Linux Target
        run: |
          cargo build -r
          mv target/release/${{ github.event.repository.name }} dist/${{ github.event.repository.name }}-linux-amd64
      -
        name: Bundle .deb package
        run: |
          cargo deb
          DEBIAN_REF=$(echo ${{ github.ref_name }} | tr - \~)
          echo "DEBIAN_REF=$DEBIAN_REF" >> dist/build.env
          DEBIAN_REV=-$(cat Cargo.toml | grep -E "(^|\|)revision =" | cut -f2- -d= | tr -d \" | tr -d " ")
          echo "DEBIAN_REV=$DEBIAN_REV" >> dist/build.env
          mv target/debian/${{ github.event.repository.name }}_"$DEBIAN_REF""$DEBIAN_REV"_amd64.deb dist/${{ github.event.repository.name }}_"$DEBIAN_REF""$DEBIAN_REV"_amd64.deb
      -
        name: Uploading Build Artifact
        uses: actions/upload-artifact@v3
        with:
          name: release_blobs
          path: dist
          if-no-files-found: error
  upload-generic-package:
    needs: build
    if: success()
    runs-on: docker
    steps:
      -
        name: Downloading All Build Artifacts
        uses: actions/download-artifact@v3
      -
        name: Upload Binary
        run: |
          echo 'curl -v --user ${{ secrets.FORGEJO_USERNAME }}:${{ secrets.FORGEJO_TOKEN }} \
            --upload-file release_blobs/${{ github.event.repository.name }}-linux-amd64 \
            https://forgejo.neshweb.net/api/packages/${{ secrets.FORGEJO_USERNAME }}/generic/${{ github.event.repository.name }}/${{ github.ref_name }}/${{ github.event.repository.name }}-linux-amd64'
          curl -v --user ${{ secrets.FORGEJO_USERNAME }}:${{ secrets.FORGEJO_TOKEN }} \
            --upload-file release_blobs/${{ github.event.repository.name }}-linux-amd64 \
            https://forgejo.neshweb.net/api/packages/${{ secrets.FORGEJO_USERNAME }}/generic/${{ github.event.repository.name }}/${{ github.ref_name }}/${{ github.event.repository.name }}-linux-amd64
  upload-debian-package:
    needs: build
    if: success()
    runs-on: docker
    steps:
      -
        name: Downloading All Build Artifacts
        uses: actions/download-artifact@v3
      -
        name: Upload Debian Package to staging
        run: |
          source release_blobs/build.env
          echo 'curl -v --user ${{ secrets.FORGEJO_USERNAME }}:${{ secrets.FORGEJO_TOKEN }} \
            --upload-file release_blobs/${{ github.event.repository.name }}_'"$DEBIAN_REF""$DEBIAN_REV"'_amd64.deb \
            https://forgejo.neshweb.net/api/packages/${{ secrets.FORGEJO_USERNAME }}/debian/pool/bookworm/staging/upload'
          curl -v --user ${{ secrets.FORGEJO_USERNAME }}:${{ secrets.FORGEJO_TOKEN }} \
            --upload-file release_blobs/${{ github.event.repository.name }}_"$DEBIAN_REF""$DEBIAN_REV"_amd64.deb \
            https://forgejo.neshweb.net/api/packages/${{ secrets.FORGEJO_USERNAME }}/debian/pool/bookworm/staging/upload
      -
        name: Upload Debian Package to main
        if: (! contains(github.ref_name, '-rc'))
        run: |
          source release_blobs/build.env
          echo 'curl -v --user ${{ secrets.FORGEJO_USERNAME }}:${{ secrets.FORGEJO_TOKEN }} \
            --upload-file release_blobs/${{ github.event.repository.name }}_'"$DEBIAN_REF""$DEBIAN_REV"'_amd64.deb \
            https://forgejo.neshweb.net/api/packages/${{ secrets.FORGEJO_USERNAME }}/debian/pool/bookworm/main/upload'
          curl -v --user ${{ secrets.FORGEJO_USERNAME }}:${{ secrets.FORGEJO_TOKEN }} \
            --upload-file release_blobs/${{ github.event.repository.name }}_"$DEBIAN_REF""$DEBIAN_REV"_amd64.deb \
            https://forgejo.neshweb.net/api/packages/${{ secrets.FORGEJO_USERNAME }}/debian/pool/bookworm/main/upload
  create-release:
    needs: build
    if: success()
    runs-on: docker
    steps:
      -
        name: Downloading All Build Artifacts
        uses: actions/download-artifact@v3
      -
        name: Filter out env files
        run: rm release_blobs/build.env
      -
        name: Release New Version
        uses: actions/forgejo-release@v2
        with:
          direction: upload
          url: https://forgejo.neshweb.net
          release-dir: release_blobs
          token: ${{ secrets.FORGEJO_TOKEN }}
          tag: ${{ github.ref_name }}
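
For reference, the "Bundle .deb package" step rewrites the pushed tag into a Debian-compatible version (every `-` becomes `~`, so a release candidate sorts before the final release) and appends the `revision` value from Cargo.toml. A minimal Rust sketch of the same transformation, not part of this repository and with an illustrative function name, could look like:

    // Mirrors the DEBIAN_REF / DEBIAN_REV mangling from the "Bundle .deb package" step.
    // In Debian version ordering, `~` sorts before the empty string, so 0.1.0~rc.1-1
    // precedes 0.1.0-1.
    fn debian_package_name(repo: &str, tag: &str, revision: &str) -> String {
        let debian_ref = tag.replace('-', "~"); // e.g. "0.1.0-rc.1" -> "0.1.0~rc.1"
        format!("{repo}_{debian_ref}-{revision}_amd64.deb")
    }

    fn main() {
        assert_eq!(
            debian_package_name("kavita-web-manager-api", "0.1.0-rc.1", "1"),
            "kavita-web-manager-api_0.1.0~rc.1-1_amd64.deb"
        );
    }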


@@ -0,0 +1,67 @@
name: 'Build binary file and bundle packages'
author: 'Neshura'

on:
  pull_request:
    branches:
      - main

jobs:
  test:
    runs-on: docker
    container: forgejo.neshweb.net/ci-docker-images/rust-node:latest
    steps:
      -
        name: Add Clippy
        run: rustup component add clippy
      -
        name: Checking Out Repository Code
        uses: https://code.forgejo.org/actions/checkout@v3
      -
        name: Set Up Cargo Cache
        uses: actions/cache@v3
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      -
        name: Run Clippy
        run: cargo clippy
  build:
    needs: test
    if: success()
    runs-on: docker
    container: forgejo.neshweb.net/ci-docker-images/rust-node:latest
    steps:
      -
        name: Checking Out Repository Code
        uses: https://code.forgejo.org/actions/checkout@v3
      -
        name: Prepare build environment
        run: mkdir dist
      -
        name: Compiling To Linux Target
        run: |
          cargo build -r
          mv target/release/${{ github.event.repository.name }} dist/${{ github.event.repository.name }}-linux-amd64
      -
        name: Bundle .deb package
        run: |
          cargo deb
          DEBIAN_REF=$(cat Cargo.toml | grep -E "(^|\|)version =" | cut -f2- -d= | tr -d \" | tr -d " " | tr - \~)
          echo "DEBIAN_REF=$DEBIAN_REF" >> dist/build.env
          DEBIAN_REV=-$(cat Cargo.toml | grep -E "(^|\|)revision =" | cut -f2- -d= | tr -d \" | tr -d " ")
          echo "DEBIAN_REV=$DEBIAN_REV" >> dist/build.env
          mv target/debian/${{ github.event.repository.name }}_"$DEBIAN_REF""$DEBIAN_REV"_amd64.deb dist/${{ github.event.repository.name }}_"$DEBIAN_REF""$DEBIAN_REV"_amd64.deb
      -
        name: Uploading Build Artifact
        uses: actions/upload-artifact@v3
        with:
          name: release_blobs
          path: dist
          if-no-files-found: error


@@ -0,0 +1,34 @@
name: 'Run Tests on Code'
author: 'Neshura'

on:
  push:
    tags-ignore:
      - '**'
    branches:
      - '**'

jobs:
  run-tests:
    runs-on: docker
    container: forgejo.neshweb.net/ci-docker-images/rust-node:latest
    steps:
      -
        name: Add Clippy
        run: rustup component add clippy
      -
        name: Checking Out Repository Code
        uses: https://code.forgejo.org/actions/checkout@v3
      -
        name: Set Up Cargo Cache
        uses: actions/cache@v3
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      -
        name: Run Clippy
        run: cargo clippy

2
.gitignore vendored Normal file

@@ -0,0 +1,2 @@
/target
.env

5
.idea/.gitignore vendored Normal file

@@ -0,0 +1,5 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/


@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="MaterialThemeProjectNewConfig">
    <option name="metadata">
      <MTProjectMetadataState>
        <option name="userId" value="35e9767a:19031e016a2:-7f1b" />
      </MTProjectMetadataState>
    </option>
  </component>
</project>

47
.idea/misc.xml Normal file

@@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectInspectionProfilesVisibleTreeState">
    <entry key="Project Default">
      <profile-state>
        <expanded-state>
          <State>
            <id>AccessibilityHTML</id>
          </State>
          <State>
            <id>Code style issuesJavaScript and TypeScript</id>
          </State>
          <State>
            <id>ES2015 migration aidsJavaScript and TypeScript</id>
          </State>
          <State>
            <id>GitHub actions</id>
          </State>
          <State>
            <id>HTML</id>
          </State>
          <State>
            <id>JavaScript and TypeScript</id>
          </State>
          <State>
            <id>RegExp</id>
          </State>
          <State>
            <id>Rust</id>
          </State>
          <State>
            <id>Switch statement issuesJavaScript and TypeScript</id>
          </State>
          <State>
            <id>TypeScriptJavaScript and TypeScript</id>
          </State>
          <State>
            <id>XML</id>
          </State>
          <State>
            <id>XSLT</id>
          </State>
        </expanded-state>
      </profile-state>
    </entry>
  </component>
</project>

1026
Cargo.lock generated Normal file

File diff suppressed because it is too large

34
Cargo.toml Normal file

@@ -0,0 +1,34 @@
[package]
authors = ["Neshura"]
name = "kavita-web-manager-api"
version = "0.1.0"
edition = "2021"
description = "API Backend For Easier Uploading To Kavita"
license = "GPL-3.0-or-later"

[package.metadata.deb]
extended-description = "API Backend For Easier Uploading To Kavita"
maintainer-scripts = "debian/"
revision = "1"
depends = ["libc6", "libssl3", "systemd"]
assets = [
    [
        "target/release/kavita-web-manager-api",
        "/usr/local/bin/kavita-web-manager-api",
        "755",
    ]
]
systemd-units = { enable = false }

[dependencies]
axum = "0.7"
chrono = "0.4"
dotenv = "0.15"
futures = "0.3"
log = "0.4"
parking_lot = "0.12"
systemd-journal-logger = "2.1"
tokio = { version = "1.39", features = ["full"] }
tokio-util = { version = "0.7", features = ["io"] }
urlencoding = "2.1"

13
debian/kavita-web-manager-api.service vendored Normal file

@@ -0,0 +1,13 @@
[Unit]
Description="API Backend For Easier Uploading To Kavita"
After=syslog.target
After=network-online.target

[Service]
Type=simple
ExecStart=/usr/local/bin/kavita-web-manager-api
Restart=always
RestartSec=3

[Install]
WantedBy=multi-user.target

114
src/logging.rs Normal file

@@ -0,0 +1,114 @@
use std::collections::{HashMap, VecDeque};

use chrono::{DateTime, Utc};
use log::Level;
use systemd_journal_logger::connected_to_journal;

/// Logger that writes to journald (or stdout/stderr as a fallback) and keeps a
/// bounded in-memory history of recent events per log level.
#[derive(Clone)]
pub struct Logging {
    mem_log: HashMap<Level, VecDeque<LogEvent>>,
    mem_log_lengths: HashMap<Level, i8>,
}

impl Logging {
    pub fn new(log_lengths: Option<HashMap<Level, i8>>) -> Self {
        let mut mem_log = HashMap::new();
        let mut mem_log_lengths = HashMap::new();
        Level::iter().for_each(|level| {
            let mem_vec = VecDeque::new();
            mem_log.insert(level, mem_vec);
            // Default buffer length per level; -1 means unbounded.
            let length = match level {
                Level::Error => -1,
                Level::Warn => 40,
                Level::Info => 20,
                Level::Debug => 10,
                Level::Trace => 5,
            };
            mem_log_lengths.insert(level, length);
        });
        // Caller-provided lengths override the defaults.
        if let Some(lengths) = log_lengths {
            lengths.iter().for_each(|(level, length)| {
                mem_log_lengths.insert(*level, *length);
            });
        }
        Self {
            mem_log,
            mem_log_lengths
        }
    }

    pub fn debug(&mut self, msg: String) {
        let msg = format!("[DEBUG] {msg}");
        match connected_to_journal() {
            true => log::debug!("{msg}"),
            false => println!("{msg}"),
        }
        self.mem_log(Level::Debug, Some(msg));
    }

    pub fn info(&mut self, msg: String) {
        let msg = format!("[INFO] {msg}");
        match connected_to_journal() {
            true => log::info!("{msg}"),
            false => println!("{msg}"),
        }
        self.mem_log(Level::Info, Some(msg));
    }

    pub fn warn(&mut self, msg: String) {
        let msg = format!("[WARN] {msg}");
        match connected_to_journal() {
            true => log::warn!("{msg}"),
            false => println!("{msg}"),
        }
        self.mem_log(Level::Warn, Some(msg));
    }

    pub fn error(&mut self, msg: String) {
        let msg = format!("[ERROR] {msg}");
        match connected_to_journal() {
            true => log::error!("{msg}"),
            false => eprintln!("{msg}"),
        }
        self.mem_log(Level::Error, Some(msg));
    }

    pub fn get_mem_log(&mut self, level: Level) -> VecDeque<LogEvent> {
        self.mem_log(level, None)
    }

    /// Appends `msg` (if any) to the buffer for `level`, trims the buffer to its
    /// configured maximum length, and returns a copy of the buffer.
    fn mem_log(&mut self, level: Level, msg: Option<String>) -> VecDeque<LogEvent> {
        let max_len = self.mem_log_lengths.get(&level).expect("All Levels Should Have Lengths Initialized");
        let list = self.mem_log.get_mut(&level).expect("All Levels Should Have Empty Lists Initialized");
        if let Some(msg) = msg {
            let now = Utc::now();
            let log_event = LogEvent::new(now, msg);
            list.push_back(log_event);
            if *max_len != -1 {
                while list.len() > *max_len as usize {
                    list.pop_front();
                }
            }
        }
        list.clone()
    }
}

#[derive(Clone)]
pub(crate) struct LogEvent {
    pub date: DateTime<Utc>,
    pub text: String,
}

impl LogEvent {
    pub fn new(time: DateTime<Utc>, message: String) -> Self {
        Self {
            date: time,
            text: message
        }
    }
}
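
A minimal usage sketch of the Logging type (hypothetical, not part of this commit; it assumes placement inside this crate so that crate::logging::Logging is in scope):

    use log::Level;
    use crate::logging::Logging;

    fn demo() {
        // Default buffer lengths apply (e.g. 20 Info events, unbounded Error events).
        let mut log = Logging::new(None);
        log.info("service started".to_owned());
        log.warn("MANGA_DIRECTORY not set".to_owned());

        // Retrieve the buffered events for a level, e.g. to expose them over the API later.
        let recent_warnings = log.get_mem_log(Level::Warn);
        assert_eq!(recent_warnings.len(), 1);
    }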

216
src/main.rs Normal file

@@ -0,0 +1,216 @@
mod logging;

use std::collections::HashMap;
use std::path::{Path, PathBuf};

use axum::body::BodyDataStream;
use axum::extract::{Request, State};
use axum::http::StatusCode;
use axum::Router;
use axum::routing::post;
use dotenv::{dotenv, var};
use futures::TryStreamExt;
use log::LevelFilter;
use systemd_journal_logger::JournalLog;
use tokio::fs::File;
use tokio::{fs, io};
use tokio::io::BufWriter;
use tokio_util::io::StreamReader;
use urlencoding::decode;

use crate::logging::Logging;

#[derive(Clone)]
struct App {
    log: Logging,
    directories: HashMap<String, String>
}

impl App {
    /// Resolves the target directory for each format ("Novel", "Manga", "Hentai")
    /// from environment variables, falling back to defaults under ROOT_DIRECTORY.
    pub fn init_directories(&mut self) {
        let root_dir = match var("ROOT_DIRECTORY") {
            Ok(dir) => {
                self.log.info(format!("ROOT_DIRECTORY set to '{dir}'"));
                dir
            }
            Err(e) => {
                self.log.error(format!("ROOT_DIRECTORY not set: {e}. Aborting."));
                panic!("ROOT_DIRECTORY not set: {e}. Aborting.");
            }
        };
        let novel_dir = match var("NOVEL_DIRECTORY") {
            Ok(dir) => {
                self.log.info(format!("NOVEL_DIRECTORY set to '{root_dir}/{dir}'"));
                format!("{root_dir}/{dir}")
            }
            Err(e) => {
                self.log.error(format!("NOVEL_DIRECTORY not set: {e}. Defaulting to '{root_dir}/novels'."));
                format!("{root_dir}/novels")
            }
        };
        self.directories.insert("Novel".to_owned(), novel_dir);
        let manga_dir = match var("MANGA_DIRECTORY") {
            Ok(dir) => {
                self.log.info(format!("MANGA_DIRECTORY set to '{root_dir}/{dir}'"));
                format!("{root_dir}/{dir}")
            }
            Err(e) => {
                self.log.error(format!("MANGA_DIRECTORY not set: {e}. Defaulting to '{root_dir}/manga'."));
                format!("{root_dir}/manga")
            }
        };
        self.directories.insert("Manga".to_owned(), manga_dir);
        let hentai_dir = match var("HENTAI_DIRECTORY") {
            Ok(dir) => {
                self.log.info(format!("HENTAI_DIRECTORY set to '{root_dir}/{dir}'"));
                format!("{root_dir}/{dir}")
            }
            Err(e) => {
                self.log.error(format!("HENTAI_DIRECTORY not set: {e}. Defaulting to '{root_dir}/hentai'."));
                format!("{root_dir}/hentai")
            }
        };
        self.directories.insert("Hentai".to_owned(), hentai_dir);
    }
}

#[tokio::main]
async fn main() {
    dotenv().expect("Failed to init dotenv");
    JournalLog::new()
        .expect("Systemd-Logger crate error")
        .install()
        .expect("Systemd-Logger crate error");
    match var("LOG_LEVEL") {
        Ok(level) => {
            match level.as_str() {
                "debug" => log::set_max_level(LevelFilter::Debug),
                "info" => log::set_max_level(LevelFilter::Info),
                _ => log::set_max_level(LevelFilter::Info),
            }
        }
        _ => log::set_max_level(LevelFilter::Info),
    }
    let mut app = App {
        log: Logging::new(None),
        directories: HashMap::new(),
    };
    app.init_directories();
    let api = Router::new()
        .route("/upload", post(|State(mut state): State<App>, request: Request| async move {
            upload_file(&mut state, request).await;
        }))
        .with_state(app);
    let listener = tokio::net::TcpListener::bind("[::]:3000").await.unwrap();
    axum::serve(listener, api).await.unwrap();
}

/// Target location of an uploaded file: "<format dir>/<series>/<volume>/<volume>.<extension>".
#[derive(Debug)]
struct FilePath {
    format: String,
    series: String,
    volume: String,
    extension: String
}

impl FilePath {
    fn new() -> Self {
        Self {
            format: "".to_owned(),
            series: "".to_owned(),
            volume: "".to_owned(),
            extension: "".to_owned()
        }
    }

    fn check_valid(&self) -> bool {
        if self.format == "" || self.series == "" || self.volume == "" || self.extension == "" {
            return false
        }
        return true
    }

    fn to_pathbuf(&self) -> PathBuf {
        Path::new(format!("{}/{}/{}/{}.{}", self.format, self.series, self.volume, self.volume, self.extension).as_str()).to_path_buf()
    }
}

/// Parses format/series/volume from the query string, derives the file extension
/// from the Content-Type header, and streams the request body to the resulting path.
async fn upload_file(state: &mut App, request: Request) {
    let params_raw: Vec<&str> = request.uri().query().unwrap().split('&').collect();
    let mut file = FilePath::new();
    params_raw.iter().for_each(|param| {
        let split: Vec<&str> = param.split('=').collect();
        state.log.info(format!("Parsing Parameter Key-Value Pair '{param}'"));
        match split[0] {
            "format" => {
                file.format.clone_from(state.directories.get(split[1]).expect("Assume Valid Format Was Provided"));
            },
            "series" => {
                file.series = decode(split[1]).expect("UTF-8").to_string();
            },
            "volume" => {
                file.volume = decode(split[1]).expect("UTF-8").to_string();
            },
            k => {
                state.log.warn(format!("Parameter {k} is not known and will be ignored"));
            }
        }
    });
    let content_type = request.headers().get("Content-Type").expect("Content Type Should Have Been provided").to_str().expect("Content Type Should Be String");
    file.extension = match content_type {
        "application/epub+zip" => "epub".to_owned(),
        "application/comic+zip" => "cbz".to_owned(),
        "application/pdf" => "pdf".to_owned(),
        ct => {
            state.log.error(format!("Invalid Content Type '{ct}' Provided, Aborting"));
            panic!("Invalid Content Type '{ct}'")
        }
    };
    println!("{:#?}", file);
    if !file.check_valid() {
        //return Err((StatusCode::BAD_REQUEST, "Format not specified".to_owned()));
    }
    let pathbuf = file.to_pathbuf();
    state.log.info(format!("File Path '{}'", pathbuf.clone().display()));
    let file_stream = request.into_body().into_data_stream();
    if let Err(e) = stream_to_file(&pathbuf, file_stream).await {
        state.log.error(format!("{}: {}", e.0, e.1));
    };
}

/// Streams the request body to `path`, creating parent directories as needed.
async fn stream_to_file(path: &PathBuf, stream: BodyDataStream) -> Result<(), (StatusCode, String)>
{
    if !Path::exists(path.parent().unwrap()) {
        fs::create_dir_all(path.parent().unwrap()).await.expect("Unable to Create Path");
    }
    async {
        // Convert the body stream into an AsyncRead and copy it into the target file.
        let body_with_io_error = stream.map_err(|err| io::Error::new(io::ErrorKind::Other, err));
        let body_reader = StreamReader::new(body_with_io_error);
        futures::pin_mut!(body_reader);
        let mut file = BufWriter::new(File::create(path).await?);
        io::copy(&mut body_reader, &mut file).await?;
        Ok::<_, io::Error>(())
    }
    .await
    .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))
}