Enhanced Logging (closes #27) #31
3 changed files with 328 additions and 31 deletions
@@ -27,6 +27,7 @@ struct CloudflareApiResult {
}

pub(crate) struct CloudflareZone {
    name: String,
    email: String,
    key: String,
    id: String,
@@ -36,6 +37,7 @@ impl CloudflareZone {
    pub(crate) fn new(zone: &ZoneConfig) -> Result<Self, VarError> {
        let key = env::var("CF_API_TOKEN")?;
        Ok(Self {
            name: zone.name.clone(),
            email: zone.email.clone(),
            key,
            id: zone.id.clone(),
@@ -75,7 +77,7 @@ impl CloudflareZone {
            Ok(entries.result)
        } else {
            let err_msg = format!("Unable to fetch Cloudflare Zone Entries. Error: {}", response.status());
            let err_msg = format!("Unable to fetch Cloudflare Zone Entries for {}. Error: {}",self.name ,response.status());
            match connected_to_journal() {
                true => error!("[ERROR] {err_msg}"),
                false => eprintln!("[ERROR] {err_msg}"),
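Note: the fetch error now names the zone it failed for. The journal-versus-console dispatch around it recurs throughout this PR; purely as an illustration (not part of the change), the repeated pattern could be folded into helpers like these:

```rust
use log::{error, info};
use systemd_journal_logger::connected_to_journal;

// Hypothetical helpers (not part of this PR) capturing the repeated
// journal-vs-console dispatch: under systemd the message goes through the
// `log` macros to the journal, otherwise it is printed directly.
fn log_error(msg: &str) {
    match connected_to_journal() {
        true => error!("[ERROR] {msg}"),
        false => eprintln!("[ERROR] {msg}"),
    }
}

fn log_info(msg: &str) {
    match connected_to_journal() {
        true => info!("[INFO] {msg}"),
        false => println!("[INFO] {msg}"),
    }
}
```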
@@ -10,7 +10,7 @@ use systemd_journal_logger::connected_to_journal;
use crate::cloudflare::DnsRecordType;
use crate::cloudflare::DnsRecordType::{A, AAAA};

#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub(crate) struct InterfaceConfig {
    pub(crate) host_address: Ipv6Addr,
    pub(crate) interfaces: HashMap<String, Ipv6Addr>,
@@ -45,8 +45,7 @@ impl InterfaceConfig {
                return Err(());
            }
        };
        let interface_ip = host_range.bitor(interface_address);
        Ok(interface_ip)
        Ok(host_range.bitor(interface_address))
    }
}
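Note: the temporary binding is folded into the return expression. For context, this method combines the host prefix with a per-interface suffix; a minimal sketch of that combination (illustrative names and addresses, not the PR's code, which calls `.bitor()` directly on the parsed config values):

```rust
use std::net::Ipv6Addr;

// Illustrative only: the host prefix (e.g. the delegated /64) is OR-ed with a
// per-interface suffix to produce the full address that later ends up in the
// AAAA record.
fn combine(host_range: Ipv6Addr, interface_suffix: Ipv6Addr) -> Ipv6Addr {
    Ipv6Addr::from(u128::from(host_range) | u128::from(interface_suffix))
}

fn main() {
    let prefix: Ipv6Addr = "2001:db8:1:2::".parse().unwrap();
    let suffix: Ipv6Addr = "::1234:5678".parse().unwrap();
    assert_eq!(
        combine(prefix, suffix),
        "2001:db8:1:2::1234:5678".parse::<Ipv6Addr>().unwrap()
    );
}
```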
@@ -61,7 +60,7 @@ impl Default for InterfaceConfig {

///////////////

#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub(crate) struct ZoneEntry {
    pub(crate) name: String,
    pub(crate) r#type: Vec<DnsRecordType>,
@@ -78,7 +77,7 @@ impl Default for ZoneEntry {
    }
}

#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub(crate) struct ZoneConfig {
    pub(crate) email: String,
    pub(crate) name: String,
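Note: `PartialEq` is derived on `InterfaceConfig`, `ZoneEntry` and `ZoneConfig` so the reload loop in `src/main.rs` can compare the freshly loaded configuration against the cached one with `!=`. A minimal sketch of the idea (hypothetical type, not from the PR):

```rust
// Hypothetical type (not from the PR) showing why the PartialEq derives are
// needed: the reload loop compares the freshly loaded config against the
// cached one with `!=`, which only compiles when every field is PartialEq.
#[derive(Debug, Clone, PartialEq)]
struct Config {
    name: String,
    entries: Vec<String>,
}

fn main() {
    let old = Config { name: "example.org".into(), entries: vec!["www".into()] };
    let mut new = old.clone();
    new.entries.push("mail".into());
    if old != new {
        println!("config changed: {old:?} -> {new:?}");
    }
}
```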
@@ -92,7 +91,8 @@ impl ZoneConfig {
        let path = confy::get_configuration_file_path(env!("CARGO_PKG_NAME"), "interfaces").expect("Something went wrong with confy");
        let zones_dir = path.parent().expect("Something went wrong with confy").join("zones.d/");

        let zones = fs::read_dir(zones_dir).unwrap();
        let zones = fs::read_dir(zones_dir).expect("Directory creation handling does not yet exist");

        let mut zone_configs: Vec<Self> = vec![];
@@ -106,7 +106,9 @@ impl ZoneConfig {
                    false => eprintln!("[WARN] {warn_msg}"),
                }
            }
            else {
            else if !path.file_name().expect("Directories should have been filtered out by the leading if")
                .to_str().expect("Conversion to str should not fail for a file name")
                .starts_with('.') {
                let zone_config_path = format!("zones.d/{}", path.file_stem()
                    .expect("stem could not be extracted from filename").to_str()
                    .expect("&OsStr could not be converted to &str"));
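Note: loading now skips hidden files (editor swap files and the like) in `zones.d/` instead of treating everything that is not a directory as a zone file. A rough sketch of the filtering idea, with illustrative names (the PR does this inline while iterating the directory):

```rust
use std::fs;
use std::io;
use std::path::PathBuf;

// Rough sketch with illustrative names (the PR does this inline): skip
// directories, then ignore any file whose name starts with '.' such as
// editor swap files, and collect the remaining zone definitions.
fn visible_zone_files(zones_dir: &str) -> io::Result<Vec<PathBuf>> {
    let mut files = vec![];
    for entry in fs::read_dir(zones_dir)? {
        let path = entry?.path();
        if path.is_dir() {
            continue; // the real code logs a warning for directories
        }
        let hidden = path
            .file_name()
            .and_then(|name| name.to_str())
            .map(|name| name.starts_with('.'))
            .unwrap_or(true);
        if !hidden {
            files.push(path);
        }
    }
    Ok(files)
}
```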
src/main.rs
@@ -10,6 +10,7 @@ use log::{info, warn, error, LevelFilter};
use reqwest::StatusCode;
use systemd_journal_logger::{connected_to_journal, JournalLog};
use crate::cloudflare::{CloudflareZone, DnsRecordType};
use crate::config::{InterfaceConfig, ZoneConfig, ZoneEntry};

mod config;
mod cloudflare;
@@ -145,13 +146,99 @@ impl Addresses {
    }
}

fn compare_zones(old_zone: &ZoneConfig, new_zone: &ZoneConfig) -> Vec<String> {
    let mut info_pieces = vec![];
    if old_zone.id != new_zone.id || old_zone.email != new_zone.email {
        let msg = "changed id or email".to_owned();
        info_pieces.push(msg);
    }

    if old_zone.entries != new_zone.entries {
        let mut added: Vec<&ZoneEntry> = vec![];
        let mut modified: Vec<&ZoneEntry> = vec![];

        new_zone.entries.iter().for_each(|entry| {
            let matches: Vec<&ZoneEntry> = old_zone.entries.iter().filter(|old_entry| {
                if old_entry.name == entry.name {
                    if old_entry != &entry {
                        modified.push(entry);
                    }
                    true
                }
                else {
                    false
                }
            }).collect();

            if matches.is_empty() {
                added.push(entry);
            }
        });

        let deleted: Vec<&ZoneEntry> = old_zone.entries.iter().filter(|old_entry| {
            !new_zone.entries.contains(old_entry) &&
            !new_zone.entries.iter().any(|changed_entry| { changed_entry.name == old_entry.name })
        }).collect();

        if !deleted.is_empty() {
            let mut deleted_entries_vec = vec![];

            for entry in deleted {
                deleted_entries_vec.push(entry.name.clone());
            }

            let deleted_entries = match deleted_entries_vec.len() {
                1 => deleted_entries_vec[0].clone(),
                2 => deleted_entries_vec.join(" & "),
                _ => deleted_entries_vec.join(", "),
            };
            let msg = format!("deleted {deleted_entries}");
            info_pieces.push(msg);
        }

        if !added.is_empty() {
            let mut added_entries_vec = vec![];

            for entry in added {
                added_entries_vec.push(entry.name.clone());
            }

            let added_entries = match added_entries_vec.len() {
                1 => added_entries_vec[0].clone(),
                2 => added_entries_vec.join(" & "),
                _ => added_entries_vec.join(", "),
            };
            let msg = format!("added {added_entries}");
            info_pieces.push(msg);
        }

        if !modified.is_empty() {
            let mut modified_entries_vec = vec![];

            for entry in modified {
                modified_entries_vec.push(entry.name.clone());
            }

            let modified_entries = match modified_entries_vec.len() {
                1 => modified_entries_vec[0].clone(),
                2 => modified_entries_vec.join(" & "),
                _ => modified_entries_vec.join(", "),
            };
            let msg = format!("modified {modified_entries}");
            info_pieces.push(msg);
        }
    }

    info_pieces
}

fn main() {
    dotenv().ok();
    JournalLog::new().expect("Systemd-Logger crate error").install().expect("Systemd-Logger crate error");
    log::set_max_level(LevelFilter::Info);

    let mut ifaces = config::InterfaceConfig::load().unwrap();
    let mut zone_cfgs= config::ZoneConfig::load().unwrap();
    let mut ifaces = InterfaceConfig::load().unwrap();
    let mut zone_cfgs = ZoneConfig::load().unwrap();

    let mut now = Utc::now() - Duration::seconds(59);
    let mut start = now;
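Note on the new `compare_zones` helper: it returns human-readable fragments such as `deleted foo` or `added bar & baz`, which the caller joins into a single log line per changed zone. Roughly how it is consumed later in this diff (the wrapper function below is hypothetical):

```rust
// Hypothetical wrapper, only to show how the helper is consumed:
// `old_zone` is the cached config, `new_zone` the freshly loaded one.
fn report_zone_change(old_zone: &ZoneConfig, new_zone: &ZoneConfig) {
    let pieces = compare_zones(old_zone, new_zone);
    if !pieces.is_empty() {
        let info_msg = format!("{} for {}", pieces.join(", "), new_zone.name);
        match connected_to_journal() {
            true => info!("[INFO] {info_msg}"),
            false => println!("[INFO] {info_msg}"),
        }
    }
}
```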
@@ -163,11 +250,88 @@ fn main() {

    loop {
        now = Utc::now();
        if now >= start + Duration::seconds(60) {
        if now >= start + Duration::seconds(10) { // DEBUG was 60
            start = now;

            match config::InterfaceConfig::load() {
                Ok(new) => ifaces = new,
            match InterfaceConfig::load() {
                Ok(new_cfg) => {
                    if ifaces != new_cfg {
                        if ifaces.host_address != new_cfg.host_address {
                            let info_msg = format!("Host address in interfaces.toml changed from '{}' to '{}'", ifaces.host_address, new_cfg.host_address);
                            match connected_to_journal() {
                                true => info!("[INFO] {info_msg}"),
                                false => println!("[INFO] {info_msg}"),
                            }
                        }

                        if ifaces.interfaces != new_cfg.interfaces {
                            let mut new: Vec<(&String, &Ipv6Addr)> = vec![];
                            let mut modified: Vec<(&String, &Ipv6Addr)> = vec![];

                            new_cfg.interfaces.iter().for_each(|(interface, address)| {
                                if ifaces.interfaces.contains_key(interface) {
                                    if ifaces.interfaces.get(interface) != Some(address) {
                                        modified.push((interface, address));
                                    }
                                }
                                else {
                                    let matches: Vec<&Ipv6Addr> = ifaces.interfaces.values().filter(|addr| {
                                        addr == &address
                                    }).collect();

                                    if matches.is_empty() {
                                        new.push((interface, address));
                                    }
                                    else {
                                        modified.push((interface, address));
                                    }
                                }
                            });

                            let deleted: Vec<(&String, &Ipv6Addr)> = ifaces.interfaces.iter().filter(|(interface, address)| {
                                !new_cfg.interfaces.contains_key(*interface) && !modified.iter().any(|(_, new_addr)| { new_addr == address })
                            }).collect();

                            for (name, addr) in deleted {
                                let info_msg = format!("Deleted interface '{name}' with address '{addr}'");
                                match connected_to_journal() {
                                    true => info!("[INFO] {info_msg}"),
                                    false => println!("[INFO] {info_msg}"),
                                }
                            }

                            for (name, addr) in new {
                                let info_msg = format!("Added interface '{name}' with address '{addr}'");
                                match connected_to_journal() {
                                    true => info!("[INFO] {info_msg}"),
                                    false => println!("[INFO] {info_msg}"),
                                }
                            }

                            for (name, addr) in modified {
                                let info_msg = if ifaces.interfaces.contains_key(name) {
                                    let old_addr = ifaces.interfaces.get(name).expect("contains check on ifaces was successful");
                                    format!("Changed interface address of '{name}' from '{old_addr}' to '{addr}'")
                                }
                                else {
                                    let old_name = ifaces.interfaces.iter()
                                        .find(|(_, old_addr)| { old_addr == &addr })
                                        .expect("modified entry should not exist if this fails")
                                        .0;
                                    format!("Changed interface name for '{addr}' from '{old_name}' to '{name}'")
                                };

                                match connected_to_journal() {
                                    true => info!("[INFO] {info_msg}"),
                                    false => println!("[INFO] {info_msg}"),
                                }
                            }
                        }

                        ifaces = new_cfg
                    }

                },
                Err(e) => {
                    let err_msg = format!("Unable to load interfaces.toml with error: {}", e);
                    match connected_to_journal() {
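Note: the interface reload now reports what changed instead of silently replacing the cached config. A condensed sketch of the classification heuristic used above (hypothetical helper, not the PR's code): an entry whose name is known but whose address changed, or whose address is known under a different name, counts as modified; an unknown name with an unknown address counts as added; old names that neither survive nor match a modified entry's address count as deleted.

```rust
use std::collections::HashMap;
use std::net::Ipv6Addr;

// Hypothetical helper condensing the inline classification above.
// Returns (added, modified, deleted) interface names.
fn classify<'a>(
    old: &'a HashMap<String, Ipv6Addr>,
    new: &'a HashMap<String, Ipv6Addr>,
) -> (Vec<&'a String>, Vec<&'a String>, Vec<&'a String>) {
    let mut added = vec![];
    let mut modified = vec![];
    for (name, addr) in new {
        if old.contains_key(name) {
            if old.get(name) != Some(addr) {
                modified.push(name); // same name, new address
            }
        } else if old.values().any(|a| a == addr) {
            modified.push(name); // same address, new name (a rename)
        } else {
            added.push(name);
        }
    }
    // Old names that neither survive nor match a modified entry's address.
    let deleted = old
        .keys()
        .filter(|name| {
            !new.contains_key(*name)
                && !modified.iter().any(|m| new.get(*m) == old.get(*name))
        })
        .collect();
    (added, modified, deleted)
}
```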
@@ -177,8 +341,137 @@ fn main() {
                }
            }

            match config::ZoneConfig::load() {
                Ok(new) => zone_cfgs = new,
            match ZoneConfig::load() {
                Ok(new_cfgs) => {
                    if zone_cfgs != new_cfgs {
                        if zone_cfgs.len() != new_cfgs.len() {
                            let new_zones: Vec<&ZoneConfig> = new_cfgs.iter().filter(|zone_cfg| {
                                !zone_cfgs.contains(zone_cfg)
                            }).collect();

                            let deleted_zones: Vec<&ZoneConfig> = zone_cfgs.iter().filter(|zone_cfg| {
                                !new_cfgs.contains(zone_cfg)
                            }).collect();

                            for new_zone in new_zones {
                                let name = new_zone.name.as_str();
                                let entry_count = new_zone.entries.len();
                                let info_msg = format!("Added Zone '{name}' with {entry_count} entries");
                                match connected_to_journal() {
                                    true => info!("[INFO] {info_msg}"),
                                    false => println!("[INFO] {info_msg}"),
                                }
                            }

                            for deleted_zone in deleted_zones {
                                let name = deleted_zone.name.as_str();
                                let entry_count = deleted_zone.entries.len();
                                let info_msg = format!("Deleted Zone '{name}' with {entry_count} entries");
                                match connected_to_journal() {
                                    true => info!("[INFO] {info_msg}"),
                                    false => println!("[INFO] {info_msg}"),
                                }
                            }
                        }
                        else {
                            let changed_zones: Vec<&ZoneConfig> = new_cfgs.iter().filter(|new_cfg| {
                                zone_cfgs.iter().any(|old_cfg| {
                                    old_cfg.name == new_cfg.name && &old_cfg != new_cfg
                                })
                            }).collect();

                            let deleted_zones: Vec<&ZoneConfig> = zone_cfgs.iter().filter(|old_cfg| {
                                !new_cfgs.iter().any(|new_cfg| {
                                    old_cfg.name == new_cfg.name
                                })
                            }).collect();

                            let added_zones: Vec<&ZoneConfig> = new_cfgs.iter().filter(|new_cfg| {
                                !zone_cfgs.iter().any(|old_cfg| {
                                    old_cfg.name == new_cfg.name
                                })
                            }).collect();

                            if deleted_zones.len() == 1 && added_zones.len() == 1 {
                                let new_zone = added_zones[0];
                                let old_zone = deleted_zones[0];

                                let mut info_pieces = vec![];

                                {
                                    let msg = format!("name from '{}' to '{}'", old_zone.name, new_zone.name);
                                    info_pieces.push(msg);
                                }

                                let info_pieces = [info_pieces, compare_zones(old_zone, new_zone)].concat();

                                println!("{}", info_pieces.join(", "));
                            }
                            else {
                                if !deleted_zones.is_empty() {
                                    let mut info_pieces = vec![];
                                    for deleted_zone in deleted_zones {
                                        info_pieces.push(deleted_zone.name.clone());
                                    }
                                    let deleted_info = match info_pieces.len() {
                                        1 => info_pieces[0].clone(),
                                        2 => info_pieces.join(" & "),
                                        _ => info_pieces.join(", "),
                                    };
                                    let info_msg = format!("Deleted {deleted_info}");
                                    match connected_to_journal() {
                                        true => info!("[INFO] {info_msg}"),
                                        false => println!("[INFO] {info_msg}"),
                                    }
                                }

                                if !added_zones.is_empty() {
                                    let mut info_pieces = vec![];
                                    for added_zone in added_zones {
                                        info_pieces.push(added_zone.name.clone());
                                    }
                                    let added_info = match info_pieces.len() {
                                        1 => info_pieces[0].clone(),
                                        2 => info_pieces.join(" & "),
                                        _ => info_pieces.join(", "),
                                    };
                                    let info_msg = format!("Added {added_info}");
                                    match connected_to_journal() {
                                        true => info!("[INFO] {info_msg}"),
                                        false => println!("[INFO] {info_msg}"),
                                    }
                                }
                            }

                            for changed_zone in changed_zones {
                                // find the old config entry with the same (unchanged) name
                                let old_zone = zone_cfgs.iter().find(|zone_cfg| {
                                    zone_cfg.name == changed_zone.name
                                }).expect("This element should exist because it was added to the changed_zones vector");

                                let info_pieces = compare_zones(old_zone, changed_zone);

                                let changed_info = match info_pieces.len() {
                                    1 => info_pieces[0].clone(),
                                    2 => info_pieces.join(" & "),
                                    _ => info_pieces.join(", "),
                                };
                                let info_msg_raw = format!("{changed_info} for {}", changed_zone.name);
                                let mut info_msg_chars = info_msg_raw.chars();
                                let info_msg = match info_msg_chars.next() {
                                    None => String::new(),
                                    Some(first) => first.to_uppercase().collect::<String>() + info_msg_chars.as_str(),
                                };
                                match connected_to_journal() {
                                    true => info!("[INFO] {info_msg}"),
                                    false => println!("[INFO] {info_msg}"),
                                }
                            }
                        }

                        zone_cfgs = new_cfgs
                    }
                }
                Err(e) => {
                    let err_msg = format!("Unable to load from zones.d with error: {}", e);
                    match connected_to_journal() {
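Note: the "one name / two names joined with ' & ' / several names joined with ', '" formatting appears several times in this PR (in `compare_zones` and in the zone add/delete reporting). Purely as a suggestion, a hypothetical helper that would capture it:

```rust
// Hypothetical helper (not part of the PR) capturing the repeated
// "one name / two names with ' & ' / several names with ', '" formatting.
fn join_names(names: &[String]) -> String {
    match names.len() {
        0 => String::new(),
        1 => names[0].clone(),
        2 => names.join(" & "),
        _ => names.join(", "),
    }
}
```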
@@ -259,23 +552,23 @@ fn main() {
                    cf_entry.name == entry.name && &cf_entry.r#type == r#type
                });

                match cf_entry.unwrap().r#type {
                    DnsRecordType::A => {
                        let cf_ip = Ipv4Addr::from_str(cf_entry.unwrap().content.as_str()).expect("Cloudflare return should always be valid IP");
                        if Some(cf_ip) == ipv4 {
                            continue
                        }
                    },
                    DnsRecordType::AAAA => {
                        let cf_ip = Ipv6Addr::from_str(cf_entry.unwrap().content.as_str()).expect("Cloudflare return should always be valid IP");
                        if Some(cf_ip) == ipv6 {
                            continue
                        }
                    },
                    _ => {},
                }

                if let Some(cf_entry) = cf_entry {
                    match cf_entry.r#type {
                        DnsRecordType::A => {
                            let cf_ip = Ipv4Addr::from_str(cf_entry.content.as_str()).expect("Cloudflare return should always be valid IP");
                            if Some(cf_ip) == ipv4 {
                                continue
                            }
                        },
                        DnsRecordType::AAAA => {
                            let cf_ip = Ipv6Addr::from_str(cf_entry.content.as_str()).expect("Cloudflare return should always be valid IP");
                            if Some(cf_ip) == ipv6 {
                                continue
                            }
                        },
                        _ => {},
                    }

                    if cf_zone.update(entry, r#type, &cf_entry.id, ipv6, ipv4).is_ok() {
                        let info_msg = format!("Updated {} DNS Record for entry '{}' in zone '{}'", r#type, entry.name, zone.name);
                        match connected_to_journal() {
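Note: the repeated `cf_entry.unwrap()` calls are replaced by a single `if let Some(cf_entry) = cf_entry` binding, so the comparison and update path only runs when a matching record was actually found. A minimal sketch of the pattern (illustrative, unrelated to the Cloudflare types):

```rust
// Minimal illustration of the refactor: bind the Option once with `if let`
// instead of calling .unwrap() on it repeatedly.
fn describe(found: Option<&str>) -> String {
    if let Some(name) = found {
        format!("existing record: {name}")
    } else {
        "no matching record".to_owned()
    }
}
```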