Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 43 additions & 16 deletions src/disk.rs
Original file line number Diff line number Diff line change
@@ -1,27 +1,44 @@
use crate::{InboundPaymentInfoStorage, NetworkGraph, OutboundPaymentInfoStorage};
use bitcoin::Network;
use chrono::Utc;
use chrono::{NaiveDate, Utc};
use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringDecayParameters};
use lightning::util::hash_tables::new_hash_map;
use lightning::util::logger::{Level, Logger, Record};
use lightning::util::ser::{Readable, ReadableArgs};
use std::fs;
use std::fs::File;
use std::io::{BufReader, Write};
use std::io::{BufReader, BufWriter, Write};
use std::path::Path;
use std::sync::Arc;
use std::sync::{Arc, Mutex};

pub(crate) const INBOUND_PAYMENTS_FNAME: &str = "inbound_payments";
pub(crate) const OUTBOUND_PAYMENTS_FNAME: &str = "outbound_payments";

/// Mutable state for the daily-rotating log sink. All fields are accessed
/// under the `Mutex` held in `FilesystemLogger`, so rotation and writes
/// happen atomically with respect to concurrent loggers.
struct LogWriter {
    // Buffered handle to the currently open log file; buffered so each log
    // record does not translate into an unbuffered syscall-sized write.
    file: BufWriter<File>,
    // UTC date that `file` corresponds to; compared against "now" to decide
    // when a new day's file must be opened (daily rotation).
    current_date: NaiveDate,
    // Directory the per-day log files live in; retained so rotation can open
    // the next day's file. NOTE(review): assumed to be the `<data_dir>/logs`
    // path created in `FilesystemLogger::new` — confirm against callers.
    logs_dir: String,
}

impl LogWriter {
fn open_log_file(logs_dir: &str, date: NaiveDate) -> std::io::Result<BufWriter<File>> {
let path = format!("{}/logs-{}.txt", logs_dir, date.format("%Y-%m-%d"));
let file = fs::OpenOptions::new().create(true).append(true).open(path)?;
Ok(BufWriter::new(file))
}
}

/// A `lightning::util::logger::Logger` implementation that appends records to
/// per-day files on disk, rotating when the UTC date changes.
pub(crate) struct FilesystemLogger {
    // All mutable logging state (open file handle, current date, log directory)
    // sits behind one mutex so `log()` can rotate and write atomically from any
    // thread. The stale `data_dir: String` field from the pre-change version is
    // removed: the constructor initializes only `writer`, so keeping the old
    // field would fail to compile (missing field in the struct literal).
    writer: Mutex<LogWriter>,
}
impl FilesystemLogger {
    /// Creates the `logs/` directory under `data_dir` and opens today's log
    /// file, seeding the rotation state with the current UTC date.
    ///
    /// # Panics
    /// Panics if the directory cannot be created or the initial log file
    /// cannot be opened — logging is treated as essential at startup.
    ///
    /// (The pre-change body that assigned `data_dir` directly was removed:
    /// the diff left both the old and new bodies interleaved, which is not
    /// valid Rust; only the post-change body is kept.)
    pub(crate) fn new(data_dir: String) -> Self {
        let logs_dir = format!("{}/logs", data_dir);
        // Borrow instead of cloning — create_dir_all only needs a path ref.
        fs::create_dir_all(&logs_dir).unwrap();
        let today = Utc::now().date_naive();
        let file =
            LogWriter::open_log_file(&logs_dir, today).expect("Failed to open initial log file");
        Self { writer: Mutex::new(LogWriter { file, current_date: today, logs_dir }) }
    }
}
impl Logger for FilesystemLogger {
Expand All @@ -30,26 +47,36 @@ impl Logger for FilesystemLogger {
// Gossip-level logs are incredibly verbose, and thus we skip them by default.
return;
}
let now = Utc::now();
let raw_log = record.args.to_string();
let log = format!(
"{} {:<5} [{}:{}] {}\n",
// Note that a "real" lightning node almost certainly does *not* want subsecond
// precision for message-receipt information as it makes log entries a target for
// deanonymization attacks. For testing, however, its quite useful.
Utc::now().format("%Y-%m-%d %H:%M:%S%.3f"),
now.format("%Y-%m-%d %H:%M:%S%.3f"),
record.level.to_string(),
record.module_path,
record.line,
raw_log
);
let logs_file_path = format!("{}/logs.txt", self.data_dir.clone());
fs::OpenOptions::new()
.create(true)
.append(true)
.open(logs_file_path)
.unwrap()
.write_all(log.as_bytes())
.unwrap();
let Ok(mut writer) = self.writer.lock() else {
// Mutex poisoned — another thread panicked while holding it.
// Falling back to stderr is the safest option; never panic in a logger.
eprintln!("{}", log);
return;
};
// Daily rotation: if the date has changed, open a new log file.
let today = now.date_naive();
if today != writer.current_date {
if let Ok(new_file) = LogWriter::open_log_file(&writer.logs_dir, today) {
writer.file = new_file;
writer.current_date = today;
}
// If the new file can't be opened, keep writing to the old one.
}
let _ = writer.file.write_all(log.as_bytes());
let _ = writer.file.flush();
}
}

Expand Down
8 changes: 7 additions & 1 deletion src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1048,7 +1048,13 @@ async fn start_ldk() {
.expect("Failed to bind to listen port - is something else already listening on it?");
loop {
let peer_mgr = peer_manager_connection_handler.clone();
let tcp_stream = listener.accept().await.unwrap().0;
let (tcp_stream, _) = match listener.accept().await {
Ok(conn) => conn,
Err(e) => {
eprintln!("Failed to accept inbound connection: {}", e);
continue;
},
};
if stop_listen.load(Ordering::Acquire) {
return;
}
Expand Down