// dkl/src/bin/dkl.rs

use clap::{CommandFactory, Parser, Subcommand};
use eyre::{format_err, Result};
use log::{debug, error};
use tokio::fs;
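
/// Command-line interface of `dkl`, parsed by clap.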
#[derive(Parser)]
#[command()]
struct Cli {
    #[command(subcommand)]
    command: Command,
}
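
/// Available subcommands: apply a configuration's files, run a command with
/// its output captured to log files, or operate on stored logs.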
#[derive(Subcommand)]
enum Command {
    ApplyConfig {
        /// config file to use
        #[arg(default_value = "config.yaml")]
        config: String,
        /// glob filters to select files to apply
        #[arg(short = 'F', long)]
        filters: Vec<String>,
        /// path prefix (aka chroot)
        #[arg(short = 'P', long, default_value = "/")]
        prefix: String,
    },
    Logger {
        /// Path where the logs are stored
        #[arg(long, short = 'p', default_value = "/var/log", env = "DKL_LOG_PATH")]
        log_path: String,
        /// Name of the log instead of the command's basename
        #[arg(long, short = 'n')]
        log_name: Option<String>,
        /// prefix log lines with time & stream
        #[arg(long)]
        with_prefix: bool,
        command: String,
        args: Vec<String>,
    },
    Log {
        /// Path where the logs are stored
        #[arg(long, short = 'p', default_value = "/var/log", env = "DKL_LOG_PATH")]
        log_path: String,
        log_name: String,
        #[command(subcommand)]
        op: LogOp,
    },
}
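
/// Entry point: handles shell completion requests, parses the arguments,
/// initializes logging, then dispatches to the selected subcommand.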
#[tokio::main(flavor = "current_thread")]
async fn main() -> Result<()> {
    clap_complete::CompleteEnv::with_factory(Cli::command).complete();
    let cli = Cli::parse();
    env_logger::builder()
        .parse_filters("info")
        .parse_default_env()
        .init();

    use Command as C;
    match cli.command {
        C::ApplyConfig {
            config,
            filters,
            prefix,
        } => {
            let filters = parse_globs(&filters)?;
            apply_config(&config, &filters, &prefix).await
        }
        C::Logger {
            ref log_path,
            ref log_name,
            with_prefix,
            command,
            args,
        } => {
            let command = command.as_str();
            let log_name = log_name.as_deref().unwrap_or_else(|| basename(command));
            dkl::logger::Logger {
                log_path,
                log_name,
                with_prefix,
            }
            .run(command, &args)
            .await
        }
        C::Log {
            log_path,
            log_name,
            op,
        } => op.run(&log_path, &log_name).await,
    }
}
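
/// Reads the YAML config, keeps only the files matching the glob filters
/// (or all files when no filter is given), and applies them under `chroot`.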
async fn apply_config(config_file: &str, filters: &[glob::Pattern], chroot: &str) -> Result<()> {
    let config = fs::read_to_string(config_file).await?;
    let config: dkl::Config = serde_yaml::from_str(&config)?;

    let files = if filters.is_empty() {
        config.files
    } else {
        (config.files.into_iter())
            .filter(|f| filters.iter().any(|filter| filter.matches(&f.path)))
            .collect()
    };
    dkl::apply::files(&files, chroot).await
}
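
/// Operations on a named log: list its files, prune old files, or dump
/// their contents to stdout.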
#[derive(Subcommand)]
enum LogOp {
    Ls {
        #[arg(short = 'l', long)]
        detail: bool,
    },
    Cleanup {
        /// days of log to keep
        days: u64,
    },
    Cat {
        /// print logs >= since
        since: Option<String>,
        /// print logs <= until
        until: Option<String>,
    },
}

impl LogOp {
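    /// Runs this operation over the log files found for `log_name` under `log_path`.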
    async fn run(self, log_path: &str, log_name: &str) -> Result<()> {
        let mut files = dkl::logger::log_files(&log_path, &log_name).await?;
        files.sort();
        use LogOp as Op;
        match self {
            Op::Ls { detail } => {
                for f in files {
                    let path = f.path.to_string_lossy();
                    if detail {
                        println!("{ts} {path}", ts = f.timestamp);
                    } else {
                        println!("{path}");
                    }
                }
            }
            Op::Cleanup { days } => {
                let deadline = chrono::Utc::now() - chrono::Days::new(days);
                let deadline = dkl::logger::trunc_ts(deadline);
                debug!("cleanup {log_name} logs < {deadline}");
                for f in files {
                    if f.timestamp < deadline {
                        debug!("removing {}", f.path.to_string_lossy());
                        fs::remove_file(f.path).await?;
                    }
                }
            }
            Op::Cat { since, until } => {
                let since = parse_ts_arg(since)?;
                let until = parse_ts_arg(until)?;
                let mut out = tokio::io::stdout();
                for f in files {
                    if !since.is_none_or(|since| f.timestamp >= since) {
                        continue;
                    }
                    if !until.is_none_or(|until| f.timestamp <= until) {
                        continue;
                    }
                    debug!(
                        "cat {path} (timestamp={ts}, compressed={comp})",
                        path = f.path.to_string_lossy(),
                        ts = f.timestamp.to_rfc3339(),
                        comp = f.compressed
                    );
                    if let Err(e) = f.copy_to(&mut out).await {
                        error!("{file}: {e}", file = f.path.to_string_lossy());
                    }
                }
            }
        }

        Ok(())
    }
}
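
/// Parses an optional timestamp argument, mapping parse failures to a
/// readable error.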
fn parse_ts_arg(ts: Option<String>) -> Result<Option<dkl::logger::Timestamp>> {
    match ts {
        None => Ok(None),
        Some(ts) => {
            let ts = dkl::logger::parse_ts(&ts)
                .map_err(|e| format_err!("invalid timestamp: {ts}: {e}"))?;
            Ok(Some(ts))
        }
    }
}
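
/// Returns the last path component, i.e. everything after the final '/'.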
fn basename(path: &str) -> &str {
    path.rsplit_once('/').map_or(path, |split| split.1)
}
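
/// Compiles the glob filters, logging every invalid pattern before failing.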
fn parse_globs(filters: &[String]) -> Result<Vec<glob::Pattern>> {
    let mut errors = false;
    let filters = (filters.iter())
        .filter_map(|s| {
            glob::Pattern::new(s)
                .inspect_err(|e| {
                    error!("invalid filter: {s:?}: {e}");
                    errors = true;
                })
                .ok()
        })
        .collect();
    if errors {
        return Err(format_err!("invalid filters"));
    }
    Ok(filters)
}