Compare commits


6 Commits

SHA1 Message Date
be5db231d9 remove rsmount dependency 2025-07-21 13:18:08 +02:00
b01c41b856 chore 2025-07-21 12:26:44 +02:00
4ccda5039b logger: oh, with_extension is replace, not append 2025-07-21 10:11:05 +02:00
852738bec3 logger: use official zst extension (not zstd) 2025-07-21 09:49:02 +02:00
7d02d8f932 add dynlay 2025-07-21 01:41:03 +02:00
52c23653ac more log subcommands 2025-07-20 22:30:10 +02:00
8 changed files with 480 additions and 61 deletions

Cargo.lock (generated)

@@ -143,9 +143,9 @@ checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
 [[package]]
 name = "cc"
-version = "1.2.29"
+version = "1.2.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c1599538de2394445747c8cf7935946e3cc27e9625f889d979bfb2aaf569362"
+checksum = "deec109607ca693028562ed836a5f1c4b8bd77755c4e132fc5ce11b0b6211ae7"
 dependencies = [
  "jobserver",
  "libc",
@@ -158,6 +158,12 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
 
+[[package]]
+name = "cfg_aliases"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
+
 [[package]]
 name = "chrono"
 version = "0.4.41"
@@ -263,9 +269,13 @@ dependencies = [
  "clap_complete",
  "env_logger",
  "eyre",
+ "futures",
  "futures-util",
  "glob",
+ "hex",
  "log",
+ "nix",
+ "openssl",
  "page_size",
  "reqwest",
  "serde",
@@ -369,6 +379,21 @@ dependencies = [
  "percent-encoding",
 ]
 
+[[package]]
+name = "futures"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
 [[package]]
 name = "futures-channel"
 version = "0.3.31"
@@ -376,6 +401,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
 dependencies = [
  "futures-core",
+ "futures-sink",
 ]
 
@@ -384,6 +410,17 @@ version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
 
+[[package]]
+name = "futures-executor"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
 [[package]]
 name = "futures-io"
 version = "0.3.31"
@@ -419,6 +456,7 @@ version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
 dependencies = [
+ "futures-channel",
  "futures-core",
  "futures-io",
  "futures-macro",
@@ -496,6 +534,12 @@ version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
 
+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
 [[package]]
 name = "http"
 version = "1.3.1"
@@ -902,6 +946,18 @@ dependencies = [
  "tempfile",
 ]
 
+[[package]]
+name = "nix"
+version = "0.30.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "cfg_aliases",
+ "libc",
+]
+
 [[package]]
 name = "num-traits"
 version = "0.2.19"
@@ -1262,9 +1318,9 @@ dependencies = [
 [[package]]
 name = "serde_json"
-version = "1.0.140"
+version = "1.0.141"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
+checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
 dependencies = [
  "itoa",
  "memchr",

Cargo.toml

@@ -18,11 +18,15 @@ clap = { version = "4.5.40", features = ["derive", "env"] }
 clap_complete = { version = "4.5.54", features = ["unstable-dynamic"] }
 env_logger = "0.11.8"
 eyre = "0.6.12"
+futures = "0.3.31"
 futures-util = "0.3.31"
 glob = "0.3.2"
+hex = "0.4.3"
 log = "0.4.27"
+nix = { version = "0.30.1", features = ["user"] }
+openssl = "0.10.73"
 page_size = "0.6.0"
-reqwest = { version = "0.12.20", features = ["json", "stream"] }
+reqwest = { version = "0.12.20", features = ["json", "stream", "native-tls"] }
 serde = { version = "1.0.219", features = ["derive"] }
 serde_json = "1.0.140"
 serde_yaml = "0.9.34"

src/main.rs

@@ -24,21 +24,46 @@ enum Command {
         prefix: String,
     },
     Logger {
-        #[arg(long, short = 'p', default_value = "/var/log")]
+        /// Path where the logs are stored
+        #[arg(long, short = 'p', default_value = "/var/log", env = "DKL_LOG_PATH")]
         log_path: String,
+        /// Name of the log instead of the command's basename
         #[arg(long, short = 'n')]
         log_name: Option<String>,
+        /// prefix log lines with time & stream
         #[arg(long)]
         with_prefix: bool,
         command: String,
         args: Vec<String>,
     },
     Log {
-        #[arg(long, short = 'p', default_value = "/var/log")]
+        /// Path where the logs are stored
+        #[arg(long, short = 'p', default_value = "/var/log", env = "DKL_LOG_PATH")]
         log_path: String,
+        /// Name of the log set to operate on.
         log_name: String,
-        since: Option<String>,
-        until: Option<String>,
+        #[command(subcommand)]
+        op: LogOp,
+    },
+    Dynlay {
+        layer: String,
+        version: String,
+        #[arg(
+            long,
+            short = 'u',
+            default_value = "https://dkl.novit.io/dist/layers",
+            env = "DKL_DYNLAY_URL"
+        )]
+        url_prefix: String,
+        #[arg(
+            long,
+            short = 'd',
+            default_value = "/opt/dynlay",
+            env = "DKL_DYNLAY_DIR"
+        )]
+        layers_dir: String,
+        #[arg(long, default_value = "/")]
+        chroot: std::path::PathBuf,
     },
 }
@@ -84,33 +109,118 @@ async fn main() -> Result<()> {
         C::Log {
             log_path,
             log_name,
-            since,
-            until,
-        } => {
-            let since = parse_ts_arg(since)?;
-            let until = parse_ts_arg(until)?;
-
-            let mut files = dkl::logger::log_files(&log_path, &log_name).await?;
-            files.sort();
-
-            let mut out = tokio::io::stdout();
-            for f in files {
-                if !since.is_none_or(|since| f.timestamp >= since) {
-                    continue;
-                }
-                if !until.is_none_or(|until| f.timestamp <= until) {
-                    continue;
-                }
-                debug!("{f:?}");
-                f.copy_to(&mut out).await?;
-            }
-            Ok(())
+            op,
+        } => op.run(&log_path, &log_name).await,
+        C::Dynlay {
+            ref layer,
+            ref version,
+            ref url_prefix,
+            ref layers_dir,
+            chroot,
+        } => {
+            dkl::dynlay::Dynlay {
+                url_prefix,
+                layers_dir,
+                chroot,
+            }
+            .install(layer, version)
+            .await
         }
     }
 }
 
+async fn apply_config(config_file: &str, filters: &[glob::Pattern], chroot: &str) -> Result<()> {
+    let config = fs::read_to_string(config_file).await?;
+    let config: dkl::Config = serde_yaml::from_str(&config)?;
+
+    let files = if filters.is_empty() {
+        config.files
+    } else {
+        (config.files.into_iter())
+            .filter(|f| filters.iter().any(|filter| filter.matches(&f.path)))
+            .collect()
+    };
+
+    dkl::apply::files(&files, chroot).await
+}
+
+#[derive(Subcommand)]
+enum LogOp {
+    Ls {
+        #[arg(short = 'l', long)]
+        detail: bool,
+    },
+    Cleanup {
+        /// days of log to keep
+        days: u64,
+    },
+    Cat {
+        /// print logs >= since
+        since: Option<String>,
+        /// print logs <= until
+        until: Option<String>,
+    },
+}
+
+impl LogOp {
+    async fn run(self, log_path: &str, log_name: &str) -> Result<()> {
+        let mut files = dkl::logger::log_files(&log_path, &log_name).await?;
+        files.sort();
+
+        use LogOp as Op;
+        match self {
+            Op::Ls { detail } => {
+                for f in files {
+                    let path = f.path.to_string_lossy();
+                    if detail {
+                        println!("{ts} {path}", ts = f.timestamp);
+                    } else {
+                        println!("{path}");
+                    }
+                }
+            }
+            Op::Cleanup { days } => {
+                let deadline = chrono::Utc::now() - chrono::Days::new(days);
+                let deadline = dkl::logger::trunc_ts(deadline);
+                debug!("cleanup {log_name} logs < {deadline}");
+
+                for f in files {
+                    if f.timestamp < deadline {
+                        debug!("removing {}", f.path.to_string_lossy());
+                        fs::remove_file(f.path).await?;
+                    }
+                }
+            }
+            Op::Cat { since, until } => {
+                let since = parse_ts_arg(since)?;
+                let until = parse_ts_arg(until)?;
+
+                let mut out = tokio::io::stdout();
+                for f in files {
+                    if !since.is_none_or(|since| f.timestamp >= since) {
+                        continue;
+                    }
+                    if !until.is_none_or(|until| f.timestamp <= until) {
+                        continue;
+                    }
+                    debug!(
+                        "cat {path} (timestamp={ts}, compressed={comp})",
+                        path = f.path.to_string_lossy(),
+                        ts = f.timestamp.to_rfc3339(),
+                        comp = f.compressed
+                    );
+                    if let Err(e) = f.copy_to(&mut out).await {
+                        error!("{file}: {e}", file = f.path.to_string_lossy());
+                    }
+                }
+            }
+        }
+
+        Ok(())
+    }
+}
+
 fn parse_ts_arg(ts: Option<String>) -> Result<Option<dkl::logger::Timestamp>> {
     match ts {
         None => Ok(None),
@@ -126,21 +236,6 @@ fn basename(path: &str) -> &str {
     path.rsplit_once('/').map_or(path, |split| split.1)
 }
 
-async fn apply_config(config_file: &str, filters: &[glob::Pattern], chroot: &str) -> Result<()> {
-    let config = fs::read_to_string(config_file).await?;
-    let config: dkl::Config = serde_yaml::from_str(&config)?;
-
-    let files = if filters.is_empty() {
-        config.files
-    } else {
-        (config.files.into_iter())
-            .filter(|f| filters.iter().any(|filter| filter.matches(&f.path)))
-            .collect()
-    };
-
-    dkl::apply::files(&files, chroot).await
-}
-
 fn parse_globs(filters: &[String]) -> Result<Vec<glob::Pattern>> {
     let mut errors = false;
    let filters = (filters.iter())
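
The Cat op keeps the previous since/until filtering. parse_ts_arg itself is untouched and its body is not part of this diff; as a purely hypothetical sketch of the job it does, assuming the YYYYMMDD_HH arguments used by the test script and Timestamp = chrono::DateTime<Utc>, it could look like:

// Hypothetical sketch only (not the project's code): parse an optional
// "YYYYMMDD_HH" CLI argument into a timestamp; parse_ts_arg_sketch is an
// illustrative name.
fn parse_ts_arg_sketch(ts: Option<String>) -> eyre::Result<Option<chrono::DateTime<chrono::Utc>>> {
    use chrono::{NaiveDate, TimeZone, Utc};
    let Some(ts) = ts else { return Ok(None) };
    let (date, hour) = (ts.split_once('_'))
        .ok_or_else(|| eyre::format_err!("expected YYYYMMDD_HH, got {ts}"))?;
    let date = NaiveDate::parse_from_str(date, "%Y%m%d")?;
    let time = (date.and_hms_opt(hour.parse()?, 0, 0))
        .ok_or_else(|| eyre::format_err!("invalid hour: {hour}"))?;
    Ok(Some(Utc.from_utc_datetime(&time)))
}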

src/dynlay.rs (new file)

@@ -0,0 +1,188 @@
+use eyre::{format_err, Result};
+use log::{debug, error, info, warn};
+use std::path::PathBuf;
+use tokio::{fs, io::AsyncWriteExt, process::Command};
+
+use crate::fs::spawn_walk_dir;
+
+pub struct Dynlay<'t> {
+    pub url_prefix: &'t str,
+    pub layers_dir: &'t str,
+    pub chroot: PathBuf,
+}
+
+impl<'t> Dynlay<'t> {
+    pub async fn install(&self, layer: &str, version: &str) -> Result<()> {
+        let lay_dir = &format!("{base}/{layer}", base = self.layers_dir);
+        debug!("mkdir -p {lay_dir}");
+        fs::create_dir_all(lay_dir).await?;
+
+        let lay_path = &format!("{lay_dir}/{version}");
+
+        if !fs::try_exists(lay_path).await? {
+            let part_file = &format!("{lay_dir}/{version}.tmp");
+            self.fetch(layer, version, part_file).await?;
+
+            (fs::rename(part_file, lay_path).await)
+                .map_err(|e| format_err!("failed mv {part_file} {lay_path}: {e}"))?;
+        }
+
+        let mount_path = PathBuf::from(lay_dir).join("mounts").join(layer);
+        (fs::create_dir_all(&mount_path).await)
+            .map_err(|e| format_err!("mkdir -p {mount_path:?} failed: {e}"))?;
+
+        let mount_path = &fs::canonicalize(mount_path).await?;
+        let mount_path_str = &mount_path.to_string_lossy().into_owned();
+
+        if is_mount_point(mount_path_str).await? {
+            info!("clearing previous mount");
+
+            let mut paths = spawn_walk_dir(mount_path.clone());
+            while let Some(result) = paths.recv().await {
+                let Ok((path, md)) = result else {
+                    continue;
+                };
+                if !md.is_dir() {
+                    let path = self.chroot.join(&path);
+                    debug!("rm {path:?}");
+                    if let Err(e) = fs::remove_file(&path).await {
+                        warn!("rm {path:?} failed: {e}");
+                    }
+                }
+            }
+
+            sudo("umount", &[mount_path]).await?;
+        }
+
+        // mount layer
+        info!("mounting layer");
+        sudo("mount", &["-t", "squashfs", lay_path, &mount_path_str]).await?;
+
+        let mut paths = spawn_walk_dir(mount_path.clone());
+        while let Some(result) = paths.recv().await {
+            let Ok((path, md)) = result else {
+                continue;
+            };
+
+            let target = self.chroot.join(&path);
+
+            if md.is_dir() {
+                debug!("mkdir -p {target:?}");
+                if let Err(e) = fs::create_dir_all(&target).await {
+                    error!("mkdir -p {target:?} failed: {e}");
+                }
+            } else {
+                let _ = fs::remove_file(&target).await;
+
+                let source = mount_path.join(&path);
+                debug!("ln -s {source:?} {target:?}");
+                if let Err(e) = fs::symlink(&source, &target).await {
+                    error!("ln -s {source:?} {target:?} failed: {e}");
+                }
+            }
+        }
+
+        Ok(())
+    }
+
+    async fn fetch(&self, layer: &str, version: &str, part_file: &str) -> Result<()> {
+        let url = &format!("{}/{layer}/{version}", self.url_prefix);
+        info!("fetching {url}");
+
+        let mut out = (fs::File::create(part_file).await)
+            .map_err(|e| format_err!("failed to open {part_file}: {e}"))?;
+
+        let resp = reqwest::get(url).await?;
+        if !resp.status().is_success() {
+            return Err(format_err!("fetch failed: {}", resp.status()));
+        }
+
+        let sha1 = (resp.headers().get("x-content-sha1"))
+            .ok_or(format_err!("no content hash in response"))?;
+        let sha1 = (sha1.to_str()).map_err(|e| format_err!("invalid sha1: {e}"))?;
+        debug!("content sha1: {sha1}");
+
+        let mut exp_sha1 = [0; 20];
+        hex::decode_to_slice(sha1, &mut exp_sha1).map_err(|e| format_err!("invalid sha1: {e}"))?;
+
+        let mut hash = openssl::sha::Sha1::new();
+
+        use futures::StreamExt;
+        let mut stream = resp.bytes_stream();
+        while let Some(bytes) = stream.next().await {
+            let bytes = bytes.map_err(|e| format_err!("remote read error: {e}"))?;
+            hash.update(&bytes);
+            (out.write_all(&bytes).await).map_err(|e| format_err!("local write error: {e}"))?;
+        }
+
+        (out.flush().await).map_err(|e| format_err!("local write error: {e}"))?;
+        drop(out);
+
+        let dl_sha1 = hash.finish();
+        if dl_sha1 != exp_sha1 {
+            if let Err(e) = fs::remove_file(part_file).await {
+                error!("failed to remove {part_file}: {e}");
+            }
+            return Err(format_err!(
+                "invalid content hash: expected {exp}, got {got}",
+                exp = hex::encode(exp_sha1),
+                got = hex::encode(dl_sha1)
+            ));
+        }
+
+        Ok(())
+    }
+}
+
+async fn sudo<I, S>(program: &str, args: I) -> Result<()>
+where
+    I: IntoIterator<Item = S>,
+    S: AsRef<std::ffi::OsStr>,
+{
+    let mut cmd = if nix::unistd::geteuid().is_root() {
+        let mut cmd = Command::new(program);
+        cmd.args(args);
+        cmd
+    } else {
+        let mut cmd = Command::new("sudo");
+        cmd.arg(program).args(args);
+        cmd
+    };
+
+    let status = cmd.status().await?;
+    if status.success() {
+        Ok(())
+    } else {
+        Err(format_err!("{program} failed: {status}"))
+    }
+}
+
+async fn is_mount_point(target: &str) -> Result<bool> {
+    for line in fs::read_to_string("/proc/self/mounts").await?.lines() {
+        let line = line.trim_ascii();
+        if line.is_empty() || line.starts_with("#") {
+            continue;
+        }
+
+        let split: Vec<_> = line.split_ascii_whitespace().collect();
+        if split.len() < 6 {
+            continue;
+        }
+
+        // let dev = split[0];
+        let mount_point = split[1];
+        // let fstype = split[2];
+        // let mntops = split[3];
+        // let fs_freq = split[4];
+        // let fsck_passno = split[5];
+
+        if mount_point == target {
+            return Ok(true);
+        }
+    }
+
+    Ok(false)
+}
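
For reference, the C::Dynlay arm in src/main.rs drives this type with the CLI defaults; a minimal stand-alone sketch of the same call, using the default URL and directory from the clap definitions and the layer/version exercised by the test script:

// Usage sketch mirroring the C::Dynlay arm in src/main.rs.
async fn install_example() -> eyre::Result<()> {
    dkl::dynlay::Dynlay {
        url_prefix: "https://dkl.novit.io/dist/layers", // CLI default
        layers_dir: "/opt/dynlay",                      // CLI default
        chroot: std::path::PathBuf::from("/"),          // CLI default
    }
    .install("kubernetes", "v1.33.3_containerd.2.0.5")
    .await
}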

src/fs.rs (new file)

@@ -0,0 +1,60 @@
+use eyre::Result;
+use std::fs::Metadata;
+use std::path::PathBuf;
+use tokio::fs::read_dir;
+use tokio::sync::mpsc;
+
+pub fn spawn_walk_dir(
+    dir: impl Into<PathBuf> + Send + 'static,
+) -> mpsc::Receiver<Result<(PathBuf, Metadata)>> {
+    let (tx, rx) = mpsc::channel(1);
+    tokio::spawn(walk_dir(dir, tx));
+    rx
+}
+
+pub async fn walk_dir(dir: impl Into<PathBuf>, tx: mpsc::Sender<Result<(PathBuf, Metadata)>>) {
+    let dir: PathBuf = dir.into();
+
+    let mut todo = std::collections::LinkedList::new();
+    if let Ok(rd) = read_dir(&dir).await {
+        todo.push_front(rd);
+    }
+
+    while let Some(rd) = todo.front_mut() {
+        let entry = match rd.next_entry().await {
+            Ok(v) => v,
+            Err(e) => {
+                if tx.send(Err(e.into())).await.is_err() {
+                    return;
+                }
+                todo.pop_front(); // skip dir on error
+                continue;
+            }
+        };
+
+        let Some(entry) = entry else {
+            todo.pop_front();
+            continue;
+        };
+
+        let Ok(md) = entry.metadata().await else {
+            continue;
+        };
+        let is_dir = md.is_dir();
+
+        let Ok(path) = entry.path().strip_prefix(&dir).map(|p| p.to_path_buf()) else {
+            continue; // sub-entry not in dir, weird but semantically, we ignore
+        };
+
+        if tx.send(Ok((path, md))).await.is_err() {
+            return;
+        }
+
+        // recurse in sub directories
+        if is_dir {
+            if let Ok(rd) = read_dir(entry.path()).await {
+                todo.push_front(rd);
+            }
+        }
+    }
+}
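
spawn_walk_dir trades a collected Vec for a bounded channel of (relative path, metadata) pairs: consumers start working while the walk is still running, and per-directory errors are forwarded in-band rather than aborting the walk. A minimal consumer sketch, assuming only the API above:

// Sketch: consuming the walker's channel the same way dynlay.rs does.
async fn list_entries(root: &str) -> eyre::Result<()> {
    let mut paths = dkl::fs::spawn_walk_dir(std::path::PathBuf::from(root));
    while let Some(result) = paths.recv().await {
        // Paths come back relative to `root`; errors are per-entry, so skip them.
        let Ok((path, md)) = result else { continue };
        println!("{} (dir: {})", path.display(), md.is_dir());
    }
    Ok(())
}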

src/lib.rs

@@ -2,6 +2,8 @@ pub mod apply;
 pub mod bootstrap;
 pub mod dls;
 pub mod logger;
+pub mod dynlay;
+pub mod fs;
 
 #[derive(Debug, serde::Deserialize, serde::Serialize)]
 pub struct Config {

src/logger.rs

@@ -35,7 +35,7 @@ impl<'t> Logger<'t> {
         let archives_read_dir = (fs::read_dir(archives_path).await)
             .map_err(|e| format_err!("failed to list archives: {e}"))?;
 
-        let mut prev_stamp = ts_trunc(Utc::now());
+        let mut prev_stamp = trunc_ts(Utc::now());
         let mut current_log = BufWriter::new(self.open_log(prev_stamp).await?);
 
         tokio::spawn(compress_archives(
@@ -94,7 +94,7 @@ impl<'t> Logger<'t> {
         prev_stamp: &mut Timestamp,
         out: &mut BufWriter<File>,
     ) -> Result<()> {
-        let trunc_ts = ts_trunc(log.ts);
+        let trunc_ts = trunc_ts(log.ts);
         if *prev_stamp < trunc_ts {
             // switch log
             out.flush().await?;
@@ -193,7 +193,7 @@ async fn copy(
     }
 }
 
-fn ts_trunc(ts: Timestamp) -> Timestamp {
+pub fn trunc_ts(ts: Timestamp) -> Timestamp {
     ts.duration_trunc(TRUNC_DELTA)
         .expect("duration_trunc failed")
 }
@@ -243,7 +243,7 @@ async fn compress(path: impl AsRef<Path>) {
         .await
         .map_err(|e| format_err!("open {path_str} failed: {e}"))?;
 
-    let out_path = path.with_extension("zstd");
+    let out_path = path.with_extension("zst");
     let out = (File::create(&out_path).await) // create output
         .map_err(|e| format_err!("create {} failed: {e}", out_path.to_string_lossy()))?;
@@ -285,14 +285,16 @@ pub async fn log_files(log_path: &str, log_name: &str) -> std::io::Result<Vec<LogFile>> {
             continue;
         };
 
-        let (name, compressed) = file_name
-            .strip_suffix(".zstd")
-            .map_or((file_name, false), |s| (s, true));
-
-        let Some(name) = name.strip_suffix(".log") else {
+        let Some((name, ext)) = file_name.rsplit_once('.') else {
             continue;
         };
 
+        let compressed = match ext {
+            "zst" => true,
+            "log" => false,
+            _ => continue,
+        };
+
         let Some((name, timestamp)) = name.rsplit_once('.') else {
             continue;
         };
@@ -331,12 +333,12 @@ impl PartialOrd for LogFile {
 
 impl LogFile {
     pub async fn copy_to(&self, out: &mut (impl AsyncWrite + Unpin)) -> io::Result<u64> {
-        let mut input = File::open(&self.path).await?;
+        let input = &mut File::open(&self.path).await?;
         if self.compressed {
-            let mut out = ZstdDecoder::new(out);
-            tokio::io::copy(&mut input, &mut out).await
+            let out = &mut ZstdDecoder::new(out);
+            tokio::io::copy(input, out).await
         } else {
-            tokio::io::copy(&mut input, out).await
+            tokio::io::copy(input, out).await
         }
     }
 }
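
Together with commits 852738be (official zst extension) and 4ccda503 (with_extension replaces rather than appends), the on-disk naming that this parser now expects is:

// Naming convention implied by the log_files parser above:
//   bash.20250720_12.log   -> LogFile { compressed: false, .. }
//   bash.20250720_12.zst   -> LogFile { compressed: true, .. }
//   bash.20250720_12.zstd  -> skipped (old extension, no longer produced)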

test script

@@ -3,7 +3,7 @@ set -ex
 
 dkl=target/debug/dkl
 
-test=${1:-log}
+test=${1:-dynlay}
 
 export RUST_LOG=debug
@@ -19,8 +19,20 @@ case $test in
     cat tmp/log/bash.log
     ;;
 
-log)
-    $dkl log --log-path tmp/log bash 20250720_12 20250720_16
+log-ls)
+    $dkl log --log-path tmp/log bash ls -l
+    ;;
+
+log-cat)
+    $dkl log --log-path tmp/log bash cat 20250720_12 20301231_23
+    ;;
+
+log-clean)
+    $dkl log --log-path tmp/log bash ls -l
+    $dkl log --log-path tmp/log bash cleanup 0
+    ;;
+
+dynlay)
+    mkdir -p tmp/system
+    $dkl dynlay --layers-dir tmp/dynlay --chroot tmp/system kubernetes v1.33.3_containerd.2.0.5
     ;;
 
 *) echo 1>&2 "unknown test: $test"; exit 1 ;;