use super::{ArchiveCounts, ArchiveEvent, BINARIES_METADATA_FILE_NAME, CARGO_METADATA_FILE_NAME};
use crate::{
config::{
get_num_cpus, ArchiveConfig, ArchiveIncludeOnMissing, EvaluatableProfile, RecursionDepth,
},
errors::{ArchiveCreateError, UnknownArchiveFormat},
helpers::{convert_rel_path_to_forward_slash, rel_path_join},
list::{BinaryList, OutputFormat, SerializableFormat},
redact::Redactor,
reuse_build::{PathMapper, LIBDIRS_BASE_DIR},
};
use atomicwrites::{AtomicFile, OverwriteBehavior};
use camino::{Utf8Path, Utf8PathBuf};
use core::fmt;
use guppy::{graph::PackageGraph, PackageId};
use std::{
collections::HashSet,
fs,
io::{self, BufWriter, Write},
time::{Instant, SystemTime},
};
use tracing::{debug, trace, warn};
use zstd::Encoder;
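
/// An archive format supported by nextest.
///
/// Currently, only Zstandard-compressed tarballs (`.tar.zst`) are supported.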
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[non_exhaustive]
pub enum ArchiveFormat {
TarZst,
}

impl ArchiveFormat {
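/// The file extensions and corresponding formats recognized by [`Self::autodetect`].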
pub const SUPPORTED_FORMATS: &'static [(&'static str, Self)] = &[(".tar.zst", Self::TarZst)];
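
/// Attempts to detect the archive format from the file name, returning an error if the
/// extension is not recognized.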
pub fn autodetect(archive_file: &Utf8Path) -> Result<Self, UnknownArchiveFormat> {
let file_name = archive_file.file_name().unwrap_or("");
for (extension, format) in Self::SUPPORTED_FORMATS {
if file_name.ends_with(extension) {
return Ok(*format);
}
}
Err(UnknownArchiveFormat {
file_name: file_name.to_owned(),
})
}
}
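
/// Archives the test binaries and supporting files described by `binary_list` to `output_file`,
/// writing the output atomically and reporting progress through `callback`.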
#[expect(clippy::too_many_arguments)]
pub fn archive_to_file<'a, F>(
profile: EvaluatableProfile<'a>,
binary_list: &'a BinaryList,
cargo_metadata: &'a str,
graph: &'a PackageGraph,
path_mapper: &'a PathMapper,
format: ArchiveFormat,
zstd_level: i32,
output_file: &'a Utf8Path,
mut callback: F,
redactor: Redactor,
) -> Result<(), ArchiveCreateError>
where
F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
{
let config = profile.archive_config();
let start_time = Instant::now();
let file = AtomicFile::new(output_file, OverwriteBehavior::AllowOverwrite);
let file_count = file
.write(|file| {
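// Locate the host and target standard libraries up front; any errors are held back
// and reported after the ArchiveStarted event below.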
let (host_stdlib, host_stdlib_err) = if let Some(libdir) = binary_list
.rust_build_meta
.build_platforms
.host
.libdir
.as_path()
{
split_result(find_std(libdir))
} else {
(None, None)
};
let (target_stdlib, target_stdlib_err) =
if let Some(target) = &binary_list.rust_build_meta.build_platforms.target {
if let Some(libdir) = target.libdir.as_path() {
split_result(find_std(libdir))
} else {
(None, None)
}
} else {
(None, None)
};
let stdlib_count = host_stdlib.is_some() as usize + target_stdlib.is_some() as usize;
let archiver = Archiver::new(
config,
binary_list,
cargo_metadata,
graph,
path_mapper,
host_stdlib,
target_stdlib,
format,
zstd_level,
file,
redactor,
)?;
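
// Tally up what will be archived so the reporter can show totals in the start event.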
let test_binary_count = binary_list.rust_binaries.len();
let non_test_binary_count = binary_list.rust_build_meta.non_test_binaries.len();
let build_script_out_dir_count =
binary_list.rust_build_meta.build_script_out_dirs.len();
let linked_path_count = binary_list.rust_build_meta.linked_paths.len();
let extra_path_count = config.include.len();
let counts = ArchiveCounts {
test_binary_count,
non_test_binary_count,
build_script_out_dir_count,
linked_path_count,
extra_path_count,
stdlib_count,
};
callback(ArchiveEvent::ArchiveStarted {
counts,
output_file,
})
.map_err(ArchiveCreateError::ReporterIo)?;
if let Some(err) = host_stdlib_err {
callback(ArchiveEvent::StdlibPathError {
error: &err.to_string(),
})
.map_err(ArchiveCreateError::ReporterIo)?;
}
if let Some(err) = target_stdlib_err {
callback(ArchiveEvent::StdlibPathError {
error: &err.to_string(),
})
.map_err(ArchiveCreateError::ReporterIo)?;
}
let (_, file_count) = archiver.archive(&mut callback)?;
Ok(file_count)
})
.map_err(|err| match err {
atomicwrites::Error::Internal(err) => ArchiveCreateError::OutputArchiveIo(err),
atomicwrites::Error::User(err) => err,
})?;
let elapsed = start_time.elapsed();
callback(ArchiveEvent::Archived {
file_count,
output_file,
elapsed,
})
.map_err(ArchiveCreateError::ReporterIo)?;
Ok(())
}
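
/// Drives the actual archiving work: writes entries into a compressed tar stream and
/// tracks which destination paths have already been added.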
struct Archiver<'a, W: Write> {
binary_list: &'a BinaryList,
cargo_metadata: &'a str,
graph: &'a PackageGraph,
path_mapper: &'a PathMapper,
host_stdlib: Option<Utf8PathBuf>,
target_stdlib: Option<Utf8PathBuf>,
builder: tar::Builder<Encoder<'static, BufWriter<W>>>,
unix_timestamp: u64,
added_files: HashSet<Utf8PathBuf>,
config: &'a ArchiveConfig,
redactor: Redactor,
}

impl<'a, W: Write> Archiver<'a, W> {
#[expect(clippy::too_many_arguments)]
fn new(
config: &'a ArchiveConfig,
binary_list: &'a BinaryList,
cargo_metadata: &'a str,
graph: &'a PackageGraph,
path_mapper: &'a PathMapper,
host_stdlib: Option<Utf8PathBuf>,
target_stdlib: Option<Utf8PathBuf>,
format: ArchiveFormat,
compression_level: i32,
writer: W,
redactor: Redactor,
) -> Result<Self, ArchiveCreateError> {
let buf_writer = BufWriter::new(writer);
let builder = match format {
ArchiveFormat::TarZst => {
let mut encoder = zstd::Encoder::new(buf_writer, compression_level)
.map_err(ArchiveCreateError::OutputArchiveIo)?;
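// Add a content checksum to the zstd frame so corruption is detected on decompression.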
encoder
.include_checksum(true)
.map_err(ArchiveCreateError::OutputArchiveIo)?;
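// Compress on multiple worker threads, one per available CPU.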
encoder
.multithread(get_num_cpus() as u32)
.map_err(ArchiveCreateError::OutputArchiveIo)?;
tar::Builder::new(encoder)
}
};
let unix_timestamp = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.expect("current time should be after 1970-01-01")
.as_secs();
Ok(Self {
binary_list,
cargo_metadata,
graph,
path_mapper,
host_stdlib,
target_stdlib,
builder,
unix_timestamp,
added_files: HashSet::new(),
config,
redactor,
})
}
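
/// Writes out the full archive, returning the underlying writer and the number of files
/// that were added.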
fn archive<F>(mut self, callback: &mut F) -> Result<(W, usize), ArchiveCreateError>
where
F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
{
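// Write the binaries metadata and cargo metadata files first.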
let binaries_metadata = self
.binary_list
.to_string(OutputFormat::Serializable(SerializableFormat::JsonPretty))
.map_err(ArchiveCreateError::CreateBinaryList)?;
self.append_from_memory(BINARIES_METADATA_FILE_NAME, &binaries_metadata)?;
self.append_from_memory(CARGO_METADATA_FILE_NAME, self.cargo_metadata)?;
let target_dir = &self.binary_list.rust_build_meta.target_directory;
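
// Helper for turning a reporter callback error into an archive error inside the
// filter_map below.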
fn filter_map_err<T>(result: io::Result<()>) -> Option<Result<T, ArchiveCreateError>> {
match result {
Ok(()) => None,
Err(err) => Some(Err(ArchiveCreateError::ReporterIo(err))),
}
}
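
// Resolve the configured extra include paths, reporting entries that are missing or of
// an unsupported file type.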
let archive_include_paths = self
.config
.include
.iter()
.filter_map(|include| {
let src_path = include.join_path(target_dir);
let src_path = self.path_mapper.map_binary(src_path);
match src_path.symlink_metadata() {
Ok(metadata) => {
if metadata.is_dir() {
if include.depth().is_zero() {
filter_map_err(callback(ArchiveEvent::DirectoryAtDepthZero {
path: &src_path,
}))
} else {
Some(Ok((include, src_path)))
}
} else if metadata.is_file() || metadata.is_symlink() {
Some(Ok((include, src_path)))
} else {
filter_map_err(callback(ArchiveEvent::UnknownFileType {
step: ArchiveStep::ExtraPaths,
path: &src_path,
}))
}
}
Err(error) => {
if error.kind() == io::ErrorKind::NotFound {
match include.on_missing() {
ArchiveIncludeOnMissing::Error => {
Some(Err(ArchiveCreateError::MissingExtraPath {
path: src_path.to_owned(),
redactor: self.redactor.clone(),
}))
}
ArchiveIncludeOnMissing::Warn => {
filter_map_err(callback(ArchiveEvent::ExtraPathMissing {
path: &src_path,
warn: true,
}))
}
ArchiveIncludeOnMissing::Ignore => {
filter_map_err(callback(ArchiveEvent::ExtraPathMissing {
path: &src_path,
warn: false,
}))
}
}
} else {
Some(Err(ArchiveCreateError::InputFileRead {
step: ArchiveStep::ExtraPaths,
path: src_path.to_owned(),
is_dir: None,
error,
}))
}
}
}
})
.collect::<Result<Vec<_>, ArchiveCreateError>>()?;
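
// Add the test binaries themselves, relative to `target/` inside the archive.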
for binary in &self.binary_list.rust_binaries {
let rel_path = binary
.path
.strip_prefix(target_dir)
.expect("binary paths must be within target directory");
let rel_path = Utf8Path::new("target").join(rel_path);
let rel_path = convert_rel_path_to_forward_slash(&rel_path);
self.append_file(ArchiveStep::TestBinaries, &binary.path, &rel_path)?;
}
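
// Add non-test binaries recorded in the Rust build metadata.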
for non_test_binary in self
.binary_list
.rust_build_meta
.non_test_binaries
.iter()
.flat_map(|(_, binaries)| binaries)
{
let src_path = self
.binary_list
.rust_build_meta
.target_directory
.join(&non_test_binary.path);
let src_path = self.path_mapper.map_binary(src_path);
let rel_path = Utf8Path::new("target").join(&non_test_binary.path);
let rel_path = convert_rel_path_to_forward_slash(&rel_path);
self.append_file(ArchiveStep::NonTestBinaries, &src_path, &rel_path)?;
}
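
// Add build script output directories (one level deep) along with their corresponding
// `output` files.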
for build_script_out_dir in self
.binary_list
.rust_build_meta
.build_script_out_dirs
.values()
{
let src_path = self
.binary_list
.rust_build_meta
.target_directory
.join(build_script_out_dir);
let src_path = self.path_mapper.map_binary(src_path);
let rel_path = Utf8Path::new("target").join(build_script_out_dir);
let rel_path = convert_rel_path_to_forward_slash(&rel_path);
self.append_path_recursive(
ArchiveStep::BuildScriptOutDirs,
&src_path,
&rel_path,
RecursionDepth::Finite(1),
false,
callback,
)?;
let Some(out_dir_parent) = build_script_out_dir.parent() else {
warn!("could not determine parent directory of output directory {build_script_out_dir}");
continue;
};
let out_file_path = out_dir_parent.join("output");
let src_path = self
.binary_list
.rust_build_meta
.target_directory
.join(&out_file_path);
let rel_path = Utf8Path::new("target").join(out_file_path);
let rel_path = convert_rel_path_to_forward_slash(&rel_path);
self.append_file(ArchiveStep::BuildScriptOutDirs, &src_path, &rel_path)?;
}
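
// Add linked paths, reporting any that no longer exist along with the packages that
// requested them.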
for (linked_path, requested_by) in &self.binary_list.rust_build_meta.linked_paths {
let src_path = self
.binary_list
.rust_build_meta
.target_directory
.join(linked_path);
let src_path = self.path_mapper.map_binary(src_path);
if !src_path.exists() {
let mut requested_by: Vec<_> = requested_by
.iter()
.map(|package_id| {
self.graph
.metadata(&PackageId::new(package_id.clone()))
.map_or_else(
// Fall back to the raw package ID if it isn't found in the package graph.
|_| package_id.to_owned(),
|metadata| format!("{} v{}", metadata.name(), metadata.version()),
)
})
.collect();
requested_by.sort_unstable();
callback(ArchiveEvent::LinkedPathNotFound {
path: &src_path,
requested_by: &requested_by,
})
.map_err(ArchiveCreateError::ReporterIo)?;
continue;
}
let rel_path = Utf8Path::new("target").join(linked_path);
let rel_path = convert_rel_path_to_forward_slash(&rel_path);
self.append_path_recursive(
ArchiveStep::LinkedPaths,
&src_path,
&rel_path,
RecursionDepth::Finite(1),
false,
callback,
)?;
}
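
// Add the extra include paths resolved earlier, recursing up to each entry's configured
// depth.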
for (include, src_path) in archive_include_paths {
let rel_path = include.join_path(Utf8Path::new("target"));
let rel_path = convert_rel_path_to_forward_slash(&rel_path);
if src_path.exists() {
self.append_path_recursive(
ArchiveStep::ExtraPaths,
&src_path,
&rel_path,
include.depth(),
true,
callback,
)?;
}
}
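
// Add the host and (if present) target standard libraries located earlier.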
if let Some(host_stdlib) = self.host_stdlib.clone() {
let rel_path = Utf8Path::new(LIBDIRS_BASE_DIR)
.join("host")
.join(host_stdlib.file_name().expect("stdlib path should have a file name"));
let rel_path = convert_rel_path_to_forward_slash(&rel_path);
self.append_file(ArchiveStep::ExtraPaths, &host_stdlib, &rel_path)?;
}
if let Some(target_stdlib) = self.target_stdlib.clone() {
let rel_path = Utf8Path::new(LIBDIRS_BASE_DIR)
.join("target/0")
.join(target_stdlib.file_name().expect("stdlib path should have a file name"));
let rel_path = convert_rel_path_to_forward_slash(&rel_path);
self.append_file(ArchiveStep::ExtraPaths, &target_stdlib, &rel_path)?;
}
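
// Finish the tar stream and the zstd frame, then recover the underlying writer.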
let encoder = self
.builder
.into_inner()
.map_err(ArchiveCreateError::OutputArchiveIo)?;
let buf_writer = encoder
.finish()
.map_err(ArchiveCreateError::OutputArchiveIo)?;
let writer = buf_writer
.into_inner()
.map_err(|err| ArchiveCreateError::OutputArchiveIo(err.into_error()))?;
Ok((writer, self.added_files.len()))
}
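
/// Appends a file with the given name and in-memory contents to the archive.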
fn append_from_memory(&mut self, name: &str, contents: &str) -> Result<(), ArchiveCreateError> {
let mut header = tar::Header::new_gnu();
header.set_size(contents.len() as u64);
header.set_mtime(self.unix_timestamp);
header.set_mode(0o664);
header.set_cksum();
self.builder
.append_data(&mut header, name, io::Cursor::new(contents))
.map_err(ArchiveCreateError::OutputArchiveIo)?;
self.added_files.insert(name.into());
Ok(())
}
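
/// Appends a path to the archive, recursing into directories up to `limit` levels deep.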
fn append_path_recursive<F>(
&mut self,
step: ArchiveStep,
src_path: &Utf8Path,
rel_path: &Utf8Path,
limit: RecursionDepth,
warn_on_exceed_depth: bool,
callback: &mut F,
) -> Result<(), ArchiveCreateError>
where
F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
{
let metadata =
fs::symlink_metadata(src_path).map_err(|error| ArchiveCreateError::InputFileRead {
step,
path: src_path.to_owned(),
is_dir: None,
error,
})?;
let mut stack = vec![(limit, src_path.to_owned(), rel_path.to_owned(), metadata)];
while let Some((depth, src_path, rel_path, metadata)) = stack.pop() {
trace!(
target: "nextest-runner",
"processing `{src_path}` with metadata {metadata:?} \
(depth: {depth})",
);
if metadata.is_dir() {
if depth.is_zero() {
callback(ArchiveEvent::RecursionDepthExceeded {
step,
path: &src_path,
limit: limit.unwrap_finite(),
warn: warn_on_exceed_depth,
})
.map_err(ArchiveCreateError::ReporterIo)?;
continue;
}
debug!(
target: "nextest-runner",
"recursing into `{}`",
src_path
);
let entries = src_path.read_dir_utf8().map_err(|error| {
ArchiveCreateError::InputFileRead {
step,
path: src_path.to_owned(),
is_dir: Some(true),
error,
}
})?;
for entry in entries {
let entry = entry.map_err(|error| ArchiveCreateError::DirEntryRead {
path: src_path.to_owned(),
error,
})?;
let metadata =
entry
.metadata()
.map_err(|error| ArchiveCreateError::InputFileRead {
step,
path: entry.path().to_owned(),
is_dir: None,
error,
})?;
let entry_rel_path = rel_path_join(&rel_path, entry.file_name().as_ref());
stack.push((
depth.decrement(),
entry.into_path(),
entry_rel_path,
metadata,
));
}
} else if metadata.is_file() || metadata.is_symlink() {
self.append_file(step, &src_path, &rel_path)?;
} else {
callback(ArchiveEvent::UnknownFileType {
step,
path: &src_path,
})
.map_err(ArchiveCreateError::ReporterIo)?;
}
}
Ok(())
}
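
/// Appends a single file to the archive, skipping destinations that have already been added.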
fn append_file(
&mut self,
step: ArchiveStep,
src: &Utf8Path,
dest: &Utf8Path,
) -> Result<(), ArchiveCreateError> {
if !self.added_files.contains(dest) {
debug!(
target: "nextest-runner",
"adding `{src}` to archive as `{dest}`",
);
self.builder
.append_path_with_name(src, dest)
.map_err(|error| ArchiveCreateError::InputFileRead {
step,
path: src.to_owned(),
is_dir: Some(false),
error,
})?;
self.added_files.insert(dest.into());
}
Ok(())
}
}
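
/// Looks for the Rust standard library dynamic library within the given libdir.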
fn find_std(libdir: &Utf8Path) -> io::Result<Utf8PathBuf> {
for path in libdir.read_dir_utf8()? {
let path = path?;
let file_name = path.file_name();
let is_unix = file_name.starts_with("libstd-")
&& (file_name.ends_with(".so") || file_name.ends_with(".dylib"));
let is_windows = file_name.starts_with("std-") && file_name.ends_with(".dll");
if is_unix || is_windows {
return Ok(path.into_path());
}
}
Err(io::Error::other(
"could not find the Rust standard library in the libdir",
))
}
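
/// Splits a `Result` into a pair of `Option`s holding the success and error values.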
fn split_result<T, E>(result: Result<T, E>) -> (Option<T>, Option<E>) {
match result {
Ok(v) => (Some(v), None),
Err(e) => (None, Some(e)),
}
}
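
/// The step of the archive process a file is being added in, used in events and errors.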
#[derive(Clone, Copy, Debug)]
pub enum ArchiveStep {
TestBinaries,
NonTestBinaries,
BuildScriptOutDirs,
LinkedPaths,
ExtraPaths,
Stdlib,
}

impl fmt::Display for ArchiveStep {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::TestBinaries => write!(f, "test binaries"),
Self::NonTestBinaries => write!(f, "non-test binaries"),
Self::BuildScriptOutDirs => write!(f, "build script output directories"),
Self::LinkedPaths => write!(f, "linked paths"),
Self::ExtraPaths => write!(f, "extra paths"),
Self::Stdlib => write!(f, "standard library"),
}
}
}

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn test_archive_format_autodetect() {
assert_eq!(
ArchiveFormat::autodetect("foo.tar.zst".as_ref()).unwrap(),
ArchiveFormat::TarZst,
);
assert_eq!(
ArchiveFormat::autodetect("foo/bar.tar.zst".as_ref()).unwrap(),
ArchiveFormat::TarZst,
);
ArchiveFormat::autodetect("foo".as_ref()).unwrap_err();
ArchiveFormat::autodetect("/".as_ref()).unwrap_err();
}
}