// nextest_runner/reuse_build/archiver.rs

1// Copyright (c) The nextest Contributors
2// SPDX-License-Identifier: MIT OR Apache-2.0
3
4use super::{ArchiveCounts, ArchiveEvent, BINARIES_METADATA_FILE_NAME, CARGO_METADATA_FILE_NAME};
5use crate::{
6    config::{
7        core::{EvaluatableProfile, get_num_cpus},
8        elements::{ArchiveConfig, ArchiveIncludeOnMissing, RecursionDepth},
9    },
10    errors::{ArchiveCreateError, FromMessagesError, UnknownArchiveFormat, WriteTestListError},
11    helpers::{convert_rel_path_to_forward_slash, rel_path_join},
12    list::{BinaryList, RustBuildMeta, RustTestArtifact},
13    redact::Redactor,
14    reuse_build::{ArchiveFilterCounts, LIBDIRS_BASE_DIR, PathMapper},
15    test_filter::{BinaryFilter, FilterBinaryMatch, FilterBound},
16};
17use atomicwrites::{AtomicFile, OverwriteBehavior};
18use camino::{Utf8Path, Utf8PathBuf};
19use core::fmt;
20use guppy::{PackageId, graph::PackageGraph};
21use nextest_filtering::EvalContext;
22use std::{
23    collections::{BTreeSet, HashSet},
24    fs,
25    io::{self, BufWriter, Write},
26    sync::Arc,
27    time::{Instant, SystemTime},
28};
29use tracing::{debug, trace};
30use zstd::Encoder;
31
32/// Applies archive filters to a [`BinaryList`].
33pub fn apply_archive_filters(
34    graph: &PackageGraph,
35    binary_list: Arc<BinaryList>,
36    filter: &BinaryFilter,
37    ecx: &EvalContext<'_>,
38    path_mapper: &PathMapper,
39) -> Result<(BinaryList, ArchiveFilterCounts), FromMessagesError> {
40    let rust_build_meta = binary_list.rust_build_meta.map_paths(path_mapper);
41    let test_artifacts = RustTestArtifact::from_binary_list(
42        graph,
43        binary_list.clone(),
44        &rust_build_meta,
45        path_mapper,
46        None,
47    )?;
48
49    // Apply filterset to `RustTestArtifact` list.
50    let test_artifacts: BTreeSet<_> = test_artifacts
51        .iter()
52        .filter(|test_artifact| {
53            // Don't obey the default filter here. The default filter will
54            // be applied while running tests from the archive (the
55            // configuration is expected to be present at that time).
56            let filter_match = filter.check_match(test_artifact, ecx, FilterBound::All);
57
58            debug_assert!(
59                !matches!(filter_match, FilterBinaryMatch::Possible),
60                "build_filtersets should have errored out on test filters, \
61                 Possible should never be returned"
62            );
63            matches!(filter_match, FilterBinaryMatch::Definite)
64        })
65        .map(|test_artifact| &test_artifact.binary_id)
66        .collect();
67
68    let filtered_binaries: Vec<_> = binary_list
69        .rust_binaries
70        .iter()
71        .filter(|binary| test_artifacts.contains(&binary.id))
72        .cloned()
73        .collect();
74
75    // Build a map of package IDs included in the filtered set, then use that to
76    // filter out non-test binaries not referred to by any package.
77    let relevant_package_ids: HashSet<_> = filtered_binaries
78        .iter()
79        .map(|binary| &binary.package_id)
80        .collect();
81    let mut filtered_non_test_binaries = binary_list.rust_build_meta.non_test_binaries.clone();
82    filtered_non_test_binaries.retain(|package_id, _| relevant_package_ids.contains(package_id));
83
84    // Also filter out build script out directories and env vars.
85    let mut filtered_build_script_out_dirs =
86        binary_list.rust_build_meta.build_script_out_dirs.clone();
87    filtered_build_script_out_dirs
88        .retain(|package_id, _| relevant_package_ids.contains(package_id));
89    let filtered_build_script_info =
90        binary_list
91            .rust_build_meta
92            .build_script_info
93            .as_ref()
94            .map(|info| {
95                info.iter()
96                    .filter(|(package_id, _)| relevant_package_ids.contains(package_id))
97                    .map(|(k, v)| (k.clone(), v.clone()))
98                    .collect()
99            });
100
101    let filtered_out_test_binary_count = binary_list
102        .rust_binaries
103        .len()
104        .saturating_sub(filtered_binaries.len());
105    let filtered_out_non_test_binary_count = binary_list
106        .rust_build_meta
107        .non_test_binaries
108        .len()
109        .saturating_sub(filtered_non_test_binaries.len());
110    let filtered_out_build_script_out_dir_count = binary_list
111        .rust_build_meta
112        .build_script_out_dirs
113        .len()
114        .saturating_sub(filtered_build_script_out_dirs.len());
115
116    let filtered_build_meta = RustBuildMeta {
117        non_test_binaries: filtered_non_test_binaries,
118        build_script_out_dirs: filtered_build_script_out_dirs,
119        build_script_info: filtered_build_script_info,
120        ..binary_list.rust_build_meta.clone()
121    };
122
123    Ok((
124        BinaryList {
125            rust_build_meta: filtered_build_meta,
126            rust_binaries: filtered_binaries,
127        },
128        ArchiveFilterCounts {
129            filtered_out_test_binary_count,
130            filtered_out_non_test_binary_count,
131            filtered_out_build_script_out_dir_count,
132        },
133    ))
134}
135
/// Archive format.
///
/// Marked `#[non_exhaustive]` so additional formats can be added later
/// without a breaking change.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[non_exhaustive]
pub enum ArchiveFormat {
    /// A Zstandard-compressed tarball.
    TarZst,
}
143
144impl ArchiveFormat {
145    /// The list of supported formats as a list of (file extension, format) pairs.
146    pub const SUPPORTED_FORMATS: &'static [(&'static str, Self)] = &[(".tar.zst", Self::TarZst)];
147
148    /// Automatically detects an archive format from a given file name, and returns an error if the
149    /// detection failed.
150    pub fn autodetect(archive_file: &Utf8Path) -> Result<Self, UnknownArchiveFormat> {
151        let file_name = archive_file.file_name().unwrap_or("");
152        for (extension, format) in Self::SUPPORTED_FORMATS {
153            if file_name.ends_with(extension) {
154                return Ok(*format);
155            }
156        }
157
158        Err(UnknownArchiveFormat {
159            file_name: file_name.to_owned(),
160        })
161    }
162}
163
/// Archives test binaries along with metadata to the given file.
///
/// The output file is a Zstandard-compressed tarball (`.tar.zst`).
///
/// The archive is written atomically via [`AtomicFile`], overwriting any
/// existing file. `callback` receives progress events ([`ArchiveEvent`])
/// throughout; an error returned from it aborts the operation with
/// [`ArchiveCreateError::ReporterIo`].
#[expect(clippy::too_many_arguments)]
pub fn archive_to_file<'a, F>(
    profile: EvaluatableProfile<'a>,
    binary_list: &'a BinaryList,
    filter_counts: ArchiveFilterCounts,
    cargo_metadata: &'a str,
    graph: &'a PackageGraph,
    path_mapper: &'a PathMapper,
    format: ArchiveFormat,
    zstd_level: i32,
    output_file: &'a Utf8Path,
    mut callback: F,
    redactor: Redactor,
) -> Result<(), ArchiveCreateError>
where
    F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
{
    let config = profile.archive_config();

    // Used only to report elapsed time in the final Archived event.
    let start_time = Instant::now();

    let file = AtomicFile::new(output_file, OverwriteBehavior::AllowOverwrite);
    let file_count = file
        .write(|file| {
            // Tests require the standard library in two cases:
            // * proc-macro tests (host)
            // * tests compiled with -C prefer-dynamic (target)
            //
            // We only care about libstd -- empirically, other libraries in the path aren't
            // required.
            //
            // split_result turns the lookup into (found path, lookup error) so
            // that a failed lookup is reported as an event rather than
            // aborting the archive.
            let (host_stdlib, host_stdlib_err) = if let Some(libdir) = binary_list
                .rust_build_meta
                .build_platforms
                .host
                .libdir
                .as_path()
            {
                split_result(find_std(libdir))
            } else {
                (None, None)
            };

            let (target_stdlib, target_stdlib_err) =
                if let Some(target) = &binary_list.rust_build_meta.build_platforms.target {
                    if let Some(libdir) = target.libdir.as_path() {
                        split_result(find_std(libdir))
                    } else {
                        (None, None)
                    }
                } else {
                    (None, None)
                };

            let stdlib_count = host_stdlib.is_some() as usize + target_stdlib.is_some() as usize;

            let archiver = Archiver::new(
                config,
                binary_list,
                cargo_metadata,
                graph,
                path_mapper,
                host_stdlib,
                target_stdlib,
                format,
                zstd_level,
                file,
                redactor,
            )?;

            // Gather the counts reported in the ArchiveStarted event.
            let test_binary_count = binary_list.rust_binaries.len();
            let non_test_binary_count = binary_list.rust_build_meta.non_test_binaries.len();
            let build_script_out_dir_count =
                binary_list.rust_build_meta.build_script_out_dirs.len();
            let linked_path_count = binary_list.rust_build_meta.linked_paths.len();
            let extra_path_count = config.include.len();

            let counts = ArchiveCounts {
                test_binary_count,
                filter_counts,
                non_test_binary_count,
                build_script_out_dir_count,
                linked_path_count,
                extra_path_count,
                stdlib_count,
            };

            callback(ArchiveEvent::ArchiveStarted {
                counts,
                output_file,
            })
            .map_err(ArchiveCreateError::ReporterIo)?;

            // Was there an error finding the standard library?
            if let Some(err) = host_stdlib_err {
                callback(ArchiveEvent::StdlibPathError {
                    error: &err.to_string(),
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
            }
            if let Some(err) = target_stdlib_err {
                callback(ArchiveEvent::StdlibPathError {
                    error: &err.to_string(),
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
            }

            let (_, file_count) = archiver.archive(&mut callback)?;
            Ok(file_count)
        })
        // Internal errors come from atomicwrites' own I/O; User errors are
        // ArchiveCreateError values produced by the closure above.
        .map_err(|err| match err {
            atomicwrites::Error::Internal(err) => ArchiveCreateError::OutputArchiveIo(err),
            atomicwrites::Error::User(err) => err,
        })?;

    let elapsed = start_time.elapsed();

    callback(ArchiveEvent::Archived {
        file_count,
        output_file,
        elapsed,
    })
    .map_err(ArchiveCreateError::ReporterIo)?;

    Ok(())
}
292
/// Streaming archive writer: owns the tar builder (wrapping a zstd encoder)
/// plus the metadata needed to lay files out inside the archive.
struct Archiver<'a, W: Write> {
    binary_list: &'a BinaryList,
    cargo_metadata: &'a str,
    graph: &'a PackageGraph,
    path_mapper: &'a PathMapper,
    // Paths to the host and target standard libraries, if found.
    host_stdlib: Option<Utf8PathBuf>,
    target_stdlib: Option<Utf8PathBuf>,
    // Write pipeline: tar entries -> zstd compression -> buffered writer.
    builder: tar::Builder<Encoder<'static, BufWriter<W>>>,
    // Timestamp (seconds since epoch) stamped on entries appended from memory.
    unix_timestamp: u64,
    // Destination paths already written; used to skip duplicates and to count
    // the total number of files archived.
    added_files: HashSet<Utf8PathBuf>,
    config: &'a ArchiveConfig,
    redactor: Redactor,
}
306
307impl<'a, W: Write> Archiver<'a, W> {
308    #[expect(clippy::too_many_arguments)]
309    fn new(
310        config: &'a ArchiveConfig,
311        binary_list: &'a BinaryList,
312        cargo_metadata: &'a str,
313        graph: &'a PackageGraph,
314        path_mapper: &'a PathMapper,
315        host_stdlib: Option<Utf8PathBuf>,
316        target_stdlib: Option<Utf8PathBuf>,
317        format: ArchiveFormat,
318        compression_level: i32,
319        writer: W,
320        redactor: Redactor,
321    ) -> Result<Self, ArchiveCreateError> {
322        let buf_writer = BufWriter::new(writer);
323        let builder = match format {
324            ArchiveFormat::TarZst => {
325                let mut encoder = zstd::Encoder::new(buf_writer, compression_level)
326                    .map_err(ArchiveCreateError::OutputArchiveIo)?;
327                encoder
328                    .include_checksum(true)
329                    .map_err(ArchiveCreateError::OutputArchiveIo)?;
330                if let Err(err) = encoder.multithread(get_num_cpus() as u32) {
331                    tracing::warn!(
332                        ?err,
333                        "libzstd compiled without multithreading, defaulting to single-thread"
334                    );
335                }
336                tar::Builder::new(encoder)
337            }
338        };
339
340        let unix_timestamp = SystemTime::now()
341            .duration_since(SystemTime::UNIX_EPOCH)
342            .expect("current time should be after 1970-01-01")
343            .as_secs();
344
345        Ok(Self {
346            binary_list,
347            cargo_metadata,
348            graph,
349            path_mapper,
350            host_stdlib,
351            target_stdlib,
352            builder,
353            unix_timestamp,
354            added_files: HashSet::new(),
355            config,
356            redactor,
357        })
358    }
359
    /// Writes all archive contents (metadata, binaries, build script outputs,
    /// linked paths, extra paths, standard libraries), then finalizes the
    /// compressed stream.
    ///
    /// Returns the underlying writer together with the number of distinct
    /// files added to the archive.
    fn archive<F>(mut self, callback: &mut F) -> Result<(W, usize), ArchiveCreateError>
    where
        F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
    {
        // Add the binaries metadata first so that while unarchiving, reports
        // are instant. Use to_archive_summary() so that build_directory is omitted
        // (it defaults to target_directory in archive context).
        let archive_summary = self.binary_list.to_archive_summary();
        let binaries_metadata = serde_json::to_string_pretty(&archive_summary)
            .map_err(|e| ArchiveCreateError::CreateBinaryList(WriteTestListError::Json(e)))?;

        self.append_from_memory(BINARIES_METADATA_FILE_NAME, &binaries_metadata)?;

        self.append_from_memory(CARGO_METADATA_FILE_NAME, self.cargo_metadata)?;

        let target_dir = &self.binary_list.rust_build_meta.target_directory;
        let build_directory = &self.binary_list.rust_build_meta.build_directory;

        // Adapts a callback result for use inside filter_map below: a callback
        // success contributes nothing, a callback failure becomes a fatal
        // ReporterIo error.
        fn filter_map_err<T>(result: io::Result<()>) -> Option<Result<T, ArchiveCreateError>> {
            match result {
                Ok(()) => None,
                Err(err) => Some(Err(ArchiveCreateError::ReporterIo(err))),
            }
        }

        // Check that all archive.include paths exist.
        let archive_include_paths = self
            .config
            .include
            .iter()
            .filter_map(|include| {
                let src_path = include.join_path(target_dir);
                // Archive include paths are joined with the target directory.
                let src_path = self.path_mapper.map_target_path(src_path);

                match src_path.symlink_metadata() {
                    Ok(metadata) => {
                        if metadata.is_dir() {
                            if include.depth().is_zero() {
                                // A directory with depth 0 will not be archived, so warn on that.
                                filter_map_err(callback(ArchiveEvent::DirectoryAtDepthZero {
                                    path: &src_path,
                                }))
                            } else {
                                Some(Ok((include, src_path)))
                            }
                        } else if metadata.is_file() || metadata.is_symlink() {
                            Some(Ok((include, src_path)))
                        } else {
                            filter_map_err(callback(ArchiveEvent::UnknownFileType {
                                step: ArchiveStep::ExtraPaths,
                                path: &src_path,
                            }))
                        }
                    }
                    Err(error) => {
                        if error.kind() == io::ErrorKind::NotFound {
                            // Missing paths are handled per the include's
                            // configured on_missing policy.
                            match include.on_missing() {
                                ArchiveIncludeOnMissing::Error => {
                                    // TODO: accumulate errors rather than failing on the first one
                                    Some(Err(ArchiveCreateError::MissingExtraPath {
                                        path: src_path.to_owned(),
                                        redactor: self.redactor.clone(),
                                    }))
                                }
                                ArchiveIncludeOnMissing::Warn => {
                                    filter_map_err(callback(ArchiveEvent::ExtraPathMissing {
                                        path: &src_path,
                                        warn: true,
                                    }))
                                }
                                ArchiveIncludeOnMissing::Ignore => {
                                    filter_map_err(callback(ArchiveEvent::ExtraPathMissing {
                                        path: &src_path,
                                        warn: false,
                                    }))
                                }
                            }
                        } else {
                            Some(Err(ArchiveCreateError::InputFileRead {
                                step: ArchiveStep::ExtraPaths,
                                path: src_path.to_owned(),
                                is_dir: None,
                                error,
                            }))
                        }
                    }
                }
            })
            .collect::<Result<Vec<_>, ArchiveCreateError>>()?;

        // Write all discovered binaries into the archive. Test binaries
        // are in the build directory (never uplifted by Cargo).
        for binary in &self.binary_list.rust_binaries {
            let rel_path = binary
                .path
                .strip_prefix(build_directory)
                .expect("test binary paths must be within the build directory");
            // Store under "target/" in the archive for portability.
            let rel_path = Utf8Path::new("target").join(rel_path);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::TestBinaries, &binary.path, &rel_path)?;
        }
        for non_test_binary in self
            .binary_list
            .rust_build_meta
            .non_test_binaries
            .iter()
            .flat_map(|(_, binaries)| binaries)
        {
            let src_path = self
                .binary_list
                .rust_build_meta
                .target_directory
                .join(&non_test_binary.path);
            // Non-test binaries are uplifted to the target directory.
            let src_path = self.path_mapper.map_target_path(src_path);

            let rel_path = Utf8Path::new("target").join(&non_test_binary.path);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::NonTestBinaries, &src_path, &rel_path)?;
        }

        // Write build script output directories to the archive. Build
        // script out_dirs are relative to the build directory.
        for build_script_out_dir in self
            .binary_list
            .rust_build_meta
            .build_script_out_dirs
            .values()
        {
            let src_path = build_directory.join(build_script_out_dir);
            let src_path = self.path_mapper.map_build_path(src_path);

            let rel_path = Utf8Path::new("target").join(build_script_out_dir);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            // XXX: For now, we only archive one level of build script output directories as a
            // conservative solution. If necessary, we may have to either broaden this by default or
            // add configuration for this. Archiving too much can cause unnecessary slowdowns.
            self.append_path_recursive(
                ArchiveStep::BuildScriptOutDirs,
                &src_path,
                &rel_path,
                RecursionDepth::Finite(1),
                false,
                callback,
            )?;

            // Note: the build script output file is no longer archived. Build
            // script env vars are captured in build_script_info instead, which
            // is layout-independent and works with -Zbuild-dir-new-layout.
        }

        // Write linked paths to the archive.
        for (linked_path, requested_by) in &self.binary_list.rust_build_meta.linked_paths {
            // Linked paths are relative to the build directory,
            // e.g. debug/foo/bar.
            let src_path = build_directory.join(linked_path);
            let src_path = self.path_mapper.map_build_path(src_path);

            // Some crates produce linked paths that don't exist. This is a bug in those libraries.
            if !src_path.exists() {
                // Map each requested_by to its package name and version.
                let mut requested_by: Vec<_> = requested_by
                    .iter()
                    .map(|package_id| {
                        self.graph
                            .metadata(&PackageId::new(package_id.clone()))
                            .map_or_else(
                                |_| {
                                    // If a package ID is not found in the graph, it's strange but not
                                    // fatal -- just use the ID.
                                    package_id.to_owned()
                                },
                                |metadata| format!("{} v{}", metadata.name(), metadata.version()),
                            )
                    })
                    .collect();
                requested_by.sort_unstable();

                callback(ArchiveEvent::LinkedPathNotFound {
                    path: &src_path,
                    requested_by: &requested_by,
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
                continue;
            }

            let rel_path = Utf8Path::new("target").join(linked_path);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);
            // Since LD_LIBRARY_PATH etc aren't recursive, we only need to add the top-level files
            // from linked paths.
            self.append_path_recursive(
                ArchiveStep::LinkedPaths,
                &src_path,
                &rel_path,
                RecursionDepth::Finite(1),
                false,
                callback,
            )?;
        }

        // Also include extra paths.
        for (include, src_path) in archive_include_paths {
            let rel_path = include.join_path(Utf8Path::new("target"));
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            if src_path.exists() {
                self.append_path_recursive(
                    ArchiveStep::ExtraPaths,
                    &src_path,
                    &rel_path,
                    include.depth(),
                    // Warn if the implicit depth limit for these paths is in use.
                    true,
                    callback,
                )?;
            }
        }

        // Add the standard libraries to the archive if available.
        if let Some(host_stdlib) = self.host_stdlib.clone() {
            let rel_path = Utf8Path::new(LIBDIRS_BASE_DIR)
                .join("host")
                .join(host_stdlib.file_name().unwrap());
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::ExtraPaths, &host_stdlib, &rel_path)?;
        }
        if let Some(target_stdlib) = self.target_stdlib.clone() {
            // Use libdir/target/0 as the path to the target standard library, to support multiple
            // targets in the future.
            let rel_path = Utf8Path::new(LIBDIRS_BASE_DIR)
                .join("target/0")
                .join(target_stdlib.file_name().unwrap());
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::ExtraPaths, &target_stdlib, &rel_path)?;
        }

        // Finish writing the archive.
        let encoder = self
            .builder
            .into_inner()
            .map_err(ArchiveCreateError::OutputArchiveIo)?;
        // Finish writing the zstd stream.
        let buf_writer = encoder
            .finish()
            .map_err(ArchiveCreateError::OutputArchiveIo)?;
        let writer = buf_writer
            .into_inner()
            .map_err(|err| ArchiveCreateError::OutputArchiveIo(err.into_error()))?;

        Ok((writer, self.added_files.len()))
    }
618
619    // ---
620    // Helper methods
621    // ---
622
623    fn append_from_memory(&mut self, name: &str, contents: &str) -> Result<(), ArchiveCreateError> {
624        let mut header = tar::Header::new_gnu();
625        header.set_size(contents.len() as u64);
626        header.set_mtime(self.unix_timestamp);
627        header.set_mode(0o664);
628        header.set_cksum();
629
630        self.builder
631            .append_data(&mut header, name, io::Cursor::new(contents))
632            .map_err(ArchiveCreateError::OutputArchiveIo)?;
633        // We always prioritize appending files from memory over files on disk, so don't check
634        // membership in added_files before adding the file to the archive.
635        self.added_files.insert(name.into());
636        Ok(())
637    }
638
    /// Appends `src_path` (a file, symlink, or directory) to the archive at
    /// `rel_path`, descending at most `limit` levels into directories.
    ///
    /// When the depth limit is reached, `callback` receives an
    /// `ArchiveEvent::RecursionDepthExceeded` event (with `warn` set to
    /// `warn_on_exceed_depth`); entries of other file types are reported via
    /// `ArchiveEvent::UnknownFileType` and skipped.
    fn append_path_recursive<F>(
        &mut self,
        step: ArchiveStep,
        src_path: &Utf8Path,
        rel_path: &Utf8Path,
        limit: RecursionDepth,
        warn_on_exceed_depth: bool,
        callback: &mut F,
    ) -> Result<(), ArchiveCreateError>
    where
        F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
    {
        // Within the loop, the metadata will be part of the directory entry.
        let metadata =
            fs::symlink_metadata(src_path).map_err(|error| ArchiveCreateError::InputFileRead {
                step,
                path: src_path.to_owned(),
                is_dir: None,
                error,
            })?;

        // Use an explicit stack to avoid the unlikely but possible situation of a stack overflow.
        let mut stack = vec![(limit, src_path.to_owned(), rel_path.to_owned(), metadata)];

        while let Some((depth, src_path, rel_path, metadata)) = stack.pop() {
            trace!(
                target: "nextest-runner",
                "processing `{src_path}` with metadata {metadata:?} \
                 (depth: {depth})",
            );

            if metadata.is_dir() {
                // Check the recursion limit.
                if depth.is_zero() {
                    callback(ArchiveEvent::RecursionDepthExceeded {
                        step,
                        path: &src_path,
                        limit: limit.unwrap_finite(),
                        warn: warn_on_exceed_depth,
                    })
                    .map_err(ArchiveCreateError::ReporterIo)?;
                    continue;
                }

                // Iterate over this directory.
                debug!(
                    target: "nextest-runner",
                    "recursing into `{}`",
                    src_path
                );
                let entries = src_path.read_dir_utf8().map_err(|error| {
                    ArchiveCreateError::InputFileRead {
                        step,
                        path: src_path.to_owned(),
                        is_dir: Some(true),
                        error,
                    }
                })?;
                for entry in entries {
                    let entry = entry.map_err(|error| ArchiveCreateError::DirEntryRead {
                        path: src_path.to_owned(),
                        error,
                    })?;
                    let metadata =
                        entry
                            .metadata()
                            .map_err(|error| ArchiveCreateError::InputFileRead {
                                step,
                                path: entry.path().to_owned(),
                                is_dir: None,
                                error,
                            })?;
                    let entry_rel_path = rel_path_join(&rel_path, entry.file_name().as_ref());
                    // Children carry one less level of remaining depth.
                    stack.push((
                        depth.decrement(),
                        entry.into_path(),
                        entry_rel_path,
                        metadata,
                    ));
                }
            } else if metadata.is_file() || metadata.is_symlink() {
                self.append_file(step, &src_path, &rel_path)?;
            } else {
                // Don't archive other kinds of files.
                callback(ArchiveEvent::UnknownFileType {
                    step,
                    path: &src_path,
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
            }
        }

        Ok(())
    }
733
734    fn append_file(
735        &mut self,
736        step: ArchiveStep,
737        src: &Utf8Path,
738        dest: &Utf8Path,
739    ) -> Result<(), ArchiveCreateError> {
740        // Check added_files to ensure we aren't adding duplicate files.
741        if !self.added_files.contains(dest) {
742            debug!(
743                target: "nextest-runner",
744                "adding `{src}` to archive as `{dest}`",
745            );
746            self.builder
747                .append_path_with_name(src, dest)
748                .map_err(|error| ArchiveCreateError::InputFileRead {
749                    step,
750                    path: src.to_owned(),
751                    is_dir: Some(false),
752                    error,
753                })?;
754            self.added_files.insert(dest.into());
755        }
756        Ok(())
757    }
758}
759
760fn find_std(libdir: &Utf8Path) -> io::Result<Utf8PathBuf> {
761    for path in libdir.read_dir_utf8()? {
762        let path = path?;
763        // As of Rust 1.78, std is of the form:
764        //
765        //   libstd-<hash>.so (non-macOS Unix)
766        //   libstd-<hash>.dylib (macOS)
767        //   std-<hash>.dll (Windows)
768        let file_name = path.file_name();
769        let is_unix = file_name.starts_with("libstd-")
770            && (file_name.ends_with(".so") || file_name.ends_with(".dylib"));
771        let is_windows = file_name.starts_with("std-") && file_name.ends_with(".dll");
772
773        if is_unix || is_windows {
774            return Ok(path.into_path());
775        }
776    }
777
778    Err(io::Error::other(
779        "could not find the Rust standard library in the libdir",
780    ))
781}
782
/// Splits a `Result` into a pair of optional success and error values.
///
/// Exactly one element of the returned tuple is `Some`.
fn split_result<T, E>(result: Result<T, E>) -> (Option<T>, Option<E>) {
    result.map_or_else(|error| (None, Some(error)), |value| (Some(value), None))
}
789
/// The part of the archive process that is currently in progress.
///
/// This is used for better warnings and errors.
#[derive(Clone, Copy, Debug)]
pub enum ArchiveStep {
    /// Test binaries are being archived.
    TestBinaries,

    /// Non-test binaries are being archived.
    NonTestBinaries,

    /// Build script output directories are being archived.
    BuildScriptOutDirs,

    /// Linked paths are being archived.
    LinkedPaths,

    /// Extra paths (`archive.include` entries; also used when archiving the
    /// standard libraries) are being archived.
    ExtraPaths,

    /// The standard library is being archived.
    Stdlib,
}
813
814impl fmt::Display for ArchiveStep {
815    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
816        match self {
817            Self::TestBinaries => write!(f, "test binaries"),
818            Self::NonTestBinaries => write!(f, "non-test binaries"),
819            Self::BuildScriptOutDirs => write!(f, "build script output directories"),
820            Self::LinkedPaths => write!(f, "linked paths"),
821            Self::ExtraPaths => write!(f, "extra paths"),
822            Self::Stdlib => write!(f, "standard library"),
823        }
824    }
825}
826
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_archive_format_autodetect() {
        // Names ending in `.tar.zst` are detected regardless of directory depth.
        for valid in ["foo.tar.zst", "foo/bar.tar.zst"] {
            assert_eq!(
                ArchiveFormat::autodetect(valid.as_ref()).unwrap(),
                ArchiveFormat::TarZst,
            );
        }
        // Names without a recognized extension are rejected.
        for invalid in ["foo", "/"] {
            ArchiveFormat::autodetect(invalid.as_ref()).unwrap_err();
        }
    }
}