nextest_runner/reuse_build/
archiver.rs

1// Copyright (c) The nextest Contributors
2// SPDX-License-Identifier: MIT OR Apache-2.0
3
4use super::{ArchiveCounts, ArchiveEvent, BINARIES_METADATA_FILE_NAME, CARGO_METADATA_FILE_NAME};
5use crate::{
6    config::{
7        core::{EvaluatableProfile, get_num_cpus},
8        elements::{ArchiveConfig, ArchiveIncludeOnMissing, RecursionDepth},
9    },
10    errors::{ArchiveCreateError, FromMessagesError, UnknownArchiveFormat, WriteTestListError},
11    helpers::{convert_rel_path_to_forward_slash, rel_path_join},
12    list::{BinaryList, RustBuildMeta, RustTestArtifact},
13    redact::Redactor,
14    reuse_build::{ArchiveFilterCounts, LIBDIRS_BASE_DIR, PathMapper},
15    test_filter::{BinaryFilter, FilterBinaryMatch, FilterBound},
16};
17use atomicwrites::{AtomicFile, OverwriteBehavior};
18use camino::{Utf8Path, Utf8PathBuf};
19use core::fmt;
20use guppy::{PackageId, graph::PackageGraph};
21use nextest_filtering::EvalContext;
22use std::{
23    collections::{BTreeSet, HashSet},
24    fs,
25    io::{self, BufWriter, Write},
26    sync::Arc,
27    time::{Instant, SystemTime},
28};
29use tracing::{debug, trace};
30use zstd::Encoder;
31
/// Applies archive filters to a [`BinaryList`].
///
/// Evaluates `filter` against each test binary, keeping only definite
/// matches, then narrows the associated build metadata (non-test binaries,
/// build script output directories, and build script info) down to the
/// packages that still own at least one retained test binary. Returns the
/// filtered [`BinaryList`] along with counts of what was filtered out, for
/// reporting purposes.
pub fn apply_archive_filters(
    graph: &PackageGraph,
    binary_list: Arc<BinaryList>,
    filter: &BinaryFilter,
    ecx: &EvalContext<'_>,
    path_mapper: &PathMapper,
) -> Result<(BinaryList, ArchiveFilterCounts), FromMessagesError> {
    let rust_build_meta = binary_list.rust_build_meta.map_paths(path_mapper);
    let test_artifacts = RustTestArtifact::from_binary_list(
        graph,
        binary_list.clone(),
        &rust_build_meta,
        path_mapper,
        None,
    )?;

    // Apply filterset to `RustTestArtifact` list, collecting the IDs of
    // binaries that definitely match.
    let test_artifacts: BTreeSet<_> = test_artifacts
        .iter()
        .filter(|test_artifact| {
            // Don't obey the default filter here. The default filter will
            // be applied while running tests from the archive (the
            // configuration is expected to be present at that time).
            let query = test_artifact.to_binary_query();
            let filter_match = filter.check_match(&query, ecx, FilterBound::All);

            debug_assert!(
                !matches!(filter_match, FilterBinaryMatch::Possible),
                "build_filtersets should have errored out on test filters, \
                 Possible should never be returned"
            );
            matches!(filter_match, FilterBinaryMatch::Definite)
        })
        .map(|test_artifact| &test_artifact.binary_id)
        .collect();

    let filtered_binaries: Vec<_> = binary_list
        .rust_binaries
        .iter()
        .filter(|binary| test_artifacts.contains(&binary.id))
        .cloned()
        .collect();

    // Build a map of package IDs included in the filtered set, then use that to
    // filter out non-test binaries not referred to by any package.
    let relevant_package_ids: HashSet<_> = filtered_binaries
        .iter()
        .map(|binary| &binary.package_id)
        .collect();
    let mut filtered_non_test_binaries = binary_list.rust_build_meta.non_test_binaries.clone();
    filtered_non_test_binaries.retain(|package_id, _| relevant_package_ids.contains(package_id));

    // Also filter out build script out directories and env vars.
    let mut filtered_build_script_out_dirs =
        binary_list.rust_build_meta.build_script_out_dirs.clone();
    filtered_build_script_out_dirs
        .retain(|package_id, _| relevant_package_ids.contains(package_id));
    let filtered_build_script_info =
        binary_list
            .rust_build_meta
            .build_script_info
            .as_ref()
            .map(|info| {
                info.iter()
                    .filter(|(package_id, _)| relevant_package_ids.contains(package_id))
                    .map(|(k, v)| (k.clone(), v.clone()))
                    .collect()
            });

    // Note: the test binary count is per binary (a Vec), while the non-test
    // binary and out-dir counts are over map entries keyed by package ID --
    // i.e. per package, not per file.
    let filtered_out_test_binary_count = binary_list
        .rust_binaries
        .len()
        .saturating_sub(filtered_binaries.len());
    let filtered_out_non_test_binary_count = binary_list
        .rust_build_meta
        .non_test_binaries
        .len()
        .saturating_sub(filtered_non_test_binaries.len());
    let filtered_out_build_script_out_dir_count = binary_list
        .rust_build_meta
        .build_script_out_dirs
        .len()
        .saturating_sub(filtered_build_script_out_dirs.len());

    // Reassemble the build metadata with the filtered maps; all other fields
    // carry over unchanged via struct update syntax.
    let filtered_build_meta = RustBuildMeta {
        non_test_binaries: filtered_non_test_binaries,
        build_script_out_dirs: filtered_build_script_out_dirs,
        build_script_info: filtered_build_script_info,
        ..binary_list.rust_build_meta.clone()
    };

    Ok((
        BinaryList {
            rust_build_meta: filtered_build_meta,
            rust_binaries: filtered_binaries,
        },
        ArchiveFilterCounts {
            filtered_out_test_binary_count,
            filtered_out_non_test_binary_count,
            filtered_out_build_script_out_dir_count,
        },
    ))
}
136
/// Archive format.
///
/// Marked `#[non_exhaustive]` so that additional formats can be added without
/// a breaking change.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[non_exhaustive]
pub enum ArchiveFormat {
    /// A Zstandard-compressed tarball.
    TarZst,
}
144
145impl ArchiveFormat {
146    /// The list of supported formats as a list of (file extension, format) pairs.
147    pub const SUPPORTED_FORMATS: &'static [(&'static str, Self)] = &[(".tar.zst", Self::TarZst)];
148
149    /// Automatically detects an archive format from a given file name, and returns an error if the
150    /// detection failed.
151    pub fn autodetect(archive_file: &Utf8Path) -> Result<Self, UnknownArchiveFormat> {
152        let file_name = archive_file.file_name().unwrap_or("");
153        for (extension, format) in Self::SUPPORTED_FORMATS {
154            if file_name.ends_with(extension) {
155                return Ok(*format);
156            }
157        }
158
159        Err(UnknownArchiveFormat {
160            file_name: file_name.to_owned(),
161        })
162    }
163}
164
/// Archives test binaries along with metadata to the given file.
///
/// The output file is a Zstandard-compressed tarball (`.tar.zst`).
///
/// The archive is written atomically: the contents are streamed to a
/// temporary file which replaces `output_file` only on success, so a failed
/// run never leaves a truncated archive behind. `callback` receives progress
/// and warning events throughout the process.
#[expect(clippy::too_many_arguments)]
pub fn archive_to_file<'a, F>(
    profile: EvaluatableProfile<'a>,
    binary_list: &'a BinaryList,
    filter_counts: ArchiveFilterCounts,
    cargo_metadata: &'a str,
    graph: &'a PackageGraph,
    path_mapper: &'a PathMapper,
    format: ArchiveFormat,
    zstd_level: i32,
    output_file: &'a Utf8Path,
    mut callback: F,
    redactor: Redactor,
) -> Result<(), ArchiveCreateError>
where
    F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
{
    let config = profile.archive_config();

    let start_time = Instant::now();

    let file = AtomicFile::new(output_file, OverwriteBehavior::AllowOverwrite);
    let file_count = file
        .write(|file| {
            // Tests require the standard library in two cases:
            // * proc-macro tests (host)
            // * tests compiled with -C prefer-dynamic (target)
            //
            // We only care about libstd -- empirically, other libraries in the path aren't
            // required.
            //
            // Failure to locate libstd is non-fatal: the Ok/Err outcome is
            // split apart here, and errors are reported as events further
            // below, after ArchiveStarted has been sent.
            let (host_stdlib, host_stdlib_err) = if let Some(libdir) = binary_list
                .rust_build_meta
                .build_platforms
                .host
                .libdir
                .as_path()
            {
                split_result(find_std(libdir))
            } else {
                (None, None)
            };

            let (target_stdlib, target_stdlib_err) =
                if let Some(target) = &binary_list.rust_build_meta.build_platforms.target {
                    if let Some(libdir) = target.libdir.as_path() {
                        split_result(find_std(libdir))
                    } else {
                        (None, None)
                    }
                } else {
                    (None, None)
                };

            let stdlib_count = host_stdlib.is_some() as usize + target_stdlib.is_some() as usize;

            let archiver = Archiver::new(
                config,
                binary_list,
                cargo_metadata,
                graph,
                path_mapper,
                host_stdlib,
                target_stdlib,
                format,
                zstd_level,
                file,
                redactor,
            )?;

            // Gather counts for the ArchiveStarted event, shown to the user
            // before archiving begins.
            let test_binary_count = binary_list.rust_binaries.len();
            let non_test_binary_count = binary_list.rust_build_meta.non_test_binaries.len();
            let build_script_out_dir_count =
                binary_list.rust_build_meta.build_script_out_dirs.len();
            let linked_path_count = binary_list.rust_build_meta.linked_paths.len();
            let extra_path_count = config.include.len();

            let counts = ArchiveCounts {
                test_binary_count,
                filter_counts,
                non_test_binary_count,
                build_script_out_dir_count,
                linked_path_count,
                extra_path_count,
                stdlib_count,
            };

            callback(ArchiveEvent::ArchiveStarted {
                counts,
                output_file,
            })
            .map_err(ArchiveCreateError::ReporterIo)?;

            // Was there an error finding the standard library?
            if let Some(err) = host_stdlib_err {
                callback(ArchiveEvent::StdlibPathError {
                    error: &err.to_string(),
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
            }
            if let Some(err) = target_stdlib_err {
                callback(ArchiveEvent::StdlibPathError {
                    error: &err.to_string(),
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
            }

            let (_, file_count) = archiver.archive(&mut callback)?;
            Ok(file_count)
        })
        .map_err(|err| match err {
            // `Internal` is an I/O error from atomicwrites itself; `User`
            // wraps an `ArchiveCreateError` returned by the closure above.
            atomicwrites::Error::Internal(err) => ArchiveCreateError::OutputArchiveIo(err),
            atomicwrites::Error::User(err) => err,
        })?;

    let elapsed = start_time.elapsed();

    callback(ArchiveEvent::Archived {
        file_count,
        output_file,
        elapsed,
    })
    .map_err(ArchiveCreateError::ReporterIo)?;

    Ok(())
}
293
/// Streams build artifacts and metadata into a zstd-compressed tarball.
struct Archiver<'a, W: Write> {
    // The list of binaries and build metadata describing what to archive.
    binary_list: &'a BinaryList,
    // Raw `cargo metadata` JSON, stored verbatim in the archive.
    cargo_metadata: &'a str,
    graph: &'a PackageGraph,
    path_mapper: &'a PathMapper,
    // Paths to the host/target standard library dylibs, if they were found.
    host_stdlib: Option<Utf8PathBuf>,
    target_stdlib: Option<Utf8PathBuf>,
    // Tar builder layered over a zstd encoder over the output writer.
    builder: tar::Builder<Encoder<'static, BufWriter<W>>>,
    // Timestamp (seconds since the epoch) applied to files added from memory.
    unix_timestamp: u64,
    // Archive-relative paths already written, used to skip duplicates.
    added_files: HashSet<Utf8PathBuf>,
    config: &'a ArchiveConfig,
    redactor: Redactor,
}
307
impl<'a, W: Write> Archiver<'a, W> {
    /// Creates a new archiver, setting up the tar builder over a compressed
    /// stream for the requested format.
    #[expect(clippy::too_many_arguments)]
    fn new(
        config: &'a ArchiveConfig,
        binary_list: &'a BinaryList,
        cargo_metadata: &'a str,
        graph: &'a PackageGraph,
        path_mapper: &'a PathMapper,
        host_stdlib: Option<Utf8PathBuf>,
        target_stdlib: Option<Utf8PathBuf>,
        format: ArchiveFormat,
        compression_level: i32,
        writer: W,
        redactor: Redactor,
    ) -> Result<Self, ArchiveCreateError> {
        let buf_writer = BufWriter::new(writer);
        let builder = match format {
            ArchiveFormat::TarZst => {
                let mut encoder = zstd::Encoder::new(buf_writer, compression_level)
                    .map_err(ArchiveCreateError::OutputArchiveIo)?;
                encoder
                    .include_checksum(true)
                    .map_err(ArchiveCreateError::OutputArchiveIo)?;
                // Multithreaded compression is best-effort: libzstd may be
                // built without it, in which case we warn and proceed
                // single-threaded.
                if let Err(err) = encoder.multithread(get_num_cpus() as u32) {
                    tracing::warn!(
                        ?err,
                        "libzstd compiled without multithreading, defaulting to single-thread"
                    );
                }
                tar::Builder::new(encoder)
            }
        };

        // One timestamp is captured up front and reused for every file
        // appended from memory (see append_from_memory).
        let unix_timestamp = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .expect("current time should be after 1970-01-01")
            .as_secs();

        Ok(Self {
            binary_list,
            cargo_metadata,
            graph,
            path_mapper,
            host_stdlib,
            target_stdlib,
            builder,
            unix_timestamp,
            added_files: HashSet::new(),
            config,
            redactor,
        })
    }

    /// Writes the full archive, consuming the archiver.
    ///
    /// On success, returns the underlying writer along with the number of
    /// distinct files added. `callback` receives progress/warning events.
    fn archive<F>(mut self, callback: &mut F) -> Result<(W, usize), ArchiveCreateError>
    where
        F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
    {
        // Add the binaries metadata first so that while unarchiving, reports
        // are instant. Use to_archive_summary() so that build_directory is omitted
        // (it defaults to target_directory in archive context).
        let archive_summary = self.binary_list.to_archive_summary();
        let binaries_metadata = serde_json::to_string_pretty(&archive_summary)
            .map_err(|e| ArchiveCreateError::CreateBinaryList(WriteTestListError::Json(e)))?;

        self.append_from_memory(BINARIES_METADATA_FILE_NAME, &binaries_metadata)?;

        self.append_from_memory(CARGO_METADATA_FILE_NAME, self.cargo_metadata)?;

        let target_dir = &self.binary_list.rust_build_meta.target_directory;
        let build_directory = &self.binary_list.rust_build_meta.build_directory;

        // Adapter: converts a callback I/O error into the `Some(Err(_))`
        // shape expected by the `filter_map` below (Ok means "skip entry").
        fn filter_map_err<T>(result: io::Result<()>) -> Option<Result<T, ArchiveCreateError>> {
            match result {
                Ok(()) => None,
                Err(err) => Some(Err(ArchiveCreateError::ReporterIo(err))),
            }
        }

        // Check that all archive.include paths exist.
        let archive_include_paths = self
            .config
            .include
            .iter()
            .filter_map(|include| {
                let src_path = include.join_path(target_dir);
                // Archive include paths are joined with the target directory.
                let src_path = self.path_mapper.map_target_path(src_path);

                match src_path.symlink_metadata() {
                    Ok(metadata) => {
                        if metadata.is_dir() {
                            if include.depth().is_zero() {
                                // A directory with depth 0 will not be archived, so warn on that.
                                filter_map_err(callback(ArchiveEvent::DirectoryAtDepthZero {
                                    path: &src_path,
                                }))
                            } else {
                                Some(Ok((include, src_path)))
                            }
                        } else if metadata.is_file() || metadata.is_symlink() {
                            Some(Ok((include, src_path)))
                        } else {
                            filter_map_err(callback(ArchiveEvent::UnknownFileType {
                                step: ArchiveStep::ExtraPaths,
                                path: &src_path,
                            }))
                        }
                    }
                    Err(error) => {
                        if error.kind() == io::ErrorKind::NotFound {
                            // Missing paths are handled per the include's
                            // configured on_missing policy.
                            match include.on_missing() {
                                ArchiveIncludeOnMissing::Error => {
                                    // TODO: accumulate errors rather than failing on the first one
                                    Some(Err(ArchiveCreateError::MissingExtraPath {
                                        path: src_path.to_owned(),
                                        redactor: self.redactor.clone(),
                                    }))
                                }
                                ArchiveIncludeOnMissing::Warn => {
                                    filter_map_err(callback(ArchiveEvent::ExtraPathMissing {
                                        path: &src_path,
                                        warn: true,
                                    }))
                                }
                                ArchiveIncludeOnMissing::Ignore => {
                                    filter_map_err(callback(ArchiveEvent::ExtraPathMissing {
                                        path: &src_path,
                                        warn: false,
                                    }))
                                }
                            }
                        } else {
                            Some(Err(ArchiveCreateError::InputFileRead {
                                step: ArchiveStep::ExtraPaths,
                                path: src_path.to_owned(),
                                is_dir: None,
                                error,
                            }))
                        }
                    }
                }
            })
            .collect::<Result<Vec<_>, ArchiveCreateError>>()?;

        // Write all discovered binaries into the archive. Test binaries
        // are in the build directory (never uplifted by Cargo).
        for binary in &self.binary_list.rust_binaries {
            let rel_path = binary
                .path
                .strip_prefix(build_directory)
                .expect("test binary paths must be within the build directory");
            // Store under "target/" in the archive for portability.
            let rel_path = Utf8Path::new("target").join(rel_path);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::TestBinaries, &binary.path, &rel_path)?;
        }
        for non_test_binary in self
            .binary_list
            .rust_build_meta
            .non_test_binaries
            .iter()
            .flat_map(|(_, binaries)| binaries)
        {
            let src_path = self
                .binary_list
                .rust_build_meta
                .target_directory
                .join(&non_test_binary.path);
            // Non-test binaries are uplifted to the target directory.
            let src_path = self.path_mapper.map_target_path(src_path);

            let rel_path = Utf8Path::new("target").join(&non_test_binary.path);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::NonTestBinaries, &src_path, &rel_path)?;
        }

        // Write build script output directories to the archive. Build
        // script out_dirs are relative to the build directory.
        for build_script_out_dir in self
            .binary_list
            .rust_build_meta
            .build_script_out_dirs
            .values()
        {
            let src_path = build_directory.join(build_script_out_dir);
            let src_path = self.path_mapper.map_build_path(src_path);

            let rel_path = Utf8Path::new("target").join(build_script_out_dir);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            // XXX: For now, we only archive one level of build script output directories as a
            // conservative solution. If necessary, we may have to either broaden this by default or
            // add configuration for this. Archiving too much can cause unnecessary slowdowns.
            self.append_path_recursive(
                ArchiveStep::BuildScriptOutDirs,
                &src_path,
                &rel_path,
                RecursionDepth::Finite(1),
                false,
                callback,
            )?;

            // Note: the build script output file is no longer archived. Build
            // script env vars are captured in build_script_info instead, which
            // is layout-independent and works with -Zbuild-dir-new-layout.
        }

        // Write linked paths to the archive.
        for (linked_path, requested_by) in &self.binary_list.rust_build_meta.linked_paths {
            // Linked paths are relative to the build directory,
            // e.g. debug/foo/bar.
            let src_path = build_directory.join(linked_path);
            let src_path = self.path_mapper.map_build_path(src_path);

            // Some crates produce linked paths that don't exist. This is a bug in those libraries.
            if !src_path.exists() {
                // Map each requested_by to its package name and version.
                let mut requested_by: Vec<_> = requested_by
                    .iter()
                    .map(|package_id| {
                        self.graph
                            .metadata(&PackageId::new(package_id.clone()))
                            .map_or_else(
                                |_| {
                                    // If a package ID is not found in the graph, it's strange but not
                                    // fatal -- just use the ID.
                                    package_id.to_owned()
                                },
                                |metadata| format!("{} v{}", metadata.name(), metadata.version()),
                            )
                    })
                    .collect();
                requested_by.sort_unstable();

                callback(ArchiveEvent::LinkedPathNotFound {
                    path: &src_path,
                    requested_by: &requested_by,
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
                continue;
            }

            let rel_path = Utf8Path::new("target").join(linked_path);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);
            // Since LD_LIBRARY_PATH etc aren't recursive, we only need to add the top-level files
            // from linked paths.
            self.append_path_recursive(
                ArchiveStep::LinkedPaths,
                &src_path,
                &rel_path,
                RecursionDepth::Finite(1),
                false,
                callback,
            )?;
        }

        // Also include extra paths.
        for (include, src_path) in archive_include_paths {
            let rel_path = include.join_path(Utf8Path::new("target"));
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            if src_path.exists() {
                self.append_path_recursive(
                    ArchiveStep::ExtraPaths,
                    &src_path,
                    &rel_path,
                    include.depth(),
                    // Warn if the implicit depth limit for these paths is in use.
                    true,
                    callback,
                )?;
            }
        }

        // Add the standard libraries to the archive if available.
        if let Some(host_stdlib) = self.host_stdlib.clone() {
            let rel_path = Utf8Path::new(LIBDIRS_BASE_DIR)
                .join("host")
                .join(host_stdlib.file_name().unwrap());
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::ExtraPaths, &host_stdlib, &rel_path)?;
        }
        if let Some(target_stdlib) = self.target_stdlib.clone() {
            // Use libdir/target/0 as the path to the target standard library, to support multiple
            // targets in the future.
            let rel_path = Utf8Path::new(LIBDIRS_BASE_DIR)
                .join("target/0")
                .join(target_stdlib.file_name().unwrap());
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::ExtraPaths, &target_stdlib, &rel_path)?;
        }

        // Finish writing the archive.
        let encoder = self
            .builder
            .into_inner()
            .map_err(ArchiveCreateError::OutputArchiveIo)?;
        // Finish writing the zstd stream.
        let buf_writer = encoder
            .finish()
            .map_err(ArchiveCreateError::OutputArchiveIo)?;
        let writer = buf_writer
            .into_inner()
            .map_err(|err| ArchiveCreateError::OutputArchiveIo(err.into_error()))?;

        Ok((writer, self.added_files.len()))
    }

    // ---
    // Helper methods
    // ---

    /// Appends an in-memory string to the archive under `name`, using the
    /// archiver's creation timestamp and 0o664 permissions.
    fn append_from_memory(&mut self, name: &str, contents: &str) -> Result<(), ArchiveCreateError> {
        let mut header = tar::Header::new_gnu();
        header.set_size(contents.len() as u64);
        header.set_mtime(self.unix_timestamp);
        header.set_mode(0o664);
        header.set_cksum();

        self.builder
            .append_data(&mut header, name, io::Cursor::new(contents))
            .map_err(ArchiveCreateError::OutputArchiveIo)?;
        // We always prioritize appending files from memory over files on disk, so don't check
        // membership in added_files before adding the file to the archive.
        self.added_files.insert(name.into());
        Ok(())
    }

    /// Recursively appends `src_path` (a file, symlink or directory) to the
    /// archive at `rel_path`, descending at most `limit` levels into
    /// directories. Emits events for depth-limit hits and unknown file types.
    fn append_path_recursive<F>(
        &mut self,
        step: ArchiveStep,
        src_path: &Utf8Path,
        rel_path: &Utf8Path,
        limit: RecursionDepth,
        warn_on_exceed_depth: bool,
        callback: &mut F,
    ) -> Result<(), ArchiveCreateError>
    where
        F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
    {
        // Within the loop, the metadata will be part of the directory entry.
        let metadata =
            fs::symlink_metadata(src_path).map_err(|error| ArchiveCreateError::InputFileRead {
                step,
                path: src_path.to_owned(),
                is_dir: None,
                error,
            })?;

        // Use an explicit stack to avoid the unlikely but possible situation of a stack overflow.
        let mut stack = vec![(limit, src_path.to_owned(), rel_path.to_owned(), metadata)];

        while let Some((depth, src_path, rel_path, metadata)) = stack.pop() {
            trace!(
                target: "nextest-runner",
                "processing `{src_path}` with metadata {metadata:?} \
                 (depth: {depth})",
            );

            if metadata.is_dir() {
                // Check the recursion limit.
                if depth.is_zero() {
                    callback(ArchiveEvent::RecursionDepthExceeded {
                        step,
                        path: &src_path,
                        limit: limit.unwrap_finite(),
                        warn: warn_on_exceed_depth,
                    })
                    .map_err(ArchiveCreateError::ReporterIo)?;
                    continue;
                }

                // Iterate over this directory.
                debug!(
                    target: "nextest-runner",
                    "recursing into `{}`",
                    src_path
                );
                let entries = src_path.read_dir_utf8().map_err(|error| {
                    ArchiveCreateError::InputFileRead {
                        step,
                        path: src_path.to_owned(),
                        is_dir: Some(true),
                        error,
                    }
                })?;
                for entry in entries {
                    let entry = entry.map_err(|error| ArchiveCreateError::DirEntryRead {
                        path: src_path.to_owned(),
                        error,
                    })?;
                    let metadata =
                        entry
                            .metadata()
                            .map_err(|error| ArchiveCreateError::InputFileRead {
                                step,
                                path: entry.path().to_owned(),
                                is_dir: None,
                                error,
                            })?;
                    let entry_rel_path = rel_path_join(&rel_path, entry.file_name().as_ref());
                    stack.push((
                        depth.decrement(),
                        entry.into_path(),
                        entry_rel_path,
                        metadata,
                    ));
                }
            } else if metadata.is_file() || metadata.is_symlink() {
                self.append_file(step, &src_path, &rel_path)?;
            } else {
                // Don't archive other kinds of files.
                callback(ArchiveEvent::UnknownFileType {
                    step,
                    path: &src_path,
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
            }
        }

        Ok(())
    }

    /// Appends a single file at `src` to the archive as `dest`, skipping it
    /// if `dest` was already added (first write wins).
    fn append_file(
        &mut self,
        step: ArchiveStep,
        src: &Utf8Path,
        dest: &Utf8Path,
    ) -> Result<(), ArchiveCreateError> {
        // Check added_files to ensure we aren't adding duplicate files.
        if !self.added_files.contains(dest) {
            debug!(
                target: "nextest-runner",
                "adding `{src}` to archive as `{dest}`",
            );
            self.builder
                .append_path_with_name(src, dest)
                .map_err(|error| ArchiveCreateError::InputFileRead {
                    step,
                    path: src.to_owned(),
                    is_dir: Some(false),
                    error,
                })?;
            self.added_files.insert(dest.into());
        }
        Ok(())
    }
}
760
761fn find_std(libdir: &Utf8Path) -> io::Result<Utf8PathBuf> {
762    for path in libdir.read_dir_utf8()? {
763        let path = path?;
764        // As of Rust 1.78, std is of the form:
765        //
766        //   libstd-<hash>.so (non-macOS Unix)
767        //   libstd-<hash>.dylib (macOS)
768        //   std-<hash>.dll (Windows)
769        let file_name = path.file_name();
770        let is_unix = file_name.starts_with("libstd-")
771            && (file_name.ends_with(".so") || file_name.ends_with(".dylib"));
772        let is_windows = file_name.starts_with("std-") && file_name.ends_with(".dll");
773
774        if is_unix || is_windows {
775            return Ok(path.into_path());
776        }
777    }
778
779    Err(io::Error::other(
780        "could not find the Rust standard library in the libdir",
781    ))
782}
783
/// Splits a `Result` into a pair of options; exactly one side is `Some`.
fn split_result<T, E>(result: Result<T, E>) -> (Option<T>, Option<E>) {
    // Route the success and error values into opposite slots of the pair.
    result.map_or_else(|e| (None, Some(e)), |v| (Some(v), None))
}
790
/// The part of the archive process that is currently in progress.
///
/// This is used for better warnings and errors: events and errors carry the
/// step so messages can say which category of file was being archived.
#[derive(Clone, Copy, Debug)]
pub enum ArchiveStep {
    /// Test binaries are being archived.
    TestBinaries,

    /// Non-test binaries are being archived.
    NonTestBinaries,

    /// Build script output directories are being archived.
    BuildScriptOutDirs,

    /// Linked paths are being archived.
    LinkedPaths,

    /// Extra paths are being archived.
    ExtraPaths,

    /// The standard library is being archived.
    Stdlib,
}
814
815impl fmt::Display for ArchiveStep {
816    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
817        match self {
818            Self::TestBinaries => write!(f, "test binaries"),
819            Self::NonTestBinaries => write!(f, "non-test binaries"),
820            Self::BuildScriptOutDirs => write!(f, "build script output directories"),
821            Self::LinkedPaths => write!(f, "linked paths"),
822            Self::ExtraPaths => write!(f, "extra paths"),
823            Self::Stdlib => write!(f, "standard library"),
824        }
825    }
826}
827
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_archive_format_autodetect() {
        // Known extensions are detected regardless of leading directories.
        for input in ["foo.tar.zst", "foo/bar.tar.zst"] {
            assert_eq!(
                ArchiveFormat::autodetect(input.as_ref()).unwrap(),
                ArchiveFormat::TarZst,
                "{input} should be detected as a zstd tarball",
            );
        }
        // Unrecognized names -- including paths with no file name -- error out.
        for input in ["foo", "/"] {
            ArchiveFormat::autodetect(input.as_ref()).unwrap_err();
        }
    }
}
845}