nextest_runner/reuse_build/archiver.rs

// Copyright (c) The nextest Contributors
// SPDX-License-Identifier: MIT OR Apache-2.0

use super::{ArchiveCounts, ArchiveEvent, BINARIES_METADATA_FILE_NAME, CARGO_METADATA_FILE_NAME};
use crate::{
    config::{
        core::{EvaluatableProfile, get_num_cpus},
        elements::{ArchiveConfig, ArchiveIncludeOnMissing, RecursionDepth},
    },
    errors::{ArchiveCreateError, FromMessagesError, UnknownArchiveFormat},
    helpers::{convert_rel_path_to_forward_slash, rel_path_join},
    list::{BinaryList, OutputFormat, RustBuildMeta, RustTestArtifact, SerializableFormat},
    redact::Redactor,
    reuse_build::{ArchiveFilterCounts, LIBDIRS_BASE_DIR, PathMapper},
    test_filter::{BinaryFilter, FilterBinaryMatch, FilterBound},
};
use atomicwrites::{AtomicFile, OverwriteBehavior};
use camino::{Utf8Path, Utf8PathBuf};
use core::fmt;
use guppy::{PackageId, graph::PackageGraph};
use nextest_filtering::EvalContext;
use std::{
    collections::{BTreeSet, HashSet},
    fs,
    io::{self, BufWriter, Write},
    sync::Arc,
    time::{Instant, SystemTime},
};
use tracing::{debug, trace, warn};
use zstd::Encoder;

/// Applies archive filters to a [`BinaryList`].
pub fn apply_archive_filters(
    graph: &PackageGraph,
    binary_list: Arc<BinaryList>,
    filter: &BinaryFilter,
    ecx: &EvalContext<'_>,
    path_mapper: &PathMapper,
) -> Result<(BinaryList, ArchiveFilterCounts), FromMessagesError> {
    let rust_build_meta = binary_list.rust_build_meta.map_paths(path_mapper);
    let test_artifacts = RustTestArtifact::from_binary_list(
        graph,
        binary_list.clone(),
        &rust_build_meta,
        path_mapper,
        None,
    )?;

    // Apply filterset to `RustTestArtifact` list.
    let test_artifacts: BTreeSet<_> = test_artifacts
        .iter()
        .filter(|test_artifact| {
            // Don't obey the default filter here. The default filter will
            // be applied while running tests from the archive (the
            // configuration is expected to be present at that time).
            let filter_match = filter.check_match(test_artifact, ecx, FilterBound::All);

            debug_assert!(
                !matches!(filter_match, FilterBinaryMatch::Possible),
                "build_filtersets should have errored out on test filters, \
                 Possible should never be returned"
            );
            matches!(filter_match, FilterBinaryMatch::Definite)
        })
        .map(|test_artifact| &test_artifact.binary_id)
        .collect();

    let filtered_binaries: Vec<_> = binary_list
        .rust_binaries
        .iter()
        .filter(|binary| test_artifacts.contains(&binary.id))
        .cloned()
        .collect();

    // Build a set of package IDs included in the filtered binaries, then use it to
    // filter out non-test binaries that don't belong to any of those packages.
    let relevant_package_ids: HashSet<_> = filtered_binaries
        .iter()
        .map(|binary| &binary.package_id)
        .collect();
    let mut filtered_non_test_binaries = binary_list.rust_build_meta.non_test_binaries.clone();
    filtered_non_test_binaries.retain(|package_id, _| relevant_package_ids.contains(package_id));

    // Also filter out build script out directories.
    let mut filtered_build_script_out_dirs =
        binary_list.rust_build_meta.build_script_out_dirs.clone();
    filtered_build_script_out_dirs
        .retain(|package_id, _| relevant_package_ids.contains(package_id));

    let filtered_out_test_binary_count = binary_list
        .rust_binaries
        .len()
        .saturating_sub(filtered_binaries.len());
    let filtered_out_non_test_binary_count = binary_list
        .rust_build_meta
        .non_test_binaries
        .len()
        .saturating_sub(filtered_non_test_binaries.len());
    let filtered_out_build_script_out_dir_count = binary_list
        .rust_build_meta
        .build_script_out_dirs
        .len()
        .saturating_sub(filtered_build_script_out_dirs.len());

    let filtered_build_meta = RustBuildMeta {
        non_test_binaries: filtered_non_test_binaries,
        build_script_out_dirs: filtered_build_script_out_dirs,
        ..binary_list.rust_build_meta.clone()
    };

    Ok((
        BinaryList {
            rust_build_meta: filtered_build_meta,
            rust_binaries: filtered_binaries,
        },
        ArchiveFilterCounts {
            filtered_out_test_binary_count,
            filtered_out_non_test_binary_count,
            filtered_out_build_script_out_dir_count,
        },
    ))
}

/// Archive format.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[non_exhaustive]
pub enum ArchiveFormat {
    /// A Zstandard-compressed tarball.
    TarZst,
}

impl ArchiveFormat {
    /// The supported formats, as a list of (file extension, format) pairs.
    pub const SUPPORTED_FORMATS: &'static [(&'static str, Self)] = &[(".tar.zst", Self::TarZst)];

    /// Automatically detects an archive format from the given file name, returning an error if
    /// detection fails.
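    ///
    /// # Example
    ///
    /// A minimal sketch of calling `autodetect`; it assumes `ArchiveFormat` is re-exported
    /// from the `nextest_runner::reuse_build` module (not compile-tested here).
    ///
    /// ```ignore
    /// use camino::Utf8Path;
    /// use nextest_runner::reuse_build::ArchiveFormat;
    ///
    /// let format = ArchiveFormat::autodetect(Utf8Path::new("my-archive.tar.zst")).unwrap();
    /// assert_eq!(format, ArchiveFormat::TarZst);
    /// ```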
    pub fn autodetect(archive_file: &Utf8Path) -> Result<Self, UnknownArchiveFormat> {
        let file_name = archive_file.file_name().unwrap_or("");
        for (extension, format) in Self::SUPPORTED_FORMATS {
            if file_name.ends_with(extension) {
                return Ok(*format);
            }
        }

        Err(UnknownArchiveFormat {
            file_name: file_name.to_owned(),
        })
    }
}

/// Archives test binaries along with metadata to the given file.
///
/// The output file is a Zstandard-compressed tarball (`.tar.zst`).
#[expect(clippy::too_many_arguments)]
pub fn archive_to_file<'a, F>(
    profile: EvaluatableProfile<'a>,
    binary_list: &'a BinaryList,
    filter_counts: ArchiveFilterCounts,
    cargo_metadata: &'a str,
    graph: &'a PackageGraph,
    path_mapper: &'a PathMapper,
    format: ArchiveFormat,
    zstd_level: i32,
    output_file: &'a Utf8Path,
    mut callback: F,
    redactor: Redactor,
) -> Result<(), ArchiveCreateError>
where
    F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
{
    let config = profile.archive_config();

    let start_time = Instant::now();

    let file = AtomicFile::new(output_file, OverwriteBehavior::AllowOverwrite);
    let file_count = file
        .write(|file| {
            // Tests require the standard library in two cases:
            // * proc-macro tests (host)
            // * tests compiled with -C prefer-dynamic (target)
            //
            // We only care about libstd -- empirically, other libraries in the path aren't
            // required.
            let (host_stdlib, host_stdlib_err) = if let Some(libdir) = binary_list
                .rust_build_meta
                .build_platforms
                .host
                .libdir
                .as_path()
            {
                split_result(find_std(libdir))
            } else {
                (None, None)
            };

            let (target_stdlib, target_stdlib_err) =
                if let Some(target) = &binary_list.rust_build_meta.build_platforms.target {
                    if let Some(libdir) = target.libdir.as_path() {
                        split_result(find_std(libdir))
                    } else {
                        (None, None)
                    }
                } else {
                    (None, None)
                };

            let stdlib_count = host_stdlib.is_some() as usize + target_stdlib.is_some() as usize;

            let archiver = Archiver::new(
                config,
                binary_list,
                cargo_metadata,
                graph,
                path_mapper,
                host_stdlib,
                target_stdlib,
                format,
                zstd_level,
                file,
                redactor,
            )?;

            let test_binary_count = binary_list.rust_binaries.len();
            let non_test_binary_count = binary_list.rust_build_meta.non_test_binaries.len();
            let build_script_out_dir_count =
                binary_list.rust_build_meta.build_script_out_dirs.len();
            let linked_path_count = binary_list.rust_build_meta.linked_paths.len();
            let extra_path_count = config.include.len();

            let counts = ArchiveCounts {
                test_binary_count,
                filter_counts,
                non_test_binary_count,
                build_script_out_dir_count,
                linked_path_count,
                extra_path_count,
                stdlib_count,
            };

            callback(ArchiveEvent::ArchiveStarted {
                counts,
                output_file,
            })
            .map_err(ArchiveCreateError::ReporterIo)?;

            // Was there an error finding the standard library?
            if let Some(err) = host_stdlib_err {
                callback(ArchiveEvent::StdlibPathError {
                    error: &err.to_string(),
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
            }
            if let Some(err) = target_stdlib_err {
                callback(ArchiveEvent::StdlibPathError {
                    error: &err.to_string(),
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
            }

            let (_, file_count) = archiver.archive(&mut callback)?;
            Ok(file_count)
        })
        .map_err(|err| match err {
            atomicwrites::Error::Internal(err) => ArchiveCreateError::OutputArchiveIo(err),
            atomicwrites::Error::User(err) => err,
        })?;

    let elapsed = start_time.elapsed();

    callback(ArchiveEvent::Archived {
        file_count,
        output_file,
        elapsed,
    })
    .map_err(ArchiveCreateError::ReporterIo)?;

    Ok(())
}

struct Archiver<'a, W: Write> {
    binary_list: &'a BinaryList,
    cargo_metadata: &'a str,
    graph: &'a PackageGraph,
    path_mapper: &'a PathMapper,
    host_stdlib: Option<Utf8PathBuf>,
    target_stdlib: Option<Utf8PathBuf>,
    builder: tar::Builder<Encoder<'static, BufWriter<W>>>,
    unix_timestamp: u64,
    added_files: HashSet<Utf8PathBuf>,
    config: &'a ArchiveConfig,
    redactor: Redactor,
}

impl<'a, W: Write> Archiver<'a, W> {
    #[expect(clippy::too_many_arguments)]
    fn new(
        config: &'a ArchiveConfig,
        binary_list: &'a BinaryList,
        cargo_metadata: &'a str,
        graph: &'a PackageGraph,
        path_mapper: &'a PathMapper,
        host_stdlib: Option<Utf8PathBuf>,
        target_stdlib: Option<Utf8PathBuf>,
        format: ArchiveFormat,
        compression_level: i32,
        writer: W,
        redactor: Redactor,
    ) -> Result<Self, ArchiveCreateError> {
        let buf_writer = BufWriter::new(writer);
        let builder = match format {
            ArchiveFormat::TarZst => {
                let mut encoder = zstd::Encoder::new(buf_writer, compression_level)
                    .map_err(ArchiveCreateError::OutputArchiveIo)?;
                encoder
                    .include_checksum(true)
                    .map_err(ArchiveCreateError::OutputArchiveIo)?;
                if let Err(err) = encoder.multithread(get_num_cpus() as u32) {
                    tracing::warn!(
                        ?err,
                        "libzstd compiled without multithreading, defaulting to single-thread"
                    );
                }
                tar::Builder::new(encoder)
            }
        };

        let unix_timestamp = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .expect("current time should be after 1970-01-01")
            .as_secs();

        Ok(Self {
            binary_list,
            cargo_metadata,
            graph,
            path_mapper,
            host_stdlib,
            target_stdlib,
            builder,
            unix_timestamp,
            added_files: HashSet::new(),
            config,
            redactor,
        })
    }

    fn archive<F>(mut self, callback: &mut F) -> Result<(W, usize), ArchiveCreateError>
    where
        F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
    {
        // Add the binaries metadata first so that reports are available immediately while
        // unarchiving.
        let binaries_metadata = self
            .binary_list
            .to_string(OutputFormat::Serializable(SerializableFormat::JsonPretty))
            .map_err(ArchiveCreateError::CreateBinaryList)?;

        self.append_from_memory(BINARIES_METADATA_FILE_NAME, &binaries_metadata)?;

        self.append_from_memory(CARGO_METADATA_FILE_NAME, self.cargo_metadata)?;

        let target_dir = &self.binary_list.rust_build_meta.target_directory;

        fn filter_map_err<T>(result: io::Result<()>) -> Option<Result<T, ArchiveCreateError>> {
            match result {
                Ok(()) => None,
                Err(err) => Some(Err(ArchiveCreateError::ReporterIo(err))),
            }
        }

        // Check that all archive.include paths exist.
        let archive_include_paths = self
            .config
            .include
            .iter()
            .filter_map(|include| {
                let src_path = include.join_path(target_dir);
                let src_path = self.path_mapper.map_binary(src_path);

                match src_path.symlink_metadata() {
                    Ok(metadata) => {
                        if metadata.is_dir() {
                            if include.depth().is_zero() {
                                // A directory with depth 0 will not be archived, so warn on that.
                                filter_map_err(callback(ArchiveEvent::DirectoryAtDepthZero {
                                    path: &src_path,
                                }))
                            } else {
                                Some(Ok((include, src_path)))
                            }
                        } else if metadata.is_file() || metadata.is_symlink() {
                            Some(Ok((include, src_path)))
                        } else {
                            filter_map_err(callback(ArchiveEvent::UnknownFileType {
                                step: ArchiveStep::ExtraPaths,
                                path: &src_path,
                            }))
                        }
                    }
                    Err(error) => {
                        if error.kind() == io::ErrorKind::NotFound {
                            match include.on_missing() {
                                ArchiveIncludeOnMissing::Error => {
                                    // TODO: accumulate errors rather than failing on the first one
                                    Some(Err(ArchiveCreateError::MissingExtraPath {
                                        path: src_path.to_owned(),
                                        redactor: self.redactor.clone(),
                                    }))
                                }
                                ArchiveIncludeOnMissing::Warn => {
                                    filter_map_err(callback(ArchiveEvent::ExtraPathMissing {
                                        path: &src_path,
                                        warn: true,
                                    }))
                                }
                                ArchiveIncludeOnMissing::Ignore => {
                                    filter_map_err(callback(ArchiveEvent::ExtraPathMissing {
                                        path: &src_path,
                                        warn: false,
                                    }))
                                }
                            }
                        } else {
                            Some(Err(ArchiveCreateError::InputFileRead {
                                step: ArchiveStep::ExtraPaths,
                                path: src_path.to_owned(),
                                is_dir: None,
                                error,
                            }))
                        }
                    }
                }
            })
            .collect::<Result<Vec<_>, ArchiveCreateError>>()?;

        // Write all discovered binaries into the archive.
        for binary in &self.binary_list.rust_binaries {
            let rel_path = binary
                .path
                .strip_prefix(target_dir)
                .expect("binary paths must be within target directory");
            // The target directory might not be called "target", so strip all of it then add
            // "target" to the beginning.
            let rel_path = Utf8Path::new("target").join(rel_path);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::TestBinaries, &binary.path, &rel_path)?;
        }
        for non_test_binary in self
            .binary_list
            .rust_build_meta
            .non_test_binaries
            .iter()
            .flat_map(|(_, binaries)| binaries)
        {
            let src_path = self
                .binary_list
                .rust_build_meta
                .target_directory
                .join(&non_test_binary.path);
            let src_path = self.path_mapper.map_binary(src_path);

            let rel_path = Utf8Path::new("target").join(&non_test_binary.path);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::NonTestBinaries, &src_path, &rel_path)?;
        }

        // Write build script output directories to the archive.
        for build_script_out_dir in self
            .binary_list
            .rust_build_meta
            .build_script_out_dirs
            .values()
        {
            let src_path = self
                .binary_list
                .rust_build_meta
                .target_directory
                .join(build_script_out_dir);
            let src_path = self.path_mapper.map_binary(src_path);

            let rel_path = Utf8Path::new("target").join(build_script_out_dir);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            // XXX: For now, we only archive one level of build script output directories as a
            // conservative solution. If necessary, we may have to either broaden this by default or
            // add configuration for this. Archiving too much can cause unnecessary slowdowns.
            self.append_path_recursive(
                ArchiveStep::BuildScriptOutDirs,
                &src_path,
                &rel_path,
                RecursionDepth::Finite(1),
                false,
                callback,
            )?;

            // Also archive the build script's `output` file, so that environment variables can be
            // set from it when running tests.
            let Some(out_dir_parent) = build_script_out_dir.parent() else {
                warn!(
                    "could not determine parent directory of output directory {build_script_out_dir}"
                );
                continue;
            };
            let out_file_path = out_dir_parent.join("output");
            let src_path = self
                .binary_list
                .rust_build_meta
                .target_directory
                .join(&out_file_path);

            let rel_path = Utf8Path::new("target").join(out_file_path);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::BuildScriptOutDirs, &src_path, &rel_path)?;
        }

        // Write linked paths to the archive.
        for (linked_path, requested_by) in &self.binary_list.rust_build_meta.linked_paths {
            // Linked paths are relative, e.g. debug/foo/bar. We need to prepend the target
            // directory.
            let src_path = self
                .binary_list
                .rust_build_meta
                .target_directory
                .join(linked_path);
            let src_path = self.path_mapper.map_binary(src_path);

            // Some crates produce linked paths that don't exist. This is a bug in those libraries.
            if !src_path.exists() {
                // Map each requested_by to its package name and version.
                let mut requested_by: Vec<_> = requested_by
                    .iter()
                    .map(|package_id| {
                        self.graph
                            .metadata(&PackageId::new(package_id.clone()))
                            .map_or_else(
                                |_| {
                                    // If a package ID is not found in the graph, it's strange but not
                                    // fatal -- just use the ID.
                                    package_id.to_owned()
                                },
                                |metadata| format!("{} v{}", metadata.name(), metadata.version()),
                            )
                    })
                    .collect();
                requested_by.sort_unstable();

                callback(ArchiveEvent::LinkedPathNotFound {
                    path: &src_path,
                    requested_by: &requested_by,
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
                continue;
            }

            let rel_path = Utf8Path::new("target").join(linked_path);
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);
            // Since LD_LIBRARY_PATH etc aren't recursive, we only need to add the top-level files
            // from linked paths.
            self.append_path_recursive(
                ArchiveStep::LinkedPaths,
                &src_path,
                &rel_path,
                RecursionDepth::Finite(1),
                false,
                callback,
            )?;
        }

        // Also include extra paths.
        for (include, src_path) in archive_include_paths {
            let rel_path = include.join_path(Utf8Path::new("target"));
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            if src_path.exists() {
                self.append_path_recursive(
                    ArchiveStep::ExtraPaths,
                    &src_path,
                    &rel_path,
                    include.depth(),
                    // Warn if the implicit depth limit for these paths is in use.
                    true,
                    callback,
                )?;
            }
        }

        // Add the standard libraries to the archive if available.
        if let Some(host_stdlib) = self.host_stdlib.clone() {
            let rel_path = Utf8Path::new(LIBDIRS_BASE_DIR)
                .join("host")
                .join(host_stdlib.file_name().unwrap());
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::ExtraPaths, &host_stdlib, &rel_path)?;
        }
        if let Some(target_stdlib) = self.target_stdlib.clone() {
            // Use libdir/target/0 as the path to the target standard library, to support multiple
            // targets in the future.
            let rel_path = Utf8Path::new(LIBDIRS_BASE_DIR)
                .join("target/0")
                .join(target_stdlib.file_name().unwrap());
            let rel_path = convert_rel_path_to_forward_slash(&rel_path);

            self.append_file(ArchiveStep::ExtraPaths, &target_stdlib, &rel_path)?;
        }

        // Finish writing the archive.
        let encoder = self
            .builder
            .into_inner()
            .map_err(ArchiveCreateError::OutputArchiveIo)?;
        // Finish writing the zstd stream.
        let buf_writer = encoder
            .finish()
            .map_err(ArchiveCreateError::OutputArchiveIo)?;
        let writer = buf_writer
            .into_inner()
            .map_err(|err| ArchiveCreateError::OutputArchiveIo(err.into_error()))?;

        Ok((writer, self.added_files.len()))
    }

    // ---
    // Helper methods
    // ---

    fn append_from_memory(&mut self, name: &str, contents: &str) -> Result<(), ArchiveCreateError> {
        let mut header = tar::Header::new_gnu();
        header.set_size(contents.len() as u64);
        header.set_mtime(self.unix_timestamp);
        header.set_mode(0o664);
        header.set_cksum();

        self.builder
            .append_data(&mut header, name, io::Cursor::new(contents))
            .map_err(ArchiveCreateError::OutputArchiveIo)?;
        // We always prioritize appending files from memory over files on disk, so don't check
        // membership in added_files before adding the file to the archive.
        self.added_files.insert(name.into());
        Ok(())
    }

    fn append_path_recursive<F>(
        &mut self,
        step: ArchiveStep,
        src_path: &Utf8Path,
        rel_path: &Utf8Path,
        limit: RecursionDepth,
        warn_on_exceed_depth: bool,
        callback: &mut F,
    ) -> Result<(), ArchiveCreateError>
    where
        F: for<'b> FnMut(ArchiveEvent<'b>) -> io::Result<()>,
    {
        // Within the loop, the metadata will be part of the directory entry.
        let metadata =
            fs::symlink_metadata(src_path).map_err(|error| ArchiveCreateError::InputFileRead {
                step,
                path: src_path.to_owned(),
                is_dir: None,
                error,
            })?;

        // Use an explicit stack to avoid the unlikely but possible situation of a stack overflow.
        let mut stack = vec![(limit, src_path.to_owned(), rel_path.to_owned(), metadata)];

        while let Some((depth, src_path, rel_path, metadata)) = stack.pop() {
            trace!(
                target: "nextest-runner",
                "processing `{src_path}` with metadata {metadata:?} \
                 (depth: {depth})",
            );

            if metadata.is_dir() {
                // Check the recursion limit.
                if depth.is_zero() {
                    callback(ArchiveEvent::RecursionDepthExceeded {
                        step,
                        path: &src_path,
                        limit: limit.unwrap_finite(),
                        warn: warn_on_exceed_depth,
                    })
                    .map_err(ArchiveCreateError::ReporterIo)?;
                    continue;
                }

                // Iterate over this directory.
                debug!(
                    target: "nextest-runner",
                    "recursing into `{}`",
                    src_path
                );
                let entries = src_path.read_dir_utf8().map_err(|error| {
                    ArchiveCreateError::InputFileRead {
                        step,
                        path: src_path.to_owned(),
                        is_dir: Some(true),
                        error,
                    }
                })?;
                for entry in entries {
                    let entry = entry.map_err(|error| ArchiveCreateError::DirEntryRead {
                        path: src_path.to_owned(),
                        error,
                    })?;
                    let metadata =
                        entry
                            .metadata()
                            .map_err(|error| ArchiveCreateError::InputFileRead {
                                step,
                                path: entry.path().to_owned(),
                                is_dir: None,
                                error,
                            })?;
                    let entry_rel_path = rel_path_join(&rel_path, entry.file_name().as_ref());
                    stack.push((
                        depth.decrement(),
                        entry.into_path(),
                        entry_rel_path,
                        metadata,
                    ));
                }
            } else if metadata.is_file() || metadata.is_symlink() {
                self.append_file(step, &src_path, &rel_path)?;
            } else {
                // Don't archive other kinds of files.
                callback(ArchiveEvent::UnknownFileType {
                    step,
                    path: &src_path,
                })
                .map_err(ArchiveCreateError::ReporterIo)?;
            }
        }

        Ok(())
    }

    fn append_file(
        &mut self,
        step: ArchiveStep,
        src: &Utf8Path,
        dest: &Utf8Path,
    ) -> Result<(), ArchiveCreateError> {
        // Check added_files to ensure we aren't adding duplicate files.
        if !self.added_files.contains(dest) {
            debug!(
                target: "nextest-runner",
                "adding `{src}` to archive as `{dest}`",
            );
            self.builder
                .append_path_with_name(src, dest)
                .map_err(|error| ArchiveCreateError::InputFileRead {
                    step,
                    path: src.to_owned(),
                    is_dir: Some(false),
                    error,
                })?;
            self.added_files.insert(dest.into());
        }
        Ok(())
    }
}

fn find_std(libdir: &Utf8Path) -> io::Result<Utf8PathBuf> {
    for path in libdir.read_dir_utf8()? {
        let path = path?;
        // As of Rust 1.78, std is of the form:
        //
        //   libstd-<hash>.so (non-macOS Unix)
        //   libstd-<hash>.dylib (macOS)
        //   std-<hash>.dll (Windows)
        let file_name = path.file_name();
        let is_unix = file_name.starts_with("libstd-")
            && (file_name.ends_with(".so") || file_name.ends_with(".dylib"));
        let is_windows = file_name.starts_with("std-") && file_name.ends_with(".dll");

        if is_unix || is_windows {
            return Ok(path.into_path());
        }
    }

    Err(io::Error::other(
        "could not find the Rust standard library in the libdir",
    ))
}

fn split_result<T, E>(result: Result<T, E>) -> (Option<T>, Option<E>) {
    match result {
        Ok(v) => (Some(v), None),
        Err(e) => (None, Some(e)),
    }
}

/// The part of the archive process that is currently in progress.
///
/// This is used for better warnings and errors.
#[derive(Clone, Copy, Debug)]
pub enum ArchiveStep {
    /// Test binaries are being archived.
    TestBinaries,

    /// Non-test binaries are being archived.
    NonTestBinaries,

    /// Build script output directories are being archived.
    BuildScriptOutDirs,

    /// Linked paths are being archived.
    LinkedPaths,

    /// Extra paths are being archived.
    ExtraPaths,

    /// The standard library is being archived.
    Stdlib,
}

impl fmt::Display for ArchiveStep {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::TestBinaries => write!(f, "test binaries"),
            Self::NonTestBinaries => write!(f, "non-test binaries"),
            Self::BuildScriptOutDirs => write!(f, "build script output directories"),
            Self::LinkedPaths => write!(f, "linked paths"),
            Self::ExtraPaths => write!(f, "extra paths"),
            Self::Stdlib => write!(f, "standard library"),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_archive_format_autodetect() {
        assert_eq!(
            ArchiveFormat::autodetect("foo.tar.zst".as_ref()).unwrap(),
            ArchiveFormat::TarZst,
        );
        assert_eq!(
            ArchiveFormat::autodetect("foo/bar.tar.zst".as_ref()).unwrap(),
            ArchiveFormat::TarZst,
        );
        ArchiveFormat::autodetect("foo".as_ref()).unwrap_err();
        ArchiveFormat::autodetect("/".as_ref()).unwrap_err();
    }
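
    // A minimal sketch of an additional check: every extension listed in
    // `SUPPORTED_FORMATS` should be accepted by `autodetect`. The "foo" file stem
    // is arbitrary.
    #[test]
    fn test_supported_formats_autodetect() {
        for (extension, format) in ArchiveFormat::SUPPORTED_FORMATS {
            let file_name = format!("foo{extension}");
            assert_eq!(
                ArchiveFormat::autodetect(file_name.as_ref()).unwrap(),
                *format,
            );
        }
    }

    // A small sketch exercising the `split_result` helper used above to separate
    // the stdlib lookup's success and error cases.
    #[test]
    fn test_split_result() {
        let ok: Result<u32, String> = Ok(42);
        assert_eq!(split_result(ok), (Some(42), None));

        let err: Result<u32, String> = Err("not found".to_owned());
        assert_eq!(split_result(err), (None, Some("not found".to_owned())));
    }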
}