// nextest_runner/config/overrides/imp.rs

1// Copyright (c) The nextest Contributors
2// SPDX-License-Identifier: MIT OR Apache-2.0
3
4use crate::{
5    config::{
6        core::{
7            EvaluatableProfile, FinalConfig, NextestConfig, NextestConfigImpl, PreBuildPlatform,
8        },
9        elements::{
10            FlakyResult, JunitFlakyFailStatus, LeakTimeout, RetryPolicy, SlowTimeout, TestGroup,
11            TestPriority, ThreadsRequired,
12        },
13        scripts::{
14            CompiledProfileScripts, DeserializedProfileScriptConfig, ScriptId, WrapperScriptConfig,
15        },
16    },
17    errors::{
18        ConfigCompileError, ConfigCompileErrorKind, ConfigCompileSection, ConfigParseErrorKind,
19    },
20    platform::BuildPlatforms,
21    reporter::TestOutputDisplay,
22    run_mode::NextestRunMode,
23};
24use guppy::graph::cargo::BuildPlatform;
25use nextest_filtering::{
26    BinaryQuery, CompiledExpr, Filterset, FiltersetKind, ParseContext, TestQuery,
27};
28use owo_colors::{OwoColorize, Style};
29use serde::{Deserialize, Deserializer};
30use smol_str::SmolStr;
31use std::collections::HashMap;
32use target_spec::{Platform, TargetSpec};
33
/// Settings for a test binary.
///
/// Currently this tracks only the optional list-time wrapper script. The
/// `Source` parameter records where the setting came from; it defaults to
/// `()`, which tracks nothing.
#[derive(Clone, Debug)]
pub struct ListSettings<'p, Source = ()> {
    // The wrapper script to run list commands under (if any), paired with
    // the source it was derived from.
    list_wrapper: Option<(&'p WrapperScriptConfig, Source)>,
}
39
40impl<'p, Source: Copy> ListSettings<'p, Source> {
41    pub(in crate::config) fn new(
42        profile: &'p EvaluatableProfile<'_>,
43        query: &BinaryQuery<'_>,
44    ) -> Self
45    where
46        Source: TrackSource<'p>,
47    {
48        let ecx = profile.filterset_ecx();
49
50        let mut list_wrapper = None;
51
52        for override_ in &profile.compiled_data.scripts {
53            if let Some(wrapper) = &override_.list_wrapper
54                && list_wrapper.is_none()
55            {
56                let (wrapper, source) =
57                    map_wrapper_script(profile, Source::track_script(wrapper.clone(), override_));
58
59                if !override_
60                    .is_enabled_binary(query, &ecx)
61                    .expect("test() in list-time scripts should have been rejected")
62                {
63                    continue;
64                }
65
66                list_wrapper = Some((wrapper, source));
67            }
68        }
69
70        Self { list_wrapper }
71    }
72}
73
74impl<'p> ListSettings<'p> {
75    /// Returns a default list-settings without a wrapper script.
76    ///
77    /// Debug command used for testing.
78    pub fn debug_empty() -> Self {
79        Self { list_wrapper: None }
80    }
81
82    /// Sets the wrapper to use for list-time scripts.
83    ///
84    /// Debug command used for testing.
85    pub fn debug_set_list_wrapper(&mut self, wrapper: &'p WrapperScriptConfig) -> &mut Self {
86        self.list_wrapper = Some((wrapper, ()));
87        self
88    }
89
90    /// Returns the list-time wrapper script.
91    pub fn list_wrapper(&self) -> Option<&'p WrapperScriptConfig> {
92        self.list_wrapper.as_ref().map(|(wrapper, _)| *wrapper)
93    }
94}
95
/// Settings for individual tests.
///
/// Returned by [`EvaluatableProfile::settings_for`].
///
/// The `Source` parameter tracks an optional source; this isn't used by any public APIs at the
/// moment.
#[derive(Clone, Debug)]
pub struct TestSettings<'p, Source = ()> {
    // Each field stores the resolved value along with the `Source` it was
    // derived from (default, profile, override, or script).
    priority: (TestPriority, Source),
    threads_required: (ThreadsRequired, Source),
    // Wrapper script to run the test under, if any.
    run_wrapper: Option<(&'p WrapperScriptConfig, Source)>,
    // Extra command-line arguments passed to the test binary at run time.
    run_extra_args: (&'p [String], Source),
    retries: (RetryPolicy, Source),
    flaky_result: (FlakyResult, Source),
    slow_timeout: (SlowTimeout, Source),
    leak_timeout: (LeakTimeout, Source),
    test_group: (TestGroup, Source),
    success_output: (TestOutputDisplay, Source),
    failure_output: (TestOutputDisplay, Source),
    // JUnit-related settings.
    junit_store_success_output: (bool, Source),
    junit_store_failure_output: (bool, Source),
    junit_flaky_fail_status: (JunitFlakyFailStatus, Source),
}
119
/// Describes how to attach provenance ("where did this setting come from?")
/// to a configuration value.
///
/// Implemented by `()` (tracks nothing) and by [`SettingSource`] (tracks the
/// full source).
pub(crate) trait TrackSource<'p>: Sized {
    /// Pairs a value with a "built-in default" source.
    fn track_default<T>(value: T) -> (T, Self);
    /// Pairs a value with a "set at the profile level" source.
    fn track_profile<T>(value: T) -> (T, Self);
    /// Pairs a value with the override it came from.
    fn track_override<T>(value: T, source: &'p CompiledOverride<FinalConfig>) -> (T, Self);
    /// Pairs a value with the script configuration it came from.
    fn track_script<T>(value: T, source: &'p CompiledProfileScripts<FinalConfig>) -> (T, Self);
}
126
127impl<'p> TrackSource<'p> for () {
128    fn track_default<T>(value: T) -> (T, Self) {
129        (value, ())
130    }
131
132    fn track_profile<T>(value: T) -> (T, Self) {
133        (value, ())
134    }
135
136    fn track_override<T>(value: T, _source: &'p CompiledOverride<FinalConfig>) -> (T, Self) {
137        (value, ())
138    }
139
140    fn track_script<T>(value: T, _source: &'p CompiledProfileScripts<FinalConfig>) -> (T, Self) {
141        (value, ())
142    }
143}
144
/// The source a resolved setting came from, used by [`TrackSource`] to record
/// provenance.
#[derive(Copy, Clone, Debug)]
pub(crate) enum SettingSource<'p> {
    /// A default configuration not specified in, or possible to override from,
    /// a profile.
    Default,

    /// A configuration specified in a profile.
    Profile,

    /// An override specified in a profile.
    Override(&'p CompiledOverride<FinalConfig>),

    /// An override specified in the `scripts` section.
    #[expect(dead_code)]
    Script(&'p CompiledProfileScripts<FinalConfig>),
}
161
162impl<'p> TrackSource<'p> for SettingSource<'p> {
163    fn track_default<T>(value: T) -> (T, Self) {
164        (value, SettingSource::Default)
165    }
166
167    fn track_profile<T>(value: T) -> (T, Self) {
168        (value, SettingSource::Profile)
169    }
170
171    fn track_override<T>(value: T, source: &'p CompiledOverride<FinalConfig>) -> (T, Self) {
172        (value, SettingSource::Override(source))
173    }
174
175    fn track_script<T>(value: T, source: &'p CompiledProfileScripts<FinalConfig>) -> (T, Self) {
176        (value, SettingSource::Script(source))
177    }
178}
179
180impl<'p> TestSettings<'p> {
181    /// Returns the test's priority.
182    pub fn priority(&self) -> TestPriority {
183        self.priority.0
184    }
185
186    /// Returns the number of threads required for this test.
187    pub fn threads_required(&self) -> ThreadsRequired {
188        self.threads_required.0
189    }
190
191    /// Returns the run-time wrapper script for this test.
192    pub fn run_wrapper(&self) -> Option<&'p WrapperScriptConfig> {
193        self.run_wrapper.map(|(script, _)| script)
194    }
195
196    /// Returns extra arguments to pass at runtime for this test.
197    pub fn run_extra_args(&self) -> &'p [String] {
198        self.run_extra_args.0
199    }
200
201    /// Returns the number of retries for this test.
202    pub fn retries(&self) -> RetryPolicy {
203        self.retries.0
204    }
205
206    /// Returns the flaky result behavior for this test.
207    pub fn flaky_result(&self) -> FlakyResult {
208        self.flaky_result.0
209    }
210
211    /// Returns the slow timeout for this test.
212    pub fn slow_timeout(&self) -> SlowTimeout {
213        self.slow_timeout.0
214    }
215
216    /// Returns the leak timeout for this test.
217    pub fn leak_timeout(&self) -> LeakTimeout {
218        self.leak_timeout.0
219    }
220
221    /// Returns the test group for this test.
222    pub fn test_group(&self) -> &TestGroup {
223        &self.test_group.0
224    }
225
226    /// Returns the success output setting for this test.
227    pub fn success_output(&self) -> TestOutputDisplay {
228        self.success_output.0
229    }
230
231    /// Returns the failure output setting for this test.
232    pub fn failure_output(&self) -> TestOutputDisplay {
233        self.failure_output.0
234    }
235
236    /// Returns whether success output should be stored in JUnit.
237    pub fn junit_store_success_output(&self) -> bool {
238        self.junit_store_success_output.0
239    }
240
241    /// Returns whether failure output should be stored in JUnit.
242    pub fn junit_store_failure_output(&self) -> bool {
243        self.junit_store_failure_output.0
244    }
245
246    /// Returns the JUnit flaky-fail status for this test.
247    pub fn junit_flaky_fail_status(&self) -> JunitFlakyFailStatus {
248        self.junit_flaky_fail_status.0
249    }
250}
251
#[expect(dead_code)]
impl<'p, Source: Copy> TestSettings<'p, Source> {
    /// Computes the effective settings for one test by layering overrides on
    /// top of profile defaults.
    ///
    /// Overrides are scanned in order and, per setting, the *first* matching
    /// override wins (each slot is only filled while it is still `None`).
    /// Anything left unset after the scan falls back to the profile-level or
    /// built-in default value.
    pub(in crate::config) fn new(
        profile: &'p EvaluatableProfile<'_>,
        run_mode: NextestRunMode,
        query: &TestQuery<'_>,
    ) -> Self
    where
        Source: TrackSource<'p>,
    {
        let ecx = profile.filterset_ecx();

        // One slot per setting; `None` until the first matching override
        // fills it.
        let mut priority = None;
        let mut threads_required = None;
        let mut run_wrapper = None;
        let mut run_extra_args = None;
        let mut retries = None;
        let mut flaky_result = None;
        let mut slow_timeout = None;
        let mut leak_timeout = None;
        let mut test_group = None;
        let mut success_output = None;
        let mut failure_output = None;
        let mut junit_store_success_output = None;
        let mut junit_store_failure_output = None;
        let mut junit_flaky_fail_status = None;

        for override_ in &profile.compiled_data.overrides {
            // Skip overrides whose platform specs don't match: host_eval must
            // hold, plus the host-test or target eval depending on which
            // platform this test binary is built for.
            if !override_.state.host_eval {
                continue;
            }
            if query.binary_query.platform == BuildPlatform::Host && !override_.state.host_test_eval
            {
                continue;
            }
            if query.binary_query.platform == BuildPlatform::Target && !override_.state.target_eval
            {
                continue;
            }

            if let Some(expr) = &override_.filter()
                && !expr.matches_test(query, &ecx)
            {
                continue;
            }
            // If no expression is present, it's equivalent to "all()".

            if priority.is_none()
                && let Some(p) = override_.data.priority
            {
                priority = Some(Source::track_override(p, override_));
            }
            if threads_required.is_none()
                && let Some(t) = override_.data.threads_required
            {
                threads_required = Some(Source::track_override(t, override_));
            }
            if run_extra_args.is_none()
                && let Some(r) = override_.data.run_extra_args.as_deref()
            {
                run_extra_args = Some(Source::track_override(r, override_));
            }
            if retries.is_none()
                && let Some(r) = override_.data.retries
            {
                retries = Some(Source::track_override(r, override_));
            }
            if flaky_result.is_none()
                && let Some(fr) = override_.data.flaky_result
            {
                flaky_result = Some(Source::track_override(fr, override_));
            }
            if slow_timeout.is_none() {
                // Use the appropriate slow timeout based on run mode. Note that
                // there's no fallback from bench to test timeout.
                let timeout_for_mode = match run_mode {
                    NextestRunMode::Test => override_.data.slow_timeout,
                    NextestRunMode::Benchmark => override_.data.bench_slow_timeout,
                };
                if let Some(s) = timeout_for_mode {
                    slow_timeout = Some(Source::track_override(s, override_));
                }
            }
            if leak_timeout.is_none()
                && let Some(l) = override_.data.leak_timeout
            {
                leak_timeout = Some(Source::track_override(l, override_));
            }
            if test_group.is_none()
                && let Some(t) = &override_.data.test_group
            {
                test_group = Some(Source::track_override(t.clone(), override_));
            }
            if success_output.is_none()
                && let Some(s) = override_.data.success_output
            {
                success_output = Some(Source::track_override(s, override_));
            }
            if failure_output.is_none()
                && let Some(f) = override_.data.failure_output
            {
                failure_output = Some(Source::track_override(f, override_));
            }
            if junit_store_success_output.is_none()
                && let Some(s) = override_.data.junit.store_success_output
            {
                junit_store_success_output = Some(Source::track_override(s, override_));
            }
            if junit_store_failure_output.is_none()
                && let Some(f) = override_.data.junit.store_failure_output
            {
                junit_store_failure_output = Some(Source::track_override(f, override_));
            }
            if junit_flaky_fail_status.is_none()
                && let Some(s) = override_.data.junit.flaky_fail_status
            {
                junit_flaky_fail_status = Some(Source::track_override(s, override_));
            }
        }

        // The run wrapper comes from the scripts section rather than
        // overrides; again, the first enabled script with a run_wrapper wins.
        for override_ in &profile.compiled_data.scripts {
            if !override_.is_enabled(query, &ecx) {
                continue;
            }

            if run_wrapper.is_none()
                && let Some(wrapper) = &override_.run_wrapper
            {
                run_wrapper = Some(Source::track_script(wrapper.clone(), override_));
            }
        }

        // If no overrides were found, use the profile defaults.
        let priority = priority.unwrap_or_else(|| Source::track_default(TestPriority::default()));
        let threads_required =
            threads_required.unwrap_or_else(|| Source::track_profile(profile.threads_required()));
        // Resolve the script ID to its wrapper config; no profile default here
        // since a missing wrapper simply means "no wrapper".
        let run_wrapper = run_wrapper.map(|wrapper| map_wrapper_script(profile, wrapper));
        let run_extra_args =
            run_extra_args.unwrap_or_else(|| Source::track_profile(profile.run_extra_args()));
        let retries = retries.unwrap_or_else(|| Source::track_profile(profile.retries()));
        let flaky_result =
            flaky_result.unwrap_or_else(|| Source::track_profile(profile.flaky_result()));
        let slow_timeout =
            slow_timeout.unwrap_or_else(|| Source::track_profile(profile.slow_timeout(run_mode)));
        let leak_timeout =
            leak_timeout.unwrap_or_else(|| Source::track_profile(profile.leak_timeout()));
        let test_group = test_group.unwrap_or_else(|| Source::track_profile(TestGroup::Global));
        let success_output =
            success_output.unwrap_or_else(|| Source::track_profile(profile.success_output()));
        let failure_output =
            failure_output.unwrap_or_else(|| Source::track_profile(profile.failure_output()));
        let junit_store_success_output = junit_store_success_output.unwrap_or_else(|| {
            // If the profile doesn't have JUnit enabled, success output can just be false.
            Source::track_profile(profile.junit().is_some_and(|j| j.store_success_output()))
        });
        let junit_store_failure_output = junit_store_failure_output.unwrap_or_else(|| {
            // If the profile doesn't have JUnit enabled, failure output can just be false.
            Source::track_profile(profile.junit().is_some_and(|j| j.store_failure_output()))
        });
        let junit_flaky_fail_status = junit_flaky_fail_status.unwrap_or_else(|| {
            Source::track_profile(
                profile
                    .junit()
                    .map_or(JunitFlakyFailStatus::default(), |j| j.flaky_fail_status()),
            )
        });

        TestSettings {
            threads_required,
            run_extra_args,
            run_wrapper,
            retries,
            flaky_result,
            priority,
            slow_timeout,
            leak_timeout,
            test_group,
            success_output,
            failure_output,
            junit_store_success_output,
            junit_store_failure_output,
            junit_flaky_fail_status,
        }
    }

    /// Returns the number of threads required for this test, with the source attached.
    pub(crate) fn threads_required_with_source(&self) -> (ThreadsRequired, Source) {
        self.threads_required
    }

    /// Returns the number of retries for this test, with the source attached.
    pub(crate) fn retries_with_source(&self) -> (RetryPolicy, Source) {
        self.retries
    }

    /// Returns the slow timeout for this test, with the source attached.
    pub(crate) fn slow_timeout_with_source(&self) -> (SlowTimeout, Source) {
        self.slow_timeout
    }

    /// Returns the leak timeout for this test, with the source attached.
    pub(crate) fn leak_timeout_with_source(&self) -> (LeakTimeout, Source) {
        self.leak_timeout
    }

    /// Returns the test group for this test, with the source attached.
    pub(crate) fn test_group_with_source(&self) -> &(TestGroup, Source) {
        &self.test_group
    }
}
462
463fn map_wrapper_script<'p, Source>(
464    profile: &'p EvaluatableProfile<'_>,
465    (script, source): (ScriptId, Source),
466) -> (&'p WrapperScriptConfig, Source)
467where
468    Source: TrackSource<'p>,
469{
470    let wrapper_config = profile
471        .script_config()
472        .wrapper
473        .get(&script)
474        .unwrap_or_else(|| {
475            panic!(
476                "wrapper script {script} not found \
477                 (should have been checked while reading config)"
478            )
479        });
480    (wrapper_config, source)
481}
482
/// Compiled override and script data for every profile in the config.
#[derive(Clone, Debug)]
pub(in crate::config) struct CompiledByProfile {
    // Compiled data for the "default" profile.
    pub(in crate::config) default: CompiledData<PreBuildPlatform>,
    // Compiled data for every other profile, keyed by profile name.
    pub(in crate::config) other: HashMap<String, CompiledData<PreBuildPlatform>>,
}
488
489impl CompiledByProfile {
490    pub(in crate::config) fn new(
491        pcx: &ParseContext<'_>,
492        config: &NextestConfigImpl,
493    ) -> Result<Self, ConfigParseErrorKind> {
494        let mut errors = vec![];
495        let default = CompiledData::new(
496            pcx,
497            "default",
498            Some(config.default_profile().default_filter()),
499            config.default_profile().overrides(),
500            config.default_profile().setup_scripts(),
501            &mut errors,
502        );
503        let other: HashMap<_, _> = config
504            .other_profiles()
505            .map(|(profile_name, profile)| {
506                (
507                    profile_name.to_owned(),
508                    CompiledData::new(
509                        pcx,
510                        profile_name,
511                        profile.default_filter(),
512                        profile.overrides(),
513                        profile.scripts(),
514                        &mut errors,
515                    ),
516                )
517            })
518            .collect();
519
520        if errors.is_empty() {
521            Ok(Self { default, other })
522        } else {
523            Err(ConfigParseErrorKind::CompileErrors(errors))
524        }
525    }
526
527    /// Returns the compiled data for the default config.
528    ///
529    /// The default config does not depend on the package graph, so we create it separately here.
530    /// But we don't implement `Default` to make sure that the value is for the default _config_,
531    /// not the default _profile_ (which repo config can customize).
532    pub(in crate::config) fn for_default_config() -> Self {
533        Self {
534            default: CompiledData {
535                profile_default_filter: Some(CompiledDefaultFilter::for_default_config()),
536                overrides: vec![],
537                scripts: vec![],
538            },
539            other: HashMap::new(),
540        }
541    }
542}
543
/// A compiled form of the default filter for a profile.
///
/// Returned by [`EvaluatableProfile::default_filter`].
#[derive(Clone, Debug)]
pub struct CompiledDefaultFilter {
    /// The compiled expression.
    ///
    /// This is a bit tricky -- in some cases, the default config is constructed without a
    /// `PackageGraph` being available. But parsing filtersets requires a `PackageGraph`. So we hack
    /// around it by only storing the compiled expression here, and by setting it to `all()` (which
    /// matches the config).
    ///
    /// This does make the default-filter defined in default-config.toml a bit
    /// of a lie (since we don't use it directly, but instead replicate it in
    /// code). But it's not too bad.
    pub expr: CompiledExpr,

    /// The profile name the default filter originates from.
    pub profile: String,

    /// The section of the config that the default filter comes from.
    ///
    /// Used by [`Self::display_config`] to describe the origin to users.
    pub section: CompiledDefaultFilterSection,
}
567
568impl CompiledDefaultFilter {
569    pub(crate) fn for_default_config() -> Self {
570        Self {
571            expr: CompiledExpr::ALL,
572            profile: NextestConfig::DEFAULT_PROFILE.to_owned(),
573            section: CompiledDefaultFilterSection::Profile,
574        }
575    }
576
577    /// Displays a configuration string for the default filter.
578    pub fn display_config(&self, bold_style: Style) -> String {
579        match &self.section {
580            CompiledDefaultFilterSection::Profile => {
581                format!("profile.{}.default-filter", self.profile)
582                    .style(bold_style)
583                    .to_string()
584            }
585            CompiledDefaultFilterSection::Override(_) => {
586                format!(
587                    "default-filter in {}",
588                    format!("profile.{}.overrides", self.profile).style(bold_style)
589                )
590            }
591        }
592    }
593}
594
/// Within [`CompiledDefaultFilter`], the part of the config that the default
/// filter comes from.
#[derive(Clone, Copy, Debug)]
pub enum CompiledDefaultFilterSection {
    /// The config comes from the top-level `profile.<profile-name>.default-filter`.
    Profile,

    /// The config comes from the override at the given index.
    Override(usize),
}
605
/// Compiled per-profile data: the profile-level default filter plus compiled
/// overrides and scripts, parameterized by platform state.
#[derive(Clone, Debug)]
pub(in crate::config) struct CompiledData<State> {
    // The default filter specified at the profile level.
    //
    // Overrides might also specify their own filters, and in that case the
    // overrides take priority.
    pub(in crate::config) profile_default_filter: Option<CompiledDefaultFilter>,
    // Compiled overrides, in evaluation (priority) order.
    pub(in crate::config) overrides: Vec<CompiledOverride<State>>,
    // Compiled script configurations, in evaluation (priority) order.
    pub(in crate::config) scripts: Vec<CompiledProfileScripts<State>>,
}
616
impl CompiledData<PreBuildPlatform> {
    /// Compiles one profile's deserialized data: parses the profile-level
    /// default filter (if any) plus each override and script entry.
    ///
    /// Parse failures are pushed onto `errors` and the failing entry is
    /// dropped, so a `Self` is always produced; the caller decides whether
    /// accumulated errors are fatal.
    fn new(
        pcx: &ParseContext<'_>,
        profile_name: &str,
        profile_default_filter: Option<&str>,
        overrides: &[DeserializedOverride],
        scripts: &[DeserializedProfileScriptConfig],
        errors: &mut Vec<ConfigCompileError>,
    ) -> Self {
        let profile_default_filter =
            profile_default_filter.and_then(|filter| {
                match Filterset::parse(filter.to_owned(), pcx, FiltersetKind::DefaultFilter) {
                    Ok(expr) => Some(CompiledDefaultFilter {
                        expr: expr.compiled,
                        profile: profile_name.to_owned(),
                        section: CompiledDefaultFilterSection::Profile,
                    }),
                    Err(err) => {
                        // Record the parse error and continue without a
                        // profile-level default filter.
                        errors.push(ConfigCompileError {
                            profile_name: profile_name.to_owned(),
                            section: ConfigCompileSection::DefaultFilter,
                            kind: ConfigCompileErrorKind::Parse {
                                host_parse_error: None,
                                target_parse_error: None,
                                filter_parse_errors: vec![err],
                            },
                        });
                        None
                    }
                }
            });

        // Entries that fail to compile return None (after pushing to
        // `errors`) and are filtered out here.
        let overrides = overrides
            .iter()
            .enumerate()
            .filter_map(|(index, source)| {
                CompiledOverride::new(pcx, profile_name, index, source, errors)
            })
            .collect();
        let scripts = scripts
            .iter()
            .enumerate()
            .filter_map(|(index, source)| {
                CompiledProfileScripts::new(pcx, profile_name, index, source, errors)
            })
            .collect();
        Self {
            profile_default_filter,
            overrides,
            scripts,
        }
    }

    /// Appends `other`'s entries in reverse order, for building a combined
    /// list that will itself be reversed afterwards (see [`Self::reverse`]).
    pub(in crate::config) fn extend_reverse(&mut self, other: Self) {
        // For the default filter, other wins (it is last, and after reversing, it will be first).
        if other.profile_default_filter.is_some() {
            self.profile_default_filter = other.profile_default_filter;
        }
        self.overrides.extend(other.overrides.into_iter().rev());
        self.scripts.extend(other.scripts.into_iter().rev());
    }

    /// Reverses the override and script lists in place.
    pub(in crate::config) fn reverse(&mut self) {
        self.overrides.reverse();
        self.scripts.reverse();
    }

    /// Chains this data with another set of data, treating `other` as lower-priority than `self`.
    pub(in crate::config) fn chain(self, other: Self) -> Self {
        // `self`'s default filter wins; `other`'s entries are appended after
        // `self`'s, so they are consulted later (lower priority).
        let profile_default_filter = self.profile_default_filter.or(other.profile_default_filter);
        let mut overrides = self.overrides;
        let mut scripts = self.scripts;
        overrides.extend(other.overrides);
        scripts.extend(other.scripts);
        Self {
            profile_default_filter,
            overrides,
            scripts,
        }
    }

    /// Evaluates platform specs against the actual build platforms, turning
    /// `PreBuildPlatform` data into `FinalConfig` data.
    pub(in crate::config) fn apply_build_platforms(
        self,
        build_platforms: &BuildPlatforms,
    ) -> CompiledData<FinalConfig> {
        let profile_default_filter = self.profile_default_filter;
        let overrides = self
            .overrides
            .into_iter()
            .map(|override_| override_.apply_build_platforms(build_platforms))
            .collect();
        let setup_scripts = self
            .scripts
            .into_iter()
            .map(|setup_script| setup_script.apply_build_platforms(build_platforms))
            .collect();
        CompiledData {
            profile_default_filter,
            overrides,
            scripts: setup_scripts,
        }
    }
}
720
/// A single compiled override entry from a profile.
///
/// `State` is `PreBuildPlatform` before build platforms are known and
/// `FinalConfig` after `apply_build_platforms` evaluates the platform specs.
#[derive(Clone, Debug)]
pub(crate) struct CompiledOverride<State> {
    // Profile name + index identifying this override.
    id: OverrideId,
    // Platform-evaluation state; see the `State` parameter above.
    state: State,
    pub(in crate::config) data: ProfileOverrideData,
}
727
728impl<State> CompiledOverride<State> {
729    pub(crate) fn id(&self) -> &OverrideId {
730        &self.id
731    }
732}
733
/// Uniquely identifies an override: the profile it was declared in plus its
/// position within that profile's override list.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub(crate) struct OverrideId {
    pub(crate) profile_name: SmolStr,
    // Zero-based position within the profile's list of overrides.
    index: usize,
}
739
/// The parsed settings carried by a single override entry.
///
/// Every setting is optional; `None` means "not specified by this override",
/// letting lower-priority overrides or the profile default apply.
#[derive(Clone, Debug)]
pub(in crate::config) struct ProfileOverrideData {
    // Platform specs constraining where this override applies.
    host_spec: MaybeTargetSpec,
    target_spec: MaybeTargetSpec,
    // At most one of a test filter or a default-filter (enforced at compile
    // time in `CompiledOverride::new`).
    filter: Option<FilterOrDefaultFilter>,
    priority: Option<TestPriority>,
    threads_required: Option<ThreadsRequired>,
    run_extra_args: Option<Vec<String>>,
    retries: Option<RetryPolicy>,
    flaky_result: Option<FlakyResult>,
    slow_timeout: Option<SlowTimeout>,
    // Separate slow timeout for benchmark mode; no fallback to `slow_timeout`.
    bench_slow_timeout: Option<SlowTimeout>,
    leak_timeout: Option<LeakTimeout>,
    pub(in crate::config) test_group: Option<TestGroup>,
    success_output: Option<TestOutputDisplay>,
    failure_output: Option<TestOutputDisplay>,
    junit: DeserializedJunitOutput,
}
758
759impl CompiledOverride<PreBuildPlatform> {
    /// Compiles one deserialized override entry.
    ///
    /// Returns `None` (after pushing onto `errors`) if the entry is invalid:
    /// no constraint at all, both `filter` and `default-filter` specified, or
    /// any platform-spec/filterset parse failure.
    fn new(
        pcx: &ParseContext<'_>,
        profile_name: &str,
        index: usize,
        source: &DeserializedOverride,
        errors: &mut Vec<ConfigCompileError>,
    ) -> Option<Self> {
        // An override with no host spec, no target spec, and no filter would
        // unconditionally apply everywhere -- reject it as unconstrained.
        if source.platform.host.is_none()
            && source.platform.target.is_none()
            && source.filter.is_none()
        {
            errors.push(ConfigCompileError {
                profile_name: profile_name.to_owned(),
                section: ConfigCompileSection::Override(index),
                kind: ConfigCompileErrorKind::ConstraintsNotSpecified {
                    default_filter_specified: source.default_filter.is_some(),
                },
            });
            return None;
        }

        // Parse all four fallible pieces up front so that *all* of their
        // errors can be reported together below, rather than stopping at the
        // first failure.
        let host_spec = MaybeTargetSpec::new(source.platform.host.as_deref());
        let target_spec = MaybeTargetSpec::new(source.platform.target.as_deref());
        let filter = source.filter.as_ref().map_or(Ok(None), |filter| {
            Some(Filterset::parse(filter.clone(), pcx, FiltersetKind::Test)).transpose()
        });
        let default_filter = source.default_filter.as_ref().map_or(Ok(None), |filter| {
            Some(Filterset::parse(
                filter.clone(),
                pcx,
                FiltersetKind::DefaultFilter,
            ))
            .transpose()
        });

        match (host_spec, target_spec, filter, default_filter) {
            (Ok(host_spec), Ok(target_spec), Ok(filter), Ok(default_filter)) => {
                // At most one of filter and default-filter can be specified.
                let filter = match (filter, default_filter) {
                    (Some(_), Some(_)) => {
                        errors.push(ConfigCompileError {
                            profile_name: profile_name.to_owned(),
                            section: ConfigCompileSection::Override(index),
                            kind: ConfigCompileErrorKind::FilterAndDefaultFilterSpecified,
                        });
                        return None;
                    }
                    (Some(filter), None) => Some(FilterOrDefaultFilter::Filter(filter)),
                    (None, Some(default_filter)) => {
                        let compiled = CompiledDefaultFilter {
                            expr: default_filter.compiled,
                            profile: profile_name.to_owned(),
                            section: CompiledDefaultFilterSection::Override(index),
                        };
                        Some(FilterOrDefaultFilter::DefaultFilter(compiled))
                    }
                    (None, None) => None,
                };

                Some(Self {
                    id: OverrideId {
                        profile_name: profile_name.into(),
                        index,
                    },
                    state: PreBuildPlatform {},
                    data: ProfileOverrideData {
                        host_spec,
                        target_spec,
                        filter,
                        priority: source.priority,
                        threads_required: source.threads_required,
                        run_extra_args: source.run_extra_args.clone(),
                        retries: source.retries,
                        flaky_result: source.flaky_result,
                        slow_timeout: source.slow_timeout,
                        bench_slow_timeout: source.bench.slow_timeout,
                        leak_timeout: source.leak_timeout,
                        test_group: source.test_group.clone(),
                        success_output: source.success_output,
                        failure_output: source.failure_output,
                        junit: source.junit,
                    },
                })
            }
            // At least one piece failed to parse: gather every error into a
            // single combined Parse error for this override section.
            (maybe_host_err, maybe_target_err, maybe_filter_err, maybe_default_filter_err) => {
                let host_parse_error = maybe_host_err.err();
                let target_parse_error = maybe_target_err.err();
                let filter_parse_errors = maybe_filter_err
                    .err()
                    .into_iter()
                    .chain(maybe_default_filter_err.err())
                    .collect();

                errors.push(ConfigCompileError {
                    profile_name: profile_name.to_owned(),
                    section: ConfigCompileSection::Override(index),
                    kind: ConfigCompileErrorKind::Parse {
                        host_parse_error,
                        target_parse_error,
                        filter_parse_errors,
                    },
                });
                None
            }
        }
    }
866
867    pub(in crate::config) fn apply_build_platforms(
868        self,
869        build_platforms: &BuildPlatforms,
870    ) -> CompiledOverride<FinalConfig> {
871        let host_eval = self.data.host_spec.eval(&build_platforms.host.platform);
872        let host_test_eval = self.data.target_spec.eval(&build_platforms.host.platform);
873        let target_eval = build_platforms
874            .target
875            .as_ref()
876            .map_or(host_test_eval, |target| {
877                self.data.target_spec.eval(&target.triple.platform)
878            });
879
880        CompiledOverride {
881            id: self.id,
882            state: FinalConfig {
883                host_eval,
884                host_test_eval,
885                target_eval,
886            },
887            data: self.data,
888        }
889    }
890}
891
892impl CompiledOverride<FinalConfig> {
893    /// Returns the target spec.
894    pub(crate) fn target_spec(&self) -> &MaybeTargetSpec {
895        &self.data.target_spec
896    }
897
898    /// Returns the filter to apply to overrides, if any.
899    pub(crate) fn filter(&self) -> Option<&Filterset> {
900        match self.data.filter.as_ref() {
901            Some(FilterOrDefaultFilter::Filter(filter)) => Some(filter),
902            _ => None,
903        }
904    }
905
906    /// Returns the default filter if it matches the platform.
907    pub(crate) fn default_filter_if_matches_platform(&self) -> Option<&CompiledDefaultFilter> {
908        match self.data.filter.as_ref() {
909            Some(FilterOrDefaultFilter::DefaultFilter(filter)) => {
910                // Which kind of evaluation to assume: matching the *target*
911                // filter against the *target* platform (host_eval +
912                // target_eval), or matching the *target* filter against the
913                // *host* platform (host_eval + host_test_eval)? The former
914                // makes much more sense, since in a cross-compile scenario you
915                // want to match a (host, target) pair.
916                (self.state.host_eval && self.state.target_eval).then_some(filter)
917            }
918            _ => None,
919        }
920    }
921}
922
/// Represents a [`TargetSpec`] that might have been provided.
#[derive(Clone, Debug, Default)]
pub(crate) enum MaybeTargetSpec {
    /// A platform spec was provided and parsed.
    Provided(TargetSpec),
    /// No platform spec was provided: matches every platform.
    #[default]
    Any,
}
930
931impl MaybeTargetSpec {
932    pub(in crate::config) fn new(platform_str: Option<&str>) -> Result<Self, target_spec::Error> {
933        Ok(match platform_str {
934            Some(platform_str) => {
935                MaybeTargetSpec::Provided(TargetSpec::new(platform_str.to_owned())?)
936            }
937            None => MaybeTargetSpec::Any,
938        })
939    }
940
941    pub(in crate::config) fn eval(&self, platform: &Platform) -> bool {
942        match self {
943            MaybeTargetSpec::Provided(spec) => spec
944                .eval(platform)
945                .unwrap_or(/* unknown results are mapped to true */ true),
946            MaybeTargetSpec::Any => true,
947        }
948    }
949}
950
/// Either a filter override or a default filter specified for a platform.
///
/// At most one of these can be specified.
#[derive(Clone, Debug)]
pub(crate) enum FilterOrDefaultFilter {
    /// A `filter` expression specified on the override.
    Filter(Filterset),
    /// A `default-filter` expression specified on the override.
    DefaultFilter(CompiledDefaultFilter),
}
959
/// Deserialized form of profile overrides before compilation.
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub(in crate::config) struct DeserializedOverride {
    /// The host and/or target platforms to match against.
    #[serde(default)]
    platform: PlatformStrings,
    /// The filterset to match against.
    #[serde(default)]
    filter: Option<String>,
    /// Overrides. (This used to use serde(flatten) but that has issues:
    /// https://github.com/serde-rs/serde/issues/2312.)
    #[serde(default)]
    priority: Option<TestPriority>,
    /// The default filter. At most one of `filter` and `default-filter` may
    /// be specified (enforced at compile time above).
    #[serde(default)]
    default_filter: Option<String>,
    /// Number of threads required by matching tests.
    #[serde(default)]
    threads_required: Option<ThreadsRequired>,
    /// Extra arguments to pass when running matching tests.
    #[serde(default)]
    run_extra_args: Option<Vec<String>>,
    /// Retry policy for this override.
    #[serde(
        default,
        deserialize_with = "crate::config::elements::deserialize_retry_policy"
    )]
    retries: Option<RetryPolicy>,
    /// How to treat results that pass only after retries.
    #[serde(default)]
    flaky_result: Option<FlakyResult>,
    /// Slow timeout for matching tests.
    #[serde(
        default,
        deserialize_with = "crate::config::elements::deserialize_slow_timeout"
    )]
    slow_timeout: Option<SlowTimeout>,
    /// Leak timeout for matching tests.
    #[serde(
        default,
        deserialize_with = "crate::config::elements::deserialize_leak_timeout"
    )]
    leak_timeout: Option<LeakTimeout>,
    /// Test group assigned to matching tests.
    #[serde(default)]
    test_group: Option<TestGroup>,
    /// When to display output for passing tests.
    #[serde(default)]
    success_output: Option<TestOutputDisplay>,
    /// When to display output for failing tests.
    #[serde(default)]
    failure_output: Option<TestOutputDisplay>,
    /// JUnit report settings for matching tests.
    #[serde(default)]
    junit: DeserializedJunitOutput,
    /// Benchmark-specific overrides.
    #[serde(default)]
    bench: DeserializedOverrideBench,
}
1010
/// Deserialized form of JUnit settings within an override.
#[derive(Copy, Clone, Debug, Default, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub(in crate::config) struct DeserializedJunitOutput {
    /// Whether to store output for passing tests in the JUnit report.
    store_success_output: Option<bool>,
    /// Whether to store output for failing tests in the JUnit report.
    store_failure_output: Option<bool>,
    /// Status recorded in the report for flaky (pass-after-retry) tests.
    flaky_fail_status: Option<JunitFlakyFailStatus>,
}
1018
/// Deserialized form of benchmark-specific overrides.
#[derive(Clone, Debug, Default, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub(in crate::config) struct DeserializedOverrideBench {
    /// Slow timeout applied to benchmarks. There is no fallback from the
    /// test-level `slow-timeout` to this setting (see tests below).
    #[serde(
        default,
        deserialize_with = "crate::config::elements::deserialize_slow_timeout"
    )]
    slow_timeout: Option<SlowTimeout>,
}
1029
/// Host and target platform strings specified in an override's `platform`
/// field.
///
/// Deserialized from either a bare string (applies to the target platform
/// only) or a `{ host = ..., target = ... }` table.
#[derive(Clone, Debug, Default)]
pub(in crate::config) struct PlatformStrings {
    /// Triple or `cfg()` string matched against the host platform.
    pub(in crate::config) host: Option<String>,
    /// Triple or `cfg()` string matched against the target platform.
    pub(in crate::config) target: Option<String>,
}
1035
1036impl<'de> Deserialize<'de> for PlatformStrings {
1037    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
1038        struct V;
1039
1040        impl<'de2> serde::de::Visitor<'de2> for V {
1041            type Value = PlatformStrings;
1042
1043            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
1044                formatter.write_str(
1045                    "a table ({ host = \"x86_64-apple-darwin\", \
1046                        target = \"cfg(windows)\" }) \
1047                        or a string (\"x86_64-unknown-gnu-linux\")",
1048                )
1049            }
1050
1051            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
1052            where
1053                E: serde::de::Error,
1054            {
1055                Ok(PlatformStrings {
1056                    host: None,
1057                    target: Some(v.to_owned()),
1058                })
1059            }
1060
1061            fn visit_map<A>(self, map: A) -> Result<Self::Value, A::Error>
1062            where
1063                A: serde::de::MapAccess<'de2>,
1064            {
1065                #[derive(Deserialize)]
1066                struct PlatformStringsInner {
1067                    #[serde(default)]
1068                    host: Option<String>,
1069                    #[serde(default)]
1070                    target: Option<String>,
1071                }
1072
1073                let inner = PlatformStringsInner::deserialize(
1074                    serde::de::value::MapAccessDeserializer::new(map),
1075                )?;
1076                Ok(PlatformStrings {
1077                    host: inner.host,
1078                    target: inner.target,
1079                })
1080            }
1081        }
1082
1083        deserializer.deserialize_any(V)
1084    }
1085}
1086
1087#[cfg(test)]
1088mod tests {
1089    use super::*;
1090    use crate::config::{
1091        core::NextestConfig,
1092        elements::{LeakTimeoutResult, SlowTimeoutResult},
1093        utils::test_helpers::*,
1094    };
1095    use camino_tempfile::tempdir;
1096    use indoc::indoc;
1097    use nextest_metadata::TestCaseName;
1098    use std::{num::NonZeroUsize, time::Duration};
1099    use test_case::test_case;
1100
    /// Basic test to ensure overrides work. Add new override parameters to this test.
    #[test]
    fn test_overrides_basic() {
        let config_contents = indoc! {r#"
            # Override 1
            [[profile.default.overrides]]
            platform = 'aarch64-apple-darwin'  # this is the target platform
            filter = "test(test)"
            retries = { backoff = "exponential", count = 20, delay = "1s", max-delay = "20s" }
            slow-timeout = { period = "120s", terminate-after = 1, grace-period = "0s" }
            success-output = "immediate-final"
            junit = { store-success-output = true }

            # Override 2
            [[profile.default.overrides]]
            filter = "test(test)"
            threads-required = 8
            retries = 3
            slow-timeout = "60s"
            leak-timeout = "300ms"
            test-group = "my-group"
            failure-output = "final"
            junit = { store-failure-output = false }

            # Override 3
            [[profile.default.overrides]]
            platform = { host = "cfg(unix)" }
            filter = "test(override3)"
            retries = 5

            # Override 4 -- host not matched
            [[profile.default.overrides]]
            platform = { host = 'aarch64-apple-darwin' }
            retries = 10

            # Override 5 -- no filter provided, just platform
            [[profile.default.overrides]]
            platform = { host = 'cfg(target_os = "linux")', target = 'aarch64-apple-darwin' }
            filter = "test(override5)"
            retries = 8

            # Override 6 -- timeout result success
            [[profile.default.overrides]]
            filter = "test(timeout_success)"
            slow-timeout = { period = "30s", on-timeout = "pass" }

            [profile.default.junit]
            path = "my-path.xml"

            [test-groups.my-group]
            max-threads = 20
        "#};

        // Materialize the config in a temporary workspace and build a
        // package graph for it.
        let workspace_dir = tempdir().unwrap();

        let graph = temp_workspace(&workspace_dir, config_contents);
        let package_id = graph.workspace().iter().next().unwrap().id();

        let pcx = ParseContext::new(&graph);

        let nextest_config_result = NextestConfig::from_sources(
            graph.workspace().root(),
            &pcx,
            None,
            &[][..],
            &Default::default(),
        )
        .expect("config is valid");
        // Resolve platform-based overrides against the test helpers' build
        // platforms (presumably a Linux host with an aarch64-apple-darwin
        // target -- see `build_platforms()` in the test helpers).
        let profile = nextest_config_result
            .profile("default")
            .expect("valid profile name")
            .apply_build_platforms(&build_platforms());

        // This query matches override 2.
        let host_binary_query =
            binary_query(&graph, package_id, "lib", "my-binary", BuildPlatform::Host);
        let test_name = TestCaseName::new("test");
        let query = TestQuery {
            binary_query: host_binary_query.to_query(),
            test_name: &test_name,
        };
        let overrides = profile.settings_for(NextestRunMode::Test, &query);

        assert_eq!(overrides.threads_required(), ThreadsRequired::Count(8));
        assert_eq!(overrides.retries(), RetryPolicy::new_without_delay(3));
        assert_eq!(
            overrides.slow_timeout(),
            SlowTimeout {
                period: Duration::from_secs(60),
                on_timeout: SlowTimeoutResult::default(),
                terminate_after: None,
                grace_period: Duration::from_secs(10),
            }
        );
        assert_eq!(
            overrides.leak_timeout(),
            LeakTimeout {
                period: Duration::from_millis(300),
                result: LeakTimeoutResult::Pass,
            }
        );
        assert_eq!(overrides.test_group(), &test_group("my-group"));
        assert_eq!(overrides.success_output(), TestOutputDisplay::Never);
        assert_eq!(overrides.failure_output(), TestOutputDisplay::Final);
        // For clarity.
        #[expect(clippy::bool_assert_comparison)]
        {
            assert_eq!(overrides.junit_store_success_output(), false);
            assert_eq!(overrides.junit_store_failure_output(), false);
        }

        // This query matches override 1 and 2.
        let target_binary_query = binary_query(
            &graph,
            package_id,
            "lib",
            "my-binary",
            BuildPlatform::Target,
        );
        let test_name = TestCaseName::new("test");
        let query = TestQuery {
            binary_query: target_binary_query.to_query(),
            test_name: &test_name,
        };
        let overrides = profile.settings_for(NextestRunMode::Test, &query);

        // Override 1 comes first, so its settings win where both overrides
        // specify a value (retries, slow-timeout, success-output, junit).
        assert_eq!(overrides.threads_required(), ThreadsRequired::Count(8));
        assert_eq!(
            overrides.retries(),
            RetryPolicy::Exponential {
                count: 20,
                delay: Duration::from_secs(1),
                jitter: false,
                max_delay: Some(Duration::from_secs(20)),
            }
        );
        assert_eq!(
            overrides.slow_timeout(),
            SlowTimeout {
                period: Duration::from_secs(120),
                terminate_after: Some(NonZeroUsize::new(1).unwrap()),
                grace_period: Duration::ZERO,
                on_timeout: SlowTimeoutResult::default(),
            }
        );
        assert_eq!(
            overrides.leak_timeout(),
            LeakTimeout {
                period: Duration::from_millis(300),
                result: LeakTimeoutResult::Pass,
            }
        );
        assert_eq!(overrides.test_group(), &test_group("my-group"));
        assert_eq!(
            overrides.success_output(),
            TestOutputDisplay::ImmediateFinal
        );
        assert_eq!(overrides.failure_output(), TestOutputDisplay::Final);
        // For clarity.
        #[expect(clippy::bool_assert_comparison)]
        {
            assert_eq!(overrides.junit_store_success_output(), true);
            assert_eq!(overrides.junit_store_failure_output(), false);
        }

        // This query matches override 3.
        let test_name = TestCaseName::new("override3");
        let query = TestQuery {
            binary_query: target_binary_query.to_query(),
            test_name: &test_name,
        };
        let overrides = profile.settings_for(NextestRunMode::Test, &query);
        assert_eq!(overrides.retries(), RetryPolicy::new_without_delay(5));

        // This query matches override 5.
        let test_name = TestCaseName::new("override5");
        let query = TestQuery {
            binary_query: target_binary_query.to_query(),
            test_name: &test_name,
        };
        let overrides = profile.settings_for(NextestRunMode::Test, &query);
        assert_eq!(overrides.retries(), RetryPolicy::new_without_delay(8));

        // This query matches override 6.
        let test_name = TestCaseName::new("timeout_success");
        let query = TestQuery {
            binary_query: target_binary_query.to_query(),
            test_name: &test_name,
        };
        let overrides = profile.settings_for(NextestRunMode::Test, &query);
        assert_eq!(
            overrides.slow_timeout(),
            SlowTimeout {
                period: Duration::from_secs(30),
                on_timeout: SlowTimeoutResult::Pass,
                terminate_after: None,
                grace_period: Duration::from_secs(10),
            }
        );

        // This query does not match any overrides.
        let test_name = TestCaseName::new("no_match");
        let query = TestQuery {
            binary_query: target_binary_query.to_query(),
            test_name: &test_name,
        };
        let overrides = profile.settings_for(NextestRunMode::Test, &query);
        assert_eq!(overrides.retries(), RetryPolicy::new_without_delay(0));
    }
1310
    /// Test that bench.slow-timeout works correctly in overrides.
    #[test]
    fn test_overrides_bench_slow_timeout() {
        let config_contents = indoc! {r#"
            # Profile-level benchmark slow-timeout (used as fallback).
            [profile.default]
            bench.slow-timeout = { period = "30y" }

            # Override 1: Both test and bench slow-timeout specified.
            [[profile.default.overrides]]
            filter = "test(both_specified)"
            slow-timeout = "60s"
            bench.slow-timeout = { period = "5m", terminate-after = 2 }

            # Override 2: Only test slow-timeout specified.
            [[profile.default.overrides]]
            filter = "test(test_only)"
            slow-timeout = "90s"

            # Override 3: Only bench slow-timeout specified.
            [[profile.default.overrides]]
            filter = "test(bench_only)"
            bench.slow-timeout = "10m"
        "#};

        // Build a temporary workspace with the config above and compile the
        // default profile.
        let workspace_dir = tempdir().unwrap();
        let graph = temp_workspace(&workspace_dir, config_contents);
        let package_id = graph.workspace().iter().next().unwrap().id();
        let pcx = ParseContext::new(&graph);

        let nextest_config_result = NextestConfig::from_sources(
            graph.workspace().root(),
            &pcx,
            None,
            &[][..],
            &Default::default(),
        )
        .expect("config is valid");
        let profile = nextest_config_result
            .profile("default")
            .expect("valid profile name")
            .apply_build_platforms(&build_platforms());

        // All queries in this test run against the host platform.
        let host_binary_query =
            binary_query(&graph, package_id, "lib", "my-binary", BuildPlatform::Host);

        // Test "both_specified": tests get slow-timeout, benchmarks get
        // bench.slow-timeout.
        let test_name = TestCaseName::new("both_specified");
        let query = TestQuery {
            binary_query: host_binary_query.to_query(),
            test_name: &test_name,
        };

        let test_settings = profile.settings_for(NextestRunMode::Test, &query);
        assert_eq!(test_settings.slow_timeout().period, Duration::from_secs(60));

        let bench_settings = profile.settings_for(NextestRunMode::Benchmark, &query);
        assert_eq!(
            bench_settings.slow_timeout(),
            SlowTimeout {
                period: Duration::from_secs(5 * 60),
                terminate_after: Some(NonZeroUsize::new(2).unwrap()),
                grace_period: Duration::from_secs(10),
                on_timeout: SlowTimeoutResult::default(),
            }
        );

        // Test "test_only": tests get the override, benchmarks fall back to
        // profile default (no fallback from slow-timeout to
        // bench.slow-timeout).
        let test_name = TestCaseName::new("test_only");
        let query = TestQuery {
            binary_query: host_binary_query.to_query(),
            test_name: &test_name,
        };

        let test_settings = profile.settings_for(NextestRunMode::Test, &query);
        assert_eq!(test_settings.slow_timeout().period, Duration::from_secs(90));

        let bench_settings = profile.settings_for(NextestRunMode::Benchmark, &query);
        // Should use profile-level bench.slow-timeout (30 years), not the
        // override's slow-timeout. humantime parses "30y" accounting for leap
        // years, so we check >= VERY_LARGE rather than an exact value.
        assert!(
            bench_settings.slow_timeout().period >= SlowTimeout::VERY_LARGE.period,
            "should be >= VERY_LARGE, got {:?}",
            bench_settings.slow_timeout().period
        );

        // Test "bench_only": tests get profile default, benchmarks get the
        // override.
        let test_name = TestCaseName::new("bench_only");
        let query = TestQuery {
            binary_query: host_binary_query.to_query(),
            test_name: &test_name,
        };

        let test_settings = profile.settings_for(NextestRunMode::Test, &query);
        // Tests use the default slow-timeout (60s from default-config.toml).
        assert_eq!(test_settings.slow_timeout().period, Duration::from_secs(60));

        let bench_settings = profile.settings_for(NextestRunMode::Benchmark, &query);
        assert_eq!(
            bench_settings.slow_timeout().period,
            Duration::from_secs(10 * 60)
        );
    }
1419
1420    #[test_case(
1421        indoc! {r#"
1422            [[profile.default.overrides]]
1423            retries = 2
1424        "#},
1425        "default",
1426        &[MietteJsonReport {
1427            message: "at least one of `platform` and `filter` must be specified".to_owned(),
1428            labels: vec![],
1429        }]
1430
1431        ; "neither platform nor filter specified"
1432    )]
1433    #[test_case(
1434        indoc! {r#"
1435            [[profile.default.overrides]]
1436            default-filter = "test(test1)"
1437            retries = 2
1438        "#},
1439        "default",
1440        &[MietteJsonReport {
1441            message: "for override with `default-filter`, `platform` must also be specified".to_owned(),
1442            labels: vec![],
1443        }]
1444
1445        ; "default-filter without platform"
1446    )]
1447    #[test_case(
1448        indoc! {r#"
1449            [[profile.default.overrides]]
1450            platform = 'cfg(unix)'
1451            default-filter = "not default()"
1452            retries = 2
1453        "#},
1454        "default",
1455        &[MietteJsonReport {
1456            message: "predicate not allowed in `default-filter` expressions".to_owned(),
1457            labels: vec![
1458                MietteJsonLabel {
1459                    label: "this predicate causes infinite recursion".to_owned(),
1460                    span: MietteJsonSpan { offset: 4, length: 9 },
1461                },
1462            ],
1463        }]
1464
1465        ; "default filterset in default-filter"
1466    )]
1467    #[test_case(
1468        indoc! {r#"
1469            [[profile.default.overrides]]
1470            filter = 'test(test1)'
1471            default-filter = "test(test2)"
1472            retries = 2
1473        "#},
1474        "default",
1475        &[MietteJsonReport {
1476            message: "at most one of `filter` and `default-filter` must be specified".to_owned(),
1477            labels: vec![],
1478        }]
1479
1480        ; "both filter and default-filter specified"
1481    )]
1482    #[test_case(
1483        indoc! {r#"
1484            [[profile.default.overrides]]
1485            filter = 'test(test1)'
1486            platform = 'cfg(unix)'
1487            default-filter = "test(test2)"
1488            retries = 2
1489        "#},
1490        "default",
1491        &[MietteJsonReport {
1492            message: "at most one of `filter` and `default-filter` must be specified".to_owned(),
1493            labels: vec![],
1494        }]
1495
1496        ; "both filter and default-filter specified with platform"
1497    )]
1498    #[test_case(
1499        indoc! {r#"
1500            [[profile.default.overrides]]
1501            platform = {}
1502            retries = 2
1503        "#},
1504        "default",
1505        &[MietteJsonReport {
1506            message: "at least one of `platform` and `filter` must be specified".to_owned(),
1507            labels: vec![],
1508        }]
1509
1510        ; "empty platform map"
1511    )]
1512    #[test_case(
1513        indoc! {r#"
1514            [[profile.ci.overrides]]
1515            platform = 'cfg(target_os = "macos)'
1516            retries = 2
1517        "#},
1518        "ci",
1519        &[MietteJsonReport {
1520            message: "error parsing cfg() expression".to_owned(),
1521            labels: vec![
1522                MietteJsonLabel { label: "unclosed quotes".to_owned(), span: MietteJsonSpan { offset: 16, length: 6 } }
1523            ]
1524        }]
1525
1526        ; "invalid platform expression"
1527    )]
1528    #[test_case(
1529        indoc! {r#"
1530            [[profile.ci.overrides]]
1531            filter = 'test(/foo)'
1532            retries = 2
1533        "#},
1534        "ci",
1535        &[MietteJsonReport {
1536            message: "expected close regex".to_owned(),
1537            labels: vec![
1538                MietteJsonLabel { label: "missing `/`".to_owned(), span: MietteJsonSpan { offset: 9, length: 0 } }
1539            ]
1540        }]
1541
1542        ; "invalid filterset"
1543    )]
1544    #[test_case(
1545        // Not strictly an override error, but convenient to put here.
1546        indoc! {r#"
1547            [profile.ci]
1548            default-filter = "test(foo) or default()"
1549        "#},
1550        "ci",
1551        &[MietteJsonReport {
1552            message: "predicate not allowed in `default-filter` expressions".to_owned(),
1553            labels: vec![
1554                MietteJsonLabel { label: "this predicate causes infinite recursion".to_owned(), span: MietteJsonSpan { offset: 13, length: 9 } }
1555            ]
1556        }]
1557
1558        ; "default-filter with default"
1559    )]
1560    fn parse_overrides_invalid(
1561        config_contents: &str,
1562        faulty_profile: &str,
1563        expected_reports: &[MietteJsonReport],
1564    ) {
1565        let workspace_dir = tempdir().unwrap();
1566
1567        let graph = temp_workspace(&workspace_dir, config_contents);
1568        let pcx = ParseContext::new(&graph);
1569
1570        let err = NextestConfig::from_sources(
1571            graph.workspace().root(),
1572            &pcx,
1573            None,
1574            [],
1575            &Default::default(),
1576        )
1577        .expect_err("config is invalid");
1578        match err.kind() {
1579            ConfigParseErrorKind::CompileErrors(compile_errors) => {
1580                assert_eq!(
1581                    compile_errors.len(),
1582                    1,
1583                    "exactly one override error must be produced"
1584                );
1585                let error = compile_errors.first().unwrap();
1586                assert_eq!(
1587                    error.profile_name, faulty_profile,
1588                    "compile error profile matches"
1589                );
1590                let handler = miette::JSONReportHandler::new();
1591                let reports = error
1592                    .kind
1593                    .reports()
1594                    .map(|report| {
1595                        let mut out = String::new();
1596                        handler.render_report(&mut out, report.as_ref()).unwrap();
1597
1598                        let json_report: MietteJsonReport = serde_json::from_str(&out)
1599                            .unwrap_or_else(|err| {
1600                                panic!(
1601                                    "failed to deserialize JSON message produced by miette: {err}"
1602                                )
1603                            });
1604                        json_report
1605                    })
1606                    .collect::<Vec<_>>();
1607                assert_eq!(&reports, expected_reports, "reports match");
1608            }
1609            other => {
1610                panic!(
1611                    "for config error {other:?}, expected ConfigParseErrorKind::FiltersetOrCfgParseError"
1612                );
1613            }
1614        };
1615    }
1616
    /// Test that `cfg(unix)` works with a custom platform.
    ///
    /// This was broken with older versions of target-spec.
    #[test]
    fn cfg_unix_with_custom_platform() {
        let config_contents = indoc! {r#"
            [[profile.default.overrides]]
            platform = { host = "cfg(unix)" }
            filter = "test(test)"
            retries = 5
        "#};

        let workspace_dir = tempdir().unwrap();

        let graph = temp_workspace(&workspace_dir, config_contents);
        let package_id = graph.workspace().iter().next().unwrap().id();
        let pcx = ParseContext::new(&graph);

        let nextest_config = NextestConfig::from_sources(
            graph.workspace().root(),
            &pcx,
            None,
            &[][..],
            &Default::default(),
        )
        .expect("config is valid");

        // Resolve against custom (non-builtin) build platforms provided by
        // the test helpers -- presumably a custom target JSON rooted in the
        // workspace directory.
        let build_platforms = custom_build_platforms(workspace_dir.path());

        let profile = nextest_config
            .profile("default")
            .expect("valid profile name")
            .apply_build_platforms(&build_platforms);

        // Check that the override is correctly applied.
        let target_binary_query = binary_query(
            &graph,
            package_id,
            "lib",
            "my-binary",
            BuildPlatform::Target,
        );
        let test_name = TestCaseName::new("test");
        let query = TestQuery {
            binary_query: target_binary_query.to_query(),
            test_name: &test_name,
        };
        let overrides = profile.settings_for(NextestRunMode::Test, &query);
        assert_eq!(
            overrides.retries(),
            RetryPolicy::new_without_delay(5),
            "retries applied to custom platform"
        );
    }
1671}