use crate::errors::{UpdateError, UpdateVersionParseError};
use camino::{Utf8Path, Utf8PathBuf};
use mukti_metadata::{
    DigestAlgorithm, MuktiProject, MuktiReleasesJson, ReleaseLocation, ReleaseStatus,
};
use self_update::{ArchiveKind, Compression, Download, Extract};
use semver::{Version, VersionReq};
use serde::Deserialize;
use sha2::{Digest, Sha256};
use std::{
    fs,
    io::{self, BufWriter},
    str::FromStr,
};
use target_spec::Platform;
use tracing::{debug, info, warn};

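/// Self-update backend that reads mukti-format release metadata.
///
/// # Example
///
/// A rough sketch of driving an update end to end. The URL, archive path, and closure below are
/// illustrative placeholders rather than canonical values:
///
/// ```ignore
/// use camino::Utf8Path;
/// use semver::Version;
///
/// let backend = MuktiBackend {
///     url: "https://example.com/releases.json".to_owned(),
///     package_name: "cargo-nextest".to_owned(),
/// };
/// let releases = backend.fetch_releases(Version::new(0, 9, 0))?;
/// let status = releases.check(
///     &"latest".parse::<UpdateVersion>()?,
///     false,
///     Utf8Path::new("cargo-nextest"),
///     |_new_version| true, // whether to run `cargo nextest self setup` afterwards
/// )?;
/// if let CheckStatus::Success(context) = status {
///     context.do_update()?;
/// }
/// ```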
#[derive(Clone, Debug)]
pub struct MuktiBackend {
    /// The URL, or local path, to fetch release metadata from.
    pub url: String,

    /// The name of the package to look up in the release metadata.
    pub package_name: String,
}

impl MuktiBackend {
    /// Fetches release metadata, either from a local path or over the network.
    pub fn fetch_releases(&self, current_version: Version) -> Result<NextestReleases, UpdateError> {
        info!(target: "nextest-runner::update", "checking for self-updates");
        let as_path = Utf8Path::new(&self.url);
        // If the URL is a path that exists on disk, read the metadata from there.
        let releases_buf = if as_path.exists() {
            fs::read(as_path).map_err(|error| UpdateError::ReadLocalMetadata {
                path: as_path.to_owned(),
                error,
            })?
        } else {
            let mut releases_buf: Vec<u8> = Vec::new();
            Download::from_url(&self.url)
                .download_to(&mut releases_buf)
                .map_err(UpdateError::SelfUpdate)?;
            releases_buf
        };

        let mut releases_json: MuktiReleasesJson =
            serde_json::from_slice(&releases_buf).map_err(UpdateError::ReleaseMetadataDe)?;

        let project = match releases_json.projects.remove(&self.package_name) {
            Some(project) => project,
            None => {
                return Err(UpdateError::MuktiProjectNotFound {
                    not_found: self.package_name.clone(),
                    known: releases_json.projects.keys().cloned().collect(),
                });
            }
        };

        NextestReleases::new(&self.package_name, project, current_version)
    }
}

/// Release metadata and update state for the currently running binary.
#[derive(Clone, Debug)]
#[non_exhaustive]
pub struct NextestReleases {
    /// The name of the package being updated.
    pub package_name: String,

    /// Release metadata for the project.
    pub project: MuktiProject,

    /// The version of the currently running binary.
    pub current_version: Version,

    /// The path the updated binary will be written to (the current executable).
    pub bin_install_path: Utf8PathBuf,
}

impl NextestReleases {
    fn new(
        package_name: &str,
        project: MuktiProject,
        current_version: Version,
    ) -> Result<Self, UpdateError> {
        let bin_install_path = std::env::current_exe()
            .and_then(|exe| {
                Utf8PathBuf::try_from(exe)
                    .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))
            })
            .map_err(UpdateError::CurrentExe)?;

        Ok(Self {
            package_name: package_name.to_owned(),
            project,
            current_version,
            bin_install_path,
        })
    }

    /// Checks whether an update to the requested version can be performed, returning the context
    /// needed to carry it out.
    pub fn check<'a>(
        &'a self,
        version: &UpdateVersion,
        force: bool,
        bin_path_in_archive: &'a Utf8Path,
        perform_setup_fn: impl FnOnce(&Version) -> bool,
    ) -> Result<CheckStatus<'a>, UpdateError> {
        let (version, version_data) = self.get_version_data(version)?;
        debug!(
            target: "nextest-runner::update",
            "current version is {}, update version is {version}",
            self.current_version,
        );

        if &self.current_version == version && !force {
            return Ok(CheckStatus::AlreadyOnRequested(version.clone()));
        }
        if &self.current_version > version && !force {
            return Ok(CheckStatus::DowngradeNotAllowed {
                current_version: self.current_version.clone(),
                requested: version.clone(),
            });
        }

        // Find the .tar.gz archive published for this target triple.
        let triple = self.target_triple();
        debug!(target: "nextest-runner::update", "target triple: {triple}");

        let location = version_data
            .locations
            .iter()
            .find(|&data| data.format == TAR_GZ_SUFFIX && data.target == triple)
            .ok_or_else(|| {
                let known_triples = version_data
                    .locations
                    .iter()
                    .filter(|data| data.format == TAR_GZ_SUFFIX)
                    .map(|data| data.target.clone())
                    .collect();
                UpdateError::NoTargetData {
                    version: version.clone(),
                    triple,
                    known_triples,
                }
            })?;

        let force_disable_setup = version_data
            .metadata
            .is_some_and(|metadata| metadata.force_disable_setup);
        let perform_setup = !force_disable_setup && perform_setup_fn(version);

        Ok(CheckStatus::Success(MuktiUpdateContext {
            context: self,
            version: version.clone(),
            location: location.clone(),
            bin_path_in_archive,
            perform_setup,
        }))
    }

    fn get_version_data(
        &self,
        version: &UpdateVersion,
    ) -> Result<(&Version, ReleaseVersionData), UpdateError> {
        let (version, release_data) = match version {
            UpdateVersion::Exact(version) => {
                self.project.get_version_data(version).ok_or_else(|| {
                    let known = self
                        .project
                        .all_versions()
                        .map(|(v, release_data)| (v.clone(), release_data.status))
                        .collect();
                    UpdateError::VersionNotFound {
                        version: version.clone(),
                        known,
                    }
                })?
            }
            UpdateVersion::Req(req) => self
                .project
                .get_latest_matching(req)
                .ok_or_else(|| UpdateError::NoMatchForVersionReq { req: req.clone() })?,
        };

        // The custom metadata is optional: parse it if present, falling back to `None` with a
        // warning if it can't be deserialized.
        let metadata = if release_data.metadata.is_null() {
            None
        } else {
            match serde_json::from_value::<NextestReleaseMetadata>(release_data.metadata.clone()) {
                Ok(metadata) => Some(metadata),
                Err(error) => {
                    warn!(
                        target: "nextest-runner::update",
                        "failed to parse custom release metadata: {error}",
                    );
                    None
                }
            }
        };

        let release_data = ReleaseVersionData {
            release_url: release_data.release_url.clone(),
            status: release_data.status,
            locations: release_data.locations.clone(),
            metadata,
        };
        Ok((version, release_data))
    }

    fn target_triple(&self) -> String {
        let current = Platform::build_target().expect("build target could not be detected");
        let triple_str = current.triple_str();
        if triple_str.ends_with("-apple-darwin") {
            // On macOS, look for the universal binary rather than an architecture-specific one.
            "universal-apple-darwin".to_owned()
        } else {
            triple_str.to_owned()
        }
    }
}

/// Release information for a single version, with custom metadata parsed out.
#[derive(Clone, Debug)]
pub struct ReleaseVersionData {
    /// The canonical URL for this release.
    pub release_url: String,

    /// The status of this release.
    pub status: ReleaseStatus,

    /// Download locations for this release.
    pub locations: Vec<ReleaseLocation>,

    /// Custom nextest-specific metadata, if present and parseable.
    pub metadata: Option<NextestReleaseMetadata>,
}

/// Custom nextest-specific release metadata.
#[derive(Clone, Debug, Deserialize)]
pub struct NextestReleaseMetadata {
    /// Whether running `cargo nextest self setup` should be disabled for this release.
    #[serde(default)]
    pub force_disable_setup: bool,
}

/// The result of checking for an update.
#[derive(Clone, Debug)]
pub enum CheckStatus<'a> {
    /// The requested version is already installed.
    AlreadyOnRequested(Version),

    /// The requested version is older than the current one, and `force` was not passed in.
    DowngradeNotAllowed {
        /// The currently installed version.
        current_version: Version,

        /// The version that was requested.
        requested: Version,
    },

    /// An update can be performed with this context.
    Success(MuktiUpdateContext<'a>),
}

/// Context for performing an update, returned by [`NextestReleases::check`].
#[derive(Clone, Debug)]
#[non_exhaustive]
pub struct MuktiUpdateContext<'a> {
    /// The releases that were fetched.
    pub context: &'a NextestReleases,

    /// The version being updated to.
    pub version: Version,

    /// The download location for the target platform.
    pub location: ReleaseLocation,

    /// The path to the binary within the release archive.
    pub bin_path_in_archive: &'a Utf8Path,

    /// Whether to run `cargo nextest self setup` after the update.
    pub perform_setup: bool,
}

impl MuktiUpdateContext<'_> {
    /// Performs the update: downloads the archive, verifies it, and replaces the current binary.
    pub fn do_update(&self) -> Result<(), UpdateError> {
        // Create temporary directories next to the install path so the final rename stays on the
        // same filesystem.
        let tmp_dir_parent = self.context.bin_install_path.parent().ok_or_else(|| {
            UpdateError::CurrentExe(io::Error::new(
                io::ErrorKind::InvalidData,
                format!(
                    "parent directory of current exe `{}` could not be determined",
                    self.context.bin_install_path
                ),
            ))
        })?;
        let tmp_backup_dir_prefix = format!("__{}_backup", self.context.package_name);
        #[expect(clippy::redundant_clone)]
        let tmp_backup_filename = tmp_backup_dir_prefix.clone();

        if cfg!(windows) {
            // Clean up, on a best-effort basis, any backup temp directories left behind by
            // previous updates (only needed on Windows, where the old binary can linger in use).
            let _ = cleanup_backup_temp_directories(
                tmp_dir_parent,
                &tmp_backup_dir_prefix,
                &tmp_backup_filename,
            );
        }

        let tmp_archive_dir_prefix = format!("{}_download", self.context.package_name);
        let tmp_archive_dir = camino_tempfile::Builder::new()
            .prefix(&tmp_archive_dir_prefix)
            .tempdir_in(tmp_dir_parent)
            .map_err(|error| UpdateError::TempDirCreate {
                location: tmp_dir_parent.to_owned(),
                error,
            })?;
        let tmp_dir_path: &Utf8Path = tmp_archive_dir.path();
        let tmp_archive_path =
            tmp_dir_path.join(format!("{}.{TAR_GZ_SUFFIX}", self.context.package_name));
        let tmp_archive = fs::File::create(&tmp_archive_path).map_err(|error| {
            UpdateError::TempArchiveCreate {
                archive_path: tmp_archive_path.clone(),
                error,
            }
        })?;
        let mut tmp_archive_buf = BufWriter::new(tmp_archive);

        // Download the release archive into the temporary directory.
        let mut download = Download::from_url(&self.location.url);
        let mut headers = http::header::HeaderMap::new();
        headers.insert(
            http::header::ACCEPT,
            "application/octet-stream".parse().unwrap(),
        );
        download.set_headers(headers);
        download.show_progress(true);
        download
            .download_to(&mut tmp_archive_buf)
            .map_err(UpdateError::SelfUpdate)?;

        debug!(target: "nextest-runner::update", "downloaded to {tmp_archive_path}");

        // Flush and sync the archive so the checksum below is computed over complete data.
        let tmp_archive =
            tmp_archive_buf
                .into_inner()
                .map_err(|error| UpdateError::TempArchiveWrite {
                    archive_path: tmp_archive_path.clone(),
                    error: error.into_error(),
                })?;
        tmp_archive
            .sync_all()
            .map_err(|error| UpdateError::TempArchiveWrite {
                archive_path: tmp_archive_path.clone(),
                error,
            })?;
        std::mem::drop(tmp_archive);

        // Compute the SHA-256 checksum of the downloaded archive.
        let mut hasher = Sha256::default();
        let mut tmp_archive =
            fs::File::open(&tmp_archive_path).map_err(|error| UpdateError::TempArchiveRead {
                archive_path: tmp_archive_path.clone(),
                error,
            })?;
        io::copy(&mut tmp_archive, &mut hasher).map_err(|error| UpdateError::TempArchiveRead {
            archive_path: tmp_archive_path.clone(),
            error,
        })?;
        let hash = hasher.finalize();
        let hash_str = hex::encode(hash);

        // Verify the checksum against the release metadata if one is available.
        match self.location.checksums.get(&DigestAlgorithm::SHA256) {
            Some(checksum) => {
                if checksum.0 != hash_str {
                    return Err(UpdateError::ChecksumMismatch {
                        expected: checksum.0.clone(),
                        actual: hash_str,
                    });
                }
                debug!(target: "nextest-runner::update", "SHA-256 checksum verified: {hash_str}");
            }
            None => {
                warn!(target: "nextest-runner::update", "unable to verify SHA-256 checksum of downloaded archive ({hash_str})");
            }
        }

        // Extract just the binary we need from the archive.
        Extract::from_source(tmp_archive_path.as_std_path())
            .archive(ArchiveKind::Tar(Some(Compression::Gz)))
            .extract_file(
                tmp_archive_dir.path().as_std_path(),
                self.bin_path_in_archive,
            )
            .map_err(UpdateError::SelfUpdate)?;

        let new_exe = tmp_dir_path.join(self.bin_path_in_archive);
        debug!(target: "nextest-runner::update", "extracted to {new_exe}, replacing existing binary");

        // Swap the new binary into place, parking the old one in a backup temp directory.
        let tmp_backup_dir = camino_tempfile::Builder::new()
            .prefix(&tmp_backup_dir_prefix)
            .tempdir_in(tmp_dir_parent)
            .map_err(|error| UpdateError::TempDirCreate {
                location: tmp_dir_parent.to_owned(),
                error,
            })?;

        let tmp_backup_dir_path: &Utf8Path = tmp_backup_dir.path();
        let tmp_file_path = tmp_backup_dir_path.join(&tmp_backup_filename);

        Move::from_source(&new_exe)
            .replace_using_temp(&tmp_file_path)
            .to_dest(&self.context.bin_install_path)?;

        if self.perform_setup {
            info!(target: "nextest-runner::update", "running `cargo nextest self setup`");
            let mut cmd = std::process::Command::new(&self.context.bin_install_path);
            cmd.args(["nextest", "self", "setup", "--source", "self-update"]);
            let status = cmd.status().map_err(UpdateError::SelfSetup)?;
            if !status.success() {
                return Err(UpdateError::SelfSetup(io::Error::new(
                    io::ErrorKind::Other,
                    format!(
                        "`cargo nextest self setup` failed with exit code {}",
                        status
                            .code()
                            .map_or("(unknown)".to_owned(), |c| c.to_string())
                    ),
                )));
            }
        }

        Ok(())
    }
}

/// Moves a file from `source` to `dest`, optionally parking the existing destination at a
/// temporary path so the operation can be rolled back if it fails partway through.
#[derive(Debug)]
struct Move<'a> {
    source: &'a Utf8Path,
    temp: Option<&'a Utf8Path>,
}

impl<'a> Move<'a> {
    pub fn from_source(source: &'a Utf8Path) -> Move<'a> {
        Self { source, temp: None }
    }

    /// Uses `temp` as a staging location for the existing destination file.
    pub fn replace_using_temp(&mut self, temp: &'a Utf8Path) -> &mut Self {
        self.temp = Some(temp);
        self
    }

    pub fn to_dest(&self, dest: &Utf8Path) -> Result<(), UpdateError> {
        match self.temp {
            None => Self::fs_rename(self.source, dest),
            Some(temp) => {
                if dest.exists() {
                    // Move the existing destination out of the way first, and restore it if
                    // moving the new file into place fails.
                    Self::fs_rename(dest, temp)?;
                    if let Err(e) = Self::fs_rename(self.source, dest) {
                        Self::fs_rename(temp, dest)?;
                        return Err(e);
                    }
                } else {
                    Self::fs_rename(self.source, dest)?;
                }
                Ok(())
            }
        }
    }

    fn fs_rename(source: &Utf8Path, dest: &Utf8Path) -> Result<(), UpdateError> {
        fs::rename(source, dest).map_err(|error| UpdateError::FsRename {
            source: source.to_owned(),
            dest: dest.to_owned(),
            error,
        })
    }
}

/// Removes leftover backup temporary directories matching `tmp_dir_prefix`, as long as they
/// contain nothing other than the expected backup file.
fn cleanup_backup_temp_directories(
    tmp_dir_parent: &Utf8Path,
    tmp_dir_prefix: &str,
    expected_tmp_filename: &str,
) -> io::Result<()> {
    for entry in fs::read_dir(tmp_dir_parent)? {
        let entry = entry?;
        let tmp_dir_name = if let Ok(tmp_dir_name) = entry.file_name().into_string() {
            tmp_dir_name
        } else {
            continue;
        };

        let is_expected_tmp_file = |tmp_file_entry: std::io::Result<fs::DirEntry>| {
            tmp_file_entry
                .ok()
                .filter(|e| e.file_name() == expected_tmp_filename)
                .is_some()
        };

        // Only delete directories that match the prefix and contain nothing unexpected.
        if tmp_dir_name.starts_with(tmp_dir_prefix)
            && fs::read_dir(entry.path())?.all(is_expected_tmp_file)
        {
            fs::remove_dir_all(entry.path())?;
        }
    }
    Ok(())
}

const TAR_GZ_SUFFIX: &str = "tar.gz";

/// A version specifier for self-updates: either an exact version or a version requirement.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum UpdateVersion {
    /// An exact version, e.g. `0.9.50`.
    Exact(Version),

    /// A version requirement, e.g. `>=0.9` (the string `latest` maps to a wildcard requirement).
    Req(VersionReq),
}

impl FromStr for UpdateVersion {
    type Err = UpdateVersionParseError;

    fn from_str(input: &str) -> Result<Self, Self::Err> {
        // "latest" means the newest available version, i.e. a wildcard version requirement.
        if input == "latest" {
            return Ok(UpdateVersion::Req(VersionReq::STAR));
        }

        let first = input
            .chars()
            .next()
            .ok_or(UpdateVersionParseError::EmptyString)?;

        // Inputs that start with a comparison operator or contain a wildcard are treated as
        // version requirements; everything else is parsed as an exact version.
        let is_req = "<>=^~".contains(first) || input.contains('*');
        if is_req {
            match input.parse::<VersionReq>() {
                Ok(v) => Ok(Self::Req(v)),
                Err(error) => Err(UpdateVersionParseError::InvalidVersionReq {
                    input: input.to_owned(),
                    error,
                }),
            }
        } else {
            match input.parse::<Version>() {
                Ok(v) => Ok(Self::Exact(v)),
                Err(error) => Err(UpdateVersionParseError::InvalidVersion {
                    input: input.to_owned(),
                    error,
                }),
            }
        }
    }
}
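
#[cfg(test)]
mod tests {
    use super::*;

    // Illustrative tests (a sketch added for clarity, not exhaustive coverage) exercising the
    // `UpdateVersion` parsing rules and the rename-with-backup behavior of `Move` defined above.

    #[test]
    fn update_version_parses_latest_as_star_req() {
        assert_eq!(
            "latest".parse::<UpdateVersion>().unwrap(),
            UpdateVersion::Req(VersionReq::STAR),
        );
    }

    #[test]
    fn update_version_distinguishes_exact_versions_from_requirements() {
        assert!(matches!(
            "0.9.50".parse::<UpdateVersion>(),
            Ok(UpdateVersion::Exact(_))
        ));
        // Leading comparison operators or wildcards are treated as version requirements.
        assert!(matches!(
            ">=0.9".parse::<UpdateVersion>(),
            Ok(UpdateVersion::Req(_))
        ));
        assert!(matches!(
            "0.9.*".parse::<UpdateVersion>(),
            Ok(UpdateVersion::Req(_))
        ));
        assert!(matches!(
            "".parse::<UpdateVersion>(),
            Err(UpdateVersionParseError::EmptyString)
        ));
    }

    #[test]
    fn move_replaces_existing_destination_using_temp_path() {
        let dir = camino_tempfile::tempdir().expect("created temp dir");
        let source = dir.path().join("new-binary");
        let dest = dir.path().join("installed-binary");
        let temp = dir.path().join("backup");
        fs::write(&source, "new contents").expect("wrote source");
        fs::write(&dest, "old contents").expect("wrote dest");

        let result = Move::from_source(&source)
            .replace_using_temp(&temp)
            .to_dest(&dest);
        assert!(result.is_ok(), "move should succeed");

        // The new file is installed at the destination...
        assert_eq!(fs::read_to_string(&dest).expect("read dest"), "new contents");
        // ...and the previous file is parked at the temp path rather than deleted.
        assert_eq!(fs::read_to_string(&temp).expect("read temp"), "old contents");
    }
}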