From 5ca1add0b08cae02b930e643df9cfe74616b1d72 Mon Sep 17 00:00:00 2001
From: mo8it
Date: Fri, 9 Aug 2024 14:01:43 +0200
Subject: [PATCH] Use context instead of with_context

---
 src/cargo/core/compiler/build_runner/mod.rs  |  8 ++++----
 src/cargo/core/compiler/custom_build.rs      |  2 +-
 src/cargo/core/compiler/future_incompat.rs   |  4 ++--
 src/cargo/core/compiler/job_queue/mod.rs     |  4 ++--
 src/cargo/core/compiler/timings.rs           |  2 +-
 src/cargo/core/global_cache_tracker.rs       |  4 ++--
 src/cargo/core/package.rs                    |  8 ++++----
 src/cargo/ops/cargo_package.rs               |  6 +++---
 .../ops/common_for_install_and_uninstall.rs  |  5 ++---
 src/cargo/ops/vendor.rs                      | 12 ++++++------
 src/cargo/sources/registry/http_remote.rs    |  6 +++---
 src/cargo/sources/registry/mod.rs            |  4 ++--
 src/cargo/util/cache_lock.rs                 |  4 ++--
 src/cargo/util/context/mod.rs                | 16 ++++++++--------
 src/cargo/util/diagnostic_server.rs          |  2 +-
 src/cargo/util/lockserver.rs                 |  8 ++++----
 src/cargo/util/toml/mod.rs                   |  2 +-
 src/cargo/util/toml/targets.rs               |  2 +-
 18 files changed, 49 insertions(+), 50 deletions(-)

diff --git a/src/cargo/core/compiler/build_runner/mod.rs b/src/cargo/core/compiler/build_runner/mod.rs
index f9e3cf64c3d..43d00e1ec85 100644
--- a/src/cargo/core/compiler/build_runner/mod.rs
+++ b/src/cargo/core/compiler/build_runner/mod.rs
@@ -100,8 +100,8 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
         let jobserver = match bcx.gctx.jobserver_from_env() {
             Some(c) => c.clone(),
             None => {
-                let client = Client::new(bcx.jobs() as usize)
-                    .with_context(|| "failed to create jobserver")?;
+                let client =
+                    Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
                 client.acquire_raw()?;
                 client
             }
@@ -354,11 +354,11 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
             .unwrap()
             .host
             .prepare()
-            .with_context(|| "couldn't prepare build directories")?;
+            .context("couldn't prepare build directories")?;
         for target in self.files.as_mut().unwrap().target.values_mut() {
             target
                 .prepare()
-                .with_context(|| "couldn't prepare build directories")?;
+                .context("couldn't prepare build directories")?;
         }

         let files = self.files.as_ref().unwrap();
diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs
index 513f9f09840..706783d9332 100644
--- a/src/cargo/core/compiler/custom_build.rs
+++ b/src/cargo/core/compiler/custom_build.rs
@@ -431,7 +431,7 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<Job> {
     // If we have an old build directory, then just move it into place,
     // otherwise create it!
     paths::create_dir_all(&script_out_dir)
-        .with_context(|| "failed to create script output directory for build command")?;
+        .context("failed to create script output directory for build command")?;

     // For all our native lib dependencies, pick up their metadata to pass
     // along to this custom build command. We're also careful to augment our
diff --git a/src/cargo/core/compiler/future_incompat.rs b/src/cargo/core/compiler/future_incompat.rs
index 7486138b77f..abe3de3e2a8 100644
--- a/src/cargo/core/compiler/future_incompat.rs
+++ b/src/cargo/core/compiler/future_incompat.rs
@@ -211,9 +211,9 @@ impl OnDiskReports {
         report_file
             .file()
             .read_to_string(&mut file_contents)
-            .with_context(|| "failed to read report")?;
+            .context("failed to read report")?;
         let on_disk_reports: OnDiskReports =
-            serde_json::from_str(&file_contents).with_context(|| "failed to load report")?;
+            serde_json::from_str(&file_contents).context("failed to load report")?;
         if on_disk_reports.version != ON_DISK_VERSION {
             bail!("unable to read reports; reports were saved from a future version of Cargo");
         }
diff --git a/src/cargo/core/compiler/job_queue/mod.rs b/src/cargo/core/compiler/job_queue/mod.rs
index e6fccc7bd11..6c55697c1a0 100644
--- a/src/cargo/core/compiler/job_queue/mod.rs
+++ b/src/cargo/core/compiler/job_queue/mod.rs
@@ -513,7 +513,7 @@ impl<'gctx> JobQueue<'gctx> {
             .into_helper_thread(move |token| {
                 messages.push(Message::Token(token));
             })
-            .with_context(|| "failed to create helper thread for jobserver management")?;
+            .context("failed to create helper thread for jobserver management")?;

         // Create a helper thread to manage the diagnostics for rustfix if
         // necessary.
@@ -700,7 +700,7 @@ impl<'gctx> DrainState<'gctx> {
                     .push(FutureIncompatReportPackage { package_id, items });
             }
             Message::Token(acquired_token) => {
-                let token = acquired_token.with_context(|| "failed to acquire jobserver token")?;
+                let token = acquired_token.context("failed to acquire jobserver token")?;
                 self.tokens.push(token);
             }
         }
diff --git a/src/cargo/core/compiler/timings.rs b/src/cargo/core/compiler/timings.rs
index f91a020e45a..d6cfa1ba7af 100644
--- a/src/cargo/core/compiler/timings.rs
+++ b/src/cargo/core/compiler/timings.rs
@@ -299,7 +299,7 @@ impl<'gctx> Timings<'gctx> {
             .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap());
         if self.report_html {
             self.report_html(build_runner, error)
-                .with_context(|| "failed to save timing report")?;
+                .context("failed to save timing report")?;
         }
         Ok(())
     }
diff --git a/src/cargo/core/global_cache_tracker.rs b/src/cargo/core/global_cache_tracker.rs
index df80aa336bc..b6cb7a60fdb 100644
--- a/src/cargo/core/global_cache_tracker.rs
+++ b/src/cargo/core/global_cache_tracker.rs
@@ -543,7 +543,7 @@ impl GlobalCacheTracker {
     /// Deletes files from the global cache based on the given options.
     pub fn clean(&mut self, clean_ctx: &mut CleanContext<'_>, gc_opts: &GcOpts) -> CargoResult<()> {
         self.clean_inner(clean_ctx, gc_opts)
-            .with_context(|| "failed to clean entries from the global cache")
+            .context("failed to clean entries from the global cache")
     }

     #[tracing::instrument(skip_all)]
@@ -575,7 +575,7 @@ impl GlobalCacheTracker {
                 gc_opts.is_download_cache_size_set(),
                 &mut delete_paths,
             )
-            .with_context(|| "failed to sync tracking database")?
+            .context("failed to sync tracking database")?
         }
         if let Some(max_age) = gc_opts.max_index_age {
             let max_age = now - max_age.as_secs();
diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs
index 5e054794a78..7dd412154f7 100644
--- a/src/cargo/core/package.rs
+++ b/src/cargo/core/package.rs
@@ -393,7 +393,7 @@ impl<'gctx> PackageSet<'gctx> {
         let multiplexing = gctx.http_config()?.multiplexing.unwrap_or(true);
         multi
             .pipelining(false, multiplexing)
-            .with_context(|| "failed to enable multiplexing/pipelining in curl")?;
+            .context("failed to enable multiplexing/pipelining in curl")?;

         // let's not flood crates.io with connections
         multi.set_max_host_connections(2)?;
@@ -681,7 +681,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
         let pkg = source
             .download(id)
-            .with_context(|| "unable to get packages from source")?;
+            .context("unable to get packages from source")?;
         let (url, descriptor, authorization) = match pkg {
             MaybePackage::Ready(pkg) => {
                 debug!("{} doesn't need a download", id);
@@ -951,7 +951,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             self.set
                 .multi
                 .perform()
-                .with_context(|| "failed to perform http requests")
+                .context("failed to perform http requests")
         })?;
         debug!(target: "network", "handles remaining: {}", n);
         let results = &mut self.results;
@@ -981,7 +981,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             self.set
                 .multi
                 .wait(&mut [], timeout)
-                .with_context(|| "failed to wait on curl `Multi`")?;
+                .context("failed to wait on curl `Multi`")?;
         }
     }
 }
diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs
index 9e4ea279158..aabd7105527 100644
--- a/src/cargo/ops/cargo_package.rs
+++ b/src/cargo/ops/cargo_package.rs
@@ -147,13 +147,13 @@ fn create_package(
         .status("Packaging", pkg.package_id().to_string())?;
     dst.file().set_len(0)?;
     let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename)
-        .with_context(|| "failed to prepare local package for uploading")?;
+        .context("failed to prepare local package for uploading")?;
     dst.seek(SeekFrom::Start(0))?;

     let src_path = dst.path();
     let dst_path = dst.parent().join(&filename);
     fs::rename(&src_path, &dst_path)
-        .with_context(|| "failed to move temporary tarball into final location")?;
+        .context("failed to move temporary tarball into final location")?;

     let dst_metadata = dst
         .file()
@@ -331,7 +331,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
[...]
diff --git a/src/cargo/ops/common_for_install_and_uninstall.rs b/src/cargo/ops/common_for_install_and_uninstall.rs
[...]
diff --git a/src/cargo/ops/vendor.rs b/src/cargo/ops/vendor.rs
--- a/src/cargo/ops/vendor.rs
+++ b/src/cargo/ops/vendor.rs
@@ ... @@ pub fn vendor(ws: &Workspace<'_>, opts: &VendorOptions<'_>) -> CargoResult<()> {
     }
     let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::<Vec<_>>();
     let _lock = gctx.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?;
-    let vendor_config = sync(gctx, &workspaces, opts).with_context(|| "failed to sync")?;
+    let vendor_config = sync(gctx, &workspaces, opts).context("failed to sync")?;

     if gctx.shell().verbosity() != Verbosity::Quiet {
         if vendor_config.source.is_empty() {
@@ -113,11 +113,11 @@ fn sync(
     // crate to work with.
     for ws in workspaces {
         let (packages, resolve) =
-            ops::resolve_ws(ws, dry_run).with_context(|| "failed to load pkg lockfile")?;
+            ops::resolve_ws(ws, dry_run).context("failed to load pkg lockfile")?;

         packages
             .get_many(resolve.iter())
-            .with_context(|| "failed to download packages")?;
+            .context("failed to download packages")?;

         for pkg in resolve.iter() {
             // Don't delete actual source code!
@@ -145,11 +145,11 @@ fn sync(
     // tables about them.
     for ws in workspaces {
         let (packages, resolve) =
-            ops::resolve_ws(ws, dry_run).with_context(|| "failed to load pkg lockfile")?;
+            ops::resolve_ws(ws, dry_run).context("failed to load pkg lockfile")?;

         packages
             .get_many(resolve.iter())
-            .with_context(|| "failed to download packages")?;
+            .context("failed to download packages")?;

         for pkg in resolve.iter() {
             // No need to vendor path crates since they're already in the
@@ -161,7 +161,7 @@ fn sync(
                 pkg,
                 packages
                     .get_one(pkg)
-                    .with_context(|| "failed to fetch package")?
+                    .context("failed to fetch package")?
                     .clone(),
             );

diff --git a/src/cargo/sources/registry/http_remote.rs b/src/cargo/sources/registry/http_remote.rs
index aa9816a4d12..29ccf1f474f 100644
--- a/src/cargo/sources/registry/http_remote.rs
+++ b/src/cargo/sources/registry/http_remote.rs
@@ -268,7 +268,7 @@ impl<'gctx> HttpRegistry<'gctx> {

         self.multi
             .pipelining(false, self.multiplexing)
-            .with_context(|| "failed to enable multiplexing/pipelining in curl")?;
+            .context("failed to enable multiplexing/pipelining in curl")?;

         // let's not flood the server with connections
         self.multi.set_max_host_connections(2)?;
@@ -802,7 +802,7 @@ impl<'gctx> RegistryData for HttpRegistry<'gctx> {
         let remaining_in_multi = tls::set(&self.downloads, || {
             self.multi
                 .perform()
-                .with_context(|| "failed to perform http requests")
+                .context("failed to perform http requests")
         })?;
         trace!(target: "network", "{} transfers remaining", remaining_in_multi);

@@ -823,7 +823,7 @@ impl<'gctx> RegistryData for HttpRegistry<'gctx> {
             .unwrap_or_else(|| Duration::new(1, 0));
         self.multi
             .wait(&mut [], timeout)
-            .with_context(|| "failed to wait on curl `Multi`")?;
+            .context("failed to wait on curl `Multi`")?;
         }
     }
 }
diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs
index 31ebc48ae31..4cfe2005b46 100644
--- a/src/cargo/sources/registry/mod.rs
+++ b/src/cargo/sources/registry/mod.rs
@@ -642,10 +642,10 @@ impl<'gctx> RegistrySource<'gctx> {
         let prefix = unpack_dir.file_name().unwrap();
         let parent = unpack_dir.parent().unwrap();
         for entry in tar.entries()? {
-            let mut entry = entry.with_context(|| "failed to iterate over archive")?;
+            let mut entry = entry.context("failed to iterate over archive")?;
             let entry_path = entry
                 .path()
-                .with_context(|| "failed to read entry path")?
+                .context("failed to read entry path")?
                 .into_owned();

             // We're going to unpack this tarball into the global source
diff --git a/src/cargo/util/cache_lock.rs b/src/cargo/util/cache_lock.rs
index e4a10375701..7d019d183e6 100644
--- a/src/cargo/util/cache_lock.rs
+++ b/src/cargo/util/cache_lock.rs
@@ -308,7 +308,7 @@ impl RecursiveLock {
                     self.is_exclusive = true;
                     return Ok(());
                 } else {
-                    return Err(e).with_context(|| "failed to acquire package cache lock");
+                    return Err(e).context("failed to acquire package cache lock");
                 }
             }
         }
@@ -331,7 +331,7 @@ impl RecursiveLock {
                     self.is_exclusive = true;
                     return Ok(result);
                 } else {
-                    return Err(e).with_context(|| "failed to acquire package cache lock");
+                    return Err(e).context("failed to acquire package cache lock");
                 }
             }
         }
diff --git a/src/cargo/util/context/mod.rs b/src/cargo/util/context/mod.rs
index a8fee94d5e4..1db5b9a3c98 100644
--- a/src/cargo/util/context/mod.rs
+++ b/src/cargo/util/context/mod.rs
@@ -335,8 +335,8 @@ impl GlobalContext {
     /// any config files from disk. Those will be loaded lazily as-needed.
     pub fn default() -> CargoResult<GlobalContext> {
         let shell = Shell::new();
-        let cwd = env::current_dir()
-            .with_context(|| "couldn't get the current directory of the process")?;
+        let cwd =
+            env::current_dir().context("couldn't get the current directory of the process")?;
         let homedir = homedir(&cwd).ok_or_else(|| {
             anyhow!(
                 "Cargo couldn't find your home directory. \
@@ -496,7 +496,7 @@ impl GlobalContext {
             let exe = from_env()
                 .or_else(|_| from_current_exe())
                 .or_else(|_| from_argv())
-                .with_context(|| "couldn't get the path to cargo executable")?;
+                .context("couldn't get the path to cargo executable")?;
             Ok(exe)
         })
         .map(AsRef::as_ref)
@@ -569,8 +569,8 @@ impl GlobalContext {
     ///
     /// There is not a need to also call [`Self::reload_rooted_at`].
     pub fn reload_cwd(&mut self) -> CargoResult<()> {
-        let cwd = env::current_dir()
-            .with_context(|| "couldn't get the current directory of the process")?;
+        let cwd =
+            env::current_dir().context("couldn't get the current directory of the process")?;
         let homedir = homedir(&cwd).ok_or_else(|| {
             anyhow!(
                 "Cargo couldn't find your home directory. \
@@ -1166,7 +1166,7 @@ impl GlobalContext {
                 result.push(cv);
                 Ok(())
             })
-            .with_context(|| "could not load Cargo configuration")?;
+            .context("could not load Cargo configuration")?;

         Ok(result)
     }
@@ -1206,7 +1206,7 @@ impl GlobalContext {
             })?;
             Ok(())
         })
-        .with_context(|| "could not load Cargo configuration")?;
+        .context("could not load Cargo configuration")?;

         match cfg {
             CV::Table(map, _) => Ok(map),
@@ -1495,7 +1495,7 @@ impl GlobalContext {
         };
         let tmp_table = self
             .load_includes(tmp_table, &mut HashSet::new(), WhyLoad::Cli)
-            .with_context(|| "failed to load --config include".to_string())?;
+            .context("failed to load --config include")?;
         loaded_args
             .merge(tmp_table, true)
             .with_context(|| format!("failed to merge --config argument `{arg}`"))?;
diff --git a/src/cargo/util/diagnostic_server.rs b/src/cargo/util/diagnostic_server.rs
index 2cce962db92..93b378c54b8 100644
--- a/src/cargo/util/diagnostic_server.rs
+++ b/src/cargo/util/diagnostic_server.rs
@@ -268,7 +268,7 @@ pub struct StartedServer {
 impl RustfixDiagnosticServer {
     pub fn new() -> Result<RustfixDiagnosticServer, Error> {
         let listener = TcpListener::bind(&LOCALHOST[..])
-            .with_context(|| "failed to bind TCP listener to manage locking")?;
+            .context("failed to bind TCP listener to manage locking")?;
         let addr = listener.local_addr()?;
         Ok(RustfixDiagnosticServer { listener, addr })
     }
diff --git a/src/cargo/util/lockserver.rs b/src/cargo/util/lockserver.rs
index 3609a05fc8b..3dbb2126e64 100644
--- a/src/cargo/util/lockserver.rs
+++ b/src/cargo/util/lockserver.rs
@@ -47,7 +47,7 @@ struct ServerClient {
 impl LockServer {
     pub fn new() -> Result<LockServer, Error> {
         let listener = TcpListener::bind(&LOCALHOST[..])
-            .with_context(|| "failed to bind TCP listener to manage locking")?;
+            .context("failed to bind TCP listener to manage locking")?;
         let addr = listener.local_addr()?;
         Ok(LockServer {
             listener,
@@ -159,15 +159,15 @@ impl Drop for LockServerStarted {
 impl LockServerClient {
     pub fn lock(addr: &SocketAddr, name: impl AsRef<[u8]>) -> Result<LockServerClient, Error> {
         let mut client =
-            TcpStream::connect(&addr).with_context(|| "failed to connect to parent lock server")?;
+            TcpStream::connect(&addr).context("failed to connect to parent lock server")?;
         client
             .write_all(name.as_ref())
             .and_then(|_| client.write_all(b"\n"))
-            .with_context(|| "failed to write to lock server")?;
+            .context("failed to write to lock server")?;
         let mut buf = [0];
         client
             .read_exact(&mut buf)
-            .with_context(|| "failed to acquire lock")?;
+            .context("failed to acquire lock")?;
         Ok(LockServerClient { _socket: client })
     }
 }
diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs
index 172a4a736a5..02bf256a619 100644
--- a/src/cargo/util/toml/mod.rs
+++ b/src/cargo/util/toml/mod.rs
@@ -1131,7 +1131,7 @@ pub fn to_real_manifest(
     {
         let edition: Edition = edition
             .parse()
-            .with_context(|| "failed to parse the `edition` key")?;
+            .context("failed to parse the `edition` key")?;
         if let Some(pkg_msrv) = &rust_version {
             if let Some(edition_msrv) = edition.first_version() {
                 let edition_msrv = RustVersion::try_from(edition_msrv).unwrap();
diff --git a/src/cargo/util/toml/targets.rs b/src/cargo/util/toml/targets.rs
index 5de630501da..c80b31dba61 100644
--- a/src/cargo/util/toml/targets.rs
+++ b/src/cargo/util/toml/targets.rs
@@ -853,7 +853,7 @@ fn configure(toml: &TomlTarget, target: &mut Target) -> CargoResult<()> {
         target.set_edition(
             edition
                 .parse()
-                .with_context(|| "failed to parse the `edition` key")?,
+                .context("failed to parse the `edition` key")?,
         );
     }
     Ok(())
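
Why `context` rather than `with_context` for these call sites: anyhow's
`Context` trait provides both. `context` takes the message eagerly, while
`with_context` takes a closure that is only invoked on the error path. When
the message is a string literal there is nothing to defer, so `context` is
the simpler call at no extra cost; `with_context` still earns its keep when
the message is built with `format!`, as in the `--config` merge call left
unchanged above. A minimal standalone sketch of the distinction (the
`read_port` function, its path handling, and its messages are illustrative
assumptions, not code from Cargo):

    use anyhow::{Context, Result};

    fn read_port(path: &str) -> Result<u16> {
        // Static message: nothing to compute lazily, so the eager
        // `context` (the form this patch switches to) is idiomatic.
        let raw = std::fs::read_to_string(path).context("failed to read config file")?;

        // Dynamic message: `with_context` defers the `format!` allocation
        // to the error path, so the success path pays nothing.
        let port = raw
            .trim()
            .parse()
            .with_context(|| format!("failed to parse `{path}` as a port number"))?;

        Ok(port)
    }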