Auto merge of #14377 - mo8it:context, r=weihanglo
Use context instead of with_context

Replace `.with_context(|| "…")` with `.context("…")` to avoid calling a trivial closure. It is also shorter :)

On the other hand, use `.with_context(|| format!(…))` instead of `.context(format!(…))` to avoid an unneeded string allocation: the closure defers the `format!` call, so the message is only built when an error actually occurs.
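
As a minimal sketch of both rules (a standalone example using the `anyhow` crate; the function, path, and messages here are hypothetical, not taken from this diff):

```rust
use anyhow::{Context, Result};
use std::fs;

// Hypothetical helper illustrating when to use `context` vs `with_context`.
fn parse_number(path: &str) -> Result<u32> {
    // Static message: pass the &str directly. Wrapping it as
    // `.with_context(|| "…")` would just call a closure that returns
    // the same constant string.
    let raw = fs::read_to_string(path).context("failed to read input file")?;

    // Dynamic message: build it lazily. `.context(format!(…))` would
    // allocate the String even on success; the closure runs only on
    // the error path.
    raw.trim()
        .parse()
        .with_context(|| format!("could not parse `{path}` as a number"))
}
```

In both cases the reported error chain is identical; the two methods differ only in when the context message is constructed.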
bors committed Aug 9, 2024 (parents b66cad8 + af04e54, commit 73a1b76)
Showing 22 changed files with 63 additions and 68 deletions.
crates/rustfix/tests/parse_and_replace.rs (7 additions, 9 deletions)
@@ -164,8 +164,8 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
 
     debug!("next up: {:?}", file);
     let code = fs::read_to_string(file)?;
-    let errors =
-        compile_and_get_json_errors(file).context(format!("could compile {}", file.display()))?;
+    let errors = compile_and_get_json_errors(file)
+        .with_context(|| format!("could not compile {}", file.display()))?;
     let suggestions =
         rustfix::get_suggestions_from_json(&errors, &HashSet::new(), filter_suggestions)
             .context("could not load suggestions")?;
@@ -175,10 +175,8 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
     }
 
     if std::env::var(settings::CHECK_JSON).is_ok() {
-        let expected_json = fs::read_to_string(&json_file).context(format!(
-            "could not load json fixtures for {}",
-            file.display()
-        ))?;
+        let expected_json = fs::read_to_string(&json_file)
+            .with_context(|| format!("could not load json fixtures for {}", file.display()))?;
         let expected_suggestions =
             rustfix::get_suggestions_from_json(&expected_json, &HashSet::new(), filter_suggestions)
                 .context("could not load expected suggestions")?;
@@ -194,7 +192,7 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
     }
 
     let fixed = apply_suggestions(&code, &suggestions)
-        .context(format!("could not apply suggestions to {}", file.display()))?
+        .with_context(|| format!("could not apply suggestions to {}", file.display()))?
         .replace('\r', "");
 
     if std::env::var(settings::RECORD_FIXED_RUST).is_ok() {
@@ -209,7 +207,7 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
     }
 
     let expected_fixed = fs::read_to_string(&fixed_file)
-        .context(format!("could read fixed file for {}", file.display()))?
+        .with_context(|| format!("could read fixed file for {}", file.display()))?
         .replace('\r', "");
     ensure!(
         fixed.trim() == expected_fixed.trim(),
@@ -236,7 +234,7 @@ fn get_fixture_files(p: &str) -> Result<Vec<PathBuf>, Error> {
 
 fn assert_fixtures(dir: &str, mode: &str) {
     let files = get_fixture_files(dir)
-        .context(format!("couldn't load dir `{}`", dir))
+        .with_context(|| format!("couldn't load dir `{dir}`"))
         .unwrap();
     let mut failures = 0;
 
src/cargo/core/compiler/build_runner/mod.rs (4 additions, 4 deletions)
@@ -100,8 +100,8 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
         let jobserver = match bcx.gctx.jobserver_from_env() {
             Some(c) => c.clone(),
             None => {
-                let client = Client::new(bcx.jobs() as usize)
-                    .with_context(|| "failed to create jobserver")?;
+                let client =
+                    Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
                 client.acquire_raw()?;
                 client
             }
@@ -354,11 +354,11 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
             .unwrap()
             .host
             .prepare()
-            .with_context(|| "couldn't prepare build directories")?;
+            .context("couldn't prepare build directories")?;
         for target in self.files.as_mut().unwrap().target.values_mut() {
             target
                 .prepare()
-                .with_context(|| "couldn't prepare build directories")?;
+                .context("couldn't prepare build directories")?;
         }
 
         let files = self.files.as_ref().unwrap();
src/cargo/core/compiler/custom_build.rs (1 addition, 1 deletion)
@@ -431,7 +431,7 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul
     // If we have an old build directory, then just move it into place,
     // otherwise create it!
     paths::create_dir_all(&script_out_dir)
-        .with_context(|| "failed to create script output directory for build command")?;
+        .context("failed to create script output directory for build command")?;
 
     // For all our native lib dependencies, pick up their metadata to pass
     // along to this custom build command. We're also careful to augment our
src/cargo/core/compiler/future_incompat.rs (2 additions, 2 deletions)
@@ -211,9 +211,9 @@ impl OnDiskReports {
         report_file
             .file()
             .read_to_string(&mut file_contents)
-            .with_context(|| "failed to read report")?;
+            .context("failed to read report")?;
         let on_disk_reports: OnDiskReports =
-            serde_json::from_str(&file_contents).with_context(|| "failed to load report")?;
+            serde_json::from_str(&file_contents).context("failed to load report")?;
         if on_disk_reports.version != ON_DISK_VERSION {
             bail!("unable to read reports; reports were saved from a future version of Cargo");
         }
src/cargo/core/compiler/job_queue/mod.rs (2 additions, 2 deletions)
@@ -513,7 +513,7 @@ impl<'gctx> JobQueue<'gctx> {
             .into_helper_thread(move |token| {
                 messages.push(Message::Token(token));
             })
-            .with_context(|| "failed to create helper thread for jobserver management")?;
+            .context("failed to create helper thread for jobserver management")?;
 
         // Create a helper thread to manage the diagnostics for rustfix if
         // necessary.
@@ -700,7 +700,7 @@ impl<'gctx> DrainState<'gctx> {
                     .push(FutureIncompatReportPackage { package_id, items });
             }
             Message::Token(acquired_token) => {
-                let token = acquired_token.with_context(|| "failed to acquire jobserver token")?;
+                let token = acquired_token.context("failed to acquire jobserver token")?;
                 self.tokens.push(token);
             }
         }
src/cargo/core/compiler/timings.rs (1 addition, 1 deletion)
@@ -299,7 +299,7 @@ impl<'gctx> Timings<'gctx> {
             .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap());
         if self.report_html {
             self.report_html(build_runner, error)
-                .with_context(|| "failed to save timing report")?;
+                .context("failed to save timing report")?;
         }
         Ok(())
     }
src/cargo/core/global_cache_tracker.rs (2 additions, 2 deletions)
@@ -543,7 +543,7 @@ impl GlobalCacheTracker {
     /// Deletes files from the global cache based on the given options.
     pub fn clean(&mut self, clean_ctx: &mut CleanContext<'_>, gc_opts: &GcOpts) -> CargoResult<()> {
         self.clean_inner(clean_ctx, gc_opts)
-            .with_context(|| "failed to clean entries from the global cache")
+            .context("failed to clean entries from the global cache")
     }
 
     #[tracing::instrument(skip_all)]
@@ -575,7 +575,7 @@ impl GlobalCacheTracker {
                 gc_opts.is_download_cache_size_set(),
                 &mut delete_paths,
             )
-            .with_context(|| "failed to sync tracking database")?
+            .context("failed to sync tracking database")?
         }
         if let Some(max_age) = gc_opts.max_index_age {
             let max_age = now - max_age.as_secs();
src/cargo/core/package.rs (4 additions, 4 deletions)
@@ -393,7 +393,7 @@ impl<'gctx> PackageSet<'gctx> {
         let multiplexing = gctx.http_config()?.multiplexing.unwrap_or(true);
         multi
             .pipelining(false, multiplexing)
-            .with_context(|| "failed to enable multiplexing/pipelining in curl")?;
+            .context("failed to enable multiplexing/pipelining in curl")?;
 
         // let's not flood crates.io with connections
         multi.set_max_host_connections(2)?;
@@ -681,7 +681,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
         let pkg = source
             .download(id)
-            .with_context(|| "unable to get packages from source")?;
+            .context("unable to get packages from source")?;
         let (url, descriptor, authorization) = match pkg {
             MaybePackage::Ready(pkg) => {
                 debug!("{} doesn't need a download", id);
@@ -951,7 +951,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             self.set
                 .multi
                 .perform()
-                .with_context(|| "failed to perform http requests")
+                .context("failed to perform http requests")
         })?;
         debug!(target: "network", "handles remaining: {}", n);
         let results = &mut self.results;
@@ -981,7 +981,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             self.set
                 .multi
                 .wait(&mut [], timeout)
-                .with_context(|| "failed to wait on curl `Multi`")?;
+                .context("failed to wait on curl `Multi`")?;
         }
     }
 }
src/cargo/core/registry.rs (2 additions, 2 deletions)
@@ -444,8 +444,8 @@ impl<'gctx> PackageRegistry<'gctx> {
                     patches must point to different sources",
                     dep.package_name(),
                     url
-                ))
-                .context(format!("failed to resolve patches for `{}`", url));
+                )
+                .context(format!("failed to resolve patches for `{}`", url)));
             }
             unlocked_summaries.push(summary);
         }
src/cargo/ops/cargo_add/mod.rs (3 additions, 4 deletions)
@@ -1085,10 +1085,9 @@ fn find_workspace_dep(toml_key: &str, root_manifest: &Path) -> CargoResult<Depen
         .context("could not find `dependencies` table in `workspace`")?
         .as_table_like()
         .context("could not make `dependencies` into a table")?;
-    let dep_item = dependencies.get(toml_key).context(format!(
-        "could not find {} in `workspace.dependencies`",
-        toml_key
-    ))?;
+    let dep_item = dependencies
+        .get(toml_key)
+        .with_context(|| format!("could not find {toml_key} in `workspace.dependencies`"))?;
     Dependency::from_toml(root_manifest.parent().unwrap(), toml_key, dep_item)
 }
 
src/cargo/ops/cargo_package.rs (3 additions, 3 deletions)
@@ -147,13 +147,13 @@ fn create_package(
         .status("Packaging", pkg.package_id().to_string())?;
     dst.file().set_len(0)?;
     let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename)
-        .with_context(|| "failed to prepare local package for uploading")?;
+        .context("failed to prepare local package for uploading")?;
 
     dst.seek(SeekFrom::Start(0))?;
     let src_path = dst.path();
     let dst_path = dst.parent().join(&filename);
     fs::rename(&src_path, &dst_path)
-        .with_context(|| "failed to move temporary tarball into final location")?;
+        .context("failed to move temporary tarball into final location")?;
 
     let dst_metadata = dst
         .file()
@@ -331,7 +331,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<Fi
     if opts.verify {
         for (pkg, opts, tarball) in &outputs {
             run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
-                .with_context(|| "failed to verify package tarball")?
+                .context("failed to verify package tarball")?
         }
     }
 
src/cargo/ops/common_for_install_and_uninstall.rs (2 additions, 3 deletions)
@@ -111,7 +111,7 @@ impl InstallTracker {
             if contents.is_empty() {
                 Ok(CrateListingV1::default())
             } else {
-                Ok(toml::from_str(&contents).with_context(|| "invalid TOML found for metadata")?)
+                Ok(toml::from_str(&contents).context("invalid TOML found for metadata")?)
             }
         })()
         .with_context(|| {
@@ -127,8 +127,7 @@ impl InstallTracker {
             let mut v2 = if contents.is_empty() {
                 CrateListingV2::default()
             } else {
-                serde_json::from_str(&contents)
-                    .with_context(|| "invalid JSON found for metadata")?
+                serde_json::from_str(&contents).context("invalid JSON found for metadata")?
             };
             v2.sync_v1(&v1);
             Ok(v2)
src/cargo/ops/vendor.rs (6 additions, 6 deletions)
@@ -32,7 +32,7 @@ pub fn vendor(ws: &Workspace<'_>, opts: &VendorOptions<'_>) -> CargoResult<()> {
     }
     let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::<Vec<_>>();
     let _lock = gctx.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?;
-    let vendor_config = sync(gctx, &workspaces, opts).with_context(|| "failed to sync")?;
+    let vendor_config = sync(gctx, &workspaces, opts).context("failed to sync")?;
 
     if gctx.shell().verbosity() != Verbosity::Quiet {
         if vendor_config.source.is_empty() {
@@ -113,11 +113,11 @@ fn sync(
     // crate to work with.
     for ws in workspaces {
         let (packages, resolve) =
-            ops::resolve_ws(ws, dry_run).with_context(|| "failed to load pkg lockfile")?;
+            ops::resolve_ws(ws, dry_run).context("failed to load pkg lockfile")?;
 
         packages
             .get_many(resolve.iter())
-            .with_context(|| "failed to download packages")?;
+            .context("failed to download packages")?;
 
         for pkg in resolve.iter() {
             // Don't delete actual source code!
@@ -145,11 +145,11 @@ fn sync(
     // tables about them.
     for ws in workspaces {
         let (packages, resolve) =
-            ops::resolve_ws(ws, dry_run).with_context(|| "failed to load pkg lockfile")?;
+            ops::resolve_ws(ws, dry_run).context("failed to load pkg lockfile")?;
 
         packages
             .get_many(resolve.iter())
-            .with_context(|| "failed to download packages")?;
+            .context("failed to download packages")?;
 
         for pkg in resolve.iter() {
             // No need to vendor path crates since they're already in the
@@ -161,7 +161,7 @@ fn sync(
             pkg,
             packages
                 .get_one(pkg)
-                .with_context(|| "failed to fetch package")?
+                .context("failed to fetch package")?
                 .clone(),
         );
 
src/cargo/sources/git/oxide.rs (2 additions, 3 deletions)
@@ -187,7 +187,6 @@ fn amend_authentication_hints(
         _ => None,
     };
     if let Some(e) = e {
-        use anyhow::Context;
         let auth_message = match e {
             gix::protocol::handshake::Error::Credentials(_) => {
                 "\n* attempted to find username/password via \
@@ -206,7 +205,7 @@ fn amend_authentication_hints(
                     "if a proxy or similar is necessary `net.git-fetch-with-cli` may help here\n",
                     "https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli"
                 );
-                return Err(anyhow::Error::from(err)).context(msg);
+                return Err(anyhow::Error::from(err).context(msg));
             }
             _ => None,
         };
@@ -225,7 +224,7 @@ fn amend_authentication_hints(
             msg.push_str(
                 "https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli",
             );
-            return Err(anyhow::Error::from(err)).context(msg);
+            return Err(anyhow::Error::from(err).context(msg));
         }
     }
     Err(err.into())
src/cargo/sources/registry/http_remote.rs (3 additions, 3 deletions)
@@ -268,7 +268,7 @@ impl<'gctx> HttpRegistry<'gctx> {
 
         self.multi
             .pipelining(false, self.multiplexing)
-            .with_context(|| "failed to enable multiplexing/pipelining in curl")?;
+            .context("failed to enable multiplexing/pipelining in curl")?;
 
         // let's not flood the server with connections
        self.multi.set_max_host_connections(2)?;
@@ -802,7 +802,7 @@ impl<'gctx> RegistryData for HttpRegistry<'gctx> {
         let remaining_in_multi = tls::set(&self.downloads, || {
             self.multi
                 .perform()
-                .with_context(|| "failed to perform http requests")
+                .context("failed to perform http requests")
         })?;
         trace!(target: "network", "{} transfers remaining", remaining_in_multi);
 
@@ -823,7 +823,7 @@ impl<'gctx> RegistryData for HttpRegistry<'gctx> {
                 .unwrap_or_else(|| Duration::new(1, 0));
             self.multi
                 .wait(&mut [], timeout)
-                .with_context(|| "failed to wait on curl `Multi`")?;
+                .context("failed to wait on curl `Multi`")?;
         }
     }
 }
src/cargo/sources/registry/mod.rs (2 additions, 2 deletions)
@@ -642,10 +642,10 @@ impl<'gctx> RegistrySource<'gctx> {
         let prefix = unpack_dir.file_name().unwrap();
         let parent = unpack_dir.parent().unwrap();
         for entry in tar.entries()? {
-            let mut entry = entry.with_context(|| "failed to iterate over archive")?;
+            let mut entry = entry.context("failed to iterate over archive")?;
             let entry_path = entry
                 .path()
-                .with_context(|| "failed to read entry path")?
+                .context("failed to read entry path")?
                 .into_owned();
 
             // We're going to unpack this tarball into the global source
src/cargo/util/cache_lock.rs (2 additions, 2 deletions)
@@ -308,7 +308,7 @@ impl RecursiveLock {
                 self.is_exclusive = true;
                 return Ok(());
             } else {
-                return Err(e).with_context(|| "failed to acquire package cache lock");
+                return Err(e).context("failed to acquire package cache lock");
             }
         }
     }
@@ -331,7 +331,7 @@ impl RecursiveLock {
                 self.is_exclusive = true;
                 return Ok(result);
             } else {
-                return Err(e).with_context(|| "failed to acquire package cache lock");
+                return Err(e).context("failed to acquire package cache lock");
             }
         }
     }
(5 more changed files not shown)
