
Commit

Auto merge of rust-lang#134257 - matthiaskrgr:rollup-l8uh1ee, r=matthiaskrgr

Rollup of 7 pull requests

Successful merges:

 - rust-lang#132038 (Add lint rule for `#[deprecated]` on re-exports)
 - rust-lang#132150 (Fix powerpc64 big-endian FreeBSD ABI)
 - rust-lang#133633 (don't show the full linker args unless `--verbose` is passed)
 - rust-lang#133942 (Clarify how to use `black_box()`; a short usage sketch follows the changed-files summary below)
 - rust-lang#134081 (Try to evaluate constants in legacy mangling)
 - rust-lang#134192 (Remove `Lexer`'s dependency on `Parser`.)
 - rust-lang#134211 (On Neutrino QNX, reduce the need to set archiver via environment variables)

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Dec 13, 2024
2 parents 4847d6a + 3149de6 commit b76bac2
Showing 34 changed files with 413 additions and 207 deletions.
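
PR rust-lang#133942 only touches documentation, which is collapsed out of this view. As a reminder of the pattern those docs describe, here is a minimal benchmarking sketch using the stable `std::hint::black_box`; the function being timed and the iteration counts are invented for illustration:

```rust
use std::hint::black_box;
use std::time::Instant;

// A deliberately simple function to time.
fn sum_of_squares(n: u64) -> u64 {
    (1..=n).map(|i| i * i).sum()
}

fn main() {
    let start = Instant::now();
    let mut last = 0;
    for _ in 0..1_000 {
        // `black_box` hides the argument and the result from the optimizer, so
        // the call is neither constant-folded nor hoisted out of the loop.
        last = black_box(sum_of_squares(black_box(10_000)));
    }
    println!("last = {last}, elapsed = {:?}", start.elapsed());
}
```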
4 changes: 2 additions & 2 deletions compiler/rustc_codegen_ssa/src/back/link.rs
@@ -992,12 +992,12 @@ fn link_natively(
let mut output = prog.stderr.clone();
output.extend_from_slice(&prog.stdout);
let escaped_output = escape_linker_output(&output, flavor);
// FIXME: Add UI tests for this error.
let err = errors::LinkingFailed {
linker_path: &linker_path,
exit_status: prog.status,
command: &cmd,
command: cmd,
escaped_output,
verbose: sess.opts.verbose,
};
sess.dcx().emit_err(err);
// If MSVC's `link.exe` was expected but the return code
70 changes: 66 additions & 4 deletions compiler/rustc_codegen_ssa/src/errors.rs
@@ -1,6 +1,7 @@
//! Errors emitted by codegen_ssa
use std::borrow::Cow;
use std::ffi::OsString;
use std::io::Error;
use std::num::ParseIntError;
use std::path::{Path, PathBuf};
@@ -345,21 +346,82 @@ impl<G: EmissionGuarantee> Diagnostic<'_, G> for ThorinErrorWrapper {
}

pub(crate) struct LinkingFailed<'a> {
pub linker_path: &'a PathBuf,
pub linker_path: &'a Path,
pub exit_status: ExitStatus,
pub command: &'a Command,
pub command: Command,
pub escaped_output: String,
pub verbose: bool,
}

impl<G: EmissionGuarantee> Diagnostic<'_, G> for LinkingFailed<'_> {
fn into_diag(self, dcx: DiagCtxtHandle<'_>, level: Level) -> Diag<'_, G> {
fn into_diag(mut self, dcx: DiagCtxtHandle<'_>, level: Level) -> Diag<'_, G> {
let mut diag = Diag::new(dcx, level, fluent::codegen_ssa_linking_failed);
diag.arg("linker_path", format!("{}", self.linker_path.display()));
diag.arg("exit_status", format!("{}", self.exit_status));

let contains_undefined_ref = self.escaped_output.contains("undefined reference to");

diag.note(format!("{:?}", self.command)).note(self.escaped_output);
if self.verbose {
diag.note(format!("{:?}", self.command));
} else {
enum ArgGroup {
Regular(OsString),
Objects(usize),
Rlibs(PathBuf, Vec<OsString>),
}

// Omit rust object files and fold rlibs in the error by default to make linker errors a
// bit less verbose.
let orig_args = self.command.take_args();
let mut args: Vec<ArgGroup> = vec![];
for arg in orig_args {
if arg.as_encoded_bytes().ends_with(b".rcgu.o") {
if let Some(ArgGroup::Objects(n)) = args.last_mut() {
*n += 1;
} else {
args.push(ArgGroup::Objects(1));
}
} else if arg.as_encoded_bytes().ends_with(b".rlib") {
let rlib_path = Path::new(&arg);
let dir = rlib_path.parent().unwrap();
let filename = rlib_path.file_name().unwrap().to_owned();
if let Some(ArgGroup::Rlibs(parent, rlibs)) = args.last_mut() {
if parent == dir {
rlibs.push(filename);
} else {
args.push(ArgGroup::Rlibs(dir.to_owned(), vec![filename]));
}
} else {
args.push(ArgGroup::Rlibs(dir.to_owned(), vec![filename]));
}
} else {
args.push(ArgGroup::Regular(arg));
}
}
self.command.args(args.into_iter().map(|arg_group| match arg_group {
ArgGroup::Regular(arg) => arg,
ArgGroup::Objects(n) => OsString::from(format!("<{n} object files omitted>")),
ArgGroup::Rlibs(dir, rlibs) => {
let mut arg = dir.into_os_string();
arg.push("/{");
let mut first = true;
for rlib in rlibs {
if !first {
arg.push(",");
}
first = false;
arg.push(rlib);
}
arg.push("}");
arg
}
}));

diag.note(format!("{:?}", self.command));
diag.note("some arguments are omitted. use `--verbose` to show all linker arguments");
}

diag.note(self.escaped_output);

// Trying to match an error from OS linkers
// which by now we have no way to translate.
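
The folding logic added to `LinkingFailed` above can be exercised outside the compiler. The sketch below strips the diagnostic plumbing and uses invented file names; it is not the shipped code, only a standalone illustration of how consecutive `*.rcgu.o` files collapse into a count and same-directory rlibs fold into `dir/{a.rlib,b.rlib}`:

```rust
use std::ffi::OsString;
use std::path::{Path, PathBuf};

// Groups of adjacent linker arguments, as in the diff above.
enum ArgGroup {
    Regular(OsString),
    Objects(usize),
    Rlibs(PathBuf, Vec<OsString>),
}

fn fold_args(orig_args: impl IntoIterator<Item = OsString>) -> Vec<OsString> {
    let mut args: Vec<ArgGroup> = vec![];
    for arg in orig_args {
        if arg.as_encoded_bytes().ends_with(b".rcgu.o") {
            // Consecutive codegen-unit objects become a single counter.
            if let Some(ArgGroup::Objects(n)) = args.last_mut() {
                *n += 1;
            } else {
                args.push(ArgGroup::Objects(1));
            }
        } else if arg.as_encoded_bytes().ends_with(b".rlib") {
            // Consecutive rlibs from the same directory are folded together.
            let rlib_path = Path::new(&arg);
            let dir = rlib_path.parent().unwrap();
            let filename = rlib_path.file_name().unwrap().to_owned();
            if let Some(ArgGroup::Rlibs(parent, rlibs)) = args.last_mut() {
                if parent == dir {
                    rlibs.push(filename);
                } else {
                    args.push(ArgGroup::Rlibs(dir.to_owned(), vec![filename]));
                }
            } else {
                args.push(ArgGroup::Rlibs(dir.to_owned(), vec![filename]));
            }
        } else {
            args.push(ArgGroup::Regular(arg));
        }
    }
    // Render each group back into a single printable argument.
    args.into_iter()
        .map(|group| match group {
            ArgGroup::Regular(arg) => arg,
            ArgGroup::Objects(n) => OsString::from(format!("<{n} object files omitted>")),
            ArgGroup::Rlibs(dir, rlibs) => {
                let mut arg = dir.into_os_string();
                arg.push("/{");
                for (i, rlib) in rlibs.into_iter().enumerate() {
                    if i > 0 {
                        arg.push(",");
                    }
                    arg.push(rlib);
                }
                arg.push("}");
                arg
            }
        })
        .collect()
}

fn main() {
    // Hypothetical linker invocation; real ones are far longer.
    let args = ["cc", "crate.0.rcgu.o", "crate.1.rcgu.o", "/deps/liba.rlib", "/deps/libstd.rlib", "-o", "binary"]
        .map(|a| OsString::from(a));
    for arg in fold_args(args) {
        println!("{}", arg.to_string_lossy());
    }
}
```

Running it prints `cc`, `<2 object files omitted>`, `/deps/{liba.rlib,libstd.rlib}`, `-o`, `binary`, which is the shape the non-`--verbose` diagnostic now shows.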
40 changes: 23 additions & 17 deletions compiler/rustc_parse/src/lexer/mod.rs
@@ -69,24 +69,30 @@ pub(crate) fn lex_token_trees<'psess, 'src>(
token: Token::dummy(),
diag_info: TokenTreeDiagInfo::default(),
};
let (_open_spacing, stream, res) = lexer.lex_token_trees(/* is_delimited */ false);
let unmatched_delims = lexer.diag_info.unmatched_delims;

if res.is_ok() && unmatched_delims.is_empty() {
Ok(stream)
} else {
// Return error if there are unmatched delimiters or unclosed delimiters.
// We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
// because the delimiter mismatch is more likely to be the root cause of error
let mut buffer: Vec<_> = unmatched_delims
.into_iter()
.filter_map(|unmatched_delim| make_unclosed_delims_error(unmatched_delim, psess))
.collect();
if let Err(errs) = res {
// Add unclosing delimiter or diff marker errors
buffer.extend(errs);
let res = lexer.lex_token_trees(/* is_delimited */ false);

let mut unmatched_delims: Vec<_> = lexer
.diag_info
.unmatched_delims
.into_iter()
.filter_map(|unmatched_delim| make_unclosed_delims_error(unmatched_delim, psess))
.collect();

match res {
Ok((_open_spacing, stream)) => {
if unmatched_delims.is_empty() {
Ok(stream)
} else {
// Return error if there are unmatched delimiters or unclosed delimiters.
Err(unmatched_delims)
}
}
Err(errs) => {
// We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
// because the delimiter mismatch is more likely to be the root cause of error
unmatched_delims.extend(errs);
Err(unmatched_delims)
}
Err(buffer)
}
}

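
The `mod.rs` hunk above (part of rust-lang#134192) changes `lex_token_trees` from returning a `(Spacing, TokenStream, Result<(), Vec<Diag>>)` tuple to `Result<(Spacing, TokenStream), Vec<Diag>>`. A toy stand-in with invented types shows why the new shape is easier on callers; the `?` in the `tokentrees.rs` hunk below relies on exactly this:

```rust
// Invented stand-ins for Spacing/TokenStream/Diag.
struct Stream(Vec<String>);
struct Diag(String);

// Old shape: partial output plus a separate status that every caller must check.
fn lex_old() -> (Stream, Result<(), Vec<Diag>>) {
    (Stream(vec!["fn".into()]), Ok(()))
}

// New shape: success and failure are mutually exclusive.
fn lex_new() -> Result<Stream, Vec<Diag>> {
    Ok(Stream(vec!["fn".into()]))
}

fn caller_old() -> Result<Stream, Vec<Diag>> {
    let (stream, res) = lex_old();
    res.map(|()| stream) // the two halves have to be re-joined by hand
}

fn caller_new() -> Result<Stream, Vec<Diag>> {
    let stream = lex_new()?; // the `?` in tokentrees.rs corresponds to this line
    Ok(stream)
}

fn main() {
    assert!(caller_old().is_ok());
    assert!(caller_new().is_ok());
}
```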
94 changes: 14 additions & 80 deletions compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -1,20 +1,18 @@
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_ast_pretty::pprust::token_to_string;
use rustc_errors::{Applicability, Diag};
use rustc_span::symbol::kw;
use rustc_errors::Diag;

use super::diagnostics::{report_suspicious_mismatch_block, same_indentation_level};
use super::{Lexer, UnmatchedDelim};
use crate::Parser;

impl<'psess, 'src> Lexer<'psess, 'src> {
// Lex into a token stream. The `Spacing` in the result is that of the
// opening delimiter.
pub(super) fn lex_token_trees(
&mut self,
is_delimited: bool,
) -> (Spacing, TokenStream, Result<(), Vec<Diag<'psess>>>) {
) -> Result<(Spacing, TokenStream), Vec<Diag<'psess>>> {
// Move past the opening delimiter.
let open_spacing = self.bump_minimal();

@@ -27,25 +25,25 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
buf.push(match self.lex_token_tree_open_delim(delim) {
Ok(val) => val,
Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
Err(errs) => return Err(errs),
})
}
token::CloseDelim(delim) => {
// Invisible delimiters cannot occur here because `TokenTreesReader` parses
// code directly from strings, with no macro expansion involved.
debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
return (
open_spacing,
TokenStream::new(buf),
if is_delimited { Ok(()) } else { Err(vec![self.close_delim_err(delim)]) },
);
return if is_delimited {
Ok((open_spacing, TokenStream::new(buf)))
} else {
Err(vec![self.close_delim_err(delim)])
};
}
token::Eof => {
return (
open_spacing,
TokenStream::new(buf),
if is_delimited { Err(vec![self.eof_err()]) } else { Ok(()) },
);
return if is_delimited {
Err(vec![self.eof_err()])
} else {
Ok((open_spacing, TokenStream::new(buf)))
};
}
_ => {
// Get the next normal token.
@@ -107,10 +105,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
// Lex the token trees within the delimiters.
// We stop at any delimiter so we can try to recover if the user
// uses an incorrect delimiter.
let (open_spacing, tts, res) = self.lex_token_trees(/* is_delimited */ true);
if let Err(errs) = res {
return Err(self.unclosed_delim_err(tts, errs));
}
let (open_spacing, tts) = self.lex_token_trees(/* is_delimited */ true)?;

// Expand to cover the entire delimited token tree.
let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
@@ -247,67 +242,6 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
this_spacing
}

fn unclosed_delim_err(
&mut self,
tts: TokenStream,
mut errs: Vec<Diag<'psess>>,
) -> Vec<Diag<'psess>> {
// If there are unclosed delims, see if there are diff markers and if so, point them
// out instead of complaining about the unclosed delims.
let mut parser = Parser::new(self.psess, tts, None);
let mut diff_errs = vec![];
// Suggest removing a `{` we think appears in an `if`/`while` condition.
// We want to suggest removing a `{` only if we think we're in an `if`/`while` condition,
// but we have no way of tracking this in the lexer itself, so we piggyback on the parser.
let mut in_cond = false;
while parser.token != token::Eof {
if let Err(diff_err) = parser.err_vcs_conflict_marker() {
diff_errs.push(diff_err);
} else if parser.is_keyword_ahead(0, &[kw::If, kw::While]) {
in_cond = true;
} else if matches!(
parser.token.kind,
token::CloseDelim(Delimiter::Brace) | token::FatArrow
) {
// End of the `if`/`while` body, or the end of a `match` guard.
in_cond = false;
} else if in_cond && parser.token == token::OpenDelim(Delimiter::Brace) {
// Store the `&&` and `let` to use their spans later when creating the diagnostic
let maybe_andand = parser.look_ahead(1, |t| t.clone());
let maybe_let = parser.look_ahead(2, |t| t.clone());
if maybe_andand == token::OpenDelim(Delimiter::Brace) {
// This might be the beginning of the `if`/`while` body (i.e., the end of the
// condition).
in_cond = false;
} else if maybe_andand == token::AndAnd && maybe_let.is_keyword(kw::Let) {
let mut err = parser.dcx().struct_span_err(
parser.token.span,
"found a `{` in the middle of a let-chain",
);
err.span_suggestion(
parser.token.span,
"consider removing this brace to parse the `let` as part of the same chain",
"",
Applicability::MachineApplicable,
);
err.span_label(
maybe_andand.span.to(maybe_let.span),
"you might have meant to continue the let-chain here",
);
errs.push(err);
}
}
parser.bump();
}
if !diff_errs.is_empty() {
for err in errs {
err.cancel();
}
return diff_errs;
}
errs
}

fn close_delim_err(&mut self, delim: Delimiter) -> Diag<'psess> {
// An unexpected closing delimiter (i.e., there is no matching opening delimiter).
let token_str = token_to_string(&self.token);
3 changes: 3 additions & 0 deletions compiler/rustc_passes/src/stability.rs
@@ -411,6 +411,9 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> {
kind = AnnotationKind::DeprecationProhibited;
const_stab_inherit = InheritConstStability::Yes;
}
hir::ItemKind::Use(_, _) => {
kind = AnnotationKind::DeprecationProhibited;
}
hir::ItemKind::Struct(ref sd, _) => {
if let Some(ctor_def_id) = sd.ctor_def_id() {
self.annotate(
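
The `stability.rs` arm added above is the core of rust-lang#132038: `use` items now fall under `AnnotationKind::DeprecationProhibited`. The snippet below illustrates the pattern this starts flagging; it is not taken from the PR's test suite, and the exact diagnostic wording is whatever the compiler emits for prohibited deprecation. Compiled as a library crate, the attribute on the `use` item is the one the new rule reports:

```rust
mod inner {
    pub struct Widget;
}

// Deprecating a re-export has no effect on users who reach `Widget` through
// other paths, so the attribute on the `use` item itself is now linted.
#[deprecated = "import `inner::Widget` directly"]
pub use inner::Widget;

// Deprecating the item itself keeps working as before.
#[deprecated = "prefer `Widget`"]
pub struct OldWidget;
```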
29 changes: 28 additions & 1 deletion compiler/rustc_symbol_mangling/src/legacy.rs
@@ -2,7 +2,7 @@ use std::fmt::{self, Write};
use std::mem::{self, discriminant};

use rustc_data_structures::stable_hasher::{Hash64, HashStable, StableHasher};
use rustc_hir::def_id::CrateNum;
use rustc_hir::def_id::{CrateNum, DefId};
use rustc_hir::definitions::{DefPathData, DisambiguatedDefPathData};
use rustc_middle::bug;
use rustc_middle::ty::print::{PrettyPrinter, Print, PrintError, Printer};
@@ -378,6 +378,33 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> {
Ok(())
}
}

fn print_impl_path(
&mut self,
impl_def_id: DefId,
args: &'tcx [GenericArg<'tcx>],
mut self_ty: Ty<'tcx>,
mut impl_trait_ref: Option<ty::TraitRef<'tcx>>,
) -> Result<(), PrintError> {
let mut typing_env = ty::TypingEnv::post_analysis(self.tcx, impl_def_id);
if !args.is_empty() {
typing_env.param_env =
ty::EarlyBinder::bind(typing_env.param_env).instantiate(self.tcx, args);
}

match &mut impl_trait_ref {
Some(impl_trait_ref) => {
assert_eq!(impl_trait_ref.self_ty(), self_ty);
*impl_trait_ref = self.tcx.normalize_erasing_regions(typing_env, *impl_trait_ref);
self_ty = impl_trait_ref.self_ty();
}
None => {
self_ty = self.tcx.normalize_erasing_regions(typing_env, self_ty);
}
}

self.default_print_impl_path(impl_def_id, args, self_ty, impl_trait_ref)
}
}

impl<'tcx> PrettyPrinter<'tcx> for SymbolPrinter<'tcx> {
2 changes: 1 addition & 1 deletion compiler/rustc_target/src/callconv/powerpc64.rs
@@ -99,7 +99,7 @@
Ty: TyAbiInterface<'a, C> + Copy,
C: HasDataLayout + HasTargetSpec,
{
let abi = if cx.target_spec().env == "musl" {
let abi = if cx.target_spec().env == "musl" || cx.target_spec().os == "freebsd" {
ELFv2
} else if cx.target_spec().os == "aix" {
AIX
compiler/rustc_target/src/spec/targets/powerpc64_unknown_freebsd.rs
@@ -11,7 +11,7 @@ pub(crate) fn target() -> Target {
Target {
llvm_target: "powerpc64-unknown-freebsd".into(),
metadata: crate::spec::TargetMetadata {
description: Some("PPC64 FreeBSD (ELFv1 and ELFv2)".into()),
description: Some("PPC64 FreeBSD (ELFv2)".into()),
tier: Some(3),
host_tools: Some(true),
std: Some(true),
5 changes: 0 additions & 5 deletions library/core/src/alloc/mod.rs
@@ -10,11 +10,6 @@ pub use self::global::GlobalAlloc;
#[stable(feature = "alloc_layout", since = "1.28.0")]
pub use self::layout::Layout;
#[stable(feature = "alloc_layout", since = "1.28.0")]
#[deprecated(
since = "1.52.0",
note = "Name does not follow std convention, use LayoutError",
suggestion = "LayoutError"
)]
#[allow(deprecated, deprecated_in_future)]
pub use self::layout::LayoutErr;
#[stable(feature = "alloc_layout_error", since = "1.50.0")]