diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index fa0f532619611..8c9ad83608761 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -881,9 +881,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> { &item.vis, errors::VisibilityNotPermittedNote::TraitImpl, ); - // njn: use Dummy here - if let TyKind::Err(_) = self_ty.kind { - this.dcx().emit_err(errors::ObsoleteAuto { span: item.span }); + if let TyKind::Dummy = self_ty.kind { + // Abort immediately otherwise the `TyKind::Dummy` will reach HIR lowering, + // which isn't allowed. Not a problem for this obscure, obsolete syntax. + this.dcx().emit_fatal(errors::ObsoleteAuto { span: item.span }); } if let (&Unsafe::Yes(span), &ImplPolarity::Negative(sp)) = (unsafety, polarity) { diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs index 7099cdd5a7540..9e4e7911c3a73 100644 --- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs @@ -3,10 +3,11 @@ use either::{Left, Right}; use rustc_hir::def::DefKind; use rustc_middle::mir::interpret::{AllocId, ErrorHandled, InterpErrorInfo}; use rustc_middle::mir::{self, ConstAlloc, ConstValue}; +use rustc_middle::query::TyCtxtAt; use rustc_middle::traits::Reveal; use rustc_middle::ty::layout::LayoutOf; use rustc_middle::ty::print::with_no_trimmed_paths; -use rustc_middle::ty::{self, TyCtxt}; +use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_span::def_id::LocalDefId; use rustc_span::Span; use rustc_target::abi::{self, Abi}; @@ -87,13 +88,16 @@ fn eval_body_using_ecx<'mir, 'tcx>( } /// The `InterpCx` is only meant to be used to do field and index projections into constants for -/// `simd_shuffle` and const patterns in match arms. It never performs alignment checks. +/// `simd_shuffle` and const patterns in match arms. +/// +/// This should *not* be used to do any actual interpretation. In particular, alignment checks are +/// turned off! /// /// The function containing the `match` that is currently being analyzed may have generic bounds /// that inform us about the generic bounds of the constant. E.g., using an associated constant /// of a function's generic parameter will require knowledge about the bounds on the generic /// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument. -pub(crate) fn mk_eval_cx<'mir, 'tcx>( +pub(crate) fn mk_eval_cx_to_read_const_val<'mir, 'tcx>( tcx: TyCtxt<'tcx>, root_span: Span, param_env: ty::ParamEnv<'tcx>, @@ -108,6 +112,19 @@ pub(crate) fn mk_eval_cx<'mir, 'tcx>( ) } +/// Create an interpreter context to inspect the given `ConstValue`. +/// Returns both the context and an `OpTy` that represents the constant. +pub fn mk_eval_cx_for_const_val<'mir, 'tcx>( + tcx: TyCtxtAt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + val: mir::ConstValue<'tcx>, + ty: Ty<'tcx>, +) -> Option<(CompileTimeEvalContext<'mir, 'tcx>, OpTy<'tcx>)> { + let ecx = mk_eval_cx_to_read_const_val(tcx.tcx, tcx.span, param_env, CanAccessMutGlobal::No); + let op = ecx.const_val_to_op(val, ty, None).ok()?; + Some((ecx, op)) +} + /// This function converts an interpreter value into a MIR constant. 
/// /// The `for_diagnostics` flag turns the usual rules for returning `ConstValue::Scalar` into a @@ -203,7 +220,7 @@ pub(crate) fn turn_into_const_value<'tcx>( let def_id = cid.instance.def.def_id(); let is_static = tcx.is_static(def_id); // This is just accessing an already computed constant, so no need to check alignment here. - let ecx = mk_eval_cx( + let ecx = mk_eval_cx_to_read_const_val( tcx, tcx.def_span(key.value.instance.def_id()), key.param_env, diff --git a/compiler/rustc_const_eval/src/const_eval/mod.rs b/compiler/rustc_const_eval/src/const_eval/mod.rs index cd50701040e82..289dcb7d01d68 100644 --- a/compiler/rustc_const_eval/src/const_eval/mod.rs +++ b/compiler/rustc_const_eval/src/const_eval/mod.rs @@ -47,8 +47,7 @@ pub(crate) fn try_destructure_mir_constant_for_user_output<'tcx>( ty: Ty<'tcx>, ) -> Option> { let param_env = ty::ParamEnv::reveal_all(); - let ecx = mk_eval_cx(tcx.tcx, tcx.span, param_env, CanAccessMutGlobal::No); - let op = ecx.const_val_to_op(val, ty, None).ok()?; + let (ecx, op) = mk_eval_cx_for_const_val(tcx, param_env, val, ty)?; // We go to `usize` as we cannot allocate anything bigger anyway. let (field_count, variant, down) = match ty.kind() { diff --git a/compiler/rustc_const_eval/src/const_eval/valtrees.rs b/compiler/rustc_const_eval/src/const_eval/valtrees.rs index 514a6a7df7617..d3428d27d52fd 100644 --- a/compiler/rustc_const_eval/src/const_eval/valtrees.rs +++ b/compiler/rustc_const_eval/src/const_eval/valtrees.rs @@ -5,7 +5,7 @@ use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt}; use rustc_span::DUMMY_SP; use rustc_target::abi::{Abi, VariantIdx}; -use super::eval_queries::{mk_eval_cx, op_to_const}; +use super::eval_queries::{mk_eval_cx_to_read_const_val, op_to_const}; use super::machine::CompileTimeEvalContext; use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES}; use crate::const_eval::CanAccessMutGlobal; @@ -223,7 +223,7 @@ pub(crate) fn eval_to_valtree<'tcx>( let const_alloc = tcx.eval_to_allocation_raw(param_env.and(cid))?; // FIXME Need to provide a span to `eval_to_valtree` - let ecx = mk_eval_cx( + let ecx = mk_eval_cx_to_read_const_val( tcx, DUMMY_SP, param_env, @@ -287,7 +287,8 @@ pub fn valtree_to_const_value<'tcx>( } } ty::Ref(_, inner_ty, _) => { - let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No); + let mut ecx = + mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No); let imm = valtree_to_ref(&mut ecx, valtree, *inner_ty); let imm = ImmTy::from_immediate(imm, tcx.layout_of(param_env_ty).unwrap()); op_to_const(&ecx, &imm.into(), /* for diagnostics */ false) @@ -314,7 +315,8 @@ pub fn valtree_to_const_value<'tcx>( bug!("could not find non-ZST field during in {layout:#?}"); } - let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No); + let mut ecx = + mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No); // Need to create a place for this valtree. 
let place = create_valtree_place(&mut ecx, layout, valtree); diff --git a/compiler/rustc_const_eval/src/util/caller_location.rs b/compiler/rustc_const_eval/src/util/caller_location.rs index d1c2d22b5a912..b8e15c485f58f 100644 --- a/compiler/rustc_const_eval/src/util/caller_location.rs +++ b/compiler/rustc_const_eval/src/util/caller_location.rs @@ -6,7 +6,7 @@ use rustc_middle::ty::layout::LayoutOf; use rustc_span::symbol::Symbol; use rustc_type_ir::Mutability; -use crate::const_eval::{mk_eval_cx, CanAccessMutGlobal, CompileTimeEvalContext}; +use crate::const_eval::{mk_eval_cx_to_read_const_val, CanAccessMutGlobal, CompileTimeEvalContext}; use crate::interpret::*; /// Allocate a `const core::panic::Location` with the provided filename and line/column numbers. @@ -57,7 +57,12 @@ pub(crate) fn const_caller_location_provider( col: u32, ) -> mir::ConstValue<'_> { trace!("const_caller_location: {}:{}:{}", file, line, col); - let mut ecx = mk_eval_cx(tcx.tcx, tcx.span, ty::ParamEnv::reveal_all(), CanAccessMutGlobal::No); + let mut ecx = mk_eval_cx_to_read_const_val( + tcx.tcx, + tcx.span, + ty::ParamEnv::reveal_all(), + CanAccessMutGlobal::No, + ); let loc_place = alloc_caller_location(&mut ecx, file, line, col); if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() { diff --git a/compiler/rustc_errors/src/diagnostic.rs b/compiler/rustc_errors/src/diagnostic.rs index 57610635ee69c..f096f01591046 100644 --- a/compiler/rustc_errors/src/diagnostic.rs +++ b/compiler/rustc_errors/src/diagnostic.rs @@ -1,19 +1,22 @@ use crate::snippet::Style; use crate::{ - CodeSuggestion, DiagnosticBuilder, DiagnosticMessage, EmissionGuarantee, ErrCode, Level, - MultiSpan, SubdiagnosticMessage, Substitution, SubstitutionPart, SuggestionStyle, + CodeSuggestion, DiagCtxt, DiagnosticMessage, ErrCode, ErrorGuaranteed, ExplicitBug, Level, + MultiSpan, StashKey, SubdiagnosticMessage, Substitution, SubstitutionPart, SuggestionStyle, }; use rustc_data_structures::fx::FxIndexMap; use rustc_error_messages::fluent_value_from_str_list_sep_by_and; use rustc_error_messages::FluentValue; use rustc_lint_defs::{Applicability, LintExpectationId}; +use rustc_span::source_map::Spanned; use rustc_span::symbol::Symbol; use rustc_span::{Span, DUMMY_SP}; use std::borrow::Cow; use std::fmt::{self, Debug}; use std::hash::{Hash, Hasher}; +use std::marker::PhantomData; use std::ops::{Deref, DerefMut}; -use std::panic::Location; +use std::panic; +use std::thread::panicking; /// Error type for `Diagnostic`'s `suggestions` field, indicating that /// `.disable_suggestions()` was called on the `Diagnostic`. @@ -40,6 +43,86 @@ pub enum DiagnosticArgValue { StrListSepByAnd(Vec>), } +/// Trait for types that `DiagnosticBuilder::emit` can return as a "guarantee" +/// (or "proof") token that the emission happened. +pub trait EmissionGuarantee: Sized { + /// This exists so that bugs and fatal errors can both result in `!` (an + /// abort) when emitted, but have different aborting behaviour. + type EmitResult = Self; + + /// Implementation of `DiagnosticBuilder::emit`, fully controlled by each + /// `impl` of `EmissionGuarantee`, to make it impossible to create a value + /// of `Self::EmitResult` without actually performing the emission. 
+ #[track_caller] + fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult; +} + +impl EmissionGuarantee for ErrorGuaranteed { + fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult { + db.emit_producing_error_guaranteed() + } +} + +impl EmissionGuarantee for () { + fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult { + db.emit_producing_nothing(); + } +} + +/// Marker type which enables implementation of `create_bug` and `emit_bug` functions for +/// bug diagnostics. +#[derive(Copy, Clone)] +pub struct BugAbort; + +impl EmissionGuarantee for BugAbort { + type EmitResult = !; + + fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult { + db.emit_producing_nothing(); + panic::panic_any(ExplicitBug); + } +} + +/// Marker type which enables implementation of `create_fatal` and `emit_fatal` functions for +/// fatal diagnostics. +#[derive(Copy, Clone)] +pub struct FatalAbort; + +impl EmissionGuarantee for FatalAbort { + type EmitResult = !; + + fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult { + db.emit_producing_nothing(); + crate::FatalError.raise() + } +} + +impl EmissionGuarantee for rustc_span::fatal_error::FatalError { + fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult { + db.emit_producing_nothing(); + rustc_span::fatal_error::FatalError + } +} + +/// Trait implemented by error types. This is rarely implemented manually. Instead, use +/// `#[derive(Diagnostic)]` -- see [rustc_macros::Diagnostic]. +#[rustc_diagnostic_item = "IntoDiagnostic"] +pub trait IntoDiagnostic<'a, G: EmissionGuarantee = ErrorGuaranteed> { + /// Write out as a diagnostic out of `DiagCtxt`. + #[must_use] + fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a, G>; +} + +impl<'a, T, G> IntoDiagnostic<'a, G> for Spanned +where + T: IntoDiagnostic<'a, G>, + G: EmissionGuarantee, +{ + fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a, G> { + self.node.into_diagnostic(dcx, level).with_span(self.span) + } +} + /// Converts a value of a type into a `DiagnosticArg` (typically a field of an `IntoDiagnostic` /// struct). Implemented as a custom trait rather than `From` so that it is implemented on the type /// being converted rather than on `DiagnosticArgValue`, which enables types from other `rustc_*` @@ -98,36 +181,6 @@ pub trait DecorateLint<'a, G: EmissionGuarantee> { fn msg(&self) -> DiagnosticMessage; } -/// The main part of a diagnostic. Note that `DiagnosticBuilder`, which wraps -/// this type, is used for most operations, and should be used instead whenever -/// possible. This type should only be used when `DiagnosticBuilder`'s lifetime -/// causes difficulties, e.g. when storing diagnostics within `DiagCtxt`. -#[must_use] -#[derive(Clone, Debug, Encodable, Decodable)] -pub struct Diagnostic { - // NOTE(eddyb) this is private to disallow arbitrary after-the-fact changes, - // outside of what methods in this crate themselves allow. - pub(crate) level: Level, - - pub messages: Vec<(DiagnosticMessage, Style)>, - pub code: Option, - pub span: MultiSpan, - pub children: Vec, - pub suggestions: Result, SuggestionsDisabled>, - args: FxIndexMap, - - /// This is not used for highlighting or rendering any error message. Rather, it can be used - /// as a sort key to sort a buffer of diagnostics. By default, it is the primary span of - /// `span` if there is one. Otherwise, it is `DUMMY_SP`. 
- pub sort_span: Span, - - pub is_lint: Option, - - /// With `-Ztrack_diagnostics` enabled, - /// we print where in rustc this error was emitted. - pub(crate) emitted_at: DiagnosticLocation, -} - #[derive(Clone, Debug, Encodable, Decodable)] pub struct DiagnosticLocation { file: Cow<'static, str>, @@ -138,7 +191,7 @@ pub struct DiagnosticLocation { impl DiagnosticLocation { #[track_caller] fn caller() -> Self { - let loc = Location::caller(); + let loc = panic::Location::caller(); DiagnosticLocation { file: loc.file().into(), line: loc.line(), col: loc.column() } } } @@ -157,15 +210,6 @@ pub struct IsLint { has_future_breakage: bool, } -/// A "sub"-diagnostic attached to a parent diagnostic. -/// For example, a note attached to an error. -#[derive(Clone, Debug, PartialEq, Hash, Encodable, Decodable)] -pub struct SubDiagnostic { - pub level: Level, - pub messages: Vec<(DiagnosticMessage, Style)>, - pub span: MultiSpan, -} - #[derive(Debug, PartialEq, Eq)] pub struct DiagnosticStyledString(pub Vec); @@ -215,6 +259,36 @@ impl StringPart { } } +/// The main part of a diagnostic. Note that `DiagnosticBuilder`, which wraps +/// this type, is used for most operations, and should be used instead whenever +/// possible. This type should only be used when `DiagnosticBuilder`'s lifetime +/// causes difficulties, e.g. when storing diagnostics within `DiagCtxt`. +#[must_use] +#[derive(Clone, Debug, Encodable, Decodable)] +pub struct Diagnostic { + // NOTE(eddyb) this is private to disallow arbitrary after-the-fact changes, + // outside of what methods in this crate themselves allow. + pub(crate) level: Level, + + pub messages: Vec<(DiagnosticMessage, Style)>, + pub code: Option, + pub span: MultiSpan, + pub children: Vec, + pub suggestions: Result, SuggestionsDisabled>, + args: FxIndexMap, + + /// This is not used for highlighting or rendering any error message. Rather, it can be used + /// as a sort key to sort a buffer of diagnostics. By default, it is the primary span of + /// `span` if there is one. Otherwise, it is `DUMMY_SP`. + pub sort_span: Span, + + pub is_lint: Option, + + /// With `-Ztrack_diagnostics` enabled, + /// we print where in rustc this error was emitted. + pub(crate) emitted_at: DiagnosticLocation, +} + impl Diagnostic { #[track_caller] pub fn new>(level: Level, message: M) -> Self { @@ -336,6 +410,118 @@ impl Diagnostic { pub fn replace_args(&mut self, args: FxIndexMap) { self.args = args; } + + /// Fields used for Hash, and PartialEq trait. + fn keys( + &self, + ) -> ( + &Level, + &[(DiagnosticMessage, Style)], + &Option, + &MultiSpan, + &[SubDiagnostic], + &Result, SuggestionsDisabled>, + Vec<(&DiagnosticArgName, &DiagnosticArgValue)>, + &Option, + ) { + ( + &self.level, + &self.messages, + &self.code, + &self.span, + &self.children, + &self.suggestions, + self.args().collect(), + // omit self.sort_span + &self.is_lint, + // omit self.emitted_at + ) + } +} + +impl Hash for Diagnostic { + fn hash(&self, state: &mut H) + where + H: Hasher, + { + self.keys().hash(state); + } +} + +impl PartialEq for Diagnostic { + fn eq(&self, other: &Self) -> bool { + self.keys() == other.keys() + } +} + +/// A "sub"-diagnostic attached to a parent diagnostic. +/// For example, a note attached to an error. +#[derive(Clone, Debug, PartialEq, Hash, Encodable, Decodable)] +pub struct SubDiagnostic { + pub level: Level, + pub messages: Vec<(DiagnosticMessage, Style)>, + pub span: MultiSpan, +} + +/// Used for emitting structured error messages and other diagnostic information. 
+/// Wraps a `Diagnostic`, adding some useful things.
+/// - The `dcx` field, allowing it to (a) emit itself, and (b) do a drop check
+///   that it has been emitted or cancelled.
+/// - The `EmissionGuarantee`, which determines the type returned from `emit`.
+///
+/// Each constructed `DiagnosticBuilder` must be consumed by a function such as
+/// `emit`, `cancel`, `delay_as_bug`, or `into_diagnostic`. A panic occurs if a
+/// `DiagnosticBuilder` is dropped without being consumed by one of these
+/// functions.
+///
+/// If there is some state in a downstream crate you would like to
+/// access in the methods of `DiagnosticBuilder` here, consider
+/// extending `DiagCtxtFlags`.
+#[must_use]
+pub struct DiagnosticBuilder<'a, G: EmissionGuarantee = ErrorGuaranteed> {
+    pub dcx: &'a DiagCtxt,
+
+    /// Why the `Option`? It is always `Some` until the `DiagnosticBuilder` is
+    /// consumed via `emit`, `cancel`, etc. At that point it is consumed and
+    /// replaced with `None`. Then `drop` checks that it is `None`; if not, it
+    /// panics because a diagnostic was built but not used.
+    ///
+    /// Why the Box? `Diagnostic` is a large type, and `DiagnosticBuilder` is
+    /// often used as a return value, especially within the frequently-used
+    /// `PResult` type. In theory, return value optimization (RVO) should avoid
+    /// unnecessary copying. In practice, it does not (at the time of writing).
+    diag: Option<Box<Diagnostic>>,
+
+    _marker: PhantomData<G>,
+}
+
+// Cloning a `DiagnosticBuilder` is a recipe for a diagnostic being emitted
+// twice, which would be bad.
+impl<G: EmissionGuarantee> !Clone for DiagnosticBuilder<'_, G> {}
+
+rustc_data_structures::static_assert_size!(
+    DiagnosticBuilder<'_, ()>,
+    2 * std::mem::size_of::<usize>()
+);
+
+impl<G: EmissionGuarantee> Deref for DiagnosticBuilder<'_, G> {
+    type Target = Diagnostic;
+
+    fn deref(&self) -> &Diagnostic {
+        self.diag.as_ref().unwrap()
+    }
+}
+
+impl<G: EmissionGuarantee> DerefMut for DiagnosticBuilder<'_, G> {
+    fn deref_mut(&mut self) -> &mut Diagnostic {
+        self.diag.as_mut().unwrap()
+    }
+}
+
+impl<G: EmissionGuarantee> Debug for DiagnosticBuilder<'_, G> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.diag.fmt(f)
+    }
 }
 
 /// `DiagnosticBuilder` impls many `&mut self -> &mut Self` methods. Each one
@@ -382,6 +568,20 @@ macro_rules! with_fn {
 }
 
 impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
+    #[rustc_lint_diagnostics]
+    #[track_caller]
+    pub fn new<M: Into<DiagnosticMessage>>(dcx: &'a DiagCtxt, level: Level, message: M) -> Self {
+        Self::new_diagnostic(dcx, Diagnostic::new(level, message))
+    }
+
+    /// Creates a new `DiagnosticBuilder` with an already constructed
+    /// diagnostic.
+    #[track_caller]
+    pub(crate) fn new_diagnostic(dcx: &'a DiagCtxt, diag: Diagnostic) -> Self {
+        debug!("Created new diagnostic");
+        Self { dcx, diag: Some(Box::new(diag)), _marker: PhantomData }
+    }
+
+    /// Delay emission of this diagnostic as a bug.
/// /// This can be useful in contexts where an error indicates a bug but @@ -1040,48 +1240,112 @@ impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> { let sub = SubDiagnostic { level, messages, span }; self.children.push(sub); } -} -impl Diagnostic { - /// Fields used for Hash, and PartialEq trait - fn keys( - &self, - ) -> ( - &Level, - &[(DiagnosticMessage, Style)], - &Option, - &MultiSpan, - &[SubDiagnostic], - &Result, SuggestionsDisabled>, - Vec<(&DiagnosticArgName, &DiagnosticArgValue)>, - &Option, - ) { - ( - &self.level, - &self.messages, - &self.code, - &self.span, - &self.children, - &self.suggestions, - self.args().collect(), - // omit self.sort_span - &self.is_lint, - // omit self.emitted_at - ) + /// Takes the diagnostic. For use by methods that consume the + /// DiagnosticBuilder: `emit`, `cancel`, etc. Afterwards, `drop` is the + /// only code that will be run on `self`. + fn take_diag(&mut self) -> Diagnostic { + Box::into_inner(self.diag.take().unwrap()) } -} -impl Hash for Diagnostic { - fn hash(&self, state: &mut H) - where - H: Hasher, - { - self.keys().hash(state); + /// Most `emit_producing_guarantee` functions use this as a starting point. + fn emit_producing_nothing(mut self) { + let diag = self.take_diag(); + self.dcx.emit_diagnostic(diag); + } + + /// `ErrorGuaranteed::emit_producing_guarantee` uses this. + fn emit_producing_error_guaranteed(mut self) -> ErrorGuaranteed { + let diag = self.take_diag(); + + // The only error levels that produce `ErrorGuaranteed` are + // `Error` and `DelayedBug`. But `DelayedBug` should never occur here + // because delayed bugs have their level changed to `Bug` when they are + // actually printed, so they produce an ICE. + // + // (Also, even though `level` isn't `pub`, the whole `Diagnostic` could + // be overwritten with a new one thanks to `DerefMut`. So this assert + // protects against that, too.) + assert!( + matches!(diag.level, Level::Error | Level::DelayedBug), + "invalid diagnostic level ({:?})", + diag.level, + ); + + let guar = self.dcx.emit_diagnostic(diag); + guar.unwrap() + } + + /// Emit and consume the diagnostic. + #[track_caller] + pub fn emit(self) -> G::EmitResult { + G::emit_producing_guarantee(self) + } + + /// Emit the diagnostic unless `delay` is true, + /// in which case the emission will be delayed as a bug. + /// + /// See `emit` and `delay_as_bug` for details. + #[track_caller] + pub fn emit_unless(mut self, delay: bool) -> G::EmitResult { + if delay { + self.downgrade_to_delayed_bug(); + } + self.emit() + } + + /// Cancel and consume the diagnostic. (A diagnostic must either be emitted or + /// cancelled or it will panic when dropped). + pub fn cancel(mut self) { + self.diag = None; + drop(self); + } + + /// Stashes diagnostic for possible later improvement in a different, + /// later stage of the compiler. The diagnostic can be accessed with + /// the provided `span` and `key` through [`DiagCtxt::steal_diagnostic()`]. + pub fn stash(mut self, span: Span, key: StashKey) { + self.dcx.stash_diagnostic(span, key, self.take_diag()); + } + + /// Delay emission of this diagnostic as a bug. + /// + /// This can be useful in contexts where an error indicates a bug but + /// typically this only happens when other compilation errors have already + /// happened. In those cases this can be used to defer emission of this + /// diagnostic as a bug in the compiler only if no other errors have been + /// emitted. 
+ /// + /// In the meantime, though, callsites are required to deal with the "bug" + /// locally in whichever way makes the most sense. + #[track_caller] + pub fn delay_as_bug(mut self) -> G::EmitResult { + self.downgrade_to_delayed_bug(); + self.emit() } } -impl PartialEq for Diagnostic { - fn eq(&self, other: &Self) -> bool { - self.keys() == other.keys() +/// Destructor bomb: every `DiagnosticBuilder` must be consumed (emitted, +/// cancelled, etc.) or we emit a bug. +impl Drop for DiagnosticBuilder<'_, G> { + fn drop(&mut self) { + match self.diag.take() { + Some(diag) if !panicking() => { + self.dcx.emit_diagnostic(Diagnostic::new( + Level::Bug, + DiagnosticMessage::from("the following error was constructed but not emitted"), + )); + self.dcx.emit_diagnostic(*diag); + panic!("error was constructed but not emitted"); + } + _ => {} + } } } + +#[macro_export] +macro_rules! struct_span_code_err { + ($dcx:expr, $span:expr, $code:expr, $($message:tt)*) => ({ + $dcx.struct_span_err($span, format!($($message)*)).with_code($code) + }) +} diff --git a/compiler/rustc_errors/src/diagnostic_builder.rs b/compiler/rustc_errors/src/diagnostic_builder.rs deleted file mode 100644 index 3a6a494af95b9..0000000000000 --- a/compiler/rustc_errors/src/diagnostic_builder.rs +++ /dev/null @@ -1,282 +0,0 @@ -use crate::{ - DiagCtxt, Diagnostic, DiagnosticMessage, ErrorGuaranteed, ExplicitBug, Level, StashKey, -}; -use rustc_span::source_map::Spanned; -use rustc_span::Span; -use std::fmt::{self, Debug}; -use std::marker::PhantomData; -use std::ops::{Deref, DerefMut}; -use std::panic; -use std::thread::panicking; - -/// Trait implemented by error types. This is rarely implemented manually. Instead, use -/// `#[derive(Diagnostic)]` -- see [rustc_macros::Diagnostic]. -#[rustc_diagnostic_item = "IntoDiagnostic"] -pub trait IntoDiagnostic<'a, G: EmissionGuarantee = ErrorGuaranteed> { - /// Write out as a diagnostic out of `DiagCtxt`. - #[must_use] - fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a, G>; -} - -impl<'a, T, G> IntoDiagnostic<'a, G> for Spanned -where - T: IntoDiagnostic<'a, G>, - G: EmissionGuarantee, -{ - fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a, G> { - self.node.into_diagnostic(dcx, level).with_span(self.span) - } -} - -/// Used for emitting structured error messages and other diagnostic information. -/// Wraps a `Diagnostic`, adding some useful things. -/// - The `dcx` field, allowing it to (a) emit itself, and (b) do a drop check -/// that it has been emitted or cancelled. -/// - The `EmissionGuarantee`, which determines the type returned from `emit`. -/// -/// Each constructed `DiagnosticBuilder` must be consumed by a function such as -/// `emit`, `cancel`, `delay_as_bug`, or `into_diagnostic`. A panic occurrs if a -/// `DiagnosticBuilder` is dropped without being consumed by one of these -/// functions. -/// -/// If there is some state in a downstream crate you would like to -/// access in the methods of `DiagnosticBuilder` here, consider -/// extending `DiagCtxtFlags`. -#[must_use] -pub struct DiagnosticBuilder<'a, G: EmissionGuarantee = ErrorGuaranteed> { - pub dcx: &'a DiagCtxt, - - /// Why the `Option`? It is always `Some` until the `DiagnosticBuilder` is - /// consumed via `emit`, `cancel`, etc. At that point it is consumed and - /// replaced with `None`. Then `drop` checks that it is `None`; if not, it - /// panics because a diagnostic was built but not used. - /// - /// Why the Box? 
`Diagnostic` is a large type, and `DiagnosticBuilder` is - /// often used as a return value, especially within the frequently-used - /// `PResult` type. In theory, return value optimization (RVO) should avoid - /// unnecessary copying. In practice, it does not (at the time of writing). - // FIXME(nnethercote) Make private once this moves to diagnostic.rs. - pub(crate) diag: Option>, - - // FIXME(nnethercote) Make private once this moves to diagnostic.rs. - pub(crate) _marker: PhantomData, -} - -// Cloning a `DiagnosticBuilder` is a recipe for a diagnostic being emitted -// twice, which would be bad. -impl !Clone for DiagnosticBuilder<'_, G> {} - -rustc_data_structures::static_assert_size!( - DiagnosticBuilder<'_, ()>, - 2 * std::mem::size_of::() -); - -/// Trait for types that `DiagnosticBuilder::emit` can return as a "guarantee" -/// (or "proof") token that the emission happened. -pub trait EmissionGuarantee: Sized { - /// This exists so that bugs and fatal errors can both result in `!` (an - /// abort) when emitted, but have different aborting behaviour. - type EmitResult = Self; - - /// Implementation of `DiagnosticBuilder::emit`, fully controlled by each - /// `impl` of `EmissionGuarantee`, to make it impossible to create a value - /// of `Self::EmitResult` without actually performing the emission. - #[track_caller] - fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult; -} - -impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> { - /// Takes the diagnostic. For use by methods that consume the - /// DiagnosticBuilder: `emit`, `cancel`, etc. Afterwards, `drop` is the - /// only code that will be run on `self`. - // FIXME(nnethercote) Make private once this moves to diagnostic.rs. - pub(crate) fn take_diag(&mut self) -> Diagnostic { - Box::into_inner(self.diag.take().unwrap()) - } - - /// Most `emit_producing_guarantee` functions use this as a starting point. - // FIXME(nnethercote) Make private once this moves to diagnostic.rs. - pub(crate) fn emit_producing_nothing(mut self) { - let diag = self.take_diag(); - self.dcx.emit_diagnostic(diag); - } - - /// `ErrorGuaranteed::emit_producing_guarantee` uses this. - // FIXME(nnethercote) Make private once this moves to diagnostic.rs. - pub(crate) fn emit_producing_error_guaranteed(mut self) -> ErrorGuaranteed { - let diag = self.take_diag(); - - // The only error levels that produce `ErrorGuaranteed` are - // `Error` and `DelayedBug`. But `DelayedBug` should never occur here - // because delayed bugs have their level changed to `Bug` when they are - // actually printed, so they produce an ICE. - // - // (Also, even though `level` isn't `pub`, the whole `Diagnostic` could - // be overwritten with a new one thanks to `DerefMut`. So this assert - // protects against that, too.) - assert!( - matches!(diag.level, Level::Error | Level::DelayedBug), - "invalid diagnostic level ({:?})", - diag.level, - ); - - let guar = self.dcx.emit_diagnostic(diag); - guar.unwrap() - } -} - -impl EmissionGuarantee for ErrorGuaranteed { - fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult { - db.emit_producing_error_guaranteed() - } -} - -impl EmissionGuarantee for () { - fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult { - db.emit_producing_nothing(); - } -} - -/// Marker type which enables implementation of `create_bug` and `emit_bug` functions for -/// bug diagnostics. 
-#[derive(Copy, Clone)] -pub struct BugAbort; - -impl EmissionGuarantee for BugAbort { - type EmitResult = !; - - fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult { - db.emit_producing_nothing(); - panic::panic_any(ExplicitBug); - } -} - -/// Marker type which enables implementation of `create_fatal` and `emit_fatal` functions for -/// fatal diagnostics. -#[derive(Copy, Clone)] -pub struct FatalAbort; - -impl EmissionGuarantee for FatalAbort { - type EmitResult = !; - - fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult { - db.emit_producing_nothing(); - crate::FatalError.raise() - } -} - -impl EmissionGuarantee for rustc_span::fatal_error::FatalError { - fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult { - db.emit_producing_nothing(); - rustc_span::fatal_error::FatalError - } -} - -impl Deref for DiagnosticBuilder<'_, G> { - type Target = Diagnostic; - - fn deref(&self) -> &Diagnostic { - self.diag.as_ref().unwrap() - } -} - -impl DerefMut for DiagnosticBuilder<'_, G> { - fn deref_mut(&mut self) -> &mut Diagnostic { - self.diag.as_mut().unwrap() - } -} - -impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> { - #[rustc_lint_diagnostics] - #[track_caller] - pub fn new>(dcx: &'a DiagCtxt, level: Level, message: M) -> Self { - Self::new_diagnostic(dcx, Diagnostic::new(level, message)) - } - - /// Creates a new `DiagnosticBuilder` with an already constructed - /// diagnostic. - #[track_caller] - pub(crate) fn new_diagnostic(dcx: &'a DiagCtxt, diag: Diagnostic) -> Self { - debug!("Created new diagnostic"); - Self { dcx, diag: Some(Box::new(diag)), _marker: PhantomData } - } - - /// Emit and consume the diagnostic. - #[track_caller] - pub fn emit(self) -> G::EmitResult { - G::emit_producing_guarantee(self) - } - - /// Emit the diagnostic unless `delay` is true, - /// in which case the emission will be delayed as a bug. - /// - /// See `emit` and `delay_as_bug` for details. - #[track_caller] - pub fn emit_unless(mut self, delay: bool) -> G::EmitResult { - if delay { - self.downgrade_to_delayed_bug(); - } - self.emit() - } - - /// Cancel and consume the diagnostic. (A diagnostic must either be emitted or - /// cancelled or it will panic when dropped). - pub fn cancel(mut self) { - self.diag = None; - drop(self); - } - - /// Stashes diagnostic for possible later improvement in a different, - /// later stage of the compiler. The diagnostic can be accessed with - /// the provided `span` and `key` through [`DiagCtxt::steal_diagnostic()`]. - pub fn stash(mut self, span: Span, key: StashKey) { - self.dcx.stash_diagnostic(span, key, self.take_diag()); - } - - /// Delay emission of this diagnostic as a bug. - /// - /// This can be useful in contexts where an error indicates a bug but - /// typically this only happens when other compilation errors have already - /// happened. In those cases this can be used to defer emission of this - /// diagnostic as a bug in the compiler only if no other errors have been - /// emitted. - /// - /// In the meantime, though, callsites are required to deal with the "bug" - /// locally in whichever way makes the most sense. - #[track_caller] - pub fn delay_as_bug(mut self) -> G::EmitResult { - self.downgrade_to_delayed_bug(); - self.emit() - } -} - -impl Debug for DiagnosticBuilder<'_, G> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.diag.fmt(f) - } -} - -/// Destructor bomb: every `DiagnosticBuilder` must be consumed (emitted, -/// cancelled, etc.) 
or we emit a bug. -impl Drop for DiagnosticBuilder<'_, G> { - fn drop(&mut self) { - match self.diag.take() { - Some(diag) if !panicking() => { - self.dcx.emit_diagnostic(Diagnostic::new( - Level::Bug, - DiagnosticMessage::from("the following error was constructed but not emitted"), - )); - self.dcx.emit_diagnostic(*diag); - panic!("error was constructed but not emitted"); - } - _ => {} - } - } -} - -#[macro_export] -macro_rules! struct_span_code_err { - ($dcx:expr, $span:expr, $code:expr, $($message:tt)*) => ({ - $dcx.struct_span_err($span, format!($($message)*)).with_code($code) - }) -} diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index 73cda64f1cc6a..052d9b3a78376 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -37,12 +37,9 @@ extern crate self as rustc_errors; pub use codes::*; pub use diagnostic::{ - AddToDiagnostic, DecorateLint, Diagnostic, DiagnosticArg, DiagnosticArgName, - DiagnosticArgValue, DiagnosticStyledString, IntoDiagnosticArg, StringPart, SubDiagnostic, - SubdiagnosticMessageOp, -}; -pub use diagnostic_builder::{ - BugAbort, DiagnosticBuilder, EmissionGuarantee, FatalAbort, IntoDiagnostic, + AddToDiagnostic, BugAbort, DecorateLint, Diagnostic, DiagnosticArg, DiagnosticArgName, + DiagnosticArgValue, DiagnosticBuilder, DiagnosticStyledString, EmissionGuarantee, FatalAbort, + IntoDiagnostic, IntoDiagnosticArg, StringPart, SubDiagnostic, SubdiagnosticMessageOp, }; pub use diagnostic_impls::{ DiagnosticArgFromDisplay, DiagnosticSymbolList, ExpectedLifetimeParameter, @@ -87,7 +84,6 @@ use Level::*; pub mod annotate_snippet_emitter_writer; pub mod codes; mod diagnostic; -mod diagnostic_builder; mod diagnostic_impls; pub mod emitter; pub mod error; diff --git a/compiler/rustc_infer/src/infer/relate/combine.rs b/compiler/rustc_infer/src/infer/relate/combine.rs index 454de4f978519..f7690831c2acc 100644 --- a/compiler/rustc_infer/src/infer/relate/combine.rs +++ b/compiler/rustc_infer/src/infer/relate/combine.rs @@ -194,7 +194,7 @@ impl<'tcx> InferCtxt<'tcx> { ty::ConstKind::Infer(InferConst::Var(b_vid)), ) => { self.inner.borrow_mut().const_unification_table().union(a_vid, b_vid); - return Ok(a); + Ok(a) } ( @@ -202,7 +202,7 @@ impl<'tcx> InferCtxt<'tcx> { ty::ConstKind::Infer(InferConst::EffectVar(b_vid)), ) => { self.inner.borrow_mut().effect_unification_table().union(a_vid, b_vid); - return Ok(a); + Ok(a) } // All other cases of inference with other variables are errors. 
@@ -220,19 +220,21 @@ impl<'tcx> InferCtxt<'tcx> { } (ty::ConstKind::Infer(InferConst::Var(vid)), _) => { - return self.instantiate_const_var(vid, b); + self.instantiate_const_var(relation, relation.a_is_expected(), vid, b)?; + Ok(b) } (_, ty::ConstKind::Infer(InferConst::Var(vid))) => { - return self.instantiate_const_var(vid, a); + self.instantiate_const_var(relation, !relation.a_is_expected(), vid, a)?; + Ok(a) } (ty::ConstKind::Infer(InferConst::EffectVar(vid)), _) => { - return Ok(self.unify_effect_variable(vid, b)); + Ok(self.unify_effect_variable(vid, b)) } (_, ty::ConstKind::Infer(InferConst::EffectVar(vid))) => { - return Ok(self.unify_effect_variable(vid, a)); + Ok(self.unify_effect_variable(vid, a)) } (ty::ConstKind::Unevaluated(..), _) | (_, ty::ConstKind::Unevaluated(..)) @@ -240,7 +242,7 @@ impl<'tcx> InferCtxt<'tcx> { { let (a, b) = if relation.a_is_expected() { (a, b) } else { (b, a) }; - relation.register_predicates([ty::Binder::dummy(if self.next_trait_solver() { + relation.register_predicates([if self.next_trait_solver() { ty::PredicateKind::AliasRelate( a.into(), b.into(), @@ -248,14 +250,12 @@ impl<'tcx> InferCtxt<'tcx> { ) } else { ty::PredicateKind::ConstEquate(a, b) - })]); + }]); - return Ok(b); + Ok(b) } - _ => {} + _ => ty::relate::structurally_relate_consts(relation, a, b), } - - ty::relate::structurally_relate_consts(relation, a, b) } fn unify_integral_variable( diff --git a/compiler/rustc_infer/src/infer/relate/generalize.rs b/compiler/rustc_infer/src/infer/relate/generalize.rs index c0cb02916fe8f..371340c5bbfdf 100644 --- a/compiler/rustc_infer/src/infer/relate/generalize.rs +++ b/compiler/rustc_infer/src/infer/relate/generalize.rs @@ -22,7 +22,7 @@ impl<'tcx> InferCtxt<'tcx> { /// subtyping could occur. This also does the occurs checks, detecting whether /// instantiating `target_vid` would result in a cyclic type. We eagerly error /// in this case. - #[instrument(skip(self, relation, target_is_expected), level = "debug")] + #[instrument(level = "debug", skip(self, relation, target_is_expected))] pub(super) fn instantiate_ty_var>( &self, relation: &mut R, @@ -158,26 +158,22 @@ impl<'tcx> InferCtxt<'tcx> { /// As `3 + 4` contains `N` in its args, this must not succeed. /// /// See `tests/ui/const-generics/occurs-check/` for more examples where this is relevant. - #[instrument(level = "debug", skip(self))] - pub(super) fn instantiate_const_var( + #[instrument(level = "debug", skip(self, relation))] + pub(super) fn instantiate_const_var>( &self, + relation: &mut R, + target_is_expected: bool, target_vid: ty::ConstVid, source_ct: ty::Const<'tcx>, - ) -> RelateResult<'tcx, ty::Const<'tcx>> { - let span = match self.inner.borrow_mut().const_unification_table().probe_value(target_vid) { - ConstVariableValue::Known { value } => { - bug!("instantiating a known const var: {target_vid:?} {value} {source_ct}") - } - ConstVariableValue::Unknown { origin, universe: _ } => origin.span, - }; + ) -> RelateResult<'tcx, ()> { // FIXME(generic_const_exprs): Occurs check failures for unevaluated // constants and generic expressions are not yet handled correctly. 
let Generalization { value_may_be_infer: generalized_ct, has_unconstrained_ty_var } = - self.generalize(span, target_vid, ty::Variance::Invariant, source_ct)?; + self.generalize(relation.span(), target_vid, ty::Variance::Invariant, source_ct)?; debug_assert!(!generalized_ct.is_ct_infer()); if has_unconstrained_ty_var { - span_bug!(span, "unconstrained ty var when generalizing `{source_ct:?}`"); + bug!("unconstrained ty var when generalizing `{source_ct:?}`"); } self.inner @@ -185,9 +181,25 @@ impl<'tcx> InferCtxt<'tcx> { .const_unification_table() .union_value(target_vid, ConstVariableValue::Known { value: generalized_ct }); - // FIXME(generic_const_exprs): We have to make sure we actually equate - // `generalized_ct` and `source_ct` here. - Ok(generalized_ct) + // HACK: make sure that we `a_is_expected` continues to be + // correct when relating the generalized type with the source. + if target_is_expected == relation.a_is_expected() { + relation.relate_with_variance( + ty::Variance::Invariant, + ty::VarianceDiagInfo::default(), + generalized_ct, + source_ct, + )?; + } else { + relation.relate_with_variance( + ty::Variance::Invariant, + ty::VarianceDiagInfo::default(), + source_ct, + generalized_ct, + )?; + } + + Ok(()) } /// Attempts to generalize `source_term` for the type variable `target_vid`. @@ -287,6 +299,49 @@ impl<'tcx> Generalizer<'_, 'tcx> { ty::TermKind::Const(ct) => TypeError::CyclicConst(ct), } } + + /// An occurs check failure inside of an alias does not mean + /// that the types definitely don't unify. We may be able + /// to normalize the alias after all. + /// + /// We handle this by lazily equating the alias and generalizing + /// it to an inference variable. + /// + /// This is incomplete and will hopefully soon get fixed by #119106. + fn generalize_alias_ty( + &mut self, + alias: ty::AliasTy<'tcx>, + ) -> Result, TypeError<'tcx>> { + let is_nested_alias = mem::replace(&mut self.in_alias, true); + let result = match self.relate(alias, alias) { + Ok(alias) => Ok(alias.to_ty(self.tcx())), + Err(e) => { + if is_nested_alias { + return Err(e); + } else { + let mut visitor = MaxUniverse::new(); + alias.visit_with(&mut visitor); + let infer_replacement_is_complete = + self.for_universe.can_name(visitor.max_universe()) + && !alias.has_escaping_bound_vars(); + if !infer_replacement_is_complete { + warn!("may incompletely handle alias type: {alias:?}"); + } + + debug!("generalization failure in alias"); + Ok(self.infcx.next_ty_var_in_universe( + TypeVariableOrigin { + kind: TypeVariableOriginKind::MiscVariable, + span: self.span, + }, + self.for_universe, + )) + } + } + }; + self.in_alias = is_nested_alias; + result + } } impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> { @@ -433,43 +488,7 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> { } } - ty::Alias(kind, data) => { - // An occurs check failure inside of an alias does not mean - // that the types definitely don't unify. We may be able - // to normalize the alias after all. - // - // We handle this by lazily equating the alias and generalizing - // it to an inference variable. 
- let is_nested_alias = mem::replace(&mut self.in_alias, true); - let result = match self.relate(data, data) { - Ok(data) => Ok(Ty::new_alias(self.tcx(), kind, data)), - Err(e) => { - if is_nested_alias { - return Err(e); - } else { - let mut visitor = MaxUniverse::new(); - t.visit_with(&mut visitor); - let infer_replacement_is_complete = - self.for_universe.can_name(visitor.max_universe()) - && !t.has_escaping_bound_vars(); - if !infer_replacement_is_complete { - warn!("may incompletely handle alias type: {t:?}"); - } - - debug!("generalization failure in alias"); - Ok(self.infcx.next_ty_var_in_universe( - TypeVariableOrigin { - kind: TypeVariableOriginKind::MiscVariable, - span: self.span, - }, - self.for_universe, - )) - } - } - }; - self.in_alias = is_nested_alias; - result - } + ty::Alias(_, data) => self.generalize_alias_ty(data), _ => relate::structurally_relate_tys(self, t, t), }?; diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/build/matches/mod.rs index ccf299649cf8f..88a5eae281b31 100644 --- a/compiler/rustc_mir_build/src/build/matches/mod.rs +++ b/compiler/rustc_mir_build/src/build/matches/mod.rs @@ -1052,7 +1052,7 @@ struct Ascription<'tcx> { variance: ty::Variance, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub(crate) struct MatchPair<'pat, 'tcx> { // This place... place: PlaceBuilder<'tcx>, @@ -1408,51 +1408,66 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span: Span, scrutinee_span: Span, candidates: &mut [&mut Candidate<'_, 'tcx>], - block: BasicBlock, + start_block: BasicBlock, otherwise_block: BasicBlock, fake_borrows: &mut Option>>, ) { let (first_candidate, remaining_candidates) = candidates.split_first_mut().unwrap(); - - // All of the or-patterns have been sorted to the end, so if the first - // pattern is an or-pattern we only have or-patterns. - match first_candidate.match_pairs[0].pattern.kind { - PatKind::Or { .. } => (), - _ => { - self.test_candidates( - span, - scrutinee_span, - candidates, - block, - otherwise_block, - fake_borrows, - ); - return; - } + assert!(first_candidate.subcandidates.is_empty()); + if !matches!(first_candidate.match_pairs[0].pattern.kind, PatKind::Or { .. }) { + self.test_candidates( + span, + scrutinee_span, + candidates, + start_block, + otherwise_block, + fake_borrows, + ); + return; } let match_pairs = mem::take(&mut first_candidate.match_pairs); - first_candidate.pre_binding_block = Some(block); + let (first_match_pair, remaining_match_pairs) = match_pairs.split_first().unwrap(); + let PatKind::Or { ref pats } = &first_match_pair.pattern.kind else { unreachable!() }; let remainder_start = self.cfg.start_new_block(); - for match_pair in match_pairs { - let PatKind::Or { ref pats } = &match_pair.pattern.kind else { - bug!("Or-patterns should have been sorted to the end"); - }; - let or_span = match_pair.pattern.span; + let or_span = first_match_pair.pattern.span; + // Test the alternatives of this or-pattern. + self.test_or_pattern( + first_candidate, + start_block, + remainder_start, + pats, + or_span, + &first_match_pair.place, + fake_borrows, + ); + if !remaining_match_pairs.is_empty() { + // If more match pairs remain, test them after each subcandidate. + // We could add them to the or-candidates before the call to `test_or_pattern` but this + // would make it impossible to detect simplifiable or-patterns. That would guarantee + // exponentially large CFGs for cases like `(1 | 2, 3 | 4, ...)`. 
first_candidate.visit_leaves(|leaf_candidate| { - self.test_or_pattern( - leaf_candidate, - remainder_start, - pats, - or_span, - &match_pair.place, + assert!(leaf_candidate.match_pairs.is_empty()); + leaf_candidate.match_pairs.extend(remaining_match_pairs.iter().cloned()); + let or_start = leaf_candidate.pre_binding_block.unwrap(); + // In a case like `(a | b, c | d)`, if `a` succeeds and `c | d` fails, we know `(b, + // c | d)` will fail too. If there is no guard, we skip testing of `b` by branching + // directly to `remainder_start`. If there is a guard, we have to try `(b, c | d)`. + let or_otherwise = leaf_candidate.otherwise_block.unwrap_or(remainder_start); + self.test_candidates_with_or( + span, + scrutinee_span, + &mut [leaf_candidate], + or_start, + or_otherwise, fake_borrows, ); }); } + // Test the remaining candidates. self.match_candidates( span, scrutinee_span, @@ -1460,17 +1475,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { otherwise_block, remaining_candidates, fake_borrows, - ) + ); } #[instrument( - skip(self, otherwise, or_span, place, fake_borrows, candidate, pats), + skip(self, start_block, otherwise_block, or_span, place, fake_borrows, candidate, pats), level = "debug" )] fn test_or_pattern<'pat>( &mut self, candidate: &mut Candidate<'pat, 'tcx>, - otherwise: BasicBlock, + start_block: BasicBlock, + otherwise_block: BasicBlock, pats: &'pat [Box>], or_span: Span, place: &PlaceBuilder<'tcx>, @@ -1482,16 +1498,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .map(|pat| Candidate::new(place.clone(), pat, candidate.has_guard, self)) .collect(); let mut or_candidate_refs: Vec<_> = or_candidates.iter_mut().collect(); - let otherwise = if let Some(otherwise_block) = candidate.otherwise_block { - otherwise_block - } else { - otherwise - }; self.match_candidates( or_span, or_span, - candidate.pre_binding_block.unwrap(), - otherwise, + start_block, + otherwise_block, &mut or_candidate_refs, fake_borrows, ); diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl index 7c2ecf34c1754..55baf6f9f2eb0 100644 --- a/compiler/rustc_parse/messages.ftl +++ b/compiler/rustc_parse/messages.ftl @@ -27,6 +27,8 @@ parse_async_bound_modifier_in_2015 = `async` trait bounds are only allowed in Ru parse_async_fn_in_2015 = `async fn` is not permitted in Rust 2015 .label = to use `async fn`, switch to Rust 2018 or later +parse_async_impl = `async` trait implementations are unsupported + parse_async_move_block_in_2015 = `async move` blocks are only allowed in Rust 2018 or later parse_async_move_order_incorrect = the order of `move` and `async` is incorrect diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs index fde67ac089aa3..2d4447a42c2c8 100644 --- a/compiler/rustc_parse/src/errors.rs +++ b/compiler/rustc_parse/src/errors.rs @@ -2975,3 +2975,10 @@ pub(crate) struct ArrayIndexInOffsetOf(#[primary_span] pub Span); #[derive(Diagnostic)] #[diag(parse_invalid_offset_of)] pub(crate) struct InvalidOffsetOf(#[primary_span] pub Span); + +#[derive(Diagnostic)] +#[diag(parse_async_impl)] +pub(crate) struct AsyncImpl { + #[primary_span] + pub span: Span, +} diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index e7b9076bd3c8f..77381ef46262f 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -562,6 +562,15 @@ impl<'a> Parser<'a> { self.sess.gated_spans.gate(sym::const_trait_impl, span); } + // Parse stray `impl async Trait` + if 
(self.token.uninterpolated_span().at_least_rust_2018() + && self.token.is_keyword(kw::Async)) + || self.is_kw_followed_by_ident(kw::Async) + { + self.bump(); + self.dcx().emit_err(errors::AsyncImpl { span: self.prev_token.span }); + } + let polarity = self.parse_polarity(); // Parse both types and traits as a type, then reinterpret if necessary. @@ -592,22 +601,10 @@ impl<'a> Parser<'a> { // We need to report this error after `cfg` expansion for compatibility reasons self.bump(); // `..`, do not add it to expected tokens - // FIXME(nnethercote): AST validation later detects this - // `TyKind::Err` and emits an errors. So why the unchecked - // ErrorGuaranteed? - // - A `span_delayed_bug` doesn't work here, because rustfmt can - // hit this path but then not hit the follow-up path in the AST - // validator that issues the error, which results in ICEs. - // - `TyKind::Dummy` doesn't work, because it ends up reaching HIR - // lowering, which results in ICEs. Changing `TyKind::Dummy` to - // `TyKind::Err` during AST validation might fix that, but that's - // not possible because AST validation doesn't allow mutability. - // - // #121072 will hopefully remove all this special handling of the - // obsolete `impl Trait for ..` and then this can go away. - #[allow(deprecated)] - let guar = rustc_errors::ErrorGuaranteed::unchecked_error_guaranteed(); - Some(self.mk_ty(self.prev_token.span, TyKind::Err(guar))) + // AST validation later detects this `TyKind::Dummy` and emits an + // error. (#121072 will hopefully remove all this special handling + // of the obsolete `impl Trait for ..` and then this can go away.) + Some(self.mk_ty(self.prev_token.span, TyKind::Dummy)) } else if has_for || self.token.can_begin_type() { Some(self.parse_ty()?) } else { diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index f79f2a813b223..23a92e6dd3dd3 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -778,9 +778,10 @@ impl<'a> Parser<'a> { || self.check(&token::Not) || self.check(&token::Question) || self.check(&token::Tilde) - || self.check_keyword(kw::Const) || self.check_keyword(kw::For) || self.check(&token::OpenDelim(Delimiter::Parenthesis)) + || self.check_keyword(kw::Const) + || self.check_keyword(kw::Async) } /// Parses a bound according to the grammar: @@ -882,11 +883,13 @@ impl<'a> Parser<'a> { BoundConstness::Never }; - let asyncness = if self.token.span.at_least_rust_2018() && self.eat_keyword(kw::Async) { + let asyncness = if self.token.uninterpolated_span().at_least_rust_2018() + && self.eat_keyword(kw::Async) + { self.sess.gated_spans.gate(sym::async_closure, self.prev_token.span); BoundAsyncness::Async(self.prev_token.span) } else if self.may_recover() - && self.token.span.is_rust_2015() + && self.token.uninterpolated_span().is_rust_2015() && self.is_kw_followed_by_ident(kw::Async) { self.bump(); // eat `async` diff --git a/compiler/rustc_trait_selection/src/solve/normalize.rs b/compiler/rustc_trait_selection/src/solve/normalize.rs index b07702e842105..91312c9fdd68b 100644 --- a/compiler/rustc_trait_selection/src/solve/normalize.rs +++ b/compiler/rustc_trait_selection/src/solve/normalize.rs @@ -85,25 +85,16 @@ impl<'tcx> NormalizationFolder<'_, 'tcx> { ), ); - // Do not emit an error if normalization is known to fail but instead - // keep the projection unnormalized. This is the case for projections - // with a `T: Trait` where-clause and opaque types outside of the defining - // scope. 
- let result = if infcx.predicate_may_hold(&obligation) { - self.fulfill_cx.register_predicate_obligation(infcx, obligation); - let errors = self.fulfill_cx.select_all_or_error(infcx); - if !errors.is_empty() { - return Err(errors); - } - let ty = infcx.resolve_vars_if_possible(new_infer_ty); - - // Alias is guaranteed to be fully structurally resolved, - // so we can super fold here. - ty.try_super_fold_with(self)? - } else { - alias_ty.try_super_fold_with(self)? - }; + self.fulfill_cx.register_predicate_obligation(infcx, obligation); + let errors = self.fulfill_cx.select_all_or_error(infcx); + if !errors.is_empty() { + return Err(errors); + } + // Alias is guaranteed to be fully structurally resolved, + // so we can super fold here. + let ty = infcx.resolve_vars_if_possible(new_infer_ty); + let result = ty.try_super_fold_with(self)?; self.depth -= 1; Ok(result) } @@ -178,6 +169,7 @@ impl<'tcx> FallibleTypeFolder> for NormalizationFolder<'_, 'tcx> { Ok(t) } + #[instrument(level = "debug", skip(self), ret)] fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result, Self::Error> { let infcx = self.at.infcx; debug_assert_eq!(ty, infcx.shallow_resolve(ty)); @@ -204,6 +196,7 @@ impl<'tcx> FallibleTypeFolder> for NormalizationFolder<'_, 'tcx> { } } + #[instrument(level = "debug", skip(self), ret)] fn try_fold_const(&mut self, ct: ty::Const<'tcx>) -> Result, Self::Error> { let infcx = self.at.infcx; debug_assert_eq!(ct, infcx.shallow_resolve(ct)); diff --git a/compiler/rustc_trait_selection/src/solve/normalizes_to/anon_const.rs b/compiler/rustc_trait_selection/src/solve/normalizes_to/anon_const.rs new file mode 100644 index 0000000000000..911462f4b9afe --- /dev/null +++ b/compiler/rustc_trait_selection/src/solve/normalizes_to/anon_const.rs @@ -0,0 +1,25 @@ +use crate::solve::EvalCtxt; +use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; +use rustc_middle::ty; + +impl<'tcx> EvalCtxt<'_, 'tcx> { + #[instrument(level = "debug", skip(self), ret)] + pub(super) fn normalize_anon_const( + &mut self, + goal: Goal<'tcx, ty::NormalizesTo<'tcx>>, + ) -> QueryResult<'tcx> { + if let Some(normalized_const) = self.try_const_eval_resolve( + goal.param_env, + ty::UnevaluatedConst::new(goal.predicate.alias.def_id, goal.predicate.alias.args), + self.tcx() + .type_of(goal.predicate.alias.def_id) + .no_bound_vars() + .expect("const ty should not rely on other generics"), + ) { + self.eq(goal.param_env, normalized_const, goal.predicate.term.ct().unwrap())?; + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } else { + self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) + } + } +} diff --git a/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs b/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs index d177109c42046..5f62583115675 100644 --- a/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs @@ -18,8 +18,9 @@ use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_middle::ty::{ToPredicate, TypeVisitableExt}; use rustc_span::{sym, ErrorGuaranteed, DUMMY_SP}; +mod anon_const; mod inherent; -mod opaques; +mod opaque_types; mod weak_types; impl<'tcx> EvalCtxt<'_, 'tcx> { @@ -31,34 +32,34 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { let def_id = goal.predicate.def_id(); match self.tcx().def_kind(def_id) { DefKind::AssocTy | DefKind::AssocConst => { - // To only compute normalization once for each projection we only - // assemble normalization candidates if the expected term is an - // 
unconstrained inference variable. - // - // Why: For better cache hits, since if we have an unconstrained RHS then - // there are only as many cache keys as there are (canonicalized) alias - // types in each normalizes-to goal. This also weakens inference in a - // forwards-compatible way so we don't use the value of the RHS term to - // affect candidate assembly for projections. - // - // E.g. for `::Assoc == u32` we recursively compute the goal - // `exists ::Assoc == U` and then take the resulting type for - // `U` and equate it with `u32`. This means that we don't need a separate - // projection cache in the solver, since we're piggybacking off of regular - // goal caching. - if self.term_is_fully_unconstrained(goal) { - match self.tcx().associated_item(def_id).container { - ty::AssocItemContainer::TraitContainer => { + match self.tcx().associated_item(def_id).container { + ty::AssocItemContainer::TraitContainer => { + // To only compute normalization once for each projection we only + // assemble normalization candidates if the expected term is an + // unconstrained inference variable. + // + // Why: For better cache hits, since if we have an unconstrained RHS then + // there are only as many cache keys as there are (canonicalized) alias + // types in each normalizes-to goal. This also weakens inference in a + // forwards-compatible way so we don't use the value of the RHS term to + // affect candidate assembly for projections. + // + // E.g. for `::Assoc == u32` we recursively compute the goal + // `exists ::Assoc == U` and then take the resulting type for + // `U` and equate it with `u32`. This means that we don't need a separate + // projection cache in the solver, since we're piggybacking off of regular + // goal caching. + if self.term_is_fully_unconstrained(goal) { let candidates = self.assemble_and_evaluate_candidates(goal); self.merge_candidates(candidates) + } else { + self.set_normalizes_to_hack_goal(goal); + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } - ty::AssocItemContainer::ImplContainer => { - self.normalize_inherent_associated_type(goal) - } } - } else { - self.set_normalizes_to_hack_goal(goal); - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ty::AssocItemContainer::ImplContainer => { + self.normalize_inherent_associated_type(goal) + } } } DefKind::AnonConst => self.normalize_anon_const(goal), @@ -67,26 +68,6 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { kind => bug!("unknown DefKind {} in projection goal: {goal:#?}", kind.descr(def_id)), } } - - #[instrument(level = "debug", skip(self), ret)] - fn normalize_anon_const( - &mut self, - goal: Goal<'tcx, ty::NormalizesTo<'tcx>>, - ) -> QueryResult<'tcx> { - if let Some(normalized_const) = self.try_const_eval_resolve( - goal.param_env, - ty::UnevaluatedConst::new(goal.predicate.alias.def_id, goal.predicate.alias.args), - self.tcx() - .type_of(goal.predicate.alias.def_id) - .no_bound_vars() - .expect("const ty should not rely on other generics"), - ) { - self.eq(goal.param_env, normalized_const, goal.predicate.term.ct().unwrap())?; - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } else { - self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) - } - } } impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { diff --git a/compiler/rustc_trait_selection/src/solve/normalizes_to/opaques.rs b/compiler/rustc_trait_selection/src/solve/normalizes_to/opaque_types.rs similarity index 100% rename from 
compiler/rustc_trait_selection/src/solve/normalizes_to/opaques.rs rename to compiler/rustc_trait_selection/src/solve/normalizes_to/opaque_types.rs diff --git a/src/bootstrap/src/bin/main.rs b/src/bootstrap/src/bin/main.rs index b97f73aa65275..070d951dba99a 100644 --- a/src/bootstrap/src/bin/main.rs +++ b/src/bootstrap/src/bin/main.rs @@ -39,14 +39,14 @@ fn main() { .open(&lock_path))); _build_lock_guard = match build_lock.try_write() { Ok(mut lock) => { - t!(lock.write(&process::id().to_string().as_ref())); + t!(lock.write(process::id().to_string().as_ref())); lock } err => { drop(err); println!("WARNING: build directory locked by process {pid}, waiting for lock"); let mut lock = t!(build_lock.write()); - t!(lock.write(&process::id().to_string().as_ref())); + t!(lock.write(process::id().to_string().as_ref())); lock } }; @@ -113,14 +113,14 @@ fn main() { continue; } - let file = t!(fs::File::open(&entry.path())); + let file = t!(fs::File::open(entry.path())); // To ensure deterministic results we must sort the dump lines. // This is necessary because the order of rustc invocations different // almost all the time. let mut lines: Vec = t!(BufReader::new(&file).lines().collect()); lines.sort_by_key(|t| t.to_lowercase()); - let mut file = t!(OpenOptions::new().write(true).truncate(true).open(&entry.path())); + let mut file = t!(OpenOptions::new().write(true).truncate(true).open(entry.path())); t!(file.write_all(lines.join("\n").as_bytes())); } } @@ -156,7 +156,7 @@ fn check_version(config: &Config) -> Option { msg.push_str("There have been changes to x.py since you last updated:\n"); for change in changes { - msg.push_str(&format!(" [{}] {}\n", change.severity.to_string(), change.summary)); + msg.push_str(&format!(" [{}] {}\n", change.severity, change.summary)); msg.push_str(&format!( " - PR Link https://github.com/rust-lang/rust/pull/{}\n", change.change_id diff --git a/src/bootstrap/src/bin/rustc.rs b/src/bootstrap/src/bin/rustc.rs index 38c55b2034496..74a924d86c796 100644 --- a/src/bootstrap/src/bin/rustc.rs +++ b/src/bootstrap/src/bin/rustc.rs @@ -276,7 +276,7 @@ fn main() { dur.as_secs(), dur.subsec_millis(), if rusage_data.is_some() { " " } else { "" }, - rusage_data.unwrap_or(String::new()), + rusage_data.unwrap_or_default(), ); } } @@ -440,5 +440,5 @@ fn format_rusage_data(_child: Child) -> Option { )); } - return Some(init_str); + Some(init_str) } diff --git a/src/bootstrap/src/bin/sccache-plus-cl.rs b/src/bootstrap/src/bin/sccache-plus-cl.rs index 554c2dd4d81ea..6e87d4222e863 100644 --- a/src/bootstrap/src/bin/sccache-plus-cl.rs +++ b/src/bootstrap/src/bin/sccache-plus-cl.rs @@ -18,9 +18,9 @@ fn main() { // Invoke sccache with said compiler let sccache_path = env::var_os("SCCACHE_PATH").unwrap(); - let mut cmd = Command::new(&sccache_path); + let mut cmd = Command::new(sccache_path); cmd.arg(compiler.path()); - for &(ref k, ref v) in compiler.env() { + for (k, v) in compiler.env() { cmd.env(k, v); } for arg in env::args().skip(1) { diff --git a/src/bootstrap/src/core/build_steps/check.rs b/src/bootstrap/src/core/build_steps/check.rs index 4a04dbf44aa6c..3ac60f15ef67e 100644 --- a/src/bootstrap/src/core/build_steps/check.rs +++ b/src/bootstrap/src/core/build_steps/check.rs @@ -34,7 +34,7 @@ fn args(builder: &Builder<'_>) -> Vec { &builder.config.cmd { // disable the most spammy clippy lints - let ignored_lints = vec![ + let ignored_lints = [ "many_single_char_names", // there are a lot in stdarch "collapsible_if", "type_complexity", @@ -150,7 +150,7 @@ impl Step for Std { if 
compiler.stage == 0 { let libdir = builder.sysroot_libdir(compiler, target); let hostdir = builder.sysroot_libdir(compiler, compiler.host); - add_to_sysroot(&builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target)); + add_to_sysroot(builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target)); } drop(_guard); @@ -301,7 +301,7 @@ impl Step for Rustc { let libdir = builder.sysroot_libdir(compiler, target); let hostdir = builder.sysroot_libdir(compiler, compiler.host); - add_to_sysroot(&builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target)); + add_to_sysroot(builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target)); } } @@ -353,7 +353,7 @@ impl Step for CodegenBackend { .arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml"))); rustc_cargo_env(builder, &mut cargo, target, compiler.stage); - let _guard = builder.msg_check(&backend, target); + let _guard = builder.msg_check(backend, target); run_cargo( builder, diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index 9d7f88a9d42ba..d349cd67fed2c 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -107,8 +107,8 @@ impl Std { ) -> Vec<(PathBuf, DependencyType)> { let mut deps = Vec::new(); if !self.is_for_mir_opt_tests { - deps.extend(copy_third_party_objects(builder, &compiler, target)); - deps.extend(copy_self_contained_objects(builder, &compiler, target)); + deps.extend(copy_third_party_objects(builder, compiler, target)); + deps.extend(copy_self_contained_objects(builder, compiler, target)); } deps } @@ -186,7 +186,7 @@ impl Step for Std { // Profiler information requires LLVM's compiler-rt if builder.config.profiler { - builder.update_submodule(&Path::new("src/llvm-project")); + builder.update_submodule(Path::new("src/llvm-project")); } let mut target_deps = builder.ensure(StartupObjects { compiler, target }); @@ -271,7 +271,7 @@ impl Step for Std { if target.is_synthetic() { cargo.env("RUSTC_BOOTSTRAP_SYNTHETIC_TARGET", "1"); } - for rustflag in self.extra_rust_args.into_iter() { + for rustflag in self.extra_rust_args.iter() { cargo.rustflag(rustflag); } @@ -333,7 +333,7 @@ fn copy_third_party_objects( // The sanitizers are only copied in stage1 or above, // to avoid creating dependency on LLVM. 
target_deps.extend( - copy_sanitizers(builder, &compiler, target) + copy_sanitizers(builder, compiler, target) .into_iter() .map(|d| (d, DependencyType::Target)), ); @@ -487,7 +487,7 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car // for no-std targets we only compile a few no_std crates cargo - .args(&["-p", "alloc"]) + .args(["-p", "alloc"]) .arg("--manifest-path") .arg(builder.src.join("library/alloc/Cargo.toml")) .arg("--features") @@ -626,20 +626,20 @@ impl Step for StdLink { .build .config .initial_rustc - .starts_with(builder.out.join(&compiler.host.triple).join("stage0/bin")) + .starts_with(builder.out.join(compiler.host.triple).join("stage0/bin")) { // Copy bin files from stage0/bin to stage0-sysroot/bin - let sysroot = builder.out.join(&compiler.host.triple).join("stage0-sysroot"); + let sysroot = builder.out.join(compiler.host.triple).join("stage0-sysroot"); let host = compiler.host.triple; - let stage0_bin_dir = builder.out.join(&host).join("stage0/bin"); + let stage0_bin_dir = builder.out.join(host).join("stage0/bin"); let sysroot_bin_dir = sysroot.join("bin"); t!(fs::create_dir_all(&sysroot_bin_dir)); builder.cp_r(&stage0_bin_dir, &sysroot_bin_dir); // Copy all *.so files from stage0/lib to stage0-sysroot/lib - let stage0_lib_dir = builder.out.join(&host).join("stage0/lib"); - if let Ok(files) = fs::read_dir(&stage0_lib_dir) { + let stage0_lib_dir = builder.out.join(host).join("stage0/lib"); + if let Ok(files) = fs::read_dir(stage0_lib_dir) { for file in files { let file = t!(file); let path = file.path(); @@ -654,9 +654,9 @@ impl Step for StdLink { t!(fs::create_dir_all(&sysroot_codegen_backends)); let stage0_codegen_backends = builder .out - .join(&host) + .join(host) .join("stage0/lib/rustlib") - .join(&host) + .join(host) .join("codegen-backends"); if stage0_codegen_backends.exists() { builder.cp_r(&stage0_codegen_backends, &sysroot_codegen_backends); @@ -1179,7 +1179,7 @@ fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelect // The config can also specify its own llvm linker flags. 
if let Some(ref s) = builder.config.llvm_ldflags { if !llvm_linker_flags.is_empty() { - llvm_linker_flags.push_str(" "); + llvm_linker_flags.push(' '); } llvm_linker_flags.push_str(s); } @@ -1270,7 +1270,7 @@ fn needs_codegen_config(run: &RunConfig<'_>) -> bool { for path_set in &run.paths { needs_codegen_cfg = match path_set { PathSet::Set(set) => set.iter().any(|p| is_codegen_cfg_needed(p, run)), - PathSet::Suite(suite) => is_codegen_cfg_needed(&suite, run), + PathSet::Suite(suite) => is_codegen_cfg_needed(suite, run), } } needs_codegen_cfg @@ -1279,7 +1279,7 @@ fn needs_codegen_config(run: &RunConfig<'_>) -> bool { pub(crate) const CODEGEN_BACKEND_PREFIX: &str = "rustc_codegen_"; fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool { - if path.path.to_str().unwrap().contains(&CODEGEN_BACKEND_PREFIX) { + if path.path.to_str().unwrap().contains(CODEGEN_BACKEND_PREFIX) { let mut needs_codegen_backend_config = true; for &backend in run.builder.config.codegen_backends(run.target) { if path @@ -1300,7 +1300,7 @@ fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool { } } - return false; + false } impl Step for CodegenBackend { @@ -1393,7 +1393,7 @@ impl Step for CodegenBackend { } let stamp = codegen_backend_stamp(builder, compiler, target, backend); let codegen_backend = codegen_backend.to_str().unwrap(); - t!(fs::write(&stamp, &codegen_backend)); + t!(fs::write(stamp, codegen_backend)); } } @@ -1441,7 +1441,7 @@ fn copy_codegen_backends_to_sysroot( let dot = filename.find('.').unwrap(); format!("{}-{}{}", &filename[..dash], builder.rust_release(), &filename[dot..]) }; - builder.copy(&file, &dst.join(target_filename)); + builder.copy(file, &dst.join(target_filename)); } } @@ -1519,7 +1519,7 @@ impl Step for Sysroot { /// 1-3. 
fn run(self, builder: &Builder<'_>) -> Interned { let compiler = self.compiler; - let host_dir = builder.out.join(&compiler.host.triple); + let host_dir = builder.out.join(compiler.host.triple); let sysroot_dir = |stage| { if stage == 0 { @@ -1578,7 +1578,7 @@ impl Step for Sysroot { let mut add_filtered_files = |suffix, contents| { for path in contents { let path = Path::new(&path); - if path.parent().map_or(false, |parent| parent.ends_with(&suffix)) { + if path.parent().map_or(false, |parent| parent.ends_with(suffix)) { filtered_files.push(path.file_name().unwrap().to_owned()); } } @@ -1802,7 +1802,7 @@ impl Step for Assemble { if let Some(lld_install) = lld_install { let src_exe = exe("lld", target_compiler.host); let dst_exe = exe("rust-lld", target_compiler.host); - builder.copy(&lld_install.join("bin").join(&src_exe), &libdir_bin.join(&dst_exe)); + builder.copy(&lld_install.join("bin").join(src_exe), &libdir_bin.join(dst_exe)); let self_contained_lld_dir = libdir_bin.join("gcc-ld"); t!(fs::create_dir_all(&self_contained_lld_dir)); let lld_wrapper_exe = builder.ensure(crate::core::build_steps::tool::LldWrapper { @@ -1850,7 +1850,7 @@ impl Step for Assemble { let out_dir = builder.cargo_out(build_compiler, Mode::Rustc, host); let rustc = out_dir.join(exe("rustc-main", host)); let bindir = sysroot.join("bin"); - t!(fs::create_dir_all(&bindir)); + t!(fs::create_dir_all(bindir)); let compiler = builder.rustc(target_compiler); builder.copy(&rustc, &compiler); @@ -1869,9 +1869,9 @@ pub fn add_to_sysroot( stamp: &Path, ) { let self_contained_dst = &sysroot_dst.join("self-contained"); - t!(fs::create_dir_all(&sysroot_dst)); - t!(fs::create_dir_all(&sysroot_host_dst)); - t!(fs::create_dir_all(&self_contained_dst)); + t!(fs::create_dir_all(sysroot_dst)); + t!(fs::create_dir_all(sysroot_host_dst)); + t!(fs::create_dir_all(self_contained_dst)); for (path, dependency_type) in builder.read_stamp_file(stamp) { let dst = match dependency_type { DependencyType::Host => sysroot_host_dst, @@ -2009,14 +2009,14 @@ pub fn run_cargo( .map(|e| (e.path(), e.file_name().into_string().unwrap(), t!(e.metadata()))) .collect::>(); for (prefix, extension, expected_len) in toplevel { - let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| { + let candidates = contents.iter().filter(|&(_, filename, meta)| { meta.len() == expected_len && filename .strip_prefix(&prefix[..]) .map(|s| s.starts_with('-') && s.ends_with(&extension[..])) .unwrap_or(false) }); - let max = candidates.max_by_key(|&&(_, _, ref metadata)| { + let max = candidates.max_by_key(|&(_, _, metadata)| { metadata.modified().expect("mtime should be available on all relevant OSes") }); let path_to_add = match max { @@ -2045,7 +2045,7 @@ pub fn run_cargo( new_contents.extend(dep.to_str().unwrap().as_bytes()); new_contents.extend(b"\0"); } - t!(fs::write(&stamp, &new_contents)); + t!(fs::write(stamp, &new_contents)); deps.into_iter().map(|(d, _)| d).collect() } diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index 750d3095ff68e..d9ab18e7250b0 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -78,7 +78,7 @@ impl Step for Docs { let mut tarball = Tarball::new(builder, "rust-docs", &host.triple); tarball.set_product_name("Rust Documentation"); tarball.add_bulk_dir(&builder.doc_out(host), dest); - tarball.add_file(&builder.src.join("src/doc/robots.txt"), dest, 0o644); + tarball.add_file(builder.src.join("src/doc/robots.txt"), dest, 
0o644); Some(tarball.generate()) } } @@ -342,7 +342,7 @@ impl Step for Mingw { // thrown away (this contains the runtime DLLs included in the rustc package // above) and the second argument is where to place all the MinGW components // (which is what we want). - make_win_dist(&tmpdir(builder), tarball.image_dir(), host, &builder); + make_win_dist(&tmpdir(builder), tarball.image_dir(), host, builder); Some(tarball.generate()) } @@ -658,7 +658,7 @@ impl Step for Std { let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); let stamp = compile::libstd_stamp(builder, compiler_to_use, target); verify_uefi_rlib_format(builder, target, &stamp); - copy_target_libs(builder, target, &tarball.image_dir(), &stamp); + copy_target_libs(builder, target, tarball.image_dir(), &stamp); Some(tarball.generate()) } @@ -734,7 +734,7 @@ impl Step for Analysis { const DEFAULT: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "analysis"); + let default = should_build_extended_tool(run.builder, "analysis"); run.alias("rust-analysis").default_condition(default) } @@ -890,7 +890,7 @@ impl Step for Src { /// Creates the `rust-src` installer component fn run(self, builder: &Builder<'_>) -> GeneratedTarball { if !builder.config.dry_run() { - builder.update_submodule(&Path::new("src/llvm-project")); + builder.update_submodule(Path::new("src/llvm-project")); } let tarball = Tarball::new_targetless(builder, "rust-src"); @@ -976,7 +976,7 @@ impl Step for PlainSourceTarball { ]; let src_dirs = ["src", "compiler", "library", "tests"]; - copy_src_dirs(builder, &builder.src, &src_dirs, &[], &plain_dst_src); + copy_src_dirs(builder, &builder.src, &src_dirs, &[], plain_dst_src); // Copy the files normally for item in &src_files { @@ -986,8 +986,8 @@ impl Step for PlainSourceTarball { // Create the version file builder.create(&plain_dst_src.join("version"), &builder.rust_version()); if let Some(info) = builder.rust_info().info() { - channel::write_commit_hash_file(&plain_dst_src, &info.sha); - channel::write_commit_info_file(&plain_dst_src, info); + channel::write_commit_hash_file(plain_dst_src, &info.sha); + channel::write_commit_info_file(plain_dst_src, info); } // If we're building from git or tarball sources, we need to vendor @@ -1014,7 +1014,7 @@ impl Step for PlainSourceTarball { // Will read the libstd Cargo.toml // which uses the unstable `public-dependency` feature. 
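            // (Setting `RUSTC_BOOTSTRAP=1` below is what lets the pinned cargo/rustc
            // accept that unstable feature even on a stable-channel toolchain.)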
.env("RUSTC_BOOTSTRAP", "1") - .current_dir(&plain_dst_src); + .current_dir(plain_dst_src); let config = if !builder.config.dry_run() { t!(String::from_utf8(t!(cmd.output()).stdout)) @@ -1043,7 +1043,7 @@ impl Step for Cargo { const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "cargo"); + let default = should_build_extended_tool(run.builder, "cargo"); run.alias("cargo").default_condition(default) } @@ -1070,7 +1070,7 @@ impl Step for Cargo { let mut tarball = Tarball::new(builder, "cargo", &target.triple); tarball.set_overlay(OverlayKind::Cargo); - tarball.add_file(&cargo, "bin", 0o755); + tarball.add_file(cargo, "bin", 0o755); tarball.add_file(etc.join("_cargo"), "share/zsh/site-functions", 0o644); tarball.add_renamed_file(etc.join("cargo.bashcomp.sh"), "etc/bash_completion.d", "cargo"); tarball.add_dir(etc.join("man"), "share/man/man1"); @@ -1092,7 +1092,7 @@ impl Step for Rls { const DEFAULT: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "rls"); + let default = should_build_extended_tool(run.builder, "rls"); run.alias("rls").default_condition(default) } @@ -1134,7 +1134,7 @@ impl Step for RustAnalyzer { const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "rust-analyzer"); + let default = should_build_extended_tool(run.builder, "rust-analyzer"); run.alias("rust-analyzer").default_condition(default) } @@ -1176,7 +1176,7 @@ impl Step for Clippy { const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "clippy"); + let default = should_build_extended_tool(run.builder, "clippy"); run.alias("clippy").default_condition(default) } @@ -1224,7 +1224,7 @@ impl Step for Miri { const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "miri"); + let default = should_build_extended_tool(run.builder, "miri"); run.alias("miri").default_condition(default) } @@ -1337,12 +1337,12 @@ impl Step for CodegenBackend { let src = builder.sysroot(compiler); let backends_src = builder.sysroot_codegen_backends(compiler); let backends_rel = backends_src - .strip_prefix(&src) + .strip_prefix(src) .unwrap() .strip_prefix(builder.sysroot_libdir_relative(compiler)) .unwrap(); // Don't use custom libdir here because ^lib/ will be resolved again with installer - let backends_dst = PathBuf::from("lib").join(&backends_rel); + let backends_dst = PathBuf::from("lib").join(backends_rel); let backend_name = format!("rustc_codegen_{}", backend); let mut found_backend = false; @@ -1371,7 +1371,7 @@ impl Step for Rustfmt { const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "rustfmt"); + let default = should_build_extended_tool(run.builder, "rustfmt"); run.alias("rustfmt").default_condition(default) } @@ -1454,7 +1454,7 @@ impl Step for RustDemangler { let mut tarball = Tarball::new(builder, "rust-demangler", &target.triple); tarball.set_overlay(OverlayKind::RustDemangler); tarball.is_preview(true); - tarball.add_file(&rust_demangler, "bin", 0o755); + tarball.add_file(rust_demangler, "bin", 0o755); tarball.add_legal_and_readme_to("share/doc/rust-demangler"); Some(tarball.generate()) } @@ -1609,7 +1609,7 @@ impl Step for 
Extended { let prepare = |name: &str| { builder.create_dir(&pkg.join(name)); builder.cp_r( - &work.join(&format!("{}-{}", pkgname(builder, name), target.triple)), + &work.join(format!("{}-{}", pkgname(builder, name), target.triple)), &pkg.join(name), ); builder.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755); @@ -1673,7 +1673,7 @@ impl Step for Extended { name.to_string() }; builder.cp_r( - &work.join(&format!("{}-{}", pkgname(builder, name), target.triple)).join(dir), + &work.join(format!("{}-{}", pkgname(builder, name), target.triple)).join(dir), &exe.join(name), ); builder.remove(&exe.join(name).join("manifest.in")); @@ -1707,7 +1707,7 @@ impl Step for Extended { .current_dir(&exe) .arg("dir") .arg("rustc") - .args(&heat_flags) + .args(heat_flags) .arg("-cg") .arg("RustcGroup") .arg("-dr") @@ -1723,7 +1723,7 @@ impl Step for Extended { .current_dir(&exe) .arg("dir") .arg("rust-docs") - .args(&heat_flags) + .args(heat_flags) .arg("-cg") .arg("DocsGroup") .arg("-dr") @@ -1741,7 +1741,7 @@ impl Step for Extended { .current_dir(&exe) .arg("dir") .arg("cargo") - .args(&heat_flags) + .args(heat_flags) .arg("-cg") .arg("CargoGroup") .arg("-dr") @@ -1758,7 +1758,7 @@ impl Step for Extended { .current_dir(&exe) .arg("dir") .arg("rust-std") - .args(&heat_flags) + .args(heat_flags) .arg("-cg") .arg("StdGroup") .arg("-dr") @@ -1774,7 +1774,7 @@ impl Step for Extended { .current_dir(&exe) .arg("dir") .arg("rust-analyzer") - .args(&heat_flags) + .args(heat_flags) .arg("-cg") .arg("RustAnalyzerGroup") .arg("-dr") @@ -1793,7 +1793,7 @@ impl Step for Extended { .current_dir(&exe) .arg("dir") .arg("clippy") - .args(&heat_flags) + .args(heat_flags) .arg("-cg") .arg("ClippyGroup") .arg("-dr") @@ -1812,7 +1812,7 @@ impl Step for Extended { .current_dir(&exe) .arg("dir") .arg("rust-demangler") - .args(&heat_flags) + .args(heat_flags) .arg("-cg") .arg("RustDemanglerGroup") .arg("-dr") @@ -1831,7 +1831,7 @@ impl Step for Extended { .current_dir(&exe) .arg("dir") .arg("miri") - .args(&heat_flags) + .args(heat_flags) .arg("-cg") .arg("MiriGroup") .arg("-dr") @@ -1849,7 +1849,7 @@ impl Step for Extended { .current_dir(&exe) .arg("dir") .arg("rust-analysis") - .args(&heat_flags) + .args(heat_flags) .arg("-cg") .arg("AnalysisGroup") .arg("-dr") @@ -1867,7 +1867,7 @@ impl Step for Extended { .current_dir(&exe) .arg("dir") .arg("rust-mingw") - .args(&heat_flags) + .args(heat_flags) .arg("-cg") .arg("GccGroup") .arg("-dr") @@ -1890,10 +1890,10 @@ impl Step for Extended { .arg("-dStdDir=rust-std") .arg("-dAnalysisDir=rust-analysis") .arg("-arch") - .arg(&arch) + .arg(arch) .arg("-out") .arg(&output) - .arg(&input); + .arg(input); add_env(builder, &mut cmd, target); if built_tools.contains("clippy") { @@ -2026,7 +2026,7 @@ fn install_llvm_file(builder: &Builder<'_>, source: &Path, destination: &Path) { return; } - builder.install(&source, destination, 0o644); + builder.install(source, destination, 0o644); } /// Maybe add LLVM object files to the given destination lib-dir. Allows either static or dynamic linking. @@ -2123,7 +2123,7 @@ impl Step for LlvmTools { const DEFAULT: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "llvm-tools"); + let default = should_build_extended_tool(run.builder, "llvm-tools"); // FIXME: allow using the names of the tools themselves? 
run.alias("llvm-tools").default_condition(default) } @@ -2231,12 +2231,12 @@ impl Step for RustDev { tarball.add_file(lld_path, "bin", 0o755); } - tarball.add_file(&builder.llvm_filecheck(target), "bin", 0o755); + tarball.add_file(builder.llvm_filecheck(target), "bin", 0o755); // Copy the include directory as well; needed mostly to build // librustc_llvm properly (e.g., llvm-config.h is in here). But also // just broadly useful to be able to link against the bundled LLVM. - tarball.add_dir(&builder.llvm_out(target).join("include"), "include"); + tarball.add_dir(builder.llvm_out(target).join("include"), "include"); // Copy libLLVM.so to the target lib dir as well, so the RPATH like // `$ORIGIN/../lib` can find it. It may also be used as a dependency @@ -2312,7 +2312,7 @@ impl Step for BuildManifest { let build_manifest = builder.tool_exe(Tool::BuildManifest); let tarball = Tarball::new(builder, "build-manifest", &self.target.triple); - tarball.add_file(&build_manifest, "bin", 0o755); + tarball.add_file(build_manifest, "bin", 0o755); tarball.generate() } } diff --git a/src/bootstrap/src/core/build_steps/doc.rs b/src/bootstrap/src/core/build_steps/doc.rs index fdb099e4ab1d7..7a122a8676ba7 100644 --- a/src/bootstrap/src/core/build_steps/doc.rs +++ b/src/bootstrap/src/core/build_steps/doc.rs @@ -151,7 +151,7 @@ impl Step for RustbookSrc

{ builder.info(&format!("Rustbook ({target}) - {name}")); let _ = fs::remove_dir_all(&out); - builder.run(rustbook_cmd.arg("build").arg(&src).arg("-d").arg(out)); + builder.run(rustbook_cmd.arg("build").arg(src).arg("-d").arg(out)); } if self.parent.is_some() { @@ -384,7 +384,7 @@ impl Step for Standalone { // with no particular explicit doc requested (e.g. library/core). if builder.paths.is_empty() || builder.was_invoked_explicitly::(Kind::Doc) { let index = out.join("index.html"); - builder.open_in_browser(&index); + builder.open_in_browser(index); } } } @@ -517,7 +517,7 @@ impl Step for SharedAssets { .replace("VERSION", &builder.rust_release()) .replace("SHORT_HASH", builder.rust_info().sha_short().unwrap_or("")) .replace("STAMP", builder.rust_info().sha().unwrap_or("")); - t!(fs::write(&version_info, &info)); + t!(fs::write(&version_info, info)); } builder.copy(&builder.src.join("src").join("doc").join("rust.css"), &out.join("rust.css")); @@ -714,11 +714,11 @@ fn doc_std( } let description = - format!("library{} in {} format", crate_description(&requested_crates), format.as_str()); - let _guard = builder.msg_doc(compiler, &description, target); + format!("library{} in {} format", crate_description(requested_crates), format.as_str()); + let _guard = builder.msg_doc(compiler, description, target); builder.run(&mut cargo.into()); - builder.cp_r(&out_dir, &out); + builder.cp_r(&out_dir, out); } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] @@ -781,7 +781,7 @@ impl Step for Rustc { let _guard = builder.msg_sysroot_tool( Kind::Doc, stage, - &format!("compiler{}", crate_description(&self.crates)), + format!("compiler{}", crate_description(&self.crates)), compiler.host, target, ); @@ -819,7 +819,7 @@ impl Step for Rustc { // Create all crate output directories first to make sure rustdoc uses // relative links. // FIXME: Cargo should probably do this itself. - let dir_name = krate.replace("-", "_"); + let dir_name = krate.replace('-', "_"); t!(fs::create_dir_all(out_dir.join(&*dir_name))); cargo.arg("-p").arg(krate); if to_open.is_none() { @@ -844,7 +844,7 @@ impl Step for Rustc { if !builder.config.dry_run() { // Sanity check on linked compiler crates for krate in &*self.crates { - let dir_name = krate.replace("-", "_"); + let dir_name = krate.replace('-', "_"); // Making sure the directory exists and is not empty. 
assert!(out.join(&*dir_name).read_dir().unwrap().next().is_some()); } @@ -1160,7 +1160,7 @@ impl Step for RustcBook { cmd.arg(&out_listing); cmd.arg("--rustc"); cmd.arg(&rustc); - cmd.arg("--rustc-target").arg(&self.target.rustc_target_arg()); + cmd.arg("--rustc-target").arg(self.target.rustc_target_arg()); if builder.is_verbose() { cmd.arg("--verbose"); } diff --git a/src/bootstrap/src/core/build_steps/format.rs b/src/bootstrap/src/core/build_steps/format.rs index e792d38b7ea67..69c8792b03148 100644 --- a/src/bootstrap/src/core/build_steps/format.rs +++ b/src/bootstrap/src/core/build_steps/format.rs @@ -11,7 +11,7 @@ use std::process::{Command, Stdio}; use std::sync::mpsc::SyncSender; fn rustfmt(src: &Path, rustfmt: &Path, paths: &[PathBuf], check: bool) -> impl FnMut(bool) -> bool { - let mut cmd = Command::new(&rustfmt); + let mut cmd = Command::new(rustfmt); // avoid the submodule config paths from coming into play, // we only allow a single global config for the workspace for now cmd.arg("--config-path").arg(&src.canonicalize().unwrap()); @@ -162,7 +162,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) { // against anything like `compiler/rustc_foo/src/foo.rs`, // preventing the latter from being formatted. untracked_count += 1; - fmt_override.add(&format!("!/{untracked_path}")).expect(&untracked_path); + fmt_override.add(&format!("!/{untracked_path}")).expect(untracked_path); } // Only check modified files locally to speed up runtime. // We still check all files in CI to avoid bugs in `get_modified_rs_files` letting regressions slip through; @@ -221,7 +221,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) { assert!(rustfmt_path.exists(), "{}", rustfmt_path.display()); let src = build.src.clone(); let (tx, rx): (SyncSender, _) = std::sync::mpsc::sync_channel(128); - let walker = match paths.get(0) { + let walker = match paths.first() { Some(first) => { let find_shortcut_candidates = |p: &PathBuf| { let mut candidates = Vec::new(); diff --git a/src/bootstrap/src/core/build_steps/install.rs b/src/bootstrap/src/core/build_steps/install.rs index 0225f8f24a5ef..29238b90225af 100644 --- a/src/bootstrap/src/core/build_steps/install.rs +++ b/src/bootstrap/src/core/build_steps/install.rs @@ -24,7 +24,7 @@ const SHELL: &str = "sh"; // We have to run a few shell scripts, which choke quite a bit on both `\` // characters and on `C:\` paths, so normalize both of them away. 
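// For illustration, a Windows path such as `C:\rust\build` comes out of this as a
// forward-slash, sh-friendly path along the lines of `/C/rust/build` (the exact
// mapping is done by `unc_to_lfs`/`change_drive` below).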
fn sanitize_sh(path: &Path) -> String { - let path = path.to_str().unwrap().replace("\\", "/"); + let path = path.to_str().unwrap().replace('\\', "/"); return change_drive(unc_to_lfs(&path)).unwrap_or(path); fn unc_to_lfs(s: &str) -> &str { @@ -44,7 +44,7 @@ fn sanitize_sh(path: &Path) -> String { } } -fn is_dir_writable_for_user(dir: &PathBuf) -> bool { +fn is_dir_writable_for_user(dir: &Path) -> bool { let tmp = dir.join(".tmp"); match fs::create_dir_all(&tmp) { Ok(_) => { diff --git a/src/bootstrap/src/core/build_steps/llvm.rs b/src/bootstrap/src/core/build_steps/llvm.rs index 836ac3de94d36..9622321a74e7c 100644 --- a/src/bootstrap/src/core/build_steps/llvm.rs +++ b/src/bootstrap/src/core/build_steps/llvm.rs @@ -110,7 +110,7 @@ pub fn prebuilt_llvm_config( let smart_stamp_hash = STAMP_HASH_MEMO.get_or_init(|| { generate_smart_stamp_hash( &builder.config.src.join("src/llvm-project"), - &builder.in_tree_llvm_info.sha().unwrap_or_default(), + builder.in_tree_llvm_info.sha().unwrap_or_default(), ) }); @@ -289,7 +289,7 @@ impl Step for Llvm { let _guard = builder.msg_unstaged(Kind::Build, "LLVM", target); t!(stamp.remove()); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); t!(fs::create_dir_all(&out_dir)); // https://llvm.org/docs/CMake.html @@ -355,7 +355,7 @@ impl Step for Llvm { cfg.define("LLVM_BUILD_RUNTIME", "No"); } if let Some(path) = builder.config.llvm_profile_use.as_ref() { - cfg.define("LLVM_PROFDATA_FILE", &path); + cfg.define("LLVM_PROFDATA_FILE", path); } // Disable zstd to avoid a dependency on libzstd.so. @@ -643,7 +643,7 @@ fn configure_cmake( let sanitize_cc = |cc: &Path| { if target.is_msvc() { - OsString::from(cc.to_str().unwrap().replace("\\", "/")) + OsString::from(cc.to_str().unwrap().replace('\\', "/")) } else { cc.as_os_str().to_owned() } @@ -808,10 +808,10 @@ fn configure_llvm(builder: &Builder<'_>, target: TargetSelection, cfg: &mut cmak // Adapted from https://github.com/alexcrichton/cc-rs/blob/fba7feded71ee4f63cfe885673ead6d7b4f2f454/src/lib.rs#L2347-L2365 fn get_var(var_base: &str, host: &str, target: &str) -> Option { let kind = if host == target { "HOST" } else { "TARGET" }; - let target_u = target.replace("-", "_"); - env::var_os(&format!("{var_base}_{target}")) - .or_else(|| env::var_os(&format!("{}_{}", var_base, target_u))) - .or_else(|| env::var_os(&format!("{}_{}", kind, var_base))) + let target_u = target.replace('-', "_"); + env::var_os(format!("{var_base}_{target}")) + .or_else(|| env::var_os(format!("{}_{}", var_base, target_u))) + .or_else(|| env::var_os(format!("{}_{}", kind, var_base))) .or_else(|| env::var_os(var_base)) } @@ -862,7 +862,7 @@ impl Step for Lld { } let _guard = builder.msg_unstaged(Kind::Build, "LLD", target); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); t!(fs::create_dir_all(&out_dir)); let mut cfg = cmake::Config::new(builder.src.join("src/llvm-project/lld")); @@ -986,7 +986,7 @@ impl Step for Sanitizers { let _guard = builder.msg_unstaged(Kind::Build, "sanitizers", self.target); t!(stamp.remove()); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); let mut cfg = cmake::Config::new(&compiler_rt_dir); cfg.profile("Release"); @@ -1051,7 +1051,7 @@ fn supported_sanitizers( .map(move |c| SanitizerRuntime { cmake_target: format!("clang_rt.{}_{}_dynamic", c, os), path: out_dir - .join(&format!("build/lib/darwin/libclang_rt.{}_{}_dynamic.dylib", c, os)), + .join(format!("build/lib/darwin/libclang_rt.{}_{}_dynamic.dylib", c, os)), 
name: format!("librustc-{}_rt.{}.dylib", channel, c), }) .collect() @@ -1062,7 +1062,7 @@ fn supported_sanitizers( .iter() .map(move |c| SanitizerRuntime { cmake_target: format!("clang_rt.{}-{}", c, arch), - path: out_dir.join(&format!("build/lib/{}/libclang_rt.{}-{}.a", os, c, arch)), + path: out_dir.join(format!("build/lib/{}/libclang_rt.{}-{}.a", os, c, arch)), name: format!("librustc-{}_rt.{}.a", channel, c), }) .collect() @@ -1165,7 +1165,7 @@ impl Step for CrtBeginEnd { /// Build crtbegin.o/crtend.o for musl target. fn run(self, builder: &Builder<'_>) -> Self::Output { - builder.update_submodule(&Path::new("src/llvm-project")); + builder.update_submodule(Path::new("src/llvm-project")); let out_dir = builder.native_dir(self.target).join("crt"); @@ -1233,7 +1233,7 @@ impl Step for Libunwind { /// Build libunwind.a fn run(self, builder: &Builder<'_>) -> Self::Output { - builder.update_submodule(&Path::new("src/llvm-project")); + builder.update_submodule(Path::new("src/llvm-project")); if builder.config.dry_run() { return PathBuf::new(); diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs index 5fa5f2d47946a..27b0c7760f078 100644 --- a/src/bootstrap/src/core/build_steps/run.rs +++ b/src/bootstrap/src/core/build_steps/run.rs @@ -23,7 +23,7 @@ impl Step for ExpandYamlAnchors { fn run(self, builder: &Builder<'_>) { builder.info("Expanding YAML anchors in the GitHub Actions configuration"); builder.run_delaying_failure( - &mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src), + builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src), ); } diff --git a/src/bootstrap/src/core/build_steps/setup.rs b/src/bootstrap/src/core/build_steps/setup.rs index f7747e66dd9c5..74a5578b43ec1 100644 --- a/src/bootstrap/src/core/build_steps/setup.rs +++ b/src/bootstrap/src/core/build_steps/setup.rs @@ -8,7 +8,7 @@ use std::env::consts::EXE_SUFFIX; use std::fmt::Write as _; use std::fs::File; use std::io::Write; -use std::path::{Path, PathBuf, MAIN_SEPARATOR}; +use std::path::{Path, PathBuf, MAIN_SEPARATOR_STR}; use std::process::Command; use std::str::FromStr; use std::{fmt, fs, io}; @@ -257,8 +257,7 @@ impl Step for Link { return; } let stage_path = - ["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string()); - + ["build", config.build.rustc_target_arg(), "stage1"].join(MAIN_SEPARATOR_STR); if !rustup_installed() { eprintln!("`rustup` is not installed; cannot link `stage1` toolchain"); } else if stage_dir_exists(&stage_path[..]) && !config.dry_run() { @@ -276,7 +275,7 @@ fn rustup_installed() -> bool { } fn stage_dir_exists(stage_path: &str) -> bool { - match fs::create_dir(&stage_path) { + match fs::create_dir(stage_path) { Ok(_) => true, Err(_) => Path::new(&stage_path).exists(), } @@ -294,7 +293,7 @@ fn attempt_toolchain_link(stage_path: &str) { return; } - if try_link_toolchain(&stage_path) { + if try_link_toolchain(stage_path) { println!( "Added `stage1` rustup toolchain; try `cargo +stage1 build` on a separate rust project to run a newly-built toolchain" ); @@ -310,7 +309,7 @@ fn attempt_toolchain_link(stage_path: &str) { fn toolchain_is_linked() -> bool { match Command::new("rustup") - .args(&["toolchain", "list"]) + .args(["toolchain", "list"]) .stdout(std::process::Stdio::piped()) .output() { @@ -337,7 +336,7 @@ fn toolchain_is_linked() -> bool { fn try_link_toolchain(stage_path: &str) -> bool { Command::new("rustup") .stdout(std::process::Stdio::null()) - .args(&["toolchain", 
"link", "stage1", &stage_path]) + .args(["toolchain", "link", "stage1", stage_path]) .output() .map_or(false, |output| output.status.success()) } @@ -366,7 +365,7 @@ fn ensure_stage1_toolchain_placeholder_exists(stage_path: &str) -> bool { return false; } - return true; + true } // Used to get the path for `Subcommand::Setup` @@ -469,13 +468,13 @@ impl Step for Hook { if config.dry_run() { return; } - t!(install_git_hook_maybe(&config)); + t!(install_git_hook_maybe(config)); } } // install a git hook to automatically run tidy, if they want fn install_git_hook_maybe(config: &Config) -> io::Result<()> { - let git = t!(config.git().args(&["rev-parse", "--git-common-dir"]).output().map(|output| { + let git = t!(config.git().args(["rev-parse", "--git-common-dir"]).output().map(|output| { assert!(output.status.success(), "failed to run `git`"); PathBuf::from(t!(String::from_utf8(output.stdout)).trim()) })); @@ -541,7 +540,7 @@ impl Step for Vscode { if config.dry_run() { return; } - while !t!(create_vscode_settings_maybe(&config)) {} + while !t!(create_vscode_settings_maybe(config)) {} } } @@ -608,7 +607,7 @@ fn create_vscode_settings_maybe(config: &Config) -> io::Result { } _ => "Created", }; - fs::write(&vscode_settings, &RUST_ANALYZER_SETTINGS)?; + fs::write(&vscode_settings, RUST_ANALYZER_SETTINGS)?; println!("{verb} `.vscode/settings.json`"); } else { println!("\n{RUST_ANALYZER_SETTINGS}"); diff --git a/src/bootstrap/src/core/build_steps/suggest.rs b/src/bootstrap/src/core/build_steps/suggest.rs index 93da27560c65c..c057fa9a5667b 100644 --- a/src/bootstrap/src/core/build_steps/suggest.rs +++ b/src/bootstrap/src/core/build_steps/suggest.rs @@ -36,7 +36,7 @@ pub fn suggest(builder: &Builder<'_>, run: bool) { // this code expects one suggestion per line in the following format: // {some number of flags} [optional stage number] let cmd = sections.next().unwrap(); - let stage = sections.next_back().map(|s| str::parse(s).ok()).flatten(); + let stage = sections.next_back().and_then(|s| str::parse(s).ok()); let paths: Vec = sections.map(|p| PathBuf::from_str(p).unwrap()).collect(); (cmd, stage, paths) diff --git a/src/bootstrap/src/core/build_steps/synthetic_targets.rs b/src/bootstrap/src/core/build_steps/synthetic_targets.rs index 9acdcaeb517ba..a00835402ec8b 100644 --- a/src/bootstrap/src/core/build_steps/synthetic_targets.rs +++ b/src/bootstrap/src/core/build_steps/synthetic_targets.rs @@ -79,7 +79,7 @@ fn create_synthetic_target( customize(spec_map); - std::fs::write(&path, &serde_json::to_vec_pretty(&spec).unwrap()).unwrap(); + std::fs::write(&path, serde_json::to_vec_pretty(&spec).unwrap()).unwrap(); let target = TargetSelection::create_synthetic(&name, path.to_str().unwrap()); crate::utils::cc_detect::find_target(builder, target); diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 0c7e751c8daac..791f847a8661a 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -156,7 +156,7 @@ You can skip linkcheck with --skip src/tools/linkchecker" // Run the linkchecker. 
let _guard = builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); builder.run_delaying_failure(linkchecker.arg(builder.out.join(host.triple).join("doc"))); } @@ -253,15 +253,15 @@ impl Step for Cargotest { let out_dir = builder.out.join("ct"); t!(fs::create_dir_all(&out_dir)); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); let mut cmd = builder.tool_cmd(Tool::CargoTest); - let mut cmd = cmd + let cmd = cmd .arg(&cargo) .arg(&out_dir) .args(builder.config.test_args()) .env("RUSTC", builder.rustc(compiler)) .env("RUSTDOC", builder.rustdoc(compiler)); - add_rustdoc_cargo_linker_args(&mut cmd, builder, compiler.host, LldThreads::No); + add_rustdoc_cargo_linker_args(cmd, builder, compiler.host, LldThreads::No); builder.run_delaying_failure(cmd); } } @@ -322,7 +322,7 @@ impl Step for Cargo { builder, ); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); add_flags_and_try_run_tests(builder, &mut cargo); } } @@ -474,7 +474,7 @@ impl Step for RustDemangler { ); let dir = testdir(builder, compiler.host); - t!(fs::create_dir_all(&dir)); + t!(fs::create_dir_all(dir)); cargo.env("RUST_DEMANGLER_DRIVER_PATH", rust_demangler); cargo.add_rustc_lib_path(builder); @@ -525,7 +525,7 @@ impl Miri { // Tell `cargo miri setup` where to find the sources. cargo.env("MIRI_LIB_SRC", builder.src.join("library")); // Tell it where to find Miri. - cargo.env("MIRI", &miri); + cargo.env("MIRI", miri); // Tell it where to put the sysroot. cargo.env("MIRI_SYSROOT", &miri_sysroot); // Debug things. @@ -637,7 +637,7 @@ impl Step for Miri { // does not understand the flags added by `add_flags_and_try_run_test`. let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder); { - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); builder.run(&mut cargo); } @@ -649,11 +649,11 @@ impl Step for Miri { // `MIRI_SKIP_UI_CHECKS` and `RUSTC_BLESS` are incompatible cargo.env_remove("RUSTC_BLESS"); // Optimizations can change error locations and remove UB so don't run `fail` tests. 
- cargo.args(&["tests/pass", "tests/panic"]); + cargo.args(["tests/pass", "tests/panic"]); let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder); { - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); builder.run(&mut cargo); } } @@ -693,7 +693,7 @@ impl Step for Miri { let mut cargo = Command::from(cargo); { - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); builder.run(&mut cargo); } } @@ -946,7 +946,7 @@ impl Step for RustdocJSNotStd { } fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option { - let mut command = Command::new(&npm); + let mut command = Command::new(npm); command.arg("list").arg("--parseable").arg("--long").arg("--depth=0"); if global { command.arg("--global"); @@ -954,7 +954,7 @@ fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option let lines = command .output() .map(|output| String::from_utf8_lossy(&output.stdout).into_owned()) - .unwrap_or(String::new()); + .unwrap_or_default(); lines .lines() .find_map(|l| l.split(':').nth(1)?.strip_prefix("browser-ui-test@")) @@ -1048,7 +1048,7 @@ impl Step for RustdocGUI { cmd.arg("--npm").arg(npm); } - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); let _guard = builder.msg_sysroot_tool( Kind::Test, self.compiler.stage, @@ -1096,7 +1096,7 @@ impl Step for Tidy { cmd.arg(format!("--extra-checks={s}")); } let mut args = std::env::args_os(); - if let Some(_) = args.find(|arg| arg == OsStr::new("--")) { + if args.any(|arg| arg == OsStr::new("--")) { cmd.arg("--"); cmd.args(args); } @@ -1116,7 +1116,7 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to ); crate::exit!(1); } - crate::core::build_steps::format::format(&builder, !builder.config.cmd.bless(), &[]); + crate::core::build_steps::format::format(builder, !builder.config.cmd.bless(), &[]); } builder.info("tidy check"); @@ -1171,7 +1171,7 @@ impl Step for ExpandYamlAnchors { } builder.info("Ensuring the YAML anchors in the GitHub Actions config were expanded"); builder.run_delaying_failure( - &mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src), + builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src), ); } @@ -1759,7 +1759,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the for exclude in &builder.config.skip { cmd.arg("--skip"); - cmd.arg(&exclude); + cmd.arg(exclude); } // Get paths from cmd args @@ -1780,7 +1780,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the // so the correct filters are passed to libtest if cfg!(windows) { let test_args_win: Vec = - test_args.iter().map(|s| s.replace("/", "\\")).collect(); + test_args.iter().map(|s| s.replace('/', "\\")).collect(); cmd.args(&test_args_win); } else { cmd.args(&test_args); @@ -1900,7 +1900,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the // Note that if we encounter `PATH` we make sure to append to our own `PATH` // rather than stomp over it. if !builder.config.dry_run() && target.is_msvc() { - for &(ref k, ref v) in builder.cc.borrow()[&target].env() { + for (k, v) in builder.cc.borrow()[&target].env() { if k != "PATH" { cmd.env(k, v); } @@ -1996,7 +1996,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. 
In the let _group = builder.msg( Kind::Test, compiler.stage, - &format!("compiletest suite={suite} mode={mode}"), + format!("compiletest suite={suite} mode={mode}"), compiler.host, target, ); @@ -2022,7 +2022,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the "Check compiletest suite={} mode={} compare_mode={} ({} -> {})", suite, mode, compare_mode, &compiler.host, target )); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); try_run_tests(builder, &mut cmd, false); } } @@ -2094,7 +2094,7 @@ impl BookTest { compiler.host, compiler.host, ); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); let toolstate = if builder.run_delaying_failure(&mut rustbook_cmd) { ToolState::TestPass } else { @@ -2111,12 +2111,12 @@ impl BookTest { builder.ensure(compile::Std::new(compiler, host)); let _guard = - builder.msg(Kind::Test, compiler.stage, &format!("book {}", self.name), host, host); + builder.msg(Kind::Test, compiler.stage, format!("book {}", self.name), host, host); // Do a breadth-first traversal of the `src/doc` directory and just run // tests for all files that end in `*.md` let mut stack = vec![builder.src.join(self.path)]; - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); let mut files = Vec::new(); while let Some(p) = stack.pop() { if p.is_dir() { @@ -2227,7 +2227,7 @@ impl Step for ErrorIndex { let guard = builder.msg(Kind::Test, compiler.stage, "error-index", compiler.host, compiler.host); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); builder.run_quiet(&mut tool); drop(guard); // The tests themselves need to link to std, so make sure it is @@ -2315,11 +2315,8 @@ impl Step for CrateLibrustc { let builder = run.builder; let host = run.build_triple(); let compiler = builder.compiler_for(builder.top_stage, host, host); - let crates = run - .paths - .iter() - .map(|p| builder.crate_paths[&p.assert_single_path().path].clone()) - .collect(); + let crates = + run.paths.iter().map(|p| builder.crate_paths[&p.assert_single_path().path]).collect(); builder.ensure(CrateLibrustc { compiler, target: run.target, crates }); } @@ -2351,7 +2348,7 @@ fn run_cargo_test<'a>( ) -> bool { let mut cargo = prepare_cargo_test(cargo, libtest_args, crates, primary_crate, compiler, target, builder); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); let _group = description.into().and_then(|what| { builder.msg_sysroot_tool(Kind::Test, compiler.stage, what, compiler.host, target) }); @@ -2406,7 +2403,7 @@ fn prepare_cargo_test( if krate.has_lib { cargo.arg("--lib"); } - cargo.args(&["--bins", "--examples", "--tests", "--benches"]); + cargo.args(["--bins", "--examples", "--tests", "--benches"]); } DocTests::Yes => {} } @@ -2468,11 +2465,8 @@ impl Step for Crate { let builder = run.builder; let host = run.build_triple(); let compiler = builder.compiler_for(builder.top_stage, host, host); - let crates = run - .paths - .iter() - .map(|p| builder.crate_paths[&p.assert_single_path().path].clone()) - .collect(); + let crates = + run.paths.iter().map(|p| builder.crate_paths[&p.assert_single_path().path]).collect(); builder.ensure(Crate { compiler, target: run.target, mode: Mode::Std, crates }); } @@ -2844,11 +2838,11 @@ impl Step for Bootstrap { let compiler = builder.compiler(0, host); let _guard = builder.msg(Kind::Test, 0, "bootstrap", host, host); - let mut check_bootstrap = Command::new(&builder.python()); + let mut check_bootstrap = 
Command::new(builder.python()); check_bootstrap .args(["-m", "unittest", "bootstrap_test.py"]) .env("BUILD_DIR", &builder.out) - .env("BUILD_PLATFORM", &builder.build.build.triple) + .env("BUILD_PLATFORM", builder.build.build.triple) .current_dir(builder.src.join("src/bootstrap/")); // NOTE: we intentionally don't pass test_args here because the args for unittest and cargo test are mutually incompatible. // Use `python -m unittest` manually if you want to pass arguments. @@ -3171,7 +3165,7 @@ impl Step for CodegenCranelift { &compiler.host, target )); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); // FIXME handle vendoring for source tarballs before removing the --skip-test below let download_dir = builder.out.join("cg_clif_download"); @@ -3300,7 +3294,7 @@ impl Step for CodegenGCC { &compiler.host, target )); - let _time = helpers::timeit(&builder); + let _time = helpers::timeit(builder); // FIXME: Uncomment the `prepare` command below once vendoring is implemented. /* diff --git a/src/bootstrap/src/core/build_steps/tool.rs b/src/bootstrap/src/core/build_steps/tool.rs index a3daf22c9a99d..ba867a04ec55f 100644 --- a/src/bootstrap/src/core/build_steps/tool.rs +++ b/src/bootstrap/src/core/build_steps/tool.rs @@ -819,7 +819,7 @@ impl<'a> Builder<'a> { if compiler.host.is_msvc() { let curpaths = env::var_os("PATH").unwrap_or_default(); let curpaths = env::split_paths(&curpaths).collect::>(); - for &(ref k, ref v) in self.cc.borrow()[&compiler.host].env() { + for (k, v) in self.cc.borrow()[&compiler.host].env() { if k != "PATH" { continue; } diff --git a/src/bootstrap/src/core/build_steps/toolstate.rs b/src/bootstrap/src/core/build_steps/toolstate.rs index a451f92b6bcbb..deb782cad0ce4 100644 --- a/src/bootstrap/src/core/build_steps/toolstate.rs +++ b/src/bootstrap/src/core/build_steps/toolstate.rs @@ -346,7 +346,7 @@ fn prepare_toolstate_config(token: &str) { let credential = format!("https://{token}:x-oauth-basic@github.com\n",); let git_credential_path = PathBuf::from(t!(env::var("HOME"))).join(".git-credentials"); - t!(fs::write(&git_credential_path, credential)); + t!(fs::write(git_credential_path, credential)); } /// Reads the latest toolstate from the toolstate repo. @@ -389,7 +389,7 @@ fn commit_toolstate_change(current_toolstate: &ToolstateData) { // Upload the test results (the new commit-to-toolstate mapping) to the toolstate repo. // This does *not* change the "current toolstate"; that only happens post-landing // via `src/ci/docker/publish_toolstate.sh`. - publish_test_results(¤t_toolstate); + publish_test_results(current_toolstate); // `git commit` failing means nothing to commit. 
let status = t!(Command::new("git") diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs index 50264ca3b3ab3..97819403ab7f2 100644 --- a/src/bootstrap/src/core/builder.rs +++ b/src/bootstrap/src/core/builder.rs @@ -290,7 +290,7 @@ impl PathSet { const PATH_REMAP: &[(&str, &str)] = &[("rust-analyzer-proc-macro-srv", "proc-macro-srv-cli")]; -fn remap_paths(paths: &mut Vec<&Path>) { +fn remap_paths(paths: &mut [&Path]) { for path in paths.iter_mut() { for &(search, replace) in PATH_REMAP { if path.to_str() == Some(search) { @@ -329,7 +329,7 @@ impl StepDescription { } fn is_excluded(&self, builder: &Builder<'_>, pathset: &PathSet) -> bool { - if builder.config.skip.iter().any(|e| pathset.has(&e, builder.kind)) { + if builder.config.skip.iter().any(|e| pathset.has(e, builder.kind)) { if !matches!(builder.config.dry_run, DryRun::SelfCheck) { println!("Skipping {pathset:?} because it is excluded"); } @@ -369,8 +369,7 @@ impl StepDescription { } // strip CurDir prefix if present - let mut paths: Vec<_> = - paths.into_iter().map(|p| p.strip_prefix(".").unwrap_or(p)).collect(); + let mut paths: Vec<_> = paths.iter().map(|p| p.strip_prefix(".").unwrap_or(p)).collect(); remap_paths(&mut paths); @@ -378,7 +377,7 @@ impl StepDescription { // (This is separate from the loop below to avoid having to handle multiple paths in `is_suite_path` somehow.) paths.retain(|path| { for (desc, should_run) in v.iter().zip(&should_runs) { - if let Some(suite) = should_run.is_suite_path(&path) { + if let Some(suite) = should_run.is_suite_path(path) { desc.maybe_run(builder, vec![suite.clone()]); return false; } @@ -537,7 +536,7 @@ impl<'a> ShouldRun<'a> { .iter() .map(|p| { // assert only if `p` isn't submodule - if submodules_paths.iter().find(|sm_p| p.contains(*sm_p)).is_none() { + if !submodules_paths.iter().any(|sm_p| p.contains(sm_p)) { assert!( self.builder.src.join(p).exists(), "`should_run.paths` should correspond to real on-disk paths - use `alias` if there is no relevant path: {}", @@ -1208,7 +1207,7 @@ impl<'a> Builder<'a> { } pub fn rustdoc_cmd(&self, compiler: Compiler) -> Command { - let mut cmd = Command::new(&self.bootstrap_out.join("rustdoc")); + let mut cmd = Command::new(self.bootstrap_out.join("rustdoc")); cmd.env("RUSTC_STAGE", compiler.stage.to_string()) .env("RUSTC_SYSROOT", self.sysroot(compiler)) // Note that this is *not* the sysroot_libdir because rustdoc must be linked @@ -1351,7 +1350,7 @@ impl<'a> Builder<'a> { // See comment in rustc_llvm/build.rs for why this is necessary, largely llvm-config // needs to not accidentally link to libLLVM in stage0/lib. 
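        // (`helpers::dylib_path_var()` names the platform's dynamic-library search
        // variable, e.g. `LD_LIBRARY_PATH` on Linux; its current value is stashed in
        // `REAL_LIBRARY_PATH` so the original search path stays available to the
        // rustc_llvm build script.)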
- cargo.env("REAL_LIBRARY_PATH_VAR", &helpers::dylib_path_var()); + cargo.env("REAL_LIBRARY_PATH_VAR", helpers::dylib_path_var()); if let Some(e) = env::var_os(helpers::dylib_path_var()) { cargo.env("REAL_LIBRARY_PATH", e); } @@ -1620,8 +1619,8 @@ impl<'a> Builder<'a> { .env("RUSTBUILD_NATIVE_DIR", self.native_dir(target)) .env("RUSTC_REAL", self.rustc(compiler)) .env("RUSTC_STAGE", stage.to_string()) - .env("RUSTC_SYSROOT", &sysroot) - .env("RUSTC_LIBDIR", &libdir) + .env("RUSTC_SYSROOT", sysroot) + .env("RUSTC_LIBDIR", libdir) .env("RUSTDOC", self.bootstrap_out.join("rustdoc")) .env( "RUSTDOC_REAL", @@ -1754,7 +1753,7 @@ impl<'a> Builder<'a> { cargo.env("RUSTC_BOOTSTRAP", "1"); if self.config.dump_bootstrap_shims { - prepare_behaviour_dump_dir(&self.build); + prepare_behaviour_dump_dir(self.build); cargo .env("DUMP_BOOTSTRAP_SHIMS", self.build.out.join("bootstrap-shims-dump")) @@ -1793,7 +1792,7 @@ impl<'a> Builder<'a> { // platform-specific environment variable as a workaround. if mode == Mode::ToolRustc || mode == Mode::Codegen { if let Some(llvm_config) = self.llvm_config(target) { - let llvm_libdir = output(Command::new(&llvm_config).arg("--libdir")); + let llvm_libdir = output(Command::new(llvm_config).arg("--libdir")); add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cargo); } } @@ -2080,7 +2079,7 @@ impl<'a> Builder<'a> { if self.config.print_step_timings && !self.config.dry_run() { let step_string = format!("{step:?}"); - let brace_index = step_string.find("{").unwrap_or(0); + let brace_index = step_string.find('{').unwrap_or(0); let type_string = type_name::(); println!( "[TIMING] {} {} -- {}.{:03}", @@ -2429,7 +2428,7 @@ impl Cargo { _ => s.display().to_string(), } }; - let triple_underscored = target.triple.replace("-", "_"); + let triple_underscored = target.triple.replace('-', "_"); let cc = ccacheify(&builder.cc(target)); self.command.env(format!("CC_{triple_underscored}"), &cc); diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 1605776f77277..927d46c67a9f8 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -468,7 +468,7 @@ pub struct TargetSelectionList(Vec); pub fn target_selection_list(s: &str) -> Result { Ok(TargetSelectionList( - s.split(",").filter(|s| !s.is_empty()).map(TargetSelection::from_user).collect(), + s.split(',').filter(|s| !s.is_empty()).map(TargetSelection::from_user).collect(), )) } @@ -963,10 +963,10 @@ impl<'de> serde::de::Visitor<'de> for OptimizeVisitor { where E: serde::de::Error, { - if ["s", "z"].iter().find(|x| **x == value).is_some() { + if matches!(value, "s" | "z") { Ok(RustOptimize::String(value.to_string())) } else { - Err(format_optimize_error_msg(value)).map_err(serde::de::Error::custom) + Err(serde::de::Error::custom(format_optimize_error_msg(value))) } } @@ -977,7 +977,7 @@ impl<'de> serde::de::Visitor<'de> for OptimizeVisitor { if matches!(value, 0..=3) { Ok(RustOptimize::Int(value as u8)) } else { - Err(format_optimize_error_msg(value)).map_err(serde::de::Error::custom) + Err(serde::de::Error::custom(format_optimize_error_msg(value))) } } @@ -1144,41 +1144,44 @@ define_config! 
{ impl Config { pub fn default_opts() -> Config { - let mut config = Config::default(); - config.bypass_bootstrap_lock = false; - config.llvm_optimize = true; - config.ninja_in_file = true; - config.llvm_static_stdcpp = false; - config.backtrace = true; - config.rust_optimize = RustOptimize::Bool(true); - config.rust_optimize_tests = true; - config.submodules = None; - config.docs = true; - config.docs_minification = true; - config.rust_rpath = true; - config.rust_strip = false; - config.channel = "dev".to_string(); - config.codegen_tests = true; - config.rust_dist_src = true; - config.rust_codegen_backends = vec![INTERNER.intern_str("llvm")]; - config.deny_warnings = true; - config.bindir = "bin".into(); - config.dist_include_mingw_linker = true; - config.dist_compression_profile = "fast".into(); - config.rustc_parallel = true; - - config.stdout_is_tty = std::io::stdout().is_terminal(); - config.stderr_is_tty = std::io::stderr().is_terminal(); - - // set by build.rs - config.build = TargetSelection::from_user(&env!("BUILD_TRIPLE")); - - let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - // Undo `src/bootstrap` - config.src = manifest_dir.parent().unwrap().parent().unwrap().to_owned(); - config.out = PathBuf::from("build"); - - config + Config { + bypass_bootstrap_lock: false, + llvm_optimize: true, + ninja_in_file: true, + llvm_static_stdcpp: false, + backtrace: true, + rust_optimize: RustOptimize::Bool(true), + rust_optimize_tests: true, + submodules: None, + docs: true, + docs_minification: true, + rust_rpath: true, + rust_strip: false, + channel: "dev".to_string(), + codegen_tests: true, + rust_dist_src: true, + rust_codegen_backends: vec![INTERNER.intern_str("llvm")], + deny_warnings: true, + bindir: "bin".into(), + dist_include_mingw_linker: true, + dist_compression_profile: "fast".into(), + rustc_parallel: true, + + stdout_is_tty: std::io::stdout().is_terminal(), + stderr_is_tty: std::io::stderr().is_terminal(), + + // set by build.rs + build: TargetSelection::from_user(env!("BUILD_TRIPLE")), + + src: { + let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + // Undo `src/bootstrap` + manifest_dir.parent().unwrap().parent().unwrap().to_owned() + }, + out: PathBuf::from("build"), + + ..Default::default() + } } pub fn parse(args: &[String]) -> Config { @@ -1204,7 +1207,7 @@ impl Config { } pub(crate) fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfig) -> Config { - let mut flags = Flags::parse(&args); + let mut flags = Flags::parse(args); let mut config = Config::default_opts(); // Set flags. @@ -1252,7 +1255,7 @@ impl Config { // Bootstrap is quite bad at handling /? in front of paths let src = match s.strip_prefix("\\\\?\\") { Some(p) => PathBuf::from(p), - None => PathBuf::from(git_root), + None => git_root, }; // If this doesn't have at least `stage0.json`, we guessed wrong. This can happen when, // for example, the build directory is inside of another unrelated git directory. 
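// A minimal sketch of the struct-update pattern that the `default_opts` rewrite above
// relies on: named fields take the listed values and `..Default::default()` fills in the
// rest. `Opts` is a hypothetical stand-in for `Config`, used only to show the mechanics.
#[derive(Default)]
struct Opts {
    optimize: bool,
    channel: String,
    bindir: std::path::PathBuf,
}

fn default_opts_sketch() -> Opts {
    Opts {
        optimize: true,
        channel: "dev".to_string(),
        // Every field not listed explicitly falls back to its `Default` value.
        ..Default::default()
    }
}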
@@ -1278,7 +1281,7 @@ impl Config { .to_path_buf(); } - let stage0_json = t!(std::fs::read(&config.src.join("src").join("stage0.json"))); + let stage0_json = t!(std::fs::read(config.src.join("src").join("stage0.json"))); config.stage0_metadata = t!(serde_json::from_slice::(&stage0_json)); @@ -1324,8 +1327,7 @@ impl Config { let mut override_toml = TomlConfig::default(); for option in flags.set.iter() { fn get_table(option: &str) -> Result { - toml::from_str(&option) - .and_then(|table: toml::Value| TomlConfig::deserialize(table)) + toml::from_str(option).and_then(|table: toml::Value| TomlConfig::deserialize(table)) } let mut err = match get_table(option) { @@ -1337,7 +1339,7 @@ impl Config { }; // We want to be able to set string values without quotes, // like in `configure.py`. Try adding quotes around the right hand side - if let Some((key, value)) = option.split_once("=") { + if let Some((key, value)) = option.split_once('=') { if !value.contains('"') { match get_table(&format!(r#"{key}="{value}""#)) { Ok(v) => { @@ -1660,7 +1662,7 @@ impl Config { llvm_libunwind.map(|v| v.parse().expect("failed to parse rust.llvm-libunwind")); if let Some(ref backends) = codegen_backends { - let available_backends = vec!["llvm", "cranelift", "gcc"]; + let available_backends = ["llvm", "cranelift", "gcc"]; config.rust_codegen_backends = backends.iter().map(|s| { if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) { @@ -1808,7 +1810,7 @@ impl Config { let mut target = Target::from_triple(&triple); if let Some(ref s) = cfg.llvm_config { - if config.download_rustc_commit.is_some() && triple == &*config.build.triple { + if config.download_rustc_commit.is_some() && triple == *config.build.triple { panic!( "setting llvm_config for the host is incompatible with download-rustc" ); @@ -1847,7 +1849,7 @@ impl Config { target.rpath = cfg.rpath; if let Some(ref backends) = cfg.codegen_backends { - let available_backends = vec!["llvm", "cranelift", "gcc"]; + let available_backends = ["llvm", "cranelift", "gcc"]; target.codegen_backends = Some(backends.iter().map(|s| { if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) { @@ -1874,7 +1876,7 @@ impl Config { let build_target = config .target_config .entry(config.build) - .or_insert_with(|| Target::from_triple(&triple)); + .or_insert_with(|| Target::from_triple(triple)); check_ci_llvm!(build_target.llvm_config); check_ci_llvm!(build_target.llvm_filecheck); @@ -2208,7 +2210,7 @@ impl Config { } pub fn sanitizers_enabled(&self, target: TargetSelection) -> bool { - self.target_config.get(&target).map(|t| t.sanitizers).flatten().unwrap_or(self.sanitizers) + self.target_config.get(&target).and_then(|t| t.sanitizers).unwrap_or(self.sanitizers) } pub fn needs_sanitizer_runtime_built(&self, target: TargetSelection) -> bool { @@ -2243,7 +2245,7 @@ impl Config { } pub fn rpath_enabled(&self, target: TargetSelection) -> bool { - self.target_config.get(&target).map(|t| t.rpath).flatten().unwrap_or(self.rust_rpath) + self.target_config.get(&target).and_then(|t| t.rpath).unwrap_or(self.rust_rpath) } pub fn llvm_enabled(&self, target: TargetSelection) -> bool { @@ -2274,7 +2276,7 @@ impl Config { } pub fn default_codegen_backend(&self, target: TargetSelection) -> Option> { - self.codegen_backends(target).get(0).cloned() + self.codegen_backends(target).first().cloned() } pub fn git_config(&self) -> GitConfig<'_> { @@ -2303,9 +2305,9 @@ impl Config { .next() .unwrap() .to_owned(); - let rustc_version = Version::parse(&rustc_output.trim()).unwrap(); + let 
rustc_version = Version::parse(rustc_output.trim()).unwrap(); let source_version = - Version::parse(&fs::read_to_string(self.src.join("src/version")).unwrap().trim()) + Version::parse(fs::read_to_string(self.src.join("src/version")).unwrap().trim()) .unwrap(); if !(source_version == rustc_version || (source_version.major == rustc_version.major @@ -2333,7 +2335,7 @@ impl Config { }; // Handle running from a directory other than the top level - let top_level = output(self.git().args(&["rev-parse", "--show-toplevel"])); + let top_level = output(self.git().args(["rev-parse", "--show-toplevel"])); let top_level = top_level.trim_end(); let compiler = format!("{top_level}/compiler/"); let library = format!("{top_level}/library/"); @@ -2344,7 +2346,7 @@ impl Config { self.git() .arg("rev-list") .arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email)) - .args(&["-n1", "--first-parent", "HEAD"]), + .args(["-n1", "--first-parent", "HEAD"]), ); let commit = merge_base.trim_end(); if commit.is_empty() { @@ -2358,7 +2360,7 @@ impl Config { // Warn if there were changes to the compiler or standard library since the ancestor commit. let has_changes = !t!(self .git() - .args(&["diff-index", "--quiet", &commit, "--", &compiler, &library]) + .args(["diff-index", "--quiet", commit, "--", &compiler, &library]) .status()) .success(); if has_changes { @@ -2397,7 +2399,7 @@ impl Config { // there are some untracked changes in the the given paths. false } else { - llvm::is_ci_llvm_available(&self, asserts) + llvm::is_ci_llvm_available(self, asserts) } }; match download_ci_llvm { @@ -2406,7 +2408,7 @@ impl Config { // FIXME: "if-available" is deprecated. Remove this block later (around mid 2024) // to not break builds between the recent-to-old checkouts. Some(StringOrBool::String(s)) if s == "if-available" => { - llvm::is_ci_llvm_available(&self, asserts) + llvm::is_ci_llvm_available(self, asserts) } Some(StringOrBool::String(s)) if s == "if-unchanged" => if_unchanged(), Some(StringOrBool::String(other)) => { @@ -2424,7 +2426,7 @@ impl Config { if_unchanged: bool, ) -> Option { // Handle running from a directory other than the top level - let top_level = output(self.git().args(&["rev-parse", "--show-toplevel"])); + let top_level = output(self.git().args(["rev-parse", "--show-toplevel"])); let top_level = top_level.trim_end(); // Look for a version to compare to based on the current commit. @@ -2433,7 +2435,7 @@ impl Config { self.git() .arg("rev-list") .arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email)) - .args(&["-n1", "--first-parent", "HEAD"]), + .args(["-n1", "--first-parent", "HEAD"]), ); let commit = merge_base.trim_end(); if commit.is_empty() { @@ -2446,7 +2448,7 @@ impl Config { // Warn if there were changes to the compiler or standard library since the ancestor commit. 
let mut git = self.git(); - git.args(&["diff-index", "--quiet", &commit, "--"]); + git.args(["diff-index", "--quiet", commit, "--"]); for path in modified_paths { git.arg(format!("{top_level}/{path}")); diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs index b4ae3578ce317..185089a646bfa 100644 --- a/src/bootstrap/src/core/download.rs +++ b/src/bootstrap/src/core/download.rs @@ -159,7 +159,7 @@ impl Config { "; nix_build_succeeded = try_run( self, - Command::new("nix-build").args(&[ + Command::new("nix-build").args([ Path::new("-E"), Path::new(NIX_EXPR), Path::new("-o"), @@ -188,7 +188,7 @@ impl Config { let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker"); // FIXME: can we support utf8 here? `args` doesn't accept Vec, only OsString ... let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path)))); - patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]); + patchelf.args(["--set-interpreter", dynamic_linker.trim_end()]); } let _ = try_run(self, patchelf.arg(fname)); @@ -218,7 +218,7 @@ impl Config { println!("downloading {url}"); // Try curl. If that fails and we are on windows, fallback to PowerShell. let mut curl = Command::new("curl"); - curl.args(&[ + curl.args([ "-y", "30", "-Y", @@ -242,7 +242,7 @@ impl Config { if self.build.contains("windows-msvc") { eprintln!("Fallback to PowerShell"); for _ in 0..3 { - if try_run(self, Command::new("PowerShell.exe").args(&[ + if try_run(self, Command::new("PowerShell.exe").args([ "/nologo", "-Command", "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", @@ -388,7 +388,7 @@ impl Config { let bin_root = self.out.join(host.triple).join("stage0"); let clippy_stamp = bin_root.join(".clippy-stamp"); let cargo_clippy = bin_root.join("bin").join(exe("cargo-clippy", host)); - if cargo_clippy.exists() && !program_out_of_date(&clippy_stamp, &date) { + if cargo_clippy.exists() && !program_out_of_date(&clippy_stamp, date) { return cargo_clippy; } @@ -421,14 +421,14 @@ impl Config { DownloadSource::Dist, format!("rustfmt-{version}-{build}.tar.xz", build = host.triple), "rustfmt-preview", - &date, + date, "rustfmt", ); self.download_component( DownloadSource::Dist, format!("rustc-{version}-{build}.tar.xz", build = host.triple), "rustc", - &date, + date, "rustfmt", ); @@ -665,7 +665,7 @@ download-rustc = false } let llvm_root = self.ci_llvm_root(); let llvm_stamp = llvm_root.join(".llvm-stamp"); - let llvm_sha = detect_llvm_sha(&self, self.rust_info.is_managed_git_subrepository()); + let llvm_sha = detect_llvm_sha(self, self.rust_info.is_managed_git_subrepository()); let key = format!("{}{}", llvm_sha, self.llvm_assertions); if program_out_of_date(&llvm_stamp, &key) && !self.dry_run() { self.download_ci_llvm(&llvm_sha); @@ -685,11 +685,11 @@ download-rustc = false // rebuild. 
let now = filetime::FileTime::from_system_time(std::time::SystemTime::now()); let llvm_config = llvm_root.join("bin").join(exe("llvm-config", self.build)); - t!(filetime::set_file_times(&llvm_config, now, now)); + t!(filetime::set_file_times(llvm_config, now, now)); if self.should_fix_bins_and_dylibs() { let llvm_lib = llvm_root.join("lib"); - for entry in t!(fs::read_dir(&llvm_lib)) { + for entry in t!(fs::read_dir(llvm_lib)) { let lib = t!(entry).path(); if lib.extension().map_or(false, |ext| ext == "so") { self.fix_bin_or_dylib(&lib); diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index 121ed88c92fea..965d788bb8371 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -467,7 +467,7 @@ impl Build { } // Make a symbolic link so we can use a consistent directory in the documentation. - let build_triple = build.out.join(&build.build.triple); + let build_triple = build.out.join(build.build.triple); t!(fs::create_dir_all(&build_triple)); let host = build.out.join("host"); if host.is_symlink() { @@ -491,7 +491,7 @@ impl Build { /// /// `relative_path` should be relative to the root of the git repository, not an absolute path. pub(crate) fn update_submodule(&self, relative_path: &Path) { - if !self.config.submodules(&self.rust_info()) { + if !self.config.submodules(self.rust_info()) { return; } @@ -507,11 +507,11 @@ impl Build { // check_submodule let checked_out_hash = - output(Command::new("git").args(&["rev-parse", "HEAD"]).current_dir(&absolute_path)); + output(Command::new("git").args(["rev-parse", "HEAD"]).current_dir(&absolute_path)); // update_submodules let recorded = output( Command::new("git") - .args(&["ls-tree", "HEAD"]) + .args(["ls-tree", "HEAD"]) .arg(relative_path) .current_dir(&self.config.src), ); @@ -529,7 +529,7 @@ impl Build { println!("Updating submodule {}", relative_path.display()); self.run( Command::new("git") - .args(&["submodule", "-q", "sync"]) + .args(["submodule", "-q", "sync"]) .arg(relative_path) .current_dir(&self.config.src), ); @@ -560,7 +560,7 @@ impl Build { let branch = branch.strip_prefix("heads/").unwrap_or(&branch); git.arg("-c").arg(format!("branch.{branch}.remote=origin")); } - git.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]); + git.args(["submodule", "update", "--init", "--recursive", "--depth=1"]); if progress { git.arg("--progress"); } @@ -577,7 +577,7 @@ impl Build { let has_local_modifications = !self.run_cmd( BootstrapCommand::from( Command::new("git") - .args(&["diff-index", "--quiet", "HEAD"]) + .args(["diff-index", "--quiet", "HEAD"]) .current_dir(&absolute_path), ) .allow_failure() @@ -587,14 +587,14 @@ impl Build { }), ); if has_local_modifications { - self.run(Command::new("git").args(&["stash", "push"]).current_dir(&absolute_path)); + self.run(Command::new("git").args(["stash", "push"]).current_dir(&absolute_path)); } - self.run(Command::new("git").args(&["reset", "-q", "--hard"]).current_dir(&absolute_path)); - self.run(Command::new("git").args(&["clean", "-qdfx"]).current_dir(&absolute_path)); + self.run(Command::new("git").args(["reset", "-q", "--hard"]).current_dir(&absolute_path)); + self.run(Command::new("git").args(["clean", "-qdfx"]).current_dir(&absolute_path)); if has_local_modifications { - self.run(Command::new("git").args(&["stash", "pop"]).current_dir(absolute_path)); + self.run(Command::new("git").args(["stash", "pop"]).current_dir(absolute_path)); } } @@ -602,20 +602,20 @@ impl Build { /// This avoids contributors checking in a submodule change by accident. 
pub fn update_existing_submodules(&self) { // Avoid running git when there isn't a git checkout. - if !self.config.submodules(&self.rust_info()) { + if !self.config.submodules(self.rust_info()) { return; } let output = output( self.config .git() - .args(&["config", "--file"]) + .args(["config", "--file"]) .arg(&self.config.src.join(".gitmodules")) - .args(&["--get-regexp", "path"]), + .args(["--get-regexp", "path"]), ); for line in output.lines() { // Look for `submodule.$name.path = $path` // Sample output: `submodule.src/rust-installer.path src/tools/rust-installer` - let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap()); + let submodule = Path::new(line.split_once(' ').unwrap().1); // Don't update the submodule unless it's already been cloned. if GitInfo::new(false, submodule).is_managed_git_subrepository() { self.update_submodule(submodule); @@ -630,26 +630,26 @@ impl Build { } // Download rustfmt early so that it can be used in rust-analyzer configs. - let _ = &builder::Builder::new(&self).initial_rustfmt(); + let _ = &builder::Builder::new(self).initial_rustfmt(); // hardcoded subcommands match &self.config.cmd { Subcommand::Format { check } => { return core::build_steps::format::format( - &builder::Builder::new(&self), + &builder::Builder::new(self), *check, &self.config.paths, ); } Subcommand::Suggest { run } => { - return core::build_steps::suggest::suggest(&builder::Builder::new(&self), *run); + return core::build_steps::suggest::suggest(&builder::Builder::new(self), *run); } _ => (), } { - let builder = builder::Builder::new(&self); - if let Some(path) = builder.paths.get(0) { + let builder = builder::Builder::new(self); + if let Some(path) = builder.paths.first() { if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") { return; } @@ -659,14 +659,14 @@ impl Build { if !self.config.dry_run() { { self.config.dry_run = DryRun::SelfCheck; - let builder = builder::Builder::new(&self); + let builder = builder::Builder::new(self); builder.execute_cli(); } self.config.dry_run = DryRun::Disabled; - let builder = builder::Builder::new(&self); + let builder = builder::Builder::new(self); builder.execute_cli(); } else { - let builder = builder::Builder::new(&self); + let builder = builder::Builder::new(self); builder.execute_cli(); } @@ -936,7 +936,7 @@ impl Build { static SYSROOT_CACHE: OnceLock = OnceLock::new(); SYSROOT_CACHE.get_or_init(|| { let mut rustc = Command::new(&self.initial_rustc); - rustc.args(&["--print", "sysroot"]); + rustc.args(["--print", "sysroot"]); output(&mut rustc).trim().into() }) } @@ -1162,7 +1162,7 @@ impl Build { fn group(&self, msg: &str) -> Option { match self.config.dry_run { DryRun::SelfCheck => None, - DryRun::Disabled | DryRun::UserSelected => Some(gha::group(&msg)), + DryRun::Disabled | DryRun::UserSelected => Some(gha::group(msg)), } } @@ -1322,7 +1322,7 @@ impl Build { .target_config .get(&target) .and_then(|t| t.musl_root.as_ref()) - .or_else(|| self.config.musl_root.as_ref()) + .or(self.config.musl_root.as_ref()) .map(|p| &**p) } @@ -1511,11 +1511,11 @@ impl Build { /// Returns the `a.b.c` version that the given package is at. 
fn release_num(&self, package: &str) -> String { - let toml_file_name = self.src.join(&format!("src/tools/{package}/Cargo.toml")); - let toml = t!(fs::read_to_string(&toml_file_name)); + let toml_file_name = self.src.join(format!("src/tools/{package}/Cargo.toml")); + let toml = t!(fs::read_to_string(toml_file_name)); for line in toml.lines() { if let Some(stripped) = - line.strip_prefix("version = \"").and_then(|s| s.strip_suffix("\"")) + line.strip_prefix("version = \"").and_then(|s| s.strip_suffix('"')) { return stripped.to_owned(); } @@ -1618,7 +1618,7 @@ impl Build { if src == dst { return; } - let _ = fs::remove_file(&dst); + let _ = fs::remove_file(dst); let metadata = t!(src.symlink_metadata()); let mut src = src.to_path_buf(); if metadata.file_type().is_symlink() { @@ -1908,7 +1908,7 @@ pub fn prepare_behaviour_dump_dir(build: &Build) { let dump_path = build.out.join("bootstrap-shims-dump"); - let initialized = INITIALIZED.get().unwrap_or_else(|| &false); + let initialized = INITIALIZED.get().unwrap_or(&false); if !initialized { // clear old dumps if dump_path.exists() { diff --git a/src/bootstrap/src/utils/bin_helpers.rs b/src/bootstrap/src/utils/bin_helpers.rs index 9c4e039ea69dd..5fbbe0bde0e28 100644 --- a/src/bootstrap/src/utils/bin_helpers.rs +++ b/src/bootstrap/src/utils/bin_helpers.rs @@ -39,8 +39,7 @@ pub(crate) fn maybe_dump(dump_name: String, cmd: &Command) { if let Ok(dump_dir) = env::var("DUMP_BOOTSTRAP_SHIMS") { let dump_file = format!("{dump_dir}/{dump_name}"); - let mut file = - OpenOptions::new().create(true).write(true).append(true).open(&dump_file).unwrap(); + let mut file = OpenOptions::new().create(true).append(true).open(dump_file).unwrap(); let cmd_dump = format!("{:?}\n", cmd); let cmd_dump = cmd_dump.replace(&env::var("BUILD_OUT").unwrap(), "${BUILD_OUT}"); diff --git a/src/bootstrap/src/utils/cache.rs b/src/bootstrap/src/utils/cache.rs index 1b2aa9c234bba..2b86585a9d3c0 100644 --- a/src/bootstrap/src/utils/cache.rs +++ b/src/bootstrap/src/utils/cache.rs @@ -64,7 +64,7 @@ unsafe impl Sync for Interned {} impl fmt::Display for Interned { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let s: &str = &*self; + let s: &str = self; f.write_str(s) } } @@ -74,7 +74,7 @@ where Self: Deref, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let s: &U = &*self; + let s: &U = self; f.write_fmt(format_args!("{s:?}")) } } @@ -132,7 +132,7 @@ impl TyIntern { B: Eq + Hash + ToOwned + ?Sized, T: Borrow, { - if let Some(i) = self.set.get(&item) { + if let Some(i) = self.set.get(item) { return *i; } let item = item.to_owned(); @@ -233,7 +233,7 @@ impl Cache { let type_id = TypeId::of::(); let stepcache = cache .entry(type_id) - .or_insert_with(|| Box::new(HashMap::::new())) + .or_insert_with(|| Box::>::default()) .downcast_mut::>() .expect("invalid type mapped"); assert!(!stepcache.contains_key(&step), "processing {step:?} a second time"); @@ -245,7 +245,7 @@ impl Cache { let type_id = TypeId::of::(); let stepcache = cache .entry(type_id) - .or_insert_with(|| Box::new(HashMap::::new())) + .or_insert_with(|| Box::>::default()) .downcast_mut::>() .expect("invalid type mapped"); stepcache.get(step).cloned() diff --git a/src/bootstrap/src/utils/cc_detect.rs b/src/bootstrap/src/utils/cc_detect.rs index fb5b9d8c88f7d..ff2992bc896ec 100644 --- a/src/bootstrap/src/utils/cc_detect.rs +++ b/src/bootstrap/src/utils/cc_detect.rs @@ -35,7 +35,7 @@ use crate::{Build, CLang, GitRepo}; // try to infer the archiver path from the C compiler path. 
// In the future this logic should be replaced by calling into the `cc` crate. fn cc2ar(cc: &Path, target: TargetSelection) -> Option { - if let Some(ar) = env::var_os(format!("AR_{}", target.triple.replace("-", "_"))) { + if let Some(ar) = env::var_os(format!("AR_{}", target.triple.replace('-', "_"))) { Some(PathBuf::from(ar)) } else if let Some(ar) = env::var_os("AR") { Some(PathBuf::from(ar)) @@ -172,11 +172,9 @@ fn default_compiler( // When compiling for android we may have the NDK configured in the // config.toml in which case we look there. Otherwise the default // compiler already takes into account the triple in question. - t if t.contains("android") => build - .config - .android_ndk - .as_ref() - .map(|ndk| ndk_compiler(compiler, &*target.triple, ndk)), + t if t.contains("android") => { + build.config.android_ndk.as_ref().map(|ndk| ndk_compiler(compiler, &target.triple, ndk)) + } // The default gcc version from OpenBSD may be too old, try using egcc, // which is a gcc version from ports, if this is the case. @@ -230,7 +228,7 @@ fn default_compiler( } pub(crate) fn ndk_compiler(compiler: Language, triple: &str, ndk: &Path) -> PathBuf { - let mut triple_iter = triple.split("-"); + let mut triple_iter = triple.split('-'); let triple_translated = if let Some(arch) = triple_iter.next() { let arch_new = match arch { "arm" | "armv7" | "armv7neon" | "thumbv7" | "thumbv7neon" => "armv7a", diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs index fe5b5fe317565..b813d82ca6f53 100644 --- a/src/bootstrap/src/utils/change_tracker.rs +++ b/src/bootstrap/src/utils/change_tracker.rs @@ -2,6 +2,8 @@ //! with the goal of keeping developers synchronized with important modifications in //! the bootstrap. +use std::fmt::Display; + #[cfg(test)] mod tests; @@ -24,11 +26,11 @@ pub enum ChangeSeverity { Warning, } -impl ToString for ChangeSeverity { - fn to_string(&self) -> String { +impl Display for ChangeSeverity { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - ChangeSeverity::Info => "INFO".to_string(), - ChangeSeverity::Warning => "WARNING".to_string(), + ChangeSeverity::Info => write!(f, "INFO"), + ChangeSeverity::Warning => write!(f, "WARNING"), } } } @@ -40,7 +42,7 @@ pub fn find_recent_config_change_ids(current_id: usize) -> Vec { // older one); otherwise, return the full list (assuming the user provided // the incorrect change-id by accident). if let Some(config) = CONFIG_CHANGE_HISTORY.iter().max_by_key(|config| config.change_id) { - if ¤t_id > &config.change_id { + if current_id > config.change_id { return Vec::new(); } } diff --git a/src/bootstrap/src/utils/channel.rs b/src/bootstrap/src/utils/channel.rs index e59d7f22aaaea..88988c3391617 100644 --- a/src/bootstrap/src/utils/channel.rs +++ b/src/bootstrap/src/utils/channel.rs @@ -97,7 +97,7 @@ impl GitInfo { pub fn version(&self, build: &Build, num: &str) -> String { let mut version = build.release(num); - if let Some(ref inner) = self.info() { + if let Some(inner) = self.info() { version.push_str(" ("); version.push_str(&inner.short_sha); version.push(' '); @@ -150,7 +150,7 @@ pub fn read_commit_info_file(root: &Path) -> Option { /// root. 
pub fn write_commit_info_file(root: &Path, info: &Info) { let commit_info = format!("{}\n{}\n{}\n", info.sha, info.short_sha, info.commit_date); - t!(fs::write(root.join("git-commit-info"), &commit_info)); + t!(fs::write(root.join("git-commit-info"), commit_info)); } /// Write the commit hash to the `git-commit-hash` file given the project root. diff --git a/src/bootstrap/src/utils/helpers.rs b/src/bootstrap/src/utils/helpers.rs index d1f713af91709..a40ee18900182 100644 --- a/src/bootstrap/src/utils/helpers.rs +++ b/src/bootstrap/src/utils/helpers.rs @@ -425,7 +425,7 @@ pub fn get_clang_cl_resource_dir(clang_cl_path: &str) -> PathBuf { // Similar to how LLVM does it, to find clang's library runtime directory: // - we ask `clang-cl` to locate the `clang_rt.builtins` lib. let mut builtins_locator = Command::new(clang_cl_path); - builtins_locator.args(&["/clang:-print-libgcc-file-name", "/clang:--rtlib=compiler-rt"]); + builtins_locator.args(["/clang:-print-libgcc-file-name", "/clang:--rtlib=compiler-rt"]); let clang_rt_builtins = output(&mut builtins_locator); let clang_rt_builtins = Path::new(clang_rt_builtins.trim()); @@ -475,7 +475,7 @@ pub fn dir_is_empty(dir: &Path) -> bool { /// the "y" part from the string. pub fn extract_beta_rev(version: &str) -> Option { let parts = version.splitn(2, "-beta.").collect::>(); - let count = parts.get(1).and_then(|s| s.find(' ').map(|p| (&s[..p]).to_string())); + let count = parts.get(1).and_then(|s| s.find(' ').map(|p| s[..p].to_string())); count } @@ -559,11 +559,10 @@ pub fn check_cfg_arg(name: &str, values: Option<&[&str]>) -> String { // ',values("tvos","watchos")' or '' (nothing) when there are no values. let next = match values { Some(values) => { - let mut tmp = - values.iter().map(|val| [",", "\"", val, "\""]).flatten().collect::(); + let mut tmp = values.iter().flat_map(|val| [",", "\"", val, "\""]).collect::(); tmp.insert_str(1, "values("); - tmp.push_str(")"); + tmp.push(')'); tmp } None => "".to_string(), diff --git a/src/bootstrap/src/utils/render_tests.rs b/src/bootstrap/src/utils/render_tests.rs index bff47f65c51f6..bfbb53f8c8121 100644 --- a/src/bootstrap/src/utils/render_tests.rs +++ b/src/bootstrap/src/utils/render_tests.rs @@ -15,10 +15,10 @@ use termcolor::{Color, ColorSpec, WriteColor}; const TERSE_TESTS_PER_LINE: usize = 88; pub(crate) fn add_flags_and_try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool { - if cmd.get_args().position(|arg| arg == "--").is_none() { + if !cmd.get_args().any(|arg| arg == "--") { cmd.arg("--"); } - cmd.args(&["-Z", "unstable-options", "--format", "json"]); + cmd.args(["-Z", "unstable-options", "--format", "json"]); try_run_tests(builder, cmd, false) } @@ -303,19 +303,19 @@ impl Outcome<'_> { fn write_short(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> { match self { Outcome::Ok => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?; + writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?; write!(writer, ".")?; } Outcome::BenchOk => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?; + writer.set_color(ColorSpec::new().set_fg(Some(Color::Cyan)))?; write!(writer, "b")?; } Outcome::Failed => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?; + writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?; write!(writer, "F")?; } Outcome::Ignored { .. 
} => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?; + writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?; write!(writer, "i")?; } } @@ -325,19 +325,19 @@ impl Outcome<'_> { fn write_long(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> { match self { Outcome::Ok => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?; + writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?; write!(writer, "ok")?; } Outcome::BenchOk => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?; + writer.set_color(ColorSpec::new().set_fg(Some(Color::Cyan)))?; write!(writer, "benchmarked")?; } Outcome::Failed => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?; + writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?; write!(writer, "FAILED")?; } Outcome::Ignored { reason } => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?; + writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?; write!(writer, "ignored")?; if let Some(reason) = reason { write!(writer, ", {reason}")?; diff --git a/src/bootstrap/src/utils/tarball.rs b/src/bootstrap/src/utils/tarball.rs index a8393f88f8a14..573d923ed8fdd 100644 --- a/src/bootstrap/src/utils/tarball.rs +++ b/src/bootstrap/src/utils/tarball.rs @@ -226,8 +226,7 @@ impl<'a> Tarball<'a> { if self.include_target_in_component_name { component_name.push('-'); component_name.push_str( - &self - .target + self.target .as_ref() .expect("include_target_in_component_name used in a targetless tarball"), ); @@ -326,7 +325,7 @@ impl<'a> Tarball<'a> { assert!(!formats.is_empty(), "dist.compression-formats can't be empty"); cmd.arg("--compression-formats").arg(formats.join(",")); } - cmd.args(&["--compression-profile", &self.builder.config.dist_compression_profile]); + cmd.args(["--compression-profile", &self.builder.config.dist_compression_profile]); self.builder.run(&mut cmd); // Ensure there are no symbolic links in the tarball. In particular, @@ -347,7 +346,7 @@ impl<'a> Tarball<'a> { .config .dist_compression_formats .as_ref() - .and_then(|formats| formats.get(0)) + .and_then(|formats| formats.first()) .map(|s| s.as_str()) .unwrap_or("gz"); diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index ed120ee877a53..f3e2a940f36cb 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -1936,7 +1936,7 @@ impl<'test> TestCx<'test> { fn document(&self, out_dir: &Path) -> ProcRes { if self.props.build_aux_docs { for rel_ab in &self.props.aux_builds { - let aux_testpaths = self.compute_aux_test_paths(rel_ab); + let aux_testpaths = self.compute_aux_test_paths(&self.testpaths, rel_ab); let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision, self.config); let aux_cx = TestCx { @@ -2092,24 +2092,18 @@ impl<'test> TestCx<'test> { proc_res } - /// For each `aux-build: foo/bar` annotation, we check to find the - /// file in an `auxiliary` directory relative to the test itself. - fn compute_aux_test_paths(&self, rel_ab: &str) -> TestPaths { - let test_ab = self - .testpaths - .file - .parent() - .expect("test file path has no parent") - .join("auxiliary") - .join(rel_ab); + /// For each `aux-build: foo/bar` annotation, we check to find the file in an `auxiliary` + /// directory relative to the test itself (not any intermediate auxiliaries). 
+ fn compute_aux_test_paths(&self, of: &TestPaths, rel_ab: &str) -> TestPaths { + let test_ab = + of.file.parent().expect("test file path has no parent").join("auxiliary").join(rel_ab); if !test_ab.exists() { self.fatal(&format!("aux-build `{}` source not found", test_ab.display())) } TestPaths { file: test_ab, - relative_dir: self - .testpaths + relative_dir: of .relative_dir .join(self.output_testname_unique()) .join("auxiliary") @@ -2135,7 +2129,7 @@ impl<'test> TestCx<'test> { self.config.target.contains("vxworks") && !self.is_vxworks_pure_static() } - fn build_all_auxiliary(&self, rustc: &mut Command) -> PathBuf { + fn aux_output_dir(&self) -> PathBuf { let aux_dir = self.aux_output_dir_name(); if !self.props.aux_builds.is_empty() { @@ -2143,22 +2137,26 @@ impl<'test> TestCx<'test> { create_dir_all(&aux_dir).unwrap(); } + aux_dir + } + + fn build_all_auxiliary(&self, of: &TestPaths, aux_dir: &Path, rustc: &mut Command) { for rel_ab in &self.props.aux_builds { - self.build_auxiliary(rel_ab, &aux_dir); + self.build_auxiliary(of, rel_ab, &aux_dir); } for (aux_name, aux_path) in &self.props.aux_crates { - let is_dylib = self.build_auxiliary(&aux_path, &aux_dir); + let is_dylib = self.build_auxiliary(of, &aux_path, &aux_dir); let lib_name = get_lib_name(&aux_path.trim_end_matches(".rs").replace('-', "_"), is_dylib); rustc.arg("--extern").arg(format!("{}={}/{}", aux_name, aux_dir.display(), lib_name)); } - - aux_dir } fn compose_and_run_compiler(&self, mut rustc: Command, input: Option) -> ProcRes { - let aux_dir = self.build_all_auxiliary(&mut rustc); + let aux_dir = self.aux_output_dir(); + self.build_all_auxiliary(&self.testpaths, &aux_dir, &mut rustc); + self.props.unset_rustc_env.iter().fold(&mut rustc, Command::env_remove); rustc.envs(self.props.rustc_env.clone()); self.compose_and_run( @@ -2172,10 +2170,10 @@ impl<'test> TestCx<'test> { /// Builds an aux dependency. /// /// Returns whether or not it is a dylib. 
- fn build_auxiliary(&self, source_path: &str, aux_dir: &Path) -> bool { - let aux_testpaths = self.compute_aux_test_paths(source_path); + fn build_auxiliary(&self, of: &TestPaths, source_path: &str, aux_dir: &Path) -> bool { + let aux_testpaths = self.compute_aux_test_paths(of, source_path); let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision, self.config); - let aux_output = TargetLocation::ThisDirectory(self.aux_output_dir_name()); + let aux_output = TargetLocation::ThisDirectory(aux_dir.to_path_buf()); let aux_cx = TestCx { config: self.config, props: &aux_props, @@ -2193,6 +2191,7 @@ impl<'test> TestCx<'test> { LinkToAux::No, Vec::new(), ); + aux_cx.build_all_auxiliary(of, aux_dir, &mut aux_rustc); for key in &aux_props.unset_rustc_env { aux_rustc.env_remove(key); @@ -3034,7 +3033,8 @@ impl<'test> TestCx<'test> { LinkToAux::Yes, Vec::new(), ); - new_rustdoc.build_all_auxiliary(&mut rustc); + let aux_dir = new_rustdoc.aux_output_dir(); + new_rustdoc.build_all_auxiliary(&new_rustdoc.testpaths, &aux_dir, &mut rustc); let proc_res = new_rustdoc.document(&compare_dir); if !proc_res.status.success() { diff --git a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.stderr b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.stderr index 0d61e15b0f142..7844e60c654a4 100644 --- a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.stderr +++ b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.stderr @@ -8,7 +8,7 @@ LL | arg: NotIntoDiagnosticArg, | ^^^^^^^^^^^^^^^^^^^^ the trait `IntoDiagnosticArg` is not implemented for `NotIntoDiagnosticArg` | = help: normalized in stderr -note: required by a bound in `rustc_errors::diagnostic::>::arg` +note: required by a bound in `DiagnosticBuilder::<'a, G>::arg` --> $COMPILER_DIR/rustc_errors/src/diagnostic.rs:LL:CC = note: this error originates in the macro `with_fn` (in Nightly builds, run with -Z macro-backtrace for more info) @@ -22,7 +22,7 @@ LL | arg: NotIntoDiagnosticArg, | ^^^^^^^^^^^^^^^^^^^^ the trait `IntoDiagnosticArg` is not implemented for `NotIntoDiagnosticArg` | = help: normalized in stderr -note: required by a bound in `rustc_errors::diagnostic::>::arg` +note: required by a bound in `DiagnosticBuilder::<'a, G>::arg` --> $COMPILER_DIR/rustc_errors/src/diagnostic.rs:LL:CC = note: this error originates in the macro `with_fn` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.stderr b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.stderr index ddbb3c6df268c..8732629db47ff 100644 --- a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.stderr +++ b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.stderr @@ -628,7 +628,7 @@ LL | other: Hello, | ^^^^^ the trait `IntoDiagnosticArg` is not implemented for `Hello` | = help: normalized in stderr -note: required by a bound in `rustc_errors::diagnostic::>::arg` +note: required by a bound in `DiagnosticBuilder::<'a, G>::arg` --> $COMPILER_DIR/rustc_errors/src/diagnostic.rs:LL:CC = note: this error originates in the macro `with_fn` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/tests/ui/async-await/async-fn/impl-header.rs b/tests/ui/async-await/async-fn/impl-header.rs index b9ae90292bb2c..9af5f1f42a957 100644 --- a/tests/ui/async-await/async-fn/impl-header.rs +++ b/tests/ui/async-await/async-fn/impl-header.rs @@ -3,6 +3,10 @@ struct F; impl async Fn<()> for F {} -//~^ 
ERROR expected type, found keyword `async` +//~^ ERROR `async` trait implementations are unsupported +//~| ERROR the precise format of `Fn`-family traits' type parameters is subject to change +//~| ERROR manual implementations of `Fn` are experimental +//~| ERROR expected a `FnMut()` closure, found `F` +//~| ERROR not all trait items implemented, missing: `call` fn main() {} diff --git a/tests/ui/async-await/async-fn/impl-header.stderr b/tests/ui/async-await/async-fn/impl-header.stderr index 02cb432624274..2fb862af04e49 100644 --- a/tests/ui/async-await/async-fn/impl-header.stderr +++ b/tests/ui/async-await/async-fn/impl-header.stderr @@ -1,8 +1,47 @@ -error: expected type, found keyword `async` +error: `async` trait implementations are unsupported --> $DIR/impl-header.rs:5:6 | LL | impl async Fn<()> for F {} - | ^^^^^ expected type + | ^^^^^ -error: aborting due to 1 previous error +error[E0658]: the precise format of `Fn`-family traits' type parameters is subject to change + --> $DIR/impl-header.rs:5:12 + | +LL | impl async Fn<()> for F {} + | ^^^^^^ + | + = note: see issue #29625 for more information + = help: add `#![feature(unboxed_closures)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0183]: manual implementations of `Fn` are experimental + --> $DIR/impl-header.rs:5:12 + | +LL | impl async Fn<()> for F {} + | ^^^^^^ manual implementations of `Fn` are experimental + | + = help: add `#![feature(unboxed_closures)]` to the crate attributes to enable + +error[E0277]: expected a `FnMut()` closure, found `F` + --> $DIR/impl-header.rs:5:23 + | +LL | impl async Fn<()> for F {} + | ^ expected an `FnMut()` closure, found `F` + | + = help: the trait `FnMut<()>` is not implemented for `F` + = note: wrap the `F` in a closure with no arguments: `|| { /* code */ }` +note: required by a bound in `Fn` + --> $SRC_DIR/core/src/ops/function.rs:LL:COL + +error[E0046]: not all trait items implemented, missing: `call` + --> $DIR/impl-header.rs:5:1 + | +LL | impl async Fn<()> for F {} + | ^^^^^^^^^^^^^^^^^^^^^^^ missing `call` in implementation + | + = help: implement the missing item: `fn call(&self, _: ()) -> >::Output { todo!() }` + +error: aborting due to 5 previous errors +Some errors have detailed explanations: E0046, E0183, E0277, E0658. +For more information about an error, try `rustc --explain E0046`. diff --git a/tests/ui/async-await/async-fn/mbe-async-trait-bound-theoretical-regression.rs b/tests/ui/async-await/async-fn/mbe-async-trait-bound-theoretical-regression.rs new file mode 100644 index 0000000000000..abc429772fdc8 --- /dev/null +++ b/tests/ui/async-await/async-fn/mbe-async-trait-bound-theoretical-regression.rs @@ -0,0 +1,21 @@ +// Demonstrates and records a theoretical regressions / breaking changes caused by the +// introduction of async trait bounds. + +// Setting the edition to 2018 since we don't regress `demo! { dyn async }` in Rust <2018. +//@ edition:2018 + +macro_rules! demo { + ($ty:ty) => { compile_error!("ty"); }; + //~^ ERROR ty + //~| ERROR ty + (impl $c:ident Trait) => {}; + (dyn $c:ident Trait) => {}; +} + +demo! { impl async Trait } +//~^ ERROR async closures are unstable + +demo! 
{ dyn async Trait } +//~^ ERROR async closures are unstable + +fn main() {} diff --git a/tests/ui/async-await/async-fn/mbe-async-trait-bound-theoretical-regression.stderr b/tests/ui/async-await/async-fn/mbe-async-trait-bound-theoretical-regression.stderr new file mode 100644 index 0000000000000..13b8e72b49dc6 --- /dev/null +++ b/tests/ui/async-await/async-fn/mbe-async-trait-bound-theoretical-regression.stderr @@ -0,0 +1,47 @@ +error: ty + --> $DIR/mbe-async-trait-bound-theoretical-regression.rs:8:19 + | +LL | ($ty:ty) => { compile_error!("ty"); }; + | ^^^^^^^^^^^^^^^^^^^^ +... +LL | demo! { impl async Trait } + | -------------------------- in this macro invocation + | + = note: this error originates in the macro `demo` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: ty + --> $DIR/mbe-async-trait-bound-theoretical-regression.rs:8:19 + | +LL | ($ty:ty) => { compile_error!("ty"); }; + | ^^^^^^^^^^^^^^^^^^^^ +... +LL | demo! { dyn async Trait } + | ------------------------- in this macro invocation + | + = note: this error originates in the macro `demo` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0658]: async closures are unstable + --> $DIR/mbe-async-trait-bound-theoretical-regression.rs:15:14 + | +LL | demo! { impl async Trait } + | ^^^^^ + | + = note: see issue #62290 for more information + = help: add `#![feature(async_closure)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + = help: to use an async block, remove the `||`: `async {` + +error[E0658]: async closures are unstable + --> $DIR/mbe-async-trait-bound-theoretical-regression.rs:18:13 + | +LL | demo! { dyn async Trait } + | ^^^^^ + | + = note: see issue #62290 for more information + = help: add `#![feature(async_closure)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + = help: to use an async block, remove the `||`: `async {` + +error: aborting due to 4 previous errors + +For more information about this error, try `rustc --explain E0658`. diff --git a/tests/ui/async-await/async-fn/trait-bounds-in-macro.rs b/tests/ui/async-await/async-fn/trait-bounds-in-macro.rs new file mode 100644 index 0000000000000..329a1528e8b4f --- /dev/null +++ b/tests/ui/async-await/async-fn/trait-bounds-in-macro.rs @@ -0,0 +1,12 @@ +//@ edition: 2021 + +macro_rules! x { + ($x:item) => {} +} + +x! { + async fn foo() -> impl async Fn() { } + //~^ ERROR async closures are unstable +} + +fn main() {} diff --git a/tests/ui/async-await/async-fn/trait-bounds-in-macro.stderr b/tests/ui/async-await/async-fn/trait-bounds-in-macro.stderr new file mode 100644 index 0000000000000..f68c09737dbc7 --- /dev/null +++ b/tests/ui/async-await/async-fn/trait-bounds-in-macro.stderr @@ -0,0 +1,14 @@ +error[E0658]: async closures are unstable + --> $DIR/trait-bounds-in-macro.rs:8:28 + | +LL | async fn foo() -> impl async Fn() { } + | ^^^^^ + | + = note: see issue #62290 for more information + = help: add `#![feature(async_closure)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + = help: to use an async block, remove the `||`: `async {` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0658`. 
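
The two new `mbe-async-trait-bound-theoretical-regression` / `trait-bounds-in-macro` tests above pin down why growing the type grammar is a (theoretical) breaking change for `macro_rules!` users: arms are tried top to bottom, and once a token sequence parses as a `ty` fragment, a leading `$ty:ty` arm claims it before any later, more specific arm is considered. A minimal sketch of that ordering effect, using only stable syntax (an illustration, not part of this PR):

    // Arms are tried in order; `impl Clone` already parses as a type, so the
    // `$ty:ty` arm wins even though the second arm would also match these tokens.
    // With async trait bounds, `impl async Trait` starts to parse as a type too,
    // which is exactly the shift the tests above record.
    macro_rules! demo {
        ($ty:ty) => { "matched by the `$ty:ty` arm" };
        (impl $t:ident) => { "matched by the literal-token arm" };
    }

    fn main() {
        println!("{}", demo!(impl Clone)); // prints: matched by the `$ty:ty` arm
    }
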
diff --git a/tests/ui/coherence/occurs-check/associated-type.next.stderr b/tests/ui/coherence/occurs-check/associated-type.next.stderr index e405f389f5e4b..6119e6149a710 100644 --- a/tests/ui/coherence/occurs-check/associated-type.next.stderr +++ b/tests/ui/coherence/occurs-check/associated-type.next.stderr @@ -1,11 +1,11 @@ -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ 
associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } error[E0119]: conflicting implementations of trait `Overlap fn(&'a (), ())>` for type `for<'a> fn(&'a (), ())` --> $DIR/associated-type.rs:31:1 | diff --git a/tests/ui/coherence/occurs-check/associated-type.old.stderr b/tests/ui/coherence/occurs-check/associated-type.old.stderr index 4a67a777f1051..655809b827ec7 100644 --- a/tests/ui/coherence/occurs-check/associated-type.old.stderr +++ b/tests/ui/coherence/occurs-check/associated-type.old.stderr @@ -1,11 +1,11 @@ -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) 
}) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) -WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }) +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } +WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) } error[E0119]: conflicting implementations of trait `Overlap fn(&'a (), _)>` for type `for<'a> fn(&'a (), _)` --> $DIR/associated-type.rs:31:1 | diff --git a/tests/ui/compiletest-self-test/aux-aux.rs b/tests/ui/compiletest-self-test/aux-aux.rs new file mode 100644 index 0000000000000..c87905ff77590 --- /dev/null +++ 
b/tests/ui/compiletest-self-test/aux-aux.rs @@ -0,0 +1,14 @@ +//@ aux-crate: aux_aux_foo=aux_aux_foo.rs +//@ aux-crate: aux_aux_bar=aux_aux_bar.rs +//@ edition: 2021 +//@ compile-flags: --crate-type lib +//@ check-pass + +use aux_aux_foo::Bar as IndirectBar; +use aux_aux_bar::Bar as DirectBar; + +fn foo(x: IndirectBar) {} + +fn main() { + foo(DirectBar); +} diff --git a/tests/ui/compiletest-self-test/auxiliary/aux_aux_bar.rs b/tests/ui/compiletest-self-test/auxiliary/aux_aux_bar.rs new file mode 100644 index 0000000000000..eefcc270c38bb --- /dev/null +++ b/tests/ui/compiletest-self-test/auxiliary/aux_aux_bar.rs @@ -0,0 +1,3 @@ +//@ edition: 2021 + +pub struct Bar; diff --git a/tests/ui/compiletest-self-test/auxiliary/aux_aux_foo.rs b/tests/ui/compiletest-self-test/auxiliary/aux_aux_foo.rs new file mode 100644 index 0000000000000..f96c6bb0b2788 --- /dev/null +++ b/tests/ui/compiletest-self-test/auxiliary/aux_aux_foo.rs @@ -0,0 +1,4 @@ +//@ aux-crate: aux_aux_bar=aux_aux_bar.rs +//@ edition: 2021 + +pub use aux_aux_bar::Bar; diff --git a/tests/ui/parser/bad-recover-kw-after-impl.rs b/tests/ui/parser/bad-recover-kw-after-impl.rs index 23abceaf49376..15c0b377c8ae5 100644 --- a/tests/ui/parser/bad-recover-kw-after-impl.rs +++ b/tests/ui/parser/bad-recover-kw-after-impl.rs @@ -1,4 +1,4 @@ -//@ check-pass +// This is just `mbe-async-trait-bound-theoretical-regression.rs` in practice. //@ edition:2021 // for the `impl` + keyword test @@ -11,5 +11,7 @@ macro_rules! impl_primitive { } impl_primitive!(impl async); +//~^ ERROR expected identifier, found `` +//~| ERROR async closures are unstable fn main() {} diff --git a/tests/ui/parser/bad-recover-kw-after-impl.stderr b/tests/ui/parser/bad-recover-kw-after-impl.stderr new file mode 100644 index 0000000000000..f617cf6549886 --- /dev/null +++ b/tests/ui/parser/bad-recover-kw-after-impl.stderr @@ -0,0 +1,23 @@ +error: expected identifier, found `` + --> $DIR/bad-recover-kw-after-impl.rs:13:22 + | +LL | ($ty:ty) => { + | ------ while parsing argument for this `ty` macro fragment +... +LL | impl_primitive!(impl async); + | ^^^^^ expected identifier + +error[E0658]: async closures are unstable + --> $DIR/bad-recover-kw-after-impl.rs:13:22 + | +LL | impl_primitive!(impl async); + | ^^^^^ + | + = note: see issue #62290 for more information + = help: add `#![feature(async_closure)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + = help: to use an async block, remove the `||`: `async {` + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0658`. 
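
The `compiletest-self-test/aux-aux.rs` test added above exercises the earlier `build_all_auxiliary`/`build_auxiliary` change: an auxiliary crate may now carry its own `//@ aux-crate:` directives, and those are resolved against the original test's `auxiliary` directory, so the test and the intermediate crate see the same build of `aux_aux_bar` and its `Bar` is one type. Loosely restated with plain modules instead of aux crates (an illustrative sketch, not part of this PR), the property the check-pass test relies on is that a re-export names the very same type:

    // `aux_aux_foo` re-exports `Bar` from `aux_aux_bar`; a value obtained through
    // the direct path therefore satisfies a signature written against the
    // indirect path, because both paths name one type.
    mod aux_aux_bar {
        pub struct Bar;
    }

    mod aux_aux_foo {
        pub use super::aux_aux_bar::Bar;
    }

    use aux_aux_foo::Bar as IndirectBar;
    use aux_aux_bar::Bar as DirectBar;

    fn foo(_: IndirectBar) {}

    fn main() {
        foo(DirectBar);
    }
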
diff --git a/tests/ui/parser/trait-object-delimiters.rs b/tests/ui/parser/trait-object-delimiters.rs
index 84cd16c279684..d6bc629aa114b 100644
--- a/tests/ui/parser/trait-object-delimiters.rs
+++ b/tests/ui/parser/trait-object-delimiters.rs
@@ -8,7 +8,7 @@ fn foo2(_: &dyn (Drop + AsRef)) {} //~ ERROR incorrect parentheses around t
 fn foo2_no_space(_: &dyn(Drop + AsRef)) {} //~ ERROR incorrect parentheses around trait bounds
 
 fn foo3(_: &dyn {Drop + AsRef}) {} //~ ERROR expected parameter name, found `{`
-//~^ ERROR expected one of `!`, `(`, `)`, `*`, `,`, `?`, `const`, `for`, `~`, lifetime, or path, found `{`
+//~^ ERROR expected one of `!`, `(`, `)`, `*`, `,`, `?`, `async`, `const`, `for`, `~`, lifetime, or path, found `{`
 //~| ERROR at least one trait is required for an object type
 
 fn foo4(_: &dyn >) {} //~ ERROR expected identifier, found `<`
diff --git a/tests/ui/parser/trait-object-delimiters.stderr b/tests/ui/parser/trait-object-delimiters.stderr
index 2ddb734cee067..2b1f8df991f4b 100644
--- a/tests/ui/parser/trait-object-delimiters.stderr
+++ b/tests/ui/parser/trait-object-delimiters.stderr
@@ -34,11 +34,11 @@ error: expected parameter name, found `{`
 LL | fn foo3(_: &dyn {Drop + AsRef}) {}
    |                 ^ expected parameter name
 
-error: expected one of `!`, `(`, `)`, `*`, `,`, `?`, `const`, `for`, `~`, lifetime, or path, found `{`
+error: expected one of `!`, `(`, `)`, `*`, `,`, `?`, `async`, `const`, `for`, `~`, lifetime, or path, found `{`
   --> $DIR/trait-object-delimiters.rs:10:17
    |
 LL | fn foo3(_: &dyn {Drop + AsRef}) {}
-   |                -^ expected one of 11 possible tokens
+   |                -^ expected one of 12 possible tokens
    |                |
    |                help: missing `,`
 
diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.rs
index 99806922ba556..3dcdb0cad9497 100644
--- a/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.rs
+++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.rs
@@ -6,15 +6,16 @@
 
 macro_rules! demo {
     ($ty:ty) => { compile_error!("ty"); };
-    (impl $c:ident) => {};
-    (dyn $c:ident) => {};
+    //~^ ERROR ty
+    //~| ERROR ty
+    (impl $c:ident Trait) => {};
+    (dyn $c:ident Trait) => {};
 }
 
-demo! { impl const }
-//~^ ERROR expected identifier, found ``
+demo! { impl const Trait }
+//~^ ERROR const trait impls are experimental
 
-demo! { dyn const }
+demo! { dyn const Trait }
 //~^ ERROR const trait impls are experimental
-//~| ERROR expected identifier, found ``
 
 fn main() {}
diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.stderr
index fd9184b9dff32..f4b401b738699 100644
--- a/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.stderr
+++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.stderr
@@ -1,31 +1,45 @@
-error: expected identifier, found ``
-  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:13:14
+error: ty
+  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:8:19
    |
 LL |     ($ty:ty) => { compile_error!("ty"); };
-   |      ------ while parsing argument for this `ty` macro fragment
+   |                   ^^^^^^^^^^^^^^^^^^^^
 ...
-LL | demo! { impl const }
-   |              ^^^^^ expected identifier
+LL | demo! { impl const Trait }
+   | -------------------------- in this macro invocation
+   |
+   = note: this error originates in the macro `demo` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error: expected identifier, found ``
-  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:16:13
+error: ty
+  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:8:19
    |
 LL |     ($ty:ty) => { compile_error!("ty"); };
-   |      ------ while parsing argument for this `ty` macro fragment
+   |                   ^^^^^^^^^^^^^^^^^^^^
 ...
-LL | demo! { dyn const }
-   |             ^^^^^ expected identifier
+LL | demo! { dyn const Trait }
+   | ------------------------- in this macro invocation
+   |
+   = note: this error originates in the macro `demo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error[E0658]: const trait impls are experimental
+  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:15:14
+   |
+LL | demo! { impl const Trait }
+   |              ^^^^^
+   |
+   = note: see issue #67792 for more information
+   = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
 
 error[E0658]: const trait impls are experimental
-  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:16:13
+  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:18:13
    |
-LL | demo! { dyn const }
+LL | demo! { dyn const Trait }
    |             ^^^^^
    |
    = note: see issue #67792 for more information
    = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable
    = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
 
-error: aborting due to 3 previous errors
+error: aborting due to 4 previous errors
 
 For more information about this error, try `rustc --explain E0658`.
diff --git a/tests/ui/traits/next-solver/issue-118950-root-region.stderr b/tests/ui/traits/next-solver/issue-118950-root-region.stderr
index f4638348358fc..e33320ed9e60b 100644
--- a/tests/ui/traits/next-solver/issue-118950-root-region.stderr
+++ b/tests/ui/traits/next-solver/issue-118950-root-region.stderr
@@ -13,14 +13,14 @@ LL | #![feature(lazy_type_alias)]
    = note: see issue #112792 for more information
    = note: `#[warn(incomplete_features)]` on by default
 
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
 error[E0119]: conflicting implementations of trait `Overlap` for type `fn(_)`
   --> $DIR/issue-118950-root-region.rs:19:1
    |