Auto merge of #130016 - matthiaskrgr:rollup-fopistw, r=matthiaskrgr
Rollup of 6 pull requests

Successful merges:

 - #129021 (Check WF of source type's signature on fn pointer cast)
 - #129781 (Make `./x.py <cmd> compiler/<crate>` aware of the crate's features)
 - #129963 (Inaccurate `{Path,OsStr}::to_string_lossy()` documentation)
 - #129969 (Make `Ty::boxed_ty` return an `Option`)
 - #129995 (Remove wasm32-wasip2's tier 2 status from release notes)
 - #130013 (coverage: Count await when the Future is immediately ready)

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Sep 6, 2024
2 parents d678b81 + 11d5614 commit a3af208
Showing 47 changed files with 395 additions and 138 deletions.
1 change: 0 additions & 1 deletion RELEASES.md
@@ -34,7 +34,6 @@ Compiler
- [Add Tier 3 `std` Xtensa targets:](https://github.com/rust-lang/rust/pull/126380/) `xtensa-esp32-espidf`, `xtensa-esp32s2-espidf`, `xtensa-esp32s3-espidf`
- [Add Tier 3 i686 Redox OS target:](https://github.com/rust-lang/rust/pull/126192/) `i686-unknown-redox`
- [Promote `arm64ec-pc-windows-msvc` to Tier 2.](https://github.com/rust-lang/rust/pull/126039/)
- [Promote `wasm32-wasip2` to Tier 2.](https://github.com/rust-lang/rust/pull/126967/)
- [Promote `loongarch64-unknown-linux-musl` to Tier 2 with host tools.](https://github.com/rust-lang/rust/pull/126298/)
- [Enable full tools and profiler for LoongArch Linux targets.](https://github.com/rust-lang/rust/pull/127078/)
- [Unconditionally warn on usage of `wasm32-wasi`.](https://github.com/rust-lang/rust/pull/126662/) (see compatibility note below)
3 changes: 2 additions & 1 deletion compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs
@@ -662,9 +662,10 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, '_, 'tcx> {
// `&dyn Trait`
ty::Ref(_, ty, _) if ty.is_trait() => true,
// `Box<dyn Trait>`
_ if ty.is_box() && ty.boxed_ty().is_trait() => {
_ if ty.boxed_ty().is_some_and(Ty::is_trait) => {
true
}

// `dyn Trait`
_ if ty.is_trait() => true,
// Anything else.
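The guard rewrite above is the pattern repeated throughout this rollup: a boolean pre-check (`ty.is_box()`) followed by a panicking accessor (`ty.boxed_ty()`) becomes a single fallible lookup combined with `Option::is_some_and`. A standalone sketch of the same shape using plain standard-library types (illustrative only, not code from this commit):

```rust
fn main() {
    let values = [3_i32, 4, 5];

    // Old shape: check, then index (panics if the check is forgotten):
    //     !values.is_empty() && values[0] % 2 == 0
    // New shape: one fallible lookup, no panicking path:
    let first_is_even = values.first().is_some_and(|&v| v % 2 == 0);
    assert!(!first_is_even);

    // The predicate is simply skipped when the lookup yields `None`.
    let empty: [i32; 0] = [];
    assert!(!empty.first().is_some_and(|&v| v % 2 == 0));
}
```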
4 changes: 2 additions & 2 deletions compiler/rustc_borrowck/src/diagnostics/mod.rs
@@ -345,9 +345,9 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> {
variant_index: Option<VariantIdx>,
including_tuple_field: IncludingTupleField,
) -> Option<String> {
if ty.is_box() {
if let Some(boxed_ty) = ty.boxed_ty() {
// If the type is a box, the field is described from the boxed type
self.describe_field_from_ty(ty.boxed_ty(), field, variant_index, including_tuple_field)
self.describe_field_from_ty(boxed_ty, field, variant_index, including_tuple_field)
} else {
match *ty.kind() {
ty::Adt(def, _) => {
71 changes: 64 additions & 7 deletions compiler/rustc_borrowck/src/type_check/mod.rs
@@ -1979,19 +1979,76 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {

match cast_kind {
CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer) => {
let fn_sig = op.ty(body, tcx).fn_sig(tcx);
let src_sig = op.ty(body, tcx).fn_sig(tcx);

// HACK: This shouldn't be necessary... We can remove this when we actually
// get binders with where clauses, then elaborate implied bounds into that
// binder, and implement a higher-ranked subtyping algorithm that actually
// respects these implied bounds.
//
// This protects against the case where we are casting from a higher-ranked
// fn item to a non-higher-ranked fn pointer, where the cast throws away
// implied bounds that would've needed to be checked at the call site. This
// only works when we're casting to a non-higher-ranked fn ptr, since
// placeholders in the target signature could have untracked implied
// bounds, resulting in incorrect errors.
//
// We check that this signature is WF before subtyping the signature with
// the target fn sig.
if src_sig.has_bound_regions()
&& let ty::FnPtr(target_fn_tys, target_hdr) = *ty.kind()
&& let target_sig = target_fn_tys.with(target_hdr)
&& let Some(target_sig) = target_sig.no_bound_vars()
{
let src_sig = self.infcx.instantiate_binder_with_fresh_vars(
span,
BoundRegionConversionTime::HigherRankedType,
src_sig,
);
let src_ty = Ty::new_fn_ptr(self.tcx(), ty::Binder::dummy(src_sig));
self.prove_predicate(
ty::ClauseKind::WellFormed(src_ty.into()),
location.to_locations(),
ConstraintCategory::Cast { unsize_to: None },
);

let src_ty = self.normalize(src_ty, location);
if let Err(terr) = self.sub_types(
src_ty,
*ty,
location.to_locations(),
ConstraintCategory::Cast { unsize_to: None },
) {
span_mirbug!(
self,
rvalue,
"equating {:?} with {:?} yields {:?}",
target_sig,
src_sig,
terr
);
};
}

let src_ty = Ty::new_fn_ptr(tcx, src_sig);
// HACK: We want to assert that the signature of the source fn is
// well-formed, because we don't enforce that via the WF of FnDef
// types normally. This should be removed when we improve the tracking
// of implied bounds of fn signatures.
self.prove_predicate(
ty::ClauseKind::WellFormed(src_ty.into()),
location.to_locations(),
ConstraintCategory::Cast { unsize_to: None },
);

// The type that we see in the fcx is like
// `foo::<'a, 'b>`, where `foo` is the path to a
// function definition. When we extract the
// signature, it comes from the `fn_sig` query,
// and hence may contain unnormalized results.
let fn_sig = self.normalize(fn_sig, location);

let ty_fn_ptr_from = Ty::new_fn_ptr(tcx, fn_sig);

let src_ty = self.normalize(src_ty, location);
if let Err(terr) = self.sub_types(
ty_fn_ptr_from,
src_ty,
*ty,
location.to_locations(),
ConstraintCategory::Cast { unsize_to: None },
@@ -2000,7 +2057,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
self,
rvalue,
"equating {:?} with {:?} yields {:?}",
ty_fn_ptr_from,
src_ty,
ty,
terr
);
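For context on what the new well-formedness obligation guards against: an `fn` item's signature may rely on implied bounds (here `'b: 'a`, implied by the parameter type `&'a &'b u32`), and reifying the item to a function pointer erases the `FnDef` type that carried them. The example below is a hypothetical illustration of such a signature, not a test case from #129021, and it is expected to compile; the extra check above matters for casts to non-higher-ranked pointer types, where a bad instantiation could otherwise drop the implied bound silently.

```rust
// `'b: 'a` is implied by the well-formedness of `&'a &'b u32`, so returning a
// `&'b u32` at lifetime `'a` is accepted inside the fn item.
fn pick<'a, 'b>(_: &'a &'b u32, v: &'b u32) -> &'a u32 {
    v
}

fn main() {
    // ReifyFnPointer cast: the MIR type-check path patched above now proves
    // `WellFormed` for the source signature at this point.
    let f: for<'a, 'b> fn(&'a &'b u32, &'b u32) -> &'a u32 = pick;

    let x = 1_u32;
    let r = &x;
    println!("{}", f(&r, r));
}
```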
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
@@ -456,7 +456,7 @@ pub(crate) fn type_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) ->
if def.is_box()
&& args.get(1).map_or(true, |arg| cx.layout_of(arg.expect_ty()).is_1zst()) =>
{
build_pointer_or_reference_di_node(cx, t, t.boxed_ty(), unique_type_id)
build_pointer_or_reference_di_node(cx, t, t.expect_boxed_ty(), unique_type_id)
}
ty::FnDef(..) | ty::FnPtr(..) => build_subroutine_type_di_node(cx, unique_type_id),
ty::Closure(..) => build_closure_env_di_node(cx, unique_type_id),
2 changes: 1 addition & 1 deletion compiler/rustc_const_eval/src/interpret/call.rs
@@ -189,7 +189,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
ty::Ref(_, ty, _) => *ty,
ty::RawPtr(ty, _) => *ty,
// We only accept `Box` with the default allocator.
_ if ty.is_box_global(*self.tcx) => ty.boxed_ty(),
_ if ty.is_box_global(*self.tcx) => ty.expect_boxed_ty(),
_ => return Ok(None),
}))
};
@@ -63,8 +63,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Instead, the problem is that the array-into_iter hack will no longer
// apply in Rust 2021.
(ARRAY_INTO_ITER, "2021")
} else if self_ty.is_box()
&& self_ty.boxed_ty().is_slice()
} else if self_ty.boxed_ty().is_some_and(Ty::is_slice)
&& !span.at_least_rust_2024()
{
// In this case, it wasn't really a prelude addition that was the problem.
3 changes: 1 addition & 2 deletions compiler/rustc_hir_typeck/src/method/probe.rs
@@ -1485,8 +1485,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {

// Some trait methods are excluded for boxed slices before 2024.
// (`boxed_slice.into_iter()` wants a slice iterator for compatibility.)
if self_ty.is_box()
&& self_ty.boxed_ty().is_slice()
if self_ty.boxed_ty().is_some_and(Ty::is_slice)
&& !method_name.span.at_least_rust_2024()
{
let trait_def = self.tcx.trait_def(poly_trait_ref.def_id());
7 changes: 2 additions & 5 deletions compiler/rustc_lint/src/shadowed_into_iter.rs
@@ -94,12 +94,9 @@ impl<'tcx> LateLintPass<'tcx> for ShadowedIntoIter {
fn is_ref_to_array(ty: Ty<'_>) -> bool {
if let ty::Ref(_, pointee_ty, _) = *ty.kind() { pointee_ty.is_array() } else { false }
}
fn is_boxed_slice(ty: Ty<'_>) -> bool {
ty.is_box() && ty.boxed_ty().is_slice()
}
fn is_ref_to_boxed_slice(ty: Ty<'_>) -> bool {
if let ty::Ref(_, pointee_ty, _) = *ty.kind() {
is_boxed_slice(pointee_ty)
pointee_ty.boxed_ty().is_some_and(Ty::is_slice)
} else {
false
}
@@ -119,7 +116,7 @@ impl<'tcx> LateLintPass<'tcx> for ShadowedIntoIter {
.iter()
.copied()
.take_while(|ty| !is_ref_to_boxed_slice(*ty))
.position(|ty| is_boxed_slice(ty))
.position(|ty| ty.boxed_ty().is_some_and(Ty::is_slice))
{
(BOXED_SLICE_INTO_ITER, "Box<[T]>", "2024", idx == 0)
} else {
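The helper that was inlined above feeds the boxed-slice variant of this lint, which exists because `Box<[T]>` gained an owning `IntoIterator` implementation whose method resolution is edition-dependent. A small illustration of the behavior difference the lint warns about (assumed typical usage, not a test from this commit):

```rust
fn main() {
    let boxed: Box<[i32]> = vec![1, 2, 3].into_boxed_slice();

    // On editions before 2024, `boxed.into_iter()` resolves through the slice
    // iterator for backwards compatibility and yields `&i32`; on edition 2024,
    // `Box<[T]>: IntoIterator` applies directly and yields owned `i32` values.
    // Code written against the old resolution can therefore change meaning,
    // which is what BOXED_SLICE_INTO_ITER flags.
    for item in boxed.into_iter() {
        let _ = item;
    }
}
```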
6 changes: 4 additions & 2 deletions compiler/rustc_lint/src/types.rs
@@ -1304,8 +1304,10 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {

match *ty.kind() {
ty::Adt(def, args) => {
if def.is_box() && matches!(self.mode, CItemKind::Definition) {
if ty.boxed_ty().is_sized(tcx, self.cx.param_env) {
if let Some(boxed) = ty.boxed_ty()
&& matches!(self.mode, CItemKind::Definition)
{
if boxed.is_sized(tcx, self.cx.param_env) {
return FfiSafe;
} else {
return FfiUnsafe {
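The let-chain above keeps the existing special case: in a function definition, `Box<T>` is treated as FFI-safe when the boxed type is `Sized`, and FFI-unsafe otherwise. A minimal sketch of the two cases (assumed typical usage, not from this commit's tests):

```rust
// Accepted by `improper_ctypes_definitions`: the pointee `u64` is `Sized`,
// so `Box<u64>` is ABI-compatible with a non-null pointer.
pub extern "C" fn take_boxed_int(x: Box<u64>) -> u64 {
    *x
}

// By contrast, a definition taking a boxed *unsized* type, e.g.
// `pub extern "C" fn take_boxed_slice(_x: Box<[u8]>) {}`, would be flagged,
// matching the `FfiUnsafe` branch above.

fn main() {
    assert_eq!(take_boxed_int(Box::new(7)), 7);
}
```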
5 changes: 2 additions & 3 deletions compiler/rustc_lint/src/unused.rs
@@ -283,9 +283,8 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
}

match *ty.kind() {
ty::Adt(..) if ty.is_box() => {
let boxed_ty = ty.boxed_ty();
is_ty_must_use(cx, boxed_ty, expr, span)
ty::Adt(..) if let Some(boxed) = ty.boxed_ty() => {
is_ty_must_use(cx, boxed, expr, span)
.map(|inner| MustUsePath::Boxed(Box::new(inner)))
}
ty::Adt(def, args) if cx.tcx.is_lang_item(def.did(), LangItem::Pin) => {
6 changes: 4 additions & 2 deletions compiler/rustc_middle/src/ty/layout.rs
@@ -1075,11 +1075,13 @@ where
// the raw pointer, so size and align are set to the boxed type, but `pointee.safe`
// will still be `None`.
if let Some(ref mut pointee) = result {
if offset.bytes() == 0 && this.ty.is_box() {
if offset.bytes() == 0
&& let Some(boxed_ty) = this.ty.boxed_ty()
{
debug_assert!(pointee.safe.is_none());
let optimize = tcx.sess.opts.optimize != OptLevel::No;
pointee.safe = Some(PointerKind::Box {
unpin: optimize && this.ty.boxed_ty().is_unpin(tcx, cx.param_env()),
unpin: optimize && boxed_ty.is_unpin(tcx, cx.param_env()),
global: this.ty.is_box_global(tcx),
});
}
15 changes: 10 additions & 5 deletions compiler/rustc_middle/src/ty/sty.rs
@@ -1170,14 +1170,19 @@ impl<'tcx> Ty<'tcx> {
}
}

/// Panics if called on any type other than `Box<T>`.
pub fn boxed_ty(self) -> Ty<'tcx> {
pub fn boxed_ty(self) -> Option<Ty<'tcx>> {
match self.kind() {
Adt(def, args) if def.is_box() => args.type_at(0),
_ => bug!("`boxed_ty` is called on non-box type {:?}", self),
Adt(def, args) if def.is_box() => Some(args.type_at(0)),
_ => None,
}
}

/// Panics if called on any type other than `Box<T>`.
pub fn expect_boxed_ty(self) -> Ty<'tcx> {
self.boxed_ty()
.unwrap_or_else(|| bug!("`expect_boxed_ty` is called on non-box type {:?}", self))
}

/// A scalar type is one that denotes an atomic datum, with no sub-components.
/// (A RawPtr is scalar because it represents a non-managed pointer, so its
/// contents are abstract to rustc.)
@@ -1323,7 +1328,7 @@ impl<'tcx> Ty<'tcx> {
/// Some types -- notably unsafe ptrs -- can only be dereferenced explicitly.
pub fn builtin_deref(self, explicit: bool) -> Option<Ty<'tcx>> {
match *self.kind() {
Adt(def, _) if def.is_box() => Some(self.boxed_ty()),
_ if let Some(boxed) = self.boxed_ty() => Some(boxed),
Ref(_, ty, _) => Some(ty),
RawPtr(ty, _) if explicit => Some(ty),
_ => None,
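The new accessor pair above — a fallible `boxed_ty` plus a panicking `expect_boxed_ty` for callers that have already established the type is a `Box` — follows a common Rust API shape. A self-contained sketch with a simplified stand-in type (`SimpleTy` is illustrative; the real `Ty` lives in `rustc_middle::ty`, and rustc's internal `bug!` macro is stood in for here by `panic!`):

```rust
#[derive(Debug, PartialEq)]
enum SimpleTy {
    Box(Box<SimpleTy>),
    Bool,
}

impl SimpleTy {
    /// Returns the boxed type if `self` is a `Box`, and `None` otherwise.
    fn boxed_ty(&self) -> Option<&SimpleTy> {
        match self {
            SimpleTy::Box(inner) => Some(inner),
            _ => None,
        }
    }

    /// Panics if called on any type other than `Box<T>`.
    fn expect_boxed_ty(&self) -> &SimpleTy {
        self.boxed_ty()
            .unwrap_or_else(|| panic!("`expect_boxed_ty` called on non-box type {self:?}"))
    }
}

fn main() {
    let ty = SimpleTy::Box(Box::new(SimpleTy::Bool));
    assert_eq!(ty.boxed_ty(), Some(&SimpleTy::Bool));
    assert_eq!(ty.expect_boxed_ty(), &SimpleTy::Bool);
    assert_eq!(SimpleTy::Bool.boxed_ty(), None);
}
```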
2 changes: 1 addition & 1 deletion compiler/rustc_middle/src/ty/util.rs
@@ -1628,7 +1628,7 @@ impl<'tcx> ExplicitSelf<'tcx> {
_ if is_self_ty(self_arg_ty) => ByValue,
ty::Ref(region, ty, mutbl) if is_self_ty(ty) => ByReference(region, mutbl),
ty::RawPtr(ty, mutbl) if is_self_ty(ty) => ByRawPointer(mutbl),
ty::Adt(def, _) if def.is_box() && is_self_ty(self_arg_ty.boxed_ty()) => ByBox,
_ if self_arg_ty.boxed_ty().is_some_and(is_self_ty) => ByBox,
_ => Other,
}
}
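The rewritten arm above classifies `self: Box<Self>` receivers (`ExplicitSelf::ByBox`) through the new `Option`-returning accessor. For reference, the receiver shape in question looks like this in ordinary Rust code (illustrative only):

```rust
trait Consume {
    // A by-box receiver: the method takes ownership of the heap allocation,
    // which also works through a trait object.
    fn consume(self: Box<Self>) -> String;
}

struct Message(String);

impl Consume for Message {
    fn consume(self: Box<Self>) -> String {
        self.0
    }
}

fn main() {
    let boxed: Box<dyn Consume> = Box::new(Message("hello".to_owned()));
    println!("{}", boxed.consume());
}
```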
34 changes: 21 additions & 13 deletions compiler/rustc_mir_transform/src/coverage/spans.rs
@@ -3,7 +3,7 @@ use std::collections::VecDeque;
use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::FxHashSet;
use rustc_middle::mir;
use rustc_span::Span;
use rustc_span::{DesugaringKind, ExpnKind, MacroKind, Span};
use tracing::{debug, debug_span, instrument};

use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
@@ -25,7 +25,7 @@ pub(super) fn extract_refined_covspans(

// First, perform the passes that need macro information.
covspans.sort_by(|a, b| basic_coverage_blocks.cmp_in_dominator_order(a.bcb, b.bcb));
remove_unwanted_macro_spans(&mut covspans);
remove_unwanted_expansion_spans(&mut covspans);
split_visible_macro_spans(&mut covspans);

// We no longer need the extra information in `SpanFromMir`, so convert to `Covspan`.
@@ -76,18 +76,24 @@ pub(super) fn extract_refined_covspans(
/// invocation, which is unhelpful. Keeping only the first such span seems to
/// give better mappings, so remove the others.
///
/// Similarly, `await` expands to a branch on the discriminant of `Poll`, which
/// leads to incorrect coverage if the `Future` is immediately ready (#98712).
///
/// (The input spans should be sorted in BCB dominator order, so that the
/// retained "first" span is likely to dominate the others.)
fn remove_unwanted_macro_spans(covspans: &mut Vec<SpanFromMir>) {
let mut seen_macro_spans = FxHashSet::default();
fn remove_unwanted_expansion_spans(covspans: &mut Vec<SpanFromMir>) {
let mut deduplicated_spans = FxHashSet::default();

covspans.retain(|covspan| {
// Ignore (retain) non-macro-expansion spans.
if covspan.visible_macro.is_none() {
return true;
match covspan.expn_kind {
// Retain only the first await-related or macro-expanded covspan with this span.
Some(ExpnKind::Desugaring(kind)) if kind == DesugaringKind::Await => {
deduplicated_spans.insert(covspan.span)
}
Some(ExpnKind::Macro(MacroKind::Bang, _)) => deduplicated_spans.insert(covspan.span),
// Ignore (retain) other spans.
_ => true,
}

// Retain only the first macro-expanded covspan with this span.
seen_macro_spans.insert(covspan.span)
});
}

@@ -99,7 +105,9 @@ fn split_visible_macro_spans(covspans: &mut Vec<SpanFromMir>) {
let mut extra_spans = vec![];

covspans.retain(|covspan| {
let Some(visible_macro) = covspan.visible_macro else { return true };
let Some(ExpnKind::Macro(MacroKind::Bang, visible_macro)) = covspan.expn_kind else {
return true;
};

let split_len = visible_macro.as_str().len() as u32 + 1;
let (before, after) = covspan.span.split_at(split_len);
@@ -111,8 +119,8 @@
return true;
}

extra_spans.push(SpanFromMir::new(before, covspan.visible_macro, covspan.bcb));
extra_spans.push(SpanFromMir::new(after, covspan.visible_macro, covspan.bcb));
extra_spans.push(SpanFromMir::new(before, covspan.expn_kind.clone(), covspan.bcb));
extra_spans.push(SpanFromMir::new(after, covspan.expn_kind.clone(), covspan.bcb));
false // Discard the original covspan that we just split.
});

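The await-related deduplication above addresses the case where a `Future` is already ready at the `.await` point: the desugared `await` branches on the `Poll` discriminant, and before this change the never-taken `Pending` arm could make the line look only partially covered (#98712). A minimal, hand-driven reproduction of that situation (assumed shape, not the PR's coverage test; `Waker::noop` requires Rust 1.85 or newer):

```rust
use std::future::Future;
use std::pin::pin;
use std::task::{Context, Poll, Waker};

async fn ready_value() -> u32 {
    42
}

async fn caller() -> u32 {
    // Immediately ready: the `Poll::Pending` arm of the desugared `await`
    // is never executed, yet the whole expression should count as covered.
    ready_value().await
}

fn main() {
    // Drive the future once with a no-op waker instead of pulling in an
    // async runtime; one poll suffices because the future is immediately ready.
    let mut fut = pin!(caller());
    let mut cx = Context::from_waker(Waker::noop());
    match fut.as_mut().poll(&mut cx) {
        Poll::Ready(v) => println!("ready: {v}"),
        Poll::Pending => unreachable!("future was expected to be immediately ready"),
    }
}
```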