Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Rollup of 7 pull requests #83789

Closed
wants to merge 29 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
7c89cc4
Add SharedResource abstraction and use it in write_shared
jyn514 Mar 25, 2021
f77ebd4
Add unstable option to only emit shared/crate-specific files
jyn514 Mar 25, 2021
0dbed61
Rework `std::sys::windows::alloc`
CDirkx Mar 12, 2021
b01bf0e
Apply suggestions from code review
CDirkx Mar 22, 2021
4cce9e3
Cache `GetProcessHeap`
CDirkx Mar 26, 2021
fa89c0f
add testcase for double-drop during Vec in-place collection
the8472 Mar 29, 2021
421f5d2
fix double-drop in in-place collect specialization
the8472 Mar 29, 2021
f2e52ff
2229: Produce a rustfix migration suggestion
arora-aman Mar 29, 2021
8f77356
give full path of constraint in suggest_constraining_type_param
Rustin170506 Mar 30, 2021
98c5649
use undef for uninitialized bytes in constants
erikdesjardins Mar 31, 2021
e78fac5
Handle the case of partially captured drop type
arora-aman Mar 31, 2021
d4f3f91
Enforce that Toolchain files are static and Crate files are dynamic
jyn514 Mar 31, 2021
1086d9b
Rename CrateSpecific -> InvocationSpecific
jyn514 Mar 31, 2021
413938d
Fix `--external-css` to be invocation-specific and note main.js shoul…
jyn514 Mar 31, 2021
be66c2e
impl Step for Size; copy-paste fixme
erikdesjardins Mar 31, 2021
18af989
Update lint message
arora-aman Apr 1, 2021
da86348
Update test cases
arora-aman Apr 1, 2021
a721957
Don't introduce a block if a block exists
arora-aman Apr 2, 2021
1b9620d
Make the diagnostic message more readable
arora-aman Apr 2, 2021
fad5388
Simplify coverage tests
richkadel Apr 1, 2021
c86e098
Introduce `get_process_heap` and fix atomic ordering.
CDirkx Apr 2, 2021
db1d003
Remove `debug_assert`
CDirkx Apr 2, 2021
7aeaf43
Rollup merge of #83065 - CDirkx:win-alloc, r=dtolnay
Dylan-DPC Apr 2, 2021
95b5f28
Rollup merge of #83478 - jyn514:fine-grained-files, r=Mark-Simulacrum
Dylan-DPC Apr 2, 2021
e25e2b9
Rollup merge of #83629 - the8472:fix-inplace-panic-on-drop, r=m-ou-se
Dylan-DPC Apr 2, 2021
0c4b370
Rollup merge of #83673 - hi-rustin:rustin-patch-suggestion, r=estebank
Dylan-DPC Apr 2, 2021
44e70ba
Rollup merge of #83698 - erikdesjardins:undefconst, r=oli-obk
Dylan-DPC Apr 2, 2021
853f435
Rollup merge of #83755 - richkadel:cov-test-simplify, r=tmandry
Dylan-DPC Apr 2, 2021
83ca4e2
Rollup merge of #83757 - sexxi-goose:migrations_out, r=nikomatsakis
Dylan-DPC Apr 2, 2021
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 30 additions & 13 deletions compiler/rustc_codegen_llvm/src/consts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,22 +8,47 @@ use crate::value::Value;
use cstr::cstr;
use libc::c_uint;
use rustc_codegen_ssa::traits::*;
use rustc_data_structures::captures::Captures;
use rustc_hir::def_id::DefId;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::interpret::{
read_target_uint, Allocation, ErrorHandled, GlobalAlloc, Pointer,
read_target_uint, Allocation, ErrorHandled, GlobalAlloc, InitChunk, Pointer,
};
use rustc_middle::mir::mono::MonoItem;
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::{bug, span_bug};
use rustc_target::abi::{AddressSpace, Align, HasDataLayout, LayoutOf, Primitive, Scalar, Size};
use std::ops::Range;
use tracing::debug;

pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll Value {
let mut llvals = Vec::with_capacity(alloc.relocations().len() + 1);
let dl = cx.data_layout();
let pointer_size = dl.pointer_size.bytes() as usize;

// Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`,
// so `range` must be within the bounds of `alloc` and not within a relocation.
fn chunks_of_init_and_uninit_bytes<'ll, 'a, 'b>(
    cx: &'a CodegenCx<'ll, 'b>,
    alloc: &'a Allocation,
    range: Range<usize>,
) -> impl Iterator<Item = &'ll Value> + Captures<'a> + Captures<'b> {
    // Walk the init mask as maximal runs of all-initialized / all-uninitialized bytes.
    let chunks = alloc
        .init_mask()
        .range_as_init_chunks(Size::from_bytes(range.start), Size::from_bytes(range.end));
    chunks.map(move |chunk| match chunk {
        InitChunk::Init(init) => {
            // Fully initialized run: emit the raw bytes as a constant byte array.
            let byte_range = (init.start.bytes() as usize)..(init.end.bytes() as usize);
            let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(byte_range);
            cx.const_bytes(bytes)
        }
        InitChunk::Uninit(uninit) => {
            // Uninitialized run: emit an `undef` i8 array of the same length
            // instead of fabricating arbitrary byte values.
            let len = uninit.end.bytes() - uninit.start.bytes();
            cx.const_undef(cx.type_array(cx.type_i8(), len))
        }
    })
}

let mut next_offset = 0;
for &(offset, ((), alloc_id)) in alloc.relocations().iter() {
let offset = offset.bytes();
Expand All @@ -32,12 +57,8 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll
if offset > next_offset {
// This `inspect` is okay since we have checked that it is not within a relocation, it
// is within the bounds of the allocation, and it doesn't affect interpreter execution
// (we inspect the result after interpreter execution). Any undef byte is replaced with
// some arbitrary byte value.
//
// FIXME: relay undef bytes to codegen as undef const bytes
let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(next_offset..offset);
llvals.push(cx.const_bytes(bytes));
// (we inspect the result after interpreter execution).
llvals.extend(chunks_of_init_and_uninit_bytes(cx, alloc, next_offset..offset));
}
let ptr_offset = read_target_uint(
dl.endian,
Expand Down Expand Up @@ -65,12 +86,8 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll
let range = next_offset..alloc.len();
// This `inspect` is okay since we have checked that it is after all relocations, it is
// within the bounds of the allocation, and it doesn't affect interpreter execution (we
// inspect the result after interpreter execution). Any undef byte is replaced with some
// arbitrary byte value.
//
// FIXME: relay undef bytes to codegen as undef const bytes
let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
llvals.push(cx.const_bytes(bytes));
// inspect the result after interpreter execution).
llvals.extend(chunks_of_init_and_uninit_bytes(cx, alloc, range));
}

cx.const_struct(&llvals, true)
Expand Down
57 changes: 52 additions & 5 deletions compiler/rustc_middle/src/mir/interpret/allocation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -761,20 +761,24 @@ impl InitMask {
}

// FIXME(oli-obk): optimize this for allocations larger than a block.
let idx = (start.bytes()..end.bytes()).map(Size::from_bytes).find(|&i| !self.get(i));
let idx = (start..end).find(|&i| !self.get(i));

match idx {
Some(idx) => {
let uninit_end = (idx.bytes()..end.bytes())
.map(Size::from_bytes)
.find(|&i| self.get(i))
.unwrap_or(end);
let uninit_end = (idx..end).find(|&i| self.get(i)).unwrap_or(end);
Err(idx..uninit_end)
}
None => Ok(()),
}
}

/// Returns an iterator, yielding a range of byte indexes for each contiguous region
/// of initialized or uninitialized bytes inside the range `start..end` (end-exclusive).
///
/// Delegates to [`InitChunkIter::new`], which asserts that `start <= end` and that
/// `end` does not exceed the length of this mask.
#[inline]
pub fn range_as_init_chunks(&self, start: Size, end: Size) -> InitChunkIter<'_> {
    InitChunkIter::new(self, start, end)
}

pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
let len = self.len;
if end > len {
Expand Down Expand Up @@ -867,6 +871,49 @@ impl InitMask {
}
}

/// Yields [`InitChunk`]s. See [`InitMask::range_as_init_chunks`].
pub struct InitChunkIter<'a> {
    /// The mask being scanned for contiguous runs of equal init state.
    init_mask: &'a InitMask,
    /// The current byte index into `init_mask`.
    start: Size,
    /// The end byte index into `init_mask`.
    end: Size,
}

/// A contiguous chunk of initialized or uninitialized memory.
pub enum InitChunk {
    /// Every byte in the range is initialized.
    Init(Range<Size>),
    /// Every byte in the range is uninitialized.
    Uninit(Range<Size>),
}

impl<'a> InitChunkIter<'a> {
    /// Creates an iterator over `start..end` (end-exclusive) of `init_mask`.
    ///
    /// Panics if the range is malformed (`start > end`) or extends past the
    /// end of the mask.
    fn new(init_mask: &'a InitMask, start: Size, end: Size) -> Self {
        assert!(start <= end);
        assert!(end <= init_mask.len);
        Self { init_mask, start, end }
    }
}

impl<'a> Iterator for InitChunkIter<'a> {
    type Item = InitChunk;

    /// Yields the next maximal run of bytes that all share the same init state,
    /// advancing `self.start` past it. Returns `None` once the range is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        if self.start >= self.end {
            return None;
        }

        // The init state of the first byte determines the kind of this chunk.
        let chunk_is_init = self.init_mask.get(self.start);
        // The chunk ends at the first byte whose state differs, or at `self.end`.
        // FIXME(oli-obk): optimize this for allocations larger than a block.
        let chunk_end = (self.start..self.end)
            .find(|&i| self.init_mask.get(i) != chunk_is_init)
            .unwrap_or(self.end);
        let chunk_range = self.start..chunk_end;

        self.start = chunk_end;

        Some(match chunk_is_init {
            true => InitChunk::Init(chunk_range),
            false => InitChunk::Uninit(chunk_range),
        })
    }
}

#[inline]
fn bit_index(bits: Size) -> (usize, usize) {
let bits = bits.bytes();
Expand Down
4 changes: 3 additions & 1 deletion compiler/rustc_middle/src/mir/interpret/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,9 @@ pub use self::error::{

pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar, ScalarMaybeUninit};

pub use self::allocation::{Allocation, AllocationExtra, InitMask, Relocations};
pub use self::allocation::{
Allocation, AllocationExtra, InitChunk, InitChunkIter, InitMask, Relocations,
};

pub use self::pointer::{Pointer, PointerArithmetic};

Expand Down
38 changes: 38 additions & 0 deletions compiler/rustc_target/src/abi/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ use crate::spec::Target;

use std::convert::{TryFrom, TryInto};
use std::fmt;
use std::iter::Step;
use std::num::NonZeroUsize;
use std::ops::{Add, AddAssign, Deref, Mul, Range, RangeInclusive, Sub};
use std::str::FromStr;
Expand Down Expand Up @@ -433,6 +434,43 @@ impl AddAssign for Size {
}
}

unsafe impl Step for Size {
#[inline]
fn steps_between(start: &Self, end: &Self) -> Option<usize> {
u64::steps_between(&start.bytes(), &end.bytes())
}

#[inline]
fn forward_checked(start: Self, count: usize) -> Option<Self> {
u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
}

#[inline]
fn forward(start: Self, count: usize) -> Self {
Self::from_bytes(u64::forward(start.bytes(), count))
}

#[inline]
unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
Self::from_bytes(u64::forward_unchecked(start.bytes(), count))
}

#[inline]
fn backward_checked(start: Self, count: usize) -> Option<Self> {
u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
}

#[inline]
fn backward(start: Self, count: usize) -> Self {
Self::from_bytes(u64::backward(start.bytes(), count))
}

#[inline]
unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
Self::from_bytes(u64::backward_unchecked(start.bytes(), count))
}
}

/// Alignment of a type in bytes (always a power of two).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Encodable, Decodable)]
#[derive(HashStable_Generic)]
Expand Down
3 changes: 3 additions & 0 deletions compiler/rustc_target/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,9 @@
#![feature(never_type)]
#![feature(associated_type_bounds)]
#![feature(exhaustive_patterns)]
#![feature(step_trait)]
#![feature(step_trait_ext)]
#![feature(unchecked_math)]

#[macro_use]
extern crate rustc_macros;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ use std::fmt;

use super::InferCtxtPrivExt;
use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
use rustc_middle::ty::print::with_no_trimmed_paths;

#[derive(Debug)]
pub enum GeneratorInteriorOrUpvar {
Expand Down Expand Up @@ -440,7 +441,8 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
{
// Missing generic type parameter bound.
let param_name = self_ty.to_string();
let constraint = trait_ref.print_only_trait_path().to_string();
let constraint =
with_no_trimmed_paths(|| trait_ref.print_only_trait_path().to_string());
if suggest_constraining_type_param(
self.tcx,
generics,
Expand Down
Loading