diff --git a/Cargo.lock b/Cargo.lock index 0fe3138427468..b717460e5d427 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2182,9 +2182,9 @@ checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" [[package]] name = "openssl-src" -version = "111.3.0+1.1.1c" +version = "111.6.0+1.1.1d" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53ed5f31d294bdf5f7a4ba0a206c2754b0f60e9a63b7e3076babc5317873c797" +checksum = "b9c2da1de8a7a3f860919c01540b03a6db16de042405a8a07a5e9d0b4b825d9c" dependencies = [ "cc", ] diff --git a/src/libcore/option.rs b/src/libcore/option.rs index 79bd04b724390..5569d99f8d81d 100644 --- a/src/libcore/option.rs +++ b/src/libcore/option.rs @@ -295,7 +295,7 @@ impl Option { /// [`Pin`]: ../pin/struct.Pin.html #[inline] #[stable(feature = "pin", since = "1.33.0")] - pub fn as_pin_ref<'a>(self: Pin<&'a Option>) -> Option> { + pub fn as_pin_ref(self: Pin<&Self>) -> Option> { unsafe { Pin::get_ref(self).as_ref().map(|x| Pin::new_unchecked(x)) } @@ -306,7 +306,7 @@ impl Option { /// [`Pin`]: ../pin/struct.Pin.html #[inline] #[stable(feature = "pin", since = "1.33.0")] - pub fn as_pin_mut<'a>(self: Pin<&'a mut Option>) -> Option> { + pub fn as_pin_mut(self: Pin<&mut Self>) -> Option> { unsafe { Pin::get_unchecked_mut(self).as_mut().map(|x| Pin::new_unchecked(x)) } diff --git a/src/libcore/pin.rs b/src/libcore/pin.rs index 1080fd32a8862..be59e830beda3 100644 --- a/src/libcore/pin.rs +++ b/src/libcore/pin.rs @@ -233,7 +233,7 @@ //! # type Field = i32; //! # struct Struct { field: Field } //! impl Struct { -//! fn pin_get_field<'a>(self: Pin<&'a mut Self>) -> &'a mut Field { +//! fn pin_get_field(self: Pin<&mut Self>) -> &mut Field { //! // This is okay because `field` is never considered pinned. //! unsafe { &mut self.get_unchecked_mut().field } //! } @@ -257,7 +257,7 @@ //! # type Field = i32; //! # struct Struct { field: Field } //! impl Struct { -//! fn pin_get_field<'a>(self: Pin<&'a mut Self>) -> Pin<&'a mut Field> { +//! fn pin_get_field(self: Pin<&mut Self>) -> Pin<&mut Field> { //! // This is okay because `field` is pinned when `self` is. //! unsafe { self.map_unchecked_mut(|s| &mut s.field) } //! } @@ -549,7 +549,7 @@ impl Pin
<P> { /// ruled out by the contract of `Pin::new_unchecked`. #[stable(feature = "pin", since = "1.33.0")] #[inline(always)] - pub fn as_ref(self: &Pin<P>) -> Pin<&P::Target> { + pub fn as_ref(&self) -> Pin<&P::Target> { unsafe { Pin::new_unchecked(&*self.pointer) } } @@ -586,7 +586,7 @@ impl<P: DerefMut> Pin<P> {
/// ruled out by the contract of `Pin::new_unchecked`. #[stable(feature = "pin", since = "1.33.0")] #[inline(always)] - pub fn as_mut(self: &mut Pin<P>) -> Pin<&mut P::Target> { + pub fn as_mut(&mut self) -> Pin<&mut P::Target> { unsafe { Pin::new_unchecked(&mut *self.pointer) } } @@ -596,7 +596,7 @@ impl<P: DerefMut> Pin<P> {
/// run before being overwritten, so no pinning guarantee is violated. #[stable(feature = "pin", since = "1.33.0")] #[inline(always)] - pub fn set(self: &mut Pin<P>
, value: P::Target) + pub fn set(&mut self, value: P::Target) where P::Target: Sized, { @@ -621,7 +621,7 @@ impl<'a, T: ?Sized> Pin<&'a T> { /// /// [`pin` module]: ../../std/pin/index.html#projections-and-structural-pinning #[stable(feature = "pin", since = "1.33.0")] - pub unsafe fn map_unchecked(self: Pin<&'a T>, func: F) -> Pin<&'a U> where + pub unsafe fn map_unchecked(self, func: F) -> Pin<&'a U> where F: FnOnce(&T) -> &U, { let pointer = &*self.pointer; @@ -648,7 +648,7 @@ impl<'a, T: ?Sized> Pin<&'a T> { /// ["pinning projections"]: ../../std/pin/index.html#projections-and-structural-pinning #[stable(feature = "pin", since = "1.33.0")] #[inline(always)] - pub fn get_ref(self: Pin<&'a T>) -> &'a T { + pub fn get_ref(self) -> &'a T { self.pointer } } @@ -657,7 +657,7 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { /// Converts this `Pin<&mut T>` into a `Pin<&T>` with the same lifetime. #[stable(feature = "pin", since = "1.33.0")] #[inline(always)] - pub fn into_ref(self: Pin<&'a mut T>) -> Pin<&'a T> { + pub fn into_ref(self) -> Pin<&'a T> { Pin { pointer: self.pointer } } @@ -672,7 +672,7 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { /// with the same lifetime as the original `Pin`. #[stable(feature = "pin", since = "1.33.0")] #[inline(always)] - pub fn get_mut(self: Pin<&'a mut T>) -> &'a mut T + pub fn get_mut(self) -> &'a mut T where T: Unpin, { self.pointer @@ -690,7 +690,7 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { /// instead. #[stable(feature = "pin", since = "1.33.0")] #[inline(always)] - pub unsafe fn get_unchecked_mut(self: Pin<&'a mut T>) -> &'a mut T { + pub unsafe fn get_unchecked_mut(self) -> &'a mut T { self.pointer } @@ -710,7 +710,7 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { /// /// [`pin` module]: ../../std/pin/index.html#projections-and-structural-pinning #[stable(feature = "pin", since = "1.33.0")] - pub unsafe fn map_unchecked_mut(self: Pin<&'a mut T>, func: F) -> Pin<&'a mut U> where + pub unsafe fn map_unchecked_mut(self, func: F) -> Pin<&'a mut U> where F: FnOnce(&mut T) -> &mut U, { let pointer = Pin::get_unchecked_mut(self); diff --git a/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs b/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs index b4fb018920647..19bd38b45b30a 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs @@ -192,23 +192,28 @@ impl NiceRegionError<'me, 'tcx> { vid, sub_placeholder, sup_placeholder, trait_def_id, expected_substs, actual_substs ); - let mut err = self.tcx().sess.struct_span_err( - cause.span(self.tcx()), - &format!( - "implementation of `{}` is not general enough", - self.tcx().def_path_str(trait_def_id), - ), + let span = cause.span(self.tcx()); + let msg = format!( + "implementation of `{}` is not general enough", + self.tcx().def_path_str(trait_def_id), + ); + let mut err = self.tcx().sess.struct_span_err(span, &msg); + err.span_label( + self.tcx().def_span(trait_def_id), + format!("trait `{}` defined here", self.tcx().def_path_str(trait_def_id)), ); - match cause.code { - ObligationCauseCode::ItemObligation(def_id) => { - err.note(&format!( - "Due to a where-clause on `{}`,", - self.tcx().def_path_str(def_id), - )); - } - _ => (), - } + let leading_ellipsis = if let ObligationCauseCode::ItemObligation(def_id) = cause.code { + err.span_label(span, "doesn't satisfy where-clause"); + err.span_label( + self.tcx().def_span(def_id), + &format!("due to a where-clause on `{}`...", 
self.tcx().def_path_str(def_id)), + ); + true + } else { + err.span_label(span, &msg); + false + }; let expected_trait_ref = self.infcx.resolve_vars_if_possible(&ty::TraitRef { def_id: trait_def_id, @@ -295,6 +300,7 @@ impl NiceRegionError<'me, 'tcx> { expected_has_vid, actual_has_vid, any_self_ty_has_vid, + leading_ellipsis, ); err @@ -318,6 +324,7 @@ impl NiceRegionError<'me, 'tcx> { expected_has_vid: Option, actual_has_vid: Option, any_self_ty_has_vid: bool, + leading_ellipsis: bool, ) { // HACK(eddyb) maybe move this in a more central location. #[derive(Copy, Clone)] @@ -392,13 +399,15 @@ impl NiceRegionError<'me, 'tcx> { let mut note = if passive_voice { format!( - "`{}` would have to be implemented for the type `{}`", + "{}`{}` would have to be implemented for the type `{}`", + if leading_ellipsis { "..." } else { "" }, expected_trait_ref, expected_trait_ref.map(|tr| tr.self_ty()), ) } else { format!( - "`{}` must implement `{}`", + "{}`{}` must implement `{}`", + if leading_ellipsis { "..." } else { "" }, expected_trait_ref.map(|tr| tr.self_ty()), expected_trait_ref, ) @@ -407,20 +416,20 @@ impl NiceRegionError<'me, 'tcx> { match (has_sub, has_sup) { (Some(n1), Some(n2)) => { let _ = write!(note, - ", for any two lifetimes `'{}` and `'{}`", + ", for any two lifetimes `'{}` and `'{}`...", std::cmp::min(n1, n2), std::cmp::max(n1, n2), ); } (Some(n), _) | (_, Some(n)) => { let _ = write!(note, - ", for any lifetime `'{}`", + ", for any lifetime `'{}`...", n, ); } (None, None) => if let Some(n) = expected_has_vid { let _ = write!(note, - ", for some specific lifetime `'{}`", + ", for some specific lifetime `'{}`...", n, ); }, @@ -439,13 +448,13 @@ impl NiceRegionError<'me, 'tcx> { let mut note = if passive_voice { format!( - "but `{}` is actually implemented for the type `{}`", + "...but `{}` is actually implemented for the type `{}`", actual_trait_ref, actual_trait_ref.map(|tr| tr.self_ty()), ) } else { format!( - "but `{}` actually implements `{}`", + "...but `{}` actually implements `{}`", actual_trait_ref.map(|tr| tr.self_ty()), actual_trait_ref, ) diff --git a/src/librustc_data_structures/indexed_vec.rs b/src/librustc_data_structures/indexed_vec.rs index 6f40d059be27f..6e80b48a68560 100644 --- a/src/librustc_data_structures/indexed_vec.rs +++ b/src/librustc_data_structures/indexed_vec.rs @@ -149,7 +149,7 @@ macro_rules! newtype_index { #[inline] $v const unsafe fn from_u32_unchecked(value: u32) -> Self { - unsafe { $type { private: value } } + $type { private: value } } /// Extracts the value of this index as an integer. diff --git a/src/librustc_data_structures/obligation_forest/graphviz.rs b/src/librustc_data_structures/obligation_forest/graphviz.rs index a0363e165e049..b2120b182fa7b 100644 --- a/src/librustc_data_structures/obligation_forest/graphviz.rs +++ b/src/librustc_data_structures/obligation_forest/graphviz.rs @@ -74,9 +74,9 @@ impl<'a, O: ForestObligation + 'a> dot::GraphWalk<'a> for &'a ObligationForest ambiguous result. Obligation was neither a success +//! - `Unchanged` -> ambiguous result. Obligation was neither a success //! nor a failure. It is assumed that further attempts to process the //! obligation will yield the same result unless something in the //! surrounding environment changes. -//! - `Ok(Some(C))` - the obligation was *shallowly successful*. The +//! - `Changed(C)` - the obligation was *shallowly successful*. The //! vector `C` is a list of subobligations. The meaning of this is that //! 
`O` was successful on the assumption that all the obligations in `C` //! are also successful. Therefore, `O` is only considered a "true" @@ -34,7 +34,7 @@ //! state and the obligations in `C` become the new pending //! obligations. They will be processed the next time you call //! `process_obligations`. -//! - `Err(E)` -> obligation failed with error `E`. We will collect this +//! - `Error(E)` -> obligation failed with error `E`. We will collect this //! error and return it from `process_obligations`, along with the //! "backtrace" of obligations (that is, the list of obligations up to //! and including the root of the failed obligation). No further @@ -47,55 +47,50 @@ //! - `completed`: a list of obligations where processing was fully //! completed without error (meaning that all transitive subobligations //! have also been completed). So, for example, if the callback from -//! `process_obligations` returns `Ok(Some(C))` for some obligation `O`, +//! `process_obligations` returns `Changed(C)` for some obligation `O`, //! then `O` will be considered completed right away if `C` is the //! empty vector. Otherwise it will only be considered completed once //! all the obligations in `C` have been found completed. //! - `errors`: a list of errors that occurred and associated backtraces //! at the time of error, which can be used to give context to the user. //! - `stalled`: if true, then none of the existing obligations were -//! *shallowly successful* (that is, no callback returned `Ok(Some(_))`). +//! *shallowly successful* (that is, no callback returned `Changed(_)`). //! This implies that all obligations were either errors or returned an //! ambiguous result, which means that any further calls to //! `process_obligations` would simply yield back further ambiguous //! results. This is used by the `FulfillmentContext` to decide when it //! has reached a steady state. //! -//! #### Snapshots -//! -//! The `ObligationForest` supports a limited form of snapshots; see -//! `start_snapshot`, `commit_snapshot`, and `rollback_snapshot`. In -//! particular, you can use a snapshot to roll back new root -//! obligations. However, it is an error to attempt to -//! `process_obligations` during a snapshot. -//! //! ### Implementation details //! //! For the most part, comments specific to the implementation are in the //! code. This file only contains a very high-level overview. Basically, //! the forest is stored in a vector. Each element of the vector is a node -//! in some tree. Each node in the vector has the index of an (optional) -//! parent and (for convenience) its root (which may be itself). It also -//! has a current state, described by `NodeState`. After each -//! processing step, we compress the vector to remove completed and error -//! nodes, which aren't needed anymore. +//! in some tree. Each node in the vector has the index of its dependents, +//! including the first dependent which is known as the parent. It also +//! has a current state, described by `NodeState`. After each processing +//! step, we compress the vector to remove completed and error nodes, which +//! aren't needed anymore. use crate::fx::{FxHashMap, FxHashSet}; +use crate::indexed_vec::Idx; +use crate::newtype_index; -use std::cell::Cell; +use std::cell::{Cell, RefCell}; use std::collections::hash_map::Entry; use std::fmt::Debug; use std::hash; use std::marker::PhantomData; -mod node_index; -use self::node_index::NodeIndex; - mod graphviz; #[cfg(test)] mod tests; +newtype_index! { + pub struct NodeIndex { .. 
} +} + pub trait ForestObligation : Clone + Debug { type Predicate : Clone + hash::Hash + Eq + Debug; @@ -148,18 +143,22 @@ pub struct ObligationForest { /// At the end of processing, those nodes will be removed by a /// call to `compress`. /// - /// At all times we maintain the invariant that every node appears - /// at a higher index than its parent. This is needed by the - /// backtrace iterator (which uses `split_at`). + /// Ideally, this would be an `IndexVec>`. But that is + /// slower, because this vector is accessed so often that the + /// `u32`-to-`usize` conversions required for accesses are significant. nodes: Vec>, /// A cache of predicates that have been successfully completed. done_cache: FxHashSet, - /// An cache of the nodes in `nodes`, indexed by predicate. + /// A cache of the nodes in `nodes`, indexed by predicate. Unfortunately, + /// its contents are not guaranteed to match those of `nodes`. See the + /// comments in `process_obligation` for details. waiting_cache: FxHashMap, - scratch: Option>, + /// A scratch vector reused in various operations, to avoid allocating new + /// vectors. + scratch: RefCell>, obligation_tree_id_generator: ObligationTreeIdGenerator, @@ -178,19 +177,41 @@ struct Node { obligation: O, state: Cell, - /// The parent of a node - the original obligation of - /// which it is a subobligation. Except for error reporting, - /// it is just like any member of `dependents`. + /// The parent of a node - the original obligation of which it is a + /// subobligation. Except for error reporting, it is just like any member + /// of `dependents`. + /// + /// Unlike `ObligationForest::nodes`, this uses `NodeIndex` rather than + /// `usize` for the index, because keeping the size down is more important + /// than the cost of converting to a `usize` for indexing. parent: Option, - /// Obligations that depend on this obligation for their - /// completion. They must all be in a non-pending state. + /// Obligations that depend on this obligation for their completion. They + /// must all be in a non-pending state. + /// + /// This uses `NodeIndex` for the same reason as `parent`. dependents: Vec, /// Identifier of the obligation tree to which this node belongs. obligation_tree_id: ObligationTreeId, } +impl Node { + fn new( + parent: Option, + obligation: O, + obligation_tree_id: ObligationTreeId + ) -> Node { + Node { + obligation, + state: Cell::new(NodeState::Pending), + parent, + dependents: vec![], + obligation_tree_id, + } + } +} + /// The state of one node in some tree within the forest. This /// represents the current state of processing for the obligation (of /// type `O`) associated with this node. @@ -262,7 +283,7 @@ impl ObligationForest { nodes: vec![], done_cache: Default::default(), waiting_cache: Default::default(), - scratch: Some(vec![]), + scratch: RefCell::new(vec![]), obligation_tree_id_generator: (0..).map(ObligationTreeId), error_cache: Default::default(), } @@ -275,14 +296,12 @@ impl ObligationForest { } /// Registers an obligation. - /// - /// This CAN be done in a snapshot pub fn register_obligation(&mut self, obligation: O) { // Ignore errors here - there is no guarantee of success. let _ = self.register_obligation_at(obligation, None); } - // returns Err(()) if we already know this obligation failed. + // Returns Err(()) if we already know this obligation failed. 
fn register_obligation_at(&mut self, obligation: O, parent: Option) -> Result<(), ()> { @@ -294,15 +313,16 @@ impl ObligationForest { Entry::Occupied(o) => { debug!("register_obligation_at({:?}, {:?}) - duplicate of {:?}!", obligation, parent, o.get()); - let node = &mut self.nodes[o.get().get()]; - if let Some(parent) = parent { + let node = &mut self.nodes[o.get().index()]; + if let Some(parent_index) = parent { // If the node is already in `waiting_cache`, it's already // been marked with a parent. (It's possible that parent // has been cleared by `apply_rewrites`, though.) So just // dump `parent` into `node.dependents`... unless it's // already in `node.dependents` or `node.parent`. - if !node.dependents.contains(&parent) && Some(parent) != node.parent { - node.dependents.push(parent); + if !node.dependents.contains(&parent_index) && + Some(parent_index) != node.parent { + node.dependents.push(parent_index); } } if let NodeState::Error = node.state.get() { @@ -316,9 +336,8 @@ impl ObligationForest { obligation, parent, self.nodes.len()); let obligation_tree_id = match parent { - Some(p) => { - let parent_node = &self.nodes[p.get()]; - parent_node.obligation_tree_id + Some(parent_index) => { + self.nodes[parent_index.index()].obligation_tree_id } None => self.obligation_tree_id_generator.next().unwrap() }; @@ -342,13 +361,11 @@ impl ObligationForest { } /// Converts all remaining obligations to the given error. - /// - /// This cannot be done during a snapshot. pub fn to_errors(&mut self, error: E) -> Vec> { let mut errors = vec![]; - for index in 0..self.nodes.len() { - if let NodeState::Pending = self.nodes[index].state.get() { - let backtrace = self.error_at(index); + for (i, node) in self.nodes.iter().enumerate() { + if let NodeState::Pending = node.state.get() { + let backtrace = self.error_at(i); errors.push(Error { error: error.clone(), backtrace, @@ -373,7 +390,6 @@ impl ObligationForest { fn insert_into_error_cache(&mut self, node_index: usize) { let node = &self.nodes[node_index]; - self.error_cache .entry(node.obligation_tree_id) .or_default() @@ -393,16 +409,22 @@ impl ObligationForest { let mut errors = vec![]; let mut stalled = true; - for index in 0..self.nodes.len() { - debug!("process_obligations: node {} == {:?}", index, self.nodes[index]); + for i in 0..self.nodes.len() { + let node = &mut self.nodes[i]; - let result = match self.nodes[index] { - Node { ref state, ref mut obligation, .. } if state.get() == NodeState::Pending => - processor.process_obligation(obligation), + debug!("process_obligations: node {} == {:?}", i, node); + + // `processor.process_obligation` can modify the predicate within + // `node.obligation`, and that predicate is the key used for + // `self.waiting_cache`. This means that `self.waiting_cache` can + // get out of sync with `nodes`. It's not very common, but it does + // happen, and code in `compress` has to allow for it. + let result = match node.state.get() { + NodeState::Pending => processor.process_obligation(&mut node.obligation), _ => continue }; - debug!("process_obligations: node {} got result {:?}", index, result); + debug!("process_obligations: node {} got result {:?}", i, result); match result { ProcessResult::Unchanged => { @@ -411,23 +433,23 @@ impl ObligationForest { ProcessResult::Changed(children) => { // We are not (yet) stalled. 
stalled = false; - self.nodes[index].state.set(NodeState::Success); + node.state.set(NodeState::Success); for child in children { let st = self.register_obligation_at( child, - Some(NodeIndex::new(index)) + Some(NodeIndex::new(i)) ); if let Err(()) = st { - // error already reported - propagate it + // Error already reported - propagate it // to our node. - self.error_at(index); + self.error_at(i); } } } ProcessResult::Error(err) => { stalled = false; - let backtrace = self.error_at(index); + let backtrace = self.error_at(i); errors.push(Error { error: err, backtrace, @@ -448,8 +470,6 @@ impl ObligationForest { self.mark_as_waiting(); self.process_cycles(processor); - - // Now we have to compress the result let completed = self.compress(do_completed); debug!("process_obligations: complete"); @@ -465,97 +485,92 @@ impl ObligationForest { /// report all cycles between them. This should be called /// after `mark_as_waiting` marks all nodes with pending /// subobligations as NodeState::Waiting. - fn process_cycles
<P>(&mut self, processor: &mut P) + fn process_cycles<P>
(&self, processor: &mut P) where P: ObligationProcessor { - let mut stack = self.scratch.take().unwrap(); + let mut stack = self.scratch.replace(vec![]); debug_assert!(stack.is_empty()); debug!("process_cycles()"); - for index in 0..self.nodes.len() { + for (i, node) in self.nodes.iter().enumerate() { // For rustc-benchmarks/inflate-0.1.0 this state test is extremely // hot and the state is almost always `Pending` or `Waiting`. It's // a win to handle the no-op cases immediately to avoid the cost of // the function call. - let state = self.nodes[index].state.get(); - match state { + match node.state.get() { NodeState::Waiting | NodeState::Pending | NodeState::Done | NodeState::Error => {}, - _ => self.find_cycles_from_node(&mut stack, processor, index), + _ => self.find_cycles_from_node(&mut stack, processor, i), } } debug!("process_cycles: complete"); debug_assert!(stack.is_empty()); - self.scratch = Some(stack); + self.scratch.replace(stack); } - fn find_cycles_from_node
<P>(&self, stack: &mut Vec<usize>, - processor: &mut P, index: usize) + fn find_cycles_from_node<P>
(&self, stack: &mut Vec, processor: &mut P, i: usize) where P: ObligationProcessor { - let node = &self.nodes[index]; - let state = node.state.get(); - match state { + let node = &self.nodes[i]; + match node.state.get() { NodeState::OnDfsStack => { - let index = - stack.iter().rposition(|n| *n == index).unwrap(); - processor.process_backedge(stack[index..].iter().map(GetObligation(&self.nodes)), + let i = stack.iter().rposition(|n| *n == i).unwrap(); + processor.process_backedge(stack[i..].iter().map(GetObligation(&self.nodes)), PhantomData); } NodeState::Success => { node.state.set(NodeState::OnDfsStack); - stack.push(index); - for dependent in node.parent.iter().chain(node.dependents.iter()) { - self.find_cycles_from_node(stack, processor, dependent.get()); + stack.push(i); + for index in node.parent.iter().chain(node.dependents.iter()) { + self.find_cycles_from_node(stack, processor, index.index()); } stack.pop(); node.state.set(NodeState::Done); }, NodeState::Waiting | NodeState::Pending => { - // this node is still reachable from some pending node. We + // This node is still reachable from some pending node. We // will get to it when they are all processed. } NodeState::Done | NodeState::Error => { - // already processed that node + // Already processed that node. } }; } /// Returns a vector of obligations for `p` and all of its /// ancestors, putting them into the error state in the process. - fn error_at(&mut self, p: usize) -> Vec { - let mut error_stack = self.scratch.take().unwrap(); + fn error_at(&self, mut i: usize) -> Vec { + let mut error_stack = self.scratch.replace(vec![]); let mut trace = vec![]; - let mut n = p; loop { - self.nodes[n].state.set(NodeState::Error); - trace.push(self.nodes[n].obligation.clone()); - error_stack.extend(self.nodes[n].dependents.iter().map(|x| x.get())); + let node = &self.nodes[i]; + node.state.set(NodeState::Error); + trace.push(node.obligation.clone()); + error_stack.extend(node.dependents.iter().map(|index| index.index())); - // loop to the parent - match self.nodes[n].parent { - Some(q) => n = q.get(), + // Loop to the parent. + match node.parent { + Some(parent_index) => i = parent_index.index(), None => break } } while let Some(i) = error_stack.pop() { - match self.nodes[i].state.get() { + let node = &self.nodes[i]; + match node.state.get() { NodeState::Error => continue, - _ => self.nodes[i].state.set(NodeState::Error), + _ => node.state.set(NodeState::Error), } - let node = &self.nodes[i]; - error_stack.extend( - node.parent.iter().chain(node.dependents.iter()).map(|x| x.get()) + node.parent.iter().chain(node.dependents.iter()).map(|index| index.index()) ); } - self.scratch = Some(error_stack); + self.scratch.replace(error_stack); trace } @@ -563,7 +578,7 @@ impl ObligationForest { #[inline(always)] fn inlined_mark_neighbors_as_waiting_from(&self, node: &Node) { for dependent in node.parent.iter().chain(node.dependents.iter()) { - self.mark_as_waiting_from(&self.nodes[dependent.get()]); + self.mark_as_waiting_from(&self.nodes[dependent.index()]); } } @@ -609,7 +624,7 @@ impl ObligationForest { #[inline(never)] fn compress(&mut self, do_completed: DoCompleted) -> Option> { let nodes_len = self.nodes.len(); - let mut node_rewrites: Vec<_> = self.scratch.take().unwrap(); + let mut node_rewrites: Vec<_> = self.scratch.replace(vec![]); node_rewrites.extend(0..nodes_len); let mut dead_nodes = 0; @@ -620,7 +635,8 @@ impl ObligationForest { // self.nodes[i - dead_nodes..i] are all dead // self.nodes[i..] 
are unchanged for i in 0..self.nodes.len() { - match self.nodes[i].state.get() { + let node = &self.nodes[i]; + match node.state.get() { NodeState::Pending | NodeState::Waiting => { if dead_nodes > 0 { self.nodes.swap(i, i - dead_nodes); @@ -628,13 +644,16 @@ impl ObligationForest { } } NodeState::Done => { - // Avoid cloning the key (predicate) in case it exists in the waiting cache + // This lookup can fail because the contents of + // `self.waiting_cache` is not guaranteed to match those of + // `self.nodes`. See the comment in `process_obligation` + // for more details. if let Some((predicate, _)) = self.waiting_cache - .remove_entry(self.nodes[i].obligation.as_predicate()) + .remove_entry(node.obligation.as_predicate()) { self.done_cache.insert(predicate); } else { - self.done_cache.insert(self.nodes[i].obligation.as_predicate().clone()); + self.done_cache.insert(node.obligation.as_predicate().clone()); } node_rewrites[i] = nodes_len; dead_nodes += 1; @@ -643,7 +662,7 @@ impl ObligationForest { // We *intentionally* remove the node from the cache at this point. Otherwise // tests must come up with a different type on every type error they // check against. - self.waiting_cache.remove(self.nodes[i].obligation.as_predicate()); + self.waiting_cache.remove(node.obligation.as_predicate()); node_rewrites[i] = nodes_len; dead_nodes += 1; self.insert_into_error_cache(i); @@ -655,12 +674,11 @@ impl ObligationForest { // No compression needed. if dead_nodes == 0 { node_rewrites.truncate(0); - self.scratch = Some(node_rewrites); + self.scratch.replace(node_rewrites); return if do_completed == DoCompleted::Yes { Some(vec![]) } else { None }; } - // Pop off all the nodes we killed and extract the success - // stories. + // Pop off all the nodes we killed and extract the success stories. let successful = if do_completed == DoCompleted::Yes { Some((0..dead_nodes) .map(|_| self.nodes.pop().unwrap()) @@ -679,7 +697,7 @@ impl ObligationForest { self.apply_rewrites(&node_rewrites); node_rewrites.truncate(0); - self.scratch = Some(node_rewrites); + self.scratch.replace(node_rewrites); successful } @@ -689,58 +707,41 @@ impl ObligationForest { for node in &mut self.nodes { if let Some(index) = node.parent { - let new_index = node_rewrites[index.get()]; - if new_index >= nodes_len { - // parent dead due to error + let new_i = node_rewrites[index.index()]; + if new_i >= nodes_len { node.parent = None; } else { - node.parent = Some(NodeIndex::new(new_index)); + node.parent = Some(NodeIndex::new(new_i)); } } let mut i = 0; while i < node.dependents.len() { - let new_index = node_rewrites[node.dependents[i].get()]; - if new_index >= nodes_len { + let new_i = node_rewrites[node.dependents[i].index()]; + if new_i >= nodes_len { node.dependents.swap_remove(i); } else { - node.dependents[i] = NodeIndex::new(new_index); + node.dependents[i] = NodeIndex::new(new_i); i += 1; } } } - let mut kill_list = vec![]; - for (predicate, index) in &mut self.waiting_cache { - let new_index = node_rewrites[index.get()]; - if new_index >= nodes_len { - kill_list.push(predicate.clone()); + // This updating of `self.waiting_cache` is necessary because the + // removal of nodes within `compress` can fail. See above. 
+ self.waiting_cache.retain(|_predicate, index| { + let new_i = node_rewrites[index.index()]; + if new_i >= nodes_len { + false } else { - *index = NodeIndex::new(new_index); + *index = NodeIndex::new(new_i); + true } - } - - for predicate in kill_list { self.waiting_cache.remove(&predicate); } - } -} - -impl Node { - fn new( - parent: Option, - obligation: O, - obligation_tree_id: ObligationTreeId - ) -> Node { - Node { - obligation, - state: Cell::new(NodeState::Pending), - parent, - dependents: vec![], - obligation_tree_id, - } + }); } } -// I need a Clone closure +// I need a Clone closure. #[derive(Clone)] struct GetObligation<'a, O>(&'a [Node]); diff --git a/src/librustc_data_structures/obligation_forest/node_index.rs b/src/librustc_data_structures/obligation_forest/node_index.rs deleted file mode 100644 index 69ea473e05461..0000000000000 --- a/src/librustc_data_structures/obligation_forest/node_index.rs +++ /dev/null @@ -1,20 +0,0 @@ -use std::num::NonZeroU32; -use std::u32; - -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct NodeIndex { - index: NonZeroU32, -} - -impl NodeIndex { - #[inline] - pub fn new(value: usize) -> NodeIndex { - assert!(value < (u32::MAX as usize)); - NodeIndex { index: NonZeroU32::new((value as u32) + 1).unwrap() } - } - - #[inline] - pub fn get(self) -> usize { - (self.index.get() - 1) as usize - } -} diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs index efa18587b7ddb..26a89b4e7a8d1 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs @@ -13,7 +13,7 @@ use rustc::infer::NLLRegionVariableOrigin; use rustc::mir::{ConstraintCategory, Location, Body}; use rustc::ty::{self, RegionVid}; use rustc_data_structures::indexed_vec::IndexVec; -use rustc_errors::{Diagnostic, DiagnosticBuilder}; +use rustc_errors::DiagnosticBuilder; use std::collections::VecDeque; use syntax::errors::Applicability; use syntax::symbol::kw; @@ -22,7 +22,7 @@ use syntax_pos::Span; mod region_name; mod var_name; -crate use self::region_name::{RegionName, RegionNameSource}; +crate use self::region_name::{RegionName, RegionNameSource, RegionErrorNamingCtx}; impl ConstraintDescription for ConstraintCategory { fn description(&self) -> &'static str { @@ -54,6 +54,39 @@ enum Trace { NotVisited, } +/// Various pieces of state used when reporting borrow checker errors. +pub struct ErrorReportingCtx<'a, 'b, 'tcx> { + /// The region inference context used for borrow chekcing this MIR body. + #[allow(dead_code)] // FIXME(mark-i-m): used by outlives suggestions + region_infcx: &'b RegionInferenceContext<'tcx>, + + /// The inference context used for type checking. + infcx: &'b InferCtxt<'a, 'tcx>, + + /// The MIR def we are reporting errors on. + mir_def_id: DefId, + + /// The MIR body we are reporting errors on (for convenience). + body: &'b Body<'tcx>, + + /// Any upvars for the MIR body we have kept track of during borrow checking. + upvars: &'b [Upvar], +} + +/// Information about the various region constraints involved in a borrow checker error. 
+#[derive(Clone, Debug)] +pub struct ErrorConstraintInfo { + // fr: outlived_fr + fr: RegionVid, + fr_is_local: bool, + outlived_fr: RegionVid, + outlived_fr_is_local: bool, + + // Category and span for best blame constraint + category: ConstraintCategory, + span: Span, +} + impl<'tcx> RegionInferenceContext<'tcx> { /// Tries to find the best constraint to blame for the fact that /// `R: from_region`, where `R` is some region that meets @@ -257,16 +290,16 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// ``` /// /// Here we would be invoked with `fr = 'a` and `outlived_fr = `'b`. - pub(super) fn report_error( - &self, + pub(super) fn report_error<'a>( + &'a self, body: &Body<'tcx>, upvars: &[Upvar], - infcx: &InferCtxt<'_, 'tcx>, + infcx: &'a InferCtxt<'a, 'tcx>, mir_def_id: DefId, fr: RegionVid, outlived_fr: RegionVid, - errors_buffer: &mut Vec, - ) { + renctx: &mut RegionErrorNamingCtx, + ) -> DiagnosticBuilder<'a> { debug!("report_error(fr={:?}, outlived_fr={:?})", fr, outlived_fr); let (category, _, span) = self.best_blame_constraint(body, fr, |r| { @@ -279,8 +312,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { let tables = infcx.tcx.typeck_tables_of(mir_def_id); let nice = NiceRegionError::new_from_span(infcx, span, o, f, Some(tables)); if let Some(diag) = nice.try_report_from_nll() { - diag.buffer(errors_buffer); - return; + return diag; } } @@ -293,45 +325,28 @@ impl<'tcx> RegionInferenceContext<'tcx> { "report_error: fr_is_local={:?} outlived_fr_is_local={:?} category={:?}", fr_is_local, outlived_fr_is_local, category ); + + let errctx = ErrorReportingCtx { + region_infcx: self, + infcx, + mir_def_id, + body, + upvars, + }; + + let errci = ErrorConstraintInfo { + fr, outlived_fr, fr_is_local, outlived_fr_is_local, category, span + }; + match (category, fr_is_local, outlived_fr_is_local) { (ConstraintCategory::Return, true, false) if self.is_closure_fn_mut(infcx, fr) => { - self.report_fnmut_error( - body, - upvars, - infcx, - mir_def_id, - fr, - outlived_fr, - span, - errors_buffer, - ) + self.report_fnmut_error(&errctx, &errci, renctx) } (ConstraintCategory::Assignment, true, false) - | (ConstraintCategory::CallArgument, true, false) => self.report_escaping_data_error( - body, - upvars, - infcx, - mir_def_id, - fr, - outlived_fr, - category, - span, - errors_buffer, - ), - _ => self.report_general_error( - body, - upvars, - infcx, - mir_def_id, - fr, - fr_is_local, - outlived_fr, - outlived_fr_is_local, - category, - span, - errors_buffer, - ), - }; + | (ConstraintCategory::CallArgument, true, false) => + self.report_escaping_data_error(&errctx, &errci, renctx), + _ => self.report_general_error(&errctx, &errci, renctx), + } } /// We have a constraint `fr1: fr2` that is not satisfied, where @@ -379,19 +394,19 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// ``` fn report_fnmut_error( &self, - body: &Body<'tcx>, - upvars: &[Upvar], - infcx: &InferCtxt<'_, 'tcx>, - mir_def_id: DefId, - _fr: RegionVid, - outlived_fr: RegionVid, - span: Span, - errors_buffer: &mut Vec, - ) { - let mut diag = infcx + errctx: &ErrorReportingCtx<'_, '_, 'tcx>, + errci: &ErrorConstraintInfo, + renctx: &mut RegionErrorNamingCtx, + ) -> DiagnosticBuilder<'_> { + let ErrorConstraintInfo { + outlived_fr, span, .. 
+ } = errci; + + let mut diag = errctx + .infcx .tcx .sess - .struct_span_err(span, "captured variable cannot escape `FnMut` closure body"); + .struct_span_err(*span, "captured variable cannot escape `FnMut` closure body"); // We should check if the return type of this closure is in fact a closure - in that // case, we can special case the error further. @@ -403,11 +418,9 @@ impl<'tcx> RegionInferenceContext<'tcx> { "returns a reference to a captured variable which escapes the closure body" }; - diag.span_label(span, message); + diag.span_label(*span, message); - match self.give_region_a_name(infcx, body, upvars, mir_def_id, outlived_fr, &mut 1) - .unwrap().source - { + match self.give_region_a_name(errctx, renctx, *outlived_fr).unwrap().source { RegionNameSource::NamedEarlyBoundRegion(fr_span) | RegionNameSource::NamedFreeRegion(fr_span) | RegionNameSource::SynthesizedFreeEnvRegion(fr_span, _) @@ -427,7 +440,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { ); diag.note("...therefore, they cannot allow references to captured variables to escape"); - diag.buffer(errors_buffer); + diag } /// Reports a error specifically for when data is escaping a closure. @@ -444,20 +457,22 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// ``` fn report_escaping_data_error( &self, - body: &Body<'tcx>, - upvars: &[Upvar], - infcx: &InferCtxt<'_, 'tcx>, - mir_def_id: DefId, - fr: RegionVid, - outlived_fr: RegionVid, - category: ConstraintCategory, - span: Span, - errors_buffer: &mut Vec, - ) { + errctx: &ErrorReportingCtx<'_, '_, 'tcx>, + errci: &ErrorConstraintInfo, + renctx: &mut RegionErrorNamingCtx, + ) -> DiagnosticBuilder<'_> { + let ErrorReportingCtx { + infcx, body, upvars, .. + } = errctx; + + let ErrorConstraintInfo { + span, category, .. + } = errci; + let fr_name_and_span = - self.get_var_name_and_span_for_region(infcx.tcx, body, upvars, fr); + self.get_var_name_and_span_for_region(infcx.tcx, body, upvars, errci.fr); let outlived_fr_name_and_span = - self.get_var_name_and_span_for_region(infcx.tcx, body, upvars, outlived_fr); + self.get_var_name_and_span_for_region(infcx.tcx, body, upvars, errci.outlived_fr); let escapes_from = match self.universal_regions.defining_ty { DefiningTy::Closure(..) => "closure", @@ -469,27 +484,23 @@ impl<'tcx> RegionInferenceContext<'tcx> { // Revert to the normal error in these cases. // Assignments aren't "escapes" in function items. if (fr_name_and_span.is_none() && outlived_fr_name_and_span.is_none()) - || (category == ConstraintCategory::Assignment && escapes_from == "function") + || (*category == ConstraintCategory::Assignment && escapes_from == "function") || escapes_from == "const" { return self.report_general_error( - body, - upvars, - infcx, - mir_def_id, - fr, - true, - outlived_fr, - false, - category, - span, - errors_buffer, + errctx, + &ErrorConstraintInfo { + fr_is_local: true, + outlived_fr_is_local: false, + .. 
*errci + }, + renctx, ); } let mut diag = borrowck_errors::borrowed_data_escapes_closure( infcx.tcx, - span, + *span, escapes_from, ); @@ -513,12 +524,12 @@ impl<'tcx> RegionInferenceContext<'tcx> { ); diag.span_label( - span, + *span, format!("`{}` escapes the {} body here", fr_name, escapes_from), ); } - diag.buffer(errors_buffer); + diag } /// Reports a region inference error for the general case with named/synthesized lifetimes to @@ -538,41 +549,37 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// ``` fn report_general_error( &self, - body: &Body<'tcx>, - upvars: &[Upvar], - infcx: &InferCtxt<'_, 'tcx>, - mir_def_id: DefId, - fr: RegionVid, - fr_is_local: bool, - outlived_fr: RegionVid, - outlived_fr_is_local: bool, - category: ConstraintCategory, - span: Span, - errors_buffer: &mut Vec, - ) { + errctx: &ErrorReportingCtx<'_, '_, 'tcx>, + errci: &ErrorConstraintInfo, + renctx: &mut RegionErrorNamingCtx, + ) -> DiagnosticBuilder<'_> { + let ErrorReportingCtx { + infcx, mir_def_id, .. + } = errctx; + let ErrorConstraintInfo { + fr, fr_is_local, outlived_fr, outlived_fr_is_local, span, category, .. + } = errci; + let mut diag = infcx.tcx.sess.struct_span_err( - span, + *span, "lifetime may not live long enough" ); - let counter = &mut 1; - let fr_name = self.give_region_a_name( - infcx, body, upvars, mir_def_id, fr, counter).unwrap(); - fr_name.highlight_region_name(&mut diag); - let outlived_fr_name = - self.give_region_a_name(infcx, body, upvars, mir_def_id, outlived_fr, counter).unwrap(); - outlived_fr_name.highlight_region_name(&mut diag); - - let mir_def_name = if infcx.tcx.is_closure(mir_def_id) { + let mir_def_name = if infcx.tcx.is_closure(*mir_def_id) { "closure" } else { "function" }; + let fr_name = self.give_region_a_name(errctx, renctx, *fr).unwrap(); + fr_name.highlight_region_name(&mut diag); + let outlived_fr_name = self.give_region_a_name(errctx, renctx, *outlived_fr).unwrap(); + outlived_fr_name.highlight_region_name(&mut diag); + match (category, outlived_fr_is_local, fr_is_local) { (ConstraintCategory::Return, true, _) => { diag.span_label( - span, + *span, format!( "{} was supposed to return data with lifetime `{}` but it is returning \ data with lifetime `{}`", @@ -582,7 +589,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } _ => { diag.span_label( - span, + *span, format!( "{}requires that `{}` must outlive `{}`", category.description(), @@ -593,9 +600,9 @@ impl<'tcx> RegionInferenceContext<'tcx> { } } - self.add_static_impl_trait_suggestion(infcx, &mut diag, fr, fr_name, outlived_fr); + self.add_static_impl_trait_suggestion(infcx, &mut diag, *fr, fr_name, *outlived_fr); - diag.buffer(errors_buffer); + diag } /// Adds a suggestion to errors where a `impl Trait` is returned. 
@@ -704,8 +711,14 @@ impl<'tcx> RegionInferenceContext<'tcx> { borrow_region, |r| self.provides_universal_region(r, borrow_region, outlived_region) ); - let outlived_fr_name = - self.give_region_a_name(infcx, body, upvars, mir_def_id, outlived_region, &mut 1); + + let mut renctx = RegionErrorNamingCtx::new(); + let errctx = ErrorReportingCtx { + infcx, body, upvars, mir_def_id, + region_infcx: self, + }; + let outlived_fr_name = self.give_region_a_name(&errctx, &mut renctx, outlived_region); + (category, from_closure, span, outlived_fr_name) } diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs index 75a31628a54b6..6fa94269107f5 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs @@ -1,5 +1,9 @@ use std::fmt::{self, Display}; -use crate::borrow_check::nll::region_infer::RegionInferenceContext; + +use crate::borrow_check::nll::region_infer::{ + RegionInferenceContext, + error_reporting::ErrorReportingCtx, +}; use crate::borrow_check::nll::universal_regions::DefiningTy; use crate::borrow_check::nll::ToRegionVid; use crate::borrow_check::Upvar; @@ -13,29 +17,75 @@ use rustc::ty::{self, RegionKind, RegionVid, Ty, TyCtxt}; use rustc::ty::print::RegionHighlightMode; use rustc_errors::DiagnosticBuilder; use syntax::symbol::kw; -use syntax_pos::Span; -use syntax_pos::symbol::InternedString; +use rustc_data_structures::fx::FxHashMap; +use syntax_pos::{Span, symbol::InternedString}; -#[derive(Debug)] +/// A name for a particular region used in emitting diagnostics. This name could be a generated +/// name like `'1`, a name used by the user like `'a`, or a name like `'static`. +#[derive(Debug, Clone)] crate struct RegionName { + /// The name of the region (interned). crate name: InternedString, + /// Where the region comes from. crate source: RegionNameSource, } -#[derive(Debug)] +/// Denotes the source of a region that is named by a `RegionName`. For example, a free region that +/// was named by the user would get `NamedFreeRegion` and `'static` lifetime would get `Static`. +/// This helps to print the right kinds of diagnostics. +#[derive(Debug, Clone)] crate enum RegionNameSource { + /// A bound (not free) region that was substituted at the def site (not an HRTB). NamedEarlyBoundRegion(Span), + /// A free region that the user has a name (`'a`) for. NamedFreeRegion(Span), + /// The `'static` region. Static, + /// The free region corresponding to the environment of a closure. SynthesizedFreeEnvRegion(Span, String), + /// The region name corresponds to a region where the type annotation is completely missing + /// from the code, e.g. in a closure arguments `|x| { ... }`, where `x` is a reference. CannotMatchHirTy(Span, String), + /// The region name corresponds a reference that was found by traversing the type in the HIR. MatchedHirTy(Span), + /// A region name from the generics list of a struct/enum/union. MatchedAdtAndSegment(Span), + /// The region corresponding to a closure upvar. AnonRegionFromUpvar(Span, String), + /// The region corresponding to the return type of a closure. AnonRegionFromOutput(Span, String, String), AnonRegionFromYieldTy(Span, String), } +/// Records region names that have been assigned before so that we can use the same ones in later +/// diagnostics. 
+#[derive(Debug, Clone)] +crate struct RegionErrorNamingCtx { + /// Record the region names generated for each region in the given + /// MIR def so that we can reuse them later in help/error messages. + renctx: FxHashMap, + + /// The counter for generating new region names. + counter: usize, +} + +impl RegionErrorNamingCtx { + crate fn new() -> Self { + Self { + counter: 1, + renctx: FxHashMap::default(), + } + } + + crate fn get(&self, region: &RegionVid) -> Option<&RegionName> { + self.renctx.get(region) + } + + crate fn insert(&mut self, region: RegionVid, name: RegionName) { + self.renctx.insert(region, name); + } +} + impl RegionName { #[allow(dead_code)] crate fn was_named(&self) -> bool { @@ -63,43 +113,40 @@ impl RegionName { self.name } - crate fn highlight_region_name( - &self, - diag: &mut DiagnosticBuilder<'_> - ) { + crate fn highlight_region_name(&self, diag: &mut DiagnosticBuilder<'_>) { match &self.source { - RegionNameSource::NamedFreeRegion(span) | - RegionNameSource::NamedEarlyBoundRegion(span) => { - diag.span_label( - *span, - format!("lifetime `{}` defined here", self), - ); - }, + RegionNameSource::NamedFreeRegion(span) + | RegionNameSource::NamedEarlyBoundRegion(span) => { + diag.span_label(*span, format!("lifetime `{}` defined here", self)); + } RegionNameSource::SynthesizedFreeEnvRegion(span, note) => { diag.span_label( *span, format!("lifetime `{}` represents this closure's body", self), ); diag.note(¬e); - }, + } RegionNameSource::CannotMatchHirTy(span, type_name) => { diag.span_label(*span, format!("has type `{}`", type_name)); - }, + } RegionNameSource::MatchedHirTy(span) => { diag.span_label( *span, format!("let's call the lifetime of this reference `{}`", self), ); - }, + } RegionNameSource::MatchedAdtAndSegment(span) => { diag.span_label(*span, format!("let's call this `{}`", self)); - }, + } RegionNameSource::AnonRegionFromUpvar(span, upvar_name) => { diag.span_label( *span, - format!("lifetime `{}` appears in the type of `{}`", self, upvar_name), + format!( + "lifetime `{}` appears in the type of `{}`", + self, upvar_name + ), ); - }, + } RegionNameSource::AnonRegionFromOutput(span, mir_description, type_name) => { diag.span_label( *span, @@ -151,39 +198,49 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// and then return the name `'1` for us to use. crate fn give_region_a_name( &self, - infcx: &InferCtxt<'_, 'tcx>, - body: &Body<'tcx>, - upvars: &[Upvar], - mir_def_id: DefId, + errctx: &ErrorReportingCtx<'_, '_, 'tcx>, + renctx: &mut RegionErrorNamingCtx, fr: RegionVid, - counter: &mut usize, ) -> Option { - debug!("give_region_a_name(fr={:?}, counter={})", fr, counter); + let ErrorReportingCtx { + infcx, body, mir_def_id, upvars, .. 
+ } = errctx; + + debug!("give_region_a_name(fr={:?}, counter={:?})", fr, renctx.counter); assert!(self.universal_regions.is_universal_region(fr)); - let value = self.give_name_from_error_region(infcx.tcx, mir_def_id, fr, counter) + if let Some(value) = renctx.get(&fr) { + return Some(value.clone()); + } + + let value = self + .give_name_from_error_region(infcx.tcx, *mir_def_id, fr, renctx) .or_else(|| { self.give_name_if_anonymous_region_appears_in_arguments( - infcx, body, mir_def_id, fr, counter, + infcx, body, *mir_def_id, fr, renctx, ) }) .or_else(|| { self.give_name_if_anonymous_region_appears_in_upvars( - infcx.tcx, upvars, fr, counter, + infcx.tcx, upvars, fr, renctx ) }) .or_else(|| { self.give_name_if_anonymous_region_appears_in_output( - infcx, body, mir_def_id, fr, counter, + infcx, body, *mir_def_id, fr, renctx, ) }) .or_else(|| { self.give_name_if_anonymous_region_appears_in_yield_ty( - infcx, body, mir_def_id, fr, counter, + infcx, body, *mir_def_id, fr, renctx, ) }); + if let Some(ref value) = value { + renctx.insert(fr, value.clone()); + } + debug!("give_region_a_name: gave name {:?}", value); value } @@ -197,7 +254,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { tcx: TyCtxt<'tcx>, mir_def_id: DefId, fr: RegionVid, - counter: &mut usize, + renctx: &mut RegionErrorNamingCtx, ) -> Option { let error_region = self.to_error_region(fr)?; @@ -208,7 +265,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { let span = self.get_named_span(tcx, error_region, ebr.name); Some(RegionName { name: ebr.name, - source: RegionNameSource::NamedEarlyBoundRegion(span) + source: RegionNameSource::NamedEarlyBoundRegion(span), }) } else { None @@ -227,12 +284,10 @@ impl<'tcx> RegionInferenceContext<'tcx> { name, source: RegionNameSource::NamedFreeRegion(span), }) - }, + } ty::BoundRegion::BrEnv => { - let mir_hir_id = tcx.hir() - .as_local_hir_id(mir_def_id) - .expect("non-local mir"); + let mir_hir_id = tcx.hir().as_local_hir_id(mir_def_id).expect("non-local mir"); let def_ty = self.universal_regions.defining_ty; if let DefiningTy::Closure(def_id, substs) = def_ty { @@ -243,7 +298,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } else { bug!("Closure is not defined by a closure expr"); }; - let region_name = self.synthesize_region_name(counter); + let region_name = self.synthesize_region_name(renctx); let closure_kind_ty = substs.closure_kind_ty(def_id, tcx); let note = match closure_kind_ty.to_opt_closure_kind() { @@ -265,7 +320,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { name: region_name, source: RegionNameSource::SynthesizedFreeEnvRegion( args_span, - note.to_string() + note.to_string(), ), }) } else { @@ -335,7 +390,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { body: &Body<'tcx>, mir_def_id: DefId, fr: RegionVid, - counter: &mut usize, + renctx: &mut RegionErrorNamingCtx, ) -> Option { let implicit_inputs = self.universal_regions.defining_ty.implicit_inputs(); let argument_index = self.get_argument_index_for_region(infcx.tcx, fr)?; @@ -349,12 +404,12 @@ impl<'tcx> RegionInferenceContext<'tcx> { fr, arg_ty, argument_index, - counter, + renctx, ) { return Some(region_name); } - self.give_name_if_we_cannot_match_hir_ty(infcx, body, fr, arg_ty, counter) + self.give_name_if_we_cannot_match_hir_ty(infcx, body, fr, arg_ty, renctx) } fn give_name_if_we_can_match_hir_ty_from_argument( @@ -365,7 +420,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { needle_fr: RegionVid, argument_ty: Ty<'tcx>, argument_index: usize, - counter: &mut usize, + renctx: &mut RegionErrorNamingCtx, ) -> Option { let mir_hir_id = 
infcx.tcx.hir().as_local_hir_id(mir_def_id)?; let fn_decl = infcx.tcx.hir().fn_decl_by_hir_id(mir_hir_id)?; @@ -379,7 +434,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { body, needle_fr, argument_ty, - counter, + renctx, ), _ => self.give_name_if_we_can_match_hir_ty( @@ -387,7 +442,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { needle_fr, argument_ty, argument_hir_ty, - counter, + renctx, ), } } @@ -409,10 +464,11 @@ impl<'tcx> RegionInferenceContext<'tcx> { body: &Body<'tcx>, needle_fr: RegionVid, argument_ty: Ty<'tcx>, - counter: &mut usize, + renctx: &mut RegionErrorNamingCtx, ) -> Option { + let counter = renctx.counter; let mut highlight = RegionHighlightMode::default(); - highlight.highlighting_region_vid(needle_fr, *counter); + highlight.highlighting_region_vid(needle_fr, counter); let type_name = infcx.extract_type_name(&argument_ty, Some(highlight)).0; debug!( @@ -428,7 +484,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { // This counter value will already have been used, so this function will increment // it so the next value will be used next and return the region name that would // have been used. - name: self.synthesize_region_name(counter), + name: self.synthesize_region_name(renctx), source: RegionNameSource::CannotMatchHirTy(span, type_name), }) } else { @@ -455,7 +511,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// type. Once we find that, we can use the span of the `hir::Ty` /// to add the highlight. /// - /// This is a somewhat imperfect process, so long the way we also + /// This is a somewhat imperfect process, so along the way we also /// keep track of the **closest** type we've found. If we fail to /// find the exact `&` or `'_` to highlight, then we may fall back /// to highlighting that closest type instead. @@ -465,7 +521,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { needle_fr: RegionVid, argument_ty: Ty<'tcx>, argument_hir_ty: &hir::Ty, - counter: &mut usize, + renctx: &mut RegionErrorNamingCtx, ) -> Option { let search_stack: &mut Vec<(Ty<'tcx>, &hir::Ty)> = &mut vec![(argument_ty, argument_hir_ty)]; @@ -483,7 +539,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { hir::TyKind::Rptr(_lifetime, referent_hir_ty), ) => { if region.to_region_vid() == needle_fr { - let region_name = self.synthesize_region_name(counter); + let region_name = self.synthesize_region_name(renctx); // Just grab the first character, the `&`. let source_map = tcx.sess.source_map(); @@ -515,7 +571,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { substs, needle_fr, last_segment, - counter, + renctx, search_stack, ) { return Some(name); @@ -559,18 +615,19 @@ impl<'tcx> RegionInferenceContext<'tcx> { substs: SubstsRef<'tcx>, needle_fr: RegionVid, last_segment: &'hir hir::PathSegment, - counter: &mut usize, + renctx: &mut RegionErrorNamingCtx, search_stack: &mut Vec<(Ty<'tcx>, &'hir hir::Ty)>, ) -> Option { // Did the user give explicit arguments? 
(e.g., `Foo<..>`) let args = last_segment.args.as_ref()?; - let lifetime = self.try_match_adt_and_generic_args(substs, needle_fr, args, search_stack)?; + let lifetime = + self.try_match_adt_and_generic_args(substs, needle_fr, args, search_stack)?; match lifetime.name { hir::LifetimeName::Param(_) | hir::LifetimeName::Error | hir::LifetimeName::Static | hir::LifetimeName::Underscore => { - let region_name = self.synthesize_region_name(counter); + let region_name = self.synthesize_region_name(renctx); let ampersand_span = lifetime.span; Some(RegionName { name: region_name, @@ -657,12 +714,12 @@ impl<'tcx> RegionInferenceContext<'tcx> { tcx: TyCtxt<'tcx>, upvars: &[Upvar], fr: RegionVid, - counter: &mut usize, + renctx: &mut RegionErrorNamingCtx, ) -> Option { let upvar_index = self.get_upvar_index_for_region(tcx, fr)?; let (upvar_name, upvar_span) = self.get_upvar_name_and_span_for_region(tcx, upvars, upvar_index); - let region_name = self.synthesize_region_name(counter); + let region_name = self.synthesize_region_name(renctx); Some(RegionName { name: region_name, @@ -680,7 +737,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { body: &Body<'tcx>, mir_def_id: DefId, fr: RegionVid, - counter: &mut usize, + renctx: &mut RegionErrorNamingCtx, ) -> Option { let tcx = infcx.tcx; @@ -694,7 +751,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } let mut highlight = RegionHighlightMode::default(); - highlight.highlighting_region_vid(fr, *counter); + highlight.highlighting_region_vid(fr, renctx.counter); let type_name = infcx.extract_type_name(&return_ty, Some(highlight)).0; let mir_hir_id = tcx.hir().as_local_hir_id(mir_def_id).expect("non-local mir"); @@ -725,11 +782,11 @@ impl<'tcx> RegionInferenceContext<'tcx> { // This counter value will already have been used, so this function will increment it // so the next value will be used next and return the region name that would have been // used. - name: self.synthesize_region_name(counter), + name: self.synthesize_region_name(renctx), source: RegionNameSource::AnonRegionFromOutput( return_span, mir_description.to_string(), - type_name + type_name, ), }) } @@ -740,7 +797,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { body: &Body<'tcx>, mir_def_id: DefId, fr: RegionVid, - counter: &mut usize, + renctx: &mut RegionErrorNamingCtx, ) -> Option { // Note: generators from `async fn` yield `()`, so we don't have to // worry about them here. @@ -757,7 +814,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } let mut highlight = RegionHighlightMode::default(); - highlight.highlighting_region_vid(fr, *counter); + highlight.highlighting_region_vid(fr, renctx.counter); let type_name = infcx.extract_type_name(&yield_ty, Some(highlight)).0; let mir_hir_id = tcx.hir().as_local_hir_id(mir_def_id).expect("non-local mir"); @@ -780,16 +837,15 @@ impl<'tcx> RegionInferenceContext<'tcx> { ); Some(RegionName { - name: self.synthesize_region_name(counter), + name: self.synthesize_region_name(renctx), source: RegionNameSource::AnonRegionFromYieldTy(yield_span, type_name), }) } - /// Creates a synthetic region named `'1`, incrementing the - /// counter. - fn synthesize_region_name(&self, counter: &mut usize) -> InternedString { - let c = *counter; - *counter += 1; + /// Creates a synthetic region named `'1`, incrementing the counter. 
+ fn synthesize_region_name(&self, renctx: &mut RegionErrorNamingCtx) -> InternedString { + let c = renctx.counter; + renctx.counter += 1; InternedString::intern(&format!("'{:?}", c)) } diff --git a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs index 40388722bcac9..78e7943598d68 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs @@ -1,15 +1,20 @@ -use super::universal_regions::UniversalRegions; -use crate::borrow_check::nll::constraints::graph::NormalConstraintGraph; -use crate::borrow_check::nll::constraints::{ - ConstraintSccIndex, OutlivesConstraint, OutlivesConstraintSet, -}; -use crate::borrow_check::nll::member_constraints::{MemberConstraintSet, NllMemberConstraintIndex}; -use crate::borrow_check::nll::region_infer::values::{ - PlaceholderIndices, RegionElement, ToElementIndex, +use std::rc::Rc; + +use crate::borrow_check::nll::{ + constraints::{ + graph::NormalConstraintGraph, + ConstraintSccIndex, + OutlivesConstraint, + OutlivesConstraintSet, + }, + member_constraints::{MemberConstraintSet, NllMemberConstraintIndex}, + region_infer::values::{ + PlaceholderIndices, RegionElement, ToElementIndex + }, + type_check::{free_region_relations::UniversalRegionRelations, Locations}, }; -use crate::borrow_check::nll::type_check::free_region_relations::UniversalRegionRelations; -use crate::borrow_check::nll::type_check::Locations; use crate::borrow_check::Upvar; + use rustc::hir::def_id::DefId; use rustc::infer::canonical::QueryOutlivesConstraint; use rustc::infer::opaque_types; @@ -31,16 +36,16 @@ use rustc_data_structures::indexed_vec::IndexVec; use rustc_errors::{Diagnostic, DiagnosticBuilder}; use syntax_pos::Span; -use std::rc::Rc; +crate use self::error_reporting::{RegionName, RegionNameSource, RegionErrorNamingCtx}; +use self::values::{LivenessValues, RegionValueElements, RegionValues}; +use super::universal_regions::UniversalRegions; +use super::ToRegionVid; mod dump_mir; mod error_reporting; -crate use self::error_reporting::{RegionName, RegionNameSource}; mod graphviz; -pub mod values; -use self::values::{LivenessValues, RegionValueElements, RegionValues}; -use super::ToRegionVid; +pub mod values; pub struct RegionInferenceContext<'tcx> { /// Contains the definition for every region variable. Region @@ -487,6 +492,12 @@ impl<'tcx> RegionInferenceContext<'tcx> { errors_buffer, ); + // If we produce any errors, we keep track of the names of all regions, so that we can use + // the same error names in any suggestions we produce. Note that we need names to be unique + // across different errors for the same MIR def so that we can make suggestions that fix + // multiple problems. 
+ let mut region_naming = RegionErrorNamingCtx::new(); + self.check_universal_regions( infcx, body, @@ -494,6 +505,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { mir_def_id, outlives_requirements.as_mut(), errors_buffer, + &mut region_naming, ); self.check_member_constraints(infcx, mir_def_id, errors_buffer); @@ -1312,6 +1324,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { mir_def_id: DefId, mut propagated_outlives_requirements: Option<&mut Vec>>, errors_buffer: &mut Vec, + region_naming: &mut RegionErrorNamingCtx, ) { for (fr, fr_definition) in self.definitions.iter_enumerated() { match fr_definition.origin { @@ -1327,6 +1340,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { fr, &mut propagated_outlives_requirements, errors_buffer, + region_naming, ); } @@ -1358,6 +1372,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { longer_fr: RegionVid, propagated_outlives_requirements: &mut Option<&mut Vec>>, errors_buffer: &mut Vec, + region_naming: &mut RegionErrorNamingCtx, ) { debug!("check_universal_region(fr={:?})", longer_fr); @@ -1385,6 +1400,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { mir_def_id, propagated_outlives_requirements, errors_buffer, + region_naming, ); return; } @@ -1401,8 +1417,13 @@ impl<'tcx> RegionInferenceContext<'tcx> { mir_def_id, propagated_outlives_requirements, errors_buffer, + region_naming, ) { // continuing to iterate just reports more errors than necessary + // + // FIXME It would also allow us to report more Outlives Suggestions, though, so + // it's not clear that that's a bad thing. Somebody should try commenting out this + // line and see it is actually a regression. return; } } @@ -1418,6 +1439,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { mir_def_id: DefId, propagated_outlives_requirements: &mut Option<&mut Vec>>, errors_buffer: &mut Vec, + region_naming: &mut RegionErrorNamingCtx, ) -> Option { // If it is known that `fr: o`, carry on. if self.universal_region_relations.outlives(longer_fr, shorter_fr) { @@ -1466,7 +1488,18 @@ impl<'tcx> RegionInferenceContext<'tcx> { // // Note: in this case, we use the unapproximated regions to report the // error. This gives better error messages in some cases. - self.report_error(body, upvars, infcx, mir_def_id, longer_fr, shorter_fr, errors_buffer); + let db = self.report_error( + body, + upvars, + infcx, + mir_def_id, + longer_fr, + shorter_fr, + region_naming, + ); + + db.buffer(errors_buffer); + Some(ErrorReported) } diff --git a/src/test/ui/c-variadic/variadic-ffi-4.nll.stderr b/src/test/ui/c-variadic/variadic-ffi-4.nll.stderr index 4947d6e529108..ab8398ec5e935 100644 --- a/src/test/ui/c-variadic/variadic-ffi-4.nll.stderr +++ b/src/test/ui/c-variadic/variadic-ffi-4.nll.stderr @@ -37,11 +37,11 @@ error: lifetime may not live long enough --> $DIR/variadic-ffi-4.rs:20:5 | LL | pub unsafe extern "C" fn no_escape3(_: usize, mut ap0: &mut VaListImpl, mut ap1: ...) { - | ------- ------- has type `core::ffi::VaListImpl<'1>` + | ------- ------- has type `core::ffi::VaListImpl<'2>` | | - | has type `&mut core::ffi::VaListImpl<'2>` + | has type `&mut core::ffi::VaListImpl<'1>` LL | *ap0 = ap1; - | ^^^^ assignment requires that `'1` must outlive `'2` + | ^^^^ assignment requires that `'2` must outlive `'1` error: lifetime may not live long enough --> $DIR/variadic-ffi-4.rs:25:5 @@ -57,11 +57,11 @@ error: lifetime may not live long enough --> $DIR/variadic-ffi-4.rs:25:5 | LL | pub unsafe extern "C" fn no_escape4(_: usize, ap0: &mut VaListImpl, mut ap1: ...) 
{ - | --- ------- has type `core::ffi::VaListImpl<'1>` + | --- ------- has type `core::ffi::VaListImpl<'2>` | | - | has type `&mut core::ffi::VaListImpl<'2>` + | has type `&mut core::ffi::VaListImpl<'1>` LL | ap0 = &mut ap1; - | ^^^^^^^^^^^^^^ assignment requires that `'1` must outlive `'2` + | ^^^^^^^^^^^^^^ assignment requires that `'2` must outlive `'1` error[E0384]: cannot assign to immutable argument `ap0` --> $DIR/variadic-ffi-4.rs:25:5 @@ -99,11 +99,11 @@ error: lifetime may not live long enough --> $DIR/variadic-ffi-4.rs:33:12 | LL | pub unsafe extern "C" fn no_escape5(_: usize, mut ap0: &mut VaListImpl, mut ap1: ...) { - | ------- ------- has type `core::ffi::VaListImpl<'1>` + | ------- ------- has type `core::ffi::VaListImpl<'2>` | | - | has type `&mut core::ffi::VaListImpl<'2>` + | has type `&mut core::ffi::VaListImpl<'1>` LL | *ap0 = ap1.clone(); - | ^^^^^^^^^^^ argument requires that `'1` must outlive `'2` + | ^^^^^^^^^^^ argument requires that `'2` must outlive `'1` error: aborting due to 11 previous errors diff --git a/src/test/ui/generator/auto-trait-regions.stderr b/src/test/ui/generator/auto-trait-regions.stderr index 92f92e2a32a36..dab4d348ceb60 100644 --- a/src/test/ui/generator/auto-trait-regions.stderr +++ b/src/test/ui/generator/auto-trait-regions.stderr @@ -1,20 +1,26 @@ error: implementation of `Foo` is not general enough --> $DIR/auto-trait-regions.rs:30:5 | +LL | auto trait Foo {} + | ----------------- trait `Foo` defined here +... LL | assert_foo(gen); - | ^^^^^^^^^^ + | ^^^^^^^^^^ implementation of `Foo` is not general enough | - = note: `Foo` would have to be implemented for the type `&'0 OnlyFooIfStaticRef`, for any lifetime `'0` - = note: but `Foo` is actually implemented for the type `&'1 OnlyFooIfStaticRef`, for some specific lifetime `'1` + = note: `Foo` would have to be implemented for the type `&'0 OnlyFooIfStaticRef`, for any lifetime `'0`... + = note: ...but `Foo` is actually implemented for the type `&'1 OnlyFooIfStaticRef`, for some specific lifetime `'1` error: implementation of `Foo` is not general enough --> $DIR/auto-trait-regions.rs:48:5 | +LL | auto trait Foo {} + | ----------------- trait `Foo` defined here +... LL | assert_foo(gen); - | ^^^^^^^^^^ + | ^^^^^^^^^^ implementation of `Foo` is not general enough | - = note: `Foo` would have to be implemented for the type `A<'0, '1>`, for any two lifetimes `'0` and `'1` - = note: but `Foo` is actually implemented for the type `A<'_, '2>`, for some specific lifetime `'2` + = note: `Foo` would have to be implemented for the type `A<'0, '1>`, for any two lifetimes `'0` and `'1`... + = note: ...but `Foo` is actually implemented for the type `A<'_, '2>`, for some specific lifetime `'2` error: aborting due to 2 previous errors diff --git a/src/test/ui/hrtb/due-to-where-clause.rs b/src/test/ui/hrtb/due-to-where-clause.rs new file mode 100644 index 0000000000000..04e2ddd4a6090 --- /dev/null +++ b/src/test/ui/hrtb/due-to-where-clause.rs @@ -0,0 +1,16 @@ +// ignore-compare-mode-nll +// ^ This code works in nll mode. 
+ +fn main() { + test::(&mut 42); //~ ERROR implementation of `Foo` is not general enough +} + +trait Foo<'a> {} + +struct FooS<'a> { + data: &'a mut u32, +} + +impl<'a, 'b: 'a> Foo<'b> for FooS<'a> {} + +fn test<'a, F>(data: &'a mut u32) where F: for<'b> Foo<'b> {} diff --git a/src/test/ui/hrtb/due-to-where-clause.stderr b/src/test/ui/hrtb/due-to-where-clause.stderr new file mode 100644 index 0000000000000..e698584bb716f --- /dev/null +++ b/src/test/ui/hrtb/due-to-where-clause.stderr @@ -0,0 +1,17 @@ +error: implementation of `Foo` is not general enough + --> $DIR/due-to-where-clause.rs:5:5 + | +LL | test::(&mut 42); + | ^^^^^^^^^^^^ doesn't satisfy where-clause +... +LL | trait Foo<'a> {} + | ---------------- trait `Foo` defined here +... +LL | fn test<'a, F>(data: &'a mut u32) where F: for<'b> Foo<'b> {} + | ------------------------------------------------------------- due to a where-clause on `test`... + | + = note: ...`FooS<'_>` must implement `Foo<'0>`, for any lifetime `'0`... + = note: ...but `FooS<'_>` actually implements `Foo<'1>`, for some specific lifetime `'1` + +error: aborting due to previous error + diff --git a/src/test/ui/hrtb/hrtb-cache-issue-54302.stderr b/src/test/ui/hrtb/hrtb-cache-issue-54302.stderr index 77a5491cb63a7..003f32659351f 100644 --- a/src/test/ui/hrtb/hrtb-cache-issue-54302.stderr +++ b/src/test/ui/hrtb/hrtb-cache-issue-54302.stderr @@ -1,11 +1,14 @@ error: implementation of `Deserialize` is not general enough --> $DIR/hrtb-cache-issue-54302.rs:19:5 | +LL | trait Deserialize<'de> {} + | ------------------------- trait `Deserialize` defined here +... LL | assert_deserialize_owned::<&'static str>(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ implementation of `Deserialize` is not general enough | - = note: `&'static str` must implement `Deserialize<'0>`, for any lifetime `'0` - = note: but `&str` actually implements `Deserialize<'1>`, for some specific lifetime `'1` + = note: `&'static str` must implement `Deserialize<'0>`, for any lifetime `'0`... + = note: ...but `&str` actually implements `Deserialize<'1>`, for some specific lifetime `'1` error: aborting due to previous error diff --git a/src/test/ui/hrtb/issue-30786.migrate.stderr b/src/test/ui/hrtb/issue-30786.migrate.stderr index e7deca7644b0e..c0e3fd3cf4679 100644 --- a/src/test/ui/hrtb/issue-30786.migrate.stderr +++ b/src/test/ui/hrtb/issue-30786.migrate.stderr @@ -1,11 +1,17 @@ error: implementation of `Stream` is not general enough --> $DIR/issue-30786.rs:108:22 | -LL | let map = source.map(|x: &_| x); - | ^^^ +LL | / pub trait Stream { +LL | | type Item; +LL | | fn next(self) -> Option; +LL | | } + | |_- trait `Stream` defined here +... +LL | let map = source.map(|x: &_| x); + | ^^^ implementation of `Stream` is not general enough | - = note: `Stream` would have to be implemented for the type `&'0 mut Map`, for any lifetime `'0` - = note: but `Stream` is actually implemented for the type `&'1 mut Map`, for some specific lifetime `'1` + = note: `Stream` would have to be implemented for the type `&'0 mut Map`, for any lifetime `'0`... 
+ = note: ...but `Stream` is actually implemented for the type `&'1 mut Map`, for some specific lifetime `'1` error: aborting due to previous error diff --git a/src/test/ui/hrtb/issue-30786.nll.stderr b/src/test/ui/hrtb/issue-30786.nll.stderr index 8614d86d93ac3..1cfd93e59d935 100644 --- a/src/test/ui/hrtb/issue-30786.nll.stderr +++ b/src/test/ui/hrtb/issue-30786.nll.stderr @@ -1,11 +1,11 @@ error: higher-ranked subtype error - --> $DIR/issue-30786.rs:112:18 + --> $DIR/issue-30786.rs:113:18 | LL | let filter = map.filter(|x: &_| true); | ^^^^^^^^^^^^^^^^^^^^^^^^ error: higher-ranked subtype error - --> $DIR/issue-30786.rs:114:17 + --> $DIR/issue-30786.rs:115:17 | LL | let count = filter.count(); // Assert that we still have a valid stream. | ^^^^^^^^^^^^^^ diff --git a/src/test/ui/hrtb/issue-30786.rs b/src/test/ui/hrtb/issue-30786.rs index b9920a1950498..c42297ca68346 100644 --- a/src/test/ui/hrtb/issue-30786.rs +++ b/src/test/ui/hrtb/issue-30786.rs @@ -16,7 +16,7 @@ //[nll]compile-flags: -Z borrowck=mir -pub trait Stream { +pub trait Stream { //[migrate]~ NOTE trait `Stream` defined here type Item; fn next(self) -> Option; } @@ -109,6 +109,7 @@ fn main() { //[migrate]~^ ERROR implementation of `Stream` is not general enough //[migrate]~| NOTE `Stream` would have to be implemented for the type `&'0 mut Map //[migrate]~| NOTE but `Stream` is actually implemented for the type `&'1 + //[migrate]~| NOTE implementation of `Stream` is not general enough let filter = map.filter(|x: &_| true); //[nll]~^ ERROR higher-ranked subtype error let count = filter.count(); // Assert that we still have a valid stream. diff --git a/src/test/ui/issues/issue-54302-cases.stderr b/src/test/ui/issues/issue-54302-cases.stderr index 98637611b79fe..3ed2779164301 100644 --- a/src/test/ui/issues/issue-54302-cases.stderr +++ b/src/test/ui/issues/issue-54302-cases.stderr @@ -1,38 +1,58 @@ error: implementation of `Foo` is not general enough --> $DIR/issue-54302-cases.rs:63:5 | -LL | >::ref_foo(a) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: `Foo<'static, u32>` would have to be implemented for the type `&'0 u32`, for any lifetime `'0` - = note: but `Foo<'_, u32>` is actually implemented for the type `&'1 u32`, for some specific lifetime `'1` +LL | / trait Foo<'x, T> { +LL | | fn foo(self) -> &'x T; +LL | | } + | |_- trait `Foo` defined here +... +LL | >::ref_foo(a) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ implementation of `Foo` is not general enough + | + = note: `Foo<'static, u32>` would have to be implemented for the type `&'0 u32`, for any lifetime `'0`... + = note: ...but `Foo<'_, u32>` is actually implemented for the type `&'1 u32`, for some specific lifetime `'1` error: implementation of `Foo` is not general enough --> $DIR/issue-54302-cases.rs:69:5 | -LL | >::ref_foo(a) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | / trait Foo<'x, T> { +LL | | fn foo(self) -> &'x T; +LL | | } + | |_- trait `Foo` defined here +... +LL | >::ref_foo(a) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ implementation of `Foo` is not general enough | - = note: `Foo<'static, i32>` would have to be implemented for the type `&'0 i32`, for any lifetime `'0` - = note: but `Foo<'_, i32>` is actually implemented for the type `&'1 i32`, for some specific lifetime `'1` + = note: `Foo<'static, i32>` would have to be implemented for the type `&'0 i32`, for any lifetime `'0`... 
+ = note: ...but `Foo<'_, i32>` is actually implemented for the type `&'1 i32`, for some specific lifetime `'1` error: implementation of `Foo` is not general enough --> $DIR/issue-54302-cases.rs:75:5 | -LL | >::ref_foo(a) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | / trait Foo<'x, T> { +LL | | fn foo(self) -> &'x T; +LL | | } + | |_- trait `Foo` defined here +... +LL | >::ref_foo(a) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ implementation of `Foo` is not general enough | - = note: `Foo<'static, u64>` would have to be implemented for the type `&'0 u64`, for any lifetime `'0` - = note: but `Foo<'_, u64>` is actually implemented for the type `&'1 u64`, for some specific lifetime `'1` + = note: `Foo<'static, u64>` would have to be implemented for the type `&'0 u64`, for any lifetime `'0`... + = note: ...but `Foo<'_, u64>` is actually implemented for the type `&'1 u64`, for some specific lifetime `'1` error: implementation of `Foo` is not general enough --> $DIR/issue-54302-cases.rs:81:5 | -LL | >::ref_foo(a) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | / trait Foo<'x, T> { +LL | | fn foo(self) -> &'x T; +LL | | } + | |_- trait `Foo` defined here +... +LL | >::ref_foo(a) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ implementation of `Foo` is not general enough | - = note: `Foo<'static, i64>` would have to be implemented for the type `&'0 i64`, for any lifetime `'0` - = note: but `Foo<'_, i64>` is actually implemented for the type `&'1 i64`, for some specific lifetime `'1` + = note: `Foo<'static, i64>` would have to be implemented for the type `&'0 i64`, for any lifetime `'0`... + = note: ...but `Foo<'_, i64>` is actually implemented for the type `&'1 i64`, for some specific lifetime `'1` error: aborting due to 4 previous errors diff --git a/src/test/ui/issues/issue-54302.stderr b/src/test/ui/issues/issue-54302.stderr index c6d0805f3ab2a..1b3f57ba188a3 100644 --- a/src/test/ui/issues/issue-54302.stderr +++ b/src/test/ui/issues/issue-54302.stderr @@ -1,11 +1,14 @@ error: implementation of `Deserialize` is not general enough --> $DIR/issue-54302.rs:13:5 | +LL | trait Deserialize<'de> {} + | ------------------------- trait `Deserialize` defined here +... LL | assert_deserialize_owned::<&'static str>(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ implementation of `Deserialize` is not general enough | - = note: `&'static str` must implement `Deserialize<'0>`, for any lifetime `'0` - = note: but `&str` actually implements `Deserialize<'1>`, for some specific lifetime `'1` + = note: `&'static str` must implement `Deserialize<'0>`, for any lifetime `'0`... + = note: ...but `&str` actually implements `Deserialize<'1>`, for some specific lifetime `'1` error: aborting due to previous error diff --git a/src/test/ui/issues/issue-55731.stderr b/src/test/ui/issues/issue-55731.stderr index f25e18e5d90cb..f44c842187cc2 100644 --- a/src/test/ui/issues/issue-55731.stderr +++ b/src/test/ui/issues/issue-55731.stderr @@ -1,11 +1,16 @@ error: implementation of `DistributedIteratorMulti` is not general enough --> $DIR/issue-55731.rs:48:5 | -LL | multi(Map { - | ^^^^^ +LL | / trait DistributedIteratorMulti { +LL | | type Item; +LL | | } + | |_- trait `DistributedIteratorMulti` defined here +... 
+LL | multi(Map { + | ^^^^^ implementation of `DistributedIteratorMulti` is not general enough | - = note: `DistributedIteratorMulti<&'0 ()>` would have to be implemented for the type `Cloned<&()>`, for any lifetime `'0` - = note: but `DistributedIteratorMulti<&'1 ()>` is actually implemented for the type `Cloned<&'1 ()>`, for some specific lifetime `'1` + = note: `DistributedIteratorMulti<&'0 ()>` would have to be implemented for the type `Cloned<&()>`, for any lifetime `'0`... + = note: ...but `DistributedIteratorMulti<&'1 ()>` is actually implemented for the type `Cloned<&'1 ()>`, for some specific lifetime `'1` error: aborting due to previous error diff --git a/src/test/ui/lifetimes/lifetime-errors/ex3-both-anon-regions-3.nll.stderr b/src/test/ui/lifetimes/lifetime-errors/ex3-both-anon-regions-3.nll.stderr index 779e2eb8b9205..2ed4d6d4401aa 100644 --- a/src/test/ui/lifetimes/lifetime-errors/ex3-both-anon-regions-3.nll.stderr +++ b/src/test/ui/lifetimes/lifetime-errors/ex3-both-anon-regions-3.nll.stderr @@ -12,11 +12,11 @@ error: lifetime may not live long enough --> $DIR/ex3-both-anon-regions-3.rs:2:5 | LL | fn foo(z: &mut Vec<(&u8,&u8)>, (x, y): (&u8, &u8)) { - | - - let's call the lifetime of this reference `'1` + | - - let's call the lifetime of this reference `'3` | | - | let's call the lifetime of this reference `'2` + | let's call the lifetime of this reference `'4` LL | z.push((x,y)); - | ^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2` + | ^^^^^^^^^^^^^ argument requires that `'3` must outlive `'4` error: aborting due to 2 previous errors
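For reference, here is a minimal, self-contained sketch of the naming-context pattern this patch threads through the NLL error-reporting code in place of a bare `&mut usize` counter: a `RegionErrorNamingCtx` owns the counter so that synthesized names such as `'1` and `'2` stay unique and consistent across every diagnostic (and any outlives suggestion) emitted for the same MIR def. The types below (`RegionVid`, `RegionErrorNamingCtx`, and a memoizing `synthesize_region_name`) are simplified stand-ins written only to illustrate the idea; they are not the actual `librustc_mir` definitions, and the memoization shown is an assumption based on the comment above about keeping track of region names for reuse.

use std::collections::HashMap;

// Stand-in for rustc's interned region-variable index (illustrative only).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct RegionVid(u32);

// Simplified naming context: the counter plus a cache of already-chosen names.
struct RegionErrorNamingCtx {
    counter: usize,
    renamed: HashMap<RegionVid, String>,
}

impl RegionErrorNamingCtx {
    fn new() -> Self {
        // Assumed starting value: the first synthesized name is `'1`,
        // matching the doc comment on `synthesize_region_name` above.
        RegionErrorNamingCtx { counter: 1, renamed: HashMap::new() }
    }

    // Returns the existing name for `vid`, or synthesizes a fresh `'N`
    // and remembers it so later errors and suggestions reuse the same name.
    fn synthesize_region_name(&mut self, vid: RegionVid) -> String {
        let counter = &mut self.counter;
        self.renamed
            .entry(vid)
            .or_insert_with(|| {
                let name = format!("'{}", *counter);
                *counter += 1;
                name
            })
            .clone()
    }
}

fn main() {
    let mut renctx = RegionErrorNamingCtx::new();
    let (a, b) = (RegionVid(7), RegionVid(9));

    // A first error names both regions...
    let first_a = renctx.synthesize_region_name(a);
    let first_b = renctx.synthesize_region_name(b);
    // ...and a later error (or an outlives suggestion) reuses the same name.
    let again_a = renctx.synthesize_region_name(a);

    assert_eq!(first_a, "'1");
    assert_eq!(first_b, "'2");
    assert_eq!(again_a, "'1"); // consistent across diagnostics
    println!("{:?} -> {}, {:?} -> {}", a, first_a, b, first_b);
}

Passing a small context struct rather than a raw counter is what lets additional state (such as the remembered names used when building suggestions) ride along without touching every signature again, which is the shape of the `renctx` changes in the hunks above.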