From 37ce212f1f0a009a181d3a0a27dbd52505d4ac07 Mon Sep 17 00:00:00 2001 From: Trevor Spiteri Date: Wed, 23 Sep 2020 12:01:25 +0200 Subject: [PATCH 01/11] make exp_m1 examples more representative of use With this commit, the examples for exp_m1 would fail if x.exp() - 1.0 is used instead of x.exp_m1(). --- library/std/src/f32.rs | 9 +++++---- library/std/src/f64.rs | 9 +++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/library/std/src/f32.rs b/library/std/src/f32.rs index 59c2da5273bde..cd9065b3a2115 100644 --- a/library/std/src/f32.rs +++ b/library/std/src/f32.rs @@ -719,12 +719,13 @@ impl f32 { /// # Examples /// /// ``` - /// let x = 6.0f32; + /// let x = 1e-8_f32; /// - /// // e^(ln(6)) - 1 - /// let abs_difference = (x.ln().exp_m1() - 5.0).abs(); + /// // for very small x, e^x is approximately 1 + x + x^2 / 2 + /// let approx = x + x * x / 2.0; + /// let abs_difference = (x.exp_m1() - approx).abs(); /// - /// assert!(abs_difference <= f32::EPSILON); + /// assert!(abs_difference < 1e-10); /// ``` #[must_use = "method returns a new number and does not mutate the original value"] #[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/std/src/f64.rs b/library/std/src/f64.rs index bd094bdb55dc3..e412f89432c76 100644 --- a/library/std/src/f64.rs +++ b/library/std/src/f64.rs @@ -721,12 +721,13 @@ impl f64 { /// # Examples /// /// ``` - /// let x = 7.0_f64; + /// let x = 1e-16_f64; /// - /// // e^(ln(7)) - 1 - /// let abs_difference = (x.ln().exp_m1() - 6.0).abs(); + /// // for very small x, e^x is approximately 1 + x + x^2 / 2 + /// let approx = x + x * x / 2.0; + /// let abs_difference = (x.exp_m1() - approx).abs(); /// - /// assert!(abs_difference < 1e-10); + /// assert!(abs_difference < 1e-20); /// ``` #[must_use = "method returns a new number and does not mutate the original value"] #[stable(feature = "rust1", since = "1.0.0")] From 50d3ddcb0cbc36f782fa5939d1ef24422f6902d4 Mon Sep 17 00:00:00 2001 From: Trevor Spiteri Date: Wed, 23 Sep 2020 12:02:49 +0200 Subject: [PATCH 02/11] make ln_1p examples more representative of use With this commit, the examples for ln_1p would fail if (x + 1.0).ln() is used instead of x.ln_1p(). 
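To make the failure mode described in these two commit messages concrete, here is a small standalone sketch (not part of either patch; the thresholds are only illustrative) contrasting the fused operations with the naive expressions the new doc examples are meant to guard against:

```rust
fn main() {
    let x = 1e-16_f64;

    // The truncated series used in the doc examples: e^x ~ 1 + x + x^2/2 for tiny x.
    let series = x + x * x / 2.0;
    // exp_m1 stays close to the series...
    assert!((x.exp_m1() - series).abs() < 1e-20);
    // ...while the naive form loses essentially the whole result, because
    // x.exp() rounds to (almost exactly) 1.0 and the subtraction cancels.
    assert!((x.exp() - 1.0 - series).abs() > 1e-17);

    // The same contrast for ln_1p: ln(1 + x) ~ x - x^2/2 for tiny x,
    // but 1.0 + 1e-16 already rounds back to 1.0 before ln ever sees it.
    let series = x - x * x / 2.0;
    assert!((x.ln_1p() - series).abs() < 1e-20);
    assert!(((x + 1.0).ln() - series).abs() > 1e-17);
}
```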
--- library/std/src/f32.rs | 9 +++++---- library/std/src/f64.rs | 9 +++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/library/std/src/f32.rs b/library/std/src/f32.rs index cd9065b3a2115..ed975c4287981 100644 --- a/library/std/src/f32.rs +++ b/library/std/src/f32.rs @@ -740,12 +740,13 @@ impl f32 { /// # Examples /// /// ``` - /// let x = std::f32::consts::E - 1.0; + /// let x = 1e-8_f32; /// - /// // ln(1 + (e - 1)) == ln(e) == 1 - /// let abs_difference = (x.ln_1p() - 1.0).abs(); + /// // for very small x, ln(1 + x) is approximately x - x^2 / 2 + /// let approx = x - x * x / 2.0; + /// let abs_difference = (x.ln_1p() - approx).abs(); /// - /// assert!(abs_difference <= f32::EPSILON); + /// assert!(abs_difference < 1e-10); /// ``` #[must_use = "method returns a new number and does not mutate the original value"] #[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/std/src/f64.rs b/library/std/src/f64.rs index e412f89432c76..8d0a85e056f71 100644 --- a/library/std/src/f64.rs +++ b/library/std/src/f64.rs @@ -742,12 +742,13 @@ impl f64 { /// # Examples /// /// ``` - /// let x = std::f64::consts::E - 1.0; + /// let x = 1e-16_f64; /// - /// // ln(1 + (e - 1)) == ln(e) == 1 - /// let abs_difference = (x.ln_1p() - 1.0).abs(); + /// // for very small x, ln(1 + x) is approximately x - x^2 / 2 + /// let approx = x - x * x / 2.0; + /// let abs_difference = (x.ln_1p() - approx).abs(); /// - /// assert!(abs_difference < 1e-10); + /// assert!(abs_difference < 1e-20); /// ``` #[must_use = "method returns a new number and does not mutate the original value"] #[stable(feature = "rust1", since = "1.0.0")] From 01b0aff1df1dd5ee7c60e8fbeff15cc3edaa3208 Mon Sep 17 00:00:00 2001 From: Mara Bos Date: Wed, 22 Jul 2020 12:17:36 +0200 Subject: [PATCH 03/11] Add std::panic::panic_box. --- library/std/src/panic.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/library/std/src/panic.rs b/library/std/src/panic.rs index 4281867314cca..06ce66c10f7ea 100644 --- a/library/std/src/panic.rs +++ b/library/std/src/panic.rs @@ -23,6 +23,20 @@ pub use crate::panicking::{set_hook, take_hook}; #[stable(feature = "panic_hooks", since = "1.10.0")] pub use core::panic::{Location, PanicInfo}; +/// Panic the current thread, with the given payload as the panic message. +/// +/// This supports an arbitrary panic payload, instead of just (formatted) strings. +/// +/// The message is attached as a `Box<'static + Any + Send>`, which can be +/// accessed using [`PanicInfo::payload`]. +/// +/// See the [`panic!`] macro for more information about panicking. +#[unstable(feature = "panic_box", issue = "none")] +#[inline] +pub fn panic_box(msg: M) -> ! { + crate::panicking::begin_panic(msg); +} + /// A marker trait which represents "panic safe" types in Rust. /// /// This trait is implemented by default for many types and behaves similarly in From 16201da6a4ff613d00ca3680c43cbb1b52f60cf1 Mon Sep 17 00:00:00 2001 From: Mara Bos Date: Sun, 18 Oct 2020 12:29:13 +0200 Subject: [PATCH 04/11] Rename panic_box to panic_any. 
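For context, a hedged usage sketch of the function added in PATCH 03 and renamed here: the `ExitCode` type and the scenario are made up for illustration, and on a nightly from this era the call is still gated behind the unstable `panic_any` feature.

```rust
use std::panic;

// A made-up structured payload; any `'static + Any + Send` type works.
#[derive(Debug)]
struct ExitCode(i32);

fn main() {
    // Panic with a value rather than a formatted string.
    let result = panic::catch_unwind(|| {
        panic::panic_any(ExitCode(3));
    });

    // catch_unwind hands the payload back as a boxed `dyn Any + Send`;
    // downcast it to recover the original value.
    match result.unwrap_err().downcast::<ExitCode>() {
        Ok(code) => println!("panicked with exit code {}", code.0),
        Err(_) => println!("panicked with some other payload"),
    }
}
```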
--- library/std/src/panic.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/library/std/src/panic.rs b/library/std/src/panic.rs index 06ce66c10f7ea..9a756e4bbb4d4 100644 --- a/library/std/src/panic.rs +++ b/library/std/src/panic.rs @@ -23,17 +23,17 @@ pub use crate::panicking::{set_hook, take_hook}; #[stable(feature = "panic_hooks", since = "1.10.0")] pub use core::panic::{Location, PanicInfo}; -/// Panic the current thread, with the given payload as the panic message. +/// Panic the current thread with the given message as the panic payload. /// -/// This supports an arbitrary panic payload, instead of just (formatted) strings. +/// The message can be of any (`Any + Send`) type, not just strings. /// -/// The message is attached as a `Box<'static + Any + Send>`, which can be -/// accessed using [`PanicInfo::payload`]. +/// The message is wrapped in a `Box<'static + Any + Send>`, which can be +/// accessed later using [`PanicInfo::payload`]. /// /// See the [`panic!`] macro for more information about panicking. #[unstable(feature = "panic_box", issue = "none")] #[inline] -pub fn panic_box(msg: M) -> ! { +pub fn panic_any(msg: M) -> ! { crate::panicking::begin_panic(msg); } From a9d334d386e5abf79d8ee60f94bf32147b755c4c Mon Sep 17 00:00:00 2001 From: Mara Bos Date: Wed, 28 Oct 2020 21:21:41 +0100 Subject: [PATCH 05/11] Update panic_any feature name. Co-authored-by: Camelid --- library/std/src/panic.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/std/src/panic.rs b/library/std/src/panic.rs index 9a756e4bbb4d4..ad91933d65102 100644 --- a/library/std/src/panic.rs +++ b/library/std/src/panic.rs @@ -31,7 +31,7 @@ pub use core::panic::{Location, PanicInfo}; /// accessed later using [`PanicInfo::payload`]. /// /// See the [`panic!`] macro for more information about panicking. -#[unstable(feature = "panic_box", issue = "none")] +#[unstable(feature = "panic_any", issue = "none")] #[inline] pub fn panic_any(msg: M) -> ! { crate::panicking::begin_panic(msg); From b48fee010c92dde304154ba45c0e41d396e60568 Mon Sep 17 00:00:00 2001 From: Mara Bos Date: Wed, 28 Oct 2020 21:23:45 +0100 Subject: [PATCH 06/11] Add tracking issue number for panic_any. --- library/std/src/panic.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/std/src/panic.rs b/library/std/src/panic.rs index ad91933d65102..d18b94b6c1aef 100644 --- a/library/std/src/panic.rs +++ b/library/std/src/panic.rs @@ -31,7 +31,7 @@ pub use core::panic::{Location, PanicInfo}; /// accessed later using [`PanicInfo::payload`]. /// /// See the [`panic!`] macro for more information about panicking. -#[unstable(feature = "panic_any", issue = "none")] +#[unstable(feature = "panic_any", issue = "78500")] #[inline] pub fn panic_any(msg: M) -> ! 
{ crate::panicking::begin_panic(msg); From 4ba57aa703397a34e92f055c9e07bc880b771226 Mon Sep 17 00:00:00 2001 From: Aaron Hill Date: Thu, 29 Oct 2020 11:37:55 -0400 Subject: [PATCH 07/11] Strip tokens from trait and impl items before printing AST JSON Fixes #78510 --- compiler/rustc_interface/src/passes.rs | 11 +++++++++++ src/test/ui/ast-json/issue-78510-assoc-ice.rs | 18 ++++++++++++++++++ .../ui/ast-json/issue-78510-assoc-ice.stderr | 15 +++++++++++++++ .../ui/ast-json/issue-78510-assoc-ice.stdout | 1 + 4 files changed, 45 insertions(+) create mode 100644 src/test/ui/ast-json/issue-78510-assoc-ice.rs create mode 100644 src/test/ui/ast-json/issue-78510-assoc-ice.stderr create mode 100644 src/test/ui/ast-json/issue-78510-assoc-ice.stdout diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 9dbd59506b188..a1487aa0060d5 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -70,6 +70,17 @@ impl mut_visit::MutVisitor for TokenStripper { i.tokens = None; mut_visit::noop_flat_map_foreign_item(i, self) } + fn flat_map_trait_item( + &mut self, + mut i: P, + ) -> SmallVec<[P; 1]> { + i.tokens = None; + mut_visit::noop_flat_map_assoc_item(i, self) + } + fn flat_map_impl_item(&mut self, mut i: P) -> SmallVec<[P; 1]> { + i.tokens = None; + mut_visit::noop_flat_map_assoc_item(i, self) + } fn visit_block(&mut self, b: &mut P) { b.tokens = None; mut_visit::noop_visit_block(b, self); diff --git a/src/test/ui/ast-json/issue-78510-assoc-ice.rs b/src/test/ui/ast-json/issue-78510-assoc-ice.rs new file mode 100644 index 0000000000000..ef3117c49cad3 --- /dev/null +++ b/src/test/ui/ast-json/issue-78510-assoc-ice.rs @@ -0,0 +1,18 @@ +// compile-flags: -Zast-json +// +// Regression test for issue #78510 +// Tests that we don't ICE when we have tokens for an associated item + +struct S; + +impl S { + #[derive(Debug)] //~ ERROR `derive` may only be applied to structs, enums and unions + fn f() {} +} + +trait Bar { + #[derive(Debug)] //~ ERROR `derive` may only be applied to structs, enums and unions + fn foo() {} +} + +fn main() {} diff --git a/src/test/ui/ast-json/issue-78510-assoc-ice.stderr b/src/test/ui/ast-json/issue-78510-assoc-ice.stderr new file mode 100644 index 0000000000000..3573c203a7893 --- /dev/null +++ b/src/test/ui/ast-json/issue-78510-assoc-ice.stderr @@ -0,0 +1,15 @@ +error[E0774]: `derive` may only be applied to structs, enums and unions + --> $DIR/issue-78510-assoc-ice.rs:9:5 + | +LL | #[derive(Debug)] + | ^^^^^^^^^^^^^^^^ + +error[E0774]: `derive` may only be applied to structs, enums and unions + --> $DIR/issue-78510-assoc-ice.rs:14:5 + | +LL | #[derive(Debug)] + | ^^^^^^^^^^^^^^^^ + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0774`. 
diff --git a/src/test/ui/ast-json/issue-78510-assoc-ice.stdout b/src/test/ui/ast-json/issue-78510-assoc-ice.stdout new file mode 100644 index 0000000000000..fef9504285b53 --- /dev/null +++ b/src/test/ui/ast-json/issue-78510-assoc-ice.stdout @@ -0,0 +1 @@ +{"module":{"inner":{"lo":139,"hi":397},"unsafety":"No","items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":3,"args":null}],"tokens":null},"args":"Empty","tokens":null}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0},"tokens":null}],"id":4,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":5,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":6,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":7,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":8,"args":null}],"tokens":null},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":9,"args":null}],"tokens":null},"args":"Empty","tokens":null}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0},"tokens":null}],"id":10,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":11,"span":{"lo":139,"hi":148},"vis":{"kind":"Inherited","span":{"lo":139,"hi":139},"tokens":null},"ident":{"name":"S","span":{"lo":146,"hi":147}},"kind":{"variant":"Struct","fields":[{"variant":"Unit","fields":[12]},{"params":[],"where_clause":{"has_where_token":false,"predicates":[],"span":{"lo":147,"hi":147}},"span":{"lo":147,"hi":147}}]},"tokens":null},{"attrs":[],"id":13,"span":{"lo":150,"hi":263},"vis":{"kind":"Inherited","span":{"lo":150,"hi":150},"tokens":null},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Impl","fields":["No","Positive","Final","No",{"params":[],"where_clause":{"has_where_token":false,"predicates":[],"span":{"lo":156,"hi":156}},"span":{"lo":154,"hi":154}},null,{"id":14,"kind":{"variant":"Path","fields":[null,{"span":{"lo":155,"hi":156},"segments":[{"ident":{"name":"S","span":{"lo":155,"hi":156}},"id":15,"args":null}],"tokens":null}]},"span":{"lo":155,"hi":156},"tokens":null},[{"attrs":[],"id":19,"span":{"lo":252,"hi":261},"vis":{"kind":"Inherited","span":{"lo":252,"hi":252},"tokens":null},"ident":{"name":"f","span":{"lo":255,"hi":256}},"kind":{"variant":"Fn","fields":["Final",{"header":{"unsafety":"No","asyncness":"No","constness":"No","ext":"None"},"decl":{"inputs":[],"output":{"variant":"Default","fields":[{"lo":259,"hi":259}]}},"span":{"lo":252,"hi":258}},{"params":[],"where_clause":{"has_where_token":false,"predicates":[],"span":{"lo":258,"hi":258}},"span":{"lo":256,"hi":256}},{"stmts":[],"id":20,"rules":"Default","span":{"lo":259,"hi":261},"tokens":null}]},"tokens":null}]]},"tokens":null},{"attrs":[],"id":16,"span":{"lo":265,"hi":383},"vis":{"kind":"Inherited","span":{"lo":265,"hi":265},"tokens":null},"ident":{"name":"Bar","span":{"lo":271,"hi":274}},"kind":{"variant":"Trait","fields":["No","No",{"params":[],"where_clause":{"has_where_token":false,"predicates":[],"span":{"lo":274,"hi":274}},"span":{"lo":274,"hi
":274}},[],[{"attrs":[],"id":21,"span":{"lo":370,"hi":381},"vis":{"kind":"Inherited","span":{"lo":370,"hi":370},"tokens":null},"ident":{"name":"foo","span":{"lo":373,"hi":376}},"kind":{"variant":"Fn","fields":["Final",{"header":{"unsafety":"No","asyncness":"No","constness":"No","ext":"None"},"decl":{"inputs":[],"output":{"variant":"Default","fields":[{"lo":379,"hi":379}]}},"span":{"lo":370,"hi":378}},{"params":[],"where_clause":{"has_where_token":false,"predicates":[],"span":{"lo":378,"hi":378}},"span":{"lo":376,"hi":376}},{"stmts":[],"id":22,"rules":"Default","span":{"lo":379,"hi":381},"tokens":null}]},"tokens":null}]]},"tokens":null},{"attrs":[],"id":17,"span":{"lo":385,"hi":397},"vis":{"kind":"Inherited","span":{"lo":385,"hi":385},"tokens":null},"ident":{"name":"main","span":{"lo":388,"hi":392}},"kind":{"variant":"Fn","fields":["Final",{"header":{"unsafety":"No","asyncness":"No","constness":"No","ext":"None"},"decl":{"inputs":[],"output":{"variant":"Default","fields":[{"lo":395,"hi":395}]}},"span":{"lo":385,"hi":394}},{"params":[],"where_clause":{"has_where_token":false,"predicates":[],"span":{"lo":394,"hi":394}},"span":{"lo":392,"hi":392}},{"stmts":[],"id":18,"rules":"Default","span":{"lo":395,"hi":397},"tokens":null}]},"tokens":null}],"inline":true},"attrs":[],"span":{"lo":139,"hi":397},"proc_macros":[]} From b816e5dfb49b7a135813b204b9123bbc5f41be7a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Mi=C4=85sko?= Date: Fri, 30 Oct 2020 00:00:00 +0000 Subject: [PATCH 08/11] validator: Extend aliasing check to a call terminator --- compiler/rustc_mir/src/transform/validate.rs | 45 +++++++++++++++++--- 1 file changed, 40 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_mir/src/transform/validate.rs b/compiler/rustc_mir/src/transform/validate.rs index 7b22d643ab646..ecdd0c517ef4f 100644 --- a/compiler/rustc_mir/src/transform/validate.rs +++ b/compiler/rustc_mir/src/transform/validate.rs @@ -10,8 +10,8 @@ use rustc_middle::mir::{ visit::{PlaceContext, Visitor}, }; use rustc_middle::mir::{ - AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPhase, Operand, Rvalue, - SourceScope, Statement, StatementKind, Terminator, TerminatorKind, VarDebugInfo, + AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPhase, Operand, PlaceRef, + Rvalue, SourceScope, Statement, StatementKind, Terminator, TerminatorKind, VarDebugInfo, }; use rustc_middle::ty::relate::{Relate, RelateResult, TypeRelation}; use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt}; @@ -46,8 +46,16 @@ impl<'tcx> MirPass<'tcx> for Validator { .iterate_to_fixpoint() .into_results_cursor(body); - TypeChecker { when: &self.when, body, tcx, param_env, mir_phase, storage_liveness } - .visit_body(body); + TypeChecker { + when: &self.when, + body, + tcx, + param_env, + mir_phase, + storage_liveness, + place_cache: Vec::new(), + } + .visit_body(body); } } @@ -150,6 +158,7 @@ struct TypeChecker<'a, 'tcx> { param_env: ParamEnv<'tcx>, mir_phase: MirPhase, storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>, + place_cache: Vec>, } impl<'a, 'tcx> TypeChecker<'a, 'tcx> { @@ -391,7 +400,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { self.check_edge(location, *unwind, EdgeKind::Unwind); } } - TerminatorKind::Call { func, destination, cleanup, .. } => { + TerminatorKind::Call { func, args, destination, cleanup, .. } => { let func_ty = func.ty(&self.body.local_decls, self.tcx); match func_ty.kind() { ty::FnPtr(..) | ty::FnDef(..) 
=> {} @@ -406,6 +415,32 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { if let Some(cleanup) = cleanup { self.check_edge(location, *cleanup, EdgeKind::Unwind); } + + // The call destination place and Operand::Move place used as an argument might be + // passed by a reference to the callee. Consequently they must be non-overlapping. + // Currently this simply checks for duplicate places. + self.place_cache.clear(); + if let Some((destination, _)) = destination { + self.place_cache.push(destination.as_ref()); + } + for arg in args { + if let Operand::Move(place) = arg { + self.place_cache.push(place.as_ref()); + } + } + let all_len = self.place_cache.len(); + self.place_cache.sort_unstable(); + self.place_cache.dedup(); + let has_duplicates = all_len != self.place_cache.len(); + if has_duplicates { + self.fail( + location, + format!( + "encountered overlapping memory in `Call` terminator: {:?}", + terminator.kind, + ), + ); + } } TerminatorKind::Assert { cond, target, cleanup, .. } => { let cond_ty = cond.ty(&self.body.local_decls, self.tcx); From 8cf7d66d0a7e5405f55887ca008cca7fce7f7396 Mon Sep 17 00:00:00 2001 From: Joshua Nelson Date: Thu, 29 Oct 2020 21:23:55 -0400 Subject: [PATCH 09/11] Create config.toml in the current directory, not the top-level directory See https://github.com/rust-lang/rust/issues/78509 for discussion. --- src/bootstrap/setup.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bootstrap/setup.rs b/src/bootstrap/setup.rs index c6e1c99564c09..f5ce45a5bd11b 100644 --- a/src/bootstrap/setup.rs +++ b/src/bootstrap/setup.rs @@ -89,7 +89,7 @@ pub fn setup(src_path: &Path, profile: Profile) { std::process::exit(1); } - let path = cfg_file.unwrap_or_else(|| src_path.join("config.toml")); + let path = cfg_file.unwrap_or("config.toml".into()); let settings = format!( "# Includes one of the default files in src/bootstrap/defaults\n\ profile = \"{}\"\n\ From 307cc11bebe930fa13dec86408e8ae6dbc04a037 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20du=20Garreau?= Date: Fri, 30 Oct 2020 19:24:08 +0100 Subject: [PATCH 10/11] Constantify more BTreeMap and BTreeSet functions - BTreeMap::len - BTreeMap::is_empty - BTreeSet::len - BTreeSet::is_empty --- library/alloc/src/collections/btree/map.rs | 6 ++++-- library/alloc/src/collections/btree/map/tests.rs | 7 +++++++ library/alloc/src/collections/btree/set.rs | 6 ++++-- library/alloc/src/collections/btree/set/tests.rs | 7 +++++++ 4 files changed, 22 insertions(+), 4 deletions(-) diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs index 2704aab322924..07c23d29e20a6 100644 --- a/library/alloc/src/collections/btree/map.rs +++ b/library/alloc/src/collections/btree/map.rs @@ -2188,7 +2188,8 @@ impl BTreeMap { /// assert_eq!(a.len(), 1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn len(&self) -> usize { + #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")] + pub const fn len(&self) -> usize { self.length } @@ -2207,7 +2208,8 @@ impl BTreeMap { /// assert!(!a.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn is_empty(&self) -> bool { + #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")] + pub const fn is_empty(&self) -> bool { self.len() == 0 } diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs index adb94972f5bb6..09aabdcd0fb27 100644 --- a/library/alloc/src/collections/btree/map/tests.rs +++ 
b/library/alloc/src/collections/btree/map/tests.rs @@ -1527,6 +1527,13 @@ fn test_send() { } } +#[allow(dead_code)] +fn test_const() { + const MAP: &'static BTreeMap<(), ()> = &BTreeMap::new(); + const LEN: usize = MAP.len(); + const IS_EMPTY: bool = MAP.is_empty(); +} + #[test] fn test_occupied_entry_key() { let mut a = BTreeMap::new(); diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs index 3ad74969bec33..684019f8f5f5e 100644 --- a/library/alloc/src/collections/btree/set.rs +++ b/library/alloc/src/collections/btree/set.rs @@ -950,7 +950,8 @@ impl BTreeSet { /// assert_eq!(v.len(), 1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn len(&self) -> usize { + #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")] + pub const fn len(&self) -> usize { self.map.len() } @@ -967,7 +968,8 @@ impl BTreeSet { /// assert!(!v.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn is_empty(&self) -> bool { + #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")] + pub const fn is_empty(&self) -> bool { self.len() == 0 } } diff --git a/library/alloc/src/collections/btree/set/tests.rs b/library/alloc/src/collections/btree/set/tests.rs index 9267435728216..2069cde4dba3b 100644 --- a/library/alloc/src/collections/btree/set/tests.rs +++ b/library/alloc/src/collections/btree/set/tests.rs @@ -15,6 +15,13 @@ fn test_clone_eq() { assert_eq!(m.clone(), m); } +#[allow(dead_code)] +fn test_const() { + const SET: &'static BTreeSet<()> = &BTreeSet::new(); + const LEN: usize = SET.len(); + const IS_EMPTY: bool = SET.is_empty(); +} + #[test] fn test_iter_min_max() { let mut a = BTreeSet::new(); From d0c63bccc5f5214fb0defb974dfe75a2ea3ef6cb Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sat, 31 Oct 2020 00:40:41 +0300 Subject: [PATCH 11/11] parser: Cleanup `LazyTokenStream` and avoid some clones by using a named struct instead of a closure. --- compiler/rustc_ast/src/tokenstream.rs | 65 +++++++++----------------- compiler/rustc_expand/src/config.rs | 13 +++--- compiler/rustc_parse/src/lib.rs | 23 ++++----- compiler/rustc_parse/src/parser/mod.rs | 63 ++++++++++++++----------- 4 files changed, 77 insertions(+), 87 deletions(-) diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index cb419b6362ce7..b53acb97aeb9e 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -22,7 +22,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_span::{Span, DUMMY_SP}; use smallvec::{smallvec, SmallVec}; -use std::{iter, mem}; +use std::{fmt, iter, mem}; /// When the main rust parser encounters a syntax-extension invocation, it /// parses the arguments to the invocation as a token-tree. This is a very @@ -120,72 +120,51 @@ where } } -// A cloneable callback which produces a `TokenStream`. 
Each clone -// of this should produce the same `TokenStream` -pub trait CreateTokenStream: sync::Send + sync::Sync + FnOnce() -> TokenStream { - // Workaround for the fact that `Clone` is not object-safe - fn clone_it(&self) -> Box; +pub trait CreateTokenStream: sync::Send + sync::Sync { + fn create_token_stream(&self) -> TokenStream; } -impl TokenStream> CreateTokenStream - for F -{ - fn clone_it(&self) -> Box { - Box::new(self.clone()) - } -} - -impl Clone for Box { - fn clone(&self) -> Self { - let val: &(dyn CreateTokenStream) = &**self; - val.clone_it() +impl CreateTokenStream for TokenStream { + fn create_token_stream(&self) -> TokenStream { + self.clone() } } -/// A lazy version of `TokenStream`, which may defer creation +/// A lazy version of `TokenStream`, which defers creation /// of an actual `TokenStream` until it is needed. -pub type LazyTokenStream = Lrc; - +/// `Box` is here only to reduce the structure size. #[derive(Clone)] -pub enum LazyTokenStreamInner { - Lazy(Box), - Ready(TokenStream), -} +pub struct LazyTokenStream(Lrc>); -impl std::fmt::Debug for LazyTokenStreamInner { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - LazyTokenStreamInner::Lazy(..) => f.debug_struct("LazyTokenStream::Lazy").finish(), - LazyTokenStreamInner::Ready(..) => f.debug_struct("LazyTokenStream::Ready").finish(), - } +impl LazyTokenStream { + pub fn new(inner: impl CreateTokenStream + 'static) -> LazyTokenStream { + LazyTokenStream(Lrc::new(Box::new(inner))) + } + + pub fn create_token_stream(&self) -> TokenStream { + self.0.create_token_stream() } } -impl LazyTokenStreamInner { - pub fn into_token_stream(&self) -> TokenStream { - match self { - // Note that we do not cache this. If this ever becomes a performance - // problem, we should investigate wrapping `LazyTokenStreamInner` - // in a lock - LazyTokenStreamInner::Lazy(cb) => (cb.clone())(), - LazyTokenStreamInner::Ready(stream) => stream.clone(), - } +impl fmt::Debug for LazyTokenStream { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt("LazyTokenStream", f) } } -impl Encodable for LazyTokenStreamInner { +impl Encodable for LazyTokenStream { fn encode(&self, _s: &mut S) -> Result<(), S::Error> { panic!("Attempted to encode LazyTokenStream"); } } -impl Decodable for LazyTokenStreamInner { +impl Decodable for LazyTokenStream { fn decode(_d: &mut D) -> Result { panic!("Attempted to decode LazyTokenStream"); } } -impl HashStable for LazyTokenStreamInner { +impl HashStable for LazyTokenStream { fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) { panic!("Attempted to compute stable hash for LazyTokenStream"); } diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index 3551b92967c47..c124ab6421862 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -4,12 +4,11 @@ use rustc_ast::attr::HasAttrs; use rustc_ast::mut_visit::*; use rustc_ast::ptr::P; use rustc_ast::token::{DelimToken, Token, TokenKind}; -use rustc_ast::tokenstream::{DelimSpan, LazyTokenStreamInner, Spacing, TokenStream, TokenTree}; +use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing, TokenStream, TokenTree}; use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem}; use rustc_attr as attr; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::map_in_place::MapInPlace; -use rustc_data_structures::sync::Lrc; use rustc_errors::{error_code, struct_span_err, Applicability, Handler}; use 
rustc_feature::{Feature, Features, State as FeatureState}; use rustc_feature::{ @@ -303,7 +302,7 @@ impl<'a> StripUnconfigured<'a> { // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token // for `attr` when we expand it to `#[attr]` - let pound_token = orig_tokens.into_token_stream().trees().next().unwrap(); + let pound_token = orig_tokens.create_token_stream().trees().next().unwrap(); if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) { panic!("Bad tokens for attribute {:?}", attr); } @@ -313,16 +312,16 @@ impl<'a> StripUnconfigured<'a> { DelimSpan::from_single(pound_token.span()), DelimToken::Bracket, item.tokens - .clone() + .as_ref() .unwrap_or_else(|| panic!("Missing tokens for {:?}", item)) - .into_token_stream(), + .create_token_stream(), ); let mut attr = attr::mk_attr_from_item(attr.style, item, span); - attr.tokens = Some(Lrc::new(LazyTokenStreamInner::Ready(TokenStream::new(vec![ + attr.tokens = Some(LazyTokenStream::new(TokenStream::new(vec![ (pound_token, Spacing::Alone), (bracket_group, Spacing::Alone), - ])))); + ]))); self.process_cfg_attr(attr) }) .collect() diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 5c404161004a4..e851451269e32 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -249,29 +249,30 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke // came from. Here we attempt to extract these lossless token streams // before we fall back to the stringification. - let convert_tokens = |tokens: Option| tokens.map(|t| t.into_token_stream()); + let convert_tokens = + |tokens: &Option| tokens.as_ref().map(|t| t.create_token_stream()); let tokens = match *nt { Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()), - Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()), + Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens), Nonterminal::NtStmt(ref stmt) => { // FIXME: We currently only collect tokens for `:stmt` // matchers in `macro_rules!` macros. 
When we start collecting // tokens for attributes on statements, we will need to prepend // attributes here - convert_tokens(stmt.tokens.clone()) + convert_tokens(&stmt.tokens) } - Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.clone()), - Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.clone()), + Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens), + Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens), Nonterminal::NtIdent(ident, is_raw) => { Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into()) } Nonterminal::NtLifetime(ident) => { Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into()) } - Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.clone()), - Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.clone()), - Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.clone()), + Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens), + Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens), + Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens), Nonterminal::NtTT(ref tt) => Some(tt.clone().into()), Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => { if expr.tokens.is_none() { @@ -604,7 +605,7 @@ fn prepend_attrs( attrs: &[ast::Attribute], tokens: Option<&tokenstream::LazyTokenStream>, ) -> Option { - let tokens = tokens?.clone().into_token_stream(); + let tokens = tokens?.create_token_stream(); if attrs.is_empty() { return Some(tokens); } @@ -617,9 +618,9 @@ fn prepend_attrs( ); builder.push( attr.tokens - .clone() + .as_ref() .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr)) - .into_token_stream(), + .create_token_stream(), ); } builder.push(tokens); diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index d99fcb0c4a10f..da1c54e88b5e2 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -16,8 +16,8 @@ pub use path::PathStyle; use rustc_ast::ptr::P; use rustc_ast::token::{self, DelimToken, Token, TokenKind}; -use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, LazyTokenStreamInner, Spacing}; -use rustc_ast::tokenstream::{TokenStream, TokenTree}; +use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing}; +use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree}; use rustc_ast::DUMMY_NODE_ID; use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe}; use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit}; @@ -1199,15 +1199,12 @@ impl<'a> Parser<'a> { f: impl FnOnce(&mut Self) -> PResult<'a, R>, ) -> PResult<'a, (R, Option)> { let start_token = (self.token.clone(), self.token_spacing); - let mut cursor_snapshot = self.token_cursor.clone(); + let cursor_snapshot = self.token_cursor.clone(); let ret = f(self)?; - let new_calls = self.token_cursor.num_next_calls; - let num_calls = new_calls - cursor_snapshot.num_next_calls; - let desugar_doc_comments = self.desugar_doc_comments; - // We didn't capture any tokens + let num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls; if num_calls == 0 { return Ok((ret, None)); } @@ -1220,27 +1217,41 @@ impl<'a> Parser<'a> { // // This also makes `Parser` very cheap to clone, since // there is no intermediate collection buffer to clone. 
- let lazy_cb = move || { - // The token produced by the final call to `next` or `next_desugared` - // was not actually consumed by the callback. The combination - // of chaining the initial token and using `take` produces the desired - // result - we produce an empty `TokenStream` if no calls were made, - // and omit the final token otherwise. - let tokens = std::iter::once(start_token) - .chain((0..num_calls).map(|_| { - if desugar_doc_comments { - cursor_snapshot.next_desugared() - } else { - cursor_snapshot.next() - } - })) - .take(num_calls); + struct LazyTokenStreamImpl { + start_token: (Token, Spacing), + cursor_snapshot: TokenCursor, + num_calls: usize, + desugar_doc_comments: bool, + } + impl CreateTokenStream for LazyTokenStreamImpl { + fn create_token_stream(&self) -> TokenStream { + // The token produced by the final call to `next` or `next_desugared` + // was not actually consumed by the callback. The combination + // of chaining the initial token and using `take` produces the desired + // result - we produce an empty `TokenStream` if no calls were made, + // and omit the final token otherwise. + let mut cursor_snapshot = self.cursor_snapshot.clone(); + let tokens = std::iter::once(self.start_token.clone()) + .chain((0..self.num_calls).map(|_| { + if self.desugar_doc_comments { + cursor_snapshot.next_desugared() + } else { + cursor_snapshot.next() + } + })) + .take(self.num_calls); - make_token_stream(tokens) - }; - let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb))); + make_token_stream(tokens) + } + } - Ok((ret, Some(stream))) + let lazy_impl = LazyTokenStreamImpl { + start_token, + cursor_snapshot, + num_calls, + desugar_doc_comments: self.desugar_doc_comments, + }; + Ok((ret, Some(LazyTokenStream::new(lazy_impl)))) } /// `::{` or `::*`
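The heart of PATCH 11 is replacing a cloneable boxed closure with a named struct stored behind a trait object. The following self-contained sketch (hypothetical names, greatly simplified types, with strings standing in for token streams) models that pattern outside the compiler:

```rust
use std::sync::Arc;

// An object-safe trait: implementors produce the tokens on demand.
trait CreateTokens: Send + Sync {
    fn create(&self) -> Vec<String>;
}

// An already-materialized stream can implement it by cloning itself,
// mirroring `impl CreateTokenStream for TokenStream` in the patch.
impl CreateTokens for Vec<String> {
    fn create(&self) -> Vec<String> {
        self.clone()
    }
}

// A lazy producer is a named struct whose fields hold exactly what the old
// closure used to capture (start token, cursor snapshot, call count, ...).
struct LazyTokens {
    prefix: String,
    count: usize,
}

impl CreateTokens for LazyTokens {
    fn create(&self) -> Vec<String> {
        (0..self.count).map(|i| format!("{}{}", self.prefix, i)).collect()
    }
}

// The public handle is a cheap, cloneable pointer to the trait object.
#[derive(Clone)]
struct Lazy(Arc<dyn CreateTokens>);

impl Lazy {
    fn new(inner: impl CreateTokens + 'static) -> Lazy {
        Lazy(Arc::new(inner))
    }
}

fn main() {
    let lazy = Lazy::new(LazyTokens { prefix: "tok".to_string(), count: 3 });
    assert_eq!(lazy.clone().0.create(), vec!["tok0", "tok1", "tok2"]);
}
```

Moving the captured state into named fields keeps the handle cheap to clone (only the reference-counted pointer is copied), makes the captured data explicit, and removes the need for the `clone_it` object-safety workaround that the boxed-closure version carried.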