diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index cb419b6362ce7..b53acb97aeb9e 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -22,7 +22,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 use rustc_span::{Span, DUMMY_SP};
 use smallvec::{smallvec, SmallVec};
 
-use std::{iter, mem};
+use std::{fmt, iter, mem};
 
 /// When the main rust parser encounters a syntax-extension invocation, it
 /// parses the arguments to the invocation as a token-tree. This is a very
@@ -120,72 +120,51 @@ where
     }
 }
 
-// A cloneable callback which produces a `TokenStream`. Each clone
-// of this should produce the same `TokenStream`
-pub trait CreateTokenStream: sync::Send + sync::Sync + FnOnce() -> TokenStream {
-    // Workaround for the fact that `Clone` is not object-safe
-    fn clone_it(&self) -> Box<dyn CreateTokenStream>;
+pub trait CreateTokenStream: sync::Send + sync::Sync {
+    fn create_token_stream(&self) -> TokenStream;
 }
 
-impl<F: 'static + Clone + sync::Send + sync::Sync + FnOnce() -> TokenStream> CreateTokenStream
-    for F
-{
-    fn clone_it(&self) -> Box<dyn CreateTokenStream> {
-        Box::new(self.clone())
-    }
-}
-
-impl Clone for Box<dyn CreateTokenStream> {
-    fn clone(&self) -> Self {
-        let val: &(dyn CreateTokenStream) = &**self;
-        val.clone_it()
+impl CreateTokenStream for TokenStream {
+    fn create_token_stream(&self) -> TokenStream {
+        self.clone()
     }
 }
 
-/// A lazy version of `TokenStream`, which may defer creation
+/// A lazy version of `TokenStream`, which defers creation
 /// of an actual `TokenStream` until it is needed.
-pub type LazyTokenStream = Lrc<LazyTokenStreamInner>;
-
+/// `Box` is here only to reduce the structure size.
 #[derive(Clone)]
-pub enum LazyTokenStreamInner {
-    Lazy(Box<dyn CreateTokenStream>),
-    Ready(TokenStream),
-}
+pub struct LazyTokenStream(Lrc<Box<dyn CreateTokenStream>>);
 
-impl std::fmt::Debug for LazyTokenStreamInner {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            LazyTokenStreamInner::Lazy(..) => f.debug_struct("LazyTokenStream::Lazy").finish(),
-            LazyTokenStreamInner::Ready(..) => f.debug_struct("LazyTokenStream::Ready").finish(),
-        }
+impl LazyTokenStream {
+    pub fn new(inner: impl CreateTokenStream + 'static) -> LazyTokenStream {
+        LazyTokenStream(Lrc::new(Box::new(inner)))
+    }
+
+    pub fn create_token_stream(&self) -> TokenStream {
+        self.0.create_token_stream()
     }
 }
 
-impl LazyTokenStreamInner {
-    pub fn into_token_stream(&self) -> TokenStream {
-        match self {
-            // Note that we do not cache this. If this ever becomes a performance
-            // problem, we should investigate wrapping `LazyTokenStreamInner`
-            // in a lock
-            LazyTokenStreamInner::Lazy(cb) => (cb.clone())(),
-            LazyTokenStreamInner::Ready(stream) => stream.clone(),
-        }
+impl fmt::Debug for LazyTokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt("LazyTokenStream", f)
     }
 }
 
-impl<S: Encoder> Encodable<S> for LazyTokenStreamInner {
+impl<S: Encoder> Encodable<S> for LazyTokenStream {
     fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
         panic!("Attempted to encode LazyTokenStream");
     }
 }
 
-impl<D: Decoder> Decodable<D> for LazyTokenStreamInner {
+impl<D: Decoder> Decodable<D> for LazyTokenStream {
     fn decode(_d: &mut D) -> Result<Self, D::Error> {
         panic!("Attempted to decode LazyTokenStream");
    }
 }
 
-impl<CTX> HashStable<CTX> for LazyTokenStreamInner {
+impl<CTX> HashStable<CTX> for LazyTokenStream {
     fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
         panic!("Attempted to compute stable hash for LazyTokenStream");
     }
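The core of this file's change: the old `CreateTokenStream` was a cloneable `FnOnce` callback with a hand-written `clone_it` to work around `Clone` not being object-safe, while the new one is a plain object-safe trait. Sharing now happens at the `Lrc` level inside the `LazyTokenStream` newtype, so the trait object itself never needs to be cloned. A minimal sketch of the new shape outside rustc, with `Arc` standing in for `Lrc` and a toy `TokenStream` (all names here are illustrative stand-ins, not rustc's real types):

```rust
use std::sync::Arc;

#[derive(Clone, Debug, PartialEq)]
struct TokenStream(Vec<String>);

// Plain object-safe trait: no `Clone` supertrait, no `clone_it` workaround.
trait CreateTokenStream: Send + Sync {
    fn create_token_stream(&self) -> TokenStream;
}

// An eagerly-known stream just clones itself, which is what replaces the
// old `LazyTokenStreamInner::Ready` variant.
impl CreateTokenStream for TokenStream {
    fn create_token_stream(&self) -> TokenStream {
        self.clone()
    }
}

// Cloning the wrapper bumps the refcount instead of cloning the producer.
#[derive(Clone)]
struct LazyTokenStream(Arc<Box<dyn CreateTokenStream>>);

impl LazyTokenStream {
    fn new(inner: impl CreateTokenStream + 'static) -> LazyTokenStream {
        LazyTokenStream(Arc::new(Box::new(inner)))
    }

    fn create_token_stream(&self) -> TokenStream {
        self.0.create_token_stream()
    }
}

fn main() {
    let ready = TokenStream(vec!["#".into(), "[attr]".into()]);
    let lazy = LazyTokenStream::new(ready.clone());
    let copy = lazy.clone(); // cheap: a refcount bump, not a deep clone
    assert_eq!(copy.create_token_stream(), ready);
}
```

This is also why `Parser` stays cheap to clone in the last file of the diff: the pending token stream is shared, never eagerly recomputed.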
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index 3551b92967c47..c124ab6421862 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -4,12 +4,11 @@ use rustc_ast::attr::HasAttrs;
 use rustc_ast::mut_visit::*;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{DelimSpan, LazyTokenStreamInner, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing, TokenStream, TokenTree};
 use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem};
 use rustc_attr as attr;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::map_in_place::MapInPlace;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{error_code, struct_span_err, Applicability, Handler};
 use rustc_feature::{Feature, Features, State as FeatureState};
 use rustc_feature::{
@@ -303,7 +302,7 @@ impl<'a> StripUnconfigured<'a> {
 
                 // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
                 // for `attr` when we expand it to `#[attr]`
-                let pound_token = orig_tokens.into_token_stream().trees().next().unwrap();
+                let pound_token = orig_tokens.create_token_stream().trees().next().unwrap();
                 if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
                     panic!("Bad tokens for attribute {:?}", attr);
                 }
@@ -313,16 +312,16 @@ impl<'a> StripUnconfigured<'a> {
                     DelimSpan::from_single(pound_token.span()),
                     DelimToken::Bracket,
                     item.tokens
-                        .clone()
+                        .as_ref()
                         .unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
-                        .into_token_stream(),
+                        .create_token_stream(),
                 );
                 let mut attr = attr::mk_attr_from_item(attr.style, item, span);
-                attr.tokens = Some(Lrc::new(LazyTokenStreamInner::Ready(TokenStream::new(vec![
+                attr.tokens = Some(LazyTokenStream::new(TokenStream::new(vec![
                     (pound_token, Spacing::Alone),
                     (bracket_group, Spacing::Alone),
-                ]))));
+                ])));
                 self.process_cfg_attr(attr)
             })
             .collect()
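Worth noting at this call site: the old code had to name the `Ready` variant and wrap it in an `Lrc` by hand, while the new code passes a finished `TokenStream` straight to `LazyTokenStream::new`, leaning on the `impl CreateTokenStream for TokenStream` added above. The stream itself is assembled as `(TokenTree, Spacing)` pairs: a `#` token followed by a bracket-delimited group. A toy sketch of that assembly shape (simplified stand-in types, not rustc's):

```rust
// Stand-ins for rustc's `TokenTree` and `Spacing`, for illustration only.
#[derive(Clone, Debug)]
enum TokenTree {
    Token(String),
    Delimited(Vec<TokenTree>),
}

#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
enum Spacing {
    Alone,
    Joint,
}

// Mirrors the shape of the synthesized `#[attr]` stream: `#`, then the
// attribute body wrapped in a bracketed group, each paired with `Alone`.
fn mk_attr_tokens(item_tokens: Vec<TokenTree>) -> Vec<(TokenTree, Spacing)> {
    let pound_token = TokenTree::Token("#".to_string());
    let bracket_group = TokenTree::Delimited(item_tokens);
    vec![(pound_token, Spacing::Alone), (bracket_group, Spacing::Alone)]
}

fn main() {
    let attr = mk_attr_tokens(vec![TokenTree::Token("cfg(test)".into())]);
    println!("{:?}", attr);
}
```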
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 5c404161004a4..e851451269e32 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -249,29 +249,30 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
     // came from. Here we attempt to extract these lossless token streams
     // before we fall back to the stringification.
 
-    let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream());
+    let convert_tokens =
+        |tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
 
     let tokens = match *nt {
         Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
-        Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()),
+        Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens),
         Nonterminal::NtStmt(ref stmt) => {
             // FIXME: We currently only collect tokens for `:stmt`
             // matchers in `macro_rules!` macros. When we start collecting
             // tokens for attributes on statements, we will need to prepend
             // attributes here
-            convert_tokens(stmt.tokens.clone())
+            convert_tokens(&stmt.tokens)
         }
-        Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.clone()),
-        Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.clone()),
+        Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens),
+        Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens),
         Nonterminal::NtIdent(ident, is_raw) => {
             Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
         }
         Nonterminal::NtLifetime(ident) => {
             Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
         }
-        Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.clone()),
-        Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.clone()),
-        Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.clone()),
+        Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens),
+        Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens),
+        Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens),
         Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
         Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
             if expr.tokens.is_none() {
@@ -604,7 +605,7 @@ fn prepend_attrs(
     attrs: &[ast::Attribute],
     tokens: Option<&tokenstream::LazyTokenStream>,
 ) -> Option<tokenstream::TokenStream> {
-    let tokens = tokens?.clone().into_token_stream();
+    let tokens = tokens?.create_token_stream();
     if attrs.is_empty() {
         return Some(tokens);
     }
@@ -617,9 +618,9 @@ fn prepend_attrs(
         );
         builder.push(
             attr.tokens
-                .clone()
+                .as_ref()
                 .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
-                .into_token_stream(),
+                .create_token_stream(),
         );
     }
     builder.push(tokens);
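The `convert_tokens` rewrite is a borrow-don't-clone refactor: the closure now takes `&Option<LazyTokenStream>` and uses `as_ref().map(...)`, so each nonterminal arm passes a reference instead of cloning the `Option` up front, and `prepend_attrs` follows the same pattern with `tokens?.create_token_stream()`. A self-contained illustration of the pattern, with a plain `Vec<u32>` standing in for `LazyTokenStream`:

```rust
fn main() {
    // Borrow the Option and map over a reference to its contents, so the
    // caller keeps ownership; only the `Some` case does any real work.
    let convert_tokens =
        |tokens: &Option<Vec<u32>>| tokens.as_ref().map(|t| t.iter().sum::<u32>());

    let some_tokens = Some(vec![1, 2, 3]);
    let no_tokens: Option<Vec<u32>> = None;

    assert_eq!(convert_tokens(&some_tokens), Some(6));
    assert_eq!(convert_tokens(&no_tokens), None);
    // `some_tokens` was only borrowed and is still usable here.
    assert_eq!(some_tokens, Some(vec![1, 2, 3]));
}
```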
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index d99fcb0c4a10f..da1c54e88b5e2 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -16,8 +16,8 @@ pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, LazyTokenStreamInner, Spacing};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
+use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree};
 use rustc_ast::DUMMY_NODE_ID;
 use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
 use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
@@ -1199,15 +1199,12 @@ impl<'a> Parser<'a> {
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
     ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
-        let mut cursor_snapshot = self.token_cursor.clone();
+        let cursor_snapshot = self.token_cursor.clone();
 
         let ret = f(self)?;
 
-        let new_calls = self.token_cursor.num_next_calls;
-        let num_calls = new_calls - cursor_snapshot.num_next_calls;
-        let desugar_doc_comments = self.desugar_doc_comments;
-
-        // We didn't capture any tokens
+        // We didn't capture any tokens
+        let num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
         if num_calls == 0 {
             return Ok((ret, None));
         }
@@ -1220,27 +1217,41 @@ impl<'a> Parser<'a> {
         //
         // This also makes `Parser` very cheap to clone, since
        // there is no intermediate collection buffer to clone.
-        let lazy_cb = move || {
-            // The token produced by the final call to `next` or `next_desugared`
-            // was not actually consumed by the callback. The combination
-            // of chaining the initial token and using `take` produces the desired
-            // result - we produce an empty `TokenStream` if no calls were made,
-            // and omit the final token otherwise.
-            let tokens = std::iter::once(start_token)
-                .chain((0..num_calls).map(|_| {
-                    if desugar_doc_comments {
-                        cursor_snapshot.next_desugared()
-                    } else {
-                        cursor_snapshot.next()
-                    }
-                }))
-                .take(num_calls);
+        struct LazyTokenStreamImpl {
+            start_token: (Token, Spacing),
+            cursor_snapshot: TokenCursor,
+            num_calls: usize,
+            desugar_doc_comments: bool,
+        }
+        impl CreateTokenStream for LazyTokenStreamImpl {
+            fn create_token_stream(&self) -> TokenStream {
+                // The token produced by the final call to `next` or `next_desugared`
+                // was not actually consumed by the callback. The combination
+                // of chaining the initial token and using `take` produces the desired
+                // result - we produce an empty `TokenStream` if no calls were made,
+                // and omit the final token otherwise.
+                let mut cursor_snapshot = self.cursor_snapshot.clone();
+                let tokens = std::iter::once(self.start_token.clone())
+                    .chain((0..self.num_calls).map(|_| {
+                        if self.desugar_doc_comments {
+                            cursor_snapshot.next_desugared()
+                        } else {
+                            cursor_snapshot.next()
+                        }
+                    }))
+                    .take(self.num_calls);
 
-            make_token_stream(tokens)
-        };
-        let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
+                make_token_stream(tokens)
+            }
+        }
 
-        Ok((ret, Some(stream)))
+        let lazy_impl = LazyTokenStreamImpl {
+            start_token,
+            cursor_snapshot,
+            num_calls,
+            desugar_doc_comments: self.desugar_doc_comments,
+        };
+        Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
     }
 
     /// `::{` or `::*`
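The closure-to-struct conversion is the heart of this last hunk. Naming the captured state (`start_token`, `cursor_snapshot`, `num_calls`, `desugar_doc_comments`) lets `create_token_stream` take `&self` and clone the cursor snapshot internally on each call, which is exactly what makes the `FnOnce + Clone + clone_it` machinery deleted in the first file unnecessary. A compilable miniature of the replay idea, omitting the start-token and final-token bookkeeping the real code does (all types are simplified stand-ins):

```rust
use std::sync::Arc;

// A cursor over a shared token buffer; cloning it is cheap and captures
// the current position, like a snapshot of rustc's `TokenCursor`.
#[derive(Clone)]
struct TokenCursor {
    tokens: Arc<Vec<String>>,
    pos: usize,
}

impl TokenCursor {
    fn next(&mut self) -> Option<String> {
        let tok = self.tokens.get(self.pos).cloned();
        self.pos += 1;
        tok
    }
}

trait CreateTokenStream: Send + Sync {
    fn create_token_stream(&self) -> Vec<String>;
}

// A named struct instead of a `move` closure: the captured state is explicit.
struct LazyTokenStreamImpl {
    cursor_snapshot: TokenCursor,
    num_calls: usize,
}

impl CreateTokenStream for LazyTokenStreamImpl {
    fn create_token_stream(&self) -> Vec<String> {
        // Clone the snapshot so replay is a `&self` operation; the same
        // lazy stream can therefore be realized more than once.
        let mut cursor = self.cursor_snapshot.clone();
        (0..self.num_calls).filter_map(|_| cursor.next()).collect()
    }
}

fn main() {
    let tokens = vec!["fn".to_string(), "f".to_string(), "(".to_string(), ")".to_string()];
    let mut cursor = TokenCursor { tokens: Arc::new(tokens), pos: 0 };
    let snapshot = cursor.clone();

    // Simulate a parser callback that consumed three tokens.
    for _ in 0..3 {
        cursor.next();
    }
    let num_calls = cursor.pos - snapshot.pos;

    // Nothing is buffered up front; the window is replayed only on demand.
    let lazy = LazyTokenStreamImpl { cursor_snapshot: snapshot, num_calls };
    assert_eq!(lazy.create_token_stream(), vec!["fn", "f", "("]);
}
```

Because nothing is realized until `create_token_stream` is called, a captured `macro_rules!` argument that is never passed to a proc macro never pays for building its `TokenStream`, which is the motivation the comment block in the hunk above spells out.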