Optimize TokenStreamBuilder::push.

Currently, when two tokens must be glued together, this function duplicates
large chunks of the existing streams. This can cause quadratic behaviour.

This commit changes the function so that it overwrites the last token with a
glued token, which avoids the quadratic behaviour. This removes the need for
`TokenStreamBuilder::push_all_but_{first,last}_tree`.

The commit also restructures `push` somewhat, by removing
`TokenStream::{first_tree_and_joint,last_tree_if_joint}` in favour of more
pattern matching and some comments. This makes the code shorter and, in my
opinion, more readable.
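
To illustrate the shape of the change, here is a minimal, self-contained sketch. The names (`Builder`, `push_copying`, `push_in_place`) are simplified stand-ins for `TokenStreamBuilder` and its old and new `push`, tokens are modelled as plain strings, and gluing as concatenation that always succeeds -- so it only shows the mechanics (copy-and-repush versus overwrite-in-place), not the real token types.

// A minimal sketch, not the rustc types: tokens are plain strings, every
// adjacent pair is treated as gluable, and gluing is string concatenation.
use std::rc::Rc;

type Stream = Rc<Vec<String>>;

#[derive(Default)]
struct Builder(Vec<Stream>);

impl Builder {
    // Old shape: pop the last stream, re-push a copy of all but its last
    // token, push the glued token as its own stream, then push a copy of all
    // but the first token of the incoming stream. Gluing one seam copies both
    // streams, which is the duplication the commit message calls quadratic.
    fn push_copying(&mut self, stream: Stream) {
        if self.0.last().map_or(false, |s| !s.is_empty()) && !stream.is_empty() {
            let last = self.0.pop().unwrap();
            let glued = format!("{}{}", last.last().unwrap(), stream.first().unwrap());
            if last.len() > 1 {
                self.0.push(Rc::new(last[..last.len() - 1].to_vec()));
            }
            self.0.push(Rc::new(vec![glued]));
            if stream.len() > 1 {
                self.0.push(Rc::new(stream[1..].to_vec()));
            }
            return;
        }
        self.0.push(stream);
    }

    // New shape: overwrite the last token of the last stream in place and
    // drop the incoming stream's first token. Only the seam is touched;
    // `Rc::make_mut` clones the vector only if it is shared elsewhere.
    fn push_in_place(&mut self, mut stream: Stream) {
        if let Some(last_stream) = self.0.last_mut() {
            if !last_stream.is_empty() && !stream.is_empty() {
                let glued =
                    format!("{}{}", last_stream.last().unwrap(), stream.first().unwrap());
                *Rc::make_mut(last_stream).last_mut().unwrap() = glued;
                Rc::make_mut(&mut stream).remove(0);
                // An empty leftover stream is not pushed, so it cannot sit
                // between two tokens that should be glued later.
                if !stream.is_empty() {
                    self.0.push(stream);
                }
                return;
            }
        }
        self.0.push(stream);
    }
}

fn main() {
    let (mut a, mut b) = (Builder::default(), Builder::default());
    for tok in ["foo", "::", "bar"] {
        a.push_copying(Rc::new(vec![tok.to_string()]));
        b.push_in_place(Rc::new(vec![tok.to_string()]));
    }
    // Both strategies end up with the single glued token "foo::bar";
    // only the amount of copying along the way differs.
    assert_eq!(a.0, b.0);
    assert_eq!(*a.0[0], vec!["foo::bar".to_string()]);
}
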
nnethercote committed Oct 8, 2019
1 parent 3832a63 commit 75e0078
Showing 1 changed file with 43 additions and 51 deletions.
94 changes: 43 additions & 51 deletions src/libsyntax/tokenstream.rs
@@ -390,25 +390,6 @@ impl TokenStream {
                 .collect())
         }))
     }
-
-    fn first_tree_and_joint(&self) -> Option<TreeAndJoint> {
-        self.0.as_ref().map(|stream| {
-            stream.first().unwrap().clone()
-        })
-    }
-
-    fn last_tree_if_joint(&self) -> Option<TokenTree> {
-        match self.0 {
-            None => None,
-            Some(ref stream) => {
-                if let (tree, Joint) = stream.last().unwrap() {
-                    Some(tree.clone())
-                } else {
-                    None
-                }
-            }
-        }
-    }
 }
 
 // 99.5%+ of the time we have 1 or 2 elements in this vector.
@@ -421,18 +402,49 @@ impl TokenStreamBuilder {
     }
 
     pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
-        let stream = stream.into();
-        let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
-        if let Some(TokenTree::Token(last_token)) = last_tree_if_joint {
-            if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() {
-                if let Some(glued_tok) = last_token.glue(&token) {
-                    let last_stream = self.0.pop().unwrap();
-                    self.push_all_but_last_tree(&last_stream);
-                    let glued_tt = TokenTree::Token(glued_tok);
-                    let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
-                    self.0.push(glued_tokenstream);
-                    self.push_all_but_first_tree(&stream);
-                    return
+        let mut stream = stream.into();
+
+        // If `self` is not empty and the last tree within the last stream is a
+        // token tree marked with `Joint`...
+        if let Some(TokenStream(Some(ref mut last_stream_lrc))) = self.0.last_mut() {
+            if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() {
+
+                // ...and `stream` is not empty and the first tree within it is
+                // a token tree...
+                if let TokenStream(Some(ref mut stream_lrc)) = stream {
+                    if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
+
+                        // ...and the two tokens can be glued together...
+                        if let Some(glued_tok) = last_token.glue(&token) {
+
+                            // ...then do so, by overwriting the last token
+                            // tree in `self` and removing the first token tree
+                            // from `stream`. This requires using `make_mut()`
+                            // on the last stream in `self` and on `stream`,
+                            // and in practice this doesn't cause cloning 99.9%
+                            // of the time.
+
+                            // Overwrite the last token tree with the merged
+                            // token.
+                            let last_vec_mut = Lrc::make_mut(last_stream_lrc);
+                            *last_vec_mut.last_mut().unwrap() =
+                                (TokenTree::Token(glued_tok), *is_joint);
+
+                            // Remove the first token tree from `stream`. (This
+                            // is almost always the only tree in `stream`.)
+                            let stream_vec_mut = Lrc::make_mut(stream_lrc);
+                            stream_vec_mut.remove(0);
+
+                            // Don't push `stream` if it's empty -- that could
+                            // block subsequent token gluing, by getting
+                            // between two token trees that should be glued
+                            // together.
+                            if !stream.is_empty() {
+                                self.0.push(stream);
+                            }
+                            return;
+                        }
+                    }
                 }
             }
         }
@@ -442,26 +454,6 @@ impl TokenStreamBuilder {
     pub fn build(self) -> TokenStream {
         TokenStream::from_streams(self.0)
     }
-
-    fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
-        if let Some(ref streams) = stream.0 {
-            let len = streams.len();
-            match len {
-                1 => {}
-                _ => self.0.push(TokenStream(Some(Lrc::new(streams[0 .. len - 1].to_vec())))),
-            }
-        }
-    }
-
-    fn push_all_but_first_tree(&mut self, stream: &TokenStream) {
-        if let Some(ref streams) = stream.0 {
-            let len = streams.len();
-            match len {
-                1 => {}
-                _ => self.0.push(TokenStream(Some(Lrc::new(streams[1 .. len].to_vec())))),
-            }
-        }
-    }
 }
 
 #[derive(Clone)]
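
The in-place overwrite leans on `Lrc::make_mut` (`Lrc` being rustc's `Rc`/`Arc` alias): it returns a mutable reference to the pointee and clones it only when the pointer is shared, which the new comment notes almost never happens here. A small sketch of that behaviour using plain `std::rc::Rc`, as an illustration rather than the compiler's own code:

use std::rc::Rc;

fn main() {
    // Sole owner: make_mut hands back the existing Vec, no clone happens.
    let mut only = Rc::new(vec![1, 2, 3]);
    *Rc::make_mut(&mut only).last_mut().unwrap() = 99;
    assert_eq!(*only, vec![1, 2, 99]);
    assert_eq!(Rc::strong_count(&only), 1);

    // Shared: make_mut clones the Vec first, so other handles are untouched.
    let mut shared = Rc::new(vec![1, 2, 3]);
    let observer = Rc::clone(&shared);
    *Rc::make_mut(&mut shared).last_mut().unwrap() = 99;
    assert_eq!(*shared, vec![1, 2, 99]);
    assert_eq!(*observer, vec![1, 2, 3]);
}

The same uniqueness check is what keeps the overwrite in `push` cheap: per the diff's own comment, the last stream in the builder and the incoming stream are almost never shared at that point, so gluing becomes an in-place write rather than a rebuild of both streams.
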
