Skip to content

Commit

Permalink
Use pending tokens in `replace` when handling `##`
Browse files Browse the repository at this point in the history
Add an "active" flag to the HashHash token so it does not get re-processed indefinitely during macro replacement

Ignore a test that relies on jyn514#513 (stringify out of order)
  • Loading branch information
hdamron17 committed Aug 17, 2020
1 parent 4f0e7d6 commit 6396e51
Show file tree
Hide file tree
Showing 4 changed files with 19 additions and 10 deletions.
8 changes: 4 additions & 4 deletions src/data/lex.rs
Original file line number Diff line number Diff line change
Expand Up @@ -231,9 +231,9 @@ pub enum Token {

// Misc
Ellipsis,
StructDeref, // ->
Hash, // #, used for preprocessing
HashHash, // ##, used for preprocessing
StructDeref, // ->
Hash, // #, used for preprocessing
HashHash(bool), // ##, used for preprocessing (the bool is true unless it is created by `# ## #`)
}

/* impls */
Expand Down Expand Up @@ -394,7 +394,7 @@ impl std::fmt::Display for Token {
Ellipsis => write!(f, "..."),
StructDeref => write!(f, "->"),
Hash => write!(f, "#"),
HashHash => write!(f, "##"),
HashHash(_) => write!(f, "##"),
}
}
}
Expand Down
7 changes: 5 additions & 2 deletions src/lex/cpp.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1959,7 +1959,7 @@ h",
assert_concat("+", "+", Some(PlusPlus));
assert_concat(">>", "=", Some(Assignment(ShrEqual)));
assert_concat(">", "=", Some(Comparison(GreaterEqual)));
assert_concat("#", "#", Some(HashHash));
assert_concat("#", "#", Some(HashHash(false)));
assert_concat("-", ">", Some(StructDeref));
assert_concat("const", "ance", Some(Id("constance".into())));
assert_concat("xyz", "123", Some(Id("xyz123".into())));
Expand All @@ -1974,7 +1974,10 @@ h",
assert_concat(r#""x""#, r#""y""#, None);
assert_concat("0b1", "6", None);
assert_concat("/", "/", None); // Not a comment

}
#[test]
#[ignore] // Related to https://github.com/jyn514/saltwater/issues/513
fn hash_and_hashhash() {
assert_same(
"#define hash_hash # ## #
#define mkstr(a) # a
Expand Down
2 changes: 1 addition & 1 deletion src/lex/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -365,7 +365,7 @@ impl Iterator for Lexer {
'#' => match self.peek() {
Some('#') => {
self.next_char();
Token::HashHash
Token::HashHash(true)
}
_ => Token::Hash,
},
Expand Down
12 changes: 9 additions & 3 deletions src/lex/replace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ pub fn replace(
.into(),
)
});
replacements.push(concat_token); // TODO don't bypass pending
pending.push_back(concat_token);
continue;
}
Ok(Locatable {
Expand Down Expand Up @@ -245,7 +245,7 @@ pub fn replace(
}
}
Ok(Locatable {
data: Token::HashHash,
data: Token::HashHash(true),
..
}) => {
let preceding_tok = loop {
Expand Down Expand Up @@ -492,7 +492,13 @@ fn stringify(args: Vec<Token>) -> Token {
fn concat(x: &Token, y: &Token, location: &Location) -> Option<Locatable<Token>> {
let mut lexer = Lexer::new(location.file, format!("{}{}", x, y), false);
match lexer.next() {
Some(Ok(tok)) if lexer.next().is_none() => Some(tok),
Some(Ok(tok)) if lexer.next().is_none() => Some(match tok {
Locatable {
data: Token::HashHash(_),
location,
} => location.with(Token::HashHash(false)),
tok => tok,
}),
_ => None,
}
}
Expand Down

0 comments on commit 6396e51

Please sign in to comment.