diff --git a/crates/nargo_cli/tests/test_data/generics/src/main.nr b/crates/nargo_cli/tests/test_data/generics/src/main.nr
index c506995adc3..bfde9d3c957 100644
--- a/crates/nargo_cli/tests/test_data/generics/src/main.nr
+++ b/crates/nargo_cli/tests/test_data/generics/src/main.nr
@@ -49,4 +49,9 @@ fn main(x: Field, y: Field) {
 
     // Expected type error
     // assert(bar2.get_other() == bar2.other);
+
+    let one = x;
+    let two = y;
+    let nested_generics: Bar<Bar<Field>> = Bar { one, two, other: Bar { one, two, other: 0 } };
+    assert(nested_generics.other.other == bar1.get_other());
 }
diff --git a/crates/noirc_frontend/src/lexer/lexer.rs b/crates/noirc_frontend/src/lexer/lexer.rs
index 5e0d99cfed9..2c8583ef2c7 100644
--- a/crates/noirc_frontend/src/lexer/lexer.rs
+++ b/crates/noirc_frontend/src/lexer/lexer.rs
@@ -162,9 +162,8 @@ impl<'a> Lexer<'a> {
                 if self.peek_char_is('=') {
                     self.next_char();
                     Ok(Token::GreaterEqual.into_span(start, start + 1))
-                } else if self.peek_char_is('>') {
-                    self.next_char();
-                    Ok(Token::ShiftRight.into_span(start, start + 1))
+                // Note: There is deliberately no case for RightShift. We always lex >> as
+                // two separate Greater tokens to help the parser parse nested generic types.
                 } else {
                     Ok(prev_token.into_single_span(start))
                 }
@@ -387,7 +386,8 @@ fn test_single_double_char() {
         Token::Assign,
         Token::Equal,
         Token::ShiftLeft,
-        Token::ShiftRight,
+        Token::Greater,
+        Token::Greater,
         Token::EOF,
     ];
 
diff --git a/crates/noirc_frontend/src/parser/errors.rs b/crates/noirc_frontend/src/parser/errors.rs
index 7012c0fbda5..c339835fbc3 100644
--- a/crates/noirc_frontend/src/parser/errors.rs
+++ b/crates/noirc_frontend/src/parser/errors.rs
@@ -147,7 +147,7 @@ impl chumsky::Error<Token> for ParserError {
             self.reason = other.reason;
         }
 
-        assert_eq!(self.span, other.span);
+        self.span = self.span.merge(other.span);
         self
     }
 }
diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs
index d83cf6fd710..3a8c8f49303 100644
--- a/crates/noirc_frontend/src/parser/parser.rs
+++ b/crates/noirc_frontend/src/parser/parser.rs
@@ -517,9 +517,22 @@ where
     )
 }
 
+/// Parse an assignment operator `=` optionally prefixed by a binary operator for a combined
+/// assign statement shorthand. Notably, this must handle a few corner cases with how `>>` is
+/// lexed as two separate greater-than operators rather than a single right-shift.
 fn assign_operator() -> impl NoirParser<Token> {
     let shorthand_operators = Token::assign_shorthand_operators();
-    let shorthand_syntax = one_of(shorthand_operators).then_ignore(just(Token::Assign));
+    // We need to explicitly check for right_shift here since it is actually
+    // two separate greater-than operators.
+    let shorthand_operators = right_shift_operator().or(one_of(shorthand_operators));
+    let shorthand_syntax = shorthand_operators.then_ignore(just(Token::Assign));
+
+    // Since >> is lexed as two separate greater-thans, >>= is lexed as > >=, so
+    // we need to account for that case here as well.
+    let right_shift_fix =
+        just(Token::Greater).then(just(Token::GreaterEqual)).map(|_| Token::ShiftRight);
+
+    let shorthand_syntax = shorthand_syntax.or(right_shift_fix);
     just(Token::Assign).or(shorthand_syntax)
 }
 
@@ -726,14 +739,23 @@ fn create_infix_expression(lhs: Expression, (operator, rhs): (BinaryOp, Expressi
     Expression { span, kind: ExpressionKind::Infix(infix) }
 }
 
+// Right-shift (>>) is issued as two separate > tokens by the lexer as this makes it easier
+// to parse nested generic types. For normal expressions however, it means we have to manually
+// parse two greater-than tokens as a single right-shift here.
+fn right_shift_operator() -> impl NoirParser<Token> {
+    just(Token::Greater).then(just(Token::Greater)).map(|_| Token::ShiftRight)
+}
+
 fn operator_with_precedence(precedence: Precedence) -> impl NoirParser<Spanned<BinaryOpKind>> {
-    filter_map(move |span, token: Token| {
-        if Precedence::token_precedence(&token) == Some(precedence) {
-            Ok(token.try_into_binary_op(span).unwrap())
-        } else {
-            Err(ParserError::expected_label("binary operator".to_string(), token, span))
-        }
-    })
+    right_shift_operator()
+        .or(any()) // Parse any single token, we're validating it as an operator next
+        .try_map(move |token, span| {
+            if Precedence::token_precedence(&token) == Some(precedence) {
+                Ok(token.try_into_binary_op(span).unwrap())
+            } else {
+                Err(ParserError::expected_label("binary operator".to_string(), token, span))
+            }
+        })
 }
 
 fn term<'a, P>(expr_parser: P) -> impl NoirParser<Expression> + 'a
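
For context, a minimal standalone Rust sketch of the technique this patch applies (this is not Noir's actual lexer or parser; the Tok, lex, and next_operator names below are hypothetical): the lexer never emits a right-shift token, so the two trailing > in Bar<Bar<Field>> can each close a generic scope, while an expression parser re-glues two adjacent > tokens into a single shift operator, analogous to right_shift_operator in the diff.

// Illustrative sketch only, not the Noir API. All names here are hypothetical.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Tok {
    Greater,      // `>`
    GreaterEqual, // `>=`
    ShiftRight,   // `>>`, reconstructed by the parser, never emitted by the lexer
    Less,         // `<`
    Ident,
    Eof,
}

// A lexer that never glues `>` `>` together: `Bar<Bar<Field>>` ends in [Greater, Greater].
fn lex(src: &str) -> Vec<Tok> {
    let mut out = Vec::new();
    let mut chars = src.chars().peekable();
    while let Some(c) = chars.next() {
        match c {
            '>' if chars.peek() == Some(&'=') => {
                chars.next();
                out.push(Tok::GreaterEqual);
            }
            '>' => out.push(Tok::Greater), // deliberately no `>>` case
            '<' => out.push(Tok::Less),
            c if c.is_alphanumeric() => {
                while matches!(chars.peek(), Some(c) if c.is_alphanumeric()) {
                    chars.next();
                }
                out.push(Tok::Ident);
            }
            _ => {}
        }
    }
    out.push(Tok::Eof);
    out
}

// The expression parser re-glues two adjacent `>` tokens into one shift operator,
// mirroring the right_shift_operator helper in the diff. Returns the operator it
// recognized and how many tokens it consumed.
fn next_operator(toks: &[Tok]) -> (Option<Tok>, usize) {
    match toks {
        [Tok::Greater, Tok::Greater, ..] => (Some(Tok::ShiftRight), 2),
        [first, ..] if *first != Tok::Eof => (Some(*first), 1),
        _ => (None, 0),
    }
}

fn main() {
    // Type position: the two trailing `>` tokens can close two generic scopes.
    assert_eq!(lex("Bar<Bar<Field>>"), vec![
        Tok::Ident, Tok::Less, Tok::Ident, Tok::Less, Tok::Ident,
        Tok::Greater, Tok::Greater, Tok::Eof,
    ]);

    // Expression position: the parser sees `>` `>` and reconstructs a right shift.
    let toks = lex("x>>y");
    assert_eq!(next_operator(&toks[1..]), (Some(Tok::ShiftRight), 2));
}

The same idea covers the shift-assign shorthand: because >>= now lexes as > followed by >=, the assign_operator change in the diff matches Greater then GreaterEqual to recover it.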