Make LexicalError fields private
MichaReiser committed Feb 7, 2024
1 parent 4593742 commit 94f9a43
Showing 10 changed files with 165 additions and 160 deletions.
4 changes: 2 additions & 2 deletions crates/ruff_python_formatter/src/lib.rs
@@ -134,8 +134,8 @@ pub fn format_module_source(
let source_type = options.source_type();
let (tokens, comment_ranges) =
tokens_and_ranges(source, source_type).map_err(|err| ParseError {
offset: err.location,
error: ParseErrorType::Lexical(err.error),
offset: err.location(),
error: ParseErrorType::Lexical(err.into_error()),
})?;
let module = parse_tokens(tokens, source, source_type.as_mode())?;
let formatted = format_module_ast(&module, &comment_ranges, source, options)?;
4 changes: 2 additions & 2 deletions crates/ruff_python_formatter/src/range.rs
@@ -73,8 +73,8 @@ pub fn format_range(

let (tokens, comment_ranges) =
tokens_and_ranges(source, options.source_type()).map_err(|err| ParseError {
offset: err.location,
error: ParseErrorType::Lexical(err.error),
offset: err.location(),
error: ParseErrorType::Lexical(err.into_error()),
})?;

assert_valid_char_boundaries(range, source);
42 changes: 20 additions & 22 deletions crates/ruff_python_parser/src/function.rs
@@ -39,10 +39,10 @@ pub(crate) fn validate_arguments(arguments: &ast::Parameters) -> Result<(), Lexi
let range = arg.range;
let arg_name = arg.name.as_str();
if !all_arg_names.insert(arg_name) {
return Err(LexicalError {
error: LexicalErrorType::DuplicateArgumentError(arg_name.to_string()),
location: range.start(),
});
return Err(LexicalError::new(
LexicalErrorType::DuplicateArgumentError(arg_name.to_string()),
range.start(),
));
}
}

@@ -64,10 +64,10 @@ pub(crate) fn validate_pos_params(
.skip_while(|arg| arg.default.is_some()) // and then args with default
.next(); // there must not be any more args without default
if let Some(invalid) = first_invalid {
return Err(LexicalError {
error: LexicalErrorType::DefaultArgumentError,
location: invalid.parameter.start(),
});
return Err(LexicalError::new(
LexicalErrorType::DefaultArgumentError,
invalid.parameter.start(),
));
}
Ok(())
}
@@ -94,12 +94,10 @@ pub(crate) fn parse_arguments(
// Check for duplicate keyword arguments in the call.
if let Some(keyword_name) = &name {
if !keyword_names.insert(keyword_name.to_string()) {
return Err(LexicalError {
error: LexicalErrorType::DuplicateKeywordArgumentError(
keyword_name.to_string(),
),
location: start,
});
return Err(LexicalError::new(
LexicalErrorType::DuplicateKeywordArgumentError(keyword_name.to_string()),
start,
));
}
} else {
double_starred = true;
@@ -113,17 +111,17 @@
} else {
// Positional arguments mustn't follow keyword arguments.
if !keywords.is_empty() && !is_starred(&value) {
return Err(LexicalError {
error: LexicalErrorType::PositionalArgumentError,
location: value.start(),
});
return Err(LexicalError::new(
LexicalErrorType::PositionalArgumentError,
value.start(),
));
// Allow starred arguments after keyword arguments but
// not after double-starred arguments.
} else if double_starred {
return Err(LexicalError {
error: LexicalErrorType::UnpackedArgumentError,
location: value.start(),
});
return Err(LexicalError::new(
LexicalErrorType::UnpackedArgumentError,
value.start(),
));
}

args.push(value);
2 changes: 1 addition & 1 deletion crates/ruff_python_parser/src/invalid.rs
@@ -39,7 +39,7 @@ pub(crate) fn assignment_target(target: &Expr) -> Result<(), LexicalError> {

let err = |location: TextSize| -> LexicalError {
let error = LexicalErrorType::AssignmentError;
LexicalError { error, location }
LexicalError::new(error, location)
};
match *target {
BoolOp(ref e) => Err(err(e.range.start())),
16 changes: 14 additions & 2 deletions crates/ruff_python_parser/src/lexer.rs
@@ -1297,16 +1297,28 @@ impl FusedIterator for Lexer<'_> {}
#[derive(Debug, Clone, PartialEq)]
pub struct LexicalError {
/// The type of error that occurred.
pub error: LexicalErrorType,
error: LexicalErrorType,
/// The location of the error.
pub location: TextSize,
location: TextSize,
}

impl LexicalError {
/// Creates a new `LexicalError` with the given error type and location.
pub fn new(error: LexicalErrorType, location: TextSize) -> Self {
Self { error, location }
}

pub fn error(&self) -> &LexicalErrorType {
&self.error
}

pub fn into_error(self) -> LexicalErrorType {
self.error
}

pub fn location(&self) -> TextSize {
self.location
}
}

impl std::ops::Deref for LexicalError {
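For downstream callers, the migration is mechanical: construct errors through `LexicalError::new` and read them through the new accessors instead of the now-private fields. A minimal sketch of the new API, assuming the `lexer` module path shown in this diff is public; the `main` wrapper and the example error value are illustrative only, not part of this commit:

    use ruff_python_parser::lexer::{LexicalError, LexicalErrorType};
    use ruff_text_size::TextSize;

    fn main() {
        // Construction now goes through `new` instead of a struct literal.
        let err = LexicalError::new(
            LexicalErrorType::OtherError("example error".to_string()),
            TextSize::default(), // offset 0 for illustration; real callers pass the token start
        );

        // Reads go through the accessors instead of the removed public fields.
        let offset: TextSize = err.location();     // copies the offset
        let kind: &LexicalErrorType = err.error(); // borrows the error kind
        println!("lexical error at {offset:?}: {kind:?}");

        // Consuming conversion, as used by the `ParseError` mappings in this commit.
        let _owned: LexicalErrorType = err.into_error();
    }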
8 changes: 4 additions & 4 deletions crates/ruff_python_parser/src/parser.rs
@@ -285,8 +285,8 @@ fn parse_error_from_lalrpop(err: LalrpopError<TextSize, Tok, LexicalError>) -> P
offset: token.0,
},
LalrpopError::User { error } => ParseError {
error: ParseErrorType::Lexical(error.error),
offset: error.location,
offset: error.location(),
error: ParseErrorType::Lexical(error.into_error()),
},
LalrpopError::UnrecognizedToken { token, expected } => {
// Hacky, but it's how CPython does it. See PyParser_AddToken,
@@ -359,8 +359,8 @@ impl ParseErrorType {
impl From<LexicalError> for ParseError {
fn from(error: LexicalError) -> Self {
ParseError {
error: ParseErrorType::Lexical(error.error),
offset: error.location,
offset: error.location(),
error: ParseErrorType::Lexical(error.into_error()),
}
}
}
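Since `From<LexicalError> for ParseError` now routes through `location()` and `into_error()`, callers that previously destructured the public fields can simply propagate lexical errors with `?`. A hypothetical sketch; the helper function below is illustrative and not part of this commit:

    use ruff_python_parser::lexer::LexicalError;
    use ruff_python_parser::ParseError;

    // Any function returning Result<_, ParseError> can forward a LexicalError
    // with `?`; the From impl above performs the field mapping.
    fn forward(result: Result<(), LexicalError>) -> Result<(), ParseError> {
        result?;
        Ok(())
    }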
94 changes: 47 additions & 47 deletions crates/ruff_python_parser/src/python.lalrpop
@@ -337,10 +337,10 @@ IpyEscapeCommandStatement: ast::Stmt = {
}
))
} else {
Err(LexicalError {
error: LexicalErrorType::OtherError("IPython escape commands are only allowed in `Mode::Ipython`".to_string()),
Err(LexicalError::new(
LexicalErrorType::OtherError("IPython escape commands are only allowed in `Mode::Ipython`".to_string()),
location,
})?
))?
}
}
}
@@ -350,21 +350,21 @@ IpyEscapeCommandExpr: crate::parser::ParenthesizedExpr = {
if mode == Mode::Ipython {
// This should never occur as the lexer won't allow it.
if !matches!(c.0, IpyEscapeKind::Magic | IpyEscapeKind::Shell) {
return Err(LexicalError {
error: LexicalErrorType::OtherError("IPython escape command expr is only allowed for % and !".to_string()),
return Err(LexicalError::new(
LexicalErrorType::OtherError("IPython escape command expr is only allowed for % and !".to_string()),
location,
})?;
))?;
}
Ok(ast::ExprIpyEscapeCommand {
kind: c.0,
value: c.1,
range: (location..end_location).into()
}.into())
} else {
Err(LexicalError {
error: LexicalErrorType::OtherError("IPython escape commands are only allowed in `Mode::Ipython`".to_string()),
Err(LexicalError::new(
LexicalErrorType::OtherError("IPython escape commands are only allowed in `Mode::Ipython`".to_string()),
location,
})?
))?
}
}
}
@@ -381,10 +381,10 @@ IpyHelpEndEscapeCommandStatement: ast::Stmt = {
},
ast::Expr::Subscript(ast::ExprSubscript { value, slice, range, .. }) => {
let ast::Expr::NumberLiteral(ast::ExprNumberLiteral { value: ast::Number::Int(integer), .. }) = slice.as_ref() else {
return Err(LexicalError {
error: LexicalErrorType::OtherError("only integer literals are allowed in Subscript expressions in help end escape command".to_string()),
location: range.start(),
});
return Err(LexicalError::new(
LexicalErrorType::OtherError("only integer literals are allowed in Subscript expressions in help end escape command".to_string()),
range.start(),
));
};
unparse_expr(value, buffer)?;
buffer.push('[');
@@ -397,21 +397,21 @@ IpyHelpEndEscapeCommandStatement: ast::Stmt = {
buffer.push_str(attr.as_str());
},
_ => {
return Err(LexicalError {
error: LexicalErrorType::OtherError("only Name, Subscript and Attribute expressions are allowed in help end escape command".to_string()),
location: expr.start(),
});
return Err(LexicalError::new(
LexicalErrorType::OtherError("only Name, Subscript and Attribute expressions are allowed in help end escape command".to_string()),
expr.start(),
));
}
}
Ok(())
}

if mode != Mode::Ipython {
return Err(ParseError::User {
error: LexicalError {
error: LexicalErrorType::OtherError("IPython escape commands are only allowed in `Mode::Ipython`".to_string()),
error: LexicalError::new(
LexicalErrorType::OtherError("IPython escape commands are only allowed in `Mode::Ipython`".to_string()),
location,
},
),
});
}

@@ -420,10 +420,10 @@
2 => IpyEscapeKind::Help2,
_ => {
return Err(ParseError::User {
error: LexicalError {
error: LexicalErrorType::OtherError("maximum of 2 `?` tokens are allowed in help end escape command".to_string()),
error: LexicalError::new(
LexicalErrorType::OtherError("maximum of 2 `?` tokens are allowed in help end escape command".to_string()),
location,
},
),
});
}
};
@@ -561,10 +561,10 @@ Pattern: ast::Pattern = {
AsPattern: ast::Pattern = {
<location:@L> <pattern:OrPattern> "as" <name:Identifier> <end_location:@R> =>? {
if name.as_str() == "_" {
Err(LexicalError {
error: LexicalErrorType::OtherError("cannot use '_' as a target".to_string()),
Err(LexicalError::new(
LexicalErrorType::OtherError("cannot use '_' as a target".to_string()),
location,
})?
))?
} else {
Ok(ast::Pattern::MatchAs(
ast::PatternMatchAs {
@@ -1247,10 +1247,10 @@ DoubleStarTypedParameter: ast::Parameter = {
ParameterListStarArgs<ParameterType, StarParameterType, DoubleStarParameterType>: (Option<Box<ast::Parameter>>, Vec<ast::ParameterWithDefault>, Option<Box<ast::Parameter>>) = {
<location:@L> "*" <va:StarParameterType?> <kwonlyargs:("," <ParameterDef<ParameterType>>)*> <kwarg:("," <KwargParameter<DoubleStarParameterType>>)?> =>? {
if va.is_none() && kwonlyargs.is_empty() && kwarg.is_none() {
return Err(LexicalError {
error: LexicalErrorType::OtherError("named arguments must follow bare *".to_string()),
return Err(LexicalError::new(
LexicalErrorType::OtherError("named arguments must follow bare *".to_string()),
location,
})?;
))?;
}

let kwarg = kwarg.flatten();
@@ -1364,10 +1364,10 @@ NamedExpression: crate::parser::ParenthesizedExpr = {
LambdaDef: crate::parser::ParenthesizedExpr = {
<location:@L> "lambda" <location_args:@L> <parameters:ParameterList<UntypedParameter, StarUntypedParameter, StarUntypedParameter>?> <end_location_args:@R> ":" <fstring_middle:fstring_middle?> <body:Test<"all">> <end_location:@R> =>? {
if fstring_middle.is_some() {
return Err(LexicalError {
error: LexicalErrorType::FStringError(FStringErrorType::LambdaWithoutParentheses),
return Err(LexicalError::new(
LexicalErrorType::FStringError(FStringErrorType::LambdaWithoutParentheses),
location,
})?;
))?;
}
parameters.as_ref().map(validate_arguments).transpose()?;

@@ -1630,10 +1630,10 @@ FStringMiddlePattern: ast::FStringElement = {
FStringReplacementField: ast::FStringElement = {
<location:@L> "{" <value:TestListOrYieldExpr> <debug:"="?> <conversion:FStringConversion?> <format_spec:FStringFormatSpecSuffix?> "}" <end_location:@R> =>? {
if value.expr.is_lambda_expr() && !value.is_parenthesized() {
return Err(LexicalError {
error: LexicalErrorType::FStringError(FStringErrorType::LambdaWithoutParentheses),
location: value.start(),
})?;
return Err(LexicalError::new(
LexicalErrorType::FStringError(FStringErrorType::LambdaWithoutParentheses),
value.start(),
))?;
}
let debug_text = debug.map(|_| {
let start_offset = location + "{".text_len();
@@ -1681,10 +1681,10 @@ FStringConversion: (TextSize, ast::ConversionFlag) = {
"s" => ast::ConversionFlag::Str,
"r" => ast::ConversionFlag::Repr,
"a" => ast::ConversionFlag::Ascii,
_ => Err(LexicalError {
error: LexicalErrorType::FStringError(FStringErrorType::InvalidConversionFlag),
location: name_location,
})?
_ => Err(LexicalError::new(
LexicalErrorType::FStringError(FStringErrorType::InvalidConversionFlag),
name_location,
))?
};
Ok((location, conversion))
}
@@ -1722,10 +1722,10 @@ Atom<Goal>: crate::parser::ParenthesizedExpr = {
<location:@L> "(" <left:(<OneOrMore<Test<"all">>> ",")?> <mid:NamedOrStarExpr> <right:("," <TestOrStarNamedExpr>)*> <trailing_comma:","?> ")" <end_location:@R> =>? {
if left.is_none() && right.is_empty() && trailing_comma.is_none() {
if mid.expr.is_starred_expr() {
return Err(LexicalError{
error: LexicalErrorType::OtherError("cannot use starred expression here".to_string()),
location: mid.start(),
})?;
return Err(LexicalError::new(
LexicalErrorType::OtherError("cannot use starred expression here".to_string()),
mid.start(),
))?;
}
Ok(crate::parser::ParenthesizedExpr {
expr: mid.into(),
@@ -1751,10 +1751,10 @@ Atom<Goal>: crate::parser::ParenthesizedExpr = {
range: (location..end_location).into(),
}.into(),
"(" <location:@L> "**" <e:Expression<"all">> ")" <end_location:@R> =>? {
Err(LexicalError{
error : LexicalErrorType::OtherError("cannot use double starred expression here".to_string()),
Err(LexicalError::new(
LexicalErrorType::OtherError("cannot use double starred expression here".to_string()),
location,
}.into())
).into())
},
<location:@L> "{" <e:DictLiteralValues?> "}" <end_location:@R> => {
let (keys, values) = e
