From 32fa05765a5a8f10aaa331d905d47d88f63f11aa Mon Sep 17 00:00:00 2001
From: Dhruv Manilawala
Date: Sat, 5 Aug 2023 06:02:07 +0530
Subject: [PATCH] Use `Jupyter` mode while parsing Notebook files (#5552)

## Summary

Enable the new `Mode::Jupyter` for the tokenizer/parser so that Jupyter line
magic tokens are parsed. The individual calls to the lexer, i.e.,
`lex_starts_at`, made by various rules should consider the context of the
source code: is this content from a Jupyter notebook? Thus, a new field,
`source_type` (of type `PySourceType`), is added to `Checker` and passed as
an argument to the relevant functions; it is then used to determine the
`Mode` for the lexer.

## Test Plan

Add new test cases to make sure that magic statements are taken into account
when generating diagnostics and autofixes:

* For `I001`, if a magic statement sits between two import blocks, the blocks
  should be sorted independently.

fixes: #6090
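## Example

The recurring pattern in this diff: rules that re-lex a snippet of source now
derive the lexer mode from the file's `PySourceType` instead of hard-coding
`Mode::Module`. A rough sketch (the names are as they appear in the changes
below; this is not a complete, compilable excerpt):

```rust
use ruff_python_parser::{lexer, AsMode};

// Before: lexer::lex_starts_at(contents, Mode::Module, stmt.start())
// After: the mode follows the file type, so `.ipynb` content is lexed
// with `Mode::Jupyter` and line magics no longer break the token stream.
for (tok, range) in
    lexer::lex_starts_at(contents, source_type.as_mode(), stmt.start()).flatten()
{
    // ... token handling unchanged ...
}
```

The same field also replaces the old `is_stub: bool` on `Checker`: call sites
now query `checker.source_type.is_stub()`.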
---
 .../fixtures/jupyter/cell/cell_magic.json | 8 +++
 .../test/fixtures/jupyter/isort.ipynb | 17 +++++
 .../fixtures/jupyter/isort_expected.ipynb | 17 +++++
 .../test/fixtures/jupyter/line_magics.ipynb | 52 ++++++++++++++++
 .../jupyter/line_magics_expected.ipynb | 51 +++++++++++++++
 crates/ruff/src/autofix/edits.rs | 11 ++--
 .../checkers/ast/analyze/deferred_scopes.rs | 4 +-
 .../src/checkers/ast/analyze/definitions.rs | 2 +-
 .../src/checkers/ast/analyze/expression.rs | 22 +++----
 .../src/checkers/ast/analyze/parameters.rs | 2 +-
 .../src/checkers/ast/analyze/statement.rs | 26 ++++----
 crates/ruff/src/checkers/ast/mod.rs | 19 +++---
 crates/ruff/src/checkers/imports.rs | 24 ++++---
 crates/ruff/src/importer/insertion.rs | 21 ++++---
 crates/ruff/src/importer/mod.rs | 14 +++--
 crates/ruff/src/jupyter/notebook.rs | 46 ++++++++------
 ...yter__notebook__tests__import_sorting.snap | 39 ++++++++++++
 ...jupyter__notebook__tests__line_magics.snap | 23 +++++++
 crates/ruff/src/linter.rs | 36 ++++++++---
 .../src/rules/flake8_annotations/fixes.rs | 9 ++-
 .../flake8_annotations/rules/definition.rs | 18 ++++--
 .../flake8_pytest_style/rules/fixture.rs | 1 +
 .../flake8_pytest_style/rules/parametrize.rs | 31 +++++++---
 .../unnecessary_paren_on_raise_exception.rs | 14 +++--
 .../src/rules/flake8_simplify/rules/ast_if.rs | 1 +
 .../rules/flake8_simplify/rules/ast_with.rs | 1 +
 .../rules/typing_only_runtime_import.rs | 1 +
 crates/ruff/src/rules/isort/annotate.rs | 5 +-
 crates/ruff/src/rules/isort/comments.rs | 11 +++-
 crates/ruff/src/rules/isort/helpers.rs | 12 ++--
 crates/ruff/src/rules/isort/mod.rs | 10 ++-
 .../rules/isort/rules/add_required_imports.rs | 12 ++--
 .../src/rules/isort/rules/organize_imports.rs | 5 +-
 .../pandas_vet/rules/inplace_argument.rs | 22 +++++--
 .../src/rules/pydocstyle/rules/not_missing.rs | 2 +-
 crates/ruff/src/rules/pyflakes/mod.rs | 7 ++-
 .../rules/f_string_missing_placeholders.rs | 11 ++--
 .../rules/pyflakes/rules/unused_variable.rs | 62 +++++++++++++------
 .../pylint/rules/bad_string_format_type.rs | 6 +-
 .../rules/printf_string_formatting.rs | 4 +-
 .../pyupgrade/rules/redundant_open_modes.rs | 13 ++--
 .../pyupgrade/rules/replace_stdout_stderr.rs | 8 ++-
 .../rules/unnecessary_encode_utf8.rs | 24 +++++--
 .../rules/useless_object_inheritance.rs | 10 ++-
 crates/ruff/src/test.rs | 16 +++--
 crates/ruff_benchmark/benches/linter.rs | 5 +-
 crates/ruff_cli/src/diagnostics.rs | 15 ++++-
 crates/ruff_dev/src/print_ast.rs | 12 +++-
 crates/ruff_dev/src/print_tokens.rs | 10 ++-
 crates/ruff_python_parser/src/lib.rs | 41 ++++++++++-
 crates/ruff_shrinking/src/main.rs | 9 +--
 crates/ruff_wasm/src/lib.rs | 6 +-
 52 files changed, 652 insertions(+), 196 deletions(-)
 create mode 100644 crates/ruff/resources/test/fixtures/jupyter/cell/cell_magic.json
 create mode 100644 crates/ruff/resources/test/fixtures/jupyter/line_magics.ipynb
 create mode 100644 crates/ruff/resources/test/fixtures/jupyter/line_magics_expected.ipynb
 create mode 100644 crates/ruff/src/jupyter/snapshots/ruff__jupyter__notebook__tests__line_magics.snap

diff --git a/crates/ruff/resources/test/fixtures/jupyter/cell/cell_magic.json b/crates/ruff/resources/test/fixtures/jupyter/cell/cell_magic.json
new file mode 100644
index 0000000000000..ef68b202e6811
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/jupyter/cell/cell_magic.json
@@ -0,0 +1,8 @@
+{
+  "execution_count": null,
+  "cell_type": "code",
+  "id": "1",
+  "metadata": {},
+  "outputs": [],
+  "source": ["%%timeit\n", "print('hello world')"]
+}
diff --git a/crates/ruff/resources/test/fixtures/jupyter/isort.ipynb b/crates/ruff/resources/test/fixtures/jupyter/isort.ipynb
index aef5ff2e8b8aa..9572f7b25e8ad 100644
--- a/crates/ruff/resources/test/fixtures/jupyter/isort.ipynb
+++ b/crates/ruff/resources/test/fixtures/jupyter/isort.ipynb
@@ -25,6 +25,23 @@
     "def foo():\n",
     "    pass"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "16214f6f-bb32-4594-81be-79fb27c6ec92",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from pathlib import Path\n",
+    "import sys\n",
+    "\n",
+    "%matplotlib \\\n",
+    "    --inline\n",
+    "\n",
+    "import math\n",
+    "import abc"
+   ]
   }
  ],
  "metadata": {
diff --git a/crates/ruff/resources/test/fixtures/jupyter/isort_expected.ipynb b/crates/ruff/resources/test/fixtures/jupyter/isort_expected.ipynb
index 009c598e71672..5118aa36615e2 100644
--- a/crates/ruff/resources/test/fixtures/jupyter/isort_expected.ipynb
+++ b/crates/ruff/resources/test/fixtures/jupyter/isort_expected.ipynb
@@ -27,6 +27,23 @@
     "def foo():\n",
     "    pass"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6d6c55c6-4a34-4662-914b-4ee11c9c24a5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import sys\n",
+    "from pathlib import Path\n",
+    "\n",
+    "%matplotlib \\\n",
+    "    --inline\n",
+    "\n",
+    "import abc\n",
+    "import math"
+   ]
   }
  ],
  "metadata": {
diff --git a/crates/ruff/resources/test/fixtures/jupyter/line_magics.ipynb b/crates/ruff/resources/test/fixtures/jupyter/line_magics.ipynb
new file mode 100644
index 0000000000000..5e9b10bb7b0e1
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/jupyter/line_magics.ipynb
@@ -0,0 +1,52 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "eab4754a-d6df-4b41-8ee8-7e23aef440f9",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import math\n",
+    "\n",
+    "%matplotlib inline\n",
+    "\n",
+    "import os\n",
+    "\n",
+    "_ = math.pi"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "2b0e2986-1b87-4bb6-9b1d-c11ca1decd87",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%%timeit\n",
+    "import sys"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python (ruff)",
+   "language": "python",
+   "name": "ruff"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/crates/ruff/resources/test/fixtures/jupyter/line_magics_expected.ipynb
b/crates/ruff/resources/test/fixtures/jupyter/line_magics_expected.ipynb new file mode 100644 index 0000000000000..8419f031e78f8 --- /dev/null +++ b/crates/ruff/resources/test/fixtures/jupyter/line_magics_expected.ipynb @@ -0,0 +1,51 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "cad32845-44f9-4a53-8b8c-a6b1bb3f3378", + "metadata": {}, + "outputs": [], + "source": [ + "import math\n", + "\n", + "%matplotlib inline\n", + "\n", + "\n", + "_ = math.pi" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d7b8e967-8b4a-493b-b6f7-d5cecfb3a5c3", + "metadata": {}, + "outputs": [], + "source": [ + "%%timeit\n", + "import sys" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python (ruff)", + "language": "python", + "name": "ruff" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/crates/ruff/src/autofix/edits.rs b/crates/ruff/src/autofix/edits.rs index 19e2daa3f2f59..6e90771d8e10b 100644 --- a/crates/ruff/src/autofix/edits.rs +++ b/crates/ruff/src/autofix/edits.rs @@ -3,10 +3,12 @@ use anyhow::{bail, Result}; use ruff_diagnostics::Edit; -use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, Keyword, Ranged, Stmt}; +use ruff_python_ast::{ + self as ast, Arguments, ExceptHandler, Expr, Keyword, PySourceType, Ranged, Stmt, +}; use ruff_python_codegen::Stylist; use ruff_python_index::Indexer; -use ruff_python_parser::{lexer, Mode}; +use ruff_python_parser::{lexer, AsMode}; use ruff_python_trivia::{has_leading_content, is_python_whitespace, PythonWhitespace}; use ruff_source_file::{Locator, NewlineWithTrailingNewline}; use ruff_text_size::{TextLen, TextRange, TextSize}; @@ -88,6 +90,7 @@ pub(crate) fn remove_argument( arguments: &Arguments, parentheses: Parentheses, locator: &Locator, + source_type: PySourceType, ) -> Result { // TODO(sbrugman): Preserve trailing comments. if arguments.keywords.len() + arguments.args.len() > 1 { @@ -106,7 +109,7 @@ pub(crate) fn remove_argument( let mut seen_comma = false; for (tok, range) in lexer::lex_starts_at( locator.slice(arguments.range()), - Mode::Module, + source_type.as_mode(), arguments.start(), ) .flatten() @@ -135,7 +138,7 @@ pub(crate) fn remove_argument( // previous comma to the end of the argument. for (tok, range) in lexer::lex_starts_at( locator.slice(arguments.range()), - Mode::Module, + source_type.as_mode(), arguments.start(), ) .flatten() diff --git a/crates/ruff/src/checkers/ast/analyze/deferred_scopes.rs b/crates/ruff/src/checkers/ast/analyze/deferred_scopes.rs index f17bd14763001..ac7e675492eb9 100644 --- a/crates/ruff/src/checkers/ast/analyze/deferred_scopes.rs +++ b/crates/ruff/src/checkers/ast/analyze/deferred_scopes.rs @@ -37,7 +37,7 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) { // Identify any valid runtime imports. If a module is imported at runtime, and // used at runtime, then by default, we avoid flagging any other // imports from that model as typing-only. 
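// (Sketch of the recurring change just below: the cached `is_stub` boolean on
// `Checker` is replaced by querying the new `source_type` field, e.g.
// `checker.source_type.is_stub()` for `.pyi` stub files.)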
- let enforce_typing_imports = !checker.is_stub + let enforce_typing_imports = !checker.source_type.is_stub() && checker.any_enabled(&[ Rule::RuntimeImportInTypeCheckingBlock, Rule::TypingOnlyFirstPartyImport, @@ -243,7 +243,7 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) { pyflakes::rules::unused_annotation(checker, scope, &mut diagnostics); } - if !checker.is_stub { + if !checker.source_type.is_stub() { if checker.any_enabled(&[ Rule::UnusedClassMethodArgument, Rule::UnusedFunctionArgument, diff --git a/crates/ruff/src/checkers/ast/analyze/definitions.rs b/crates/ruff/src/checkers/ast/analyze/definitions.rs index 02339a232e840..c1ff6947a8dfd 100644 --- a/crates/ruff/src/checkers/ast/analyze/definitions.rs +++ b/crates/ruff/src/checkers/ast/analyze/definitions.rs @@ -30,7 +30,7 @@ pub(crate) fn definitions(checker: &mut Checker) { Rule::MissingTypeKwargs, Rule::MissingTypeSelf, ]); - let enforce_stubs = checker.is_stub && checker.enabled(Rule::DocstringInStub); + let enforce_stubs = checker.source_type.is_stub() && checker.enabled(Rule::DocstringInStub); let enforce_stubs_and_runtime = checker.enabled(Rule::IterMethodReturnIterable); let enforce_docstrings = checker.any_enabled(&[ Rule::BlankLineAfterLastSection, diff --git a/crates/ruff/src/checkers/ast/analyze/expression.rs b/crates/ruff/src/checkers/ast/analyze/expression.rs index 9d5e0d5a3d133..2b462698efe44 100644 --- a/crates/ruff/src/checkers/ast/analyze/expression.rs +++ b/crates/ruff/src/checkers/ast/analyze/expression.rs @@ -31,7 +31,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { if let Some(operator) = typing::to_pep604_operator(value, slice, &checker.semantic) { if checker.enabled(Rule::FutureRewritableTypeAnnotation) { - if !checker.is_stub + if !checker.source_type.is_stub() && checker.settings.target_version < PythonVersion::Py310 && checker.settings.target_version >= PythonVersion::Py37 && !checker.semantic.future_annotations() @@ -44,7 +44,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { } } if checker.enabled(Rule::NonPEP604Annotation) { - if checker.is_stub + if checker.source_type.is_stub() || checker.settings.target_version >= PythonVersion::Py310 || (checker.settings.target_version >= PythonVersion::Py37 && checker.semantic.future_annotations() @@ -59,7 +59,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { // Ex) list[...] 
if checker.enabled(Rule::FutureRequiredTypeAnnotation) { - if !checker.is_stub + if !checker.source_type.is_stub() && checker.settings.target_version < PythonVersion::Py39 && !checker.semantic.future_annotations() && checker.semantic.in_annotation() @@ -176,7 +176,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { typing::to_pep585_generic(expr, &checker.semantic) { if checker.enabled(Rule::FutureRewritableTypeAnnotation) { - if !checker.is_stub + if !checker.source_type.is_stub() && checker.settings.target_version < PythonVersion::Py39 && checker.settings.target_version >= PythonVersion::Py37 && !checker.semantic.future_annotations() @@ -187,7 +187,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { } } if checker.enabled(Rule::NonPEP585Annotation) { - if checker.is_stub + if checker.source_type.is_stub() || checker.settings.target_version >= PythonVersion::Py39 || (checker.settings.target_version >= PythonVersion::Py37 && checker.semantic.future_annotations() @@ -272,7 +272,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { ]) { if let Some(replacement) = typing::to_pep585_generic(expr, &checker.semantic) { if checker.enabled(Rule::FutureRewritableTypeAnnotation) { - if !checker.is_stub + if !checker.source_type.is_stub() && checker.settings.target_version < PythonVersion::Py39 && checker.settings.target_version >= PythonVersion::Py37 && !checker.semantic.future_annotations() @@ -285,7 +285,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { } } if checker.enabled(Rule::NonPEP585Annotation) { - if checker.is_stub + if checker.source_type.is_stub() || checker.settings.target_version >= PythonVersion::Py39 || (checker.settings.target_version >= PythonVersion::Py37 && checker.semantic.future_annotations() @@ -1066,7 +1066,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { }) => { // Ex) `str | None` if checker.enabled(Rule::FutureRequiredTypeAnnotation) { - if !checker.is_stub + if !checker.source_type.is_stub() && checker.settings.target_version < PythonVersion::Py310 && !checker.semantic.future_annotations() && checker.semantic.in_annotation() @@ -1212,7 +1212,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { kind: _, range: _, }) => { - if checker.is_stub && checker.enabled(Rule::NumericLiteralTooLong) { + if checker.source_type.is_stub() && checker.enabled(Rule::NumericLiteralTooLong) { flake8_pyi::rules::numeric_literal_too_long(checker, expr); } } @@ -1221,7 +1221,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { kind: _, range: _, }) => { - if checker.is_stub && checker.enabled(Rule::StringOrBytesTooLong) { + if checker.source_type.is_stub() && checker.enabled(Rule::StringOrBytesTooLong) { flake8_pyi::rules::string_or_bytes_too_long(checker, expr); } } @@ -1249,7 +1249,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { if checker.enabled(Rule::UnicodeKindPrefix) { pyupgrade::rules::unicode_kind_prefix(checker, expr, kind.as_deref()); } - if checker.is_stub { + if checker.source_type.is_stub() { if checker.enabled(Rule::StringOrBytesTooLong) { flake8_pyi::rules::string_or_bytes_too_long(checker, expr); } diff --git a/crates/ruff/src/checkers/ast/analyze/parameters.rs b/crates/ruff/src/checkers/ast/analyze/parameters.rs index 9320947a4539e..fe300a8ea135f 100644 --- a/crates/ruff/src/checkers/ast/analyze/parameters.rs +++ b/crates/ruff/src/checkers/ast/analyze/parameters.rs @@ -15,7 +15,7 @@ pub(crate) fn parameters(parameters: &Parameters, checker: 
&mut Checker) { if checker.settings.rules.enabled(Rule::ImplicitOptional) { ruff::rules::implicit_optional(checker, parameters); } - if checker.is_stub { + if checker.source_type.is_stub() { if checker.enabled(Rule::TypedArgumentDefaultInStub) { flake8_pyi::rules::typed_argument_simple_defaults(checker, parameters); } diff --git a/crates/ruff/src/checkers/ast/analyze/statement.rs b/crates/ruff/src/checkers/ast/analyze/statement.rs index dc88a36f6edb0..cbdbff9f5ae54 100644 --- a/crates/ruff/src/checkers/ast/analyze/statement.rs +++ b/crates/ruff/src/checkers/ast/analyze/statement.rs @@ -133,7 +133,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { checker.diagnostics.push(diagnostic); } } - if checker.is_stub { + if checker.source_type.is_stub() { if checker.enabled(Rule::PassStatementStubBody) { flake8_pyi::rules::pass_statement_stub_body(checker, body); } @@ -168,12 +168,14 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { type_params.as_ref(), ); } - if checker.is_stub { + if checker.source_type.is_stub() { if checker.enabled(Rule::StrOrReprDefinedInStub) { flake8_pyi::rules::str_or_repr_defined_in_stub(checker, stmt); } } - if checker.is_stub || checker.settings.target_version >= PythonVersion::Py311 { + if checker.source_type.is_stub() + || checker.settings.target_version >= PythonVersion::Py311 + { if checker.enabled(Rule::NoReturnArgumentAnnotationInStub) { flake8_pyi::rules::no_return_argument_annotation(checker, parameters); } @@ -412,7 +414,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { body, ); } - if !checker.is_stub { + if !checker.source_type.is_stub() { if checker.enabled(Rule::DjangoModelWithoutDunderStr) { flake8_django::rules::model_without_dunder_str(checker, class_def); } @@ -453,7 +455,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { checker.diagnostics.push(diagnostic); } } - if !checker.is_stub { + if !checker.source_type.is_stub() { if checker.any_enabled(&[ Rule::AbstractBaseClassWithoutAbstractMethod, Rule::EmptyMethodWithoutAbstractDecorator, @@ -467,7 +469,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { ); } } - if checker.is_stub { + if checker.source_type.is_stub() { if checker.enabled(Rule::PassStatementStubBody) { flake8_pyi::rules::pass_statement_stub_body(checker, body); } @@ -569,7 +571,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { alias, ); } - if !checker.is_stub { + if !checker.source_type.is_stub() { if checker.enabled(Rule::UselessImportAlias) { pylint::rules::useless_import_alias(checker, alias); } @@ -744,7 +746,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { checker.diagnostics.push(diagnostic); } } - if checker.is_stub { + if checker.source_type.is_stub() { if checker.enabled(Rule::FutureAnnotationsInStub) { flake8_pyi::rules::from_future_import(checker, import_from); } @@ -889,7 +891,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { checker.diagnostics.push(diagnostic); } } - if !checker.is_stub { + if !checker.source_type.is_stub() { if checker.enabled(Rule::UselessImportAlias) { pylint::rules::useless_import_alias(checker, alias); } @@ -1013,7 +1015,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { checker.diagnostics.push(diagnostic); } } - if checker.is_stub { + if checker.source_type.is_stub() { if checker.any_enabled(&[ Rule::UnrecognizedVersionInfoCheck, Rule::PatchVersionComparison, @@ -1325,7 +1327,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { if 
checker.settings.rules.enabled(Rule::TypeBivariance) { pylint::rules::type_bivariance(checker, value); } - if checker.is_stub { + if checker.source_type.is_stub() { if checker.any_enabled(&[ Rule::UnprefixedTypeParam, Rule::AssignmentDefaultInStub, @@ -1395,7 +1397,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { if checker.enabled(Rule::NonPEP695TypeAlias) { pyupgrade::rules::non_pep695_type_alias(checker, assign_stmt); } - if checker.is_stub { + if checker.source_type.is_stub() { if let Some(value) = value { if checker.enabled(Rule::AssignmentDefaultInStub) { // Ignore assignments in function bodies; those are covered by other rules. diff --git a/crates/ruff/src/checkers/ast/mod.rs b/crates/ruff/src/checkers/ast/mod.rs index 73c34971fbb67..1c19eda25ea69 100644 --- a/crates/ruff/src/checkers/ast/mod.rs +++ b/crates/ruff/src/checkers/ast/mod.rs @@ -43,7 +43,7 @@ use ruff_python_ast::helpers::{extract_handled_exceptions, to_module_path}; use ruff_python_ast::identifier::Identifier; use ruff_python_ast::str::trailing_quote; use ruff_python_ast::visitor::{walk_except_handler, walk_pattern, Visitor}; -use ruff_python_ast::{helpers, str, visitor}; +use ruff_python_ast::{helpers, str, visitor, PySourceType}; use ruff_python_codegen::{Generator, Quote, Stylist}; use ruff_python_index::Indexer; use ruff_python_parser::typing::{parse_type_annotation, AnnotationKind}; @@ -53,7 +53,6 @@ use ruff_python_semantic::{ ModuleKind, ScopeId, ScopeKind, SemanticModel, SemanticModelFlags, StarImport, SubmoduleImport, }; use ruff_python_stdlib::builtins::{BUILTINS, MAGIC_GLOBALS}; -use ruff_python_stdlib::path::is_python_stub_file; use ruff_source_file::Locator; use crate::checkers::ast::deferred::Deferred; @@ -75,8 +74,8 @@ pub(crate) struct Checker<'a> { package: Option<&'a Path>, /// The module representation of the current file (e.g., `foo.bar`). module_path: Option<&'a [String]>, - /// Whether the current file is a stub (`.pyi`) file. - is_stub: bool, + /// The [`PySourceType`] of the current file. + pub(crate) source_type: PySourceType, /// The [`flags::Noqa`] for the current analysis (i.e., whether to respect suppression /// comments). noqa: flags::Noqa, @@ -118,6 +117,7 @@ impl<'a> Checker<'a> { stylist: &'a Stylist, indexer: &'a Indexer, importer: Importer<'a>, + source_type: PySourceType, ) -> Checker<'a> { Checker { settings, @@ -126,7 +126,7 @@ impl<'a> Checker<'a> { path, package, module_path: module.path(), - is_stub: is_python_stub_file(path), + source_type, locator, stylist, indexer, @@ -233,11 +233,6 @@ impl<'a> Checker<'a> { &self.semantic } - /// Return `true` if the current file is a stub file (`.pyi`). - pub(crate) const fn is_stub(&self) -> bool { - self.is_stub - } - /// The [`Path`] to the file under analysis. 
pub(crate) const fn path(&self) -> &'a Path { self.path @@ -1786,7 +1781,7 @@ impl<'a> Checker<'a> { pyupgrade::rules::quoted_annotation(self, value, range); } } - if self.is_stub { + if self.source_type.is_stub() { if self.enabled(Rule::QuotedAnnotationInStub) { flake8_pyi::rules::quoted_annotation_in_stub(self, value, range); } @@ -1928,6 +1923,7 @@ pub(crate) fn check_ast( noqa: flags::Noqa, path: &Path, package: Option<&Path>, + source_type: PySourceType, ) -> Vec { let module_path = package.and_then(|package| to_module_path(package, path)); let module = Module { @@ -1955,6 +1951,7 @@ pub(crate) fn check_ast( stylist, indexer, Importer::new(python_ast, locator, stylist), + source_type, ); checker.bind_builtins(); diff --git a/crates/ruff/src/checkers/imports.rs b/crates/ruff/src/checkers/imports.rs index 7eb17a2eca30d..d0ee5096577f9 100644 --- a/crates/ruff/src/checkers/imports.rs +++ b/crates/ruff/src/checkers/imports.rs @@ -2,7 +2,7 @@ use std::borrow::Cow; use std::path::Path; -use ruff_python_ast::{self as ast, Ranged, Stmt, Suite}; +use ruff_python_ast::{self as ast, PySourceType, Ranged, Stmt, Suite}; use ruff_diagnostics::Diagnostic; use ruff_python_ast::helpers::to_module_path; @@ -10,7 +10,7 @@ use ruff_python_ast::imports::{ImportMap, ModuleImport}; use ruff_python_ast::statement_visitor::StatementVisitor; use ruff_python_codegen::Stylist; use ruff_python_index::Indexer; -use ruff_python_stdlib::path::is_python_stub_file; + use ruff_source_file::Locator; use crate::directives::IsortDirectives; @@ -87,12 +87,12 @@ pub(crate) fn check_imports( path: &Path, package: Option<&Path>, source_kind: Option<&SourceKind>, + source_type: PySourceType, ) -> (Vec, Option) { - let is_stub = is_python_stub_file(path); - // Extract all import blocks from the AST. let tracker = { - let mut tracker = BlockBuilder::new(locator, directives, is_stub, source_kind); + let mut tracker = + BlockBuilder::new(locator, directives, source_type.is_stub(), source_kind); tracker.visit_body(python_ast); tracker }; @@ -104,7 +104,13 @@ pub(crate) fn check_imports( for block in &blocks { if !block.imports.is_empty() { if let Some(diagnostic) = isort::rules::organize_imports( - block, locator, stylist, indexer, settings, package, + block, + locator, + stylist, + indexer, + settings, + package, + source_type, ) { diagnostics.push(diagnostic); } @@ -113,7 +119,11 @@ pub(crate) fn check_imports( } if settings.rules.enabled(Rule::MissingRequiredImport) { diagnostics.extend(isort::rules::add_required_imports( - python_ast, locator, stylist, settings, is_stub, + python_ast, + locator, + stylist, + settings, + source_type, )); } diff --git a/crates/ruff/src/importer/insertion.rs b/crates/ruff/src/importer/insertion.rs index 5739955bedba2..311a1be96791c 100644 --- a/crates/ruff/src/importer/insertion.rs +++ b/crates/ruff/src/importer/insertion.rs @@ -1,8 +1,8 @@ //! Insert statements into Python code. 
use std::ops::Add; -use ruff_python_ast::{Ranged, Stmt}; -use ruff_python_parser::{lexer, Mode, Tok}; +use ruff_python_ast::{PySourceType, Ranged, Stmt}; +use ruff_python_parser::{lexer, AsMode, Tok}; use ruff_text_size::TextSize; use ruff_diagnostics::Edit; @@ -137,6 +137,7 @@ impl<'a> Insertion<'a> { mut location: TextSize, locator: &Locator<'a>, stylist: &Stylist, + source_type: PySourceType, ) -> Insertion<'a> { enum Awaiting { Colon(u32), @@ -146,7 +147,7 @@ impl<'a> Insertion<'a> { let mut state = Awaiting::Colon(0); for (tok, range) in - lexer::lex_starts_at(locator.after(location), Mode::Module, location).flatten() + lexer::lex_starts_at(locator.after(location), source_type.as_mode(), location).flatten() { match state { // Iterate until we find the colon indicating the start of the block body. @@ -300,12 +301,12 @@ fn match_leading_semicolon(s: &str) -> Option { mod tests { use anyhow::Result; - use ruff_python_parser::lexer::LexResult; - use ruff_text_size::TextSize; - + use ruff_python_ast::PySourceType; use ruff_python_codegen::Stylist; - use ruff_python_parser::parse_suite; + use ruff_python_parser::lexer::LexResult; + use ruff_python_parser::{parse_suite, Mode}; use ruff_source_file::{LineEnding, Locator}; + use ruff_text_size::TextSize; use super::Insertion; @@ -313,7 +314,7 @@ mod tests { fn start_of_file() -> Result<()> { fn insert(contents: &str) -> Result { let program = parse_suite(contents, "")?; - let tokens: Vec = ruff_python_parser::tokenize(contents); + let tokens: Vec = ruff_python_parser::tokenize(contents, Mode::Module); let locator = Locator::new(contents); let stylist = Stylist::from_tokens(&tokens, &locator); Ok(Insertion::start_of_file(&program, &locator, &stylist)) @@ -424,10 +425,10 @@ x = 1 #[test] fn start_of_block() { fn insert(contents: &str, offset: TextSize) -> Insertion { - let tokens: Vec = ruff_python_parser::tokenize(contents); + let tokens: Vec = ruff_python_parser::tokenize(contents, Mode::Module); let locator = Locator::new(contents); let stylist = Stylist::from_tokens(&tokens, &locator); - Insertion::start_of_block(offset, &locator, &stylist) + Insertion::start_of_block(offset, &locator, &stylist, PySourceType::default()) } let contents = "if True: pass"; diff --git a/crates/ruff/src/importer/mod.rs b/crates/ruff/src/importer/mod.rs index e08208237b72f..1ef341ee7ef78 100644 --- a/crates/ruff/src/importer/mod.rs +++ b/crates/ruff/src/importer/mod.rs @@ -7,7 +7,7 @@ use std::error::Error; use anyhow::Result; use libcst_native::{ImportAlias, Name, NameOrAttribute}; -use ruff_python_ast::{self as ast, Ranged, Stmt, Suite}; +use ruff_python_ast::{self as ast, PySourceType, Ranged, Stmt, Suite}; use ruff_text_size::TextSize; use ruff_diagnostics::Edit; @@ -121,6 +121,7 @@ impl<'a> Importer<'a> { import: &StmtImports, at: TextSize, semantic: &SemanticModel, + source_type: PySourceType, ) -> Result { // Generate the modified import statement. let content = autofix::codemods::retain_imports( @@ -140,7 +141,7 @@ impl<'a> Importer<'a> { // Add the import to a `TYPE_CHECKING` block. let add_import_edit = if let Some(block) = self.preceding_type_checking_block(at) { // Add the import to the `TYPE_CHECKING` block. - self.add_to_type_checking_block(&content, block.start()) + self.add_to_type_checking_block(&content, block.start(), source_type) } else { // Add the import to a new `TYPE_CHECKING` block. self.add_type_checking_block( @@ -353,8 +354,13 @@ impl<'a> Importer<'a> { } /// Add an import statement to an existing `TYPE_CHECKING` block. 
- fn add_to_type_checking_block(&self, content: &str, at: TextSize) -> Edit { - Insertion::start_of_block(at, self.locator, self.stylist).into_edit(content) + fn add_to_type_checking_block( + &self, + content: &str, + at: TextSize, + source_type: PySourceType, + ) -> Edit { + Insertion::start_of_block(at, self.locator, self.stylist, source_type).into_edit(content) } /// Return the import statement that precedes the given position, if any. diff --git a/crates/ruff/src/jupyter/notebook.rs b/crates/ruff/src/jupyter/notebook.rs index 75a7664d712ca..652e54850f05f 100644 --- a/crates/ruff/src/jupyter/notebook.rs +++ b/crates/ruff/src/jupyter/notebook.rs @@ -24,8 +24,6 @@ use crate::IOError; pub const JUPYTER_NOTEBOOK_EXT: &str = "ipynb"; -const MAGIC_PREFIX: [&str; 3] = ["%", "!", "?"]; - /// Run round-trip source code generation on a given Jupyter notebook file path. pub fn round_trip(path: &Path) -> anyhow::Result { let mut notebook = Notebook::read(path).map_err(|err| { @@ -78,26 +76,21 @@ impl Cell { /// Return `true` if it's a valid code cell. /// /// A valid code cell is a cell where the cell type is [`Cell::Code`] and the - /// source doesn't contain a magic, shell or help command. + /// source doesn't contain a cell magic. fn is_valid_code_cell(&self) -> bool { let source = match self { Cell::Code(cell) => &cell.source, _ => return false, }; - // Ignore a cell if it contains a magic command. There could be valid - // Python code as well, but we'll ignore that for now. - // TODO(dhruvmanila): https://github.com/psf/black/blob/main/src/black/handle_ipynb_magics.py + // Ignore cells containing cell magic. This is different from line magic + // which is allowed and ignored by the parser. !match source { - SourceValue::String(string) => string.lines().any(|line| { - MAGIC_PREFIX - .iter() - .any(|prefix| line.trim_start().starts_with(prefix)) - }), - SourceValue::StringArray(string_array) => string_array.iter().any(|line| { - MAGIC_PREFIX - .iter() - .any(|prefix| line.trim_start().starts_with(prefix)) - }), + SourceValue::String(string) => string + .lines() + .any(|line| line.trim_start().starts_with("%%")), + SourceValue::StringArray(string_array) => string_array + .iter() + .any(|line| line.trim_start().starts_with("%%")), } } } @@ -513,9 +506,10 @@ mod tests { } #[test_case(Path::new("markdown.json"), false; "markdown")] - #[test_case(Path::new("only_magic.json"), false; "only_magic")] - #[test_case(Path::new("code_and_magic.json"), false; "code_and_magic")] + #[test_case(Path::new("only_magic.json"), true; "only_magic")] + #[test_case(Path::new("code_and_magic.json"), true; "code_and_magic")] #[test_case(Path::new("only_code.json"), true; "only_code")] + #[test_case(Path::new("cell_magic.json"), false; "cell_magic")] fn test_is_valid_code_cell(path: &Path, expected: bool) -> Result<()> { assert_eq!(read_jupyter_cell(path)?.is_valid_code_cell(), expected); Ok(()) @@ -567,7 +561,7 @@ print("after empty cells") #[test] fn test_import_sorting() -> Result<()> { let path = "isort.ipynb".to_string(); - let (diagnostics, source_kind) = test_notebook_path( + let (diagnostics, source_kind, _) = test_notebook_path( &path, Path::new("isort_expected.ipynb"), &settings::Settings::for_rule(Rule::UnsortedImports), @@ -576,10 +570,22 @@ print("after empty cells") Ok(()) } + #[test] + fn test_line_magics() -> Result<()> { + let path = "line_magics.ipynb".to_string(); + let (diagnostics, source_kind, _) = test_notebook_path( + &path, + Path::new("line_magics_expected.ipynb"), + 
&settings::Settings::for_rule(Rule::UnusedImport), + )?; + assert_messages!(diagnostics, path, source_kind); + Ok(()) + } + #[test] fn test_json_consistency() -> Result<()> { let path = "before_fix.ipynb".to_string(); - let (_, source_kind) = test_notebook_path( + let (_, _, source_kind) = test_notebook_path( path, Path::new("after_fix.ipynb"), &settings::Settings::for_rule(Rule::UnusedImport), diff --git a/crates/ruff/src/jupyter/snapshots/ruff__jupyter__notebook__tests__import_sorting.snap b/crates/ruff/src/jupyter/snapshots/ruff__jupyter__notebook__tests__import_sorting.snap index 240556c37576f..e0ab3572a84d4 100644 --- a/crates/ruff/src/jupyter/snapshots/ruff__jupyter__notebook__tests__import_sorting.snap +++ b/crates/ruff/src/jupyter/snapshots/ruff__jupyter__notebook__tests__import_sorting.snap @@ -47,4 +47,43 @@ isort.ipynb:cell 2:1:1: I001 [*] Import block is un-sorted or un-formatted 7 9 | def foo(): 8 10 | pass +isort.ipynb:cell 3:1:1: I001 [*] Import block is un-sorted or un-formatted + | +1 | / from pathlib import Path +2 | | import sys +3 | | +4 | | %matplotlib \ + | |_^ I001 +5 | --inline + | + = help: Organize imports + +ℹ Fix +6 6 | # Newline should be added here +7 7 | def foo(): +8 8 | pass + 9 |+import sys +9 10 | from pathlib import Path +10 |-import sys +11 11 | +12 12 | %matplotlib \ +13 13 | --inline + +isort.ipynb:cell 3:7:1: I001 [*] Import block is un-sorted or un-formatted + | +5 | --inline +6 | +7 | / import math +8 | | import abc + | + = help: Organize imports + +ℹ Fix +12 12 | %matplotlib \ +13 13 | --inline +14 14 | + 15 |+import abc +15 16 | import math +16 |-import abc + diff --git a/crates/ruff/src/jupyter/snapshots/ruff__jupyter__notebook__tests__line_magics.snap b/crates/ruff/src/jupyter/snapshots/ruff__jupyter__notebook__tests__line_magics.snap new file mode 100644 index 0000000000000..61211b2c18edb --- /dev/null +++ b/crates/ruff/src/jupyter/snapshots/ruff__jupyter__notebook__tests__line_magics.snap @@ -0,0 +1,23 @@ +--- +source: crates/ruff/src/jupyter/notebook.rs +--- +line_magics.ipynb:cell 1:5:8: F401 [*] `os` imported but unused + | +3 | %matplotlib inline +4 | +5 | import os + | ^^ F401 +6 | +7 | _ = math.pi + | + = help: Remove unused import: `os` + +ℹ Fix +2 2 | +3 3 | %matplotlib inline +4 4 | +5 |-import os +6 5 | +7 6 | _ = math.pi + + diff --git a/crates/ruff/src/linter.rs b/crates/ruff/src/linter.rs index 0f614535da42a..e3fc117253bf2 100644 --- a/crates/ruff/src/linter.rs +++ b/crates/ruff/src/linter.rs @@ -7,14 +7,15 @@ use colored::Colorize; use itertools::Itertools; use log::error; use ruff_python_parser::lexer::LexResult; -use ruff_python_parser::ParseError; +use ruff_python_parser::{AsMode, ParseError}; use rustc_hash::FxHashMap; use ruff_diagnostics::Diagnostic; use ruff_python_ast::imports::ImportMap; +use ruff_python_ast::PySourceType; use ruff_python_codegen::Stylist; use ruff_python_index::Indexer; -use ruff_python_stdlib::path::is_python_stub_file; + use ruff_source_file::{Locator, SourceFileBuilder}; use crate::autofix::{fix_file, FixResult}; @@ -81,6 +82,7 @@ pub fn check_path( settings: &Settings, noqa: flags::Noqa, source_kind: Option<&SourceKind>, + source_type: PySourceType, ) -> LinterResult<(Vec, Option)> { // Aggregate all diagnostics. 
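// `source_type` (the new parameter on `check_path` above) flows from here
// into the token-based checks, the AST checks, and the import checks, so
// each can pick the right lexer mode and stub-file behavior.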
let mut diagnostics = vec![]; @@ -101,9 +103,13 @@ pub fn check_path( .iter_enabled() .any(|rule_code| rule_code.lint_source().is_tokens()) { - let is_stub = is_python_stub_file(path); diagnostics.extend(check_tokens( - &tokens, path, locator, indexer, settings, is_stub, + &tokens, + path, + locator, + indexer, + settings, + source_type.is_stub(), )); } @@ -138,7 +144,11 @@ pub fn check_path( .iter_enabled() .any(|rule_code| rule_code.lint_source().is_imports()); if use_ast || use_imports || use_doc_lines { - match ruff_python_parser::parse_program_tokens(tokens, &path.to_string_lossy()) { + match ruff_python_parser::parse_program_tokens( + tokens, + &path.to_string_lossy(), + source_type.is_jupyter(), + ) { Ok(python_ast) => { if use_ast { diagnostics.extend(check_ast( @@ -151,6 +161,7 @@ pub fn check_path( noqa, path, package, + source_type, )); } if use_imports { @@ -164,6 +175,7 @@ pub fn check_path( path, package, source_kind, + source_type, ); imports = module_imports; diagnostics.extend(import_diagnostics); @@ -256,11 +268,13 @@ const MAX_ITERATIONS: usize = 100; /// Add any missing `# noqa` pragmas to the source code at the given `Path`. pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings) -> Result { + let source_type = PySourceType::from(path); + // Read the file from disk. let contents = std::fs::read_to_string(path)?; // Tokenize once. - let tokens: Vec = ruff_python_parser::tokenize(&contents); + let tokens: Vec = ruff_python_parser::tokenize(&contents, source_type.as_mode()); // Map row and column locations to byte slices (lazily). let locator = Locator::new(&contents); @@ -294,6 +308,7 @@ pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings settings, flags::Noqa::Disabled, None, + source_type, ); // Log any parse errors. @@ -326,9 +341,10 @@ pub fn lint_only( settings: &Settings, noqa: flags::Noqa, source_kind: Option<&SourceKind>, + source_type: PySourceType, ) -> LinterResult<(Vec, Option)> { // Tokenize once. - let tokens: Vec = ruff_python_parser::tokenize(contents); + let tokens: Vec = ruff_python_parser::tokenize(contents, source_type.as_mode()); // Map row and column locations to byte slices (lazily). let locator = Locator::new(contents); @@ -359,6 +375,7 @@ pub fn lint_only( settings, noqa, source_kind, + source_type, ); result.map(|(diagnostics, imports)| { @@ -405,6 +422,7 @@ pub fn lint_fix<'a>( noqa: flags::Noqa, settings: &Settings, source_kind: &mut SourceKind, + source_type: PySourceType, ) -> Result> { let mut transformed = Cow::Borrowed(contents); @@ -420,7 +438,8 @@ pub fn lint_fix<'a>( // Continuously autofix until the source code stabilizes. loop { // Tokenize once. - let tokens: Vec = ruff_python_parser::tokenize(&transformed); + let tokens: Vec = + ruff_python_parser::tokenize(&transformed, source_type.as_mode()); // Map row and column locations to byte slices (lazily). 
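// (`tokenize` above now takes the lexer mode explicitly; for notebook
// sources, `source_type.as_mode()` selects `Mode::Jupyter` so magic
// statements survive re-lexing during the autofix loop.)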
let locator = Locator::new(&transformed); @@ -451,6 +470,7 @@ pub fn lint_fix<'a>( settings, noqa, Some(source_kind), + source_type, ); if iterations == 0 { diff --git a/crates/ruff/src/rules/flake8_annotations/fixes.rs b/crates/ruff/src/rules/flake8_annotations/fixes.rs index 2111978b0458e..82074eb89a8bd 100644 --- a/crates/ruff/src/rules/flake8_annotations/fixes.rs +++ b/crates/ruff/src/rules/flake8_annotations/fixes.rs @@ -1,6 +1,6 @@ use anyhow::{bail, Result}; -use ruff_python_ast::{Ranged, Stmt}; -use ruff_python_parser::{lexer, Mode, Tok}; +use ruff_python_ast::{PySourceType, Ranged, Stmt}; +use ruff_python_parser::{lexer, AsMode, Tok}; use ruff_diagnostics::Edit; use ruff_source_file::Locator; @@ -10,6 +10,7 @@ pub(crate) fn add_return_annotation( locator: &Locator, stmt: &Stmt, annotation: &str, + source_type: PySourceType, ) -> Result { let contents = &locator.contents()[stmt.range()]; @@ -17,7 +18,9 @@ pub(crate) fn add_return_annotation( let mut seen_lpar = false; let mut seen_rpar = false; let mut count = 0u32; - for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, stmt.start()).flatten() { + for (tok, range) in + lexer::lex_starts_at(contents, source_type.as_mode(), stmt.start()).flatten() + { if seen_lpar && seen_rpar { if matches!(tok, Tok::Colon) { return Ok(Edit::insertion(format!(" -> {annotation}"), range.start())); diff --git a/crates/ruff/src/rules/flake8_annotations/rules/definition.rs b/crates/ruff/src/rules/flake8_annotations/rules/definition.rs index 26fd2c9a7ee21..01895a874dfb7 100644 --- a/crates/ruff/src/rules/flake8_annotations/rules/definition.rs +++ b/crates/ruff/src/rules/flake8_annotations/rules/definition.rs @@ -709,8 +709,13 @@ pub(crate) fn definition( ); if checker.patch(diagnostic.kind.rule()) { diagnostic.try_set_fix(|| { - fixes::add_return_annotation(checker.locator(), stmt, "None") - .map(Fix::suggested) + fixes::add_return_annotation( + checker.locator(), + stmt, + "None", + checker.source_type, + ) + .map(Fix::suggested) }); } diagnostics.push(diagnostic); @@ -727,8 +732,13 @@ pub(crate) fn definition( if checker.patch(diagnostic.kind.rule()) { if let Some(return_type) = simple_magic_return_type(name) { diagnostic.try_set_fix(|| { - fixes::add_return_annotation(checker.locator(), stmt, return_type) - .map(Fix::suggested) + fixes::add_return_annotation( + checker.locator(), + stmt, + return_type, + checker.source_type, + ) + .map(Fix::suggested) }); } } diff --git a/crates/ruff/src/rules/flake8_pytest_style/rules/fixture.rs b/crates/ruff/src/rules/flake8_pytest_style/rules/fixture.rs index 3f19235822d74..63352d2ffacd6 100644 --- a/crates/ruff/src/rules/flake8_pytest_style/rules/fixture.rs +++ b/crates/ruff/src/rules/flake8_pytest_style/rules/fixture.rs @@ -550,6 +550,7 @@ fn check_fixture_decorator(checker: &mut Checker, func_name: &str, decorator: &D arguments, edits::Parentheses::Preserve, checker.locator(), + checker.source_type, ) .map(Fix::suggested) }); diff --git a/crates/ruff/src/rules/flake8_pytest_style/rules/parametrize.rs b/crates/ruff/src/rules/flake8_pytest_style/rules/parametrize.rs index fa54765663b5f..e42a3d95501a1 100644 --- a/crates/ruff/src/rules/flake8_pytest_style/rules/parametrize.rs +++ b/crates/ruff/src/rules/flake8_pytest_style/rules/parametrize.rs @@ -1,5 +1,7 @@ -use ruff_python_ast::{self as ast, Arguments, Constant, Decorator, Expr, ExprContext, Ranged}; -use ruff_python_parser::{lexer, Mode, Tok}; +use ruff_python_ast::{ + self as ast, Arguments, Constant, Decorator, Expr, ExprContext, PySourceType, 
Ranged, +}; +use ruff_python_parser::{lexer, AsMode, Tok}; use ruff_text_size::TextRange; use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation}; @@ -95,7 +97,12 @@ fn elts_to_csv(elts: &[Expr], generator: Generator) -> Option { /// ``` /// /// This method assumes that the first argument is a string. -fn get_parametrize_name_range(decorator: &Decorator, expr: &Expr, locator: &Locator) -> TextRange { +fn get_parametrize_name_range( + decorator: &Decorator, + expr: &Expr, + locator: &Locator, + source_type: PySourceType, +) -> TextRange { let mut locations = Vec::new(); let mut implicit_concat = None; @@ -103,7 +110,7 @@ fn get_parametrize_name_range(decorator: &Decorator, expr: &Expr, locator: &Loca // decorator to find them. for (tok, range) in lexer::lex_starts_at( locator.slice(decorator.range()), - Mode::Module, + source_type.as_mode(), decorator.start(), ) .flatten() @@ -141,8 +148,12 @@ fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) { if names.len() > 1 { match names_type { types::ParametrizeNameType::Tuple => { - let name_range = - get_parametrize_name_range(decorator, expr, checker.locator()); + let name_range = get_parametrize_name_range( + decorator, + expr, + checker.locator(), + checker.source_type, + ); let mut diagnostic = Diagnostic::new( PytestParametrizeNamesWrongType { expected: names_type, @@ -172,8 +183,12 @@ fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) { checker.diagnostics.push(diagnostic); } types::ParametrizeNameType::List => { - let name_range = - get_parametrize_name_range(decorator, expr, checker.locator()); + let name_range = get_parametrize_name_range( + decorator, + expr, + checker.locator(), + checker.source_type, + ); let mut diagnostic = Diagnostic::new( PytestParametrizeNamesWrongType { expected: names_type, diff --git a/crates/ruff/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs b/crates/ruff/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs index 93036d0f08c7c..8c0d69bf45d92 100644 --- a/crates/ruff/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs +++ b/crates/ruff/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs @@ -1,5 +1,5 @@ -use ruff_python_ast::{self as ast, Arguments, Expr, Ranged}; -use ruff_python_parser::{lexer, Mode, Tok}; +use ruff_python_ast::{self as ast, Arguments, Expr, PySourceType, Ranged}; +use ruff_python_parser::{lexer, AsMode, Tok}; use ruff_text_size::{TextRange, TextSize}; use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix}; @@ -71,7 +71,7 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr: return; } - let range = match_parens(func.end(), checker.locator()) + let range = match_parens(func.end(), checker.locator(), checker.source_type) .expect("Expected call to include parentheses"); let mut diagnostic = Diagnostic::new(UnnecessaryParenOnRaiseException, range); if checker.patch(diagnostic.kind.rule()) { @@ -82,14 +82,18 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr: } /// Return the range of the first parenthesis pair after a given [`TextSize`]. 
-fn match_parens(start: TextSize, locator: &Locator) -> Option { +fn match_parens( + start: TextSize, + locator: &Locator, + source_type: PySourceType, +) -> Option { let contents = &locator.contents()[usize::from(start)..]; let mut fix_start = None; let mut fix_end = None; let mut count = 0u32; - for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, start).flatten() { + for (tok, range) in lexer::lex_starts_at(contents, source_type.as_mode(), start).flatten() { match tok { Tok::Lpar => { if count == 0 { diff --git a/crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs b/crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs index 11d0a3f4eddff..cb18e590f4dad 100644 --- a/crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs +++ b/crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs @@ -378,6 +378,7 @@ pub(crate) fn nested_if_statements( let colon = first_colon_range( TextRange::new(test.end(), first_stmt.start()), checker.locator().contents(), + checker.source_type.is_jupyter(), ); // Check if the parent is already emitting a larger diagnostic including this if statement diff --git a/crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs b/crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs index 2a9ff71350d8c..55c5d0d78105a 100644 --- a/crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs +++ b/crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs @@ -119,6 +119,7 @@ pub(crate) fn multiple_with_statements( body.first().expect("Expected body to be non-empty").start(), ), checker.locator().contents(), + checker.source_type.is_jupyter(), ); let mut diagnostic = Diagnostic::new( diff --git a/crates/ruff/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs b/crates/ruff/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs index 0447b9ebdca7a..5609580ea8113 100644 --- a/crates/ruff/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs +++ b/crates/ruff/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs @@ -447,6 +447,7 @@ fn fix_imports(checker: &Checker, stmt_id: NodeId, imports: &[Import]) -> Result }, at, checker.semantic(), + checker.source_type, )?; Ok( diff --git a/crates/ruff/src/rules/isort/annotate.rs b/crates/ruff/src/rules/isort/annotate.rs index 1aeca71811e63..d434d0bde769f 100644 --- a/crates/ruff/src/rules/isort/annotate.rs +++ b/crates/ruff/src/rules/isort/annotate.rs @@ -1,4 +1,4 @@ -use ruff_python_ast::{self as ast, Ranged, Stmt}; +use ruff_python_ast::{self as ast, PySourceType, Ranged, Stmt}; use ruff_text_size::TextRange; use ruff_source_file::Locator; @@ -13,6 +13,7 @@ pub(crate) fn annotate_imports<'a>( comments: Vec>, locator: &Locator, split_on_trailing_comma: bool, + source_type: PySourceType, ) -> Vec> { let mut comments_iter = comments.into_iter().peekable(); @@ -119,7 +120,7 @@ pub(crate) fn annotate_imports<'a>( names: aliases, level: level.map(|level| level.to_u32()), trailing_comma: if split_on_trailing_comma { - trailing_comma(import, locator) + trailing_comma(import, locator, source_type) } else { TrailingComma::default() }, diff --git a/crates/ruff/src/rules/isort/comments.rs b/crates/ruff/src/rules/isort/comments.rs index b7963ff1ab244..e2c05fccc3847 100644 --- a/crates/ruff/src/rules/isort/comments.rs +++ b/crates/ruff/src/rules/isort/comments.rs @@ -1,6 +1,7 @@ use std::borrow::Cow; -use ruff_python_parser::{lexer, Mode, Tok}; +use ruff_python_ast::PySourceType; +use ruff_python_parser::{lexer, AsMode, Tok}; use ruff_text_size::{TextRange, TextSize}; use 
ruff_source_file::Locator; @@ -22,9 +23,13 @@ impl Comment<'_> { } /// Collect all comments in an import block. -pub(crate) fn collect_comments<'a>(range: TextRange, locator: &'a Locator) -> Vec> { +pub(crate) fn collect_comments<'a>( + range: TextRange, + locator: &'a Locator, + source_type: PySourceType, +) -> Vec> { let contents = locator.slice(range); - lexer::lex_starts_at(contents, Mode::Module, range.start()) + lexer::lex_starts_at(contents, source_type.as_mode(), range.start()) .flatten() .filter_map(|(tok, range)| { if let Tok::Comment(value) = tok { diff --git a/crates/ruff/src/rules/isort/helpers.rs b/crates/ruff/src/rules/isort/helpers.rs index adf185891db5d..00f9504ac3212 100644 --- a/crates/ruff/src/rules/isort/helpers.rs +++ b/crates/ruff/src/rules/isort/helpers.rs @@ -1,5 +1,5 @@ -use ruff_python_ast::{Ranged, Stmt}; -use ruff_python_parser::{lexer, Mode, Tok}; +use ruff_python_ast::{PySourceType, Ranged, Stmt}; +use ruff_python_parser::{lexer, AsMode, Tok}; use ruff_python_trivia::PythonWhitespace; use ruff_source_file::{Locator, UniversalNewlines}; @@ -8,11 +8,15 @@ use crate::rules::isort::types::TrailingComma; /// Return `true` if a `Stmt::ImportFrom` statement ends with a magic /// trailing comma. -pub(super) fn trailing_comma(stmt: &Stmt, locator: &Locator) -> TrailingComma { +pub(super) fn trailing_comma( + stmt: &Stmt, + locator: &Locator, + source_type: PySourceType, +) -> TrailingComma { let contents = locator.slice(stmt.range()); let mut count = 0u32; let mut trailing_comma = TrailingComma::Absent; - for (tok, _) in lexer::lex_starts_at(contents, Mode::Module, stmt.start()).flatten() { + for (tok, _) in lexer::lex_starts_at(contents, source_type.as_mode(), stmt.start()).flatten() { if matches!(tok, Tok::Lpar) { count = count.saturating_add(1); } diff --git a/crates/ruff/src/rules/isort/mod.rs b/crates/ruff/src/rules/isort/mod.rs index 854b26728b386..eed06c0e3d4d5 100644 --- a/crates/ruff/src/rules/isort/mod.rs +++ b/crates/ruff/src/rules/isort/mod.rs @@ -11,6 +11,7 @@ pub use categorize::{ImportSection, ImportType}; use comments::Comment; use normalize::normalize_imports; use order::order_imports; +use ruff_python_ast::PySourceType; use ruff_python_codegen::Stylist; use ruff_source_file::Locator; use settings::RelativeImportsOrder; @@ -72,6 +73,7 @@ pub(crate) fn format_imports( stylist: &Stylist, src: &[PathBuf], package: Option<&Path>, + source_type: PySourceType, combine_as_imports: bool, force_single_line: bool, force_sort_within_sections: bool, @@ -94,7 +96,13 @@ pub(crate) fn format_imports( section_order: &[ImportSection], ) -> String { let trailer = &block.trailer; - let block = annotate_imports(&block.imports, comments, locator, split_on_trailing_comma); + let block = annotate_imports( + &block.imports, + comments, + locator, + split_on_trailing_comma, + source_type, + ); // Normalize imports (i.e., deduplicate, aggregate `from` imports). 
let block = normalize_imports( diff --git a/crates/ruff/src/rules/isort/rules/add_required_imports.rs b/crates/ruff/src/rules/isort/rules/add_required_imports.rs index 09ff10669df3a..dd71b1e552638 100644 --- a/crates/ruff/src/rules/isort/rules/add_required_imports.rs +++ b/crates/ruff/src/rules/isort/rules/add_required_imports.rs @@ -1,5 +1,5 @@ use log::error; -use ruff_python_ast::{self as ast, Stmt, Suite}; +use ruff_python_ast::{self as ast, PySourceType, Stmt, Suite}; use ruff_text_size::{TextRange, TextSize}; use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Fix}; @@ -91,7 +91,7 @@ fn add_required_import( locator: &Locator, stylist: &Stylist, settings: &Settings, - is_stub: bool, + source_type: PySourceType, ) -> Option { // Don't add imports to semantically-empty files. if python_ast.iter().all(is_docstring_stmt) { @@ -99,7 +99,7 @@ fn add_required_import( } // We don't need to add `__future__` imports to stubs. - if is_stub && required_import.is_future_import() { + if source_type.is_stub() && required_import.is_future_import() { return None; } @@ -131,7 +131,7 @@ pub(crate) fn add_required_imports( locator: &Locator, stylist: &Stylist, settings: &Settings, - is_stub: bool, + source_type: PySourceType, ) -> Vec { settings .isort @@ -172,7 +172,7 @@ pub(crate) fn add_required_imports( locator, stylist, settings, - is_stub, + source_type, ) }) .collect(), @@ -190,7 +190,7 @@ pub(crate) fn add_required_imports( locator, stylist, settings, - is_stub, + source_type, ) }) .collect(), diff --git a/crates/ruff/src/rules/isort/rules/organize_imports.rs b/crates/ruff/src/rules/isort/rules/organize_imports.rs index 02873311d91cf..acc9a0ee31d6c 100644 --- a/crates/ruff/src/rules/isort/rules/organize_imports.rs +++ b/crates/ruff/src/rules/isort/rules/organize_imports.rs @@ -1,7 +1,7 @@ use std::path::Path; use itertools::{EitherOrBoth, Itertools}; -use ruff_python_ast::{Ranged, Stmt}; +use ruff_python_ast::{PySourceType, Ranged, Stmt}; use ruff_text_size::TextRange; use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation}; @@ -87,6 +87,7 @@ pub(crate) fn organize_imports( indexer: &Indexer, settings: &Settings, package: Option<&Path>, + source_type: PySourceType, ) -> Option { let indentation = locator.slice(extract_indentation_range(&block.imports, locator)); let indentation = leading_indentation(indentation); @@ -105,6 +106,7 @@ pub(crate) fn organize_imports( let comments = comments::collect_comments( TextRange::new(range.start(), locator.full_line_end(range.end())), locator, + source_type, ); let trailing_line_end = if block.trailer.is_none() { @@ -123,6 +125,7 @@ pub(crate) fn organize_imports( stylist, &settings.src, package, + source_type, settings.isort.combine_as_imports, settings.isort.force_single_line, settings.isort.force_sort_within_sections, diff --git a/crates/ruff/src/rules/pandas_vet/rules/inplace_argument.rs b/crates/ruff/src/rules/pandas_vet/rules/inplace_argument.rs index 4ecd4f4f017ed..71cb125d7b94b 100644 --- a/crates/ruff/src/rules/pandas_vet/rules/inplace_argument.rs +++ b/crates/ruff/src/rules/pandas_vet/rules/inplace_argument.rs @@ -1,7 +1,7 @@ use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation}; use ruff_macros::{derive_message_formats, violation}; use ruff_python_ast::helpers::is_const_true; -use ruff_python_ast::{self as ast, Keyword, Ranged}; +use ruff_python_ast::{self as ast, Keyword, PySourceType, Ranged}; use ruff_python_semantic::{BindingKind, Import}; use ruff_source_file::Locator; @@ -93,9 +93,12 @@ pub(crate) fn 
inplace_argument(checker: &mut Checker, call: &ast::ExprCall) { && checker.semantic().expr_parent().is_none() && !checker.semantic().scope().kind.is_lambda() { - if let Some(fix) = - convert_inplace_argument_to_assignment(checker.locator(), call, keyword) - { + if let Some(fix) = convert_inplace_argument_to_assignment( + checker.locator(), + call, + keyword, + checker.source_type, + ) { diagnostic.set_fix(fix); } } @@ -116,6 +119,7 @@ fn convert_inplace_argument_to_assignment( locator: &Locator, call: &ast::ExprCall, keyword: &Keyword, + source_type: PySourceType, ) -> Option { // Add the assignment. let attr = call.func.as_attribute_expr()?; @@ -125,8 +129,14 @@ fn convert_inplace_argument_to_assignment( ); // Remove the `inplace` argument. - let remove_argument = - remove_argument(keyword, &call.arguments, Parentheses::Preserve, locator).ok()?; + let remove_argument = remove_argument( + keyword, + &call.arguments, + Parentheses::Preserve, + locator, + source_type, + ) + .ok()?; Some(Fix::suggested_edits(insert_assignment, [remove_argument])) } diff --git a/crates/ruff/src/rules/pydocstyle/rules/not_missing.rs b/crates/ruff/src/rules/pydocstyle/rules/not_missing.rs index d8789d5e0d7b8..00ec74d836efe 100644 --- a/crates/ruff/src/rules/pydocstyle/rules/not_missing.rs +++ b/crates/ruff/src/rules/pydocstyle/rules/not_missing.rs @@ -523,7 +523,7 @@ pub(crate) fn not_missing( definition: &Definition, visibility: Visibility, ) -> bool { - if checker.is_stub() { + if checker.source_type.is_stub() { return true; } diff --git a/crates/ruff/src/rules/pyflakes/mod.rs b/crates/ruff/src/rules/pyflakes/mod.rs index b37f23c7b5462..ea7288d5a6921 100644 --- a/crates/ruff/src/rules/pyflakes/mod.rs +++ b/crates/ruff/src/rules/pyflakes/mod.rs @@ -12,11 +12,14 @@ mod tests { use anyhow::Result; use regex::Regex; use ruff_python_parser::lexer::LexResult; + use test_case::test_case; use ruff_diagnostics::Diagnostic; + use ruff_python_ast::PySourceType; use ruff_python_codegen::Stylist; use ruff_python_index::Indexer; + use ruff_python_parser::AsMode; use ruff_python_trivia::textwrap::dedent; use ruff_source_file::Locator; @@ -504,8 +507,9 @@ mod tests { /// Note that all tests marked with `#[ignore]` should be considered TODOs. 
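// The test helper below defaults to `PySourceType::default()` (a regular
// `.py` module), so the existing pyflakes tests keep lexing with
// `Mode::Module`.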
     fn flakes(contents: &str, expected: &[Rule]) {
         let contents = dedent(contents);
+        let source_type = PySourceType::default();
         let settings = Settings::for_rules(Linter::Pyflakes.rules());
-        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents);
+        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, source_type.as_mode());
         let locator = Locator::new(&contents);
         let stylist = Stylist::from_tokens(&tokens, &locator);
         let indexer = Indexer::from_tokens(&tokens, &locator);
@@ -529,6 +533,7 @@ mod tests {
             &settings,
             flags::Noqa::Enabled,
             None,
+            source_type,
         );
         diagnostics.sort_by_key(Diagnostic::start);
         let actual = diagnostics
diff --git a/crates/ruff/src/rules/pyflakes/rules/f_string_missing_placeholders.rs b/crates/ruff/src/rules/pyflakes/rules/f_string_missing_placeholders.rs
index 0a247f872aaaf..4de5016a3ae76 100644
--- a/crates/ruff/src/rules/pyflakes/rules/f_string_missing_placeholders.rs
+++ b/crates/ruff/src/rules/pyflakes/rules/f_string_missing_placeholders.rs
@@ -1,5 +1,5 @@
-use ruff_python_ast::{Expr, Ranged};
-use ruff_python_parser::{lexer, Mode, StringKind, Tok};
+use ruff_python_ast::{Expr, PySourceType, Ranged};
+use ruff_python_parser::{lexer, AsMode, StringKind, Tok};
 use ruff_text_size::{TextRange, TextSize};
 
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
@@ -52,9 +52,10 @@ impl AlwaysAutofixableViolation for FStringMissingPlaceholders {
 fn find_useless_f_strings<'a>(
     expr: &'a Expr,
     locator: &'a Locator,
+    source_type: PySourceType,
 ) -> impl Iterator<Item = (TextRange, TextRange)> + 'a {
     let contents = locator.slice(expr.range());
-    lexer::lex_starts_at(contents, Mode::Module, expr.start())
+    lexer::lex_starts_at(contents, source_type.as_mode(), expr.start())
         .flatten()
         .filter_map(|(tok, range)| match tok {
             Tok::String {
@@ -85,7 +86,9 @@ pub(crate) fn f_string_missing_placeholders(expr: &Expr, values: &[Expr], checke
         .iter()
         .any(|value| matches!(value, Expr::FormattedValue(_)))
     {
-        for (prefix_range, tok_range) in find_useless_f_strings(expr, checker.locator()) {
+        for (prefix_range, tok_range) in
+            find_useless_f_strings(expr, checker.locator(), checker.source_type)
+        {
             let mut diagnostic = Diagnostic::new(FStringMissingPlaceholders, tok_range);
             if checker.patch(diagnostic.kind.rule()) {
                 diagnostic.set_fix(convert_f_string_to_regular_string(
diff --git a/crates/ruff/src/rules/pyflakes/rules/unused_variable.rs b/crates/ruff/src/rules/pyflakes/rules/unused_variable.rs
index 50c17d1085af9..94cbad3727706 100644
--- a/crates/ruff/src/rules/pyflakes/rules/unused_variable.rs
+++ b/crates/ruff/src/rules/pyflakes/rules/unused_variable.rs
@@ -1,6 +1,6 @@
 use itertools::Itertools;
-use ruff_python_ast::{self as ast, Ranged, Stmt};
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_ast::{self as ast, PySourceType, Ranged, Stmt};
+use ruff_python_parser::{lexer, AsMode, Tok};
 use ruff_text_size::{TextRange, TextSize};
 
 use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
@@ -62,12 +62,17 @@ impl Violation for UnusedVariable {
 }
 
 /// Return the [`TextRange`] of the token before the next match of the predicate
-fn match_token_before<F>(location: TextSize, locator: &Locator, f: F) -> Option<TextRange>
+fn match_token_before<F>(
+    location: TextSize,
+    locator: &Locator,
+    source_type: PySourceType,
+    f: F,
+) -> Option<TextRange>
 where
     F: Fn(Tok) -> bool,
 {
     let contents = locator.after(location);
-    for ((_, range), (tok, _)) in lexer::lex_starts_at(contents, Mode::Module, location)
+    for ((_, range), (tok, _)) in lexer::lex_starts_at(contents, source_type.as_mode(), location)
         .flatten()
         .tuple_windows()
     {
@@ -80,7 +85,12 @@ where
 
 /// Return the [`TextRange`] of the token after the next match of the predicate, skipping over
 /// any bracketed expressions.
-fn match_token_after<F>(location: TextSize, locator: &Locator, f: F) -> Option<TextRange>
+fn match_token_after<F>(
+    location: TextSize,
+    locator: &Locator,
+    source_type: PySourceType,
+    f: F,
+) -> Option<TextRange>
 where
     F: Fn(Tok) -> bool,
 {
@@ -91,7 +101,7 @@ where
     let mut sqb_count = 0u32;
     let mut brace_count = 0u32;
 
-    for ((tok, _), (_, range)) in lexer::lex_starts_at(contents, Mode::Module, location)
+    for ((tok, _), (_, range)) in lexer::lex_starts_at(contents, source_type.as_mode(), location)
         .flatten()
         .tuple_windows()
     {
@@ -131,7 +141,12 @@ where
 
 /// Return the [`TextRange`] of the token matching the predicate or the first mismatched
 /// bracket, skipping over any bracketed expressions.
-fn match_token_or_closing_brace<F>(location: TextSize, locator: &Locator, f: F) -> Option<TextRange>
+fn match_token_or_closing_brace<F>(
+    location: TextSize,
+    locator: &Locator,
+    source_type: PySourceType,
+    f: F,
+) -> Option<TextRange>
 where
     F: Fn(Tok) -> bool,
 {
@@ -142,7 +157,7 @@ where
     let mut sqb_count = 0u32;
     let mut brace_count = 0u32;
 
-    for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, location).flatten() {
+    for (tok, range) in lexer::lex_starts_at(contents, source_type.as_mode(), location).flatten() {
         match tok {
             Tok::Lpar => {
                 par_count = par_count.saturating_add(1);
@@ -204,7 +219,10 @@ fn remove_unused_variable(
             // If the expression is complex (`x = foo()`), remove the assignment,
             // but preserve the right-hand side.
             let start = target.start();
-            let end = match_token_after(start, checker.locator(), |tok| tok == Tok::Equal)?
+            let end =
+                match_token_after(start, checker.locator(), checker.source_type, |tok| {
+                    tok == Tok::Equal
+                })?
                 .start();
             let edit = Edit::deletion(start, end);
             Some(Fix::suggested(edit))
@@ -230,7 +248,10 @@ fn remove_unused_variable(
             // but preserve the right-hand side.
             let start = stmt.start();
             let end =
-                match_token_after(start, checker.locator(), |tok| tok == Tok::Equal)?.start();
+                match_token_after(start, checker.locator(), checker.source_type, |tok| {
+                    tok == Tok::Equal
+                })?
+                .start();
             let edit = Edit::deletion(start, end);
             Some(Fix::suggested(edit))
         } else {
@@ -249,16 +270,21 @@ fn remove_unused_variable(
         if let Some(optional_vars) = &item.optional_vars {
             if optional_vars.range() == range {
                 // Find the first token before the `as` keyword.
-                let start =
-                    match_token_before(item.context_expr.start(), checker.locator(), |tok| {
-                        tok == Tok::As
-                    })?
-                    .end();
+                let start = match_token_before(
+                    item.context_expr.start(),
+                    checker.locator(),
+                    checker.source_type,
+                    |tok| tok == Tok::As,
+                )?
+                .end();
 
                 // Find the first colon, comma, or closing bracket after the `as` keyword.
-                let end = match_token_or_closing_brace(start, checker.locator(), |tok| {
-                    tok == Tok::Colon || tok == Tok::Comma
-                })?
+                let end = match_token_or_closing_brace(
+                    start,
+                    checker.locator(),
+                    checker.source_type,
+                    |tok| tok == Tok::Colon || tok == Tok::Comma,
+                )?
                 .start();
 
                 let edit = Edit::deletion(start, end);
diff --git a/crates/ruff/src/rules/pylint/rules/bad_string_format_type.rs b/crates/ruff/src/rules/pylint/rules/bad_string_format_type.rs
index 857fc4e5db2b6..1f2905751d0fe 100644
--- a/crates/ruff/src/rules/pylint/rules/bad_string_format_type.rs
+++ b/crates/ruff/src/rules/pylint/rules/bad_string_format_type.rs
@@ -2,7 +2,7 @@ use std::str::FromStr;
 
 use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
 use ruff_python_literal::cformat::{CFormatPart, CFormatSpec, CFormatStrOrBytes, CFormatString};
-use ruff_python_parser::{lexer, Mode};
+use ruff_python_parser::{lexer, AsMode};
 use ruff_text_size::TextRange;
 use rustc_hash::FxHashMap;
 
@@ -203,7 +203,9 @@ pub(crate) fn bad_string_format_type(checker: &mut Checker, expr: &Expr, right:
     // Grab each string segment (in case there's an implicit concatenation).
     let content = checker.locator().slice(expr.range());
     let mut strings: Vec<TextRange> = vec![];
-    for (tok, range) in lexer::lex_starts_at(content, Mode::Module, expr.start()).flatten() {
+    for (tok, range) in
+        lexer::lex_starts_at(content, checker.source_type.as_mode(), expr.start()).flatten()
+    {
         if tok.is_string() {
             strings.push(range);
         } else if tok.is_percent() {
diff --git a/crates/ruff/src/rules/pyupgrade/rules/printf_string_formatting.rs b/crates/ruff/src/rules/pyupgrade/rules/printf_string_formatting.rs
index 073639447fac0..76192f9c88ab8 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/printf_string_formatting.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/printf_string_formatting.rs
@@ -4,7 +4,7 @@ use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
 use ruff_python_literal::cformat::{
     CConversionFlags, CFormatPart, CFormatPrecision, CFormatQuantity, CFormatString,
 };
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, AsMode, Tok};
 use ruff_text_size::TextRange;
 
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
@@ -339,7 +339,7 @@ pub(crate) fn printf_string_formatting(
     let mut extension = None;
     for (tok, range) in lexer::lex_starts_at(
         checker.locator().slice(expr.range()),
-        Mode::Module,
+        checker.source_type.as_mode(),
         expr.start(),
     )
     .flatten()
diff --git a/crates/ruff/src/rules/pyupgrade/rules/redundant_open_modes.rs b/crates/ruff/src/rules/pyupgrade/rules/redundant_open_modes.rs
index 1e45960d33f07..d09ec7315598d 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/redundant_open_modes.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/redundant_open_modes.rs
@@ -4,8 +4,8 @@ use anyhow::{anyhow, Result};
 
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
 use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
-use ruff_python_parser::{lexer, Mode};
+use ruff_python_ast::{self as ast, Constant, Expr, PySourceType, Ranged};
+use ruff_python_parser::{lexer, AsMode};
 use ruff_python_semantic::SemanticModel;
 use ruff_source_file::Locator;
 use ruff_text_size::TextSize;
@@ -84,6 +84,7 @@ pub(crate) fn redundant_open_modes(checker: &mut Checker, call: &ast::ExprCall)
                 mode.replacement_value(),
                 checker.locator(),
                 checker.patch(Rule::RedundantOpenModes),
+                checker.source_type,
             ));
         }
     }
@@ -103,6 +104,7 @@ pub(crate) fn redundant_open_modes(checker: &mut Checker, call: &ast::ExprCall)
                     mode.replacement_value(),
                     checker.locator(),
                     checker.patch(Rule::RedundantOpenModes),
+                    checker.source_type,
                 ));
             }
         }
@@ -169,6 +171,7 @@ fn create_check<T: Ranged>(
     replacement_value: Option<&str>,
     locator: &Locator,
     patch: bool,
+    source_type: PySourceType,
 ) -> Diagnostic {
     let mut diagnostic = Diagnostic::new(
         RedundantOpenModes {
@@ -184,7 +187,7 @@
         )));
     } else {
         diagnostic.try_set_fix(|| {
-            create_remove_param_fix(locator, expr, mode_param).map(Fix::automatic)
+            create_remove_param_fix(locator, expr, mode_param, source_type).map(Fix::automatic)
         });
     }
 }
@@ -195,6 +198,7 @@ fn create_remove_param_fix<T: Ranged>(
     locator: &Locator,
     expr: &T,
     mode_param: &Expr,
+    source_type: PySourceType,
 ) -> Result<Edit> {
     let content = locator.slice(expr.range());
     // Find the last comma before mode_param and create a deletion fix
@@ -203,7 +207,8 @@ fn create_remove_param_fix<T: Ranged>(
     let mut fix_end: Option<TextSize> = None;
     let mut is_first_arg: bool = false;
     let mut delete_first_arg: bool = false;
-    for (tok, range) in lexer::lex_starts_at(content, Mode::Module, expr.start()).flatten() {
+    for (tok, range) in lexer::lex_starts_at(content, source_type.as_mode(), expr.start()).flatten()
+    {
         if range.start() == mode_param.start() {
             if is_first_arg {
                 delete_first_arg = true;
diff --git a/crates/ruff/src/rules/pyupgrade/rules/replace_stdout_stderr.rs b/crates/ruff/src/rules/pyupgrade/rules/replace_stdout_stderr.rs
index 3c863c12282d4..3482e99006488 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/replace_stdout_stderr.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/replace_stdout_stderr.rs
@@ -2,7 +2,7 @@ use anyhow::Result;
 
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
 use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::{self as ast, Keyword, Ranged};
+use ruff_python_ast::{self as ast, Keyword, PySourceType, Ranged};
 use ruff_source_file::Locator;
 
 use crate::autofix::edits::{remove_argument, Parentheses};
@@ -56,6 +56,7 @@ fn generate_fix(
     stderr: &Keyword,
     call: &ast::ExprCall,
     locator: &Locator,
+    source_type: PySourceType,
 ) -> Result<Fix> {
     let (first, second) = if stdout.start() < stderr.start() {
         (stdout, stderr)
@@ -69,6 +70,7 @@ fn generate_fix(
             &call.arguments,
             Parentheses::Preserve,
             locator,
+            source_type,
         )?],
     ))
 }
@@ -103,7 +105,9 @@ pub(crate) fn replace_stdout_stderr(checker: &mut Checker, call: &ast::ExprCall)
         let mut diagnostic = Diagnostic::new(ReplaceStdoutStderr, call.range());
         if checker.patch(diagnostic.kind.rule()) {
-            diagnostic.try_set_fix(|| generate_fix(stdout, stderr, call, checker.locator()));
+            diagnostic.try_set_fix(|| {
+                generate_fix(stdout, stderr, call, checker.locator(), checker.source_type)
+            });
         }
         checker.diagnostics.push(diagnostic);
     }
diff --git a/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs b/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs
index 31396177b47c7..8e9dae557e376 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs
@@ -1,7 +1,7 @@
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
 use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::{self as ast, Arguments, Constant, Expr, Keyword, Ranged};
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_ast::{self as ast, Arguments, Constant, Expr, Keyword, PySourceType, Ranged};
+use ruff_python_parser::{lexer, AsMode, Tok};
 use ruff_source_file::Locator;
 use ruff_text_size::TextRange;
 
@@ -119,12 +119,18 @@ fn match_encoding_arg(arguments: &Arguments) -> Option<EncodingArg> {
 }
 
 /// Return a [`Fix`] replacing the call to encode with a byte string.
-fn replace_with_bytes_literal<T: Ranged>(locator: &Locator, expr: &T) -> Fix {
+fn replace_with_bytes_literal<T: Ranged>(
+    locator: &Locator,
+    expr: &T,
+    source_type: PySourceType,
+) -> Fix {
     // Build up a replacement string by prefixing all string tokens with `b`.
     let contents = locator.slice(expr.range());
     let mut replacement = String::with_capacity(contents.len() + 1);
     let mut prev = expr.start();
-    for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, expr.start()).flatten() {
+    for (tok, range) in
+        lexer::lex_starts_at(contents, source_type.as_mode(), expr.start()).flatten()
+    {
         match tok {
             Tok::Dot => break,
             Tok::String { .. } => {
@@ -166,7 +172,11 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
                 call.range(),
             );
             if checker.patch(Rule::UnnecessaryEncodeUTF8) {
-                diagnostic.set_fix(replace_with_bytes_literal(checker.locator(), call));
+                diagnostic.set_fix(replace_with_bytes_literal(
+                    checker.locator(),
+                    call,
+                    checker.source_type,
+                ));
             }
             checker.diagnostics.push(diagnostic);
         } else if let EncodingArg::Keyword(kwarg) = encoding_arg {
@@ -185,6 +195,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
                         &call.arguments,
                         Parentheses::Preserve,
                         checker.locator(),
+                        checker.source_type,
                     )
                     .map(Fix::automatic)
                 });
@@ -205,6 +216,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
                        &call.arguments,
                        Parentheses::Preserve,
                        checker.locator(),
+                       checker.source_type,
                    )
                    .map(Fix::automatic)
                });
@@ -232,6 +244,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
                        &call.arguments,
                        Parentheses::Preserve,
                        checker.locator(),
+                       checker.source_type,
                    )
                    .map(Fix::automatic)
                });
@@ -252,6 +265,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
                        &call.arguments,
                        Parentheses::Preserve,
                        checker.locator(),
+                       checker.source_type,
                    )
                    .map(Fix::automatic)
                });
diff --git a/crates/ruff/src/rules/pyupgrade/rules/useless_object_inheritance.rs b/crates/ruff/src/rules/pyupgrade/rules/useless_object_inheritance.rs
index 127ab261c8b96..3736942d8460e 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/useless_object_inheritance.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/useless_object_inheritance.rs
@@ -69,8 +69,14 @@ pub(crate) fn useless_object_inheritance(checker: &mut Checker, class_def: &ast:
         );
         if checker.patch(diagnostic.kind.rule()) {
             diagnostic.try_set_fix(|| {
-                remove_argument(base, arguments, Parentheses::Remove, checker.locator())
-                    .map(Fix::automatic)
+                remove_argument(
+                    base,
+                    arguments,
+                    Parentheses::Remove,
+                    checker.locator(),
+                    checker.source_type,
+                )
+                .map(Fix::automatic)
             });
         }
         checker.diagnostics.push(diagnostic);
diff --git a/crates/ruff/src/test.rs b/crates/ruff/src/test.rs
index 9e72487231e11..b89ce5a80fd93 100644
--- a/crates/ruff/src/test.rs
+++ b/crates/ruff/src/test.rs
@@ -7,11 +7,14 @@ use std::path::Path;
 
 use anyhow::Result;
 use itertools::Itertools;
 use ruff_python_parser::lexer::LexResult;
+
 use rustc_hash::FxHashMap;
 
 use ruff_diagnostics::{AutofixKind, Diagnostic};
+use ruff_python_ast::PySourceType;
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
+use ruff_python_parser::AsMode;
 use ruff_python_trivia::textwrap::dedent;
 use ruff_source_file::{Locator, SourceFileBuilder};
@@ -61,8 +64,9 @@ pub(crate) fn test_notebook_path(
     path: impl AsRef<Path>,
     expected: impl AsRef<Path>,
     settings: &Settings,
-) -> Result<(Vec<Message>, SourceKind)> {
+) -> Result<(Vec<Message>, SourceKind, SourceKind)> {
     let mut source_kind = SourceKind::Jupyter(read_jupyter_notebook(path.as_ref())?);
+    let original_source_kind = source_kind.clone();
     let messages = test_contents(&mut source_kind, path.as_ref(), settings);
     let expected_notebook = read_jupyter_notebook(expected.as_ref())?;
     if let SourceKind::Jupyter(notebook) = &source_kind {
@@ -70,7 +74,7 @@ pub(crate) fn test_notebook_path(
         assert_eq!(notebook.index(), expected_notebook.index());
         assert_eq!(notebook.content(), expected_notebook.content());
     };
-    Ok((messages, source_kind))
+    Ok((messages, original_source_kind, source_kind))
 }
 
 /// Run [`check_path`] on a snippet of Python code.
@@ -100,7 +104,8 @@ pub(crate) fn max_iterations() -> usize {
 /// asserts that autofixes converge after a fixed number of iterations.
 fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings) -> Vec<Message> {
     let contents = source_kind.content().to_string();
-    let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents);
+    let source_type = PySourceType::from(path);
+    let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, source_type.as_mode());
     let locator = Locator::new(&contents);
     let stylist = Stylist::from_tokens(&tokens, &locator);
     let indexer = Indexer::from_tokens(&tokens, &locator);
@@ -125,6 +130,7 @@ fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings)
         settings,
         flags::Noqa::Enabled,
         Some(source_kind),
+        source_type,
     );
 
     let source_has_errors = error.is_some();
@@ -162,7 +168,8 @@ fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings)
             notebook.update(&source_map, &fixed_contents);
         };
 
-        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&fixed_contents);
+        let tokens: Vec<LexResult> =
+            ruff_python_parser::tokenize(&fixed_contents, source_type.as_mode());
         let locator = Locator::new(&fixed_contents);
         let stylist = Stylist::from_tokens(&tokens, &locator);
         let indexer = Indexer::from_tokens(&tokens, &locator);
@@ -187,6 +194,7 @@ fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings)
             settings,
             flags::Noqa::Enabled,
             Some(source_kind),
+            source_type,
         );
 
         if let Some(fixed_error) = fixed_error {
diff --git a/crates/ruff_benchmark/benches/linter.rs b/crates/ruff_benchmark/benches/linter.rs
index 7abaa4fdafa25..a93f5dbb2817a 100644
--- a/crates/ruff_benchmark/benches/linter.rs
+++ b/crates/ruff_benchmark/benches/linter.rs
@@ -9,6 +9,7 @@ use ruff::linter::lint_only;
 use ruff::settings::{flags, Settings};
 use ruff::RuleSelector;
 use ruff_benchmark::{TestCase, TestCaseSpeed, TestFile, TestFileDownloadError};
+use ruff_python_ast::PySourceType;
 
 #[cfg(target_os = "windows")]
 #[global_allocator]
@@ -57,13 +58,15 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &Settings) {
             &case,
             |b, case| {
                 b.iter(|| {
+                    let path = case.path();
                     let result = lint_only(
                         case.code(),
-                        &case.path(),
+                        &path,
                         None,
                         settings,
                         flags::Noqa::Enabled,
                         None,
+                        PySourceType::from(path.as_path()),
                     );
 
                     // Assert that file contains no parse errors
diff --git a/crates/ruff_cli/src/diagnostics.rs b/crates/ruff_cli/src/diagnostics.rs
index 54ce93a57dbeb..5ad6a50fd2788 100644
--- a/crates/ruff_cli/src/diagnostics.rs
+++ b/crates/ruff_cli/src/diagnostics.rs
@@ -29,7 +29,8 @@ use ruff::{fs, IOError};
 use ruff_diagnostics::Diagnostic;
 use ruff_macros::CacheKey;
 use ruff_python_ast::imports::ImportMap;
-use ruff_python_stdlib::path::{is_jupyter_notebook, is_project_toml};
+use ruff_python_ast::PySourceType;
+use ruff_python_stdlib::path::is_project_toml;
 use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};
 
 #[derive(CacheKey)]
@@ -211,8 +212,10 @@ pub(crate) fn lint_path(
         });
     }
 
+    let source_type = PySourceType::from(path);
+
     // Read the file from disk
-    let mut source_kind = if is_jupyter_notebook(path) {
+    let mut source_kind = if source_type.is_jupyter() {
         match load_jupyter_notebook(path) {
             Ok(notebook) => SourceKind::Jupyter(notebook),
             Err(diagnostic) => return Ok(*diagnostic),
@@ -249,6 +252,7 @@ pub(crate) fn lint_path(
             noqa,
             &settings.lib,
             &mut source_kind,
+            source_type,
         ) {
             if !fixed.is_empty() {
                 match autofix {
@@ -335,6 +339,7 @@ pub(crate) fn lint_path(
             &settings.lib,
             noqa,
             Some(&source_kind),
+            source_type,
         );
         let fixed = FxHashMap::default();
         (result, fixed)
@@ -347,6 +352,7 @@ pub(crate) fn lint_path(
             &settings.lib,
             noqa,
             Some(&source_kind),
+            source_type,
         );
         let fixed = FxHashMap::default();
         (result, fixed)
@@ -396,6 +402,8 @@ pub(crate) fn lint_stdin(
     autofix: flags::FixMode,
 ) -> Result<Diagnostics> {
     let mut source_kind = SourceKind::Python(contents.to_string());
+    let source_type = PySourceType::default();
+
     // Lint the inputs.
     let (
         LinterResult {
@@ -415,6 +423,7 @@ pub(crate) fn lint_stdin(
             noqa,
             settings,
             &mut source_kind,
+            source_type,
         ) {
             match autofix {
                 flags::FixMode::Apply => {
@@ -450,6 +459,7 @@ pub(crate) fn lint_stdin(
             settings,
             noqa,
             Some(&source_kind),
+            source_type,
         );
         let fixed = FxHashMap::default();
 
@@ -468,6 +478,7 @@ pub(crate) fn lint_stdin(
             settings,
             noqa,
             Some(&source_kind),
+            source_type,
         );
         let fixed = FxHashMap::default();
         (result, fixed)
diff --git a/crates/ruff_dev/src/print_ast.rs b/crates/ruff_dev/src/print_ast.rs
index fc141b13e7637..14aa14bfac6d7 100644
--- a/crates/ruff_dev/src/print_ast.rs
+++ b/crates/ruff_dev/src/print_ast.rs
@@ -5,18 +5,26 @@ use std::fs;
 use std::path::PathBuf;
 
 use anyhow::Result;
-use ruff_python_parser::parse_suite;
+use ruff_python_parser::{parse, Mode};
 
 #[derive(clap::Args)]
 pub(crate) struct Args {
     /// Python file for which to generate the AST.
     #[arg(required = true)]
     file: PathBuf,
+    /// Run in Jupyter mode i.e., allow line magics.
+    #[arg(long)]
+    jupyter: bool,
 }
 
 pub(crate) fn main(args: &Args) -> Result<()> {
     let contents = fs::read_to_string(&args.file)?;
-    let python_ast = parse_suite(&contents, &args.file.to_string_lossy())?;
+    let mode = if args.jupyter {
+        Mode::Jupyter
+    } else {
+        Mode::Module
+    };
+    let python_ast = parse(&contents, mode, &args.file.to_string_lossy())?;
     println!("{python_ast:#?}");
     Ok(())
 }
diff --git a/crates/ruff_dev/src/print_tokens.rs b/crates/ruff_dev/src/print_tokens.rs
index 573337e3f7fc2..c3e17904bcb05 100644
--- a/crates/ruff_dev/src/print_tokens.rs
+++ b/crates/ruff_dev/src/print_tokens.rs
@@ -12,11 +12,19 @@ pub(crate) struct Args {
     /// Python file for which to generate the AST.
     #[arg(required = true)]
     file: PathBuf,
+    /// Run in Jupyter mode i.e., allow line magics (`%`, `!`, `?`, `/`, `,`, `;`).
+    #[arg(long)]
+    jupyter: bool,
 }
 
 pub(crate) fn main(args: &Args) -> Result<()> {
     let contents = fs::read_to_string(&args.file)?;
-    for (tok, range) in lexer::lex(&contents, Mode::Module).flatten() {
+    let mode = if args.jupyter {
+        Mode::Jupyter
+    } else {
+        Mode::Module
+    };
+    for (tok, range) in lexer::lex(&contents, mode).flatten() {
         println!(
             "{start:#?} {tok:#?} {end:#?}",
             start = range.start(),
diff --git a/crates/ruff_python_parser/src/lib.rs b/crates/ruff_python_parser/src/lib.rs
index 0697099a43af2..0e0d607a12768 100644
--- a/crates/ruff_python_parser/src/lib.rs
+++ b/crates/ruff_python_parser/src/lib.rs
@@ -114,7 +114,7 @@ pub use parser::{
     parse, parse_expression, parse_expression_starts_at, parse_program, parse_starts_at,
     parse_suite, parse_tokens, ParseError, ParseErrorType,
 };
-use ruff_python_ast::{CmpOp, Expr, Mod, Ranged, Suite};
+use ruff_python_ast::{CmpOp, Expr, Mod, PySourceType, Ranged, Suite};
 use ruff_text_size::{TextRange, TextSize};
 pub use string::FStringErrorType;
 pub use token::{StringKind, Tok, TokenKind};
@@ -130,9 +130,9 @@ mod token;
 pub mod typing;
 
 /// Collect tokens up to and including the first error.
-pub fn tokenize(contents: &str) -> Vec<LexResult> {
+pub fn tokenize(contents: &str, mode: Mode) -> Vec<LexResult> {
     let mut tokens: Vec<LexResult> = vec![];
-    for tok in lexer::lex(contents, Mode::Module) {
+    for tok in lexer::lex(contents, mode) {
         let is_err = tok.is_err();
         tokens.push(tok);
         if is_err {
@@ -146,17 +146,32 @@ pub fn parse_program_tokens(
     lxr: Vec<LexResult>,
     source_path: &str,
+    is_jupyter_notebook: bool,
 ) -> anyhow::Result<Suite> {
-    match parse_tokens(lxr, Mode::Module, source_path)? {
+    let mode = if is_jupyter_notebook {
+        Mode::Jupyter
+    } else {
+        Mode::Module
+    };
+    match parse_tokens(lxr, mode, source_path)? {
         Mod::Module(m) => Ok(m.body),
         Mod::Expression(_) => unreachable!("Mode::Module doesn't return other variant"),
     }
 }
 
 /// Return the `Range` of the first `Tok::Colon` token in a `Range`.
-pub fn first_colon_range(range: TextRange, source: &str) -> Option<TextRange> {
+pub fn first_colon_range(
+    range: TextRange,
+    source: &str,
+    is_jupyter_notebook: bool,
+) -> Option<TextRange> {
     let contents = &source[range];
-    let range = lexer::lex_starts_at(contents, Mode::Module, range.start())
+    let mode = if is_jupyter_notebook {
+        Mode::Jupyter
+    } else {
+        Mode::Module
+    };
+    let range = lexer::lex_starts_at(contents, mode, range.start())
         .flatten()
         .find(|(tok, _)| tok.is_colon())
         .map(|(_, range)| range);
@@ -308,6 +323,19 @@ impl std::str::FromStr for Mode {
     }
 }
 
+pub trait AsMode {
+    fn as_mode(&self) -> Mode;
+}
+
+impl AsMode for PySourceType {
+    fn as_mode(&self) -> Mode {
+        match self {
+            PySourceType::Python | PySourceType::Stub => Mode::Module,
+            PySourceType::Jupyter => Mode::Jupyter,
+        }
+    }
+}
+
 /// Returned when a given mode is not valid.
 #[derive(Debug)]
 pub struct ModeParseError;
@@ -357,6 +385,7 @@ mod tests {
         let range = first_colon_range(
             TextRange::new(TextSize::from(0), contents.text_len()),
             contents,
+            false,
         )
         .unwrap();
         assert_eq!(&contents[range], ":");
diff --git a/crates/ruff_shrinking/src/main.rs b/crates/ruff_shrinking/src/main.rs
index 4c62cfa35534f..5efef3a728464 100644
--- a/crates/ruff_shrinking/src/main.rs
+++ b/crates/ruff_shrinking/src/main.rs
@@ -36,6 +36,7 @@ use regex::Regex;
 use ruff_python_ast::statement_visitor::{walk_body, walk_stmt, StatementVisitor};
 use ruff_python_ast::visitor::{walk_expr, Visitor};
 use ruff_python_ast::{Expr, Ranged, Stmt, Suite};
+use ruff_python_parser::Mode;
 use ruff_text_size::TextRange;
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};
@@ -275,7 +276,7 @@ impl Strategy for StrategyRemoveToken {
         input: &'a str,
         _ast: &'a Suite,
     ) -> Result> {
-        let token_ranges: Vec<_> = ruff_python_parser::tokenize(input)
+        let token_ranges: Vec<_> = ruff_python_parser::tokenize(input, Mode::Module)
             .into_iter()
             // At this point we know we have valid python code
             .map(Result::unwrap)
@@ -320,9 +321,9 @@ fn minimization_step(
     pattern: &Regex,
     last_strategy_and_idx: Option<(&'static dyn Strategy, usize)>,
 ) -> Result> {
-    let tokens = ruff_python_parser::tokenize(input);
-    let ast =
-        ruff_python_parser::parse_program_tokens(tokens, "input.py").context("not valid python")?;
+    let tokens = ruff_python_parser::tokenize(input, Mode::Module);
+    let ast = ruff_python_parser::parse_program_tokens(tokens, "input.py", false)
+        .context("not valid python")?;
 
     // Try the last succeeding strategy first, skipping all that failed last time
     if let Some((last_strategy, last_idx)) = last_strategy_and_idx {
diff --git a/crates/ruff_wasm/src/lib.rs b/crates/ruff_wasm/src/lib.rs
index 9180a8c3983f9..d8aa2c0e64193 100644
--- a/crates/ruff_wasm/src/lib.rs
+++ b/crates/ruff_wasm/src/lib.rs
@@ -25,6 +25,7 @@ use ruff_python_ast::PySourceType;
 use ruff_python_codegen::Stylist;
 use ruff_python_formatter::{format_module, format_node, PyFormatOptions};
 use ruff_python_index::{CommentRangesBuilder, Indexer};
+use ruff_python_parser::AsMode;
 use ruff_source_file::{Locator, SourceLocation};
 
 #[wasm_bindgen(typescript_custom_section)]
@@ -197,8 +198,10 @@ impl Workspace {
     }
 
     pub fn check(&self, contents: &str) -> Result {
+        let source_type = PySourceType::default();
+
         // Tokenize once.
-        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents);
+        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, source_type.as_mode());
 
         // Map row and column locations to byte slices (lazily).
         let locator = Locator::new(contents);
@@ -228,6 +231,7 @@ impl Workspace {
             &self.settings,
             flags::Noqa::Enabled,
             None,
+            source_type,
        );
 
        let source_code = locator.to_source_code();
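
Note: the recurring pattern in this patch is that callers derive the lexer/parser `Mode` from a
`PySourceType` instead of hard-coding `Mode::Module`. A minimal sketch of the intended call
pattern, using only the APIs introduced above (the helper name `tokenize_for_path` is
hypothetical, for illustration only):

    use std::path::Path;

    use ruff_python_ast::PySourceType;
    use ruff_python_parser::{lexer::LexResult, AsMode};

    fn tokenize_for_path(path: &Path, contents: &str) -> Vec<LexResult> {
        // `PySourceType::from` inspects the file extension; `.ipynb` maps to
        // `PySourceType::Jupyter`, stubs to `PySourceType::Stub`.
        let source_type = PySourceType::from(path);
        // `as_mode` selects `Mode::Jupyter` only for notebooks, so line magics
        // such as `%matplotlib` lex as magic tokens rather than syntax errors.
        ruff_python_parser::tokenize(contents, source_type.as_mode())
    }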