Skip to content

Commit

Permalink
Move Q001-3 to AST based checker
Browse files Browse the repository at this point in the history
  • Loading branch information
dhruvmanila committed Mar 9, 2024
1 parent ab4b600 commit 1e24c01
Show file tree
Hide file tree
Showing 4 changed files with 33 additions and 144 deletions.
9 changes: 8 additions & 1 deletion crates/ruff_linter/src/checkers/ast/analyze/string_like.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use ruff_python_ast::StringLike;

use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::rules::{flake8_bandit, flake8_pyi, ruff};
use crate::rules::{flake8_bandit, flake8_pyi, flake8_quotes, ruff};

/// Run lint rules over a [`StringLike`] syntax node.
pub(crate) fn string_like(string_like: StringLike, checker: &mut Checker) {
Expand All @@ -23,4 +23,11 @@ pub(crate) fn string_like(string_like: StringLike, checker: &mut Checker) {
flake8_pyi::rules::string_or_bytes_too_long(checker, string_like);
}
}
if checker.any_enabled(&[
Rule::BadQuotesInlineString,
Rule::BadQuotesMultilineString,
Rule::BadQuotesDocstring,
]) {
flake8_quotes::rules::check_string_quotes(checker, string_like);
}
}
8 changes: 0 additions & 8 deletions crates/ruff_linter/src/checkers/tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -130,14 +130,6 @@ pub(crate) fn check_tokens(
flake8_quotes::rules::unnecessary_escaped_quote(&mut diagnostics, tokens, locator);
}

if settings.rules.any_enabled(&[
Rule::BadQuotesInlineString,
Rule::BadQuotesMultilineString,
Rule::BadQuotesDocstring,
]) {
flake8_quotes::rules::check_string_quotes(&mut diagnostics, tokens, locator, settings);
}

if settings.rules.any_enabled(&[
Rule::SingleLineImplicitStringConcatenation,
Rule::MultiLineImplicitStringConcatenation,
Expand Down
3 changes: 0 additions & 3 deletions crates/ruff_linter/src/registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -257,9 +257,6 @@ impl Rule {
| Rule::TrailingWhitespace => LintSource::PhysicalLines,
Rule::AmbiguousUnicodeCharacterComment
| Rule::AvoidableEscapedQuote
| Rule::BadQuotesDocstring
| Rule::BadQuotesInlineString
| Rule::BadQuotesMultilineString
| Rule::BlanketNOQA
| Rule::BlanketTypeIgnore
| Rule::BlankLineAfterDecorator
Expand Down
157 changes: 25 additions & 132 deletions crates/ruff_linter/src/rules/flake8_quotes/rules/check_string_quotes.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,9 @@
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::Tok;
use ruff_text_size::{TextRange, TextSize};

use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_source_file::Locator;

use crate::lex::docstring_detection::StateMachine;
use ruff_python_ast::StringLike;
use ruff_text_size::{Ranged, TextRange};

use crate::settings::LinterSettings;
use crate::checkers::ast::Checker;

use super::super::settings::Quote;

Expand Down Expand Up @@ -232,17 +227,17 @@ impl<'a> From<&'a str> for Trivia<'a> {
}

/// Q002
fn docstring(locator: &Locator, range: TextRange, settings: &LinterSettings) -> Option<Diagnostic> {
let quotes_settings = &settings.flake8_quotes;
fn docstring(checker: &mut Checker, range: TextRange) {
let quotes_settings = &checker.settings.flake8_quotes;

let text = locator.slice(range);
let text = checker.locator().slice(range);
let trivia: Trivia = text.into();

if trivia
.raw_text
.contains(good_docstring(quotes_settings.docstring_quotes))
{
return None;
return;
}

let mut diagnostic = Diagnostic::new(
Expand All @@ -264,23 +259,17 @@ fn docstring(locator: &Locator, range: TextRange, settings: &LinterSettings) ->
fixed_contents,
range,
)));
Some(diagnostic)
checker.diagnostics.push(diagnostic);
}

/// Q000, Q001
fn strings(
locator: &Locator,
sequence: &[TextRange],
settings: &LinterSettings,
) -> Vec<Diagnostic> {
let mut diagnostics = vec![];

let quotes_settings = &settings.flake8_quotes;
fn strings(checker: &mut Checker, sequence: &[TextRange]) {
let quotes_settings = &checker.settings.flake8_quotes;

let trivia = sequence
.iter()
.map(|range| {
let text = locator.slice(*range);
let text = checker.locator().slice(*range);
let trivia: Trivia = text.into();
trivia
})
Expand Down Expand Up @@ -339,7 +328,7 @@ fn strings(
fixed_contents,
*range,
)));
diagnostics.push(diagnostic);
checker.diagnostics.push(diagnostic);
} else if trivia.last_quote_char != quotes_settings.inline_quotes.as_char()
// If we're not using the preferred type, only allow use to avoid escapes.
&& !relax_quote
Expand All @@ -362,120 +351,24 @@ fn strings(
fixed_contents,
*range,
)));
diagnostics.push(diagnostic);
}
}

diagnostics
}

/// A builder for the f-string range.
///
/// For now, this is limited to the outermost f-string and doesn't support
/// nested f-strings.
#[derive(Debug, Default)]
struct FStringRangeBuilder {
start_location: TextSize,
end_location: TextSize,
nesting: u32,
}

impl FStringRangeBuilder {
fn visit_token(&mut self, token: &Tok, range: TextRange) {
match token {
Tok::FStringStart(_) => {
if self.nesting == 0 {
self.start_location = range.start();
}
self.nesting += 1;
}
Tok::FStringEnd => {
self.nesting = self.nesting.saturating_sub(1);
if self.nesting == 0 {
self.end_location = range.end();
}
}
_ => {}
checker.diagnostics.push(diagnostic);
}
}

/// Returns `true` if the lexer is currently inside of an f-string.
///
/// It'll return `false` once the `FStringEnd` token for the outermost
/// f-string is visited.
const fn in_fstring(&self) -> bool {
self.nesting > 0
}

/// Returns the complete range of the previously visited f-string.
///
/// This method should only be called once the lexer is outside of any
/// f-string; otherwise it might return an invalid range.
///
/// It doesn't consume the builder because there can be multiple f-strings
/// throughout the source code.
fn finish(&self) -> TextRange {
debug_assert!(!self.in_fstring());
TextRange::new(self.start_location, self.end_location)
}
}

/// Generate `flake8-quote` diagnostics from a token stream.
pub(crate) fn check_string_quotes(
diagnostics: &mut Vec<Diagnostic>,
lxr: &[LexResult],
locator: &Locator,
settings: &LinterSettings,
) {
// Keep track of sequences of strings, which represent implicit string
// concatenation, and should thus be handled as a single unit.
let mut sequence = vec![];
let mut state_machine = StateMachine::default();
let mut fstring_range_builder = FStringRangeBuilder::default();
for &(ref tok, range) in lxr.iter().flatten() {
fstring_range_builder.visit_token(tok, range);
if fstring_range_builder.in_fstring() {
continue;
pub(crate) fn check_string_quotes(checker: &mut Checker, string_like: StringLike) {
let ranges: Vec<TextRange> = match string_like {
StringLike::String(node) => node.value.iter().map(Ranged::range).collect(),
StringLike::Bytes(node) => node.value.iter().map(Ranged::range).collect(),
StringLike::FString(node) => node.value.iter().map(Ranged::range).collect(),
};

if checker.semantic().in_docstring() {
for range in ranges {
docstring(checker, range);
}

let is_docstring = state_machine.consume(tok);

// If this is a docstring, consume the existing sequence, then consume the
// docstring, then move on.
if is_docstring {
if !sequence.is_empty() {
diagnostics.extend(strings(locator, &sequence, settings));
sequence.clear();
}
if let Some(diagnostic) = docstring(locator, range, settings) {
diagnostics.push(diagnostic);
}
} else {
match tok {
Tok::String { .. } => {
// If this is a string, add it to the sequence.
sequence.push(range);
}
Tok::FStringEnd => {
// If this is the end of an f-string, add the entire f-string
// range to the sequence.
sequence.push(fstring_range_builder.finish());
}
Tok::Comment(..) | Tok::NonLogicalNewline => continue,
_ => {
// Otherwise, consume the sequence.
if !sequence.is_empty() {
diagnostics.extend(strings(locator, &sequence, settings));
sequence.clear();
}
}
}
}
}

// If we have an unterminated sequence, consume it.
if !sequence.is_empty() {
diagnostics.extend(strings(locator, &sequence, settings));
sequence.clear();
} else {
strings(checker, &ranges);
}
}

0 comments on commit 1e24c01

Please sign in to comment.