diff --git a/crates/ruff_linter/src/checkers/ast/analyze/string_like.rs b/crates/ruff_linter/src/checkers/ast/analyze/string_like.rs
index 5af18e67adf54d..52f481a91a0a84 100644
--- a/crates/ruff_linter/src/checkers/ast/analyze/string_like.rs
+++ b/crates/ruff_linter/src/checkers/ast/analyze/string_like.rs
@@ -2,7 +2,7 @@ use ruff_python_ast::StringLike;
 
 use crate::checkers::ast::Checker;
 use crate::codes::Rule;
-use crate::rules::{flake8_bandit, flake8_pyi, ruff};
+use crate::rules::{flake8_bandit, flake8_pyi, flake8_quotes, ruff};
 
 /// Run lint rules over a [`StringLike`] syntax nodes.
 pub(crate) fn string_like(string_like: StringLike, checker: &mut Checker) {
@@ -23,4 +23,11 @@ pub(crate) fn string_like(string_like: StringLike, checker: &mut Checker) {
             flake8_pyi::rules::string_or_bytes_too_long(checker, string_like);
         }
     }
+    if checker.any_enabled(&[
+        Rule::BadQuotesInlineString,
+        Rule::BadQuotesMultilineString,
+        Rule::BadQuotesDocstring,
+    ]) {
+        flake8_quotes::rules::check_string_quotes(checker, string_like);
+    }
 }
diff --git a/crates/ruff_linter/src/checkers/tokens.rs b/crates/ruff_linter/src/checkers/tokens.rs
index 762f4cc463cc10..f4e7ee87bce3dc 100644
--- a/crates/ruff_linter/src/checkers/tokens.rs
+++ b/crates/ruff_linter/src/checkers/tokens.rs
@@ -130,14 +130,6 @@ pub(crate) fn check_tokens(
         flake8_quotes::rules::unnecessary_escaped_quote(&mut diagnostics, tokens, locator);
     }
 
-    if settings.rules.any_enabled(&[
-        Rule::BadQuotesInlineString,
-        Rule::BadQuotesMultilineString,
-        Rule::BadQuotesDocstring,
-    ]) {
-        flake8_quotes::rules::check_string_quotes(&mut diagnostics, tokens, locator, settings);
-    }
-
     if settings.rules.any_enabled(&[
         Rule::SingleLineImplicitStringConcatenation,
         Rule::MultiLineImplicitStringConcatenation,
diff --git a/crates/ruff_linter/src/registry.rs b/crates/ruff_linter/src/registry.rs
index e85f14d7116af7..0ae6cd42be21cb 100644
--- a/crates/ruff_linter/src/registry.rs
+++ b/crates/ruff_linter/src/registry.rs
@@ -257,9 +257,6 @@ impl Rule {
             | Rule::TrailingWhitespace => LintSource::PhysicalLines,
             Rule::AmbiguousUnicodeCharacterComment
             | Rule::AvoidableEscapedQuote
-            | Rule::BadQuotesDocstring
-            | Rule::BadQuotesInlineString
-            | Rule::BadQuotesMultilineString
             | Rule::BlanketNOQA
             | Rule::BlanketTypeIgnore
             | Rule::BlankLineAfterDecorator
diff --git a/crates/ruff_linter/src/rules/flake8_quotes/rules/check_string_quotes.rs b/crates/ruff_linter/src/rules/flake8_quotes/rules/check_string_quotes.rs
index fdad2d1cc12a44..e14ec91fac8f49 100644
--- a/crates/ruff_linter/src/rules/flake8_quotes/rules/check_string_quotes.rs
+++ b/crates/ruff_linter/src/rules/flake8_quotes/rules/check_string_quotes.rs
@@ -1,14 +1,9 @@
-use ruff_python_parser::lexer::LexResult;
-use ruff_python_parser::Tok;
-use ruff_text_size::{TextRange, TextSize};
-
 use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
 use ruff_macros::{derive_message_formats, violation};
-use ruff_source_file::Locator;
-
-use crate::lex::docstring_detection::StateMachine;
+use ruff_python_ast::StringLike;
+use ruff_text_size::{Ranged, TextRange};
 
-use crate::settings::LinterSettings;
+use crate::checkers::ast::Checker;
 
 use super::super::settings::Quote;
 
@@ -232,17 +227,17 @@ impl<'a> From<&'a str> for Trivia<'a> {
 }
 
 /// Q002
-fn docstring(locator: &Locator, range: TextRange, settings: &LinterSettings) -> Option<Diagnostic> {
-    let quotes_settings = &settings.flake8_quotes;
+fn docstring(checker: &mut Checker, range: TextRange) {
+    let quotes_settings = &checker.settings.flake8_quotes;
 
-    let text = locator.slice(range);
+    let text = checker.locator().slice(range);
     let trivia: Trivia = text.into();
 
     if trivia
         .raw_text
         .contains(good_docstring(quotes_settings.docstring_quotes))
     {
-        return None;
+        return;
     }
 
     let mut diagnostic = Diagnostic::new(
@@ -264,23 +259,17 @@ fn docstring(locator: &Locator, range: TextRange, settings: &LinterSettings) -> Option<Diagnostic> {
         fixed_contents,
         range,
     )));
-    Some(diagnostic)
+    checker.diagnostics.push(diagnostic);
 }
 
 /// Q000, Q001
-fn strings(
-    locator: &Locator,
-    sequence: &[TextRange],
-    settings: &LinterSettings,
-) -> Vec<Diagnostic> {
-    let mut diagnostics = vec![];
-
-    let quotes_settings = &settings.flake8_quotes;
+fn strings(checker: &mut Checker, sequence: &[TextRange]) {
+    let quotes_settings = &checker.settings.flake8_quotes;
 
     let trivia = sequence
         .iter()
         .map(|range| {
-            let text = locator.slice(*range);
+            let text = checker.locator().slice(*range);
             let trivia: Trivia = text.into();
             trivia
         })
         .collect::<Vec<_>>();
@@ -339,7 +328,7 @@ fn strings(
                 fixed_contents,
                 *range,
             )));
-            diagnostics.push(diagnostic);
+            checker.diagnostics.push(diagnostic);
         } else if trivia.last_quote_char != quotes_settings.inline_quotes.as_char()
             // If we're not using the preferred type, only allow use to avoid escapes.
             && !relax_quote
@@ -362,120 +351,24 @@ fn strings(
                 fixed_contents,
                 *range,
             )));
-            diagnostics.push(diagnostic);
-        }
-    }
-
-    diagnostics
-}
-
-/// A builder for the f-string range.
-///
-/// For now, this is limited to the outermost f-string and doesn't support
-/// nested f-strings.
-#[derive(Debug, Default)]
-struct FStringRangeBuilder {
-    start_location: TextSize,
-    end_location: TextSize,
-    nesting: u32,
-}
-
-impl FStringRangeBuilder {
-    fn visit_token(&mut self, token: &Tok, range: TextRange) {
-        match token {
-            Tok::FStringStart(_) => {
-                if self.nesting == 0 {
-                    self.start_location = range.start();
-                }
-                self.nesting += 1;
-            }
-            Tok::FStringEnd => {
-                self.nesting = self.nesting.saturating_sub(1);
-                if self.nesting == 0 {
-                    self.end_location = range.end();
-                }
-            }
-            _ => {}
+            checker.diagnostics.push(diagnostic);
         }
     }
-
-    /// Returns `true` if the lexer is currently inside of a f-string.
-    ///
-    /// It'll return `false` once the `FStringEnd` token for the outermost
-    /// f-string is visited.
-    const fn in_fstring(&self) -> bool {
-        self.nesting > 0
-    }
-
-    /// Returns the complete range of the previously visited f-string.
-    ///
-    /// This method should only be called once the lexer is outside of any
-    /// f-string otherwise it might return an invalid range.
-    ///
-    /// It doesn't consume the builder because there can be multiple f-strings
-    /// throughout the source code.
-    fn finish(&self) -> TextRange {
-        debug_assert!(!self.in_fstring());
-        TextRange::new(self.start_location, self.end_location)
-    }
 }
 
 /// Generate `flake8-quote` diagnostics from a token stream.
-pub(crate) fn check_string_quotes(
-    diagnostics: &mut Vec<Diagnostic>,
-    lxr: &[LexResult],
-    locator: &Locator,
-    settings: &LinterSettings,
-) {
-    // Keep track of sequences of strings, which represent implicit string
-    // concatenation, and should thus be handled as a single unit.
-    let mut sequence = vec![];
-    let mut state_machine = StateMachine::default();
-    let mut fstring_range_builder = FStringRangeBuilder::default();
-    for &(ref tok, range) in lxr.iter().flatten() {
-        fstring_range_builder.visit_token(tok, range);
-        if fstring_range_builder.in_fstring() {
-            continue;
+pub(crate) fn check_string_quotes(checker: &mut Checker, string_like: StringLike) {
+    let ranges: Vec<TextRange> = match string_like {
+        StringLike::String(node) => node.value.iter().map(Ranged::range).collect(),
+        StringLike::Bytes(node) => node.value.iter().map(Ranged::range).collect(),
+        StringLike::FString(node) => node.value.iter().map(Ranged::range).collect(),
+    };
+
+    if checker.semantic().in_docstring() {
+        for range in ranges {
+            docstring(checker, range);
         }
-
-        let is_docstring = state_machine.consume(tok);
-
-        // If this is a docstring, consume the existing sequence, then consume the
-        // docstring, then move on.
-        if is_docstring {
-            if !sequence.is_empty() {
-                diagnostics.extend(strings(locator, &sequence, settings));
-                sequence.clear();
-            }
-            if let Some(diagnostic) = docstring(locator, range, settings) {
-                diagnostics.push(diagnostic);
-            }
-        } else {
-            match tok {
-                Tok::String { .. } => {
-                    // If this is a string, add it to the sequence.
-                    sequence.push(range);
-                }
-                Tok::FStringEnd => {
-                    // If this is the end of an f-string, add the entire f-string
-                    // range to the sequence.
-                    sequence.push(fstring_range_builder.finish());
-                }
-                Tok::Comment(..) | Tok::NonLogicalNewline => continue,
-                _ => {
-                    // Otherwise, consume the sequence.
-                    if !sequence.is_empty() {
-                        diagnostics.extend(strings(locator, &sequence, settings));
-                        sequence.clear();
-                    }
-                }
-            }
-        }
-    }
-
-    // If we have an unterminated sequence, consume it.
-    if !sequence.is_empty() {
-        diagnostics.extend(strings(locator, &sequence, settings));
-        sequence.clear();
+    } else {
+        strings(checker, &ranges);
     }
 }
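
Note (not part of the patch above): the following is a minimal, self-contained Rust sketch of the dispatch shape the new AST-based check_string_quotes entry point uses, i.e. collect the range of every part of a string-like node, then route docstrings to the Q002 check and everything else, including implicitly concatenated parts, to the Q000/Q001 check as a single unit. The TextRange, StringLike, and Checker types below are simplified stand-ins for illustration, not ruff's real types, and the "checks" are placeholders.

    // Illustrative sketch only; `TextRange`, `StringLike`, and `Checker` are
    // simplified stand-ins, not ruff's real types.

    /// Stand-in for a source span.
    #[derive(Debug, Clone, Copy)]
    struct TextRange {
        start: u32,
        end: u32,
    }

    /// Stand-in for ruff's `StringLike`: a string, bytes, or f-string expression,
    /// each made up of one or more implicitly concatenated parts.
    enum StringLike {
        String(Vec<TextRange>),
        Bytes(Vec<TextRange>),
        FString(Vec<TextRange>),
    }

    /// Stand-in for the AST checker: knows whether the current node is a docstring
    /// and accumulates diagnostics (as plain strings here).
    struct Checker {
        in_docstring: bool,
        diagnostics: Vec<String>,
    }

    impl Checker {
        /// Same dispatch shape as the patched `check_string_quotes`: gather the range
        /// of every part, then route docstrings to the Q002 check and everything else
        /// to the Q000/Q001 check as a single unit.
        fn check_string_quotes(&mut self, string_like: &StringLike) {
            let ranges: Vec<TextRange> = match string_like {
                StringLike::String(parts)
                | StringLike::Bytes(parts)
                | StringLike::FString(parts) => parts.clone(),
            };

            if self.in_docstring {
                for range in ranges {
                    self.docstring(range);
                }
            } else {
                self.strings(&ranges);
            }
        }

        /// Placeholder for the Q002 (docstring quotes) check.
        fn docstring(&mut self, range: TextRange) {
            self.diagnostics
                .push(format!("Q002 candidate at {}..{}", range.start, range.end));
        }

        /// Placeholder for the Q000/Q001 checks; the parts of an implicitly
        /// concatenated string arrive together so they are handled as one unit.
        fn strings(&mut self, ranges: &[TextRange]) {
            for range in ranges {
                self.diagnostics
                    .push(format!("Q000/Q001 candidate at {}..{}", range.start, range.end));
            }
        }
    }

    fn main() {
        let mut checker = Checker {
            in_docstring: false,
            diagnostics: Vec::new(),
        };
        checker.check_string_quotes(&StringLike::String(vec![
            TextRange { start: 0, end: 5 },
            TextRange { start: 6, end: 11 },
        ]));
        println!("{:?}", checker.diagnostics);
    }

In the sketch, strings receives all part ranges at once. That mirrors why the patched entry point collects ranges before dispatching: as the removed token-based comment put it, implicit string concatenation should be handled as a single unit rather than part by part.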