Skip to content

Commit

Permalink
Move blanket noqa and blanket type: ignore rules into token-based checker (#5996)
Browse files Browse the repository at this point in the history

Closes #5981.
  • Loading branch information
charliermarsh committed Jul 23, 2023
1 parent 71f1643 commit 1776cbd
Show file tree
Hide file tree
Showing 6 changed files with 93 additions and 150 deletions.
11 changes: 0 additions & 11 deletions crates/ruff/src/checkers/physical_lines.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ use crate::rules::pycodestyle::rules::{
doc_line_too_long, line_too_long, mixed_spaces_and_tabs, no_newline_at_end_of_file,
tab_indentation, trailing_whitespace,
};
use crate::rules::pygrep_hooks::rules::{blanket_noqa, blanket_type_ignore};
use crate::rules::pylint;
use crate::rules::pyupgrade::rules::unnecessary_coding_comment;
use crate::settings::Settings;
Expand All @@ -33,13 +32,11 @@ pub(crate) fn check_physical_lines(
let mut diagnostics: Vec<Diagnostic> = vec![];
let mut has_any_shebang = false;

let enforce_blanket_noqa = settings.rules.enabled(Rule::BlanketNOQA);
let enforce_shebang_not_executable = settings.rules.enabled(Rule::ShebangNotExecutable);
let enforce_shebang_missing = settings.rules.enabled(Rule::ShebangMissingExecutableFile);
let enforce_shebang_whitespace = settings.rules.enabled(Rule::ShebangLeadingWhitespace);
let enforce_shebang_newline = settings.rules.enabled(Rule::ShebangNotFirstLine);
let enforce_shebang_python = settings.rules.enabled(Rule::ShebangMissingPython);
let enforce_blanket_type_ignore = settings.rules.enabled(Rule::BlanketTypeIgnore);
let enforce_doc_line_too_long = settings.rules.enabled(Rule::DocLineTooLong);
let enforce_line_too_long = settings.rules.enabled(Rule::LineTooLong);
let enforce_no_newline_at_end_of_file = settings.rules.enabled(Rule::MissingNewlineAtEndOfFile);
Expand Down Expand Up @@ -73,14 +70,6 @@ pub(crate) fn check_physical_lines(
}
}

if enforce_blanket_type_ignore {
blanket_type_ignore(&mut diagnostics, &line);
}

if enforce_blanket_noqa {
blanket_noqa(&mut diagnostics, &line);
}

if enforce_shebang_missing
|| enforce_shebang_not_executable
|| enforce_shebang_whitespace
Expand Down
136 changes: 56 additions & 80 deletions crates/ruff/src/checkers/tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ use crate::registry::{AsRule, Rule};
use crate::rules::ruff::rules::Context;
use crate::rules::{
eradicate, flake8_commas, flake8_fixme, flake8_implicit_str_concat, flake8_pyi, flake8_quotes,
flake8_todos, pycodestyle, pylint, pyupgrade, ruff,
flake8_todos, pycodestyle, pygrep_hooks, pylint, pyupgrade, ruff,
};
use crate::settings::Settings;

Expand All @@ -25,69 +25,22 @@ pub(crate) fn check_tokens(
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];

let enforce_ambiguous_unicode_character = settings.rules.any_enabled(&[
if settings.rules.enabled(Rule::BlanketNOQA) {
pygrep_hooks::rules::blanket_noqa(&mut diagnostics, indexer, locator);
}

if settings.rules.enabled(Rule::BlanketTypeIgnore) {
pygrep_hooks::rules::blanket_type_ignore(&mut diagnostics, indexer, locator);
}

if settings.rules.any_enabled(&[
Rule::AmbiguousUnicodeCharacterString,
Rule::AmbiguousUnicodeCharacterDocstring,
Rule::AmbiguousUnicodeCharacterComment,
]);
let enforce_invalid_string_character = settings.rules.any_enabled(&[
Rule::InvalidCharacterBackspace,
Rule::InvalidCharacterSub,
Rule::InvalidCharacterEsc,
Rule::InvalidCharacterNul,
Rule::InvalidCharacterZeroWidthSpace,
]);
let enforce_quotes = settings.rules.any_enabled(&[
Rule::BadQuotesInlineString,
Rule::BadQuotesMultilineString,
Rule::BadQuotesDocstring,
Rule::AvoidableEscapedQuote,
]);
let enforce_commented_out_code = settings.rules.enabled(Rule::CommentedOutCode);
let enforce_compound_statements = settings.rules.any_enabled(&[
Rule::MultipleStatementsOnOneLineColon,
Rule::MultipleStatementsOnOneLineSemicolon,
Rule::UselessSemicolon,
]);
let enforce_invalid_escape_sequence = settings.rules.enabled(Rule::InvalidEscapeSequence);
let enforce_implicit_string_concatenation = settings.rules.any_enabled(&[
Rule::SingleLineImplicitStringConcatenation,
Rule::MultiLineImplicitStringConcatenation,
]);

let enforce_trailing_comma = settings.rules.any_enabled(&[
Rule::MissingTrailingComma,
Rule::TrailingCommaOnBareTuple,
Rule::ProhibitedTrailingComma,
]);
let enforce_extraneous_parenthesis = settings.rules.enabled(Rule::ExtraneousParentheses);
let enforce_type_comment_in_stub = settings.rules.enabled(Rule::TypeCommentInStub);

// Combine flake8_todos and flake8_fixme so that we can reuse detected [`TodoDirective`]s.
let enforce_todos = settings.rules.any_enabled(&[
Rule::InvalidTodoTag,
Rule::MissingTodoAuthor,
Rule::MissingTodoLink,
Rule::MissingTodoColon,
Rule::MissingTodoDescription,
Rule::InvalidTodoCapitalization,
Rule::MissingSpaceAfterTodoColon,
Rule::LineContainsFixme,
Rule::LineContainsXxx,
Rule::LineContainsTodo,
Rule::LineContainsHack,
]);

// RUF001, RUF002, RUF003
if enforce_ambiguous_unicode_character {
]) {
let mut state_machine = StateMachine::default();
for &(ref tok, range) in tokens.iter().flatten() {
let is_docstring = if enforce_ambiguous_unicode_character {
state_machine.consume(tok)
} else {
false
};

let is_docstring = state_machine.consume(tok);
if matches!(tok, Tok::String { .. } | Tok::Comment(_)) {
ruff::rules::ambiguous_unicode_character(
&mut diagnostics,
Expand All @@ -108,13 +61,11 @@ pub(crate) fn check_tokens(
}
}

// ERA001
if enforce_commented_out_code {
if settings.rules.enabled(Rule::CommentedOutCode) {
eradicate::rules::commented_out_code(&mut diagnostics, locator, indexer, settings);
}

// W605
if enforce_invalid_escape_sequence {
if settings.rules.enabled(Rule::InvalidEscapeSequence) {
for (tok, range) in tokens.iter().flatten() {
if tok.is_string() {
pycodestyle::rules::invalid_escape_sequence(
Expand All @@ -127,17 +78,25 @@ pub(crate) fn check_tokens(
}
}

// PLE2510, PLE2512, PLE2513
if enforce_invalid_string_character {
if settings.rules.any_enabled(&[
Rule::InvalidCharacterBackspace,
Rule::InvalidCharacterSub,
Rule::InvalidCharacterEsc,
Rule::InvalidCharacterNul,
Rule::InvalidCharacterZeroWidthSpace,
]) {
for (tok, range) in tokens.iter().flatten() {
if tok.is_string() {
pylint::rules::invalid_string_characters(&mut diagnostics, *range, locator);
}
}
}

// E701, E702, E703
if enforce_compound_statements {
if settings.rules.any_enabled(&[
Rule::MultipleStatementsOnOneLineColon,
Rule::MultipleStatementsOnOneLineSemicolon,
Rule::UselessSemicolon,
]) {
pycodestyle::rules::compound_statements(
&mut diagnostics,
tokens,
Expand All @@ -147,13 +106,19 @@ pub(crate) fn check_tokens(
);
}

// Q001, Q002, Q003
if enforce_quotes {
if settings.rules.any_enabled(&[
Rule::BadQuotesInlineString,
Rule::BadQuotesMultilineString,
Rule::BadQuotesDocstring,
Rule::AvoidableEscapedQuote,
]) {
flake8_quotes::rules::from_tokens(&mut diagnostics, tokens, locator, settings);
}

// ISC001, ISC002
if enforce_implicit_string_concatenation {
if settings.rules.any_enabled(&[
Rule::SingleLineImplicitStringConcatenation,
Rule::MultiLineImplicitStringConcatenation,
]) {
flake8_implicit_str_concat::rules::implicit(
&mut diagnostics,
tokens,
Expand All @@ -162,24 +127,35 @@ pub(crate) fn check_tokens(
);
}

// COM812, COM818, COM819
if enforce_trailing_comma {
if settings.rules.any_enabled(&[
Rule::MissingTrailingComma,
Rule::TrailingCommaOnBareTuple,
Rule::ProhibitedTrailingComma,
]) {
flake8_commas::rules::trailing_commas(&mut diagnostics, tokens, locator, settings);
}

// UP034
if enforce_extraneous_parenthesis {
if settings.rules.enabled(Rule::ExtraneousParentheses) {
pyupgrade::rules::extraneous_parentheses(&mut diagnostics, tokens, locator, settings);
}

// PYI033
if enforce_type_comment_in_stub && is_stub {
if is_stub && settings.rules.enabled(Rule::TypeCommentInStub) {
flake8_pyi::rules::type_comment_in_stub(&mut diagnostics, locator, indexer);
}

// TD001, TD002, TD003, TD004, TD005, TD006, TD007
// FIX001, FIX002, FIX003, FIX004
if enforce_todos {
if settings.rules.any_enabled(&[
Rule::InvalidTodoTag,
Rule::MissingTodoAuthor,
Rule::MissingTodoLink,
Rule::MissingTodoColon,
Rule::MissingTodoDescription,
Rule::InvalidTodoCapitalization,
Rule::MissingSpaceAfterTodoColon,
Rule::LineContainsFixme,
Rule::LineContainsXxx,
Rule::LineContainsTodo,
Rule::LineContainsHack,
]) {
let todo_comments: Vec<TodoComment> = indexer
.comment_ranges()
.iter()
Expand Down
7 changes: 4 additions & 3 deletions crates/ruff/src/registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -238,9 +238,8 @@ impl Rule {
match self {
Rule::InvalidPyprojectToml => LintSource::PyprojectToml,
Rule::UnusedNOQA => LintSource::Noqa,
Rule::BlanketNOQA
| Rule::BlanketTypeIgnore
| Rule::DocLineTooLong

Rule::DocLineTooLong
| Rule::LineTooLong
| Rule::MixedSpacesAndTabs
| Rule::MissingNewlineAtEndOfFile
Expand All @@ -262,6 +261,8 @@ impl Rule {
| Rule::BadQuotesDocstring
| Rule::BadQuotesInlineString
| Rule::BadQuotesMultilineString
| Rule::BlanketNOQA
| Rule::BlanketTypeIgnore
| Rule::CommentedOutCode
| Rule::MultiLineImplicitStringConcatenation
| Rule::InvalidCharacterBackspace
Expand Down
30 changes: 14 additions & 16 deletions crates/ruff/src/rules/pygrep_hooks/rules/blanket_noqa.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
use once_cell::sync::Lazy;
use regex::Regex;
use ruff_text_size::{TextLen, TextRange, TextSize};
use rustpython_parser::ast::Ranged;

use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_trivia::Line;
use ruff_python_ast::source_code::{Indexer, Locator};

use crate::noqa::Directive;

/// ## What it does
/// Check for `noqa` annotations that suppress all diagnostics, as opposed to
Expand Down Expand Up @@ -39,18 +39,16 @@ impl Violation for BlanketNOQA {
}
}

static BLANKET_NOQA_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?i)# noqa($|\s|:[^ ])").unwrap());

/// PGH004
pub(crate) fn blanket_noqa(diagnostics: &mut Vec<Diagnostic>, line: &Line) {
if let Some(match_) = BLANKET_NOQA_REGEX.find(line.as_str()) {
diagnostics.push(Diagnostic::new(
BlanketNOQA,
TextRange::at(
line.start() + TextSize::try_from(match_.start()).unwrap(),
match_.as_str().text_len(),
),
));
pub(crate) fn blanket_noqa(
diagnostics: &mut Vec<Diagnostic>,
indexer: &Indexer,
locator: &Locator,
) {
for range in indexer.comment_ranges() {
let line = locator.slice(*range);
if let Ok(Some(Directive::All(all))) = Directive::try_extract(line, range.start()) {
diagnostics.push(Diagnostic::new(BlanketNOQA, all.range()));
}
}
}
31 changes: 19 additions & 12 deletions crates/ruff/src/rules/pygrep_hooks/rules/blanket_type_ignore.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use ruff_text_size::{TextLen, TextRange, TextSize};

use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_trivia::Line;
use ruff_python_ast::source_code::{Indexer, Locator};

/// ## What it does
/// Check for `type: ignore` annotations that suppress all type warnings, as
Expand Down Expand Up @@ -41,17 +41,24 @@ impl Violation for BlanketTypeIgnore {
}

/// PGH003
pub(crate) fn blanket_type_ignore(diagnostics: &mut Vec<Diagnostic>, line: &Line) {
for match_ in TYPE_IGNORE_PATTERN.find_iter(line) {
if let Ok(codes) = parse_type_ignore_tag(line[match_.end()..].trim()) {
if codes.is_empty() {
diagnostics.push(Diagnostic::new(
BlanketTypeIgnore,
TextRange::at(
line.start() + TextSize::try_from(match_.start()).unwrap(),
match_.as_str().text_len(),
),
));
pub(crate) fn blanket_type_ignore(
diagnostics: &mut Vec<Diagnostic>,
indexer: &Indexer,
locator: &Locator,
) {
for range in indexer.comment_ranges() {
let line = locator.slice(*range);
for match_ in TYPE_IGNORE_PATTERN.find_iter(line) {
if let Ok(codes) = parse_type_ignore_tag(line[match_.end()..].trim()) {
if codes.is_empty() {
diagnostics.push(Diagnostic::new(
BlanketTypeIgnore,
TextRange::at(
range.start() + TextSize::try_from(match_.start()).unwrap(),
match_.as_str().text_len(),
),
));
}
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,6 @@ PGH004_0.py:1:8: PGH004 Use specific rule codes when using `noqa`
3 | # noqa
|

PGH004_0.py:2:8: PGH004 Use specific rule codes when using `noqa`
|
1 | x = 1 # noqa
2 | x = 1 # NOQA:F401,W203
| ^^^^^^^^ PGH004
3 | # noqa
4 | # NOQA
|

PGH004_0.py:3:1: PGH004 Use specific rule codes when using `noqa`
|
1 | x = 1 # noqa
Expand All @@ -38,23 +29,4 @@ PGH004_0.py:4:1: PGH004 Use specific rule codes when using `noqa`
6 | # noqa:F401,W203
|

PGH004_0.py:5:1: PGH004 Use specific rule codes when using `noqa`
|
3 | # noqa
4 | # NOQA
5 | # noqa:F401
| ^^^^^^^^ PGH004
6 | # noqa:F401,W203
|

PGH004_0.py:6:1: PGH004 Use specific rule codes when using `noqa`
|
4 | # NOQA
5 | # noqa:F401
6 | # noqa:F401,W203
| ^^^^^^^^ PGH004
7 |
8 | x = 1
|


0 comments on commit 1776cbd

Please sign in to comment.