Support unused variable removal in multi-assignment statements #2786

Merged · 1 commit · Feb 12, 2023
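In short: the F841 autofix previously only handled single-target assignments (`x = 1`); with this change it can also remove an individual unused binding from a multi-assignment such as `toplevel = tt = lexer.get_token()`. A minimal before/after illustration (the `get_token` helper below is a hypothetical stand-in for the fixture's `lexer.get_token()`):

def get_token():
    # Hypothetical stand-in for the fixture's `lexer.get_token()`.
    return "token"


def before():
    # F841: `toplevel` is bound but never used.
    toplevel = tt = get_token()
    return tt


def after():
    # With this change, the autofix rewrites the whole statement,
    # dropping only the unused target and keeping everything else.
    tt = get_token()
    return tt
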
22 changes: 22 additions & 0 deletions crates/ruff/resources/test/fixtures/pyflakes/F841_3.py
@@ -81,3 +81,25 @@ def f():
 def f():
     with Nested(m) as (x, y):
         pass
+
+
+def f():
+    toplevel = tt = lexer.get_token()
+    if not tt:
+        break
+
+
+def f():
+    toplevel = tt = lexer.get_token()
+
+
+def f():
+    toplevel = (a, b) = lexer.get_token()
+
+
+def f():
+    (a, b) = toplevel = lexer.get_token()
+
+
+def f():
+    toplevel = tt = 1
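
For readability, the fixes that the updated snapshots further below record for these new cases boil down to the following (the replacement strings are copied from the snapshot fix contents; the list itself is only an illustrative summary):

# (original statement, unused binding, statement after the autofix)
EXPECTED_FIXES = [
    ("toplevel = tt = lexer.get_token()", "toplevel", "tt = lexer.get_token()"),
    ("toplevel = tt = lexer.get_token()", "tt", "toplevel = lexer.get_token()"),
    ("toplevel = (a, b) = lexer.get_token()", "toplevel", "(a, b) = lexer.get_token()"),
    ("(a, b) = toplevel = lexer.get_token()", "toplevel", "(a, b) = lexer.get_token()"),
    ("toplevel = tt = 1", "toplevel", "tt = 1"),
    ("toplevel = tt = 1", "tt", "toplevel = 1"),
]

for original, unused, fixed in EXPECTED_FIXES:
    print(f"{original!r}: drop {unused!r} -> {fixed!r}")
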
107 changes: 68 additions & 39 deletions crates/ruff/src/rules/pyflakes/rules/unused_variable.rs
@@ -35,18 +35,18 @@ impl AlwaysAutofixableViolation for UnusedVariable {
 
 /// Return the start and end [`Location`] of the token after the next match of the predicate,
 /// skipping over any bracketed expressions.
-fn match_token_after<F>(stmt: &Stmt, locator: &Locator, f: F) -> Range
+fn match_token_after<F, T>(located: &Located<T>, locator: &Locator, f: F) -> Range
 where
     F: Fn(Tok) -> bool,
 {
-    let contents = locator.slice_source_code_range(&Range::from_located(stmt));
+    let contents = locator.slice_source_code_at(located.location);
 
     // Track the bracket depth.
     let mut par_count = 0;
     let mut sqb_count = 0;
     let mut brace_count = 0;
 
-    for ((_, tok, _), (start, _, end)) in lexer::make_tokenizer_located(contents, stmt.location)
+    for ((_, tok, _), (start, _, end)) in lexer::make_tokenizer_located(contents, located.location)
         .flatten()
         .tuple_windows()
     {
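
The hunk above generalizes `match_token_after` from `&Stmt` to any `&Located<T>`, so it can be invoked on an individual assignment target rather than only on the whole statement: it lexes from the node's start, tracks bracket depth, and returns the range of the token that follows the first match of the predicate outside any brackets. A loose Python analogue of that token scan, using the standard `tokenize` module instead of the RustPython lexer (illustrative only):

import io
import tokenize


def location_after_equals(source: str) -> tuple[int, int]:
    """Return (row, col) of the token following the first `=` that sits
    outside any brackets, roughly what `match_token_after` computes for a
    `tok == Tok::Equal` predicate."""
    depth = 0
    seen_equals = False
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if seen_equals:
            # The start of the token right after the matched `=`.
            return tok.start
        if tok.type == tokenize.OP and tok.string in "([{":
            depth += 1
        elif tok.type == tokenize.OP and tok.string in ")]}":
            depth -= 1
        elif tok.type == tokenize.OP and tok.string == "=" and depth == 0:
            seen_equals = True
    raise ValueError("no top-level `=` found")


# `(a, b)` is skipped as a bracketed expression, so the answer is the start
# of `toplevel`: row 1, column 9.
print(location_after_equals("(a, b) = toplevel = lexer.get_token()\n"))
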
@@ -168,43 +168,72 @@ fn remove_unused_variable(
 ) -> Option<(DeletionKind, Fix)> {
     // First case: simple assignment (`x = 1`)
     if let StmtKind::Assign { targets, value, .. } = &stmt.node {
-        if targets.len() == 1 && matches!(targets[0].node, ExprKind::Name { .. }) {
-            return if contains_effect(checker, value) {
-                // If the expression is complex (`x = foo()`), remove the assignment,
-                // but preserve the right-hand side.
-                Some((
-                    DeletionKind::Partial,
-                    Fix::deletion(
-                        stmt.location,
-                        match_token_after(stmt, checker.locator, |tok| tok == Tok::Equal).location,
-                    ),
-                ))
-            } else {
-                // If (e.g.) assigning to a constant (`x = 1`), delete the entire statement.
-                let parent = checker
-                    .child_to_parent
-                    .get(&RefEquality(stmt))
-                    .map(std::convert::Into::into);
-                let deleted: Vec<&Stmt> = checker
-                    .deletions
-                    .iter()
-                    .map(std::convert::Into::into)
-                    .collect();
-                match delete_stmt(
-                    stmt,
-                    parent,
-                    &deleted,
-                    checker.locator,
-                    checker.indexer,
-                    checker.stylist,
-                ) {
-                    Ok(fix) => Some((DeletionKind::Whole, fix)),
-                    Err(err) => {
-                        error!("Failed to delete unused variable: {}", err);
-                        None
+        if let Some(target) = targets.iter().find(|target| {
+            range.location == target.location && range.end_location == target.end_location.unwrap()
+        }) {
+            if matches!(target.node, ExprKind::Name { .. }) {
+                return if targets.len() > 1 {
+                    // Construct a deletion by concatenating everything before the target to
+                    // everything after it. This ensures that our edit spans the entire statement,
+                    // which in turn ensures that we only apply one edit per pass.
+                    Some((
+                        DeletionKind::Partial,
+                        Fix::replacement(
+                            format!(
+                                "{}{}",
+                                checker.locator.slice_source_code_range(&Range::new(
+                                    stmt.location,
+                                    target.location
+                                )),
+                                checker.locator.slice_source_code_range(&Range::new(
+                                    match_token_after(target, checker.locator, |tok| tok
+                                        == Tok::Equal)
+                                    .location,
+                                    stmt.end_location.unwrap()
+                                ))
+                            ),
+                            stmt.location,
+                            stmt.end_location.unwrap(),
+                        ),
+                    ))
+                } else if contains_effect(checker, value) {
+                    // If the expression is complex (`x = foo()`), remove the assignment,
+                    // but preserve the right-hand side.
+                    Some((
+                        DeletionKind::Partial,
+                        Fix::deletion(
+                            target.location,
+                            match_token_after(target, checker.locator, |tok| tok == Tok::Equal)
+                                .location,
+                        ),
+                    ))
+                } else {
+                    // If (e.g.) assigning to a constant (`x = 1`), delete the entire statement.
+                    let parent = checker
+                        .child_to_parent
+                        .get(&RefEquality(stmt))
+                        .map(std::convert::Into::into);
+                    let deleted: Vec<&Stmt> = checker
+                        .deletions
+                        .iter()
+                        .map(std::convert::Into::into)
+                        .collect();
+                    match delete_stmt(
+                        stmt,
+                        parent,
+                        &deleted,
+                        checker.locator,
+                        checker.indexer,
+                        checker.stylist,
+                    ) {
+                        Ok(fix) => Some((DeletionKind::Whole, fix)),
+                        Err(err) => {
+                            error!("Failed to delete unused variable: {}", err);
+                            None
+                        }
                     }
-                }
-            };
+                };
+            }
         }
     }
 
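For the multi-target branch, the fix is built by concatenating everything before the unused target with everything after the `=` that follows it, so the resulting replacement spans the whole statement (which, per the comment above, keeps the fixer to one applied edit per pass). A simplified Python-level sketch of that construction, assuming the target's offsets are already known and ignoring bracketed targets:

def remove_target(statement: str, target_start: int, target_end: int) -> str:
    """Drop one assignment target by joining the text before it with the
    text after the `=` that follows it (a simplified sketch of the
    `Fix::replacement` constructed above)."""
    prefix = statement[:target_start]
    rest = statement[target_end:]
    # Skip past the `=` that belonged to the removed target.
    suffix = rest[rest.index("=") + 1:].lstrip()
    return prefix + suffix


# These match the fix contents recorded in the snapshots below:
print(remove_target("toplevel = tt = lexer.get_token()", 0, 8))
# -> "tt = lexer.get_token()"
print(remove_target("(a, b) = toplevel = lexer.get_token()", 9, 17))
# -> "(a, b) = lexer.get_token()"

The real implementation locates that `=` with `match_token_after`, so bracketed expressions inside the removed span are handled correctly.
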
@@ -1,5 +1,5 @@
 ---
-source: src/rules/pyflakes/mod.rs
+source: crates/ruff/src/rules/pyflakes/mod.rs
 expression: diagnostics
 ---
 - kind:
@@ -90,7 +90,15 @@ expression: diagnostics
   end_location:
     row: 26
     column: 16
-  fix: ~
+  fix:
+    content:
+      - "(x, y) = bar"
+    location:
+      row: 26
+      column: 4
+    end_location:
+      row: 26
+      column: 22
   parent: ~
 - kind:
     UnusedVariable:
@@ -1,5 +1,5 @@
 ---
-source: src/rules/pyflakes/mod.rs
+source: crates/ruff/src/rules/pyflakes/mod.rs
 expression: diagnostics
 ---
 - kind:
@@ -33,7 +33,15 @@ expression: diagnostics
   end_location:
     row: 16
     column: 19
-  fix: ~
+  fix:
+    content:
+      - "(x, y) = 1, 2"
+    location:
+      row: 16
+      column: 4
+    end_location:
+      row: 16
+      column: 26
   parent: ~
 - kind:
     UnusedVariable:
@@ -44,7 +52,15 @@ expression: diagnostics
   end_location:
     row: 20
     column: 10
-  fix: ~
+  fix:
+    content:
+      - "(x, y) = 1, 2"
+    location:
+      row: 20
+      column: 4
+    end_location:
+      row: 20
+      column: 26
   parent: ~
 - kind:
     UnusedVariable:
@@ -185,7 +185,15 @@ expression: diagnostics
   end_location:
     row: 33
     column: 22
-  fix: ~
+  fix:
+    content:
+      - "(x2, y2) = (1, 2)"
+    location:
+      row: 33
+      column: 4
+    end_location:
+      row: 33
+      column: 31
   parent: ~
 - kind:
     UnusedVariable:
@@ -196,7 +204,15 @@ expression: diagnostics
   end_location:
     row: 34
     column: 11
-  fix: ~
+  fix:
+    content:
+      - "(x3, y3) = (1, 2)"
+    location:
+      row: 34
+      column: 4
+    end_location:
+      row: 34
+      column: 31
   parent: ~
 - kind:
     UnusedVariable:
@@ -350,4 +366,137 @@ expression: diagnostics
     row: 77
     column: 27
   parent: ~
+- kind:
+    UnusedVariable:
+      name: toplevel
+  location:
+    row: 87
+    column: 4
+  end_location:
+    row: 87
+    column: 12
+  fix:
+    content:
+      - tt = lexer.get_token()
+    location:
+      row: 87
+      column: 4
+    end_location:
+      row: 87
+      column: 37
+  parent: ~
+- kind:
+    UnusedVariable:
+      name: toplevel
+  location:
+    row: 93
+    column: 4
+  end_location:
+    row: 93
+    column: 12
+  fix:
+    content:
+      - tt = lexer.get_token()
+    location:
+      row: 93
+      column: 4
+    end_location:
+      row: 93
+      column: 37
+  parent: ~
+- kind:
+    UnusedVariable:
+      name: tt
+  location:
+    row: 93
+    column: 15
+  end_location:
+    row: 93
+    column: 17
+  fix:
+    content:
+      - toplevel = lexer.get_token()
+    location:
+      row: 93
+      column: 4
+    end_location:
+      row: 93
+      column: 37
+  parent: ~
+- kind:
+    UnusedVariable:
+      name: toplevel
+  location:
+    row: 97
+    column: 4
+  end_location:
+    row: 97
+    column: 12
+  fix:
+    content:
+      - "(a, b) = lexer.get_token()"
+    location:
+      row: 97
+      column: 4
+    end_location:
+      row: 97
+      column: 41
+  parent: ~
+- kind:
+    UnusedVariable:
+      name: toplevel
+  location:
+    row: 101
+    column: 13
+  end_location:
+    row: 101
+    column: 21
+  fix:
+    content:
+      - "(a, b) = lexer.get_token()"
+    location:
+      row: 101
+      column: 4
+    end_location:
+      row: 101
+      column: 41
+  parent: ~
+- kind:
+    UnusedVariable:
+      name: toplevel
+  location:
+    row: 105
+    column: 4
+  end_location:
+    row: 105
+    column: 12
+  fix:
+    content:
+      - tt = 1
+    location:
+      row: 105
+      column: 4
+    end_location:
+      row: 105
+      column: 21
+  parent: ~
+- kind:
+    UnusedVariable:
+      name: tt
+  location:
+    row: 105
+    column: 15
+  end_location:
+    row: 105
+    column: 17
+  fix:
+    content:
+      - toplevel = 1
+    location:
+      row: 105
+      column: 4
+    end_location:
+      row: 105
+      column: 21
+  parent: ~
