Update Rust toolchain to 1.95 and MSRV to 1.93 (#24677)

This commit is contained in:
Alex Waygood
2026-04-17 11:39:14 +01:00
committed by GitHub
parent 08629b4954
commit 581b3fa230
77 changed files with 851 additions and 1025 deletions
+2 -1
View File
@@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
rust-version = "1.92"
rust-version = "1.93"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -249,6 +249,7 @@ must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
collapsible_match = "allow" # Not always an improvement in readability, quite opinionated
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
+1 -1
View File
@@ -200,7 +200,7 @@ impl Cache {
#[expect(clippy::cast_possible_truncation)]
pub(crate) fn save(&mut self) -> bool {
/// Maximum duration for which we keep a file in cache that hasn't been seen.
const MAX_LAST_SEEN: Duration = Duration::from_secs(30 * 24 * 60 * 60); // 30 days.
const MAX_LAST_SEEN: Duration = Duration::from_hours(720); // 30 days.
let changes = std::mem::take(self.changes.get_mut().unwrap());
if changes.is_empty() {
+1 -1
View File
@@ -740,7 +740,7 @@ fn benchmark_large_union_narrowing(criterion: &mut Criterion) {
code.push_str("def process(decl: AllDecls) -> None:\n match decl:\n");
for i in 0..NUM_MATCH_BRANCHES {
writeln!(&mut code, " case C{i}():\n pass",).ok();
writeln!(&mut code, " case C{i}():\n pass").ok();
}
code.push_str(" case _:\n pass\n\n");
+1 -1
View File
@@ -31,7 +31,7 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec<Set>) {
.filter_map(|set| set.name())
.chain(std::iter::once(name.as_str()))
.join(".");
writeln!(output, "#### `{title}`\n",).unwrap();
writeln!(output, "#### `{title}`\n").unwrap();
}
}
+5 -5
View File
@@ -44,18 +44,18 @@ pub(crate) fn main(args: &Args) -> anyhow::Result<()> {
Mode::Check => {
let current = std::fs::read_to_string(&markdown_path)?;
if output == current {
println!("Up-to-date: {file_name}",);
println!("Up-to-date: {file_name}");
} else {
let comparison = StrComparison::new(&current, &output);
bail!("{file_name} changed, please run `{REGENERATE_ALL_COMMAND}`:\n{comparison}",);
bail!("{file_name} changed, please run `{REGENERATE_ALL_COMMAND}`:\n{comparison}");
}
}
Mode::Write => {
let current = std::fs::read_to_string(&markdown_path)?;
if current == output {
println!("Up-to-date: {file_name}",);
println!("Up-to-date: {file_name}");
} else {
println!("Updating: {file_name}",);
println!("Updating: {file_name}");
std::fs::write(markdown_path, output.as_bytes())?;
}
}
@@ -75,7 +75,7 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec<Set>) {
.filter_map(|set| set.name())
.chain(std::iter::once(name.as_str()))
.join(".");
writeln!(output, "## `{title}`\n",).unwrap();
writeln!(output, "## `{title}`\n").unwrap();
}
}
+8 -15
View File
@@ -912,11 +912,11 @@ impl SemanticSyntaxContext for Checker<'_> {
for parent in self.semantic.current_statements().skip(1) {
match parent {
Stmt::For(ast::StmtFor { orelse, .. })
| Stmt::While(ast::StmtWhile { orelse, .. }) => {
if !orelse.contains(child) {
| Stmt::While(ast::StmtWhile { orelse, .. })
if !orelse.contains(child) =>
{
return true;
}
}
Stmt::FunctionDef(_) | Stmt::ClassDef(_) => {
break;
}
@@ -1164,8 +1164,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
names,
range: _,
node_index: _,
}) => {
if !self.semantic.scope_id.is_global() {
}) if !self.semantic.scope_id.is_global() => {
for name in names {
let binding_id = self.semantic.global_scope().get(name);
@@ -1185,13 +1184,11 @@ impl<'a> Visitor<'a> for Checker<'a> {
scope.add(name, binding_id);
}
}
}
Stmt::Nonlocal(ast::StmtNonlocal {
names,
range: _,
node_index: _,
}) => {
if !self.semantic.scope_id.is_global() {
}) if !self.semantic.scope_id.is_global() => {
for name in names {
if let Some((scope_id, binding_id)) = self.semantic.nonlocal(name) {
// Mark the binding as "used", since the `nonlocal` requires an existing
@@ -1218,7 +1215,6 @@ impl<'a> Visitor<'a> for Checker<'a> {
}
}
}
}
_ => {}
}
@@ -1621,12 +1617,10 @@ impl<'a> Visitor<'a> for Checker<'a> {
DocstringState::Expected(ExpectedDocstringKind::Attribute);
}
}
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
if target.is_name_expr() {
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) if target.is_name_expr() => {
self.docstring_state =
DocstringState::Expected(ExpectedDocstringKind::Attribute);
}
}
_ => {}
}
}
@@ -2778,16 +2772,15 @@ impl<'a> Checker<'a> {
match parent {
Stmt::TypeAlias(_) => flags.insert(BindingFlags::DEFERRED_TYPE_ALIAS),
Stmt::AnnAssign(ast::StmtAnnAssign { annotation, .. }) => {
Stmt::AnnAssign(ast::StmtAnnAssign { annotation, .. })
// TODO: It is a bit unfortunate that we do this check twice
// maybe we should change how we visit this statement
// so the semantic flag for the type alias sticks around
// until after we've handled this store, so we can check
// the flag instead of duplicating this check
if self.semantic.match_typing_expr(annotation, "TypeAlias") {
if self.semantic.match_typing_expr(annotation, "TypeAlias") => {
flags.insert(BindingFlags::ANNOTATED_TYPE_ALIAS);
}
}
_ => {}
}
+1 -1
View File
@@ -78,7 +78,7 @@ pub(crate) fn check_noqa(
let noqa_offsets = diagnostic
.parent()
.into_iter()
.chain(diagnostic.range().map(TextRange::start).into_iter())
.chain(diagnostic.range().map(TextRange::start))
.map(|position| noqa_line_for.resolve(position))
.unique();
+16 -20
View File
@@ -485,54 +485,50 @@ fn is_lone_child(child: &Stmt, parent: &Stmt) -> bool {
match parent {
Stmt::FunctionDef(ast::StmtFunctionDef { body, .. })
| Stmt::ClassDef(ast::StmtClassDef { body, .. })
| Stmt::With(ast::StmtWith { body, .. }) => {
if is_only(body, child) {
| Stmt::With(ast::StmtWith { body, .. })
if is_only(body, child) =>
{
return true;
}
}
Stmt::For(ast::StmtFor { body, orelse, .. })
| Stmt::While(ast::StmtWhile { body, orelse, .. }) => {
if is_only(body, child) || is_only(orelse, child) {
| Stmt::While(ast::StmtWhile { body, orelse, .. })
if (is_only(body, child) || is_only(orelse, child)) =>
{
return true;
}
}
Stmt::If(ast::StmtIf {
body,
elif_else_clauses,
..
}) => {
if is_only(body, child)
}) if (is_only(body, child)
|| elif_else_clauses
.iter()
.any(|ast::ElifElseClause { body, .. }| is_only(body, child))
.any(|ast::ElifElseClause { body, .. }| is_only(body, child))) =>
{
return true;
}
}
Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
finalbody,
..
}) => {
if is_only(body, child)
}) if (is_only(body, child)
|| is_only(orelse, child)
|| is_only(finalbody, child)
|| handlers.iter().any(|handler| match handler {
ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler {
body, ..
}) => is_only(body, child),
})
ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler { body, .. }) => {
is_only(body, child)
}
})) =>
{
return true;
}
}
Stmt::Match(ast::StmtMatch { cases, .. }) => {
if cases.iter().any(|case| is_only(&case.body, child)) {
Stmt::Match(ast::StmtMatch { cases, .. })
if cases.iter().any(|case| is_only(&case.body, child)) =>
{
return true;
}
}
_ => {}
}
false
@@ -96,8 +96,8 @@ pub(crate) fn compare_to_hardcoded_password_string(
/// S105
pub(crate) fn assign_hardcoded_password_string(checker: &Checker, value: &Expr, targets: &[Expr]) {
if string_literal(value)
.filter(|string| !string.is_empty())
.is_some()
.as_ref()
.is_some_and(|string| !string.is_empty())
{
for target in targets {
if let Some(name) = password_target(target) {
@@ -67,8 +67,7 @@ pub(crate) fn ssl_insecure_version(checker: &Checker, call: &ExprCall) {
};
match &keyword.value {
Expr::Name(ast::ExprName { id, .. }) => {
if is_insecure_protocol(id) {
Expr::Name(ast::ExprName { id, .. }) if is_insecure_protocol(id) => {
checker.report_diagnostic(
SslInsecureVersion {
protocol: id.to_string(),
@@ -76,9 +75,7 @@ pub(crate) fn ssl_insecure_version(checker: &Checker, call: &ExprCall) {
keyword.range(),
);
}
}
Expr::Attribute(ast::ExprAttribute { attr, .. }) => {
if is_insecure_protocol(attr) {
Expr::Attribute(ast::ExprAttribute { attr, .. }) if is_insecure_protocol(attr) => {
checker.report_diagnostic(
SslInsecureVersion {
protocol: attr.to_string(),
@@ -86,7 +83,6 @@ pub(crate) fn ssl_insecure_version(checker: &Checker, call: &ExprCall) {
keyword.range(),
);
}
}
_ => {}
}
}
@@ -56,8 +56,7 @@ pub(crate) fn ssl_with_bad_defaults(checker: &Checker, function_def: &StmtFuncti
.filter_map(|param| param.default.as_deref())
{
match default {
Expr::Name(ast::ExprName { id, range, .. }) => {
if is_insecure_protocol(id.as_str()) {
Expr::Name(ast::ExprName { id, range, .. }) if is_insecure_protocol(id.as_str()) => {
checker.report_diagnostic(
SslWithBadDefaults {
protocol: id.to_string(),
@@ -65,9 +64,9 @@ pub(crate) fn ssl_with_bad_defaults(checker: &Checker, function_def: &StmtFuncti
*range,
);
}
}
Expr::Attribute(ast::ExprAttribute { attr, range, .. }) => {
if is_insecure_protocol(attr.as_str()) {
Expr::Attribute(ast::ExprAttribute { attr, range, .. })
if is_insecure_protocol(attr.as_str()) =>
{
checker.report_diagnostic(
SslWithBadDefaults {
protocol: attr.to_string(),
@@ -75,7 +74,6 @@ pub(crate) fn ssl_with_bad_defaults(checker: &Checker, function_def: &StmtFuncti
*range,
);
}
}
_ => {}
}
}
@@ -967,7 +967,7 @@ pub(crate) fn suspicious_function_reference(checker: &Checker, func: &Expr) {
}
match checker.semantic().current_expression_parent() {
Some(Expr::Call(parent)) => {
Some(Expr::Call(parent))
// Avoid duplicate diagnostics. For example:
//
// ```python
@@ -975,10 +975,9 @@ pub(crate) fn suspicious_function_reference(checker: &Checker, func: &Expr) {
// shelve.open(lorem, ipsum)
// # ^^^^^^ Should not be reported as a reference
// ```
if parent.func.range().contains_range(func.range()) {
if parent.func.range().contains_range(func.range()) => {
return;
}
}
Some(Expr::Attribute(_)) => {
// Avoid duplicate diagnostics. For example:
//
@@ -1204,13 +1203,12 @@ fn suspicious_function(
// If the `url` argument is a `urllib.request.Request` object, allow `http` and `https` schemes.
Some(Expr::Call(ExprCall {
func, arguments, ..
})) => {
if checker
})) if checker
.semantic()
.resolve_qualified_name(func.as_ref())
.is_some_and(|name| {
name.segments() == ["urllib", "request", "Request"]
})
}) =>
{
if let Some(url_expr) = arguments.find_argument_value("url", 0)
&& expression_starts_with_http_prefix(
@@ -1222,18 +1220,17 @@ fn suspicious_function(
return;
}
}
}
// If the `url` argument is a string literal (including resolved bindings), allow `http` and `https` schemes.
Some(expr) => {
Some(expr)
if expression_starts_with_http_prefix(
expr,
checker.semantic(),
checker.settings(),
) {
) =>
{
return;
}
}
_ => {}
}
@@ -250,13 +250,12 @@ impl<'a> StatementVisitor<'a> for LogExceptionVisitor<'a> {
}) = value.as_ref()
{
match func.as_ref() {
Expr::Attribute(ast::ExprAttribute { attr, .. }) => {
Expr::Attribute(ast::ExprAttribute { attr, .. })
if logging::is_logger_candidate(
func,
self.semantic,
self.logger_objects,
) {
if match attr.as_str() {
) && match attr.as_str() {
"exception" => true,
_ if is_logger_method_name(attr) => is_exc_info_enabled(
attr,
@@ -265,12 +264,11 @@ impl<'a> StatementVisitor<'a> for LogExceptionVisitor<'a> {
self.settings,
),
_ => false,
} {
} =>
{
self.seen = true;
}
}
}
Expr::Name(ast::ExprName { .. }) => {
Expr::Name(ast::ExprName { .. })
if self.semantic.resolve_qualified_name(func).is_some_and(
|qualified_name| match qualified_name.segments() {
["logging", "exception"] => true,
@@ -284,10 +282,10 @@ impl<'a> StatementVisitor<'a> for LogExceptionVisitor<'a> {
}
_ => false,
},
) {
) =>
{
self.seen = true;
}
}
_ => {}
}
}
@@ -113,12 +113,11 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
value: Some(value),
range: _,
node_index: _,
}) => {
})
// Mark `return lambda: x` as safe.
if value.is_lambda_expr() {
if value.is_lambda_expr() => {
self.safe_functions.push(value);
}
}
_ => {}
}
visitor::walk_stmt(self, stmt);
@@ -148,8 +147,7 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
}
}
}
Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
if attr == "reduce" {
Expr::Attribute(ast::ExprAttribute { value, attr, .. }) if attr == "reduce" => {
if let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() {
if id == "functools" {
for arg in &*arguments.args {
@@ -160,7 +158,6 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
}
}
}
}
_ => {}
}
@@ -177,8 +174,7 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
body,
range: _,
node_index: _,
}) => {
if !self.safe_functions.contains(&expr) {
}) if !self.safe_functions.contains(&expr) => {
// Collect all loaded variable names.
let mut visitor = LoadedNamesVisitor::default();
visitor.visit_expr(body);
@@ -202,7 +198,6 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
return;
}
}
_ => {}
}
visitor::walk_expr(self, expr);
@@ -71,10 +71,10 @@ pub(crate) fn loop_iterator_mutation(checker: &Checker, stmt_for: &StmtFor) {
}
Expr::Call(ExprCall {
func, arguments, ..
}) => {
})
// Ex) Given `for i, item in enumerate(items):`, `i` is the index and `items` is the
// iterable.
if checker.semantic().match_builtin_expr(func, "enumerate") {
if checker.semantic().match_builtin_expr(func, "enumerate") => {
// Ex) `items`
let Some(iter) = arguments.args.first() else {
return;
@@ -90,9 +90,6 @@ pub(crate) fn loop_iterator_mutation(checker: &Checker, stmt_for: &StmtFor) {
// Ex) `i`
(index, target, iter)
} else {
return;
}
}
_ => {
return;
@@ -45,11 +45,9 @@ pub(crate) fn unintentional_type_annotation(
return;
}
match target {
Expr::Subscript(ast::ExprSubscript { value, .. }) => {
if value.is_name_expr() {
Expr::Subscript(ast::ExprSubscript { value, .. }) if value.is_name_expr() => {
checker.report_diagnostic(UnintentionalTypeAnnotation, stmt.range());
}
}
Expr::Attribute(ast::ExprAttribute { value, .. }) => {
if let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() {
if id != "self" {
@@ -106,11 +106,11 @@ pub(crate) fn call_datetime_strptime_without_zone(checker: &Checker, call: &ast:
// Does the `strptime` call contain a format string with a timezone specifier?
if let Some(expr) = call.arguments.args.get(1) {
match expr {
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
if value.to_str().contains("%z") {
Expr::StringLiteral(ast::ExprStringLiteral { value, .. })
if value.to_str().contains("%z") =>
{
return;
}
}
Expr::FString(ast::ExprFString { value, .. }) => {
for f_string_part in value {
match f_string_part {
@@ -77,18 +77,18 @@ pub(crate) fn all_with_model_form(checker: &Checker, class_def: &ast::StmtClassD
continue;
}
match value.as_ref() {
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
if value == "__all__" {
Expr::StringLiteral(ast::ExprStringLiteral { value, .. })
if value == "__all__" =>
{
checker.report_diagnostic(DjangoAllWithModelForm, element.range());
return;
}
}
Expr::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => {
if value == "__all__".as_bytes() {
Expr::BytesLiteral(ast::ExprBytesLiteral { value, .. })
if value == "__all__".as_bytes() =>
{
checker.report_diagnostic(DjangoAllWithModelForm, element.range());
return;
}
}
_ => (),
}
}
@@ -98,19 +98,18 @@ fn is_model_abstract(class_def: &ast::StmtClassDef) -> bool {
}
for element in body {
match element {
Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
Stmt::Assign(ast::StmtAssign { targets, value, .. })
if targets
.iter()
.any(|target| is_abstract_true_assignment(target, Some(value)))
.any(|target| is_abstract_true_assignment(target, Some(value))) =>
{
return true;
}
}
Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => {
if is_abstract_true_assignment(target, value.as_deref()) {
Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. })
if is_abstract_true_assignment(target, value.as_deref()) =>
{
return true;
}
}
_ => {}
}
}
@@ -201,11 +201,11 @@ pub(crate) fn string_in_exception(checker: &Checker, stmt: &Stmt, exc: &Expr) {
if let Some(first) = args.first() {
match first {
// Check for string literals.
Expr::StringLiteral(ast::ExprStringLiteral { value: string, .. }) => {
if checker.is_rule_enabled(Rule::RawStringInException) {
if string.len() >= checker.settings().flake8_errmsg.max_string_length {
let mut diagnostic =
checker.report_diagnostic(RawStringInException, first.range());
Expr::StringLiteral(ast::ExprStringLiteral { value: string, .. })
if checker.is_rule_enabled(Rule::RawStringInException)
&& string.len() >= checker.settings().flake8_errmsg.max_string_length =>
{
let mut diagnostic = checker.report_diagnostic(RawStringInException, first.range());
if let Some(indentation) = whitespace::indentation(checker.source(), stmt) {
diagnostic.set_fix(generate_fix(
stmt,
@@ -217,14 +217,12 @@ pub(crate) fn string_in_exception(checker: &Checker, stmt: &Stmt, exc: &Expr) {
));
}
}
}
}
// Check for byte string literals.
Expr::BytesLiteral(ast::ExprBytesLiteral { value: bytes, .. }) => {
if checker.settings().rules.enabled(Rule::RawStringInException) {
if bytes.len() >= checker.settings().flake8_errmsg.max_string_length {
let mut diagnostic =
checker.report_diagnostic(RawStringInException, first.range());
Expr::BytesLiteral(ast::ExprBytesLiteral { value: bytes, .. })
if checker.settings().rules.enabled(Rule::RawStringInException)
&& bytes.len() >= checker.settings().flake8_errmsg.max_string_length =>
{
let mut diagnostic = checker.report_diagnostic(RawStringInException, first.range());
if let Some(indentation) = whitespace::indentation(checker.source(), stmt) {
diagnostic.set_fix(generate_fix(
stmt,
@@ -236,13 +234,9 @@ pub(crate) fn string_in_exception(checker: &Checker, stmt: &Stmt, exc: &Expr) {
));
}
}
}
}
// Check for f-strings.
Expr::FString(_) => {
if checker.is_rule_enabled(Rule::FStringInException) {
let mut diagnostic =
checker.report_diagnostic(FStringInException, first.range());
Expr::FString(_) if checker.is_rule_enabled(Rule::FStringInException) => {
let mut diagnostic = checker.report_diagnostic(FStringInException, first.range());
if let Some(indentation) = whitespace::indentation(checker.source(), stmt) {
diagnostic.set_fix(generate_fix(
stmt,
@@ -254,17 +248,15 @@ pub(crate) fn string_in_exception(checker: &Checker, stmt: &Stmt, exc: &Expr) {
));
}
}
}
// Check for .format() calls.
Expr::Call(ast::ExprCall { func, .. }) => {
if checker.is_rule_enabled(Rule::DotFormatInException) {
Expr::Call(ast::ExprCall { func, .. })
if checker.is_rule_enabled(Rule::DotFormatInException) =>
{
if let Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = func.as_ref() {
if attr == "format" && value.is_literal_expr() {
let mut diagnostic =
checker.report_diagnostic(DotFormatInException, first.range());
if let Some(indentation) =
whitespace::indentation(checker.source(), stmt)
{
if let Some(indentation) = whitespace::indentation(checker.source(), stmt) {
diagnostic.set_fix(generate_fix(
stmt,
first,
@@ -277,7 +269,6 @@ pub(crate) fn string_in_exception(checker: &Checker, stmt: &Stmt, exc: &Expr) {
}
}
}
}
_ => {}
}
}
@@ -153,21 +153,19 @@ fn check_msg(checker: &Checker, msg: &Expr, arguments: &Arguments, msg_pos: usiz
_ => {}
},
// Check for f-strings.
Expr::FString(f_string) => {
if checker.is_rule_enabled(Rule::LoggingFString) {
Expr::FString(f_string) if checker.is_rule_enabled(Rule::LoggingFString) => {
logging_f_string(checker, msg, f_string, arguments, msg_pos);
}
}
// Check for .format() calls.
Expr::Call(ast::ExprCall { func, .. }) => {
if checker.is_rule_enabled(Rule::LoggingStringFormat) {
Expr::Call(ast::ExprCall { func, .. })
if checker.is_rule_enabled(Rule::LoggingStringFormat) =>
{
if let Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = func.as_ref() {
if attr == "format" && value.is_literal_expr() {
checker.report_diagnostic(LoggingStringFormat, msg.range());
}
}
}
}
_ => {}
}
}
@@ -194,8 +192,7 @@ fn check_log_record_attr_clash(checker: &Checker, extra: &Keyword) {
func,
arguments: Arguments { keywords, .. },
..
}) => {
if checker.semantic().match_builtin_expr(func, "dict") {
}) if checker.semantic().match_builtin_expr(func, "dict") => {
for keyword in keywords {
if let Some(attr) = &keyword.arg {
if is_reserved_attr(attr) {
@@ -207,7 +204,6 @@ fn check_log_record_attr_clash(checker: &Checker, extra: &Keyword) {
}
}
}
}
_ => {}
}
}
@@ -207,20 +207,18 @@ pub(crate) fn non_self_return_type(
}
match name {
"__iter__" => {
"__iter__"
if is_iterable_or_iterator(returns, semantic)
&& subclasses_iterator(class_def, semantic)
&& subclasses_iterator(class_def, semantic) =>
{
add_diagnostic(checker, stmt, returns, class_def, name);
}
}
"__aiter__" => {
"__aiter__"
if is_async_iterable_or_iterator(returns, semantic)
&& subclasses_async_iterator(class_def, semantic)
&& subclasses_async_iterator(class_def, semantic) =>
{
add_diagnostic(checker, stmt, returns, class_def, name);
}
}
_ => {}
}
}
@@ -336,15 +336,14 @@ fn is_valid_default_value_with_annotation(
// Ex) `-1`, `-3.14`, `2j`
Expr::NumberLiteral(_) => return true,
// Ex) `-math.inf`, `-math.pi`, etc.
Expr::Attribute(_) => {
Expr::Attribute(_)
if semantic
.resolve_qualified_name(operand)
.as_ref()
.is_some_and(is_allowed_negated_math_attribute)
.is_some_and(is_allowed_negated_math_attribute) =>
{
return true;
}
}
_ => {}
}
}
@@ -387,15 +386,14 @@ fn is_valid_default_value_with_annotation(
}
}
// Ex) `math.inf`, `sys.stdin`, etc.
Expr::Attribute(_) => {
Expr::Attribute(_)
if semantic
.resolve_qualified_name(default)
.as_ref()
.is_some_and(is_allowed_math_attribute)
.is_some_and(is_allowed_math_attribute) =>
{
return true;
}
}
_ => {}
}
false
@@ -769,8 +769,9 @@ fn handle_value_rows(
) {
for elt in elts {
match elt {
Expr::Tuple(ast::ExprTuple { elts, .. }) => {
if values_row_type != types::ParametrizeValuesRowType::Tuple {
Expr::Tuple(ast::ExprTuple { elts, .. })
if values_row_type != types::ParametrizeValuesRowType::Tuple =>
{
let mut diagnostic = checker.report_diagnostic(
PytestParametrizeValuesWrongType {
values: values_type,
@@ -795,11 +796,8 @@ fn handle_value_rows(
&& checker.locator().up_to(elt.end()).chars().rev().nth(1) == Some(',');
// Replace `(` with `[`.
let elt_start = Edit::replacement(
"[".into(),
elt.start(),
elt.start() + TextSize::from(1),
);
let elt_start =
Edit::replacement("[".into(), elt.start(), elt.start() + TextSize::from(1));
// Replace `)` or `,)` with `]`.
let start = if has_trailing_comma {
elt.end() - TextSize::from(2)
@@ -810,9 +808,9 @@ fn handle_value_rows(
Fix::unsafe_edits(elt_start, [elt_end])
});
}
}
Expr::List(ast::ExprList { elts, .. }) => {
if values_row_type != types::ParametrizeValuesRowType::List {
Expr::List(ast::ExprList { elts, .. })
if values_row_type != types::ParametrizeValuesRowType::List =>
{
let mut diagnostic = checker.report_diagnostic(
PytestParametrizeValuesWrongType {
values: values_type,
@@ -835,11 +833,8 @@ fn handle_value_rows(
};
// Replace `[` with `(`.
let elt_start = Edit::replacement(
"(".into(),
elt.start(),
elt.start() + TextSize::from(1),
);
let elt_start =
Edit::replacement("(".into(), elt.start(), elt.start() + TextSize::from(1));
// Replace `]` with `)` or `,)`.
let elt_end = Edit::replacement(
if needs_trailing_comma {
@@ -853,7 +848,6 @@ fn handle_value_rows(
Fix::unsafe_edits(elt_start, [elt_end])
});
}
}
_ => {}
}
}
@@ -109,14 +109,13 @@ impl<'a> Visitor<'a> for ReturnVisitor<'_, 'a> {
.non_locals
.extend(names.iter().map(Identifier::as_str));
}
Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => {
Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. })
// Ex) `x: int`
if value.is_none() {
if value.is_none() => {
if let Expr::Name(name) = target.as_ref() {
self.stack.annotations.insert(name.id.as_str());
}
}
}
Stmt::Return(stmt_return) => {
// If the `return` statement is preceded by an `assignment` statement, then the
// `assignment` statement may be redundant.
@@ -324,38 +324,30 @@ pub(crate) fn quote_annotation(
let expr = semantic.expression(node_id).expect("Expression not found");
if let Some(parent_id) = semantic.parent_expression_id(node_id) {
match semantic.expression(parent_id) {
Some(Expr::Subscript(parent)) => {
if expr == parent.value.as_ref() {
Some(Expr::Subscript(parent)) if expr == parent.value.as_ref() => {
// If we're quoting the value of a subscript, we need to quote the entire
// expression. For example, when quoting `DataFrame` in `DataFrame[int]`, we
// should generate `"DataFrame[int]"`.
return quote_annotation(parent_id, semantic, stylist, locator, flags);
}
}
Some(Expr::Attribute(parent)) => {
if expr == parent.value.as_ref() {
Some(Expr::Attribute(parent)) if expr == parent.value.as_ref() => {
// If we're quoting the value of an attribute, we need to quote the entire
// expression. For example, when quoting `DataFrame` in `pd.DataFrame`, we
// should generate `"pd.DataFrame"`.
return quote_annotation(parent_id, semantic, stylist, locator, flags);
}
}
Some(Expr::Call(parent)) => {
if expr == parent.func.as_ref() {
Some(Expr::Call(parent)) if expr == parent.func.as_ref() => {
// If we're quoting the function of a call, we need to quote the entire
// expression. For example, when quoting `DataFrame` in `DataFrame()`, we
// should generate `"DataFrame()"`.
return quote_annotation(parent_id, semantic, stylist, locator, flags);
}
}
Some(Expr::BinOp(parent)) => {
if parent.op.is_bit_or() {
Some(Expr::BinOp(parent)) if parent.op.is_bit_or() => {
// If we're quoting the left or right side of a binary operation, we need to
// quote the entire expression. For example, when quoting `DataFrame` in
// `DataFrame | Series`, we should generate `"DataFrame | Series"`.
return quote_annotation(parent_id, semantic, stylist, locator, flags);
}
}
_ => {}
}
}
@@ -63,7 +63,7 @@ pub enum ImportSection {
impl fmt::Display for ImportSection {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Known(import_type) => write!(f, "known {{ type = {import_type} }}",),
Self::Known(import_type) => write!(f, "known {{ type = {import_type} }}"),
Self::UserDefined(string) => fmt::Debug::fmt(string, f),
}
}
@@ -69,7 +69,7 @@ impl Violation for Numpy2Deprecation {
} = self;
match migration_guide {
Some(migration_guide) => {
format!("`np.{existing}` will be removed in NumPy 2.0. {migration_guide}",)
format!("`np.{existing}` will be removed in NumPy 2.0. {migration_guide}")
}
None => format!("`np.{existing}` will be removed without replacement in NumPy 2.0"),
}
@@ -135,18 +135,12 @@ pub(crate) fn compound_statements(
// Use an iterator to allow passing it around.
let mut token_iter = tokens.iter_with_context();
loop {
let Some(token) = token_iter.next() else {
break;
};
while let Some(token) = token_iter.next() {
match token.kind() {
TokenKind::Ellipsis => {
if allow_ellipsis {
TokenKind::Ellipsis if allow_ellipsis => {
allow_ellipsis = false;
continue;
}
}
TokenKind::Indent => {
indent = indent.saturating_add(1);
}
@@ -181,9 +181,9 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
}
}
BracketOrPunctuation::CloseBracket(symbol)
if symbol != '}' || interpolated_strings == 0 =>
if (symbol != '}' || interpolated_strings == 0)
&& !matches!(prev_token, Some(TokenKind::Comma)) =>
{
if !matches!(prev_token, Some(TokenKind::Comma)) {
if let (Whitespace::Single | Whitespace::Many | Whitespace::Tab, offset) =
line.leading_whitespace(token)
{
@@ -196,9 +196,9 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
}
}
}
}
BracketOrPunctuation::Punctuation(symbol) => {
if !matches!(prev_token, Some(TokenKind::Comma)) {
BracketOrPunctuation::Punctuation(symbol)
if !matches!(prev_token, Some(TokenKind::Comma)) =>
{
let whitespace = line.leading_whitespace(token);
if let (Whitespace::Single | Whitespace::Many | Whitespace::Tab, offset) =
whitespace
@@ -213,6 +213,19 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
// If we're in the second half of a double colon, disallow
// any whitespace (e.g., `foo[1: :2]` or `foo[1 : : 2]`).
if matches!(prev_token, Some(TokenKind::Colon)) {
let range = TextRange::at(token.start() - offset, offset);
if let Some(mut diagnostic) = context.report_diagnostic_if_enabled(
WhitespaceBeforePunctuation { symbol },
range,
) {
diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
}
} else if iter.peek().is_some_and(|token| {
matches!(token.kind(), TokenKind::Rsqb | TokenKind::Comma)
}) {
// Allow `foo[1 :]` (single space), but not `foo[1  :]` (multiple spaces or a tab).
// Or `foo[index :, 2]`, but not `foo[index  :, 2]`.
if let (Whitespace::Many | Whitespace::Tab, offset) = whitespace {
let range = TextRange::at(token.start() - offset, offset);
if let Some(mut diagnostic) = context
.report_diagnostic_if_enabled(
@@ -223,24 +236,6 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
diagnostic
.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
}
} else if iter.peek().is_some_and(|token| {
matches!(token.kind(), TokenKind::Rsqb | TokenKind::Comma)
}) {
// Allow `foo[1 :]` (single space), but not `foo[1  :]` (multiple spaces or a tab).
// Or `foo[index :, 2]`, but not `foo[index  :, 2]`.
if let (Whitespace::Many | Whitespace::Tab, offset) = whitespace
{
let range = TextRange::at(token.start() - offset, offset);
if let Some(mut diagnostic) = context
.report_diagnostic_if_enabled(
WhitespaceBeforePunctuation { symbol },
range,
)
{
diagnostic.set_fix(Fix::safe_edit(
Edit::range_deletion(range),
));
}
}
} else if iter.peek().is_some_and(|token| {
matches!(
@@ -256,8 +251,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
// ]
// distinct from the above case, because ruff format produces a
// whitespace before the colon and so should the fix
if let (Whitespace::Many | Whitespace::Tab, offset) = whitespace
{
if let (Whitespace::Many | Whitespace::Tab, offset) = whitespace {
let range = TextRange::at(token.start() - offset, offset);
if let Some(mut diagnostic) = context
.report_diagnostic_if_enabled(
@@ -267,10 +261,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
{
diagnostic.set_fix(Fix::safe_edits(
Edit::range_deletion(range),
[Edit::insertion(
" ".into(),
token.start() - offset,
)],
[Edit::insertion(" ".into(), token.start() - offset)],
));
}
}
@@ -288,9 +279,8 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
range,
)
{
diagnostic.set_fix(Fix::safe_edit(
Edit::range_deletion(range),
));
diagnostic
.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
}
}
}
@@ -312,7 +302,6 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
}
}
}
}
_ => {}
}
}
@@ -97,8 +97,7 @@ pub(crate) fn not_tests(checker: &Checker, unary_op: &ast::ExprUnaryOp) {
};
match &**ops {
[CmpOp::In] => {
if checker.is_rule_enabled(Rule::NotInTest) {
[CmpOp::In] if checker.is_rule_enabled(Rule::NotInTest) => {
let mut diagnostic = checker.report_diagnostic(NotInTest, unary_op.operand.range());
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
pad(
@@ -116,9 +115,7 @@ pub(crate) fn not_tests(checker: &Checker, unary_op: &ast::ExprUnaryOp) {
unary_op.range(),
)));
}
}
[CmpOp::Is] => {
if checker.is_rule_enabled(Rule::NotIsTest) {
[CmpOp::Is] if checker.is_rule_enabled(Rule::NotIsTest) => {
let mut diagnostic = checker.report_diagnostic(NotIsTest, unary_op.operand.range());
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
pad(
@@ -136,7 +133,6 @@ pub(crate) fn not_tests(checker: &Checker, unary_op: &ast::ExprUnaryOp) {
unary_op.range(),
)));
}
}
_ => {}
}
}
@@ -1277,7 +1277,7 @@ pub(crate) fn check_docstring(
if !definition.is_property(extra_property_decorators, semantic) {
if !body_entries.returns.is_empty() {
match function_def.returns.as_deref() {
Some(returns) => {
Some(returns)
// Ignore it if it's annotated as returning `None`
// or it's a generator function annotated as returning `None`,
// i.e. any of `-> None`, `-> Iterator[...]` or `-> Generator[..., ..., None]`
@@ -1287,11 +1287,10 @@ pub(crate) fn check_docstring(
returns,
semantic,
)
{
=> {
checker
.report_diagnostic(DocstringMissingReturns, docstring.range());
}
}
None if body_entries
.returns
.iter()
@@ -156,11 +156,7 @@ fn locate_cmp_ops(expr: &Expr, tokens: &Tokens) -> Vec<LocatedCmpOp> {
// Track the nesting level.
let mut nesting = 0u32;
loop {
let Some(token) = tok_iter.next() else {
break;
};
while let Some(token) = tok_iter.next() {
match token.kind() {
TokenKind::Lpar | TokenKind::Lsqb | TokenKind::Lbrace => {
nesting = nesting.saturating_add(1);
@@ -60,11 +60,9 @@ fn match_not_implemented(expr: &Expr) -> Option<&Expr> {
}
}
}
Expr::Name(ast::ExprName { id, .. }) => {
if id == "NotImplemented" {
Expr::Name(ast::ExprName { id, .. }) if id == "NotImplemented" => {
return Some(expr);
}
}
_ => {}
}
None
@@ -177,8 +177,9 @@ pub(crate) fn repeated_keys(checker: &Checker, dict: &ast::ExprDict) {
| Expr::NoneLiteral(_)
| Expr::EllipsisLiteral(_)
| Expr::Tuple(_)
| Expr::FString(_) => {
if checker.is_rule_enabled(Rule::MultiValueRepeatedKeyLiteral) {
| Expr::FString(_)
if checker.is_rule_enabled(Rule::MultiValueRepeatedKeyLiteral) =>
{
let mut diagnostic = checker.report_diagnostic(
MultiValueRepeatedKeyLiteral {
name: SourceCodeSnippet::from_str(checker.locator().slice(key)),
@@ -207,9 +208,9 @@ pub(crate) fn repeated_keys(checker: &Checker, dict: &ast::ExprDict) {
)));
}
}
}
Expr::Name(_) => {
if checker.is_rule_enabled(Rule::MultiValueRepeatedKeyVariable) {
Expr::Name(_)
if checker.is_rule_enabled(Rule::MultiValueRepeatedKeyVariable) =>
{
let mut diagnostic = checker.report_diagnostic(
MultiValueRepeatedKeyVariable {
name: SourceCodeSnippet::from_str(checker.locator().slice(key)),
@@ -236,7 +237,6 @@ pub(crate) fn repeated_keys(checker: &Checker, dict: &ast::ExprDict) {
)));
}
}
}
_ => {}
}
}
@@ -54,7 +54,7 @@ impl Violation for CompareToEmptyString {
existing,
replacement,
} = self;
format!("`{existing}` can be simplified to `{replacement}` as an empty string is falsey",)
format!("`{existing}` can be simplified to `{replacement}` as an empty string is falsey")
}
}
@@ -55,7 +55,7 @@ impl AlwaysFixableViolation for ModifiedIteratingSet {
#[derive_message_formats]
fn message(&self) -> String {
let ModifiedIteratingSet { name } = self;
format!("Iterated set `{name}` is modified within the `for` loop",)
format!("Iterated set `{name}` is modified within the `for` loop")
}
fn fix_title(&self) -> String {
@@ -300,8 +300,8 @@ fn merged_membership_test(
.join(", ");
if all_hashable {
return format!("{left} {op} {{{members}}}",);
return format!("{left} {op} {{{members}}}");
}
format!("{left} {op} ({members})",)
format!("{left} {op} ({members})")
}
@@ -67,7 +67,7 @@ impl Violation for TypeBivariance {
match param_name {
None => format!("`{kind}` cannot be both covariant and contravariant"),
Some(param_name) => {
format!("`{kind}` \"{param_name}\" cannot be both covariant and contravariant",)
format!("`{kind}` \"{param_name}\" cannot be both covariant and contravariant")
}
}
}
@@ -288,12 +288,12 @@ pub(crate) fn deprecated_mock_import(checker: &Checker, stmt: &Stmt) {
is_lazy: _,
range: _,
node_index: _,
}) => {
})
// Find all `mock` imports.
if names
.iter()
.any(|name| &name.name == "mock" || &name.name == "mock.mock")
{
=> {
// Generate the fix, if needed, which is shared between all `mock` imports.
let content = if let Some(indent) = indentation(checker.source(), stmt) {
match format_import(stmt, indent, checker.locator(), checker.stylist()) {
@@ -332,7 +332,6 @@ pub(crate) fn deprecated_mock_import(checker: &Checker, stmt: &Stmt) {
}
}
}
}
Stmt::ImportFrom(ast::StmtImportFrom {
module: Some(module),
level,
@@ -164,11 +164,9 @@ pub(crate) fn os_error_alias_handlers(checker: &Checker, handlers: &[ExceptHandl
continue;
};
match expr.as_ref() {
Expr::Name(_) | Expr::Attribute(_) => {
if is_alias(expr, checker.semantic()) {
Expr::Name(_) | Expr::Attribute(_) if is_alias(expr, checker.semantic()) => {
atom_diagnostic(checker, expr);
}
}
Expr::Tuple(tuple) => {
// List of aliases to replace with `OSError`.
let mut aliases: Vec<&Expr> = vec![];
@@ -292,8 +292,7 @@ pub(crate) fn expr_name_to_type_var<'a>(
Expr::Subscript(ExprSubscript {
value: subscript_value,
..
}) => {
if semantic.match_typing_expr(subscript_value, "TypeVar") {
}) if semantic.match_typing_expr(subscript_value, "TypeVar") => {
return Some(TypeVar {
name: &name.id,
restriction: None,
@@ -301,7 +300,6 @@ pub(crate) fn expr_name_to_type_var<'a>(
default: None,
});
}
}
Expr::Call(ExprCall {
func, arguments, ..
}) => {
@@ -186,11 +186,11 @@ pub(crate) fn timeout_error_alias_handlers(checker: &Checker, handlers: &[Except
continue;
};
match expr.as_ref() {
Expr::Name(_) | Expr::Attribute(_) => {
if is_alias(expr, checker.semantic(), checker.target_version()) {
Expr::Name(_) | Expr::Attribute(_)
if is_alias(expr, checker.semantic(), checker.target_version()) =>
{
atom_diagnostic(checker, expr);
}
}
Expr::Tuple(tuple) => {
// List of aliases to replace with `TimeoutError`.
let mut aliases: Vec<&Expr> = vec![];
@@ -103,19 +103,16 @@ fn match_encoding_arg(arguments: &Arguments) -> Option<EncodingArg<'_>> {
// Ex `"".encode()`
([], []) => return Some(EncodingArg::Empty),
// Ex `"".encode(encoding)`
([arg], []) => {
if is_utf8_encoding_arg(arg) {
([arg], []) if is_utf8_encoding_arg(arg) => {
return Some(EncodingArg::Positional(arg));
}
}
// Ex `"".encode(kwarg=kwarg)`
([], [keyword]) => {
if keyword.arg.as_ref().is_some_and(|arg| arg == "encoding") {
if is_utf8_encoding_arg(&keyword.value) {
([], [keyword])
if keyword.arg.as_ref().is_some_and(|arg| arg == "encoding")
&& is_utf8_encoding_arg(&keyword.value) =>
{
return Some(EncodingArg::Keyword(keyword));
}
}
}
// Ex `"".encode(*args, **kwargs)`
_ => {}
}
@@ -76,7 +76,7 @@ impl Violation for UnnecessaryFromFloat {
method_name,
constructor,
} = self;
format!("Verbose method `{method_name}` in `{constructor}` construction",)
format!("Verbose method `{method_name}` in `{constructor}` construction")
}
fn fix_title(&self) -> Option<String> {
@@ -136,13 +136,13 @@ pub(crate) fn mutable_class_default(checker: &Checker, class_def: &ast::StmtClas
checker.report_diagnostic(MutableClassDefault, value.range());
}
}
Stmt::Assign(ast::StmtAssign { value, targets, .. }) => {
Stmt::Assign(ast::StmtAssign { value, targets, .. })
if !targets.iter().all(|target| {
is_special_attribute(target)
|| target
.as_name_expr()
.is_some_and(|name| class_var_targets.contains(&name.id))
}) && is_mutable_expr(value, checker.semantic())
}) && is_mutable_expr(value, checker.semantic()) =>
{
// The `_fields_` property of a `ctypes.Structure` base class has its
// immutability enforced by the base class itself which will throw an error if
@@ -159,7 +159,6 @@ pub(crate) fn mutable_class_default(checker: &Checker, class_def: &ast::StmtClas
checker.report_diagnostic(MutableClassDefault, value.range());
}
}
_ => (),
}
}
@@ -105,11 +105,11 @@ fn contains_message(expr: &Expr) -> bool {
}
}
}
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
if value.chars().any(char::is_whitespace) {
Expr::StringLiteral(ast::ExprStringLiteral { value, .. })
if value.chars().any(char::is_whitespace) =>
{
return true;
}
}
_ => {}
}
+1 -1
View File
@@ -20,7 +20,7 @@ mod rule_namespace;
mod rust_doc;
mod violation_metadata;
#[proc_macro_derive(OptionsMetadata, attributes(option, doc, option_group))]
#[proc_macro_derive(OptionsMetadata, attributes(option, option_group))]
pub fn derive_options_metadata(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
@@ -120,15 +120,15 @@ impl<'a, 'src> StringNormalizer<'a, 'src> {
return QuoteStyle::Preserve;
}
}
StringLikePart::TString(tstring) => {
StringLikePart::TString(tstring)
if is_interpolated_string_with_quoted_format_spec_and_debug(
&tstring.elements,
tstring.flags.into(),
self.context,
) {
) =>
{
return QuoteStyle::Preserve;
}
}
_ => {}
}
@@ -118,7 +118,7 @@ fn black_compatibility(input_path: &Utf8Path, content: String) -> datatest_stabl
// The following code mimics insta's logic generating the snapshot name for a test.
let workspace_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let full_snapshot_name = format!("black_compatibility@{test_name}.snap",);
let full_snapshot_name = format!("black_compatibility@{test_name}.snap");
let snapshot_path = Path::new(&workspace_path)
.join("tests/snapshots")
@@ -367,7 +367,7 @@ fn format_file(
(Cow::Owned(without_markers), content)
} else {
let printed = format_module_source(source, options.clone()).unwrap_or_else(|err| {
panic!("Formatting `{input_path} was expected to succeed but it failed: {err}",)
panic!("Formatting `{input_path} was expected to succeed but it failed: {err}")
});
let formatted_code = printed.into_code();
@@ -59,8 +59,7 @@ impl Transformer for Normalizer {
// but not joining here doesn't play nicely with other string normalizations done in the
// Normalizer.
match expr {
Expr::StringLiteral(string) => {
if string.value.is_implicit_concatenated() {
Expr::StringLiteral(string) if string.value.is_implicit_concatenated() => {
let can_join = string.value.iter().all(|literal| {
!literal.flags.is_triple_quoted() && !literal.flags.prefix().is_raw()
});
@@ -74,10 +73,8 @@ impl Transformer for Normalizer {
});
}
}
}
Expr::BytesLiteral(bytes) => {
if bytes.value.is_implicit_concatenated() {
Expr::BytesLiteral(bytes) if bytes.value.is_implicit_concatenated() => {
let can_join = bytes.value.iter().all(|literal| {
!literal.flags.is_triple_quoted() && !literal.flags.prefix().is_raw()
});
@@ -91,10 +88,8 @@ impl Transformer for Normalizer {
});
}
}
}
Expr::FString(fstring) => {
if fstring.value.is_implicit_concatenated() {
Expr::FString(fstring) if fstring.value.is_implicit_concatenated() => {
let can_join = fstring.value.iter().all(|part| match part {
FStringPart::Literal(literal) => {
!literal.flags.is_triple_quoted() && !literal.flags.prefix().is_raw()
@@ -148,15 +143,13 @@ impl Transformer for Normalizer {
for part in &fstring.value {
match part {
ast::FStringPart::Literal(string_literal) => {
collector
.push_literal(&string_literal.value, string_literal.range);
collector.push_literal(&string_literal.value, string_literal.range);
}
ast::FStringPart::FString(fstring) => {
for element in &fstring.elements {
match element {
ast::InterpolatedStringElement::Literal(literal) => {
collector
.push_literal(&literal.value, literal.range);
collector.push_literal(&literal.value, literal.range);
}
ast::InterpolatedStringElement::Interpolation(
expression,
@@ -177,7 +170,6 @@ impl Transformer for Normalizer {
});
}
}
}
_ => {}
}
+1 -1
View File
@@ -221,7 +221,7 @@ impl std::fmt::Display for ParseErrorType {
match self {
ParseErrorType::OtherError(msg) => write!(f, "{msg}"),
ParseErrorType::ExpectedToken { found, expected } => {
write!(f, "Expected {expected}, found {found}",)
write!(f, "Expected {expected}, found {found}")
}
ParseErrorType::Lexical(lex_error) => write!(f, "{lex_error}"),
ParseErrorType::SimpleStatementsOnSameLine => {
@@ -333,16 +333,12 @@ impl SemanticSyntaxChecker {
}
}
}
Stmt::Break(ast::StmtBreak { range, .. }) => {
if !ctx.in_loop_context() {
Stmt::Break(ast::StmtBreak { range, .. }) if !ctx.in_loop_context() => {
Self::add_error(ctx, SemanticSyntaxErrorKind::BreakOutsideLoop, *range);
}
}
Stmt::Continue(ast::StmtContinue { range, .. }) => {
if !ctx.in_loop_context() {
Stmt::Continue(ast::StmtContinue { range, .. }) if !ctx.in_loop_context() => {
Self::add_error(ctx, SemanticSyntaxErrorKind::ContinueOutsideLoop, *range);
}
}
_ => {}
}
@@ -1273,7 +1269,7 @@ impl Display for SemanticSyntaxError {
)
}
SemanticSyntaxErrorKind::DuplicateMatchClassAttribute(name) => {
write!(f, "attribute name `{name}` repeated in class pattern",)
write!(f, "attribute name `{name}` repeated in class pattern")
}
SemanticSyntaxErrorKind::LoadBeforeGlobalDeclaration { name, start: _ } => {
write!(f, "name `{name}` is used prior to global declaration")
@@ -310,48 +310,40 @@ fn sometimes_breaks(stmts: &[Stmt], semantic: &SemanticModel) -> bool {
body,
elif_else_clauses,
..
}) => {
if std::iter::once(body)
}) if std::iter::once(body)
.chain(elif_else_clauses.iter().map(|clause| &clause.body))
.any(|body| sometimes_breaks(body, semantic))
.any(|body| sometimes_breaks(body, semantic)) =>
{
return true;
}
}
Stmt::Match(ast::StmtMatch { cases, .. }) => {
Stmt::Match(ast::StmtMatch { cases, .. })
if cases
.iter()
.any(|case| sometimes_breaks(&case.body, semantic))
.any(|case| sometimes_breaks(&case.body, semantic)) =>
{
return true;
}
}
Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
finalbody,
..
}) => {
if sometimes_breaks(body, semantic)
}) if (sometimes_breaks(body, semantic)
|| handlers.iter().any(|handler| {
let ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler {
body,
..
body, ..
}) = handler;
sometimes_breaks(body, semantic)
})
|| sometimes_breaks(orelse, semantic)
|| sometimes_breaks(finalbody, semantic)
|| sometimes_breaks(finalbody, semantic)) =>
{
return true;
}
}
Stmt::With(ast::StmtWith { body, .. }) => {
if sometimes_breaks(body, semantic) {
Stmt::With(ast::StmtWith { body, .. }) if sometimes_breaks(body, semantic) => {
return true;
}
}
Stmt::Break(_) => return true,
Stmt::Return(_) => return false,
Stmt::Raise(_) => return false,
@@ -1320,11 +1320,9 @@ fn match_target<'a>(binding: &Binding, targets: &[Expr], values: &'a [Expr]) ->
_ => (),
}
}
Expr::Name(name) => {
if name.range() == binding.range() {
Expr::Name(name) if name.range() == binding.range() => {
return Some(value);
}
}
_ => (),
}
}
+6 -6
View File
@@ -1130,8 +1130,9 @@ impl<'a> SemanticModel<'a> {
// Ex) Given `module="sys"` and `object="exit"`:
// `import sys` -> `sys.exit`
// `import sys as sys2` -> `sys2.exit`
BindingKind::Import(Import { qualified_name }) => {
if qualified_name.segments() == module_path.as_slice() {
BindingKind::Import(Import { qualified_name })
if qualified_name.segments() == module_path.as_slice() =>
{
if let Some(source) = binding.source {
// Verify that `sys` isn't bound in an inner scope.
if self
@@ -1148,7 +1149,6 @@ impl<'a> SemanticModel<'a> {
}
}
}
}
// Ex) Given `module="os.path"` and `object="join"`:
// `from os.path import join` -> `join`
// `from os.path import join as join2` -> `join2`
@@ -1181,8 +1181,9 @@ impl<'a> SemanticModel<'a> {
// `import os.path ` -> `os.name`
// Ex) Given `module="os.path"` and `object="join"`:
// `import os.path ` -> `os.path.join`
BindingKind::SubmoduleImport(SubmoduleImport { qualified_name }) => {
if qualified_name.segments().starts_with(&module_path) {
BindingKind::SubmoduleImport(SubmoduleImport { qualified_name })
if qualified_name.segments().starts_with(&module_path) =>
{
if let Some(source) = binding.source {
// Verify that `os` isn't bound in an inner scope.
if self
@@ -1199,7 +1200,6 @@ impl<'a> SemanticModel<'a> {
}
}
}
}
// Non-imports.
_ => {}
}
+8 -12
View File
@@ -159,27 +159,25 @@ impl SemanticModel<'_> {
// Allow comprehensions, even though we can't statically analyze them.
return (None, DunderAllFlags::empty());
}
Expr::Name(ast::ExprName { id, .. }) => {
Expr::Name(ast::ExprName { id, .. })
// Ex) `__all__ = __all__ + multiprocessing.__all__`
if id == "__all__" {
if id == "__all__" => {
return (None, DunderAllFlags::empty());
}
}
Expr::Attribute(ast::ExprAttribute { attr, .. }) => {
Expr::Attribute(ast::ExprAttribute { attr, .. })
// Ex) `__all__ = __all__ + multiprocessing.__all__`
if attr == "__all__" {
if attr == "__all__" => {
return (None, DunderAllFlags::empty());
}
}
Expr::Call(ast::ExprCall {
func, arguments, ..
}) => {
})
// Allow `tuple()`, `list()`, and their generic forms, like `list[int]()`.
if arguments.keywords.is_empty() && arguments.args.len() <= 1 {
if self
if arguments.keywords.is_empty() && arguments.args.len() <= 1
&& self
.resolve_builtin_symbol(map_subscript(func))
.is_some_and(|symbol| matches!(symbol, "tuple" | "list"))
{
=> {
let [arg] = arguments.args.as_ref() else {
return (None, DunderAllFlags::empty());
};
@@ -197,8 +195,6 @@ impl SemanticModel<'_> {
}
}
}
}
}
Expr::Named(ast::ExprNamed { value, .. }) => {
// Allow, e.g., `__all__ += (value := ["A", "B"])`.
return self.extract_dunder_all_elts(value);
+1 -8
View File
@@ -728,14 +728,7 @@ impl<'a> SimpleTokenizer<'a> {
SimpleTokenKind::At
}
}
'!' => {
if self.cursor.eat_char('=') {
SimpleTokenKind::NotEqual
} else {
self.bogus = true;
SimpleTokenKind::Other
}
}
'!' if self.cursor.eat_char('=') => SimpleTokenKind::NotEqual,
'~' => SimpleTokenKind::Tilde,
':' => {
if self.cursor.eat_char('=') {
+1 -1
View File
@@ -136,7 +136,7 @@ impl Client {
method.to_string(),
Value::Null,
)))
.map_err(|error| anyhow!("Failed to send notification (method={method}): {error}",))
.map_err(|error| anyhow!("Failed to send notification (method={method}): {error}"))
}
/// Sends a response to the client for a given request ID.
+1 -4
View File
@@ -80,11 +80,8 @@ fn delete_lines_pandas_html() {
},
];
let mut version = 2;
for change in changes {
for (version, change) in (2..).zip(changes) {
document.apply_changes(vec![change], version, PositionEncoding::UTF16);
version += 1;
}
insta::assert_snapshot!(document.contents());
+3 -3
View File
@@ -866,11 +866,11 @@ impl<'m> ContextCursor<'m> {
return Some(cause_ty);
}
}
ast::AnyNodeRef::ExceptHandlerExceptHandler(handler) => {
if handler.type_.as_deref().is_some_and(contains) {
ast::AnyNodeRef::ExceptHandlerExceptHandler(handler)
if handler.type_.as_deref().is_some_and(contains) =>
{
return Some(except_ty);
}
}
_ => {}
}
if node.is_statement() {
+2 -3
View File
@@ -440,12 +440,11 @@ impl SourceOrderVisitor<'_> for FoldingRangeVisitor<'_> {
AnyNodeRef::ExprList(list) => {
self.add_range(list.range());
}
AnyNodeRef::ExprTuple(tuple) => {
AnyNodeRef::ExprTuple(tuple)
// Only fold parenthesized tuples.
if tuple.parenthesized {
if tuple.parenthesized => {
self.add_range(tuple.range());
}
}
AnyNodeRef::ExprDict(dict) => {
self.add_range(dict.range());
}
+1 -1
View File
@@ -894,7 +894,7 @@ mod tests {
String::new()
};
format!("{inlay_hint_buf}{rendered_diagnostics}{fixes}",)
format!("{inlay_hint_buf}{rendered_diagnostics}{fixes}")
}
fn render_diagnostic<D>(&self, diagnostic: D) -> String
+12 -18
View File
@@ -617,24 +617,21 @@ impl LocalReferencesFinder<'_> {
}
}
}
AnyNodeRef::StmtAnnAssign(ann_assign) => {
AnyNodeRef::StmtAnnAssign(ann_assign)
// Check if our node is the target (left side) of annotated assignment
if Self::expr_contains_range(&ann_assign.target, covering_node.node().range()) {
if Self::expr_contains_range(&ann_assign.target, covering_node.node().range()) => {
return ReferenceKind::Write;
}
}
AnyNodeRef::StmtAugAssign(aug_assign) => {
AnyNodeRef::StmtAugAssign(aug_assign)
// Check if our node is the target (left side) of augmented assignment
if Self::expr_contains_range(&aug_assign.target, covering_node.node().range()) {
if Self::expr_contains_range(&aug_assign.target, covering_node.node().range()) => {
return ReferenceKind::Write;
}
}
// For loop targets are writes
AnyNodeRef::StmtFor(for_stmt) => {
if Self::expr_contains_range(&for_stmt.target, covering_node.node().range()) {
AnyNodeRef::StmtFor(for_stmt)
if Self::expr_contains_range(&for_stmt.target, covering_node.node().range()) => {
return ReferenceKind::Write;
}
}
// With statement targets are writes
AnyNodeRef::WithItem(with_item) => {
if let Some(optional_vars) = &with_item.optional_vars {
@@ -654,30 +651,27 @@ impl LocalReferencesFinder<'_> {
}
}
}
AnyNodeRef::StmtFunctionDef(func) => {
AnyNodeRef::StmtFunctionDef(func)
if Self::node_contains_range(
AnyNodeRef::from(&func.name),
covering_node.node().range(),
) {
) => {
return ReferenceKind::Other;
}
}
AnyNodeRef::StmtClassDef(class) => {
AnyNodeRef::StmtClassDef(class)
if Self::node_contains_range(
AnyNodeRef::from(&class.name),
covering_node.node().range(),
) {
) => {
return ReferenceKind::Other;
}
}
AnyNodeRef::Parameter(param) => {
AnyNodeRef::Parameter(param)
if Self::node_contains_range(
AnyNodeRef::from(&param.name),
covering_node.node().range(),
) {
) => {
return ReferenceKind::Other;
}
}
AnyNodeRef::StmtGlobal(_) | AnyNodeRef::StmtNonlocal(_) => {
return ReferenceKind::Other;
}
+1 -1
View File
@@ -2921,7 +2921,7 @@ class C: ...
.iter()
.map(|(_, symbol)| {
let mut snapshot =
format!("{name} :: {kind:?}", name = symbol.name, kind = symbol.kind,);
format!("{name} :: {kind:?}", name = symbol.name, kind = symbol.kind);
if let Some(ref imported_from) = symbol.imported_from {
snapshot = format!(
"{snapshot} :: Re-exported from `{module_name}`",
+1 -1
View File
@@ -424,7 +424,7 @@ impl LintRegistry {
/// Iterates over all removed lints.
pub fn removed(&self) -> impl Iterator<Item = LintId> + '_ {
self.by_name.iter().filter_map(|(_, value)| {
self.by_name.values().filter_map(|value| {
if let LintEntry::Removed(metadata) = value {
Some(*metadata)
} else {
@@ -2131,7 +2131,7 @@ impl<'db> Bindings<'db> {
};
let return_type = parse_struct_format(db, format_literal.value(db))
.map(|elements| Type::heterogeneous_tuple(db, elements.into_iter()))
.map(|elements| Type::heterogeneous_tuple(db, elements))
.unwrap_or_else(|| Type::homogeneous_tuple(db, Type::unknown()));
overload.set_return_type(return_type);
@@ -2684,30 +2684,30 @@ impl NodeId {
)?;
// Calling display_graph recursively here causes rustc to claim that the
// expect(unused) up above is unfulfilled!
write!(f, "\n{prefix}┡━₁ ",)?;
write!(f, "\n{prefix}┡━₁ ")?;
format_node(
db,
builder,
interior.if_true,
&format_args!("{prefix}",),
&format_args!("{prefix}"),
seen,
f,
)?;
write!(f, "\n{prefix}├─? ",)?;
write!(f, "\n{prefix}├─? ")?;
format_node(
db,
builder,
interior.if_uncertain,
&format_args!("{prefix}",),
&format_args!("{prefix}"),
seen,
f,
)?;
write!(f, "\n{prefix}└─₀ ",)?;
write!(f, "\n{prefix}└─₀ ")?;
format_node(
db,
builder,
interior.if_false,
&format_args!("{prefix} ",),
&format_args!("{prefix} "),
seen,
f,
)?;
@@ -4248,8 +4248,7 @@ pub(crate) fn report_invalid_exception_raised(
return;
};
if raise_type.is_notimplemented(context.db()) {
let mut diagnostic =
builder.into_diagnostic(format_args!("Cannot raise `NotImplemented`",));
let mut diagnostic = builder.into_diagnostic(format_args!("Cannot raise `NotImplemented`"));
diagnostic.set_primary_message("Did you mean `NotImplementedError`?");
diagnostic.info("Can only raise an instance or subclass of `BaseException`");
} else {
@@ -4798,7 +4797,7 @@ pub(crate) fn report_attempted_protocol_instantiation(
let db = context.db();
let class_name = protocol.name(db);
let mut diagnostic =
builder.into_diagnostic(format_args!("Cannot instantiate class `{class_name}`",));
builder.into_diagnostic(format_args!("Cannot instantiate class `{class_name}`"));
diagnostic.set_primary_message("This call will raise `TypeError` at runtime");
let mut class_def_diagnostic = SubDiagnostic::new(
@@ -4919,7 +4918,7 @@ pub(crate) fn report_undeclared_protocol_member(
);
class_def_diagnostic.annotate(
Annotation::primary(protocol_class.definition_span(db))
.message(format_args!("`{class_name}` declared as a protocol here",)),
.message(format_args!("`{class_name}` declared as a protocol here")),
);
diagnostic.sub(class_def_diagnostic);
@@ -4950,7 +4949,7 @@ pub(crate) fn report_duplicate_bases(
let duplicate_name = duplicate_base.name(db);
let mut diagnostic =
builder.into_diagnostic(format_args!("Duplicate base class `{duplicate_name}`",));
builder.into_diagnostic(format_args!("Duplicate base class `{duplicate_name}`"));
let mut sub_diagnostic = SubDiagnostic::new(
SubDiagnosticSeverity::Info,
@@ -5724,8 +5723,8 @@ pub(super) fn report_invalid_method_override<'db>(
"It is recommended for `{member}` to work with arbitrary objects, for example:",
),
format_args!(""),
format_args!(" def {member}(self, other: object) -> bool:",),
format_args!(" if not isinstance(other, {class_name}):",),
format_args!(" def {member}(self, other: object) -> bool:"),
format_args!(" if not isinstance(other, {class_name}):"),
format_args!(" return False"),
format_args!(" return <logic to compare two `{class_name}` instances>"),
format_args!(""),
@@ -1932,7 +1932,7 @@ impl KnownFunction {
diagnostic.annotate(
Annotation::secondary(context.span(&call_expression.arguments.args[0]))
.message(format_args!("Inferred type is `{}`", actual_ty.display(db),)),
.message(format_args!("Inferred type is `{}`", actual_ty.display(db))),
);
if actual_ty.is_subtype_of(db, *asserted_ty) {
@@ -979,7 +979,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// can.
error(
&self.context,
format_args!("Unknown keyword argument `{name}` in `TypeVar` creation",),
format_args!("Unknown keyword argument `{name}` in `TypeVar` creation"),
kwarg,
);
self.infer_expression(&kwarg.value, TypeContext::default());
@@ -1363,13 +1363,9 @@ impl<'c, 'db> TypeRelationChecker<'_, 'c, 'db> {
// Here, `fn` is positional-only parameter because of the `/` while `x` is a
// positional-or-keyword parameter.
loop {
let Some(EitherOrBoth::Both(source_param, target_param)) =
while let Some(EitherOrBoth::Both(source_param, target_param)) =
parameters.next()
else {
break;
};
{
match (source_param.kind(), target_param.kind()) {
(
ParameterKind::PositionalOnly {
@@ -1526,11 +1522,7 @@ impl<'c, 'db> TypeRelationChecker<'_, 'c, 'db> {
target_iter: target_prefix_params.iter(),
};
loop {
let Some(next_parameter) = parameters.next() else {
break;
};
while let Some(next_parameter) = parameters.next() {
match next_parameter {
EitherOrBoth::Left(_) => {
// If the non-Concatenate callable has remaining parameters, they
@@ -1604,11 +1596,7 @@ impl<'c, 'db> TypeRelationChecker<'_, 'c, 'db> {
return result;
}
loop {
let Some(target_param) = parameters.peek_target()
else {
break;
};
while let Some(target_param) = parameters.peek_target() {
if !check_types(
target_param.annotated_type(),
source_param.annotated_type(),
@@ -1683,11 +1671,7 @@ impl<'c, 'db> TypeRelationChecker<'_, 'c, 'db> {
};
if target.parameters.kind() != ParametersKind::Gradual {
loop {
let Some(next_parameter) = parameters.next() else {
break;
};
while let Some(next_parameter) = parameters.next() {
match next_parameter {
EitherOrBoth::Left(_) => {
return self.never();
@@ -1909,11 +1893,7 @@ impl<'c, 'db> TypeRelationChecker<'_, 'c, 'db> {
target_iter: target_prefix_params.iter(),
};
loop {
let Some(parameter) = parameters.next() else {
break;
};
while let Some(parameter) = parameters.next() {
match parameter {
EitherOrBoth::Left(_) => {
// Once the right (other) iterator is exhausted, all the remaining
@@ -1956,11 +1936,7 @@ impl<'c, 'db> TypeRelationChecker<'_, 'c, 'db> {
return result;
}
loop {
let Some(target_param) = parameters.peek_target()
else {
break;
};
while let Some(target_param) = parameters.peek_target() {
if !check_types(
target_param.annotated_type(),
source_param.annotated_type(),
@@ -2135,10 +2111,7 @@ impl<'c, 'db> TypeRelationChecker<'_, 'c, 'db> {
// So, any remaining positional parameters in `target` would need to be
// checked against the variadic parameter in `source`. This loop does
// that by only moving the `other` iterator forward.
loop {
let Some(target_parameter) = parameters.peek_target() else {
break;
};
while let Some(target_parameter) = parameters.peek_target() {
match target_parameter.kind() {
ParameterKind::PositionalOrKeyword { .. } => {
target_keywords.push(target_parameter);
@@ -306,11 +306,9 @@ impl ProgressReporterState<'_> {
let total = self.total_files;
#[expect(clippy::cast_possible_truncation)]
let percentage = if total > 0 {
Some((checked * 100 / total) as u32)
} else {
None
};
let percentage = (checked * 100)
.checked_div(total)
.map(|result| result as u32);
work_done.report_progress(format!("{checked}/{total} files"), percentage);
+1 -1
View File
@@ -750,7 +750,7 @@ impl TestServer {
Some("ty") => match serde_json::to_value(options) {
Ok(value) => value,
Err(err) => {
panic!("Failed to deserialize workspace configuration options: {err}",)
panic!("Failed to deserialize workspace configuration options: {err}")
}
},
Some(section) => {
@@ -675,7 +675,7 @@ fn condensed_full_document_diagnostic_report(report: FullDocumentDiagnosticRepor
Some(DiagnosticSeverity::HINT) => "HINT",
None | Some(_) => "unknown",
};
format!("{range}[{severity}]: {message}", message = d.message,)
format!("{range}[{severity}]: {message}", message = d.message)
})
.collect()
}
+1 -1
View File
@@ -686,7 +686,7 @@ impl std::fmt::Display for ModuleInconsistency<'_> {
" when listing modules, but `resolve_module` returned `None`",
)?,
Some(ref got) => {
write!(f, " when listing modules, but `resolve_module` returned ",)?;
write!(f, " when listing modules, but `resolve_module` returned ")?;
fmt_module(self.db, f, got)?;
}
}
+1 -1
View File
@@ -1,2 +1,2 @@
[toolchain]
channel = "1.94"
channel = "1.95"