Compare commits
17 Commits
0.6.0
...
charlie/fo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
400732a655 | ||
|
|
a9847af6e8 | ||
|
|
d61d75d4fa | ||
|
|
499c0bd875 | ||
|
|
4cb30b598f | ||
|
|
aba0d83c11 | ||
|
|
c319414e54 | ||
|
|
ef1f6d98a0 | ||
|
|
b850b812de | ||
|
|
a87b27c075 | ||
|
|
9b73532b11 | ||
|
|
d8debb7a36 | ||
|
|
bd4a947b29 | ||
|
|
f121f8b31b | ||
|
|
80efb865e9 | ||
|
|
52d27befe8 | ||
|
|
6ed06afd28 |
24
CHANGELOG.md
24
CHANGELOG.md
@@ -1,5 +1,29 @@
|
||||
# Changelog
|
||||
|
||||
## 0.6.1
|
||||
|
||||
This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,
|
||||
Ruff changed its behavior to lint and format Jupyter notebooks by default;
|
||||
however, due to an oversight, these files were still excluded by default if
|
||||
Ruff was run via pre-commit, leading to inconsistent behavior.
|
||||
This has [now been fixed](https://github.com/astral-sh/ruff-pre-commit/pull/96).
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`fastapi`\] Implement `fast-api-unused-path-parameter` (`FAST003`) ([#12638](https://github.com/astral-sh/ruff/pull/12638))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pylint`\] Rename `too-many-positional` to `too-many-positional-arguments` (`R0917`) ([#12905](https://github.com/astral-sh/ruff/pull/12905))
|
||||
|
||||
### Server
|
||||
|
||||
- Fix crash when applying "fix-all" code-action to notebook cells ([#12929](https://github.com/astral-sh/ruff/pull/12929))
|
||||
|
||||
### Other changes
|
||||
|
||||
- \[`flake8-naming`\]: Respect import conventions (`N817`) ([#12922](https://github.com/astral-sh/ruff/pull/12922))
|
||||
|
||||
## 0.6.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!
|
||||
|
||||
@@ -361,7 +361,7 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
||||
downstream jobs manually if needed.
|
||||
1. Verify the GitHub release:
|
||||
1. The Changelog should match the content of `CHANGELOG.md`
|
||||
1. Append the contributors from the `bump.sh` script
|
||||
1. Append the contributors from the `scripts/release.sh` script
|
||||
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
|
||||
1. One can determine if an update is needed when
|
||||
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
|
||||
|
||||
36
Cargo.lock
generated
36
Cargo.lock
generated
@@ -846,6 +846,12 @@ version = "1.0.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
|
||||
|
||||
[[package]]
|
||||
name = "foldhash"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4deb59dd6330afa472c000b86c0c9ada26274836eb59563506c3e34e4bb9a819"
|
||||
|
||||
[[package]]
|
||||
name = "form_urlencoded"
|
||||
version = "1.2.1"
|
||||
@@ -1916,6 +1922,7 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"crossbeam",
|
||||
"foldhash",
|
||||
"jod-thread",
|
||||
"libc",
|
||||
"lsp-server",
|
||||
@@ -1928,7 +1935,6 @@ dependencies = [
|
||||
"ruff_python_ast",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
@@ -1958,13 +1964,13 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"crossbeam",
|
||||
"foldhash",
|
||||
"notify",
|
||||
"red_knot_python_semantic",
|
||||
"ruff_cache",
|
||||
"ruff_db",
|
||||
"ruff_python_ast",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"thiserror",
|
||||
"tracing",
|
||||
@@ -2060,7 +2066,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.6.0"
|
||||
version = "0.6.1"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
@@ -2073,6 +2079,7 @@ dependencies = [
|
||||
"clearscreen",
|
||||
"colored",
|
||||
"filetime",
|
||||
"foldhash",
|
||||
"ignore",
|
||||
"insta",
|
||||
"insta-cmd",
|
||||
@@ -2095,7 +2102,6 @@ dependencies = [
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"ruff_workspace",
|
||||
"rustc-hash 2.0.0",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
@@ -2154,6 +2160,7 @@ dependencies = [
|
||||
"countme",
|
||||
"dashmap 6.0.1",
|
||||
"filetime",
|
||||
"foldhash",
|
||||
"ignore",
|
||||
"insta",
|
||||
"matchit",
|
||||
@@ -2165,7 +2172,6 @@ dependencies = [
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"tempfile",
|
||||
"thiserror",
|
||||
@@ -2231,10 +2237,10 @@ name = "ruff_formatter"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"drop_bomb",
|
||||
"foldhash",
|
||||
"ruff_cache",
|
||||
"ruff_macros",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
"static_assertions",
|
||||
@@ -2252,7 +2258,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.6.0"
|
||||
version = "0.6.1"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"annotate-snippets 0.9.2",
|
||||
@@ -2262,6 +2268,7 @@ dependencies = [
|
||||
"clap",
|
||||
"colored",
|
||||
"fern",
|
||||
"foldhash",
|
||||
"glob",
|
||||
"globset",
|
||||
"imperative",
|
||||
@@ -2294,7 +2301,6 @@ dependencies = [
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -2349,6 +2355,7 @@ dependencies = [
|
||||
"aho-corasick",
|
||||
"bitflags 2.6.0",
|
||||
"compact_str",
|
||||
"foldhash",
|
||||
"is-macro",
|
||||
"itertools 0.13.0",
|
||||
"once_cell",
|
||||
@@ -2357,7 +2364,6 @@ dependencies = [
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
]
|
||||
@@ -2392,6 +2398,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"countme",
|
||||
"foldhash",
|
||||
"insta",
|
||||
"itertools 0.13.0",
|
||||
"memchr",
|
||||
@@ -2405,7 +2412,6 @@ dependencies = [
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -2446,13 +2452,13 @@ dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
"bstr",
|
||||
"compact_str",
|
||||
"foldhash",
|
||||
"insta",
|
||||
"memchr",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"static_assertions",
|
||||
"unicode-ident",
|
||||
"unicode-normalization",
|
||||
@@ -2475,6 +2481,7 @@ name = "ruff_python_semantic"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
"foldhash",
|
||||
"is-macro",
|
||||
"ruff_cache",
|
||||
"ruff_index",
|
||||
@@ -2484,7 +2491,6 @@ dependencies = [
|
||||
"ruff_python_stdlib",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
]
|
||||
@@ -2523,6 +2529,7 @@ version = "0.2.2"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"crossbeam",
|
||||
"foldhash",
|
||||
"ignore",
|
||||
"insta",
|
||||
"jod-thread",
|
||||
@@ -2542,7 +2549,6 @@ dependencies = [
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"ruff_workspace",
|
||||
"rustc-hash 2.0.0",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
@@ -2572,7 +2578,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.6.0"
|
||||
version = "0.6.1"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
@@ -2602,6 +2608,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"colored",
|
||||
"etcetera",
|
||||
"foldhash",
|
||||
"glob",
|
||||
"globset",
|
||||
"ignore",
|
||||
@@ -2621,7 +2628,6 @@ dependencies = [
|
||||
"ruff_python_formatter",
|
||||
"ruff_python_semantic",
|
||||
"ruff_source_file",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
"shellexpand",
|
||||
|
||||
@@ -105,9 +105,10 @@ pyproject-toml = { version = "0.9.0" }
|
||||
quick-junit = { version = "0.4.0" }
|
||||
quote = { version = "1.0.23" }
|
||||
rand = { version = "0.8.5" }
|
||||
rustc-hash = { version = "2.0.0" }
|
||||
rayon = { version = "1.10.0" }
|
||||
regex = { version = "1.10.2" }
|
||||
rustc-hash = { version = "2.0.0" }
|
||||
foldhash = { version = "0.1.0" }
|
||||
salsa = { git = "https://github.com/MichaReiser/salsa.git", tag = "red-knot-0.0.1" }
|
||||
schemars = { version = "0.8.16" }
|
||||
seahash = { version = "4.1.0" }
|
||||
|
||||
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
||||
|
||||
# For a specific version.
|
||||
curl -LsSf https://astral.sh/ruff/0.6.0/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.6.0/install.ps1 | iex"
|
||||
curl -LsSf https://astral.sh/ruff/0.6.1/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.6.1/install.ps1 | iex"
|
||||
```
|
||||
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.6.0
|
||||
rev: v0.6.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
|
||||
@@ -168,6 +168,24 @@ impl ModuleName {
|
||||
};
|
||||
Some(Self(name))
|
||||
}
|
||||
|
||||
/// Extend `self` with the components of `other`
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// let mut module_name = ModuleName::new_static("foo").unwrap();
|
||||
/// module_name.extend(&ModuleName::new_static("bar").unwrap());
|
||||
/// assert_eq!(&module_name, "foo.bar");
|
||||
/// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
|
||||
/// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
|
||||
/// ```
|
||||
pub fn extend(&mut self, other: &ModuleName) {
|
||||
self.0.push('.');
|
||||
self.0.push_str(other);
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for ModuleName {
|
||||
|
||||
@@ -2,7 +2,7 @@ use std::iter::FusedIterator;
|
||||
|
||||
pub(crate) use module::Module;
|
||||
pub use resolver::resolve_module;
|
||||
pub(crate) use resolver::SearchPaths;
|
||||
pub(crate) use resolver::{file_to_module, SearchPaths};
|
||||
use ruff_db::system::SystemPath;
|
||||
pub use typeshed::vendored_typeshed_stubs;
|
||||
|
||||
|
||||
@@ -77,3 +77,9 @@ pub enum ModuleKind {
|
||||
/// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
|
||||
Package,
|
||||
}
|
||||
|
||||
impl ModuleKind {
|
||||
pub const fn is_package(self) -> bool {
|
||||
matches!(self, ModuleKind::Package)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -528,6 +528,103 @@ y = 2
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_parameter_symbols() {
|
||||
let TestCase { db, file } = test_case(
|
||||
"
|
||||
def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
|
||||
pass
|
||||
",
|
||||
);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert_eq!(names(&global_table), vec!["f", "str", "int"]);
|
||||
|
||||
let [(function_scope_id, _function_scope)] = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("Expected a function scope")
|
||||
};
|
||||
|
||||
let function_table = index.symbol_table(function_scope_id);
|
||||
assert_eq!(
|
||||
names(&function_table),
|
||||
vec!["a", "b", "c", "args", "d", "kwargs"],
|
||||
);
|
||||
|
||||
let use_def = index.use_def_map(function_scope_id);
|
||||
for name in ["a", "b", "c", "d"] {
|
||||
let [definition] = use_def.public_definitions(
|
||||
function_table
|
||||
.symbol_id_by_name(name)
|
||||
.expect("symbol exists"),
|
||||
) else {
|
||||
panic!("Expected parameter definition for {name}");
|
||||
};
|
||||
assert!(matches!(
|
||||
definition.node(&db),
|
||||
DefinitionKind::ParameterWithDefault(_)
|
||||
));
|
||||
}
|
||||
for name in ["args", "kwargs"] {
|
||||
let [definition] = use_def.public_definitions(
|
||||
function_table
|
||||
.symbol_id_by_name(name)
|
||||
.expect("symbol exists"),
|
||||
) else {
|
||||
panic!("Expected parameter definition for {name}");
|
||||
};
|
||||
assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lambda_parameter_symbols() {
|
||||
let TestCase { db, file } = test_case("lambda a, b, c=1, *args, d=2, **kwargs: None");
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert!(names(&global_table).is_empty());
|
||||
|
||||
let [(lambda_scope_id, _lambda_scope)] = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("Expected a lambda scope")
|
||||
};
|
||||
|
||||
let lambda_table = index.symbol_table(lambda_scope_id);
|
||||
assert_eq!(
|
||||
names(&lambda_table),
|
||||
vec!["a", "b", "c", "args", "d", "kwargs"],
|
||||
);
|
||||
|
||||
let use_def = index.use_def_map(lambda_scope_id);
|
||||
for name in ["a", "b", "c", "d"] {
|
||||
let [definition] = use_def
|
||||
.public_definitions(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
|
||||
else {
|
||||
panic!("Expected parameter definition for {name}");
|
||||
};
|
||||
assert!(matches!(
|
||||
definition.node(&db),
|
||||
DefinitionKind::ParameterWithDefault(_)
|
||||
));
|
||||
}
|
||||
for name in ["args", "kwargs"] {
|
||||
let [definition] = use_def
|
||||
.public_definitions(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
|
||||
else {
|
||||
panic!("Expected parameter definition for {name}");
|
||||
};
|
||||
assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
|
||||
}
|
||||
}
|
||||
|
||||
/// Test case to validate that the comprehension scope is correctly identified and that the target
|
||||
/// variable is defined only in the comprehension scope and not in the global scope.
|
||||
#[test]
|
||||
|
||||
@@ -368,6 +368,16 @@ where
|
||||
.add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
|
||||
self.add_definition(symbol, function_def);
|
||||
|
||||
// The default value of the parameters needs to be evaluated in the
|
||||
// enclosing scope.
|
||||
for default in function_def
|
||||
.parameters
|
||||
.iter_non_variadic_params()
|
||||
.filter_map(|param| param.default.as_deref())
|
||||
{
|
||||
self.visit_expr(default);
|
||||
}
|
||||
|
||||
self.with_type_params(
|
||||
NodeWithScopeRef::FunctionTypeParameters(function_def),
|
||||
function_def.type_params.as_deref(),
|
||||
@@ -378,6 +388,16 @@ where
|
||||
}
|
||||
|
||||
builder.push_scope(NodeWithScopeRef::Function(function_def));
|
||||
|
||||
// Add symbols and definitions for the parameters to the function scope.
|
||||
for parameter in &*function_def.parameters {
|
||||
let symbol = builder.add_or_update_symbol(
|
||||
parameter.name().id().clone(),
|
||||
SymbolFlags::IS_DEFINED,
|
||||
);
|
||||
builder.add_definition(symbol, parameter);
|
||||
}
|
||||
|
||||
builder.visit_body(&function_def.body);
|
||||
builder.pop_scope()
|
||||
},
|
||||
@@ -574,9 +594,29 @@ where
|
||||
}
|
||||
ast::Expr::Lambda(lambda) => {
|
||||
if let Some(parameters) = &lambda.parameters {
|
||||
// The default value of the parameters needs to be evaluated in the
|
||||
// enclosing scope.
|
||||
for default in parameters
|
||||
.iter_non_variadic_params()
|
||||
.filter_map(|param| param.default.as_deref())
|
||||
{
|
||||
self.visit_expr(default);
|
||||
}
|
||||
self.visit_parameters(parameters);
|
||||
}
|
||||
self.push_scope(NodeWithScopeRef::Lambda(lambda));
|
||||
|
||||
// Add symbols and definitions for the parameters to the lambda scope.
|
||||
if let Some(parameters) = &lambda.parameters {
|
||||
for parameter in &**parameters {
|
||||
let symbol = self.add_or_update_symbol(
|
||||
parameter.name().id().clone(),
|
||||
SymbolFlags::IS_DEFINED,
|
||||
);
|
||||
self.add_definition(symbol, parameter);
|
||||
}
|
||||
}
|
||||
|
||||
self.visit_expr(lambda.body.as_ref());
|
||||
}
|
||||
ast::Expr::If(ast::ExprIf {
|
||||
@@ -654,6 +694,14 @@ where
|
||||
self.pop_scope();
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_parameters(&mut self, parameters: &'ast ruff_python_ast::Parameters) {
|
||||
// Intentionally avoid walking default expressions, as we handle them in the enclosing
|
||||
// scope.
|
||||
for parameter in parameters.iter().map(ast::AnyParameterRef::as_parameter) {
|
||||
self.visit_parameter(parameter);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
|
||||
@@ -45,6 +45,7 @@ pub(crate) enum DefinitionNodeRef<'a> {
|
||||
Assignment(AssignmentDefinitionNodeRef<'a>),
|
||||
AnnotatedAssignment(&'a ast::StmtAnnAssign),
|
||||
Comprehension(ComprehensionDefinitionNodeRef<'a>),
|
||||
Parameter(ast::AnyParameterRef<'a>),
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
|
||||
@@ -95,6 +96,12 @@ impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node: ast::AnyParameterRef<'a>) -> Self {
|
||||
Self::Parameter(node)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
|
||||
pub(crate) node: &'a ast::StmtImportFrom,
|
||||
@@ -150,6 +157,14 @@ impl DefinitionNodeRef<'_> {
|
||||
first,
|
||||
})
|
||||
}
|
||||
DefinitionNodeRef::Parameter(parameter) => match parameter {
|
||||
ast::AnyParameterRef::Variadic(parameter) => {
|
||||
DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
|
||||
}
|
||||
ast::AnyParameterRef::NonVariadic(parameter) => {
|
||||
DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -168,6 +183,10 @@ impl DefinitionNodeRef<'_> {
|
||||
}) => target.into(),
|
||||
Self::AnnotatedAssignment(node) => node.into(),
|
||||
Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
|
||||
Self::Parameter(node) => match node {
|
||||
ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
|
||||
ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -182,6 +201,8 @@ pub enum DefinitionKind {
|
||||
Assignment(AssignmentDefinitionKind),
|
||||
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
|
||||
Comprehension(ComprehensionDefinitionKind),
|
||||
Parameter(AstNodeRef<ast::Parameter>),
|
||||
ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
@@ -273,3 +294,15 @@ impl From<&ast::Comprehension> for DefinitionNodeKey {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::Parameter> for DefinitionNodeKey {
|
||||
fn from(node: &ast::Parameter) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
|
||||
fn from(node: &ast::ParameterWithDefault) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -113,7 +113,7 @@ pub enum Type<'db> {
|
||||
Any,
|
||||
/// the empty set of values
|
||||
Never,
|
||||
/// unknown type (no annotation)
|
||||
/// unknown type (either no annotation, or some kind of type error)
|
||||
/// equivalent to Any, or possibly to object in strict mode
|
||||
Unknown,
|
||||
/// name does not exist or is not bound to any value (this represents an error, but with some
|
||||
@@ -149,6 +149,35 @@ impl<'db> Type<'db> {
|
||||
matches!(self, Type::Unknown)
|
||||
}
|
||||
|
||||
pub const fn is_never(&self) -> bool {
|
||||
matches!(self, Type::Never)
|
||||
}
|
||||
|
||||
pub fn may_be_unbound(&self, db: &'db dyn Db) -> bool {
|
||||
match self {
|
||||
Type::Unbound => true,
|
||||
Type::Union(union) => union.contains(db, Type::Unbound),
|
||||
// Unbound can't appear in an intersection, because an intersection with Unbound
|
||||
// simplifies to just Unbound.
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn replace_unbound_with(&self, db: &'db dyn Db, replacement: Type<'db>) -> Type<'db> {
|
||||
match self {
|
||||
Type::Unbound => replacement,
|
||||
Type::Union(union) => union
|
||||
.elements(db)
|
||||
.into_iter()
|
||||
.fold(UnionBuilder::new(db), |builder, ty| {
|
||||
builder.add(ty.replace_unbound_with(db, replacement))
|
||||
})
|
||||
.build(),
|
||||
ty => *ty,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn member(&self, db: &'db dyn Db, name: &Name) -> Type<'db> {
|
||||
match self {
|
||||
|
||||
@@ -201,6 +201,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
self.negative.retain(|elem| !pos.contains(elem));
|
||||
}
|
||||
Type::Never => {}
|
||||
Type::Unbound => {}
|
||||
_ => {
|
||||
if !self.positive.remove(&ty) {
|
||||
self.negative.insert(ty);
|
||||
@@ -214,9 +215,13 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
|
||||
// Never is a subtype of all types
|
||||
if self.positive.contains(&Type::Never) {
|
||||
self.positive.clear();
|
||||
self.positive.retain(Type::is_never);
|
||||
self.negative.clear();
|
||||
}
|
||||
|
||||
if self.positive.contains(&Type::Unbound) {
|
||||
self.positive.retain(Type::is_unbound);
|
||||
self.negative.clear();
|
||||
self.positive.insert(Type::Never);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -426,4 +431,26 @@ mod tests {
|
||||
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_positive_unbound() {
|
||||
let db = setup_db();
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unbound)
|
||||
.add_positive(Type::IntLiteral(1))
|
||||
.build();
|
||||
|
||||
assert_eq!(ty, Type::Unbound);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_negative_unbound() {
|
||||
let db = setup_db();
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::Unbound)
|
||||
.add_positive(Type::IntLiteral(1))
|
||||
.build();
|
||||
|
||||
assert_eq!(ty, Type::IntLiteral(1));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,6 +20,8 @@
|
||||
//!
|
||||
//! Inferring types at any of the three region granularities returns a [`TypeInference`], which
|
||||
//! holds types for every [`Definition`] and expression within the inferred region.
|
||||
use std::num::NonZeroU32;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
use salsa;
|
||||
use salsa::plumbing::AsId;
|
||||
@@ -31,7 +33,7 @@ use ruff_python_ast::{ExprContext, TypeParams};
|
||||
|
||||
use crate::builtins::builtins_scope;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::resolve_module;
|
||||
use crate::module_resolver::{file_to_module, resolve_module};
|
||||
use crate::semantic_index::ast_ids::{HasScopedAstId, HasScopedUseId, ScopedExpressionId};
|
||||
use crate::semantic_index::definition::{Definition, DefinitionKind, DefinitionNodeKey};
|
||||
use crate::semantic_index::expression::Expression;
|
||||
@@ -307,6 +309,12 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
definition,
|
||||
);
|
||||
}
|
||||
DefinitionKind::Parameter(parameter) => {
|
||||
self.infer_parameter_definition(parameter, definition);
|
||||
}
|
||||
DefinitionKind::ParameterWithDefault(parameter_with_default) => {
|
||||
self.infer_parameter_with_default_definition(parameter_with_default, definition);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -421,6 +429,13 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
.map(|decorator| self.infer_decorator(decorator))
|
||||
.collect();
|
||||
|
||||
for default in parameters
|
||||
.iter_non_variadic_params()
|
||||
.filter_map(|param| param.default.as_deref())
|
||||
{
|
||||
self.infer_expression(default);
|
||||
}
|
||||
|
||||
// If there are type params, parameters and returns are evaluated in that scope.
|
||||
if type_params.is_none() {
|
||||
self.infer_parameters(parameters);
|
||||
@@ -458,10 +473,12 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let ast::ParameterWithDefault {
|
||||
range: _,
|
||||
parameter,
|
||||
default,
|
||||
default: _,
|
||||
} = parameter_with_default;
|
||||
self.infer_parameter(parameter);
|
||||
self.infer_optional_expression(default.as_deref());
|
||||
|
||||
self.infer_optional_expression(parameter.annotation.as_deref());
|
||||
|
||||
self.infer_definition(parameter_with_default);
|
||||
}
|
||||
|
||||
fn infer_parameter(&mut self, parameter: &ast::Parameter) {
|
||||
@@ -470,7 +487,29 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
name: _,
|
||||
annotation,
|
||||
} = parameter;
|
||||
|
||||
self.infer_optional_expression(annotation.as_deref());
|
||||
|
||||
self.infer_definition(parameter);
|
||||
}
|
||||
|
||||
fn infer_parameter_with_default_definition(
|
||||
&mut self,
|
||||
_parameter_with_default: &ast::ParameterWithDefault,
|
||||
definition: Definition<'db>,
|
||||
) {
|
||||
// TODO(dhruvmanila): Infer types from annotation or default expression
|
||||
self.types.definitions.insert(definition, Type::Unknown);
|
||||
}
|
||||
|
||||
fn infer_parameter_definition(
|
||||
&mut self,
|
||||
_parameter: &ast::Parameter,
|
||||
definition: Definition<'db>,
|
||||
) {
|
||||
// TODO(dhruvmanila): Annotation expression is resolved at the enclosing scope, infer the
|
||||
// parameter type from there
|
||||
self.types.definitions.insert(definition, Type::Unknown);
|
||||
}
|
||||
|
||||
fn infer_class_definition_statement(&mut self, class: &ast::StmtClassDef) {
|
||||
@@ -785,7 +824,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
asname: _,
|
||||
} = alias;
|
||||
|
||||
let module_ty = self.module_ty_from_name(name);
|
||||
let module_ty = self.module_ty_from_name(ModuleName::new(name));
|
||||
self.types.definitions.insert(definition, module_ty);
|
||||
}
|
||||
|
||||
@@ -823,27 +862,82 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_optional_expression(cause.as_deref());
|
||||
}
|
||||
|
||||
/// Given a `from .foo import bar` relative import, resolve the relative module
|
||||
/// we're importing `bar` from into an absolute [`ModuleName`]
|
||||
/// using the name of the module we're currently analyzing.
|
||||
///
|
||||
/// - `level` is the number of dots at the beginning of the relative module name:
|
||||
/// - `from .foo.bar import baz` => `level == 1`
|
||||
/// - `from ...foo.bar import baz` => `level == 3`
|
||||
/// - `tail` is the relative module name stripped of all leading dots:
|
||||
/// - `from .foo import bar` => `tail == "foo"`
|
||||
/// - `from ..foo.bar import baz` => `tail == "foo.bar"`
|
||||
fn relative_module_name(&self, tail: Option<&str>, level: NonZeroU32) -> Option<ModuleName> {
|
||||
let Some(module) = file_to_module(self.db, self.file) else {
|
||||
tracing::debug!("Failed to resolve file {:?} to a module", self.file);
|
||||
return None;
|
||||
};
|
||||
let mut level = level.get();
|
||||
if module.kind().is_package() {
|
||||
level -= 1;
|
||||
}
|
||||
let mut module_name = module.name().to_owned();
|
||||
for _ in 0..level {
|
||||
module_name = module_name.parent()?;
|
||||
}
|
||||
if let Some(tail) = tail {
|
||||
if let Some(valid_tail) = ModuleName::new(tail) {
|
||||
module_name.extend(&valid_tail);
|
||||
} else {
|
||||
tracing::debug!("Failed to resolve relative import due to invalid syntax");
|
||||
return None;
|
||||
}
|
||||
}
|
||||
Some(module_name)
|
||||
}
|
||||
|
||||
fn infer_import_from_definition(
|
||||
&mut self,
|
||||
import_from: &ast::StmtImportFrom,
|
||||
alias: &ast::Alias,
|
||||
definition: Definition<'db>,
|
||||
) {
|
||||
let ast::StmtImportFrom { module, .. } = import_from;
|
||||
let module_ty = if let Some(module) = module {
|
||||
self.module_ty_from_name(module)
|
||||
// TODO:
|
||||
// - Absolute `*` imports (`from collections import *`)
|
||||
// - Relative `*` imports (`from ...foo import *`)
|
||||
// - Submodule imports (`from collections import abc`,
|
||||
// where `abc` is a submodule of the `collections` package)
|
||||
//
|
||||
// For the last item, see the currently skipped tests
|
||||
// `follow_relative_import_bare_to_module()` and
|
||||
// `follow_nonexistent_import_bare_to_module()`.
|
||||
let ast::StmtImportFrom { module, level, .. } = import_from;
|
||||
tracing::trace!("Resolving imported object {alias:?} from statement {import_from:?}");
|
||||
let module_name = if let Some(level) = NonZeroU32::new(*level) {
|
||||
self.relative_module_name(module.as_deref(), level)
|
||||
} else {
|
||||
// TODO support relative imports
|
||||
Type::Unknown
|
||||
let module_name = module
|
||||
.as_ref()
|
||||
.expect("Non-relative import should always have a non-None `module`!");
|
||||
ModuleName::new(module_name)
|
||||
};
|
||||
|
||||
let module_ty = self.module_ty_from_name(module_name);
|
||||
|
||||
let ast::Alias {
|
||||
range: _,
|
||||
name,
|
||||
asname: _,
|
||||
} = alias;
|
||||
|
||||
let ty = module_ty.member(self.db, &Name::new(&name.id));
|
||||
// If a symbol is unbound in the module the symbol was originally defined in,
|
||||
// when we're trying to import the symbol from that module into "our" module,
|
||||
// the runtime error will occur immediately (rather than when the symbol is *used*,
|
||||
// as would be the case for a symbol with type `Unbound`), so it's appropriate to
|
||||
// think of the type of the imported symbol as `Unknown` rather than `Unbound`
|
||||
let ty = module_ty
|
||||
.member(self.db, &Name::new(&name.id))
|
||||
.replace_unbound_with(self.db, Type::Unknown);
|
||||
|
||||
self.types.definitions.insert(definition, ty);
|
||||
}
|
||||
@@ -859,11 +953,10 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
fn module_ty_from_name(&self, name: &ast::Identifier) -> Type<'db> {
|
||||
let module = ModuleName::new(&name.id).and_then(|name| resolve_module(self.db, name));
|
||||
module
|
||||
.map(|module| Type::Module(module.file()))
|
||||
.unwrap_or(Type::Unbound)
|
||||
fn module_ty_from_name(&self, module_name: Option<ModuleName>) -> Type<'db> {
|
||||
module_name
|
||||
.and_then(|module_name| resolve_module(self.db, module_name))
|
||||
.map_or(Type::Unknown, |module| Type::Module(module.file()))
|
||||
}
|
||||
|
||||
fn infer_decorator(&mut self, decorator: &ast::Decorator) -> Type<'db> {
|
||||
@@ -1277,6 +1370,13 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
} = lambda_expression;
|
||||
|
||||
if let Some(parameters) = parameters {
|
||||
for default in parameters
|
||||
.iter_non_variadic_params()
|
||||
.filter_map(|param| param.default.as_deref())
|
||||
{
|
||||
self.infer_expression(default);
|
||||
}
|
||||
|
||||
self.infer_parameters(parameters);
|
||||
}
|
||||
|
||||
@@ -1354,18 +1454,22 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let symbol = symbols.symbol_by_name(id).unwrap();
|
||||
if !symbol.is_defined() || !self.scope.is_function_like(self.db) {
|
||||
// implicit global
|
||||
let mut unbound_ty = if file_scope_id == FileScopeId::global() {
|
||||
let unbound_ty = if file_scope_id == FileScopeId::global() {
|
||||
Type::Unbound
|
||||
} else {
|
||||
global_symbol_ty_by_name(self.db, self.file, id)
|
||||
};
|
||||
// fallback to builtins
|
||||
if matches!(unbound_ty, Type::Unbound)
|
||||
if unbound_ty.may_be_unbound(self.db)
|
||||
&& Some(self.scope) != builtins_scope(self.db)
|
||||
{
|
||||
unbound_ty = builtins_symbol_ty_by_name(self.db, id);
|
||||
Some(unbound_ty.replace_unbound_with(
|
||||
self.db,
|
||||
builtins_symbol_ty_by_name(self.db, id),
|
||||
))
|
||||
} else {
|
||||
Some(unbound_ty)
|
||||
}
|
||||
Some(unbound_ty)
|
||||
} else {
|
||||
Some(Type::Unbound)
|
||||
}
|
||||
@@ -1662,6 +1766,166 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_relative_import_simple() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
("src/package/__init__.py", ""),
|
||||
("src/package/foo.py", "X = 42"),
|
||||
("src/package/bar.py", "from .foo import X"),
|
||||
])?;
|
||||
|
||||
assert_public_ty(&db, "src/package/bar.py", "X", "Literal[42]");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_nonexistent_relative_import_simple() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
("src/package/__init__.py", ""),
|
||||
("src/package/bar.py", "from .foo import X"),
|
||||
])?;
|
||||
|
||||
assert_public_ty(&db, "src/package/bar.py", "X", "Unknown");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_relative_import_dotted() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
("src/package/__init__.py", ""),
|
||||
("src/package/foo/bar/baz.py", "X = 42"),
|
||||
("src/package/bar.py", "from .foo.bar.baz import X"),
|
||||
])?;
|
||||
|
||||
assert_public_ty(&db, "src/package/bar.py", "X", "Literal[42]");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_relative_import_bare_to_package() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
("src/package/__init__.py", "X = 42"),
|
||||
("src/package/bar.py", "from . import X"),
|
||||
])?;
|
||||
|
||||
assert_public_ty(&db, "src/package/bar.py", "X", "Literal[42]");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_nonexistent_relative_import_bare_to_package() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
db.write_files([("src/package/bar.py", "from . import X")])?;
|
||||
assert_public_ty(&db, "src/package/bar.py", "X", "Unknown");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[ignore = "TODO: Submodule imports possibly not supported right now?"]
|
||||
#[test]
|
||||
fn follow_relative_import_bare_to_module() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
("src/package/__init__.py", ""),
|
||||
("src/package/foo.py", "X = 42"),
|
||||
("src/package/bar.py", "from . import foo; y = foo.X"),
|
||||
])?;
|
||||
|
||||
assert_public_ty(&db, "src/package/bar.py", "y", "Literal[42]");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[ignore = "TODO: Submodule imports possibly not supported right now?"]
|
||||
#[test]
|
||||
fn follow_nonexistent_import_bare_to_module() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
("src/package/__init__.py", ""),
|
||||
("src/package/bar.py", "from . import foo"),
|
||||
])?;
|
||||
|
||||
assert_public_ty(&db, "src/package/bar.py", "foo", "Unknown");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_relative_import_from_dunder_init() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
("src/package/__init__.py", "from .foo import X"),
|
||||
("src/package/foo.py", "X = 42"),
|
||||
])?;
|
||||
|
||||
assert_public_ty(&db, "src/package/__init__.py", "X", "Literal[42]");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_nonexistent_relative_import_from_dunder_init() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
db.write_files([("src/package/__init__.py", "from .foo import X")])?;
|
||||
assert_public_ty(&db, "src/package/__init__.py", "X", "Unknown");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_very_relative_import() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
("src/package/__init__.py", ""),
|
||||
("src/package/foo.py", "X = 42"),
|
||||
(
|
||||
"src/package/subpackage/subsubpackage/bar.py",
|
||||
"from ...foo import X",
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_public_ty(
|
||||
&db,
|
||||
"src/package/subpackage/subsubpackage/bar.py",
|
||||
"X",
|
||||
"Literal[42]",
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn imported_unbound_symbol_is_unknown() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
("src/package/__init__.py", ""),
|
||||
("src/package/foo.py", "x"),
|
||||
("src/package/bar.py", "from package.foo import x"),
|
||||
])?;
|
||||
|
||||
// the type as seen from external modules (`Unknown`)
|
||||
// is different from the type inside the module itself (`Unbound`):
|
||||
assert_public_ty(&db, "src/package/foo.py", "x", "Unbound");
|
||||
assert_public_ty(&db, "src/package/bar.py", "x", "Unknown");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_base_class_by_name() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
@@ -2163,6 +2427,38 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conditionally_global_or_builtin() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_dedented(
|
||||
"/src/a.py",
|
||||
"
|
||||
if flag:
|
||||
copyright = 1
|
||||
def f():
|
||||
y = copyright
|
||||
",
|
||||
)?;
|
||||
|
||||
let file = system_path_to_file(&db, "src/a.py").expect("Expected file to exist.");
|
||||
let index = semantic_index(&db, file);
|
||||
let function_scope = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.next()
|
||||
.unwrap()
|
||||
.0
|
||||
.to_scope_id(&db, file);
|
||||
let y_ty = symbol_ty_by_name(&db, function_scope, "y");
|
||||
|
||||
assert_eq!(
|
||||
y_ty.display(&db).to_string(),
|
||||
"Literal[1] | Literal[copyright]"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Class name lookups do fall back to globals, but the public type never does.
|
||||
#[test]
|
||||
fn unbound_class_local() -> anyhow::Result<()> {
|
||||
|
||||
@@ -25,7 +25,7 @@ crossbeam = { workspace = true }
|
||||
jod-thread = { workspace = true }
|
||||
lsp-server = { workspace = true }
|
||||
lsp-types = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
foldhash = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
shellexpand = { workspace = true }
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use anyhow::Ok;
|
||||
use foldhash::{HashMap, HashMapExt};
|
||||
use lsp_types::NotebookCellKind;
|
||||
use ruff_notebook::CellMetadata;
|
||||
use rustc_hash::{FxBuildHasher, FxHashMap};
|
||||
|
||||
use crate::{PositionEncoding, TextDocument};
|
||||
|
||||
@@ -17,7 +17,7 @@ pub struct NotebookDocument {
|
||||
metadata: ruff_notebook::RawNotebookMetadata,
|
||||
version: DocumentVersion,
|
||||
// Used to quickly find the index of a cell for a given URL.
|
||||
cell_index: FxHashMap<lsp_types::Url, CellId>,
|
||||
cell_index: HashMap<lsp_types::Url, CellId>,
|
||||
}
|
||||
|
||||
/// A single cell within a notebook, which has text contents represented as a `TextDocument`.
|
||||
@@ -35,7 +35,7 @@ impl NotebookDocument {
|
||||
metadata: serde_json::Map<String, serde_json::Value>,
|
||||
cell_documents: Vec<lsp_types::TextDocumentItem>,
|
||||
) -> crate::Result<Self> {
|
||||
let mut cell_contents: FxHashMap<_, _> = cell_documents
|
||||
let mut cell_contents: HashMap<_, _> = cell_documents
|
||||
.into_iter()
|
||||
.map(|document| (document.uri, document.text))
|
||||
.collect();
|
||||
@@ -122,7 +122,7 @@ impl NotebookDocument {
|
||||
// Instead, it only provides that (a) these cell URIs were removed, and (b) these
|
||||
// cell URIs were added.
|
||||
// https://github.com/astral-sh/ruff/issues/12573
|
||||
let mut deleted_cells = FxHashMap::default();
|
||||
let mut deleted_cells = HashMap::default();
|
||||
|
||||
// First, delete the cells and remove them from the index.
|
||||
if delete > 0 {
|
||||
@@ -216,8 +216,8 @@ impl NotebookDocument {
|
||||
self.cells.get_mut(*self.cell_index.get(uri)?)
|
||||
}
|
||||
|
||||
fn make_cell_index(cells: &[NotebookCell]) -> FxHashMap<lsp_types::Url, CellId> {
|
||||
let mut index = FxHashMap::with_capacity_and_hasher(cells.len(), FxBuildHasher);
|
||||
fn make_cell_index(cells: &[NotebookCell]) -> HashMap<lsp_types::Url, CellId> {
|
||||
let mut index = HashMap::with_capacity(cells.len());
|
||||
for (i, cell) in cells.iter().enumerate() {
|
||||
index.insert(cell.url.clone(), i);
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::any::TypeId;
|
||||
|
||||
use foldhash::HashMap;
|
||||
use lsp_server::{Notification, RequestId};
|
||||
use rustc_hash::FxHashMap;
|
||||
use serde_json::Value;
|
||||
|
||||
use super::{schedule::Task, ClientSender};
|
||||
@@ -23,7 +23,7 @@ pub(crate) struct Responder(ClientSender);
|
||||
pub(crate) struct Requester<'s> {
|
||||
sender: ClientSender,
|
||||
next_request_id: i32,
|
||||
response_handlers: FxHashMap<lsp_server::RequestId, ResponseBuilder<'s>>,
|
||||
response_handlers: HashMap<lsp_server::RequestId, ResponseBuilder<'s>>,
|
||||
}
|
||||
|
||||
impl<'s> Client<'s> {
|
||||
@@ -34,7 +34,7 @@ impl<'s> Client<'s> {
|
||||
requester: Requester {
|
||||
sender,
|
||||
next_request_id: 1,
|
||||
response_handlers: FxHashMap::default(),
|
||||
response_handlers: HashMap::default(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@ use std::borrow::Cow;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use foldhash::HashMap;
|
||||
use lsp_types::Url;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::{
|
||||
edit::{DocumentKey, DocumentVersion, NotebookDocument},
|
||||
@@ -16,10 +16,10 @@ use super::ClientSettings;
|
||||
#[derive(Default, Debug)]
|
||||
pub(crate) struct Index {
|
||||
/// Maps all document file URLs to the associated document controller
|
||||
documents: FxHashMap<Url, DocumentController>,
|
||||
documents: HashMap<Url, DocumentController>,
|
||||
|
||||
/// Maps opaque cell URLs to a notebook URL (document)
|
||||
notebook_cells: FxHashMap<Url, Url>,
|
||||
notebook_cells: HashMap<Url, Url>,
|
||||
|
||||
/// Global settings provided by the client.
|
||||
global_settings: ClientSettings,
|
||||
@@ -28,8 +28,8 @@ pub(crate) struct Index {
|
||||
impl Index {
|
||||
pub(super) fn new(global_settings: ClientSettings) -> Self {
|
||||
Self {
|
||||
documents: FxHashMap::default(),
|
||||
notebook_cells: FxHashMap::default(),
|
||||
documents: HashMap::default(),
|
||||
notebook_cells: HashMap::default(),
|
||||
global_settings,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use foldhash::HashMap;
|
||||
use lsp_types::Url;
|
||||
use rustc_hash::FxHashMap;
|
||||
use serde::Deserialize;
|
||||
|
||||
/// Maps a workspace URI to its associated client settings. Used during server initialization.
|
||||
pub(crate) type WorkspaceSettingsMap = FxHashMap<Url, ClientSettings>;
|
||||
pub(crate) type WorkspaceSettingsMap = HashMap<Url, ClientSettings>;
|
||||
|
||||
/// This is a direct representation of the settings schema sent by the client.
|
||||
#[derive(Debug, Deserialize, Default)]
|
||||
|
||||
@@ -22,7 +22,7 @@ ruff_text_size = { workspace = true }
|
||||
anyhow = { workspace = true }
|
||||
crossbeam = { workspace = true }
|
||||
notify = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
foldhash = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
|
||||
use ruff_db::files::{system_path_to_file, File, Files};
|
||||
use ruff_db::system::walk_directory::WalkState;
|
||||
@@ -18,13 +18,13 @@ impl RootDatabase {
|
||||
|
||||
let mut workspace_change = false;
|
||||
// Packages that need reloading
|
||||
let mut changed_packages = FxHashSet::default();
|
||||
let mut changed_packages = HashSet::default();
|
||||
// Paths that were added
|
||||
let mut added_paths = FxHashSet::default();
|
||||
let mut added_paths = HashSet::default();
|
||||
|
||||
// Deduplicate the `sync` calls. Many file watchers emit multiple events for the same path.
|
||||
let mut synced_files = FxHashSet::default();
|
||||
let mut synced_recursively = FxHashSet::default();
|
||||
let mut synced_files = HashSet::default();
|
||||
let mut synced_recursively = HashSet::default();
|
||||
|
||||
let mut sync_path = |db: &mut RootDatabase, path: &SystemPath| {
|
||||
if synced_files.insert(path.to_path_buf()) {
|
||||
|
||||
@@ -124,12 +124,16 @@ fn format_diagnostic(context: &SemanticLintContext, message: &str, start: TextSi
|
||||
}
|
||||
|
||||
fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef) {
|
||||
// TODO: this treats any symbol with `Type::Unknown` as an unresolved import,
|
||||
// which isn't really correct: if it exists but has `Type::Unknown` in the
|
||||
// module we're importing it from, we shouldn't really emit a diagnostic here,
|
||||
// but currently do.
|
||||
match import {
|
||||
AnyImportRef::Import(import) => {
|
||||
for alias in &import.names {
|
||||
let ty = alias.ty(&context.semantic);
|
||||
|
||||
if ty.is_unbound() {
|
||||
if ty.is_unknown() {
|
||||
context.push_diagnostic(format_diagnostic(
|
||||
context,
|
||||
&format!("Unresolved import '{}'", &alias.name),
|
||||
@@ -142,7 +146,7 @@ fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef)
|
||||
for alias in &import.names {
|
||||
let ty = alias.ty(&context.semantic);
|
||||
|
||||
if ty.is_unbound() {
|
||||
if ty.is_unknown() {
|
||||
context.push_diagnostic(format_diagnostic(
|
||||
context,
|
||||
&format!("Unresolved import '{}'", &alias.name),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::{collections::BTreeMap, sync::Arc};
|
||||
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
use foldhash::{HashMapExt, HashSet, HashSetExt};
|
||||
use salsa::{Durability, Setter as _};
|
||||
|
||||
pub use metadata::{PackageMetadata, WorkspaceMetadata};
|
||||
@@ -74,7 +74,7 @@ pub struct Workspace {
|
||||
/// open files rather than all files in the workspace.
|
||||
#[return_ref]
|
||||
#[default]
|
||||
open_fileset: Option<Arc<FxHashSet<File>>>,
|
||||
open_fileset: Option<Arc<HashSet<File>>>,
|
||||
|
||||
/// The (first-party) packages in this workspace.
|
||||
#[return_ref]
|
||||
@@ -219,7 +219,7 @@ impl Workspace {
|
||||
}
|
||||
|
||||
/// Returns the open files in the workspace or `None` if the entire workspace should be checked.
|
||||
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
|
||||
pub fn open_files(self, db: &dyn Db) -> Option<&HashSet<File>> {
|
||||
self.open_fileset(db).as_deref()
|
||||
}
|
||||
|
||||
@@ -227,7 +227,7 @@ impl Workspace {
|
||||
///
|
||||
/// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
|
||||
#[tracing::instrument(level = "debug", skip(self, db))]
|
||||
pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
|
||||
pub fn set_open_files(self, db: &mut dyn Db, open_files: HashSet<File>) {
|
||||
tracing::debug!("Set open workspace files (count: {})", open_files.len());
|
||||
|
||||
self.set_open_fileset(db).to(Some(Arc::new(open_files)));
|
||||
@@ -236,7 +236,7 @@ impl Workspace {
|
||||
/// This takes the open files from the workspace and returns them.
|
||||
///
|
||||
/// This changes the behavior of `check` to check all files in the workspace instead of just the open files.
|
||||
pub fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
|
||||
pub fn take_open_files(self, db: &mut dyn Db) -> HashSet<File> {
|
||||
tracing::debug!("Take open workspace files");
|
||||
|
||||
// Salsa will cancel any pending queries and remove its own reference to `open_files`
|
||||
@@ -246,7 +246,7 @@ impl Workspace {
|
||||
if let Some(open_files) = open_files {
|
||||
Arc::try_unwrap(open_files).unwrap()
|
||||
} else {
|
||||
FxHashSet::default()
|
||||
HashSet::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -372,7 +372,7 @@ pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
|
||||
Diagnostics::from(diagnostics)
|
||||
}
|
||||
|
||||
fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
|
||||
fn discover_package_files(db: &dyn Db, path: &SystemPath) -> HashSet<File> {
|
||||
let paths = std::sync::Mutex::new(Vec::new());
|
||||
|
||||
db.system().walk_directory(path).run(|| {
|
||||
@@ -402,7 +402,7 @@ fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
|
||||
});
|
||||
|
||||
let paths = paths.into_inner().unwrap();
|
||||
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
|
||||
let mut files = HashSet::with_capacity(paths.len());
|
||||
|
||||
for path in paths {
|
||||
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.
|
||||
|
||||
@@ -2,7 +2,7 @@ use std::iter::FusedIterator;
|
||||
use std::ops::Deref;
|
||||
use std::sync::Arc;
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
use salsa::Setter;
|
||||
|
||||
use ruff_db::files::File;
|
||||
@@ -105,7 +105,7 @@ pub struct LazyFiles<'a> {
|
||||
|
||||
impl<'a> LazyFiles<'a> {
|
||||
/// Sets the indexed files of a package to `files`.
|
||||
pub fn set(mut self, files: FxHashSet<File>) -> IndexedFiles {
|
||||
pub fn set(mut self, files: HashSet<File>) -> IndexedFiles {
|
||||
let files = IndexedFiles::new(files);
|
||||
*self.files = State::Indexed(files.clone());
|
||||
files
|
||||
@@ -127,11 +127,11 @@ impl<'a> LazyFiles<'a> {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct IndexedFiles {
|
||||
revision: u64,
|
||||
files: Arc<std::sync::Mutex<FxHashSet<File>>>,
|
||||
files: Arc<std::sync::Mutex<HashSet<File>>>,
|
||||
}
|
||||
|
||||
impl IndexedFiles {
|
||||
fn new(files: FxHashSet<File>) -> Self {
|
||||
fn new(files: HashSet<File>) -> Self {
|
||||
Self {
|
||||
files: Arc::new(std::sync::Mutex::new(files)),
|
||||
revision: 0,
|
||||
@@ -155,11 +155,11 @@ impl PartialEq for IndexedFiles {
|
||||
impl Eq for IndexedFiles {}
|
||||
|
||||
pub struct IndexedFilesGuard<'a> {
|
||||
guard: std::sync::MutexGuard<'a, FxHashSet<File>>,
|
||||
guard: std::sync::MutexGuard<'a, HashSet<File>>,
|
||||
}
|
||||
|
||||
impl Deref for IndexedFilesGuard<'_> {
|
||||
type Target = FxHashSet<File>;
|
||||
type Target = HashSet<File>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.guard
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.6.0"
|
||||
version = "0.6.1"
|
||||
publish = true
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
@@ -44,7 +44,7 @@ notify = { workspace = true }
|
||||
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
|
||||
rayon = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
foldhash = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
shellexpand = { workspace = true }
|
||||
|
||||
@@ -9,9 +9,9 @@ use anyhow::{anyhow, bail};
|
||||
use clap::builder::{TypedValueParser, ValueParserFactory};
|
||||
use clap::{command, Parser};
|
||||
use colored::Colorize;
|
||||
use foldhash::HashMap;
|
||||
use path_absolutize::path_dedot;
|
||||
use regex::Regex;
|
||||
use rustc_hash::FxHashMap;
|
||||
use toml;
|
||||
|
||||
use ruff_linter::line_width::LineLength;
|
||||
@@ -1278,7 +1278,7 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
|
||||
|
||||
/// Convert a list of `PatternPrefixPair` structs to `PerFileIgnore`.
|
||||
pub fn collect_per_file_ignores(pairs: Vec<PatternPrefixPair>) -> Vec<PerFileIgnore> {
|
||||
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
|
||||
let mut per_file_ignores: HashMap<String, Vec<RuleSelector>> = HashMap::default();
|
||||
for pair in pairs {
|
||||
per_file_ignores
|
||||
.entry(pair.pattern)
|
||||
|
||||
@@ -9,11 +9,11 @@ use std::time::{Duration, SystemTime};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use filetime::FileTime;
|
||||
use foldhash::HashMap;
|
||||
use itertools::Itertools;
|
||||
use log::{debug, error};
|
||||
use rayon::iter::ParallelIterator;
|
||||
use rayon::iter::{IntoParallelIterator, ParallelBridge};
|
||||
use rustc_hash::FxHashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tempfile::NamedTempFile;
|
||||
|
||||
@@ -140,7 +140,7 @@ impl Cache {
|
||||
fn empty(path: PathBuf, package_root: PathBuf) -> Self {
|
||||
let package = PackageCache {
|
||||
package_root,
|
||||
files: FxHashMap::default(),
|
||||
files: HashMap::default(),
|
||||
};
|
||||
Cache::new(path, package)
|
||||
}
|
||||
@@ -318,7 +318,7 @@ struct PackageCache {
|
||||
/// single file "packages", e.g. scripts.
|
||||
package_root: PathBuf,
|
||||
/// Mapping of source file path to it's cached data.
|
||||
files: FxHashMap<RelativePathBuf, FileCache>,
|
||||
files: HashMap<RelativePathBuf, FileCache>,
|
||||
}
|
||||
|
||||
/// On disk representation of the cache per source file.
|
||||
@@ -357,9 +357,9 @@ impl FileCache {
|
||||
.collect()
|
||||
};
|
||||
let notebook_indexes = if let Some(notebook_index) = lint.notebook_index.as_ref() {
|
||||
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
|
||||
HashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
|
||||
} else {
|
||||
FxHashMap::default()
|
||||
HashMap::default()
|
||||
};
|
||||
Diagnostics::new(messages, notebook_indexes)
|
||||
})
|
||||
@@ -493,11 +493,11 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct PackageCacheMap<'a>(FxHashMap<&'a Path, Cache>);
|
||||
pub(crate) struct PackageCacheMap<'a>(HashMap<&'a Path, Cache>);
|
||||
|
||||
impl<'a> PackageCacheMap<'a> {
|
||||
pub(crate) fn init(
|
||||
package_roots: &FxHashMap<&'a Path, Option<&'a Path>>,
|
||||
package_roots: &HashMap<&'a Path, Option<&'a Path>>,
|
||||
resolver: &Resolver,
|
||||
) -> Self {
|
||||
fn init_cache(path: &Path) {
|
||||
|
||||
@@ -5,11 +5,11 @@ use std::time::Instant;
|
||||
|
||||
use anyhow::Result;
|
||||
use colored::Colorize;
|
||||
use foldhash::HashMap;
|
||||
use ignore::Error;
|
||||
use log::{debug, error, warn};
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
use rayon::prelude::*;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_linter::message::Message;
|
||||
@@ -133,7 +133,7 @@ pub(crate) fn check(
|
||||
dummy,
|
||||
TextSize::default(),
|
||||
)],
|
||||
FxHashMap::default(),
|
||||
HashMap::default(),
|
||||
)
|
||||
} else {
|
||||
warn!(
|
||||
@@ -221,7 +221,7 @@ mod test {
|
||||
use std::os::unix::fs::OpenOptionsExt;
|
||||
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
use tempfile::TempDir;
|
||||
|
||||
use ruff_linter::message::{Emitter, EmitterContext, TextEmitter};
|
||||
@@ -284,7 +284,7 @@ mod test {
|
||||
.emit(
|
||||
&mut output,
|
||||
&diagnostics.messages,
|
||||
&EmitterContext::new(&FxHashMap::default()),
|
||||
&EmitterContext::new(&HashMap::default()),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
|
||||
@@ -7,11 +7,11 @@ use std::time::Instant;
|
||||
|
||||
use anyhow::Result;
|
||||
use colored::Colorize;
|
||||
use foldhash::HashSet;
|
||||
use itertools::Itertools;
|
||||
use log::{error, warn};
|
||||
use rayon::iter::Either::{Left, Right};
|
||||
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
|
||||
use rustc_hash::FxHashSet;
|
||||
use thiserror::Error;
|
||||
use tracing::debug;
|
||||
|
||||
@@ -782,7 +782,7 @@ impl Display for FormatCommandError {
|
||||
|
||||
pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
|
||||
// First, collect all rules that are incompatible regardless of the linter-specific settings.
|
||||
let mut incompatible_rules = FxHashSet::default();
|
||||
let mut incompatible_rules = HashSet::default();
|
||||
for setting in resolver.settings() {
|
||||
for rule in [
|
||||
// The formatter might collapse implicit string concatenation on a single line.
|
||||
|
||||
@@ -9,8 +9,8 @@ use std::path::Path;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use colored::Colorize;
|
||||
use foldhash::HashMap;
|
||||
use log::{debug, warn};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_linter::codes::Rule;
|
||||
@@ -33,13 +33,13 @@ use crate::cache::{Cache, FileCacheKey, LintCacheData};
|
||||
pub(crate) struct Diagnostics {
|
||||
pub(crate) messages: Vec<Message>,
|
||||
pub(crate) fixed: FixMap,
|
||||
pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
|
||||
pub(crate) notebook_indexes: HashMap<String, NotebookIndex>,
|
||||
}
|
||||
|
||||
impl Diagnostics {
|
||||
pub(crate) fn new(
|
||||
messages: Vec<Message>,
|
||||
notebook_indexes: FxHashMap<String, NotebookIndex>,
|
||||
notebook_indexes: HashMap<String, NotebookIndex>,
|
||||
) -> Self {
|
||||
Self {
|
||||
messages,
|
||||
@@ -72,7 +72,7 @@ impl Diagnostics {
|
||||
source_file,
|
||||
TextSize::default(),
|
||||
)],
|
||||
FxHashMap::default(),
|
||||
HashMap::default(),
|
||||
)
|
||||
} else {
|
||||
match path {
|
||||
@@ -106,7 +106,7 @@ impl Diagnostics {
|
||||
range: TextRange::default(),
|
||||
file: dummy,
|
||||
})],
|
||||
FxHashMap::default(),
|
||||
HashMap::default(),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -132,7 +132,7 @@ impl AddAssign for Diagnostics {
|
||||
|
||||
/// A collection of fixes indexed by file path.
|
||||
#[derive(Debug, Default, PartialEq)]
|
||||
pub(crate) struct FixMap(FxHashMap<String, FixTable>);
|
||||
pub(crate) struct FixMap(HashMap<String, FixTable>);
|
||||
|
||||
impl FixMap {
|
||||
/// Returns `true` if there are no fixes in the map.
|
||||
@@ -314,7 +314,7 @@ pub(crate) fn lint_path(
|
||||
ParseSource::None,
|
||||
);
|
||||
let transformed = source_kind;
|
||||
let fixed = FxHashMap::default();
|
||||
let fixed = HashMap::default();
|
||||
(result, transformed, fixed)
|
||||
}
|
||||
} else {
|
||||
@@ -328,7 +328,7 @@ pub(crate) fn lint_path(
|
||||
ParseSource::None,
|
||||
);
|
||||
let transformed = source_kind;
|
||||
let fixed = FxHashMap::default();
|
||||
let fixed = HashMap::default();
|
||||
(result, transformed, fixed)
|
||||
};
|
||||
|
||||
@@ -357,9 +357,9 @@ pub(crate) fn lint_path(
|
||||
}
|
||||
|
||||
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
|
||||
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
|
||||
HashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
|
||||
} else {
|
||||
FxHashMap::default()
|
||||
HashMap::default()
|
||||
};
|
||||
|
||||
Ok(Diagnostics {
|
||||
@@ -456,7 +456,7 @@ pub(crate) fn lint_stdin(
|
||||
}
|
||||
|
||||
let transformed = source_kind;
|
||||
let fixed = FxHashMap::default();
|
||||
let fixed = HashMap::default();
|
||||
(result, transformed, fixed)
|
||||
}
|
||||
} else {
|
||||
@@ -470,17 +470,17 @@ pub(crate) fn lint_stdin(
|
||||
ParseSource::None,
|
||||
);
|
||||
let transformed = source_kind;
|
||||
let fixed = FxHashMap::default();
|
||||
let fixed = HashMap::default();
|
||||
(result, transformed, fixed)
|
||||
};
|
||||
|
||||
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
|
||||
FxHashMap::from_iter([(
|
||||
HashMap::from_iter([(
|
||||
path.map_or_else(|| "-".into(), |path| path.to_string_lossy().to_string()),
|
||||
notebook.into_index(),
|
||||
)])
|
||||
} else {
|
||||
FxHashMap::default()
|
||||
HashMap::default()
|
||||
};
|
||||
|
||||
Ok(Diagnostics {
|
||||
|
||||
@@ -89,7 +89,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
|
||||
let Case { db, parser, .. } = case;
|
||||
let result = db.check_file(*parser).unwrap();
|
||||
|
||||
assert_eq!(result.len(), 402);
|
||||
assert_eq!(result.len(), 34);
|
||||
},
|
||||
BatchSize::SmallInput,
|
||||
);
|
||||
@@ -104,7 +104,7 @@ fn benchmark_cold(criterion: &mut Criterion) {
|
||||
let Case { db, parser, .. } = case;
|
||||
let result = db.check_file(*parser).unwrap();
|
||||
|
||||
assert_eq!(result.len(), 402);
|
||||
assert_eq!(result.len(), 34);
|
||||
},
|
||||
BatchSize::SmallInput,
|
||||
);
|
||||
|
||||
@@ -31,7 +31,7 @@ thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true, optional = true }
|
||||
tracing-tree = { workspace = true, optional = true }
|
||||
rustc-hash = { workspace = true }
|
||||
foldhash = { workspace = true }
|
||||
|
||||
[target.'cfg(not(target_arch="wasm32"))'.dependencies]
|
||||
zip = { workspace = true, features = ["zstd"] }
|
||||
|
||||
@@ -6,6 +6,7 @@ use dashmap::mapref::entry::Entry;
|
||||
use salsa::{Durability, Setter};
|
||||
|
||||
pub use file_root::{FileRoot, FileRootKind};
|
||||
use foldhash::{HashMapExt, HashSetExt};
|
||||
pub use path::FilePath;
|
||||
use ruff_notebook::{Notebook, NotebookError};
|
||||
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
use std::hash::BuildHasherDefault;
|
||||
|
||||
use rustc_hash::FxHasher;
|
||||
use foldhash::fast::RandomState;
|
||||
|
||||
use crate::files::Files;
|
||||
use crate::system::System;
|
||||
@@ -15,8 +13,8 @@ pub mod system;
|
||||
pub mod testing;
|
||||
pub mod vendored;
|
||||
|
||||
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
|
||||
pub type FxDashSet<K> = dashmap::DashSet<K, BuildHasherDefault<FxHasher>>;
|
||||
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, RandomState>;
|
||||
pub type FxDashSet<K> = dashmap::DashSet<K, RandomState>;
|
||||
|
||||
/// Most basic database that gives access to files, the host system, source code, and parsed AST.
|
||||
#[salsa::db]
|
||||
|
||||
@@ -4,7 +4,7 @@ use std::sync::{Arc, RwLock, RwLockWriteGuard};
|
||||
|
||||
use camino::{Utf8Path, Utf8PathBuf};
|
||||
use filetime::FileTime;
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::{HashMap, HashMapExt, HashSetExt};
|
||||
|
||||
use ruff_notebook::{Notebook, NotebookError};
|
||||
|
||||
@@ -54,7 +54,7 @@ impl MemoryFileSystem {
|
||||
let fs = Self {
|
||||
inner: Arc::new(MemoryFileSystemInner {
|
||||
by_path: RwLock::new(BTreeMap::default()),
|
||||
virtual_files: RwLock::new(FxHashMap::default()),
|
||||
virtual_files: RwLock::new(HashMap::default()),
|
||||
cwd: cwd.clone(),
|
||||
}),
|
||||
};
|
||||
@@ -385,7 +385,7 @@ impl std::fmt::Debug for MemoryFileSystem {
|
||||
|
||||
struct MemoryFileSystemInner {
|
||||
by_path: RwLock<BTreeMap<Utf8PathBuf, Entry>>,
|
||||
virtual_files: RwLock<FxHashMap<SystemVirtualPathBuf, File>>,
|
||||
virtual_files: RwLock<HashMap<SystemVirtualPathBuf, File>>,
|
||||
cwd: SystemPathBuf,
|
||||
}
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ ruff_macros = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
|
||||
drop_bomb = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
foldhash = { workspace = true }
|
||||
schemars = { workspace = true, optional = true }
|
||||
serde = { workspace = true, optional = true }
|
||||
static_assertions = { workspace = true }
|
||||
|
||||
@@ -2,7 +2,7 @@ use super::{write, Arguments, FormatElement};
|
||||
use crate::format_element::Interned;
|
||||
use crate::prelude::LineMode;
|
||||
use crate::{FormatResult, FormatState};
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
use std::any::{Any, TypeId};
|
||||
use std::fmt::Debug;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
@@ -349,7 +349,7 @@ pub struct RemoveSoftLinesBuffer<'a, Context> {
|
||||
///
|
||||
/// It's fine to not snapshot the cache. The worst that can happen is that it holds on interned elements
|
||||
/// that are now unused. But there's little harm in that and the cache is cleaned when dropping the buffer.
|
||||
interned_cache: FxHashMap<Interned, Interned>,
|
||||
interned_cache: HashMap<Interned, Interned>,
|
||||
}
|
||||
|
||||
impl<'a, Context> RemoveSoftLinesBuffer<'a, Context> {
|
||||
@@ -357,7 +357,7 @@ impl<'a, Context> RemoveSoftLinesBuffer<'a, Context> {
|
||||
pub fn new(inner: &'a mut dyn Buffer<Context = Context>) -> Self {
|
||||
Self {
|
||||
inner,
|
||||
interned_cache: FxHashMap::default(),
|
||||
interned_cache: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -370,7 +370,7 @@ impl<'a, Context> RemoveSoftLinesBuffer<'a, Context> {
|
||||
// Extracted to function to avoid monomorphization
|
||||
fn clean_interned(
|
||||
interned: &Interned,
|
||||
interned_cache: &mut FxHashMap<Interned, Interned>,
|
||||
interned_cache: &mut HashMap<Interned, Interned>,
|
||||
) -> Interned {
|
||||
if let Some(cleaned) = interned_cache.get(interned) {
|
||||
cleaned.clone()
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use std::collections::HashMap;
|
||||
use std::ops::Deref;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
use crate::format_element::tag::{Condition, DedentMode};
|
||||
use crate::prelude::tag::GroupMode;
|
||||
@@ -57,7 +56,7 @@ impl Document {
|
||||
fn propagate_expands<'a>(
|
||||
elements: &'a [FormatElement],
|
||||
enclosing: &mut Vec<Enclosing<'a>>,
|
||||
checked_interned: &mut FxHashMap<&'a Interned, bool>,
|
||||
checked_interned: &mut HashMap<&'a Interned, bool>,
|
||||
) -> bool {
|
||||
let mut expands = false;
|
||||
for element in elements {
|
||||
@@ -147,7 +146,7 @@ impl Document {
|
||||
} else {
|
||||
self.len().ilog2() as usize
|
||||
});
|
||||
let mut interned = FxHashMap::default();
|
||||
let mut interned = HashMap::default();
|
||||
propagate_expands(self, &mut enclosing, &mut interned);
|
||||
}
|
||||
|
||||
@@ -210,7 +209,7 @@ impl<'a> IrFormatContext<'a> {
|
||||
fn new(source_code: SourceCode<'a>) -> Self {
|
||||
Self {
|
||||
source_code,
|
||||
printed_interned_elements: HashMap::new(),
|
||||
printed_interned_elements: HashMap::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.6.0"
|
||||
version = "0.6.1"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
@@ -56,7 +56,7 @@ pep440_rs = { workspace = true, features = ["serde"] }
|
||||
pyproject-toml = { workspace = true }
|
||||
quick-junit = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
foldhash = { workspace = true }
|
||||
schemars = { workspace = true, optional = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
|
||||
134
crates/ruff_linter/resources/test/fixtures/fastapi/FAST003.py
vendored
Normal file
134
crates/ruff_linter/resources/test/fixtures/fastapi/FAST003.py
vendored
Normal file
@@ -0,0 +1,134 @@
|
||||
from fastapi import FastAPI
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
# Errors
|
||||
@app.get("/things/{thing_id}")
|
||||
async def read_thing(query: str):
|
||||
return {"query": query}
|
||||
|
||||
|
||||
@app.get("/books/isbn-{isbn}")
|
||||
async def read_thing():
|
||||
...
|
||||
|
||||
|
||||
@app.get("/things/{thing_id:path}")
|
||||
async def read_thing(query: str):
|
||||
return {"query": query}
|
||||
|
||||
|
||||
@app.get("/things/{thing_id : path}")
|
||||
async def read_thing(query: str):
|
||||
return {"query": query}
|
||||
|
||||
|
||||
@app.get("/books/{author}/{title}")
|
||||
async def read_thing(author: str):
|
||||
return {"author": author}
|
||||
|
||||
|
||||
@app.get("/books/{author_name}/{title}")
|
||||
async def read_thing():
|
||||
...
|
||||
|
||||
|
||||
@app.get("/books/{author}/{title}")
|
||||
async def read_thing(author: str, title: str, /):
|
||||
return {"author": author, "title": title}
|
||||
|
||||
|
||||
@app.get("/books/{author}/{title}/{page}")
|
||||
async def read_thing(
|
||||
author: str,
|
||||
query: str,
|
||||
): ...
|
||||
|
||||
|
||||
@app.get("/books/{author}/{title}")
|
||||
async def read_thing():
|
||||
...
|
||||
|
||||
|
||||
@app.get("/books/{author}/{title}")
|
||||
async def read_thing(*, author: str):
|
||||
...
|
||||
|
||||
|
||||
@app.get("/books/{author}/{title}")
|
||||
async def read_thing(hello, /, *, author: str):
|
||||
...
|
||||
|
||||
|
||||
@app.get("/things/{thing_id}")
|
||||
async def read_thing(
|
||||
query: str,
|
||||
):
|
||||
return {"query": query}
|
||||
|
||||
|
||||
@app.get("/things/{thing_id}")
|
||||
async def read_thing(
|
||||
query: str = "default",
|
||||
):
|
||||
return {"query": query}
|
||||
|
||||
|
||||
@app.get("/things/{thing_id}")
|
||||
async def read_thing(
|
||||
*, query: str = "default",
|
||||
):
|
||||
return {"query": query}
|
||||
|
||||
|
||||
# OK
|
||||
@app.get("/things/{thing_id}")
|
||||
async def read_thing(thing_id: int, query: str):
|
||||
return {"thing_id": thing_id, "query": query}
|
||||
|
||||
|
||||
@app.get("/books/isbn-{isbn}")
|
||||
async def read_thing(isbn: str):
|
||||
return {"isbn": isbn}
|
||||
|
||||
|
||||
@app.get("/things/{thing_id:path}")
|
||||
async def read_thing(thing_id: str, query: str):
|
||||
return {"thing_id": thing_id, "query": query}
|
||||
|
||||
|
||||
@app.get("/things/{thing_id : path}")
|
||||
async def read_thing(thing_id: str, query: str):
|
||||
return {"thing_id": thing_id, "query": query}
|
||||
|
||||
|
||||
@app.get("/books/{author}/{title}")
|
||||
async def read_thing(author: str, title: str):
|
||||
return {"author": author, "title": title}
|
||||
|
||||
|
||||
@app.get("/books/{author}/{title}")
|
||||
async def read_thing(*, author: str, title: str):
|
||||
return {"author": author, "title": title}
|
||||
|
||||
|
||||
@app.get("/books/{author}/{title:path}")
|
||||
async def read_thing(*, author: str, title: str):
|
||||
return {"author": author, "title": title}
|
||||
|
||||
|
||||
# Ignored
|
||||
@app.get("/things/{thing-id}")
|
||||
async def read_thing(query: str):
|
||||
return {"query": query}
|
||||
|
||||
|
||||
@app.get("/things/{thing_id!r}")
|
||||
async def read_thing(query: str):
|
||||
return {"query": query}
|
||||
|
||||
|
||||
@app.get("/things/{thing_id=}")
|
||||
async def read_thing(query: str):
|
||||
return {"query": query}
|
||||
@@ -1,2 +1,6 @@
|
||||
import mod.CaMel as CM
|
||||
from mod import CamelCase as CC
|
||||
|
||||
|
||||
# OK depending on configured import convention
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
@@ -94,6 +94,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::FastApiNonAnnotatedDependency) {
|
||||
fastapi::rules::fastapi_non_annotated_dependency(checker, function_def);
|
||||
}
|
||||
if checker.enabled(Rule::FastApiUnusedPathParameter) {
|
||||
fastapi::rules::fastapi_unused_path_parameter(checker, function_def);
|
||||
}
|
||||
if checker.enabled(Rule::AmbiguousFunctionName) {
|
||||
if let Some(diagnostic) = pycodestyle::rules::ambiguous_function_name(name) {
|
||||
checker.diagnostics.push(diagnostic);
|
||||
@@ -263,8 +266,8 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::TooManyArguments) {
|
||||
pylint::rules::too_many_arguments(checker, function_def);
|
||||
}
|
||||
if checker.enabled(Rule::TooManyPositional) {
|
||||
pylint::rules::too_many_positional(checker, function_def);
|
||||
if checker.enabled(Rule::TooManyPositionalArguments) {
|
||||
pylint::rules::too_many_positional_arguments(checker, function_def);
|
||||
}
|
||||
if checker.enabled(Rule::TooManyReturnStatements) {
|
||||
if let Some(diagnostic) = pylint::rules::too_many_return_statements(
|
||||
@@ -704,11 +707,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
}
|
||||
if checker.enabled(Rule::CamelcaseImportedAsAcronym) {
|
||||
if let Some(diagnostic) = pep8_naming::rules::camelcase_imported_as_acronym(
|
||||
name,
|
||||
asname,
|
||||
alias,
|
||||
stmt,
|
||||
&checker.settings.pep8_naming.ignore_names,
|
||||
name, asname, alias, stmt, checker,
|
||||
) {
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
@@ -1023,7 +1022,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
asname,
|
||||
alias,
|
||||
stmt,
|
||||
&checker.settings.pep8_naming.ignore_names,
|
||||
checker,
|
||||
) {
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use foldhash::HashSet;
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix};
|
||||
use ruff_python_trivia::CommentRanges;
|
||||
@@ -132,7 +132,7 @@ pub(crate) fn check_noqa(
|
||||
let mut unknown_codes = vec![];
|
||||
let mut unmatched_codes = vec![];
|
||||
let mut valid_codes = vec![];
|
||||
let mut seen_codes = FxHashSet::default();
|
||||
let mut seen_codes = HashSet::default();
|
||||
let mut self_ignore = false;
|
||||
for original_code in directive.iter().map(Code::as_str) {
|
||||
let code = get_redirect_target(original_code).unwrap_or(original_code);
|
||||
|
||||
@@ -248,7 +248,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pylint, "R0914") => (RuleGroup::Preview, rules::pylint::rules::TooManyLocals),
|
||||
(Pylint, "R0915") => (RuleGroup::Stable, rules::pylint::rules::TooManyStatements),
|
||||
(Pylint, "R0916") => (RuleGroup::Preview, rules::pylint::rules::TooManyBooleanExpressions),
|
||||
(Pylint, "R0917") => (RuleGroup::Preview, rules::pylint::rules::TooManyPositional),
|
||||
(Pylint, "R0917") => (RuleGroup::Preview, rules::pylint::rules::TooManyPositionalArguments),
|
||||
(Pylint, "R1701") => (RuleGroup::Removed, rules::pylint::rules::RepeatedIsinstanceCalls),
|
||||
(Pylint, "R1702") => (RuleGroup::Preview, rules::pylint::rules::TooManyNestedBlocks),
|
||||
(Pylint, "R1704") => (RuleGroup::Stable, rules::pylint::rules::RedefinedArgumentFromLocal),
|
||||
@@ -920,6 +920,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
// fastapi
|
||||
(FastApi, "001") => (RuleGroup::Preview, rules::fastapi::rules::FastApiRedundantResponseModel),
|
||||
(FastApi, "002") => (RuleGroup::Preview, rules::fastapi::rules::FastApiNonAnnotatedDependency),
|
||||
(FastApi, "003") => (RuleGroup::Preview, rules::fastapi::rules::FastApiUnusedPathParameter),
|
||||
|
||||
// pydoclint
|
||||
(Pydoclint, "201") => (RuleGroup::Preview, rules::pydoclint::rules::DocstringMissingReturns),
|
||||
|
||||
@@ -4,7 +4,7 @@ use anyhow::{Context, Result};
|
||||
|
||||
use ruff_diagnostics::Edit;
|
||||
use ruff_python_ast::parenthesize::parenthesized_range;
|
||||
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Stmt};
|
||||
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Parameters, Stmt};
|
||||
use ruff_python_ast::{AnyNodeRef, ArgOrKeyword};
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
@@ -282,6 +282,59 @@ pub(crate) fn add_argument(
|
||||
}
|
||||
}
|
||||
|
||||
/// Generic function to add a (regular) parameter to a function definition.
///
/// Returns an [`Edit`] that inserts `parameter` into `parameters` as a
/// positional-or-keyword parameter, placing it so the resulting signature
/// stays syntactically valid (respecting the `/` and `*` separators and any
/// parameters that carry default values).
pub(crate) fn add_parameter(parameter: &str, parameters: &Parameters, source: &str) -> Edit {
    if let Some(last) = parameters
        .args
        .iter()
        .filter(|arg| arg.default.is_none())
        .last()
    {
        // Case 1: at least one regular parameter without a default, so append after the last one.
        Edit::insertion(format!(", {parameter}"), last.end())
    } else if parameters.args.first().is_some() {
        // Case 2: no regular parameter without a default, but at least one regular parameter
        // *with* a default, so insert before the first of those (defaulted parameters must come
        // after non-defaulted ones).
        let pos = parameters.start();
        let mut tokenizer = SimpleTokenizer::starts_at(pos, source);
        let name = tokenizer
            .find(|token| token.kind == SimpleTokenKind::Name)
            .expect("Unable to find name token")
        Edit::insertion(format!("{parameter}, "), name.start())
    } else if let Some(last) = parameters.posonlyargs.last() {
        // Case 3: no regular parameter, but a positional-only parameter exists, so add after
        // that. We take care to add it *after* the `/` separator.
        let pos = last.end();
        let mut tokenizer = SimpleTokenizer::starts_at(pos, source);
        let slash = tokenizer
            .find(|token| token.kind == SimpleTokenKind::Slash)
            .expect("Unable to find `/` token");
        // Try to find a comma after the slash.
        let comma = tokenizer.find(|token| token.kind == SimpleTokenKind::Comma);
        if let Some(comma) = comma {
            // Insert just past the comma that follows the `/` separator.
            Edit::insertion(format!(" {parameter},"), comma.start() + TextSize::from(1))
        } else {
            // No trailing comma after `/`: insert just before the `/` itself.
            Edit::insertion(format!(", {parameter}"), slash.start())
        }
    } else if parameters.kwonlyargs.first().is_some() {
        // Case 4: no regular parameter, but a keyword-only parameter exists, so add the new
        // parameter before that. We need to backtrack to before the `*` separator; since there
        // are no non-keyword-only parameters, the `*` separator must be the first token.
        let pos = parameters.start();
        let mut tokenizer = SimpleTokenizer::starts_at(pos, source);
        let star = tokenizer
            .find(|token| token.kind == SimpleTokenKind::Star)
            .expect("Unable to find `*` token");
        Edit::insertion(format!("{parameter}, "), star.start())
    } else {
        // Case 5: no parameters at all, so add the parameter after the opening parenthesis.
        Edit::insertion(
            parameter.to_string(),
            parameters.start() + TextSize::from(1),
        )
    }
}
|
||||
|
||||
/// Safely adjust the indentation of the indented block at [`TextRange`].
|
||||
///
|
||||
/// The [`TextRange`] is assumed to represent an entire indented block, including the leading
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use itertools::Itertools;
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use foldhash::{HashMap, HashMapExt, HashSet};
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, IsolationLevel, SourceMap};
|
||||
use ruff_source_file::Locator;
|
||||
@@ -57,8 +57,8 @@ fn apply_fixes<'a>(
|
||||
let mut output = String::with_capacity(locator.len());
|
||||
let mut last_pos: Option<TextSize> = None;
|
||||
let mut applied: BTreeSet<&Edit> = BTreeSet::default();
|
||||
let mut isolated: FxHashSet<u32> = FxHashSet::default();
|
||||
let mut fixed = FxHashMap::default();
|
||||
let mut isolated: HashSet<u32> = HashSet::default();
|
||||
let mut fixed = HashMap::default();
|
||||
let mut source_map = SourceMap::default();
|
||||
|
||||
for (rule, fix) in diagnostics
|
||||
|
||||
@@ -4,8 +4,8 @@ use std::path::Path;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use colored::Colorize;
|
||||
use foldhash::HashMap;
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_notebook::Notebook;
|
||||
@@ -43,7 +43,7 @@ pub struct LinterResult {
|
||||
pub has_syntax_error: bool,
|
||||
}
|
||||
|
||||
pub type FixTable = FxHashMap<Rule, usize>;
|
||||
pub type FixTable = HashMap<Rule, usize>;
|
||||
|
||||
pub struct FixerResult<'a> {
|
||||
/// The result returned by the linter, after applying any fixes.
|
||||
@@ -476,7 +476,7 @@ pub fn lint_fix<'a>(
|
||||
let mut transformed = Cow::Borrowed(source_kind);
|
||||
|
||||
// Track the number of fixed errors across iterations.
|
||||
let mut fixed = FxHashMap::default();
|
||||
let mut fixed = HashMap::default();
|
||||
|
||||
// As an escape hatch, bail after 100 iterations.
|
||||
let mut iterations = 0;
|
||||
|
||||
@@ -5,10 +5,10 @@ use std::sync::Mutex;
|
||||
use anyhow::Result;
|
||||
use colored::Colorize;
|
||||
use fern;
|
||||
use foldhash::HashSet;
|
||||
use log::Level;
|
||||
use once_cell::sync::Lazy;
|
||||
use ruff_python_parser::{ParseError, ParseErrorType};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use ruff_source_file::{LineIndex, OneIndexed, SourceCode, SourceLocation};
|
||||
|
||||
@@ -35,7 +35,7 @@ macro_rules! warn_user_once_by_id {
|
||||
};
|
||||
}
|
||||
|
||||
pub static MESSAGES: Lazy<Mutex<FxHashSet<String>>> = Lazy::new(Mutex::default);
|
||||
pub static MESSAGES: Lazy<Mutex<HashSet<String>>> = Lazy::new(Mutex::default);
|
||||
|
||||
/// Warn a user once, if warnings are enabled, with uniqueness determined by the content of the
|
||||
/// message.
|
||||
|
||||
@@ -3,7 +3,7 @@ use std::collections::BTreeMap;
|
||||
use std::io::Write;
|
||||
use std::ops::Deref;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
pub use azure::AzureEmitter;
|
||||
pub use github::GithubEmitter;
|
||||
@@ -285,11 +285,11 @@ pub trait Emitter {
|
||||
|
||||
/// Context passed to [`Emitter`].
|
||||
pub struct EmitterContext<'a> {
|
||||
notebook_indexes: &'a FxHashMap<String, NotebookIndex>,
|
||||
notebook_indexes: &'a HashMap<String, NotebookIndex>,
|
||||
}
|
||||
|
||||
impl<'a> EmitterContext<'a> {
|
||||
pub fn new(notebook_indexes: &'a FxHashMap<String, NotebookIndex>) -> Self {
|
||||
pub fn new(notebook_indexes: &'a HashMap<String, NotebookIndex>) -> Self {
|
||||
Self { notebook_indexes }
|
||||
}
|
||||
|
||||
@@ -305,7 +305,7 @@ impl<'a> EmitterContext<'a> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Edit, Fix};
|
||||
use ruff_notebook::NotebookIndex;
|
||||
@@ -399,7 +399,7 @@ def fibonacci(n):
|
||||
]
|
||||
}
|
||||
|
||||
pub(super) fn create_notebook_messages() -> (Vec<Message>, FxHashMap<String, NotebookIndex>) {
|
||||
pub(super) fn create_notebook_messages() -> (Vec<Message>, HashMap<String, NotebookIndex>) {
|
||||
let notebook = r"# cell 1
|
||||
import os
|
||||
# cell 2
|
||||
@@ -453,7 +453,7 @@ def foo():
|
||||
|
||||
let notebook_source = SourceFileBuilder::new("notebook.ipynb", notebook).finish();
|
||||
|
||||
let mut notebook_indexes = FxHashMap::default();
|
||||
let mut notebook_indexes = HashMap::default();
|
||||
notebook_indexes.insert(
|
||||
"notebook.ipynb".to_string(),
|
||||
NotebookIndex::new(
|
||||
@@ -510,7 +510,7 @@ def foo():
|
||||
emitter: &mut dyn Emitter,
|
||||
messages: &[Message],
|
||||
) -> String {
|
||||
let notebook_indexes = FxHashMap::default();
|
||||
let notebook_indexes = HashMap::default();
|
||||
let context = EmitterContext::new(¬ebook_indexes);
|
||||
let mut output: Vec<u8> = Vec::new();
|
||||
emitter.emit(&mut output, messages, &context).unwrap();
|
||||
@@ -521,7 +521,7 @@ def foo():
|
||||
pub(super) fn capture_emitter_notebook_output(
|
||||
emitter: &mut dyn Emitter,
|
||||
messages: &[Message],
|
||||
notebook_indexes: &FxHashMap<String, NotebookIndex>,
|
||||
notebook_indexes: &HashMap<String, NotebookIndex>,
|
||||
) -> String {
|
||||
let context = EmitterContext::new(notebook_indexes);
|
||||
let mut output: Vec<u8> = Vec::new();
|
||||
|
||||
@@ -15,6 +15,7 @@ mod tests {
|
||||
|
||||
#[test_case(Rule::FastApiRedundantResponseModel, Path::new("FAST001.py"))]
|
||||
#[test_case(Rule::FastApiNonAnnotatedDependency, Path::new("FAST002.py"))]
|
||||
#[test_case(Rule::FastApiUnusedPathParameter, Path::new("FAST003.py"))]
|
||||
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
|
||||
@@ -0,0 +1,232 @@
|
||||
use std::iter::Peekable;
|
||||
use std::ops::Range;
|
||||
use std::str::CharIndices;
|
||||
|
||||
use ruff_diagnostics::Fix;
|
||||
use ruff_diagnostics::{Diagnostic, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_semantic::Modules;
|
||||
use ruff_python_stdlib::identifiers::is_identifier;
|
||||
use ruff_text_size::{Ranged, TextSize};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::fix::edits::add_parameter;
|
||||
use crate::rules::fastapi::rules::is_fastapi_route_decorator;
|
||||
|
||||
/// ## What it does
|
||||
/// Identifies FastAPI routes that declare path parameters in the route path
|
||||
/// that are not included in the function signature.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Path parameters are used to extract values from the URL path.
|
||||
///
|
||||
/// If a path parameter is declared in the route path but not in the function
|
||||
/// signature, it will not be accessible in the function body, which is likely
|
||||
/// a mistake.
|
||||
///
|
||||
/// If a path parameter is declared in the route path, but as a positional-only
|
||||
/// argument in the function signature, it will also not be accessible in the
|
||||
/// function body, as FastAPI will not inject the parameter.
|
||||
///
|
||||
/// ## Known problems
|
||||
/// If the path parameter is _not_ a valid Python identifier (e.g., `user-id`, as
|
||||
/// opposed to `user_id`), FastAPI will normalize it. However, this rule simply
|
||||
/// ignores such path parameters, as FastAPI's normalization behavior is undocumented.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```python
|
||||
/// from fastapi import FastAPI
|
||||
///
|
||||
/// app = FastAPI()
|
||||
///
|
||||
///
|
||||
/// @app.get("/things/{thing_id}")
|
||||
/// async def read_thing(query: str): ...
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
///
|
||||
/// ```python
|
||||
/// from fastapi import FastAPI
|
||||
///
|
||||
/// app = FastAPI()
|
||||
///
|
||||
///
|
||||
/// @app.get("/things/{thing_id}")
|
||||
/// async def read_thing(thing_id: int, query: str): ...
|
||||
/// ```
|
||||
///
|
||||
/// ## Fix safety
|
||||
/// This rule's fix is marked as unsafe, as modifying a function signature can
|
||||
/// change the behavior of the code.
|
||||
#[violation]
|
||||
pub struct FastApiUnusedPathParameter {
|
||||
arg_name: String,
|
||||
function_name: String,
|
||||
is_positional: bool,
|
||||
}
|
||||
|
||||
impl Violation for FastApiUnusedPathParameter {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
let Self {
|
||||
arg_name,
|
||||
function_name,
|
||||
is_positional,
|
||||
} = self;
|
||||
#[allow(clippy::if_not_else)]
|
||||
if !is_positional {
|
||||
format!("Parameter `{arg_name}` appears in route path, but not in `{function_name}` signature")
|
||||
} else {
|
||||
format!(
|
||||
"Parameter `{arg_name}` appears in route path, but only as a positional-only argument in `{function_name}` signature"
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
let Self {
|
||||
arg_name,
|
||||
is_positional,
|
||||
..
|
||||
} = self;
|
||||
if *is_positional {
|
||||
None
|
||||
} else {
|
||||
Some(format!("Add `{arg_name}` to function signature"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// FAST003
///
/// Flags path parameters declared in a FastAPI route's path string that are
/// absent from (or positional-only in) the decorated function's signature,
/// and offers a fix that appends the missing parameter.
pub(crate) fn fastapi_unused_path_parameter(
    checker: &mut Checker,
    function_def: &ast::StmtFunctionDef,
) {
    // Cheap bail-out: only relevant if `fastapi` was seen among the module's imports.
    if !checker.semantic().seen_module(Modules::FASTAPI) {
        return;
    }

    // Get the route path from the decorator.
    let route_decorator = function_def
        .decorator_list
        .iter()
        .find_map(|decorator| is_fastapi_route_decorator(decorator, checker.semantic()));

    let Some(route_decorator) = route_decorator else {
        return;
    };

    // The route path is the first positional argument to the decorator call.
    let Some(path_arg) = route_decorator.arguments.args.first() else {
        return;
    };
    // Range of the whole path-string expression; narrowed per parameter below.
    let diagnostic_range = path_arg.range();

    // We can't really handle anything other than string literals.
    let path = match path_arg.as_string_literal_expr() {
        Some(path_arg) => &path_arg.value,
        None => return,
    };

    // Extract the path parameters from the route path.
    let path_params = PathParamIterator::new(path.to_str());

    // Extract the arguments from the function signature. Note that this
    // includes keyword-only parameters (FastAPI injects those too) but not
    // positional-only ones, which are handled separately below.
    let named_args: Vec<_> = function_def
        .parameters
        .args
        .iter()
        .chain(function_def.parameters.kwonlyargs.iter())
        .map(|arg| arg.parameter.name.as_str())
        .collect();

    // Check if any of the path parameters are not in the function signature.
    let mut diagnostics = vec![];
    for (path_param, range) in path_params {
        // Ignore invalid identifiers (e.g., `user-id`, as opposed to `user_id`),
        // since FastAPI's normalization of those is undocumented.
        if !is_identifier(path_param) {
            continue;
        }

        // If the path parameter is already in the function signature, we don't need to do anything.
        if named_args.contains(&path_param) {
            continue;
        }

        // Determine whether the path parameter is used as a positional-only argument. In this case,
        // the path parameter injection won't work, but we also can't fix it (yet), since we'd need
        // to make the parameter non-positional-only.
        let is_positional = function_def
            .parameters
            .posonlyargs
            .iter()
            .any(|arg| arg.parameter.name.as_str() == path_param);

        // Narrow the diagnostic from the whole string expression down to this
        // parameter's `{...}` span. The `+ 1` on each side skips the string's
        // quote characters. NOTE(review): this assumes a single-character
        // quote prefix/suffix — confirm behavior for raw/prefixed literals.
        let mut diagnostic = Diagnostic::new(
            FastApiUnusedPathParameter {
                arg_name: path_param.to_string(),
                function_name: function_def.name.to_string(),
                is_positional,
            },
            #[allow(clippy::cast_possible_truncation)]
            diagnostic_range
                .add_start(TextSize::from(range.start as u32 + 1))
                .sub_end(TextSize::from((path.len() - range.end + 1) as u32)),
        );
        // Only offer a fix when appending a regular parameter would help;
        // positional-only parameters can't be fixed this way (see above).
        if !is_positional {
            diagnostic.set_fix(Fix::unsafe_edit(add_parameter(
                path_param,
                &function_def.parameters,
                checker.locator().contents(),
            )));
        }
        diagnostics.push(diagnostic);
    }

    checker.diagnostics.extend(diagnostics);
}
|
||||
|
||||
/// An iterator to extract parameters from FastAPI route paths.
///
/// The iterator yields tuples of the parameter name and the range of the parameter in the input,
/// inclusive of curly braces. Text after a colon inside the braces (a path converter, like
/// `{thing_id:path}`) is excluded from the name, and surrounding whitespace is trimmed.
#[derive(Debug)]
struct PathParamIterator<'a> {
    /// The route path being scanned (e.g., `"/things/{thing_id}"`).
    input: &'a str,
    /// Byte-indexed cursor over `input`.
    chars: Peekable<CharIndices<'a>>,
}

impl<'a> PathParamIterator<'a> {
    fn new(input: &'a str) -> Self {
        PathParamIterator {
            input,
            chars: input.char_indices().peekable(),
        }
    }
}

impl<'a> Iterator for PathParamIterator<'a> {
    type Item = (&'a str, Range<usize>);

    fn next(&mut self) -> Option<Self::Item> {
        // Scan forward to the next `{`; anything outside braces is ignored.
        while let Some((start, c)) = self.chars.next() {
            if c == '{' {
                // Find the matching `}`; an unclosed `{` consumes the rest of
                // the input and the iterator simply ends.
                if let Some((end, _)) = self.chars.by_ref().find(|&(_, ch)| ch == '}') {
                    let param_content = &self.input[start + 1..end];
                    // We ignore text after a colon, since those are path converters.
                    // See also: https://fastapi.tiangolo.com/tutorial/path-params/?h=path#path-convertor
                    let param_name_end = param_content.find(':').unwrap_or(param_content.len());
                    // `trim` handles spacing like `{ thing_id : path }`. (The
                    // previous `&...trim()` produced a `&&str` and relied on
                    // deref coercion; the borrow was redundant.)
                    let param_name = param_content[..param_name_end].trim();

                    // The returned range covers the braces themselves.
                    #[allow(clippy::range_plus_one)]
                    return Some((param_name, start..end + 1));
                }
            }
        }
        None
    }
}
|
||||
@@ -1,8 +1,10 @@
|
||||
pub(crate) use fastapi_non_annotated_dependency::*;
|
||||
pub(crate) use fastapi_redundant_response_model::*;
|
||||
pub(crate) use fastapi_unused_path_parameter::*;
|
||||
|
||||
mod fastapi_non_annotated_dependency;
|
||||
mod fastapi_redundant_response_model;
|
||||
mod fastapi_unused_path_parameter;
|
||||
|
||||
use ruff_python_ast::{Decorator, ExprCall, StmtFunctionDef};
|
||||
use ruff_python_semantic::analyze::typing::resolve_assignment;
|
||||
|
||||
@@ -0,0 +1,323 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/fastapi/mod.rs
|
||||
---
|
||||
FAST003.py:7:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
6 | # Errors
|
||||
7 | @app.get("/things/{thing_id}")
|
||||
| ^^^^^^^^^^ FAST003
|
||||
8 | async def read_thing(query: str):
|
||||
9 | return {"query": query}
|
||||
|
|
||||
= help: Add `thing_id` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
5 5 |
|
||||
6 6 | # Errors
|
||||
7 7 | @app.get("/things/{thing_id}")
|
||||
8 |-async def read_thing(query: str):
|
||||
8 |+async def read_thing(query: str, thing_id):
|
||||
9 9 | return {"query": query}
|
||||
10 10 |
|
||||
11 11 |
|
||||
|
||||
FAST003.py:12:23: FAST003 [*] Parameter `isbn` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
12 | @app.get("/books/isbn-{isbn}")
|
||||
| ^^^^^^ FAST003
|
||||
13 | async def read_thing():
|
||||
14 | ...
|
||||
|
|
||||
= help: Add `isbn` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
10 10 |
|
||||
11 11 |
|
||||
12 12 | @app.get("/books/isbn-{isbn}")
|
||||
13 |-async def read_thing():
|
||||
13 |+async def read_thing(isbn):
|
||||
14 14 | ...
|
||||
15 15 |
|
||||
16 16 |
|
||||
|
||||
FAST003.py:17:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
17 | @app.get("/things/{thing_id:path}")
|
||||
| ^^^^^^^^^^^^^^^ FAST003
|
||||
18 | async def read_thing(query: str):
|
||||
19 | return {"query": query}
|
||||
|
|
||||
= help: Add `thing_id` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
15 15 |
|
||||
16 16 |
|
||||
17 17 | @app.get("/things/{thing_id:path}")
|
||||
18 |-async def read_thing(query: str):
|
||||
18 |+async def read_thing(query: str, thing_id):
|
||||
19 19 | return {"query": query}
|
||||
20 20 |
|
||||
21 21 |
|
||||
|
||||
FAST003.py:22:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
22 | @app.get("/things/{thing_id : path}")
|
||||
| ^^^^^^^^^^^^^^^^^ FAST003
|
||||
23 | async def read_thing(query: str):
|
||||
24 | return {"query": query}
|
||||
|
|
||||
= help: Add `thing_id` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
20 20 |
|
||||
21 21 |
|
||||
22 22 | @app.get("/things/{thing_id : path}")
|
||||
23 |-async def read_thing(query: str):
|
||||
23 |+async def read_thing(query: str, thing_id):
|
||||
24 24 | return {"query": query}
|
||||
25 25 |
|
||||
26 26 |
|
||||
|
||||
FAST003.py:27:27: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
27 | @app.get("/books/{author}/{title}")
|
||||
| ^^^^^^^ FAST003
|
||||
28 | async def read_thing(author: str):
|
||||
29 | return {"author": author}
|
||||
|
|
||||
= help: Add `title` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
25 25 |
|
||||
26 26 |
|
||||
27 27 | @app.get("/books/{author}/{title}")
|
||||
28 |-async def read_thing(author: str):
|
||||
28 |+async def read_thing(author: str, title):
|
||||
29 29 | return {"author": author}
|
||||
30 30 |
|
||||
31 31 |
|
||||
|
||||
FAST003.py:32:18: FAST003 [*] Parameter `author_name` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
32 | @app.get("/books/{author_name}/{title}")
|
||||
| ^^^^^^^^^^^^^ FAST003
|
||||
33 | async def read_thing():
|
||||
34 | ...
|
||||
|
|
||||
= help: Add `author_name` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
30 30 |
|
||||
31 31 |
|
||||
32 32 | @app.get("/books/{author_name}/{title}")
|
||||
33 |-async def read_thing():
|
||||
33 |+async def read_thing(author_name):
|
||||
34 34 | ...
|
||||
35 35 |
|
||||
36 36 |
|
||||
|
||||
FAST003.py:32:32: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
32 | @app.get("/books/{author_name}/{title}")
|
||||
| ^^^^^^^ FAST003
|
||||
33 | async def read_thing():
|
||||
34 | ...
|
||||
|
|
||||
= help: Add `title` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
30 30 |
|
||||
31 31 |
|
||||
32 32 | @app.get("/books/{author_name}/{title}")
|
||||
33 |-async def read_thing():
|
||||
33 |+async def read_thing(title):
|
||||
34 34 | ...
|
||||
35 35 |
|
||||
36 36 |
|
||||
|
||||
FAST003.py:37:18: FAST003 Parameter `author` appears in route path, but only as a positional-only argument in `read_thing` signature
|
||||
|
|
||||
37 | @app.get("/books/{author}/{title}")
|
||||
| ^^^^^^^^ FAST003
|
||||
38 | async def read_thing(author: str, title: str, /):
|
||||
39 | return {"author": author, "title": title}
|
||||
|
|
||||
|
||||
FAST003.py:37:27: FAST003 Parameter `title` appears in route path, but only as a positional-only argument in `read_thing` signature
|
||||
|
|
||||
37 | @app.get("/books/{author}/{title}")
|
||||
| ^^^^^^^ FAST003
|
||||
38 | async def read_thing(author: str, title: str, /):
|
||||
39 | return {"author": author, "title": title}
|
||||
|
|
||||
|
||||
FAST003.py:42:27: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
42 | @app.get("/books/{author}/{title}/{page}")
|
||||
| ^^^^^^^ FAST003
|
||||
43 | async def read_thing(
|
||||
44 | author: str,
|
||||
|
|
||||
= help: Add `title` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
42 42 | @app.get("/books/{author}/{title}/{page}")
|
||||
43 43 | async def read_thing(
|
||||
44 44 | author: str,
|
||||
45 |- query: str,
|
||||
45 |+ query: str, title,
|
||||
46 46 | ): ...
|
||||
47 47 |
|
||||
48 48 |
|
||||
|
||||
FAST003.py:42:35: FAST003 [*] Parameter `page` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
42 | @app.get("/books/{author}/{title}/{page}")
|
||||
| ^^^^^^ FAST003
|
||||
43 | async def read_thing(
|
||||
44 | author: str,
|
||||
|
|
||||
= help: Add `page` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
42 42 | @app.get("/books/{author}/{title}/{page}")
|
||||
43 43 | async def read_thing(
|
||||
44 44 | author: str,
|
||||
45 |- query: str,
|
||||
45 |+ query: str, page,
|
||||
46 46 | ): ...
|
||||
47 47 |
|
||||
48 48 |
|
||||
|
||||
FAST003.py:49:18: FAST003 [*] Parameter `author` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
49 | @app.get("/books/{author}/{title}")
|
||||
| ^^^^^^^^ FAST003
|
||||
50 | async def read_thing():
|
||||
51 | ...
|
||||
|
|
||||
= help: Add `author` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
47 47 |
|
||||
48 48 |
|
||||
49 49 | @app.get("/books/{author}/{title}")
|
||||
50 |-async def read_thing():
|
||||
50 |+async def read_thing(author):
|
||||
51 51 | ...
|
||||
52 52 |
|
||||
53 53 |
|
||||
|
||||
FAST003.py:49:27: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
49 | @app.get("/books/{author}/{title}")
|
||||
| ^^^^^^^ FAST003
|
||||
50 | async def read_thing():
|
||||
51 | ...
|
||||
|
|
||||
= help: Add `title` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
47 47 |
|
||||
48 48 |
|
||||
49 49 | @app.get("/books/{author}/{title}")
|
||||
50 |-async def read_thing():
|
||||
50 |+async def read_thing(title):
|
||||
51 51 | ...
|
||||
52 52 |
|
||||
53 53 |
|
||||
|
||||
FAST003.py:54:27: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
54 | @app.get("/books/{author}/{title}")
|
||||
| ^^^^^^^ FAST003
|
||||
55 | async def read_thing(*, author: str):
|
||||
56 | ...
|
||||
|
|
||||
= help: Add `title` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
52 52 |
|
||||
53 53 |
|
||||
54 54 | @app.get("/books/{author}/{title}")
|
||||
55 |-async def read_thing(*, author: str):
|
||||
55 |+async def read_thing(title, *, author: str):
|
||||
56 56 | ...
|
||||
57 57 |
|
||||
58 58 |
|
||||
|
||||
FAST003.py:59:27: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
59 | @app.get("/books/{author}/{title}")
|
||||
| ^^^^^^^ FAST003
|
||||
60 | async def read_thing(hello, /, *, author: str):
|
||||
61 | ...
|
||||
|
|
||||
= help: Add `title` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
57 57 |
|
||||
58 58 |
|
||||
59 59 | @app.get("/books/{author}/{title}")
|
||||
60 |-async def read_thing(hello, /, *, author: str):
|
||||
60 |+async def read_thing(hello, /, title, *, author: str):
|
||||
61 61 | ...
|
||||
62 62 |
|
||||
63 63 |
|
||||
|
||||
FAST003.py:64:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
64 | @app.get("/things/{thing_id}")
|
||||
| ^^^^^^^^^^ FAST003
|
||||
65 | async def read_thing(
|
||||
66 | query: str,
|
||||
|
|
||||
= help: Add `thing_id` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
63 63 |
|
||||
64 64 | @app.get("/things/{thing_id}")
|
||||
65 65 | async def read_thing(
|
||||
66 |- query: str,
|
||||
66 |+ query: str, thing_id,
|
||||
67 67 | ):
|
||||
68 68 | return {"query": query}
|
||||
69 69 |
|
||||
|
||||
FAST003.py:71:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
71 | @app.get("/things/{thing_id}")
|
||||
| ^^^^^^^^^^ FAST003
|
||||
72 | async def read_thing(
|
||||
73 | query: str = "default",
|
||||
|
|
||||
= help: Add `thing_id` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
70 70 |
|
||||
71 71 | @app.get("/things/{thing_id}")
|
||||
72 72 | async def read_thing(
|
||||
73 |- query: str = "default",
|
||||
73 |+ thing_id, query: str = "default",
|
||||
74 74 | ):
|
||||
75 75 | return {"query": query}
|
||||
76 76 |
|
||||
|
||||
FAST003.py:78:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
||||
|
|
||||
78 | @app.get("/things/{thing_id}")
|
||||
| ^^^^^^^^^^ FAST003
|
||||
79 | async def read_thing(
|
||||
80 | *, query: str = "default",
|
||||
|
|
||||
= help: Add `thing_id` to function signature
|
||||
|
||||
ℹ Unsafe fix
|
||||
77 77 |
|
||||
78 78 | @app.get("/things/{thing_id}")
|
||||
79 79 | async def read_thing(
|
||||
80 |- *, query: str = "default",
|
||||
80 |+ thing_id, *, query: str = "default",
|
||||
81 81 | ):
|
||||
82 82 | return {"query": query}
|
||||
83 83 |
|
||||
@@ -1,5 +1,5 @@
|
||||
use foldhash::HashSet;
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use ruff_diagnostics::Edit;
|
||||
use ruff_python_ast::helpers::{
|
||||
@@ -109,7 +109,7 @@ pub(crate) fn auto_return_type(function: &ast::StmtFunctionDef) -> Option<AutoPy
|
||||
pub(crate) enum AutoPythonType {
|
||||
Never,
|
||||
Atom(PythonType),
|
||||
Union(FxHashSet<PythonType>),
|
||||
Union(HashSet<PythonType>),
|
||||
}
|
||||
|
||||
impl AutoPythonType {
|
||||
|
||||
@@ -32,7 +32,7 @@ use crate::rules::flake8_async::helpers::AsyncModule;
|
||||
///
|
||||
///
|
||||
/// async def main():
|
||||
/// with asyncio.timeout(2):
|
||||
/// async with asyncio.timeout(2):
|
||||
/// await long_running_task()
|
||||
/// ```
|
||||
///
|
||||
|
||||
@@ -22,14 +22,14 @@ use crate::rules::flake8_async::helpers::MethodName;
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// async def func():
|
||||
/// with asyncio.timeout(2):
|
||||
/// async with asyncio.timeout(2):
|
||||
/// do_something()
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// async def func():
|
||||
/// with asyncio.timeout(2):
|
||||
/// async with asyncio.timeout(2):
|
||||
/// do_something()
|
||||
/// await awaitable()
|
||||
/// ```
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use foldhash::{HashMap, HashMapExt, HashSet};
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Violation};
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix};
|
||||
@@ -118,9 +118,9 @@ fn duplicate_handler_exceptions<'a>(
|
||||
checker: &mut Checker,
|
||||
expr: &'a Expr,
|
||||
elts: &'a [Expr],
|
||||
) -> FxHashMap<UnqualifiedName<'a>, &'a Expr> {
|
||||
let mut seen: FxHashMap<UnqualifiedName, &Expr> = FxHashMap::default();
|
||||
let mut duplicates: FxHashSet<UnqualifiedName> = FxHashSet::default();
|
||||
) -> HashMap<UnqualifiedName<'a>, &'a Expr> {
|
||||
let mut seen: HashMap<UnqualifiedName, &Expr> = HashMap::default();
|
||||
let mut duplicates: HashSet<UnqualifiedName> = HashSet::default();
|
||||
let mut unique_elts: Vec<&Expr> = Vec::default();
|
||||
for type_ in elts {
|
||||
if let Some(name) = UnqualifiedName::from_expr(type_) {
|
||||
@@ -171,8 +171,8 @@ fn duplicate_handler_exceptions<'a>(
|
||||
|
||||
/// B025
|
||||
pub(crate) fn duplicate_exceptions(checker: &mut Checker, handlers: &[ExceptHandler]) {
|
||||
let mut seen: FxHashSet<UnqualifiedName> = FxHashSet::default();
|
||||
let mut duplicates: FxHashMap<UnqualifiedName, Vec<&Expr>> = FxHashMap::default();
|
||||
let mut seen: HashSet<UnqualifiedName> = HashSet::default();
|
||||
let mut duplicates: HashMap<UnqualifiedName, Vec<&Expr>> = HashMap::default();
|
||||
for handler in handlers {
|
||||
let ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler {
|
||||
type_: Some(type_),
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use anyhow::{Context, Result};
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -48,7 +48,7 @@ impl Violation for DuplicateValue {
|
||||
|
||||
/// B033
|
||||
pub(crate) fn duplicate_value(checker: &mut Checker, set: &ast::ExprSet) {
|
||||
let mut seen_values: FxHashSet<ComparableExpr> = FxHashSet::default();
|
||||
let mut seen_values: HashSet<ComparableExpr> = HashSet::default();
|
||||
for (index, value) in set.iter().enumerate() {
|
||||
if value.is_literal_expr() {
|
||||
let comparable_value = ComparableExpr::from(value);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use foldhash::HashMap;
|
||||
use ruff_python_ast::{self as ast, Expr, ParameterWithDefault};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -76,7 +76,7 @@ pub(crate) fn loop_variable_overrides_iterator(checker: &mut Checker, target: &E
|
||||
|
||||
#[derive(Default)]
|
||||
struct NameFinder<'a> {
|
||||
names: FxHashMap<&'a str, &'a Expr>,
|
||||
names: HashMap<&'a str, &'a Expr>,
|
||||
}
|
||||
|
||||
impl<'a> Visitor<'a> for NameFinder<'a> {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -70,7 +70,7 @@ pub(crate) fn static_key_dict_comprehension(checker: &mut Checker, dict_comp: &a
|
||||
|
||||
/// Returns `true` if the given expression is a constant in the context of the dictionary
|
||||
/// comprehension.
|
||||
fn is_constant(key: &Expr, names: &FxHashMap<&str, &ast::ExprName>) -> bool {
|
||||
fn is_constant(key: &Expr, names: &HashMap<&str, &ast::ExprName>) -> bool {
|
||||
match key {
|
||||
Expr::Tuple(tuple) => tuple.iter().all(|elem| is_constant(elem, names)),
|
||||
Expr::Name(ast::ExprName { id, .. }) => !names.contains_key(id.as_str()),
|
||||
|
||||
@@ -7,7 +7,7 @@ mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use foldhash::{HashMap, HashMapExt, HashSet};
|
||||
|
||||
use crate::assert_messages;
|
||||
use crate::registry::Rule;
|
||||
@@ -28,7 +28,7 @@ mod tests {
|
||||
#[test]
|
||||
fn custom() -> Result<()> {
|
||||
let mut aliases = default_aliases();
|
||||
aliases.extend(FxHashMap::from_iter([
|
||||
aliases.extend(HashMap::from_iter([
|
||||
("dask.array".to_string(), "da".to_string()),
|
||||
("dask.dataframe".to_string(), "dd".to_string()),
|
||||
]));
|
||||
@@ -37,8 +37,8 @@ mod tests {
|
||||
&LinterSettings {
|
||||
flake8_import_conventions: super::settings::Settings {
|
||||
aliases,
|
||||
banned_aliases: FxHashMap::default(),
|
||||
banned_from: FxHashSet::default(),
|
||||
banned_aliases: HashMap::default(),
|
||||
banned_from: HashSet::default(),
|
||||
},
|
||||
..LinterSettings::for_rule(Rule::UnconventionalImportAlias)
|
||||
},
|
||||
@@ -54,7 +54,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
flake8_import_conventions: super::settings::Settings {
|
||||
aliases: default_aliases(),
|
||||
banned_aliases: FxHashMap::from_iter([
|
||||
banned_aliases: HashMap::from_iter([
|
||||
(
|
||||
"typing".to_string(),
|
||||
BannedAliases::from_iter(["t".to_string(), "ty".to_string()]),
|
||||
@@ -72,7 +72,7 @@ mod tests {
|
||||
BannedAliases::from_iter(["F".to_string()]),
|
||||
),
|
||||
]),
|
||||
banned_from: FxHashSet::default(),
|
||||
banned_from: HashSet::default(),
|
||||
},
|
||||
..LinterSettings::for_rule(Rule::BannedImportAlias)
|
||||
},
|
||||
@@ -88,8 +88,8 @@ mod tests {
|
||||
&LinterSettings {
|
||||
flake8_import_conventions: super::settings::Settings {
|
||||
aliases: default_aliases(),
|
||||
banned_aliases: FxHashMap::default(),
|
||||
banned_from: FxHashSet::from_iter([
|
||||
banned_aliases: HashMap::default(),
|
||||
banned_from: HashSet::from_iter([
|
||||
"logging.config".to_string(),
|
||||
"typing".to_string(),
|
||||
"pandas".to_string(),
|
||||
@@ -108,14 +108,14 @@ mod tests {
|
||||
Path::new("flake8_import_conventions/remove_default.py"),
|
||||
&LinterSettings {
|
||||
flake8_import_conventions: super::settings::Settings {
|
||||
aliases: FxHashMap::from_iter([
|
||||
aliases: HashMap::from_iter([
|
||||
("altair".to_string(), "alt".to_string()),
|
||||
("matplotlib.pyplot".to_string(), "plt".to_string()),
|
||||
("pandas".to_string(), "pd".to_string()),
|
||||
("seaborn".to_string(), "sns".to_string()),
|
||||
]),
|
||||
banned_aliases: FxHashMap::default(),
|
||||
banned_from: FxHashSet::default(),
|
||||
banned_aliases: HashMap::default(),
|
||||
banned_from: HashSet::default(),
|
||||
},
|
||||
..LinterSettings::for_rule(Rule::UnconventionalImportAlias)
|
||||
},
|
||||
@@ -127,7 +127,7 @@ mod tests {
|
||||
#[test]
|
||||
fn override_defaults() -> Result<()> {
|
||||
let mut aliases = default_aliases();
|
||||
aliases.extend(FxHashMap::from_iter([(
|
||||
aliases.extend(HashMap::from_iter([(
|
||||
"numpy".to_string(),
|
||||
"nmp".to_string(),
|
||||
)]));
|
||||
@@ -137,8 +137,8 @@ mod tests {
|
||||
&LinterSettings {
|
||||
flake8_import_conventions: super::settings::Settings {
|
||||
aliases,
|
||||
banned_aliases: FxHashMap::default(),
|
||||
banned_from: FxHashSet::default(),
|
||||
banned_aliases: HashMap::default(),
|
||||
banned_from: HashSet::default(),
|
||||
},
|
||||
..LinterSettings::for_rule(Rule::UnconventionalImportAlias)
|
||||
},
|
||||
@@ -150,7 +150,7 @@ mod tests {
|
||||
#[test]
|
||||
fn from_imports() -> Result<()> {
|
||||
let mut aliases = default_aliases();
|
||||
aliases.extend(FxHashMap::from_iter([
|
||||
aliases.extend(HashMap::from_iter([
|
||||
("xml.dom.minidom".to_string(), "md".to_string()),
|
||||
(
|
||||
"xml.dom.minidom.parseString".to_string(),
|
||||
@@ -163,8 +163,8 @@ mod tests {
|
||||
&LinterSettings {
|
||||
flake8_import_conventions: super::settings::Settings {
|
||||
aliases,
|
||||
banned_aliases: FxHashMap::default(),
|
||||
banned_from: FxHashSet::default(),
|
||||
banned_aliases: HashMap::default(),
|
||||
banned_from: HashSet::default(),
|
||||
},
|
||||
..LinterSettings::for_rule(Rule::UnconventionalImportAlias)
|
||||
},
|
||||
@@ -186,7 +186,7 @@ mod tests {
|
||||
#[test]
|
||||
fn same_name() -> Result<()> {
|
||||
let mut aliases = default_aliases();
|
||||
aliases.extend(FxHashMap::from_iter([(
|
||||
aliases.extend(HashMap::from_iter([(
|
||||
"django.conf.settings".to_string(),
|
||||
"settings".to_string(),
|
||||
)]));
|
||||
@@ -195,8 +195,8 @@ mod tests {
|
||||
&LinterSettings {
|
||||
flake8_import_conventions: super::settings::Settings {
|
||||
aliases,
|
||||
banned_aliases: FxHashMap::default(),
|
||||
banned_from: FxHashSet::default(),
|
||||
banned_aliases: HashMap::default(),
|
||||
banned_from: HashSet::default(),
|
||||
},
|
||||
..LinterSettings::for_rule(Rule::UnconventionalImportAlias)
|
||||
},
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -51,7 +51,7 @@ pub(crate) fn banned_import_alias(
|
||||
stmt: &Stmt,
|
||||
name: &str,
|
||||
asname: &str,
|
||||
banned_conventions: &FxHashMap<String, BannedAliases>,
|
||||
banned_conventions: &HashMap<String, BannedAliases>,
|
||||
) -> Option<Diagnostic> {
|
||||
if let Some(banned_aliases) = banned_conventions.get(name) {
|
||||
if banned_aliases
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use foldhash::HashSet;
|
||||
use ruff_python_ast::Stmt;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -48,7 +48,7 @@ impl Violation for BannedImportFrom {
|
||||
pub(crate) fn banned_import_from(
|
||||
stmt: &Stmt,
|
||||
name: &str,
|
||||
banned_conventions: &FxHashSet<String>,
|
||||
banned_conventions: &HashSet<String>,
|
||||
) -> Option<Diagnostic> {
|
||||
if banned_conventions.contains(name) {
|
||||
return Some(Diagnostic::new(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -59,7 +59,7 @@ impl Violation for UnconventionalImportAlias {
|
||||
pub(crate) fn unconventional_import_alias(
|
||||
checker: &Checker,
|
||||
binding: &Binding,
|
||||
conventions: &FxHashMap<String, String>,
|
||||
conventions: &HashMap<String, String>,
|
||||
) -> Option<Diagnostic> {
|
||||
let import = binding.as_any_import()?;
|
||||
let qualified_name = import.qualified_name().to_string();
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use foldhash::{HashMap, HashSet};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use ruff_macros::CacheKey;
|
||||
@@ -60,24 +60,24 @@ impl FromIterator<String> for BannedAliases {
|
||||
|
||||
#[derive(Debug, Clone, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub aliases: FxHashMap<String, String>,
|
||||
pub banned_aliases: FxHashMap<String, BannedAliases>,
|
||||
pub banned_from: FxHashSet<String>,
|
||||
pub aliases: HashMap<String, String>,
|
||||
pub banned_aliases: HashMap<String, BannedAliases>,
|
||||
pub banned_from: HashSet<String>,
|
||||
}
|
||||
|
||||
pub fn default_aliases() -> FxHashMap<String, String> {
|
||||
pub fn default_aliases() -> HashMap<String, String> {
|
||||
CONVENTIONAL_ALIASES
|
||||
.iter()
|
||||
.map(|(k, v)| ((*k).to_string(), (*v).to_string()))
|
||||
.collect::<FxHashMap<_, _>>()
|
||||
.collect::<HashMap<_, _>>()
|
||||
}
|
||||
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
aliases: default_aliases(),
|
||||
banned_aliases: FxHashMap::default(),
|
||||
banned_from: FxHashSet::default(),
|
||||
banned_aliases: HashMap::default(),
|
||||
banned_from: HashSet::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Fix};
|
||||
@@ -51,7 +51,7 @@ impl AlwaysFixableViolation for DuplicateClassFieldDefinition {
|
||||
|
||||
/// PIE794
|
||||
pub(crate) fn duplicate_class_field_definition(checker: &mut Checker, body: &[Stmt]) {
|
||||
let mut seen_targets: FxHashSet<&str> = FxHashSet::default();
|
||||
let mut seen_targets: HashSet<&str> = HashSet::default();
|
||||
for stmt in body {
|
||||
// Extract the property name from the assignment statement.
|
||||
let target = match stmt {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_diagnostics::Violation;
|
||||
@@ -68,7 +68,7 @@ pub(crate) fn non_unique_enums(checker: &mut Checker, parent: &Stmt, body: &[Stm
|
||||
return;
|
||||
}
|
||||
|
||||
let mut seen_targets: FxHashSet<ComparableExpr> = FxHashSet::default();
|
||||
let mut seen_targets: HashSet<ComparableExpr> = HashSet::default();
|
||||
for stmt in body {
|
||||
let Stmt::Assign(ast::StmtAssign { value, .. }) = stmt else {
|
||||
continue;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use foldhash::{HashMapExt, HashSet, HashSetExt};
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -147,11 +147,9 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, call: &ast::ExprCal
|
||||
|
||||
/// Determine the set of keywords that appear in multiple positions (either directly, as in
|
||||
/// `func(x=1)`, or indirectly, as in `func(**{"x": 1})`).
|
||||
fn duplicates(call: &ast::ExprCall) -> FxHashSet<&str> {
|
||||
let mut seen =
|
||||
FxHashSet::with_capacity_and_hasher(call.arguments.keywords.len(), FxBuildHasher);
|
||||
let mut duplicates =
|
||||
FxHashSet::with_capacity_and_hasher(call.arguments.keywords.len(), FxBuildHasher);
|
||||
fn duplicates(call: &ast::ExprCall) -> HashSet<&str> {
|
||||
let mut seen = HashSet::with_capacity(call.arguments.keywords.len());
|
||||
let mut duplicates = HashSet::with_capacity(call.arguments.keywords.len());
|
||||
for keyword in &*call.arguments.keywords {
|
||||
if let Some(name) = &keyword.arg {
|
||||
if !seen.insert(name.as_str()) {
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -45,7 +43,7 @@ impl Violation for DuplicateLiteralMember {
|
||||
|
||||
/// PYI062
|
||||
pub(crate) fn duplicate_literal_member<'a>(checker: &mut Checker, expr: &'a Expr) {
|
||||
let mut seen_nodes: HashSet<ComparableExpr<'_>, _> = FxHashSet::default();
|
||||
let mut seen_nodes = HashSet::<ComparableExpr<'_>>::default();
|
||||
let mut diagnostics: Vec<Diagnostic> = Vec::new();
|
||||
|
||||
// Adds a member to `literal_exprs` if it is a `Literal` annotation
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -52,7 +50,7 @@ impl Violation for DuplicateUnionMember {
|
||||
|
||||
/// PYI016
|
||||
pub(crate) fn duplicate_union_member<'a>(checker: &mut Checker, expr: &'a Expr) {
|
||||
let mut seen_nodes: HashSet<ComparableExpr<'_>, _> = FxHashSet::default();
|
||||
let mut seen_nodes = HashSet::<ComparableExpr<'_>>::default();
|
||||
let mut diagnostics: Vec<Diagnostic> = Vec::new();
|
||||
|
||||
// Adds a member to `literal_exprs` if it is a `Literal` annotation
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::fmt;
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -60,7 +60,7 @@ impl Violation for RedundantLiteralUnion {
|
||||
/// PYI051
|
||||
pub(crate) fn redundant_literal_union<'a>(checker: &mut Checker, union: &'a Expr) {
|
||||
let mut typing_literal_exprs = Vec::new();
|
||||
let mut builtin_types_in_union = FxHashSet::default();
|
||||
let mut builtin_types_in_union = HashSet::default();
|
||||
|
||||
// Adds a member to `literal_exprs` for each value in a `Literal`, and any builtin types
|
||||
// to `builtin_types_in_union`.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::{FxBuildHasher, FxHashMap};
|
||||
use foldhash::{HashMap, HashMapExt, HashSetExt};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -651,8 +651,7 @@ fn check_duplicates(checker: &mut Checker, values: &Expr) {
|
||||
return;
|
||||
};
|
||||
|
||||
let mut seen: FxHashMap<ComparableExpr, usize> =
|
||||
FxHashMap::with_capacity_and_hasher(elts.len(), FxBuildHasher);
|
||||
let mut seen: HashMap<ComparableExpr, usize> = HashMap::with_capacity(elts.len());
|
||||
let mut prev = None;
|
||||
for (index, element) in elts.iter().enumerate() {
|
||||
let expr = ComparableExpr::from(element);
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
use anyhow::{anyhow, bail, Result};
|
||||
use foldhash::{HashMap, HashMapExt, HashSetExt};
|
||||
use ruff_python_ast::name::Name;
|
||||
use ruff_python_ast::{
|
||||
self as ast, Arguments, CmpOp, Expr, ExprContext, Identifier, Keyword, Stmt, UnaryOp,
|
||||
};
|
||||
use ruff_text_size::TextRange;
|
||||
use rustc_hash::{FxBuildHasher, FxHashMap};
|
||||
|
||||
/// An enum to represent the different types of assertions present in the
|
||||
/// `unittest` module. Note: any variants that can't be replaced with plain
|
||||
@@ -230,7 +230,7 @@ impl UnittestAssert {
|
||||
&'a self,
|
||||
args: &'a [Expr],
|
||||
keywords: &'a [Keyword],
|
||||
) -> Result<FxHashMap<&'a str, &'a Expr>> {
|
||||
) -> Result<HashMap<&'a str, &'a Expr>> {
|
||||
// If we have variable-length arguments, abort.
|
||||
if args.iter().any(Expr::is_starred_expr) || keywords.iter().any(|kw| kw.arg.is_none()) {
|
||||
bail!("Variable-length arguments are not supported");
|
||||
@@ -248,8 +248,8 @@ impl UnittestAssert {
|
||||
}
|
||||
|
||||
// Generate a map from argument name to value.
|
||||
let mut args_map: FxHashMap<&str, &Expr> =
|
||||
FxHashMap::with_capacity_and_hasher(args.len() + keywords.len(), FxBuildHasher);
|
||||
let mut args_map: HashMap<&str, &Expr> =
|
||||
HashMap::with_capacity(args.len() + keywords.len());
|
||||
|
||||
// Process positional arguments.
|
||||
for (arg_name, value) in arg_spec.iter().zip(args.iter()) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use foldhash::HashSet;
|
||||
use ruff_python_ast::{self as ast, ElifElseClause, Expr, Identifier, Stmt};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use ruff_python_ast::visitor;
|
||||
use ruff_python_ast::visitor::Visitor;
|
||||
@@ -12,7 +12,7 @@ pub(super) struct Stack<'data> {
|
||||
/// The `elif` or `else` statements in the current function.
|
||||
pub(super) elifs_elses: Vec<(&'data [Stmt], &'data ElifElseClause)>,
|
||||
/// The non-local variables in the current function.
|
||||
pub(super) non_locals: FxHashSet<&'data str>,
|
||||
pub(super) non_locals: HashSet<&'data str>,
|
||||
/// The annotated variables in the current function.
|
||||
///
|
||||
/// For example, consider:
|
||||
@@ -27,7 +27,7 @@ pub(super) struct Stack<'data> {
|
||||
/// In this case, the annotation on `x` is used to cast the return value
|
||||
/// of `foo()` to an `int`. Removing the `x = foo()` statement would
|
||||
/// change the return type of the function.
|
||||
pub(super) annotations: FxHashSet<&'data str>,
|
||||
pub(super) annotations: HashSet<&'data str>,
|
||||
/// Whether the current function is a generator.
|
||||
pub(super) is_generator: bool,
|
||||
/// The `assignment`-to-`return` statement pairs in the current function.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -94,7 +94,7 @@ pub(crate) fn if_else_block_instead_of_dict_lookup(checker: &mut Checker, stmt_i
|
||||
}
|
||||
|
||||
// The `expr` was checked to be a literal above, so this is safe.
|
||||
let mut literals: FxHashSet<ComparableLiteral> = FxHashSet::default();
|
||||
let mut literals: HashSet<ComparableLiteral> = HashSet::default();
|
||||
literals.insert(literal_expr.into());
|
||||
|
||||
for clause in elif_else_clauses {
|
||||
|
||||
@@ -8,7 +8,7 @@ mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
use crate::assert_messages;
|
||||
use crate::registry::Rule;
|
||||
@@ -23,7 +23,7 @@ mod tests {
|
||||
Path::new("flake8_tidy_imports/TID251.py"),
|
||||
&LinterSettings {
|
||||
flake8_tidy_imports: flake8_tidy_imports::settings::Settings {
|
||||
banned_api: FxHashMap::from_iter([
|
||||
banned_api: HashMap::from_iter([
|
||||
(
|
||||
"cgi".to_string(),
|
||||
ApiBan {
|
||||
@@ -52,7 +52,7 @@ mod tests {
|
||||
Path::new("flake8_tidy_imports/TID/my_package/sublib/api/application.py"),
|
||||
&LinterSettings {
|
||||
flake8_tidy_imports: flake8_tidy_imports::settings::Settings {
|
||||
banned_api: FxHashMap::from_iter([
|
||||
banned_api: HashMap::from_iter([
|
||||
(
|
||||
"attrs".to_string(),
|
||||
ApiBan {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
@@ -42,7 +42,7 @@ impl Display for Strictness {
|
||||
#[derive(Debug, Clone, CacheKey, Default)]
|
||||
pub struct Settings {
|
||||
pub ban_relative_imports: Strictness,
|
||||
pub banned_api: FxHashMap<String, ApiBan>,
|
||||
pub banned_api: HashMap<String, ApiBan>,
|
||||
pub banned_module_level_imports: Vec<String>,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -103,7 +103,7 @@ pub(crate) fn runtime_import_in_type_checking_block(
|
||||
diagnostics: &mut Vec<Diagnostic>,
|
||||
) {
|
||||
// Collect all runtime imports by statement.
|
||||
let mut actions: FxHashMap<(NodeId, Action), Vec<ImportBinding>> = FxHashMap::default();
|
||||
let mut actions: HashMap<(NodeId, Action), Vec<ImportBinding>> = HashMap::default();
|
||||
|
||||
for binding_id in scope.binding_ids() {
|
||||
let binding = checker.semantic().binding(binding_id);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -248,10 +248,10 @@ pub(crate) fn typing_only_runtime_import(
|
||||
diagnostics: &mut Vec<Diagnostic>,
|
||||
) {
|
||||
// Collect all typing-only imports by statement and import type.
|
||||
let mut errors_by_statement: FxHashMap<(NodeId, ImportType), Vec<ImportBinding>> =
|
||||
FxHashMap::default();
|
||||
let mut ignores_by_statement: FxHashMap<(NodeId, ImportType), Vec<ImportBinding>> =
|
||||
FxHashMap::default();
|
||||
let mut errors_by_statement: HashMap<(NodeId, ImportType), Vec<ImportBinding>> =
|
||||
HashMap::default();
|
||||
let mut ignores_by_statement: HashMap<(NodeId, ImportType), Vec<ImportBinding>> =
|
||||
HashMap::default();
|
||||
|
||||
for binding_id in scope.binding_ids() {
|
||||
let binding = checker.semantic().binding(binding_id);
|
||||
|
||||
@@ -3,8 +3,8 @@ use std::fmt;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::{fs, iter};
|
||||
|
||||
use foldhash::{HashMap, HashMapExt, HashSet, HashSetExt};
|
||||
use log::debug;
|
||||
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum_macros::EnumIter;
|
||||
|
||||
@@ -283,7 +283,7 @@ impl KnownModules {
|
||||
third_party: Vec<glob::Pattern>,
|
||||
local_folder: Vec<glob::Pattern>,
|
||||
standard_library: Vec<glob::Pattern>,
|
||||
user_defined: FxHashMap<String, Vec<glob::Pattern>>,
|
||||
user_defined: HashMap<String, Vec<glob::Pattern>>,
|
||||
) -> Self {
|
||||
let known: Vec<(glob::Pattern, ImportSection)> = user_defined
|
||||
.into_iter()
|
||||
@@ -315,7 +315,7 @@ impl KnownModules {
|
||||
.collect();
|
||||
|
||||
// Warn in the case of duplicate modules.
|
||||
let mut seen = FxHashSet::with_capacity_and_hasher(known.len(), FxBuildHasher);
|
||||
let mut seen = HashSet::with_capacity(known.len());
|
||||
for (module, _) in &known {
|
||||
if !seen.insert(module) {
|
||||
warn_user_once!("One or more modules are part of multiple import sections, including: `{module}`");
|
||||
@@ -382,8 +382,8 @@ impl KnownModules {
|
||||
}
|
||||
|
||||
/// Return the list of user-defined modules, indexed by section.
|
||||
pub fn user_defined(&self) -> FxHashMap<&str, Vec<&glob::Pattern>> {
|
||||
let mut user_defined: FxHashMap<&str, Vec<&glob::Pattern>> = FxHashMap::default();
|
||||
pub fn user_defined(&self) -> HashMap<&str, Vec<&glob::Pattern>> {
|
||||
let mut user_defined: HashMap<&str, Vec<&glob::Pattern>> = HashMap::default();
|
||||
for (module, section) in &self.known {
|
||||
if let ImportSection::UserDefined(section_name) = section {
|
||||
user_defined
|
||||
|
||||
@@ -282,9 +282,9 @@ mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use foldhash::{HashMap, HashMapExt, HashSet, HashSetExt};
|
||||
use ruff_python_semantic::{MemberNameImport, ModuleNameImport, NameImport};
|
||||
use ruff_text_size::Ranged;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use test_case::test_case;
|
||||
|
||||
use crate::assert_messages;
|
||||
@@ -378,7 +378,7 @@ mod tests {
|
||||
vec![pattern("foo"), pattern("__future__")],
|
||||
vec![],
|
||||
vec![],
|
||||
FxHashMap::default(),
|
||||
HashMap::default(),
|
||||
),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
@@ -402,7 +402,7 @@ mod tests {
|
||||
vec![pattern("foo"), pattern("__future__")],
|
||||
vec![],
|
||||
vec![],
|
||||
FxHashMap::default(),
|
||||
HashMap::default(),
|
||||
),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
@@ -426,7 +426,7 @@ mod tests {
|
||||
vec![pattern("foo.bar")],
|
||||
vec![],
|
||||
vec![],
|
||||
FxHashMap::default(),
|
||||
HashMap::default(),
|
||||
),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
@@ -465,7 +465,7 @@ mod tests {
|
||||
vec![],
|
||||
vec![pattern("ruff")],
|
||||
vec![],
|
||||
FxHashMap::default(),
|
||||
HashMap::default(),
|
||||
),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
@@ -489,7 +489,7 @@ mod tests {
|
||||
vec![],
|
||||
vec![pattern("ruff")],
|
||||
vec![],
|
||||
FxHashMap::default(),
|
||||
HashMap::default(),
|
||||
),
|
||||
relative_imports_order: RelativeImportsOrder::ClosestToFurthest,
|
||||
..super::settings::Settings::default()
|
||||
@@ -527,7 +527,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_to_top: FxHashSet::from_iter([
|
||||
force_to_top: HashSet::from_iter([
|
||||
"z".to_string(),
|
||||
"lib1".to_string(),
|
||||
"lib3".to_string(),
|
||||
@@ -607,7 +607,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_single_line: true,
|
||||
single_line_exclusions: FxHashSet::from_iter([
|
||||
single_line_exclusions: HashSet::from_iter([
|
||||
"os".to_string(),
|
||||
"logging.handlers".to_string(),
|
||||
]),
|
||||
@@ -669,7 +669,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
classes: FxHashSet::from_iter([
|
||||
classes: HashSet::from_iter([
|
||||
"SVC".to_string(),
|
||||
"SELU".to_string(),
|
||||
"N_CLASS".to_string(),
|
||||
@@ -697,7 +697,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
constants: FxHashSet::from_iter([
|
||||
constants: HashSet::from_iter([
|
||||
"Const".to_string(),
|
||||
"constant".to_string(),
|
||||
"First".to_string(),
|
||||
@@ -727,7 +727,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
variables: FxHashSet::from_iter([
|
||||
variables: HashSet::from_iter([
|
||||
"VAR".to_string(),
|
||||
"Variable".to_string(),
|
||||
"MyVar".to_string(),
|
||||
@@ -754,7 +754,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_sort_within_sections: true,
|
||||
force_to_top: FxHashSet::from_iter(["z".to_string()]),
|
||||
force_to_top: HashSet::from_iter(["z".to_string()]),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
@@ -1010,7 +1010,7 @@ mod tests {
|
||||
vec![],
|
||||
vec![],
|
||||
vec![],
|
||||
FxHashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
|
||||
HashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
|
||||
),
|
||||
section_order: vec![
|
||||
ImportSection::Known(ImportType::Future),
|
||||
@@ -1061,7 +1061,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
no_lines_before: FxHashSet::from_iter([
|
||||
no_lines_before: HashSet::from_iter([
|
||||
ImportSection::Known(ImportType::Future),
|
||||
ImportSection::Known(ImportType::StandardLibrary),
|
||||
ImportSection::Known(ImportType::ThirdParty),
|
||||
@@ -1089,7 +1089,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
no_lines_before: FxHashSet::from_iter([
|
||||
no_lines_before: HashSet::from_iter([
|
||||
ImportSection::Known(ImportType::StandardLibrary),
|
||||
ImportSection::Known(ImportType::LocalFolder),
|
||||
]),
|
||||
@@ -1202,7 +1202,7 @@ mod tests {
|
||||
vec![],
|
||||
vec![],
|
||||
vec![],
|
||||
FxHashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
|
||||
HashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
|
||||
),
|
||||
section_order: vec![
|
||||
ImportSection::Known(ImportType::Future),
|
||||
@@ -1235,7 +1235,7 @@ mod tests {
|
||||
vec![],
|
||||
vec![],
|
||||
vec![],
|
||||
FxHashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
|
||||
HashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
|
||||
),
|
||||
section_order: vec![
|
||||
ImportSection::Known(ImportType::Future),
|
||||
@@ -1267,7 +1267,7 @@ mod tests {
|
||||
vec![],
|
||||
vec![],
|
||||
vec![],
|
||||
FxHashMap::default(),
|
||||
HashMap::default(),
|
||||
),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
|
||||
@@ -5,7 +5,7 @@ use std::error::Error;
|
||||
use std::fmt;
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
@@ -53,17 +53,17 @@ pub struct Settings {
|
||||
pub force_sort_within_sections: bool,
|
||||
pub case_sensitive: bool,
|
||||
pub force_wrap_aliases: bool,
|
||||
pub force_to_top: FxHashSet<String>,
|
||||
pub force_to_top: HashSet<String>,
|
||||
pub known_modules: KnownModules,
|
||||
pub detect_same_package: bool,
|
||||
pub order_by_type: bool,
|
||||
pub relative_imports_order: RelativeImportsOrder,
|
||||
pub single_line_exclusions: FxHashSet<String>,
|
||||
pub single_line_exclusions: HashSet<String>,
|
||||
pub split_on_trailing_comma: bool,
|
||||
pub classes: FxHashSet<String>,
|
||||
pub constants: FxHashSet<String>,
|
||||
pub variables: FxHashSet<String>,
|
||||
pub no_lines_before: FxHashSet<ImportSection>,
|
||||
pub classes: HashSet<String>,
|
||||
pub constants: HashSet<String>,
|
||||
pub variables: HashSet<String>,
|
||||
pub no_lines_before: HashSet<ImportSection>,
|
||||
pub lines_after_imports: isize,
|
||||
pub lines_between_types: usize,
|
||||
pub forced_separate: Vec<String>,
|
||||
@@ -85,16 +85,16 @@ impl Default for Settings {
|
||||
detect_same_package: true,
|
||||
case_sensitive: false,
|
||||
force_wrap_aliases: false,
|
||||
force_to_top: FxHashSet::default(),
|
||||
force_to_top: HashSet::default(),
|
||||
known_modules: KnownModules::default(),
|
||||
order_by_type: true,
|
||||
relative_imports_order: RelativeImportsOrder::default(),
|
||||
single_line_exclusions: FxHashSet::default(),
|
||||
single_line_exclusions: HashSet::default(),
|
||||
split_on_trailing_comma: true,
|
||||
classes: FxHashSet::default(),
|
||||
constants: FxHashSet::default(),
|
||||
variables: FxHashSet::default(),
|
||||
no_lines_before: FxHashSet::default(),
|
||||
classes: HashSet::default(),
|
||||
constants: HashSet::default(),
|
||||
variables: HashSet::default(),
|
||||
no_lines_before: HashSet::default(),
|
||||
lines_after_imports: -1,
|
||||
lines_between_types: 0,
|
||||
forced_separate: Vec::new(),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
use ruff_python_ast::helpers::format_import_from;
|
||||
|
||||
@@ -73,7 +73,7 @@ impl<'a> Importable<'a> for ImportFromData<'a> {
|
||||
#[derive(Debug, Default)]
|
||||
pub(crate) struct ImportFromStatement<'a> {
|
||||
pub(crate) comments: ImportFromCommentSet<'a>,
|
||||
pub(crate) aliases: FxHashMap<AliasData<'a>, ImportFromCommentSet<'a>>,
|
||||
pub(crate) aliases: HashMap<AliasData<'a>, ImportFromCommentSet<'a>>,
|
||||
pub(crate) trailing_comma: TrailingComma,
|
||||
}
|
||||
|
||||
@@ -81,17 +81,17 @@ pub(crate) struct ImportFromStatement<'a> {
|
||||
pub(crate) struct ImportBlock<'a> {
|
||||
// Set of (name, asname), used to track regular imports.
|
||||
// Ex) `import module`
|
||||
pub(crate) import: FxHashMap<AliasData<'a>, ImportCommentSet<'a>>,
|
||||
pub(crate) import: HashMap<AliasData<'a>, ImportCommentSet<'a>>,
|
||||
// Map from (module, level) to `AliasData`, used to track 'from' imports.
|
||||
// Ex) `from module import member`
|
||||
pub(crate) import_from: FxHashMap<ImportFromData<'a>, ImportFromStatement<'a>>,
|
||||
pub(crate) import_from: HashMap<ImportFromData<'a>, ImportFromStatement<'a>>,
|
||||
// Set of (module, level, name, asname), used to track re-exported 'from' imports.
|
||||
// Ex) `from module import member as member`
|
||||
pub(crate) import_from_as:
|
||||
FxHashMap<(ImportFromData<'a>, AliasData<'a>), ImportFromStatement<'a>>,
|
||||
HashMap<(ImportFromData<'a>, AliasData<'a>), ImportFromStatement<'a>>,
|
||||
// Map from (module, level) to `AliasData`, used to track star imports.
|
||||
// Ex) `from module import *`
|
||||
pub(crate) import_from_star: FxHashMap<ImportFromData<'a>, ImportFromStatement<'a>>,
|
||||
pub(crate) import_from_star: HashMap<ImportFromData<'a>, ImportFromStatement<'a>>,
|
||||
}
|
||||
|
||||
type Import<'a> = (AliasData<'a>, ImportCommentSet<'a>);
|
||||
|
||||
@@ -8,11 +8,12 @@ mod tests {
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashMap;
|
||||
use test_case::test_case;
|
||||
|
||||
use crate::registry::Rule;
|
||||
use crate::rules::pep8_naming;
|
||||
use crate::rules::pep8_naming::settings::IgnoreNames;
|
||||
use crate::rules::{flake8_import_conventions, pep8_naming};
|
||||
use crate::test::test_path;
|
||||
use crate::{assert_messages, settings};
|
||||
|
||||
@@ -87,6 +88,25 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn camelcase_imported_as_incorrect_convention() -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
Path::new("pep8_naming").join("N817.py").as_path(),
|
||||
&settings::LinterSettings {
|
||||
flake8_import_conventions: flake8_import_conventions::settings::Settings {
|
||||
aliases: FxHashMap::from_iter([(
|
||||
"xml.etree.ElementTree".to_string(),
|
||||
"XET".to_string(),
|
||||
)]),
|
||||
..Default::default()
|
||||
},
|
||||
..settings::LinterSettings::for_rule(Rule::CamelcaseImportedAsAcronym)
|
||||
},
|
||||
)?;
|
||||
assert_messages!(diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn classmethod_decorators() -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
use ruff_python_ast::{Alias, Stmt};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{Alias, Stmt};
|
||||
use ruff_python_stdlib::str::{self};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::rules::pep8_naming::helpers;
|
||||
use crate::rules::pep8_naming::settings::IgnoreNames;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for `CamelCase` imports that are aliased as acronyms.
|
||||
@@ -23,6 +22,9 @@ use crate::rules::pep8_naming::settings::IgnoreNames;
|
||||
/// Note that this rule is distinct from `camelcase-imported-as-constant`
|
||||
/// to accommodate selective enforcement.
|
||||
///
|
||||
/// Also note that import aliases following an import convention according to the
|
||||
/// [`lint.flake8-import-conventions.aliases`] option are allowed.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from example import MyClassName as MCN
|
||||
@@ -34,6 +36,9 @@ use crate::rules::pep8_naming::settings::IgnoreNames;
|
||||
/// ```
|
||||
///
|
||||
/// [PEP 8]: https://peps.python.org/pep-0008/
|
||||
///
|
||||
/// ## Options
|
||||
/// - `lint.flake8-import-conventions.aliases`
|
||||
#[violation]
|
||||
pub struct CamelcaseImportedAsAcronym {
|
||||
name: String,
|
||||
@@ -54,17 +59,32 @@ pub(crate) fn camelcase_imported_as_acronym(
|
||||
asname: &str,
|
||||
alias: &Alias,
|
||||
stmt: &Stmt,
|
||||
ignore_names: &IgnoreNames,
|
||||
checker: &Checker,
|
||||
) -> Option<Diagnostic> {
|
||||
if helpers::is_camelcase(name)
|
||||
&& !str::is_cased_lowercase(asname)
|
||||
&& str::is_cased_uppercase(asname)
|
||||
&& helpers::is_acronym(name, asname)
|
||||
{
|
||||
let ignore_names = &checker.settings.pep8_naming.ignore_names;
|
||||
|
||||
// Ignore any explicitly-allowed names.
|
||||
if ignore_names.matches(name) || ignore_names.matches(asname) {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Ignore names that follow a community-agreed import convention.
|
||||
if checker
|
||||
.settings
|
||||
.flake8_import_conventions
|
||||
.aliases
|
||||
.get(&*alias.name)
|
||||
.map(String::as_str)
|
||||
== Some(asname)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
CamelcaseImportedAsAcronym {
|
||||
name: name.to_string(),
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pep8_naming/mod.rs
|
||||
---
|
||||
N817.py:1:8: N817 CamelCase `CaMel` imported as acronym `CM`
|
||||
|
|
||||
1 | import mod.CaMel as CM
|
||||
| ^^^^^^^^^^^^^^^ N817
|
||||
2 | from mod import CamelCase as CC
|
||||
|
|
||||
|
||||
N817.py:2:17: N817 CamelCase `CamelCase` imported as acronym `CC`
|
||||
|
|
||||
1 | import mod.CaMel as CM
|
||||
2 | from mod import CamelCase as CC
|
||||
| ^^^^^^^^^^^^^^^ N817
|
||||
|
|
||||
|
||||
N817.py:6:8: N817 CamelCase `ElementTree` imported as acronym `ET`
|
||||
|
|
||||
5 | # OK depending on configured import convention
|
||||
6 | import xml.etree.ElementTree as ET
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ N817
|
||||
|
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::FxHashMap;
|
||||
use foldhash::HashMap;
|
||||
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -175,7 +175,7 @@ pub(crate) fn literal_comparisons(checker: &mut Checker, compare: &ast::ExprComp
|
||||
// through the list of operators, we apply "dummy" fixes for each error,
|
||||
// then replace the entire expression at the end with one "real" fix, to
|
||||
// avoid conflicts.
|
||||
let mut bad_ops: FxHashMap<usize, CmpOp> = FxHashMap::default();
|
||||
let mut bad_ops: HashMap<usize, CmpOp> = HashMap::default();
|
||||
let mut diagnostics: Vec<Diagnostic> = vec![];
|
||||
|
||||
// Check `left`.
|
||||
|
||||
@@ -34,6 +34,7 @@ use crate::registry::Rule;
|
||||
///
|
||||
/// ```python
|
||||
/// class PhotoMetadata:
|
||||
///
|
||||
/// """Metadata about a photo."""
|
||||
/// ```
|
||||
///
|
||||
@@ -125,6 +126,7 @@ impl AlwaysFixableViolation for OneBlankLineAfterClass {
|
||||
///
|
||||
/// ```python
|
||||
/// class PhotoMetadata:
|
||||
///
|
||||
/// """Metadata about a photo."""
|
||||
/// ```
|
||||
///
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use foldhash::HashSet;
|
||||
use itertools::Itertools;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use rustc_hash::FxHashSet;
|
||||
use std::ops::Add;
|
||||
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Violation};
|
||||
@@ -1779,13 +1779,13 @@ fn common_section(
|
||||
blanks_and_section_underline(checker, docstring, context);
|
||||
}
|
||||
|
||||
fn missing_args(checker: &mut Checker, docstring: &Docstring, docstrings_args: &FxHashSet<String>) {
|
||||
fn missing_args(checker: &mut Checker, docstring: &Docstring, docstrings_args: &HashSet<String>) {
|
||||
let Some(function) = docstring.definition.as_function_def() else {
|
||||
return;
|
||||
};
|
||||
|
||||
// Look for arguments that weren't included in the docstring.
|
||||
let mut missing_arg_names: FxHashSet<String> = FxHashSet::default();
|
||||
let mut missing_arg_names: HashSet<String> = HashSet::default();
|
||||
|
||||
// If this is a non-static method, skip `cls` or `self`.
|
||||
for ParameterWithDefault {
|
||||
@@ -1847,10 +1847,10 @@ fn missing_args(checker: &mut Checker, docstring: &Docstring, docstrings_args: &
|
||||
static GOOGLE_ARGS_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"^\s*(\*?\*?\w+)\s*(\(.*?\))?\s*:(\r\n|\n)?\s*.+").unwrap());
|
||||
|
||||
fn args_section(context: &SectionContext) -> FxHashSet<String> {
|
||||
fn args_section(context: &SectionContext) -> HashSet<String> {
|
||||
let mut following_lines = context.following_lines().peekable();
|
||||
let Some(first_line) = following_lines.next() else {
|
||||
return FxHashSet::default();
|
||||
return HashSet::default();
|
||||
};
|
||||
|
||||
// Normalize leading whitespace, by removing any lines with less indentation
|
||||
@@ -1896,12 +1896,12 @@ fn args_section(context: &SectionContext) -> FxHashSet<String> {
|
||||
matches
|
||||
.iter()
|
||||
.filter_map(|captures| captures.get(1).map(|arg_name| arg_name.as_str().to_owned()))
|
||||
.collect::<FxHashSet<String>>()
|
||||
.collect::<HashSet<String>>()
|
||||
}
|
||||
|
||||
fn parameters_section(checker: &mut Checker, docstring: &Docstring, context: &SectionContext) {
|
||||
// Collect the list of arguments documented in the docstring.
|
||||
let mut docstring_args: FxHashSet<String> = FxHashSet::default();
|
||||
let mut docstring_args: HashSet<String> = HashSet::default();
|
||||
let section_level_indent = leading_space(context.summary_line());
|
||||
|
||||
// Join line continuations, then resplit by line.
|
||||
@@ -2026,7 +2026,7 @@ fn parse_google_sections(
|
||||
|
||||
if checker.enabled(Rule::UndocumentedParam) {
|
||||
let mut has_args = false;
|
||||
let mut documented_args: FxHashSet<String> = FxHashSet::default();
|
||||
let mut documented_args: HashSet<String> = HashSet::default();
|
||||
for section_context in section_contexts {
|
||||
// Checks occur at the section level. Since two sections (args/keyword args and their
|
||||
// variants) can list arguments, we need to unify the sets of arguments mentioned in both
|
||||
|
||||
@@ -2,22 +2,22 @@
|
||||
use std::convert::TryFrom;
|
||||
use std::str::FromStr;
|
||||
|
||||
use foldhash::HashSet;
|
||||
use ruff_python_literal::cformat::{
|
||||
CFormatError, CFormatPart, CFormatPrecision, CFormatQuantity, CFormatSpec, CFormatString,
|
||||
};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
pub(crate) struct CFormatSummary {
|
||||
pub(crate) starred: bool,
|
||||
pub(crate) num_positional: usize,
|
||||
pub(crate) keywords: FxHashSet<String>,
|
||||
pub(crate) keywords: HashSet<String>,
|
||||
}
|
||||
|
||||
impl From<&CFormatString> for CFormatSummary {
|
||||
fn from(format_string: &CFormatString) -> Self {
|
||||
let mut starred = false;
|
||||
let mut num_positional = 0;
|
||||
let mut keywords = FxHashSet::default();
|
||||
let mut keywords = HashSet::default();
|
||||
|
||||
for format_part in format_string.iter() {
|
||||
let CFormatPart::Spec(CFormatSpec {
|
||||
|
||||
@@ -10,8 +10,8 @@ mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use foldhash::HashMap;
|
||||
use regex::Regex;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use test_case::test_case;
|
||||
|
||||
@@ -261,7 +261,7 @@ mod tests {
|
||||
vec![],
|
||||
vec![],
|
||||
vec![],
|
||||
FxHashMap::default(),
|
||||
HashMap::default(),
|
||||
),
|
||||
..isort::settings::Settings::default()
|
||||
},
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
|
||||
use foldhash::{HashMap, HashMapExt, HashSet, HashSetExt};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -129,8 +129,8 @@ impl Violation for MultiValueRepeatedKeyVariable {
|
||||
/// F601, F602
|
||||
pub(crate) fn repeated_keys(checker: &mut Checker, dict: &ast::ExprDict) {
|
||||
// Generate a map from key to (index, value).
|
||||
let mut seen: FxHashMap<ComparableExpr, FxHashSet<ComparableExpr>> =
|
||||
FxHashMap::with_capacity_and_hasher(dict.len(), FxBuildHasher);
|
||||
let mut seen: HashMap<ComparableExpr, HashSet<ComparableExpr>> =
|
||||
HashMap::with_capacity(dict.len());
|
||||
|
||||
// Detect duplicate keys.
|
||||
for (i, ast::DictItem { key, value }) in dict.iter().enumerate() {
|
||||
@@ -142,7 +142,7 @@ pub(crate) fn repeated_keys(checker: &mut Checker, dict: &ast::ExprDict) {
|
||||
let comparable_value = ComparableExpr::from(value);
|
||||
|
||||
let Some(seen_values) = seen.get_mut(&comparable_key) else {
|
||||
seen.insert(comparable_key, FxHashSet::from_iter([comparable_value]));
|
||||
seen.insert(comparable_key, HashSet::from_iter([comparable_value]));
|
||||
continue;
|
||||
};
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::string::ToString;
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -637,7 +637,7 @@ pub(crate) fn percent_format_missing_arguments(
|
||||
return; // contains **x splat
|
||||
}
|
||||
|
||||
let mut keywords = FxHashSet::default();
|
||||
let mut keywords = HashSet::default();
|
||||
for key in dict.iter_keys().flatten() {
|
||||
match key {
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
|
||||
@@ -859,7 +859,7 @@ pub(crate) fn string_dot_format_missing_argument(
|
||||
return;
|
||||
}
|
||||
|
||||
let keywords: FxHashSet<_> = keywords
|
||||
let keywords: HashSet<_> = keywords
|
||||
.iter()
|
||||
.filter_map(|k| {
|
||||
let Keyword { arg, .. } = &k;
|
||||
|
||||
@@ -8,8 +8,8 @@ mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use foldhash::HashSet;
|
||||
use regex::Regex;
|
||||
use rustc_hash::FxHashSet;
|
||||
use test_case::test_case;
|
||||
|
||||
use crate::registry::Rule;
|
||||
@@ -123,7 +123,10 @@ mod tests {
|
||||
#[test_case(Rule::RedefinedLoopName, Path::new("redefined_loop_name.py"))]
|
||||
#[test_case(Rule::ReturnInInit, Path::new("return_in_init.py"))]
|
||||
#[test_case(Rule::TooManyArguments, Path::new("too_many_arguments.py"))]
|
||||
#[test_case(Rule::TooManyPositional, Path::new("too_many_positional.py"))]
|
||||
#[test_case(
|
||||
Rule::TooManyPositionalArguments,
|
||||
Path::new("too_many_positional_arguments.py")
|
||||
)]
|
||||
#[test_case(Rule::TooManyBranches, Path::new("too_many_branches.py"))]
|
||||
#[test_case(
|
||||
Rule::TooManyReturnStatements,
|
||||
@@ -217,7 +220,7 @@ mod tests {
|
||||
Path::new("pylint").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
pylint: pylint::settings::Settings {
|
||||
allow_dunder_method_names: FxHashSet::from_iter([
|
||||
allow_dunder_method_names: HashSet::from_iter([
|
||||
"__special_custom_magic__".to_string()
|
||||
]),
|
||||
..pylint::settings::Settings::default()
|
||||
@@ -294,7 +297,7 @@ mod tests {
|
||||
max_positional_args: 4,
|
||||
..pylint::settings::Settings::default()
|
||||
},
|
||||
..LinterSettings::for_rule(Rule::TooManyPositional)
|
||||
..LinterSettings::for_rule(Rule::TooManyPositionalArguments)
|
||||
},
|
||||
)?;
|
||||
assert_messages!(diagnostics);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::fmt;
|
||||
|
||||
use foldhash::HashSet;
|
||||
use ruff_python_ast::{self as ast, Expr};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -124,7 +124,7 @@ impl fmt::Display for RemovalKind {
|
||||
/// escapes.
|
||||
fn has_duplicates(s: &ast::StringLiteralValue) -> bool {
|
||||
let mut escaped = false;
|
||||
let mut seen = FxHashSet::default();
|
||||
let mut seen = HashSet::default();
|
||||
for ch in s.chars() {
|
||||
if escaped {
|
||||
escaped = false;
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use foldhash::HashMap;
|
||||
use ruff_python_ast::{self as ast, Expr, StringFlags, StringLiteral};
|
||||
use ruff_python_literal::cformat::{CFormatPart, CFormatSpec, CFormatStrOrBytes, CFormatString};
|
||||
use ruff_text_size::Ranged;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -178,7 +178,7 @@ fn is_valid_dict(formats: &[CFormatStrOrBytes<String>], items: &[ast::DictItem])
|
||||
return true;
|
||||
}
|
||||
|
||||
let formats_hash: FxHashMap<&str, &&CFormatSpec> = formats
|
||||
let formats_hash: HashMap<&str, &&CFormatSpec> = formats
|
||||
.iter()
|
||||
.filter_map(|format| {
|
||||
format
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use foldhash::{HashMapExt, HashSet, HashSetExt};
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr};
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -63,7 +63,7 @@ pub(crate) fn duplicate_bases(checker: &mut Checker, name: &str, arguments: Opti
|
||||
};
|
||||
let bases = &arguments.args;
|
||||
|
||||
let mut seen: FxHashSet<&str> = FxHashSet::with_capacity_and_hasher(bases.len(), FxBuildHasher);
|
||||
let mut seen: HashSet<&str> = HashSet::with_capacity(bases.len());
|
||||
for base in &**bases {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = base {
|
||||
if !seen.insert(id) {
|
||||
|
||||
@@ -79,7 +79,7 @@ pub(crate) use too_many_boolean_expressions::*;
|
||||
pub(crate) use too_many_branches::*;
|
||||
pub(crate) use too_many_locals::*;
|
||||
pub(crate) use too_many_nested_blocks::*;
|
||||
pub(crate) use too_many_positional::*;
|
||||
pub(crate) use too_many_positional_arguments::*;
|
||||
pub(crate) use too_many_public_methods::*;
|
||||
pub(crate) use too_many_return_statements::*;
|
||||
pub(crate) use too_many_statements::*;
|
||||
@@ -182,7 +182,7 @@ mod too_many_boolean_expressions;
|
||||
mod too_many_branches;
|
||||
mod too_many_locals;
|
||||
mod too_many_nested_blocks;
|
||||
mod too_many_positional;
|
||||
mod too_many_positional_arguments;
|
||||
mod too_many_public_methods;
|
||||
mod too_many_return_statements;
|
||||
mod too_many_statements;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use rustc_hash::FxHashSet;
|
||||
use foldhash::HashSet;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -102,7 +102,7 @@ impl Ranged for AttributeAssignment<'_> {
|
||||
/// If the `__slots__` attribute cannot be statically determined, returns an empty vector.
|
||||
fn is_attributes_not_in_slots(body: &[Stmt]) -> Vec<AttributeAssignment> {
|
||||
// First, collect all the attributes that are assigned to `__slots__`.
|
||||
let mut slots = FxHashSet::default();
|
||||
let mut slots = HashSet::default();
|
||||
for statement in body {
|
||||
match statement {
|
||||
// Ex) `__slots__ = ("name",)`
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user