Compare commits
9 commits (editables-...0.5.4)
| Author | SHA1 | Date | |
|---|---|---|---|
| | 53b84ab054 | | |
| | 3664f85f45 | | |
| | 2c1926beeb | | |
| | 4bcc96ae51 | | |
| | c0a2b49bac | | |
| | ca22248628 | | |
| | d8cf8ac2ef | | |
| | 1c7b84059e | | |
| | f82bb67555 | | |
CHANGELOG.md (17 lines changed)

@@ -1,5 +1,22 @@
# Changelog

## 0.5.4

### Rule changes

- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))

### Bug fixes

- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))

### Documentation

- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))

## 0.5.3

**Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped
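As a quick illustration of the renamed rule (not part of the diff): `zip-instead-of-pairwise` flags zipping a sequence with an offset slice of itself and suggests `itertools.pairwise` instead. A minimal sketch:

```python
import itertools

letters = ["a", "b", "c", "d"]

# Flagged by RUF007 (`zip-instead-of-pairwise`): zipping a sequence with an
# offset copy of itself to walk over successive pairs.
pairs_via_zip = list(zip(letters, letters[1:]))

# Suggested replacement: itertools.pairwise (Python 3.10+).
pairs_via_pairwise = list(itertools.pairwise(letters))

assert pairs_via_zip == pairs_via_pairwise == [("a", "b"), ("b", "c"), ("c", "d")]
```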
Cargo.lock (generated, 6 lines changed)

@@ -1992,7 +1992,7 @@ dependencies = [

[[package]]
name = "ruff"
version = "0.5.3"
version = "0.5.4"
dependencies = [
 "anyhow",
 "argfile",

@@ -2176,7 +2176,7 @@ dependencies = [

[[package]]
name = "ruff_linter"
version = "0.5.3"
version = "0.5.4"
dependencies = [
 "aho-corasick",
 "annotate-snippets 0.9.2",

@@ -2491,7 +2491,7 @@ dependencies = [

[[package]]
name = "ruff_wasm"
version = "0.5.3"
version = "0.5.4"
dependencies = [
 "console_error_panic_hook",
 "console_log",
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.5.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.3/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.5.4/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.4/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),

@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.5.3
  rev: v0.5.4
  hooks:
    # Run the linter.
    - id: ruff
@@ -233,10 +233,16 @@ impl ModuleResolutionPathBuf {
        ModuleResolutionPathRef::from(self).is_directory(search_path, resolver)
    }

    pub(crate) fn is_site_packages(&self) -> bool {
    #[must_use]
    pub(crate) const fn is_site_packages(&self) -> bool {
        matches!(self.0, ModuleResolutionPathBufInner::SitePackages(_))
    }

    #[must_use]
    pub(crate) const fn is_standard_library(&self) -> bool {
        matches!(self.0, ModuleResolutionPathBufInner::StandardLibrary(_))
    }

    #[must_use]
    pub(crate) fn with_pyi_extension(&self) -> Self {
        ModuleResolutionPathRef::from(self).with_pyi_extension()
@@ -2,6 +2,7 @@ use std::borrow::Cow;
use std::iter::FusedIterator;
use std::sync::Arc;

use once_cell::sync::Lazy;
use rustc_hash::{FxBuildHasher, FxHashSet};

use ruff_db::files::{File, FilePath};

@@ -442,6 +443,52 @@ pub(crate) mod internal {
    }
}

/// Modules that are builtin to the Python interpreter itself.
///
/// When these module names are imported, standard module resolution is bypassed:
/// the module name always resolves to the stdlib module,
/// even if there's a module of the same name in the workspace root
/// (which would normally result in the stdlib module being overridden).
///
/// TODO(Alex): write a script to generate this list,
/// similar to what we do in `crates/ruff_python_stdlib/src/sys.rs`
static BUILTIN_MODULES: Lazy<FxHashSet<&str>> = Lazy::new(|| {
    const BUILTIN_MODULE_NAMES: &[&str] = &[
        "_abc",
        "_ast",
        "_codecs",
        "_collections",
        "_functools",
        "_imp",
        "_io",
        "_locale",
        "_operator",
        "_signal",
        "_sre",
        "_stat",
        "_string",
        "_symtable",
        "_thread",
        "_tokenize",
        "_tracemalloc",
        "_typing",
        "_warnings",
        "_weakref",
        "atexit",
        "builtins",
        "errno",
        "faulthandler",
        "gc",
        "itertools",
        "marshal",
        "posix",
        "pwd",
        "sys",
        "time",
    ];
    BUILTIN_MODULE_NAMES.iter().copied().collect()
});

/// Given a module name and a list of search paths in which to lookup modules,
/// attempt to resolve the module name
fn resolve_name(

@@ -450,8 +497,12 @@ fn resolve_name(
) -> Option<(Arc<ModuleResolutionPathBuf>, File, ModuleKind)> {
    let resolver_settings = module_resolution_settings(db);
    let resolver_state = ResolverState::new(db, resolver_settings.target_version());
    let is_builtin_module = BUILTIN_MODULES.contains(&name.as_str());

    for search_path in resolver_settings.search_paths(db) {
        if is_builtin_module && !search_path.is_standard_library() {
            continue;
        }
        let mut components = name.components();
        let module_name = components.next_back()?;
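The bypass implemented above mirrors runtime behavior: modules compiled into the CPython interpreter are always imported from the interpreter itself, so a same-named file in the workspace cannot actually shadow them. A small Python sketch of that invariant (illustrative only, not part of the diff; the helper name is made up):

```python
import sys

# Names compiled into the interpreter -- the runtime counterpart of the
# resolver's BUILTIN_MODULES list above.
print(sorted(sys.builtin_module_names)[:8])


def is_builtin_module(name: str) -> bool:
    """Hypothetical helper: rough Python analogue of the new resolver check."""
    return name in sys.builtin_module_names


assert is_builtin_module("builtins")
assert is_builtin_module("sys")
assert not is_builtin_module("os")  # `os` is an ordinary on-disk stdlib module
```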
@@ -629,6 +680,40 @@ mod tests {
        );
    }

    #[test]
    fn builtins_vendored() {
        let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
            .with_vendored_typeshed()
            .with_src_files(&[("builtins.py", "FOOOO = 42")])
            .build();

        let builtins_module_name = ModuleName::new_static("builtins").unwrap();
        let builtins = resolve_module(&db, builtins_module_name).expect("builtins to resolve");

        assert_eq!(builtins.file().path(&db), &stdlib.join("builtins.pyi"));
    }

    #[test]
    fn builtins_custom() {
        const TYPESHED: MockedTypeshed = MockedTypeshed {
            stdlib_files: &[("builtins.pyi", "def min(a, b): ...")],
            versions: "builtins: 3.8-",
        };

        const SRC: &[FileSpec] = &[("builtins.py", "FOOOO = 42")];

        let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
            .with_src_files(SRC)
            .with_custom_typeshed(TYPESHED)
            .with_target_version(TargetVersion::Py38)
            .build();

        let builtins_module_name = ModuleName::new_static("builtins").unwrap();
        let builtins = resolve_module(&db, builtins_module_name).expect("builtins to resolve");

        assert_eq!(builtins.file().path(&db), &stdlib.join("builtins.pyi"));
    }

    #[test]
    fn stdlib() {
        const TYPESHED: MockedTypeshed = MockedTypeshed {

@@ -1603,28 +1688,4 @@ not_a_directory
            ModuleResolutionPathBuf::editable_installation_root(db.system(), "/src").unwrap()
        )));
    }

    #[test]
    fn no_duplicate_editable_search_paths_added() {
        let TestCase { mut db, .. } = TestCaseBuilder::new()
            .with_site_packages_files(&[("_foo.pth", "/x"), ("_bar.pth", "/x")])
            .build();

        db.write_file("/x/foo.py", "").unwrap();

        let search_paths: Vec<&SearchPathRoot> =
            module_resolution_settings(&db).search_paths(&db).collect();

        let editable_install =
            ModuleResolutionPathBuf::editable_installation_root(db.system(), "/x").unwrap();

        assert_eq!(
            search_paths
                .iter()
                .filter(|path| ****path == editable_install)
                .count(),
            1,
            "Unexpected search paths: {search_paths:?}"
        );
    }
}
crates/red_knot_python_semantic/src/builtins.rs (new file, 16 lines)

@@ -0,0 +1,16 @@
use red_knot_module_resolver::{resolve_module, ModuleName};

use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;

/// Salsa query to get the builtins scope.
///
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
#[salsa::tracked]
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
    let builtins_name =
        ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
    let builtins_file = resolve_module(db.upcast(), builtins_name)?.file();
    Some(global_scope(db, builtins_file))
}
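For readers coming from the Python side: the "builtins scope" returned by this query is the global scope of the `builtins` module (resolved through its `builtins.pyi` stub), whose names are implicitly reachable from every other scope. A plain-Python illustration (not part of the diff):

```python
import builtins

# The names red_knot's `builtins_scope` query ultimately exposes correspond to
# the module-level symbols of `builtins`.
# (Site-enabled interpreters also add `copyright`, `credits`, `help`, ...)
builtin_names = set(dir(builtins))
assert {"len", "print", "ValueError"} <= builtin_names

# Name lookup consults this scope last: local -> enclosing -> global -> builtins.
print(len(builtin_names), "names available in the builtins scope")
```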
@@ -3,6 +3,7 @@ use salsa::DbWithJar;
use red_knot_module_resolver::Db as ResolverDb;
use ruff_db::{Db as SourceDb, Upcast};

use crate::builtins::builtins_scope;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::ScopeId;

@@ -28,6 +29,7 @@ pub struct Jar(
    infer_definition_types,
    infer_expression_types,
    infer_scope_types,
    builtins_scope,
);

/// Database giving access to semantic information about a Python program.
@@ -6,6 +6,7 @@ pub use db::{Db, Jar};
pub use semantic_model::{HasTy, SemanticModel};

pub mod ast_node_ref;
mod builtins;
mod db;
mod node_key;
pub mod semantic_index;
@@ -1,6 +1,7 @@
use ruff_db::files::File;
use ruff_python_ast::name::Name;

use crate::builtins::builtins_scope;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
use crate::semantic_index::{global_scope, symbol_table, use_def_map};

@@ -47,6 +48,15 @@ pub(crate) fn global_symbol_ty_by_name<'db>(db: &'db dyn Db, file: File, name: &
    symbol_ty_by_name(db, global_scope(db, file), name)
}

/// Shorthand for `symbol_ty` that looks up a symbol in the builtins.
///
/// Returns `None` if the builtins module isn't available for some reason.
pub(crate) fn builtins_symbol_ty_by_name<'db>(db: &'db dyn Db, name: &str) -> Type<'db> {
    builtins_scope(db)
        .map(|builtins| symbol_ty_by_name(db, builtins, name))
        .unwrap_or(Type::Unbound)
}

/// Infer the type of a [`Definition`].
pub(crate) fn definition_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
    let inference = infer_definition_types(db, definition);
@@ -29,13 +29,9 @@ impl Display for DisplayType<'_> {
                write!(f, "<module '{:?}'>", file.path(self.db.upcast()))
            }
            // TODO functions and classes should display using a fully qualified name
            Type::Class(class) => {
                f.write_str("Literal[")?;
                f.write_str(&class.name(self.db))?;
                f.write_str("]")
            }
            Type::Class(class) => write!(f, "Literal[{}]", class.name(self.db)),
            Type::Instance(class) => f.write_str(&class.name(self.db)),
            Type::Function(function) => f.write_str(&function.name(self.db)),
            Type::Function(function) => write!(f, "Literal[{}]", function.name(self.db)),
            Type::Union(union) => union.display(self.db).fmt(f),
            Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
            Type::IntLiteral(n) => write!(f, "Literal[{n}]"),
@@ -29,15 +29,16 @@ use ruff_db::parsed::parsed_module;
use ruff_python_ast as ast;
use ruff_python_ast::{ExprContext, TypeParams};

use crate::builtins::builtins_scope;
use crate::semantic_index::ast_ids::{HasScopedAstId, HasScopedUseId, ScopedExpressionId};
use crate::semantic_index::definition::{Definition, DefinitionKind, DefinitionNodeKey};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::NodeWithScopeKind;
use crate::semantic_index::symbol::{NodeWithScopeRef, ScopeId};
use crate::semantic_index::symbol::{FileScopeId, NodeWithScopeKind, NodeWithScopeRef, ScopeId};
use crate::semantic_index::SemanticIndex;
use crate::types::{
    definitions_ty, global_symbol_ty_by_name, ClassType, FunctionType, Name, Type, UnionTypeBuilder,
    builtins_symbol_ty_by_name, definitions_ty, global_symbol_ty_by_name, ClassType, FunctionType,
    Name, Type, UnionTypeBuilder,
};
use crate::Db;

@@ -46,9 +47,9 @@ use crate::Db;
/// scope.
#[salsa::tracked(return_ref)]
pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> TypeInference<'db> {
    let _span = tracing::trace_span!("infer_scope_types", ?scope).entered();

    let file = scope.file(db);
    let _span = tracing::trace_span!("infer_scope_types", ?scope, ?file).entered();

    // Using the index here is fine because the code below depends on the AST anyway.
    // The isolation of the query is by the return inferred types.
    let index = semantic_index(db, file);

@@ -63,9 +64,10 @@ pub(crate) fn infer_definition_types<'db>(
    db: &'db dyn Db,
    definition: Definition<'db>,
) -> TypeInference<'db> {
    let _span = tracing::trace_span!("infer_definition_types", ?definition).entered();
    let file = definition.file(db);
    let _span = tracing::trace_span!("infer_definition_types", ?definition, ?file,).entered();

    let index = semantic_index(db, definition.file(db));
    let index = semantic_index(db, file);

    TypeInferenceBuilder::new(db, InferenceRegion::Definition(definition), index).finish()
}

@@ -80,9 +82,10 @@ pub(crate) fn infer_expression_types<'db>(
    db: &'db dyn Db,
    expression: Expression<'db>,
) -> TypeInference<'db> {
    let _span = tracing::trace_span!("infer_expression_types", ?expression).entered();
    let file = expression.file(db);
    let _span = tracing::trace_span!("infer_expression_types", ?expression, ?file).entered();

    let index = semantic_index(db, expression.file(db));
    let index = semantic_index(db, file);

    TypeInferenceBuilder::new(db, InferenceRegion::Expression(expression), index).finish()
}
@@ -684,7 +687,18 @@ impl<'db> TypeInferenceBuilder<'db> {
            let symbol = symbols.symbol_by_name(id).unwrap();
            if !symbol.is_defined() || !self.scope.is_function_like(self.db) {
                // implicit global
                Some(global_symbol_ty_by_name(self.db, self.file, id))
                let mut unbound_ty = if file_scope_id == FileScopeId::global() {
                    Type::Unbound
                } else {
                    global_symbol_ty_by_name(self.db, self.file, id)
                };
                // fallback to builtins
                if matches!(unbound_ty, Type::Unbound)
                    && Some(self.scope) != builtins_scope(self.db)
                {
                    unbound_ty = builtins_symbol_ty_by_name(self.db, id);
                }
                Some(unbound_ty)
            } else {
                Some(Type::Unbound)
            }
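The `unbound_ty` fallback above models ordinary Python name resolution: a name that is not bound in the current (or global) scope is looked up in `builtins` as a last resort. A plain-Python sketch of the semantics (illustrative only, not part of the diff):

```python
# `copyright` is never bound in this module, so it resolves to the builtins
# symbol -- the case the tests below check as `Literal[copyright]`.
c = copyright
print(c)

# A module-level binding shadows the builtin; the fallback only applies while
# the name is otherwise unbound.
len = "shadowed"
assert len == "shadowed"
del len                      # drop the shadowing binding ...
assert len([1, 2, 3]) == 3   # ... and lookup falls back to builtins again
```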
@@ -790,6 +804,7 @@ mod tests {
    use ruff_db::testing::assert_function_query_was_not_run;
    use ruff_python_ast::name::Name;

    use crate::builtins::builtins_scope;
    use crate::db::tests::TestDb;
    use crate::semantic_index::definition::Definition;
    use crate::semantic_index::semantic_index;

@@ -817,6 +832,23 @@ mod tests {
        db
    }

    fn setup_db_with_custom_typeshed(typeshed: &str) -> TestDb {
        let db = TestDb::new();

        Program::new(
            &db,
            TargetVersion::Py38,
            SearchPathSettings {
                extra_paths: Vec::new(),
                workspace_root: SystemPathBuf::from("/src"),
                site_packages: None,
                custom_typeshed: Some(SystemPathBuf::from(typeshed)),
            },
        );

        db
    }

    fn assert_public_ty(db: &TestDb, file_name: &str, symbol_name: &str, expected: &str) {
        let file = system_path_to_file(db, file_name).expect("Expected file to exist.");

@@ -1368,6 +1400,80 @@ mod tests {
        Ok(())
    }

    #[test]
    fn builtin_symbol_vendored_stdlib() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_file("/src/a.py", "c = copyright")?;

        assert_public_ty(&db, "/src/a.py", "c", "Literal[copyright]");

        Ok(())
    }

    #[test]
    fn builtin_symbol_custom_stdlib() -> anyhow::Result<()> {
        let mut db = setup_db_with_custom_typeshed("/typeshed");

        db.write_files([
            ("/src/a.py", "c = copyright"),
            (
                "/typeshed/stdlib/builtins.pyi",
                "def copyright() -> None: ...",
            ),
            ("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
        ])?;

        assert_public_ty(&db, "/src/a.py", "c", "Literal[copyright]");

        Ok(())
    }

    #[test]
    fn unknown_global_later_defined() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_file("/src/a.py", "x = foo; foo = 1")?;

        assert_public_ty(&db, "/src/a.py", "x", "Unbound");

        Ok(())
    }

    #[test]
    fn unknown_builtin_later_defined() -> anyhow::Result<()> {
        let mut db = setup_db_with_custom_typeshed("/typeshed");

        db.write_files([
            ("/src/a.py", "x = foo"),
            ("/typeshed/stdlib/builtins.pyi", "foo = bar; bar = 1"),
            ("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
        ])?;

        assert_public_ty(&db, "/src/a.py", "x", "Unbound");

        Ok(())
    }

    #[test]
    fn import_builtins() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_file("/src/a.py", "import builtins; x = builtins.copyright")?;

        assert_public_ty(&db, "/src/a.py", "x", "Literal[copyright]");
        // imported builtins module is the same file as the implicit builtins
        let file = system_path_to_file(&db, "/src/a.py").expect("Expected file to exist.");
        let builtins_ty = global_symbol_ty_by_name(&db, file, "builtins");
        let Type::Module(builtins_file) = builtins_ty else {
            panic!("Builtins are not a module?");
        };
        let implicit_builtins_file = builtins_scope(&db).expect("builtins to exist").file(&db);
        assert_eq!(builtins_file, implicit_builtins_file);

        Ok(())
    }

    fn first_public_def<'db>(db: &'db TestDb, file: File, name: &str) -> Definition<'db> {
        let scope = global_scope(db, file);
        *use_def_map(db, scope)
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.5.3"
version = "0.5.4"
publish = true
authors = { workspace = true }
edition = { workspace = true }

@@ -137,7 +137,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {

    case.fs
        .write_file(
            SystemPath::new("/src/foo.py"),
            SystemPath::new("/src/bar.py"),
            format!("{BAR_CODE}\n# A comment\n"),
        )
        .unwrap();
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.5.3"
version = "0.5.4"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -5,7 +5,21 @@ def func1(str, /, type, *complex, Exception, **getattr):
async def func2(bytes):
    pass


async def func3(id, dir):
    pass


map([], lambda float: ...)

from typing import override, overload


@override
def func4(id, dir):
    pass


@overload
def func4(id, dir):
    pass
@@ -3,8 +3,18 @@ min([x.val for x in bar])
max([x.val for x in bar])
sum([x.val for x in bar], 0)

# Ok
# OK
sum(x.val for x in bar)
min(x.val for x in bar)
max(x.val for x in bar)
sum(x.val for x in bar, 0)

# Multi-line
sum(
    [
        delta
        for delta in timedelta_list
        if delta
    ],
    dt.timedelta(),
)
@@ -3,6 +3,7 @@ use ruff_python_ast::Parameter;
use ruff_diagnostics::Diagnostic;
use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation};
use ruff_python_semantic::analyze::visibility::{is_overload, is_override};
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;

@@ -69,6 +70,19 @@ pub(crate) fn builtin_argument_shadowing(checker: &mut Checker, parameter: &Para
        &checker.settings.flake8_builtins.builtins_ignorelist,
        checker.source_type,
    ) {
        // Ignore `@override` and `@overload` decorated functions.
        if checker
            .semantic()
            .current_statement()
            .as_function_def_stmt()
            .is_some_and(|function_def| {
                is_override(&function_def.decorator_list, checker.semantic())
                    || is_overload(&function_def.decorator_list, checker.semantic())
            })
        {
            return;
        }

        checker.diagnostics.push(Diagnostic::new(
            BuiltinArgumentShadowing {
                name: parameter.name.to_string(),
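To make the flake8-builtins change concrete: parameters that shadow builtins (A002) are still reported in ordinary definitions, but signatures governed by `@override` or `@overload` are now skipped, since their parameter names are dictated elsewhere. A minimal Python sketch (the `Reader` class is invented for illustration; not part of the diff):

```python
from typing import overload


class Reader:
    @overload
    def read(self, id: int) -> bytes: ...
    @overload
    def read(self, id: str) -> bytes: ...
    def read(self, id):
        # The two `@overload` stubs above are no longer flagged by A002;
        # this undecorated implementation still is, because its parameter
        # name is freely chosen. The same exemption applies to `@override`.
        return str(id).encode()


print(Reader().read(7))
```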
@@ -43,30 +43,24 @@ A002.py:5:17: A002 Argument `bytes` is shadowing a Python builtin
6 | pass
|

A002.py:8:17: A002 Argument `id` is shadowing a Python builtin
|
6 | pass
7 |
8 | async def func3(id, dir):
| ^^ A002
9 | pass
|

A002.py:8:21: A002 Argument `dir` is shadowing a Python builtin
|
6 | pass
7 |
8 | async def func3(id, dir):
| ^^^ A002
9 | pass
|

A002.py:11:16: A002 Argument `float` is shadowing a Python builtin
A002.py:9:17: A002 Argument `id` is shadowing a Python builtin
|
9 | pass
10 |
11 | map([], lambda float: ...)
9 | async def func3(id, dir):
| ^^ A002
10 | pass
|

A002.py:9:21: A002 Argument `dir` is shadowing a Python builtin
|
9 | async def func3(id, dir):
| ^^^ A002
10 | pass
|

A002.py:13:16: A002 Argument `float` is shadowing a Python builtin
|
13 | map([], lambda float: ...)
| ^^^^^ A002
14 |
15 | from typing import override, overload
|

@@ -43,12 +43,10 @@ A002.py:5:17: A002 Argument `bytes` is shadowing a Python builtin
6 | pass
|

A002.py:11:16: A002 Argument `float` is shadowing a Python builtin
A002.py:13:16: A002 Argument `float` is shadowing a Python builtin
|
9 | pass
10 |
11 | map([], lambda float: ...)
13 | map([], lambda float: ...)
| ^^^^^ A002
14 |
15 | from typing import override, overload
|
@@ -1,10 +1,10 @@
use ruff_python_ast::{self as ast, Expr, Keyword};

use ruff_diagnostics::Violation;
use ruff_diagnostics::{Diagnostic, FixAvailability};
use ruff_diagnostics::{Edit, Fix, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::any_over_expr;
use ruff_text_size::Ranged;
use ruff_text_size::{Ranged, TextSize};

use crate::checkers::ast::Checker;

@@ -112,9 +112,30 @@ pub(crate) fn unnecessary_comprehension_in_call(
    }

    let mut diagnostic = Diagnostic::new(UnnecessaryComprehensionInCall, arg.range());
    diagnostic.try_set_fix(|| {
        fixes::fix_unnecessary_comprehension_in_call(expr, checker.locator(), checker.stylist())
    });

    if args.len() == 1 {
        // If there's only one argument, remove the list or set brackets.
        diagnostic.try_set_fix(|| {
            fixes::fix_unnecessary_comprehension_in_call(expr, checker.locator(), checker.stylist())
        });
    } else {
        // If there are multiple arguments, replace the list or set brackets with parentheses.
        // If a function call has multiple arguments, one of which is a generator, then the
        // generator must be parenthesized.

        // Replace `[` with `(`.
        let collection_start = Edit::replacement(
            "(".to_string(),
            arg.start(),
            arg.start() + TextSize::from(1),
        );

        // Replace `]` with `)`.
        let collection_end =
            Edit::replacement(")".to_string(), arg.end() - TextSize::from(1), arg.end());

        diagnostic.set_fix(Fix::unsafe_edits(collection_start, [collection_end]));
    }
    checker.diagnostics.push(diagnostic);
}
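The parenthesization logic above exists because of a Python syntax rule: a bare generator expression is only legal as the sole argument of a call. A short plain-Python illustration of the old and new fixes (not part of the diff):

```python
import datetime as dt

timedelta_list = [dt.timedelta(days=1), None, dt.timedelta(hours=6)]

# Flagged by C419: unnecessary list comprehension inside sum().
total = sum([delta for delta in timedelta_list if delta], dt.timedelta())

# The old fix only stripped the brackets, which is a SyntaxError when the call
# has another argument:
#   sum(delta for delta in timedelta_list if delta, dt.timedelta())

# The new fix swaps `[` / `]` for `(` / `)`, keeping the generator parenthesized:
total = sum((delta for delta in timedelta_list if delta), dt.timedelta())

print(total)  # 1 day, 6:00:00
```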
@@ -52,7 +52,7 @@ C419_1.py:3:5: C419 [*] Unnecessary list comprehension
3 |+max(x.val for x in bar)
4 4 | sum([x.val for x in bar], 0)
5 5 |
6 6 | # Ok
6 6 | # OK

C419_1.py:4:5: C419 [*] Unnecessary list comprehension
|

@@ -61,7 +61,7 @@ C419_1.py:4:5: C419 [*] Unnecessary list comprehension
4 | sum([x.val for x in bar], 0)
| ^^^^^^^^^^^^^^^^^^^^ C419
5 |
6 | # Ok
6 | # OK
|
= help: Remove unnecessary list comprehension

@@ -70,7 +70,37 @@ C419_1.py:4:5: C419 [*] Unnecessary list comprehension
2 2 | min([x.val for x in bar])
3 3 | max([x.val for x in bar])
4 |-sum([x.val for x in bar], 0)
4 |+sum(x.val for x in bar, 0)
4 |+sum((x.val for x in bar), 0)
5 5 |
6 6 | # Ok
6 6 | # OK
7 7 | sum(x.val for x in bar)

C419_1.py:14:5: C419 [*] Unnecessary list comprehension
|
12 | # Multi-line
13 | sum(
14 | [
| _____^
15 | | delta
16 | | for delta in timedelta_list
17 | | if delta
18 | | ],
| |_____^ C419
19 | dt.timedelta(),
20 | )
|
= help: Remove unnecessary list comprehension

ℹ Unsafe fix
11 11 |
12 12 | # Multi-line
13 13 | sum(
14 |- [
14 |+ (
15 15 | delta
16 16 | for delta in timedelta_list
17 17 | if delta
18 |- ],
18 |+ ),
19 19 | dt.timedelta(),
20 20 | )
@@ -1,6 +1,6 @@
[package]
name = "ruff_wasm"
version = "0.5.3"
version = "0.5.4"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -74,13 +74,13 @@ pub struct Options {
    )]
    pub extend: Option<String>,

    /// The style in which violation messages should be formatted: `"full"`
    /// (shows source), `"concise"` (default), `"grouped"` (group messages by file), `"json"`
    /// The style in which violation messages should be formatted: `"full"` (default)
    /// (shows source), `"concise"`, `"grouped"` (group messages by file), `"json"`
    /// (machine-readable), `"junit"` (machine-readable XML), `"github"` (GitHub
    /// Actions annotations), `"gitlab"` (GitLab CI code quality report),
    /// `"pylint"` (Pylint text format) or `"azure"` (Azure Pipeline logging commands).
    #[option(
        default = r#""concise""#,
        default = r#""full""#,
        value_type = r#""full" | "concise" | "grouped" | "json" | "junit" | "github" | "gitlab" | "pylint" | "azure""#,
        example = r#"
            # Group violations by containing file.
@@ -28,9 +28,9 @@ For more documentation on the Ruff extension, refer to the

## Neovim

The [`nvim-lspconfig`](https://github/neovim/nvim-lspconfig) plugin can be used to configure the
The [`nvim-lspconfig`](https://github.com/neovim/nvim-lspconfig) plugin can be used to configure the
Ruff Language Server in Neovim. To set it up, install
[`nvim-lspconfig`](https://github/neovim/nvim-lspconfig) plugin, set it up as per the
[`nvim-lspconfig`](https://github.com/neovim/nvim-lspconfig) plugin, set it up as per the
[configuration](https://github.com/neovim/nvim-lspconfig#configuration) documentation, and add the
following to your `init.lua`:
@@ -78,7 +78,7 @@ Ruff can be used as a [pre-commit](https://pre-commit.com) hook via [`ruff-pre-c
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.5.3
  rev: v0.5.4
  hooks:
    # Run the linter.
    - id: ruff

@@ -91,7 +91,7 @@ To enable lint fixes, add the `--fix` argument to the lint hook:
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.5.3
  rev: v0.5.4
  hooks:
    # Run the linter.
    - id: ruff

@@ -105,7 +105,7 @@ To run the hooks over Jupyter Notebooks too, add `jupyter` to the list of allowe
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.5.3
  rev: v0.5.4
  hooks:
    # Run the linter.
    - id: ruff
@@ -4,7 +4,7 @@ build-backend = "maturin"

[project]
name = "ruff"
version = "0.5.3"
version = "0.5.4"
description = "An extremely fast Python linter and code formatter, written in Rust."
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
readme = "README.md"
ruff.schema.json (generated, 2 lines changed)

@@ -531,7 +531,7 @@
      }
    },
    "output-format": {
      "description": "The style in which violation messages should be formatted: `\"full\"` (shows source), `\"concise\"` (default), `\"grouped\"` (group messages by file), `\"json\"` (machine-readable), `\"junit\"` (machine-readable XML), `\"github\"` (GitHub Actions annotations), `\"gitlab\"` (GitLab CI code quality report), `\"pylint\"` (Pylint text format) or `\"azure\"` (Azure Pipeline logging commands).",
      "description": "The style in which violation messages should be formatted: `\"full\"` (default) (shows source), `\"concise\"`, `\"grouped\"` (group messages by file), `\"json\"` (machine-readable), `\"junit\"` (machine-readable XML), `\"github\"` (GitHub Actions annotations), `\"gitlab\"` (GitLab CI code quality report), `\"pylint\"` (Pylint text format) or `\"azure\"` (Azure Pipeline logging commands).",
      "anyOf": [
        {
          "$ref": "#/definitions/OutputFormat"
@@ -1,6 +1,6 @@
[tool.poetry]
name = "scripts"
version = "0.5.3"
version = "0.5.4"
description = ""
authors = ["Charles Marsh <charlie.r.marsh@gmail.com>"]