Compare commits

editables-...0.5.5: 51 commits

fc16d8d04d  175e5d7b88  c03f257ed7  6bbb4a28c2  2ce3e3ae60  2a64cccb61  928ffd6650
e52be0951a  889073578e  eac965ecaf  8659f2f4ea  c1b292a0dc  3af6ccb720  f0fc6a95fe
f96a3c71ff  b9b7deff17  40d9324f5a  a9f8bd59b2  143e172431  b2d3a05ee4  ef1ca0dd38
c7b13bb8fc  dbbe3526ef  f22c8ab811  2a8f95c437  ea2d51c2bb  ed238e0c76  3ace12943e
978909fcf4  f8735e1ee8  d70ceb6a56  fc7d9e95b8  b578fca9cb  8d3146c2b2  fa5c841154
f8fcbc19d9  97fdd48208  731ed2e40b  3a742c17f8  053243635c  82355712c3  4bc73dd87e
53b84ab054  3664f85f45  2c1926beeb  4bcc96ae51  c0a2b49bac  ca22248628  d8cf8ac2ef
1c7b84059e  f82bb67555
.pre-commit-config.yaml

@@ -56,7 +56,7 @@ repos:
         pass_filenames: false # This makes it a lot faster

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.2
+    rev: v0.5.4
     hooks:
       - id: ruff-format
       - id: ruff
CHANGELOG.md (52 changed lines)

@@ -1,5 +1,57 @@
 # Changelog

+## 0.5.5
+
+### Preview features
+
+- \[`fastapi`\] Implement `fastapi-redundant-response-model` (`FAST001`) and `fastapi-non-annotated-dependency` (`FAST002`) ([#11579](https://github.com/astral-sh/ruff/pull/11579))
+- \[`pydoclint`\] Implement `docstring-missing-exception` (`DOC501`) and `docstring-extraneous-exception` (`DOC502`) ([#11471](https://github.com/astral-sh/ruff/pull/11471))
+
+### Rule changes
+
+- \[`numpy`\] Fix NumPy 2.0 rule for `np.alltrue` and `np.sometrue` ([#12473](https://github.com/astral-sh/ruff/pull/12473))
+- \[`numpy`\] Ignore `NPY201` inside `except` blocks for compatibility with older numpy versions ([#12490](https://github.com/astral-sh/ruff/pull/12490))
+- \[`pep8-naming`\] Avoid applying `ignore-names` to `self` and `cls` function names (`N804`, `N805`) ([#12497](https://github.com/astral-sh/ruff/pull/12497))
+
+### Formatter
+
+- Fix incorrect placement of leading function comment with type params ([#12447](https://github.com/astral-sh/ruff/pull/12447))
+
+### Server
+
+- Do not bail code action resolution when a quick fix is requested ([#12462](https://github.com/astral-sh/ruff/pull/12462))
+
+### Bug fixes
+
+- Fix `Ord` implementation of `cmp_fix` ([#12471](https://github.com/astral-sh/ruff/pull/12471))
+- Raise syntax error for unparenthesized generator expression in multi-argument call ([#12445](https://github.com/astral-sh/ruff/pull/12445))
+- \[`pydoclint`\] Fix panic in `DOC501` reported in [#12428](https://github.com/astral-sh/ruff/pull/12428) ([#12435](https://github.com/astral-sh/ruff/pull/12435))
+- \[`flake8-bugbear`\] Allow singleton tuples with starred expressions in `B013` ([#12484](https://github.com/astral-sh/ruff/pull/12484))
+
+### Documentation
+
+- Add Eglot setup guide for Emacs editor ([#12426](https://github.com/astral-sh/ruff/pull/12426))
+- Add note about the breaking change in `nvim-lspconfig` ([#12507](https://github.com/astral-sh/ruff/pull/12507))
+- Add note to include notebook files for native server ([#12449](https://github.com/astral-sh/ruff/pull/12449))
+- Add setup docs for Zed editor ([#12501](https://github.com/astral-sh/ruff/pull/12501))
+
+## 0.5.4
+
+### Rule changes
+
+- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))
+
+### Bug fixes
+
+- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
+- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
+- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))
+
+### Documentation
+
+- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
+- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))
+
 ## 0.5.3

 **Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped
@@ -905,7 +905,7 @@ There are three ways in which an import can be categorized as "first-party":
    package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
    automatically. This check is as simple as comparing the first segment of the current file's
    module path to the first segment of the import.
-1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
+1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
    sets the directories to scan when identifying first-party imports. The algorithm is
    straightforward: given an import, like `import foo`, iterate over the directories enumerated in
    the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
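The source-roots check described in the hunk above reduces to a directory probe per entry in `src`. A minimal, self-contained sketch of that algorithm (not Ruff's actual implementation; the function name is illustrative):

```rust
use std::path::PathBuf;

/// Is `import <root>` first-party? Probe each `src` directory for a package
/// directory `<root>/` or a module file `<root>.py`, as described above.
fn is_first_party(import_root: &str, src: &[PathBuf]) -> bool {
    src.iter().any(|dir| {
        dir.join(import_root).is_dir() || dir.join(format!("{import_root}.py")).is_file()
    })
}

fn main() {
    // `src = ["src", "tests"]` would probe src/foo(.py) and tests/foo(.py).
    let src = vec![PathBuf::from("src"), PathBuf::from("tests")];
    println!("{}", is_first_party("foo", &src));
}
```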
Cargo.lock (generated, 42 changed lines)

@@ -1866,6 +1866,7 @@ dependencies = [
  "ruff_python_ast",
  "rustc-hash 2.0.0",
  "salsa",
+ "tempfile",
  "tracing",
  "tracing-subscriber",
  "tracing-tree",
@@ -1897,6 +1898,7 @@ version = "0.0.0"
 dependencies = [
  "anyhow",
  "bitflags 2.6.0",
  "countme",
  "hashbrown",
  "ordermap",
  "red_knot_module_resolver",
@@ -1904,7 +1906,6 @@ dependencies = [
  "ruff_index",
  "ruff_python_ast",
  "ruff_python_parser",
  "ruff_python_trivia",
  "ruff_text_size",
  "rustc-hash 2.0.0",
  "salsa",
@@ -1992,7 +1993,7 @@ dependencies = [

 [[package]]
 name = "ruff"
-version = "0.5.3"
+version = "0.5.5"
 dependencies = [
  "anyhow",
  "argfile",
@@ -2091,6 +2092,7 @@ dependencies = [
  "ruff_notebook",
  "ruff_python_ast",
  "ruff_python_parser",
  "ruff_python_trivia",
  "ruff_source_file",
  "ruff_text_size",
  "rustc-hash 2.0.0",
@@ -2176,7 +2178,7 @@ dependencies = [

 [[package]]
 name = "ruff_linter"
-version = "0.5.3"
+version = "0.5.5"
 dependencies = [
  "aho-corasick",
  "annotate-snippets 0.9.2",
@@ -2491,7 +2493,7 @@ dependencies = [

 [[package]]
 name = "ruff_wasm"
-version = "0.5.3"
+version = "0.5.5"
 dependencies = [
  "console_error_panic_hook",
  "console_log",
@@ -2910,9 +2912,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"

 [[package]]
 name = "syn"
-version = "2.0.71"
+version = "2.0.72"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b146dcf730474b4bcd16c311627b31ede9ab149045db4d6088b3becaea046462"
+checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3000,18 +3002,18 @@ dependencies = [

 [[package]]
 name = "thiserror"
-version = "1.0.62"
+version = "1.0.63"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2675633b1499176c2dff06b0856a27976a8f9d436737b4cf4f312d4d91d8bbb"
+checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
 dependencies = [
  "thiserror-impl",
 ]

 [[package]]
 name = "thiserror-impl"
-version = "1.0.62"
+version = "1.0.63"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d20468752b09f49e909e55a5d338caa8bedf615594e9d80bc4c565d30faf798c"
+checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3075,9 +3077,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"

 [[package]]
 name = "toml"
-version = "0.8.14"
+version = "0.8.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335"
+checksum = "ac2caab0bf757388c6c0ae23b3293fdb463fee59434529014f85e3263b995c28"
 dependencies = [
  "serde",
  "serde_spanned",
@@ -3096,9 +3098,9 @@ dependencies = [

 [[package]]
 name = "toml_edit"
-version = "0.22.14"
+version = "0.22.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38"
+checksum = "278f3d518e152219c994ce877758516bca5e118eaed6996192a774fb9fbf0788"
 dependencies = [
  "indexmap",
  "serde",
@@ -3183,9 +3185,9 @@ dependencies = [

 [[package]]
 name = "tracing-tree"
-version = "0.3.1"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b56c62d2c80033cb36fae448730a2f2ef99410fe3ecbffc916681a32f6807dbe"
+checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c"
 dependencies = [
  "nu-ansi-term 0.50.0",
  "tracing-core",
@@ -3338,9 +3340,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"

 [[package]]
 name = "uuid"
-version = "1.9.1"
+version = "1.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439"
+checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314"
 dependencies = [
  "getrandom",
  "rand",
@@ -3350,9 +3352,9 @@ dependencies = [

 [[package]]
 name = "uuid-macro-internal"
-version = "1.9.1"
+version = "1.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3ff64d5cde1e2cb5268bdb497235b6bd255ba8244f910dbc3574e59593de68c"
+checksum = "ee1cd046f83ea2c4e920d6ee9f7c3537ef928d75dce5d84a87c2c5d6b3999a3a"
 dependencies = [
  "proc-macro2",
  "quote",
Cargo.toml

@@ -133,7 +133,7 @@ toml = { version = "0.8.11" }
 tracing = { version = "0.1.40" }
 tracing-indicatif = { version = "0.3.6" }
 tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
-tracing-tree = { version = "0.3.0" }
+tracing-tree = { version = "0.4.0" }
 typed-arena = { version = "2.0.2" }
 unic-ucd-category = { version = "0.9" }
 unicode-ident = { version = "1.0.12" }
LICENSE (25 changed lines)

@@ -1371,3 +1371,28 @@ are:
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
 """
+
+- pydoclint, licensed as follows:
+  """
+  MIT License
+
+  Copyright (c) 2023 jsh9
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is
+  furnished to do so, subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be included in all
+  copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+  SOFTWARE.
+  """
README.md

@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.5.3/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.5.3/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.5.5/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.5.5/install.ps1 | iex"
 ```

 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.5.3
+  rev: v0.5.5
   hooks:
     # Run the linter.
     - id: ruff
@@ -434,6 +434,7 @@ Ruff is used by a number of major open-source projects and companies, including:
 - Hugging Face ([Transformers](https://github.com/huggingface/transformers),
   [Datasets](https://github.com/huggingface/datasets),
   [Diffusers](https://github.com/huggingface/diffusers))
+- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
 - ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
 - [Ibis](https://github.com/ibis-project/ibis)
 - [ivy](https://github.com/unifyai/ivy)
crates/red_knot/Cargo.toml

@@ -31,6 +31,9 @@ tracing = { workspace = true }
 tracing-subscriber = { workspace = true }
 tracing-tree = { workspace = true }

+[dev-dependencies]
+tempfile = { workspace = true }
+
 [lints]
 workspace = true
crates/red_knot/src/db.rs

@@ -5,15 +5,16 @@ use salsa::{Cancelled, Database, DbWithJar};

 use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
 use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
-use ruff_db::files::{system_path_to_file, File, Files};
+use ruff_db::files::{File, Files};
 use ruff_db::program::{Program, ProgramSettings};
 use ruff_db::system::System;
 use ruff_db::vendored::VendoredFileSystem;
 use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};

 use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled, Diagnostics};
-use crate::watch::{FileChangeKind, FileWatcherChange};
-use crate::workspace::{check_file, Package, Workspace, WorkspaceMetadata};
+use crate::workspace::{check_file, Package, Package_files, Workspace, WorkspaceMetadata};
+
+mod changes;

 pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}

@@ -21,6 +22,7 @@ pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}
 pub struct Jar(
     Workspace,
     Package,
+    Package_files,
     lint_syntax,
     lint_semantic,
     unwind_if_cancelled,
@@ -59,58 +61,6 @@ impl RootDatabase {
         self.workspace.unwrap()
     }

-    #[tracing::instrument(level = "debug", skip(self, changes))]
-    pub fn apply_changes(&mut self, changes: Vec<FileWatcherChange>) {
-        let workspace = self.workspace();
-        let workspace_path = workspace.root(self).to_path_buf();
-
-        // TODO: Optimize change tracking by only reloading a package if a file that is part of the package was changed.
-        let mut structural_change = false;
-        for change in changes {
-            if matches!(
-                change.path.file_name(),
-                Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
-            ) {
-                // Changes to ignore files or settings can change the workspace structure or add/remove files
-                // from packages.
-                structural_change = true;
-            } else {
-                match change.kind {
-                    FileChangeKind::Created => {
-                        // Reload the package when a new file was added. This is necessary because the file might be excluded
-                        // by a gitignore.
-                        if workspace.package(self, &change.path).is_some() {
-                            structural_change = true;
-                        }
-                    }
-                    FileChangeKind::Modified => {}
-                    FileChangeKind::Deleted => {
-                        if let Some(package) = workspace.package(self, &change.path) {
-                            if let Some(file) = system_path_to_file(self, &change.path) {
-                                package.remove_file(self, file);
-                            }
-                        }
-                    }
-                }
-            }
-
-            File::touch_path(self, &change.path);
-        }
-
-        if structural_change {
-            match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
-                Ok(metadata) => {
-                    tracing::debug!("Reload workspace after structural change.");
-                    // TODO: Handle changes in the program settings.
-                    workspace.reload(self, metadata);
-                }
-                Err(error) => {
-                    tracing::error!("Failed to load workspace, keep old workspace: {error}");
-                }
-            }
-        }
-    }
-
     /// Checks all open files in the workspace and its dependencies.
     pub fn check(&self) -> Result<Vec<String>, Cancelled> {
         self.with_db(|db| db.workspace().check(db))
@@ -152,18 +102,29 @@ impl Upcast<dyn SemanticDb> for RootDatabase {
     fn upcast(&self) -> &(dyn SemanticDb + 'static) {
         self
     }
+
+    fn upcast_mut(&mut self) -> &mut (dyn SemanticDb + 'static) {
+        self
+    }
 }

 impl Upcast<dyn SourceDb> for RootDatabase {
     fn upcast(&self) -> &(dyn SourceDb + 'static) {
         self
     }
+
+    fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
+        self
+    }
 }

 impl Upcast<dyn ResolverDb> for RootDatabase {
     fn upcast(&self) -> &(dyn ResolverDb + 'static) {
         self
     }
+    fn upcast_mut(&mut self) -> &mut (dyn ResolverDb + 'static) {
+        self
+    }
 }

 impl ResolverDb for RootDatabase {}
@@ -198,3 +159,102 @@ impl salsa::ParallelDatabase for RootDatabase {
         })
     }
 }
+
+#[cfg(test)]
+pub(crate) mod tests {
+    use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
+    use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
+    use ruff_db::files::Files;
+    use ruff_db::system::{DbWithTestSystem, System, TestSystem};
+    use ruff_db::vendored::VendoredFileSystem;
+    use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
+
+    use super::{Db, Jar};
+
+    #[salsa::db(Jar, SemanticJar, ResolverJar, SourceJar)]
+    pub(crate) struct TestDb {
+        storage: salsa::Storage<Self>,
+        files: Files,
+        system: TestSystem,
+        vendored: VendoredFileSystem,
+    }
+
+    impl TestDb {
+        pub(crate) fn new() -> Self {
+            Self {
+                storage: salsa::Storage::default(),
+                system: TestSystem::default(),
+                vendored: vendored_typeshed_stubs().snapshot(),
+                files: Files::default(),
+            }
+        }
+    }
+
+    impl DbWithTestSystem for TestDb {
+        fn test_system(&self) -> &TestSystem {
+            &self.system
+        }
+
+        fn test_system_mut(&mut self) -> &mut TestSystem {
+            &mut self.system
+        }
+    }
+
+    impl SourceDb for TestDb {
+        fn vendored(&self) -> &VendoredFileSystem {
+            &self.vendored
+        }
+
+        fn system(&self) -> &dyn System {
+            &self.system
+        }
+
+        fn files(&self) -> &Files {
+            &self.files
+        }
+    }
+
+    impl Upcast<dyn SemanticDb> for TestDb {
+        fn upcast(&self) -> &(dyn SemanticDb + 'static) {
+            self
+        }
+        fn upcast_mut(&mut self) -> &mut (dyn SemanticDb + 'static) {
+            self
+        }
+    }
+
+    impl Upcast<dyn SourceDb> for TestDb {
+        fn upcast(&self) -> &(dyn SourceDb + 'static) {
+            self
+        }
+        fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
+            self
+        }
+    }
+
+    impl Upcast<dyn ResolverDb> for TestDb {
+        fn upcast(&self) -> &(dyn ResolverDb + 'static) {
+            self
+        }
+        fn upcast_mut(&mut self) -> &mut (dyn ResolverDb + 'static) {
+            self
+        }
+    }
+
+    impl red_knot_module_resolver::Db for TestDb {}
+    impl red_knot_python_semantic::Db for TestDb {}
+    impl Db for TestDb {}
+
+    impl salsa::Database for TestDb {}
+
+    impl salsa::ParallelDatabase for TestDb {
+        fn snapshot(&self) -> salsa::Snapshot<Self> {
+            salsa::Snapshot::new(Self {
+                storage: self.storage.snapshot(),
+                files: self.files.snapshot(),
+                system: self.system.snapshot(),
+                vendored: self.vendored.snapshot(),
+            })
+        }
+    }
+}
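The `upcast_mut` additions above round out the `Upcast` pattern: one concrete database hands out `&dyn Trait` (and now `&mut dyn Trait`) views of itself, so code written against a narrower trait can run on the full database. A hedged, self-contained sketch of the pattern; the trait and struct here are illustrative stand-ins, not the real `ruff_db` definitions:

```rust
// A narrow database trait, standing in for `SourceDb` and friends.
trait SourceDb {
    fn file_count(&self) -> usize;
}

// The generic upcast trait: `T = dyn SourceDb` defaults to `+ 'static`,
// so the impl signatures below match.
trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
    fn upcast_mut(&mut self) -> &mut T;
}

struct Database {
    files: usize,
}

impl SourceDb for Database {
    fn file_count(&self) -> usize {
        self.files
    }
}

impl Upcast<dyn SourceDb> for Database {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self // unsized coercion from &Database to &dyn SourceDb
    }
    fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
        self
    }
}

// A consumer that only needs the narrow view.
fn print_count(db: &dyn SourceDb) {
    println!("{} files", db.file_count());
}

fn main() {
    let mut db = Database { files: 3 };
    print_count(db.upcast());
    let _ = db.upcast_mut(); // the new mutable view added in this diff
}
```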
crates/red_knot/src/db/changes.rs (new file, 190 lines)

@@ -0,0 +1,190 @@
use rustc_hash::FxHashSet;

use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::system::walk_directory::WalkState;
use ruff_db::system::SystemPath;
use ruff_db::Db;

use crate::db::RootDatabase;
use crate::watch;
use crate::watch::{CreatedKind, DeletedKind};
use crate::workspace::WorkspaceMetadata;

impl RootDatabase {
    #[tracing::instrument(level = "debug", skip(self, changes))]
    pub fn apply_changes(&mut self, changes: Vec<watch::ChangeEvent>) {
        let workspace = self.workspace();
        let workspace_path = workspace.root(self).to_path_buf();

        let mut workspace_change = false;
        // Packages that need reloading
        let mut changed_packages = FxHashSet::default();
        // Paths that were added
        let mut added_paths = FxHashSet::default();

        // Deduplicate the `sync` calls. Many file watchers emit multiple events for the same path.
        let mut synced_files = FxHashSet::default();
        let mut synced_recursively = FxHashSet::default();

        let mut sync_path = |db: &mut RootDatabase, path: &SystemPath| {
            if synced_files.insert(path.to_path_buf()) {
                File::sync_path(db, path);
            }
        };

        let mut sync_recursively = |db: &mut RootDatabase, path: &SystemPath| {
            if synced_recursively.insert(path.to_path_buf()) {
                Files::sync_recursively(db, path);
            }
        };

        for change in changes {
            if let Some(path) = change.path() {
                if matches!(
                    path.file_name(),
                    Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
                ) {
                    // Changes to ignore files or settings can change the workspace structure or add/remove files
                    // from packages.
                    if let Some(package) = workspace.package(self, path) {
                        changed_packages.insert(package);
                    } else {
                        workspace_change = true;
                    }

                    continue;
                }
            }

            match change {
                watch::ChangeEvent::Changed { path, kind: _ } => sync_path(self, &path),

                watch::ChangeEvent::Created { kind, path } => {
                    match kind {
                        CreatedKind::File => sync_path(self, &path),
                        CreatedKind::Directory | CreatedKind::Any => {
                            sync_recursively(self, &path);
                        }
                    }

                    if self.system().is_file(&path) {
                        // Add the parent directory because `walkdir` always visits explicitly passed files
                        // even if they match an exclude filter.
                        added_paths.insert(path.parent().unwrap().to_path_buf());
                    } else {
                        added_paths.insert(path);
                    }
                }

                watch::ChangeEvent::Deleted { kind, path } => {
                    let is_file = match kind {
                        DeletedKind::File => true,
                        DeletedKind::Directory => {
                            // file watchers emit an event for every deleted file. No need to scan the entire dir.
                            continue;
                        }
                        DeletedKind::Any => self
                            .files
                            .try_system(self, &path)
                            .is_some_and(|file| file.exists(self)),
                    };

                    if is_file {
                        sync_path(self, &path);

                        if let Some(package) = workspace.package(self, &path) {
                            if let Some(file) = self.files().try_system(self, &path) {
                                package.remove_file(self, file);
                            }
                        }
                    } else {
                        sync_recursively(self, &path);

                        // TODO: Remove after converting `package.files()` to a salsa query.
                        if let Some(package) = workspace.package(self, &path) {
                            changed_packages.insert(package);
                        } else {
                            workspace_change = true;
                        }
                    }
                }

                watch::ChangeEvent::Rescan => {
                    workspace_change = true;
                    Files::sync_all(self);
                    break;
                }
            }
        }

        if workspace_change {
            match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
                Ok(metadata) => {
                    tracing::debug!("Reload workspace after structural change.");
                    // TODO: Handle changes in the program settings.
                    workspace.reload(self, metadata);
                }
                Err(error) => {
                    tracing::error!("Failed to load workspace, keep old workspace: {error}");
                }
            }

            return;
        }

        let mut added_paths = added_paths.into_iter().filter(|path| {
            let Some(package) = workspace.package(self, path) else {
                return false;
            };

            // Skip packages that need reloading
            !changed_packages.contains(&package)
        });

        // Use directory walking to discover newly added files.
        if let Some(path) = added_paths.next() {
            let mut walker = self.system().walk_directory(&path);

            for extra_path in added_paths {
                walker = walker.add(&extra_path);
            }

            let added_paths = std::sync::Mutex::new(Vec::default());

            walker.run(|| {
                Box::new(|entry| {
                    let Ok(entry) = entry else {
                        return WalkState::Continue;
                    };

                    if !entry.file_type().is_file() {
                        return WalkState::Continue;
                    }

                    let mut paths = added_paths.lock().unwrap();

                    paths.push(entry.into_path());

                    WalkState::Continue
                })
            });

            for path in added_paths.into_inner().unwrap() {
                let package = workspace.package(self, &path);
                let file = system_path_to_file(self, &path);

                if let (Some(package), Some(file)) = (package, file) {
                    package.add_file(self, file);
                }
            }
        }

        // Reload
        for package in changed_packages {
            package.reload_files(self);
        }
    }
}

#[cfg(test)]
mod tests {}
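One detail worth noting in `apply_changes` above: `sync_path` and `sync_recursively` gate each call behind a set insert, because watchers commonly deliver several events for the same path in one batch. A minimal, self-contained sketch of that gating using std's `HashSet` (`sync` is a stand-in for `File::sync_path`):

```rust
use std::collections::HashSet;
use std::path::PathBuf;

fn main() {
    let events = vec![
        PathBuf::from("src/a.py"),
        PathBuf::from("src/a.py"), // duplicate event for the same path
        PathBuf::from("src/b.py"),
    ];

    let mut synced: HashSet<PathBuf> = HashSet::new();
    let mut sync = |path: &PathBuf| {
        // `insert` returns false if the path was already synced in this batch.
        if synced.insert(path.clone()) {
            println!("sync {}", path.display());
        }
    };

    for path in &events {
        sync(path); // "src/a.py" is synced only once
    }
}
```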
crates/red_knot/src/lint.rs

@@ -11,7 +11,7 @@ use ruff_db::files::File;
 use ruff_db::parsed::{parsed_module, ParsedModule};
 use ruff_db::source::{source_text, SourceText};
 use ruff_python_ast as ast;
-use ruff_python_ast::visitor::{walk_stmt, Visitor};
+use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};

 use crate::db::Db;

@@ -120,6 +120,25 @@ fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef)
     }
 }

+fn lint_maybe_undefined(context: &SemanticLintContext, name: &ast::ExprName) {
+    if !matches!(name.ctx, ast::ExprContext::Load) {
+        return;
+    }
+    let semantic = &context.semantic;
+    match name.ty(semantic) {
+        Type::Unbound => {
+            context.push_diagnostic(format!("Name '{}' used when not defined.", &name.id));
+        }
+        Type::Union(union) if union.contains(semantic.db(), Type::Unbound) => {
+            context.push_diagnostic(format!(
+                "Name '{}' used when possibly not defined.",
+                &name.id
+            ));
+        }
+        _ => {}
+    }
+}
+
 fn lint_bad_override(context: &SemanticLintContext, class: &ast::StmtClassDef) {
     let semantic = &context.semantic;

@@ -233,6 +252,17 @@ impl Visitor<'_> for SemanticVisitor<'_> {

         walk_stmt(self, stmt);
     }
+
+    fn visit_expr(&mut self, expr: &ast::Expr) {
+        match expr {
+            ast::Expr::Name(name) if matches!(name.ctx, ast::ExprContext::Load) => {
+                lint_maybe_undefined(self.context, name);
+            }
+            _ => {}
+        }
+
+        walk_expr(self, expr);
+    }
 }

 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -272,3 +302,59 @@ enum AnyImportRef<'a> {
     Import(&'a ast::StmtImport),
     ImportFrom(&'a ast::StmtImportFrom),
 }
+
+#[cfg(test)]
+mod tests {
+    use ruff_db::files::system_path_to_file;
+    use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
+    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
+
+    use super::{lint_semantic, Diagnostics};
+    use crate::db::tests::TestDb;
+
+    fn setup_db() -> TestDb {
+        let db = TestDb::new();
+
+        Program::new(
+            &db,
+            TargetVersion::Py38,
+            SearchPathSettings {
+                extra_paths: Vec::new(),
+                workspace_root: SystemPathBuf::from("/src"),
+                site_packages: None,
+                custom_typeshed: None,
+            },
+        );
+
+        db
+    }
+
+    #[test]
+    fn undefined_variable() {
+        let mut db = setup_db();
+
+        db.write_dedented(
+            "/src/a.py",
+            "
+            x = int
+            if flag:
+                y = x
+            y
+            ",
+        )
+        .unwrap();
+
+        let file = system_path_to_file(&db, "/src/a.py").expect("file to exist");
+        let Diagnostics::List(messages) = lint_semantic(&db, file) else {
+            panic!("expected some diagnostics");
+        };
+
+        assert_eq!(
+            *messages,
+            vec![
+                "Name 'flag' used when not defined.",
+                "Name 'y' used when possibly not defined."
+            ]
+        );
+    }
+}
crates/red_knot/src/main.rs

@@ -11,8 +11,8 @@ use tracing_subscriber::{Layer, Registry};
 use tracing_tree::time::Uptime;

 use red_knot::db::RootDatabase;
-use red_knot::watch::FileWatcher;
-use red_knot::watch::FileWatcherChange;
+use red_knot::watch;
+use red_knot::watch::WorkspaceWatcher;
 use red_knot::workspace::WorkspaceMetadata;
 use ruff_db::program::{ProgramSettings, SearchPathSettings};
 use ruff_db::system::{OsSystem, System, SystemPathBuf};
@@ -57,6 +57,13 @@ struct Args {

     #[clap(flatten)]
     verbosity: Verbosity,
+
+    #[arg(
+        long,
+        help = "Run in watch mode by re-running whenever files change",
+        short = 'W'
+    )]
+    watch: bool,
 }

 #[allow(
@@ -72,6 +79,7 @@ pub fn main() -> anyhow::Result<()> {
         extra_search_path: extra_paths,
         target_version,
         verbosity,
+        watch,
     } = Args::parse_from(std::env::args().collect::<Vec<_>>());

     let verbosity = verbosity.level();
@@ -117,125 +125,120 @@ pub fn main() -> anyhow::Result<()> {
         }
     })?;

-    let file_changes_notifier = main_loop.file_changes_notifier();
-
-    // Watch for file changes and re-trigger the analysis.
-    let mut file_watcher = FileWatcher::new(move |changes| {
-        file_changes_notifier.notify(changes);
-    })?;
-
-    file_watcher.watch_folder(db.workspace().root(&db).as_std_path())?;
-
-    main_loop.run(&mut db);
-
-    println!("{}", countme::get_all());
+    if watch {
+        main_loop.watch(&mut db)?;
+    } else {
+        main_loop.run(&mut db);
+    }

     Ok(())
 }

 struct MainLoop {
-    verbosity: Option<VerbosityLevel>,
-    orchestrator: crossbeam_channel::Sender<OrchestratorMessage>,
+    /// Sender that can be used to send messages to the main loop.
+    sender: crossbeam_channel::Sender<MainLoopMessage>,
+
     /// Receiver for the messages sent **to** the main loop.
     receiver: crossbeam_channel::Receiver<MainLoopMessage>,
+
+    /// The file system watcher, if running in watch mode.
+    watcher: Option<WorkspaceWatcher>,
+
+    verbosity: Option<VerbosityLevel>,
 }

 impl MainLoop {
     fn new(verbosity: Option<VerbosityLevel>) -> (Self, MainLoopCancellationToken) {
-        let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
-        let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);
-
-        let mut orchestrator = Orchestrator {
-            receiver: orchestrator_receiver,
-            main_loop: main_loop_sender.clone(),
-            revision: 0,
-        };
-
-        std::thread::spawn(move || {
-            orchestrator.run();
-        });
+        let (sender, receiver) = crossbeam_channel::bounded(10);

         (
             Self {
+                sender: sender.clone(),
+                receiver,
+                watcher: None,
                 verbosity,
-                orchestrator: orchestrator_sender,
-                receiver: main_loop_receiver,
             },
-            MainLoopCancellationToken {
-                sender: main_loop_sender,
-            },
+            MainLoopCancellationToken { sender },
         )
     }

-    fn file_changes_notifier(&self) -> FileChangesNotifier {
-        FileChangesNotifier {
-            sender: self.orchestrator.clone(),
-        }
+    fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<()> {
+        let sender = self.sender.clone();
+        let watcher = watch::directory_watcher(move |event| {
+            sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
+        })?;
+
+        self.watcher = Some(WorkspaceWatcher::new(watcher, db));
+        self.run(db);
+
+        Ok(())
     }

     #[allow(clippy::print_stderr)]
-    fn run(self, db: &mut RootDatabase) {
-        self.orchestrator.send(OrchestratorMessage::Run).unwrap();
+    fn run(mut self, db: &mut RootDatabase) {
+        // Schedule the first check.
+        self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
+        let mut revision = 0usize;

-        for message in &self.receiver {
+        while let Ok(message) = self.receiver.recv() {
             tracing::trace!("Main Loop: Tick");

             match message {
-                MainLoopMessage::CheckWorkspace { revision } => {
+                MainLoopMessage::CheckWorkspace => {
                     let db = db.snapshot();
-                    let orchestrator = self.orchestrator.clone();
+                    let sender = self.sender.clone();

                     // Spawn a new task that checks the workspace. This needs to be done in a separate thread
                     // to prevent blocking the main loop here.
                     rayon::spawn(move || {
                         if let Ok(result) = db.check() {
-                            orchestrator
-                                .send(OrchestratorMessage::CheckCompleted {
-                                    diagnostics: result,
-                                    revision,
-                                })
-                                .unwrap();
+                            // Send the result back to the main loop for printing.
+                            sender
+                                .send(MainLoopMessage::CheckCompleted { result, revision })
+                                .ok();
                         }
                     });
                 }

+                MainLoopMessage::CheckCompleted {
+                    result,
+                    revision: check_revision,
+                } => {
+                    if check_revision == revision {
+                        eprintln!("{}", result.join("\n"));
+
+                        if self.verbosity == Some(VerbosityLevel::Trace) {
+                            eprintln!("{}", countme::get_all());
+                        }
+                    }
+
+                    if self.watcher.is_none() {
+                        return self.exit();
+                    }
+                }
+
                 MainLoopMessage::ApplyChanges(changes) => {
+                    revision += 1;
                     // Automatically cancels any pending queries and waits for them to complete.
                     db.apply_changes(changes);
-                }
-                MainLoopMessage::CheckCompleted(diagnostics) => {
-                    eprintln!("{}", diagnostics.join("\n"));
-                    if self.verbosity == Some(VerbosityLevel::Trace) {
-                        eprintln!("{}", countme::get_all());
+                    if let Some(watcher) = self.watcher.as_mut() {
+                        watcher.update(db);
                     }
+                    self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
                 }
                 MainLoopMessage::Exit => {
-                    if self.verbosity == Some(VerbosityLevel::Trace) {
-                        eprintln!("{}", countme::get_all());
-                    }
-                    return;
+                    return self.exit();
                 }
             }
         }
     }
-}

-impl Drop for MainLoop {
-    fn drop(&mut self) {
-        self.orchestrator
-            .send(OrchestratorMessage::Shutdown)
-            .unwrap();
-    }
-}
-
-#[derive(Debug, Clone)]
-struct FileChangesNotifier {
-    sender: crossbeam_channel::Sender<OrchestratorMessage>,
-}
-
-impl FileChangesNotifier {
-    fn notify(&self, changes: Vec<FileWatcherChange>) {
-        self.sender
-            .send(OrchestratorMessage::FileChanges(changes))
-            .unwrap();
+    #[allow(clippy::print_stderr, clippy::unused_self)]
+    fn exit(self) {
+        if self.verbosity == Some(VerbosityLevel::Trace) {
+            eprintln!("Exit");
+            eprintln!("{}", countme::get_all());
+        }
     }
 }

@@ -250,115 +253,16 @@ impl MainLoopCancellationToken {
     }
 }

-struct Orchestrator {
-    /// Sends messages to the main loop.
-    main_loop: crossbeam_channel::Sender<MainLoopMessage>,
-    /// Receives messages from the main loop.
-    receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
-    revision: usize,
-}
-
-impl Orchestrator {
-    #[allow(clippy::print_stderr)]
-    fn run(&mut self) {
-        while let Ok(message) = self.receiver.recv() {
-            match message {
-                OrchestratorMessage::Run => {
-                    self.main_loop
-                        .send(MainLoopMessage::CheckWorkspace {
-                            revision: self.revision,
-                        })
-                        .unwrap();
-                }
-
-                OrchestratorMessage::CheckCompleted {
-                    diagnostics,
-                    revision,
-                } => {
-                    // Only take the diagnostics if they are for the latest revision.
-                    if self.revision == revision {
-                        self.main_loop
-                            .send(MainLoopMessage::CheckCompleted(diagnostics))
-                            .unwrap();
-                    } else {
-                        tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
-                    }
-                }
-
-                OrchestratorMessage::FileChanges(changes) => {
-                    // Request cancellation, but wait until all analysis tasks have completed to
-                    // avoid stale messages in the next main loop.
-
-                    self.revision += 1;
-                    self.debounce_changes(changes);
-                }
-                OrchestratorMessage::Shutdown => {
-                    return self.shutdown();
-                }
-            }
-        }
-    }
-
-    fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
-        loop {
-            // Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 100ms.
-            crossbeam_channel::select! {
-                recv(self.receiver) -> message => {
-                    match message {
-                        Ok(OrchestratorMessage::Shutdown) => {
-                            return self.shutdown();
-                        }
-                        Ok(OrchestratorMessage::FileChanges(file_changes)) => {
-                            changes.extend(file_changes);
-                        }
-
-                        Ok(OrchestratorMessage::CheckCompleted { .. }) => {
-                            // disregard any outdated completion message.
-                        }
-                        Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),
-
-                        Err(_) => {
-                            // There are no more senders, no point in waiting for more messages
-                            return;
-                        }
-                    }
-                },
-                default(std::time::Duration::from_millis(10)) => {
-                    // No more file changes after 10 ms, send the changes and schedule a new analysis
-                    self.main_loop.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
-                    self.main_loop.send(MainLoopMessage::CheckWorkspace { revision: self.revision }).unwrap();
-                    return;
-                }
-            }
-        }
-    }
-
-    #[allow(clippy::unused_self)]
-    fn shutdown(&self) {
-        tracing::trace!("Shutting down orchestrator.");
-    }
-}
-
 /// Message sent from the orchestrator to the main loop.
 #[derive(Debug)]
 enum MainLoopMessage {
-    CheckWorkspace { revision: usize },
-    CheckCompleted(Vec<String>),
-    ApplyChanges(Vec<FileWatcherChange>),
+    CheckWorkspace,
+    CheckCompleted {
+        result: Vec<String>,
+        revision: usize,
+    },
+    ApplyChanges(Vec<watch::ChangeEvent>),
     Exit,
 }

-#[derive(Debug)]
-enum OrchestratorMessage {
-    Run,
-    Shutdown,
-
-    CheckCompleted {
-        diagnostics: Vec<String>,
-        revision: usize,
-    },
-
-    FileChanges(Vec<FileWatcherChange>),
-}
-
 fn setup_tracing(verbosity: Option<VerbosityLevel>) {
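The rewritten main loop drops the orchestrator thread in favor of a revision counter: every `ApplyChanges` batch bumps `revision`, each check carries the revision it started from, and a result that arrives for an older revision is discarded. A self-contained sketch of that staleness check using std channels (names are illustrative, not red_knot's):

```rust
use std::sync::mpsc;
use std::thread;

enum Message {
    CheckCompleted { result: String, revision: usize },
}

fn main() {
    let (sender, receiver) = mpsc::channel();
    let mut revision = 0usize;

    // A background check starts at revision 0 ...
    let tx = sender.clone();
    thread::spawn(move || {
        tx.send(Message::CheckCompleted {
            result: "diagnostics for rev 0".to_string(),
            revision: 0,
        })
        .unwrap();
    });

    // ... but the files change before it finishes, bumping the revision.
    revision += 1;

    drop(sender); // no more senders: the loop below ends after the last message
    while let Ok(message) = receiver.recv() {
        match message {
            Message::CheckCompleted {
                result,
                revision: check_revision,
            } => {
                if check_revision == revision {
                    println!("{result}");
                } else {
                    // Stale: a newer revision exists, so drop the result.
                    println!("discarding stale result");
                }
            }
        }
    }
}
```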
crates/red_knot/src/watch.rs

@@ -1,111 +1,94 @@
-use std::path::Path;
-
-use anyhow::Context;
-use notify::event::{CreateKind, ModifyKind, RemoveKind};
-use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
-
 use ruff_db::system::{SystemPath, SystemPathBuf};

-pub struct FileWatcher {
-    watcher: RecommendedWatcher,
-}
+pub use watcher::{directory_watcher, EventHandler, Watcher};
+pub use workspace_watcher::WorkspaceWatcher;

-pub trait EventHandler: Send + 'static {
-    fn handle(&self, changes: Vec<FileWatcherChange>);
-}
+mod watcher;
+mod workspace_watcher;

-impl<F> EventHandler for F
-where
-    F: Fn(Vec<FileWatcherChange>) + Send + 'static,
-{
-    fn handle(&self, changes: Vec<FileWatcherChange>) {
-        let f = self;
-        f(changes);
+/// Classification of a file system change event.
+///
+/// ## Renaming a path
+/// Renaming a path creates a [`ChangeEvent::Deleted`] event for the old path and/or a [`ChangeEvent::Created`] for the new location.
+/// Whether both events are created or just one of them depends on from where to where the path was moved:
+///
+/// * Inside the watched directory: Both events are created.
+/// * From a watched directory to a non-watched directory: Only a [`ChangeEvent::Deleted`] event is created.
+/// * From a non-watched directory to a watched directory: Only a [`ChangeEvent::Created`] event is created.
+///
+/// ## Renaming a directory
+/// It's up to the file watcher implementation to aggregate the rename event for a directory to a single rename
+/// event instead of emitting an event for each file or subdirectory in that path.
+#[derive(Debug, PartialEq, Eq)]
+pub enum ChangeEvent {
+    /// A new path was created
+    Created {
+        path: SystemPathBuf,
+        kind: CreatedKind,
+    },
+
+    /// The content or metadata of a path was changed.
+    Changed {
+        path: SystemPathBuf,
+        kind: ChangedKind,
+    },
+
+    /// A path was deleted.
+    Deleted {
+        path: SystemPathBuf,
+        kind: DeletedKind,
+    },
+
+    /// The file watcher failed to observe some changes and now is out of sync with the file system.
+    ///
+    /// This can happen if many files are changed at once. The consumer should rescan all files to catch up
+    /// with the file system.
+    Rescan,
+}
+
+impl ChangeEvent {
+    pub fn file_name(&self) -> Option<&str> {
+        self.path().and_then(|path| path.file_name())
+    }
+
+    pub fn path(&self) -> Option<&SystemPath> {
+        match self {
+            ChangeEvent::Created { path, .. }
+            | ChangeEvent::Changed { path, .. }
+            | ChangeEvent::Deleted { path, .. } => Some(path),
+            ChangeEvent::Rescan => None,
+        }
     }
 }

-impl FileWatcher {
-    pub fn new<E>(handler: E) -> anyhow::Result<Self>
-    where
-        E: EventHandler,
-    {
-        Self::from_handler(Box::new(handler))
-    }
-
-    fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
-        let watcher = recommended_watcher(move |event: notify::Result<Event>| {
-            match event {
-                Ok(event) => {
-                    // TODO verify that this handles all events correctly
-                    let change_kind = match event.kind {
-                        EventKind::Create(CreateKind::File) => FileChangeKind::Created,
-                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::From)) => {
-                            FileChangeKind::Deleted
-                        }
-                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::To)) => {
-                            FileChangeKind::Created
-                        }
-                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Any)) => {
-                            // TODO Introduce a better catch all event for cases that we don't understand.
-                            FileChangeKind::Created
-                        }
-                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Both)) => {
-                            todo!("Handle both create and delete event.");
-                        }
-                        EventKind::Modify(_) => FileChangeKind::Modified,
-                        EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
-                        _ => {
-                            return;
-                        }
-                    };
-
-                    let mut changes = Vec::new();
-
-                    for path in event.paths {
-                        if let Some(fs_path) = SystemPath::from_std_path(&path) {
-                            changes
-                                .push(FileWatcherChange::new(fs_path.to_path_buf(), change_kind));
-                        }
-                    }
-
-                    if !changes.is_empty() {
-                        handler.handle(changes);
-                    }
-                }
-                // TODO proper error handling
-                Err(err) => {
-                    panic!("Error: {err}");
-                }
-            }
-        })
-        .context("Failed to create file watcher.")?;
-
-        Ok(Self { watcher })
-    }
-
-    pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
-        self.watcher.watch(path, RecursiveMode::Recursive)?;
-
-        Ok(())
-    }
-}
-
-#[derive(Clone, Debug)]
-pub struct FileWatcherChange {
-    pub path: SystemPathBuf,
-    #[allow(unused)]
-    pub kind: FileChangeKind,
-}
-
-impl FileWatcherChange {
-    pub fn new(path: SystemPathBuf, kind: FileChangeKind) -> Self {
-        Self { path, kind }
-    }
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum FileChangeKind {
-    Created,
-    Modified,
-    Deleted,
-}
+/// Classification of an event that creates a new path.
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum CreatedKind {
+    /// A file was created.
+    File,
+
+    /// A directory was created.
+    Directory,
+
+    /// A file, directory, or any other kind of path was created.
+    Any,
+}
+
+/// Classification of an event related to a content or metadata change.
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum ChangedKind {
+    /// The content of a file was changed.
+    FileContent,
+
+    /// The metadata of a file was changed.
+    FileMetadata,
+
+    /// Either the content or metadata of a path was changed.
+    Any,
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum DeletedKind {
+    File,
+    Directory,
+    Any,
+}
393
crates/red_knot/src/watch/watcher.rs
Normal file
393
crates/red_knot/src/watch/watcher.rs
Normal file
@@ -0,0 +1,393 @@
|
||||
use notify::event::{CreateKind, MetadataKind, ModifyKind, RemoveKind, RenameMode};
|
||||
use notify::{recommended_watcher, EventKind, RecommendedWatcher, RecursiveMode, Watcher as _};
|
||||
|
||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
||||
|
||||
use crate::watch::{ChangeEvent, ChangedKind, CreatedKind, DeletedKind};
|
||||
|
||||
/// Creates a new watcher observing file system changes.
|
||||
///
|
||||
/// The watcher debounces events, but guarantees to send all changes eventually (even if the file system keeps changing).
|
||||
pub fn directory_watcher<H>(handler: H) -> notify::Result<Watcher>
|
||||
where
|
||||
H: EventHandler,
|
||||
{
|
||||
let (sender, receiver) = crossbeam::channel::bounded(20);
|
||||
|
||||
let debouncer = std::thread::Builder::new()
|
||||
.name("watcher::debouncer".to_string())
|
||||
.spawn(move || {
|
||||
// Wait for the next set of changes
|
||||
for message in &receiver {
|
||||
let event = match message {
|
||||
DebouncerMessage::Event(event) => event,
|
||||
DebouncerMessage::Flush => {
|
||||
continue;
|
||||
}
|
||||
DebouncerMessage::Exit => {
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let mut debouncer = Debouncer::default();
|
||||
|
||||
debouncer.add_result(event);
|
||||
|
||||
// Debounce any new incoming changes:
|
||||
// * Take any new incoming change events and merge them with the previous change events
|
||||
// * If there are no new incoming change events after 10 ms, flush the changes and wait for the next notify event.
|
||||
// * Flush no later than after 3s.
|
||||
loop {
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
crossbeam::select! {
|
||||
recv(receiver) -> message => {
|
||||
match message {
|
||||
Ok(DebouncerMessage::Event(event)) => {
|
||||
debouncer.add_result(event);
|
||||
|
||||
// Ensure that we flush the changes eventually.
|
||||
if start.elapsed() > std::time::Duration::from_secs(3) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(DebouncerMessage::Flush) => {
|
||||
break;
|
||||
}
|
||||
|
||||
Ok(DebouncerMessage::Exit) => {
|
||||
return;
|
||||
},
|
||||
|
||||
Err(_) => {
|
||||
// There are no more senders. There's no point in waiting for more messages
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
default(std::time::Duration::from_millis(10)) => {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No more file changes after 10 ms, send the changes and schedule a new analysis
|
||||
let events = debouncer.into_events();
|
||||
|
||||
if !events.is_empty() {
|
||||
handler.handle(events);
|
||||
}
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
let debouncer_sender = sender.clone();
|
||||
let watcher =
|
||||
recommended_watcher(move |event| sender.send(DebouncerMessage::Event(event)).unwrap())?;
|
||||
|
||||
Ok(Watcher {
|
||||
watcher,
|
||||
debouncer_sender,
|
||||
debouncer_thread: Some(debouncer),
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum DebouncerMessage {
|
||||
/// A new file system event.
|
||||
Event(notify::Result<notify::Event>),
|
||||
|
||||
Flush,
|
||||
|
||||
/// Exit the debouncer thread.
|
||||
Exit,
|
||||
}
|
||||
|
||||
pub struct Watcher {
|
||||
watcher: RecommendedWatcher,
|
||||
debouncer_sender: crossbeam::channel::Sender<DebouncerMessage>,
|
||||
debouncer_thread: Option<std::thread::JoinHandle<()>>,
|
||||
}
|
||||
|
||||
impl Watcher {
|
||||
/// Sets up file watching for `path`.
|
||||
pub fn watch(&mut self, path: &SystemPath) -> notify::Result<()> {
|
||||
self.watcher
|
||||
.watch(path.as_std_path(), RecursiveMode::Recursive)
|
||||
}
|
||||
|
||||
/// Stops file watching for `path`.
|
||||
pub fn unwatch(&mut self, path: &SystemPath) -> notify::Result<()> {
|
||||
self.watcher.unwatch(path.as_std_path())
|
||||
}
|
||||
|
||||
/// Stops the file watcher.
|
||||
///
|
||||
/// Pending events will be discarded.
|
||||
///
|
||||
/// The call blocks until the watcher has stopped.
|
||||
pub fn stop(mut self) {
|
||||
self.set_stop();
|
||||
if let Some(debouncher) = self.debouncer_thread.take() {
|
||||
debouncher.join().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
/// Flushes any pending events.
|
||||
pub fn flush(&self) {
|
||||
self.debouncer_sender.send(DebouncerMessage::Flush).unwrap();
|
||||
}
|
||||
|
||||
fn set_stop(&mut self) {
|
||||
self.debouncer_sender.send(DebouncerMessage::Exit).ok();
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Watcher {
|
||||
fn drop(&mut self) {
|
||||
self.set_stop();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Debouncer {
|
||||
events: Vec<ChangeEvent>,
|
||||
rescan_event: Option<ChangeEvent>,
|
||||
}
|
||||
|
||||
impl Debouncer {
|
||||
#[tracing::instrument(level = "trace", skip(self))]
|
||||
fn add_result(&mut self, result: notify::Result<notify::Event>) {
|
||||
match result {
|
||||
Ok(event) => self.add_event(event),
|
||||
Err(error) => self.add_error(error),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_self, clippy::needless_pass_by_value)]
|
||||
fn add_error(&mut self, error: notify::Error) {
|
||||
// Micha: I skimmed through some of notify's source code and it seems the most common errors
|
||||
// are IO errors. All other errors should really only happen when adding or removing a watched folders.
|
||||
// It's not clear what an upstream handler should do in the case of an IOError (other than logging it).
|
||||
// That's what we do for now as well.
|
||||
tracing::warn!("File watcher error: {error:?}.");
|
||||
}
|
||||
|
||||
fn add_event(&mut self, event: notify::Event) {
|
||||
if self.rescan_event.is_some() {
|
||||
// We're already in a rescan state, ignore all other events
|
||||
return;
|
||||
}
|
||||
|
||||
// If the file watcher is out of sync or we observed too many changes, trigger a full rescan
|
||||
if event.need_rescan() || self.events.len() > 10000 {
|
||||
self.events = Vec::new();
|
||||
self.rescan_event = Some(ChangeEvent::Rescan);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
let kind = event.kind;
|
||||
let path = match SystemPathBuf::from_path_buf(event.paths.into_iter().next().unwrap()) {
|
||||
Ok(path) => path,
|
||||
Err(path) => {
|
||||
tracing::debug!(
|
||||
"Ignore change to non-UTF8 path '{path}': {kind:?}",
|
||||
path = path.display()
|
||||
);
|
||||
|
||||
// Ignore non-UTF8 paths because they aren't handled by the rest of the system.
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let event = match kind {
|
||||
EventKind::Create(create) => {
|
||||
let kind = match create {
|
||||
CreateKind::File => CreatedKind::File,
|
||||
CreateKind::Folder => CreatedKind::Directory,
|
||||
CreateKind::Any | CreateKind::Other => {
|
||||
CreatedKind::from(FileType::from_path(&path))
|
||||
}
|
||||
};
|
||||
|
||||
ChangeEvent::Created { path, kind }
|
||||
}
|
||||
|
||||
EventKind::Modify(modify) => match modify {
|
||||
ModifyKind::Metadata(metadata) => {
|
||||
if FileType::from_path(&path) != FileType::File {
|
||||
// Only interested in file metadata events.
|
||||
return;
|
||||
}
|
||||
|
||||
match metadata {
|
||||
MetadataKind::Any | MetadataKind::Permissions | MetadataKind::Other => {
|
||||
ChangeEvent::Changed {
|
||||
path,
|
||||
kind: ChangedKind::FileMetadata,
|
||||
}
|
||||
}
|
||||
|
||||
MetadataKind::AccessTime
|
||||
| MetadataKind::WriteTime
|
||||
| MetadataKind::Ownership
|
||||
| MetadataKind::Extended => {
|
||||
// We're not interested in these metadata changes
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ModifyKind::Data(_) => ChangeEvent::Changed {
|
||||
kind: ChangedKind::FileMetadata,
|
||||
path,
|
||||
},
|
||||
|
||||
ModifyKind::Name(rename) => match rename {
|
||||
RenameMode::From => {
|
||||
// TODO: notify_debouncer_full matches the `RenameMode::From` and `RenameMode::To` events.
|
||||
// Matching the from and to event would have the added advantage that we know the
|
||||
// type of the path that was renamed, allowing `apply_changes` to avoid traversing the
|
||||
// entire package.
|
||||
// https://github.com/notify-rs/notify/blob/128bf6230c03d39dbb7f301ff7b20e594e34c3a2/notify-debouncer-full/src/lib.rs#L293-L297
|
||||
ChangeEvent::Deleted {
|
||||
kind: DeletedKind::Any,
|
||||
path,
|
||||
}
|
||||
}
|
||||
|
||||
RenameMode::To => ChangeEvent::Created {
|
||||
kind: CreatedKind::from(FileType::from_path(&path)),
|
||||
path,
|
||||
},
|
||||
|
||||
RenameMode::Both => {
|
||||
// Both is only emitted when moving a path from within a watched directory
|
||||
// to another watched directory. The event is not emitted if the `to` or `from` path
|
||||
// lay outside the watched directory. However, the `To` and `From` events are always emitted.
|
||||
// That's why we ignore `Both` and instead rely on `To` and `From`.
|
||||
return;
|
||||
}

                    RenameMode::Other => {
                        // Skip over any other rename events
                        return;
                    }

                    RenameMode::Any => {
                        // Guess the action based on the current file system state
                        if path.as_std_path().exists() {
                            let file_type = FileType::from_path(&path);

                            ChangeEvent::Created {
                                kind: file_type.into(),
                                path,
                            }
                        } else {
                            ChangeEvent::Deleted {
                                kind: DeletedKind::Any,
                                path,
                            }
                        }
                    }
                },

                ModifyKind::Other => {
                    // Skip other modification events that are not content or metadata related
                    return;
                }

                ModifyKind::Any => {
                    if !path.as_std_path().is_file() {
                        return;
                    }

                    ChangeEvent::Changed {
                        path,
                        kind: ChangedKind::Any,
                    }
                }
            },

            EventKind::Access(_) => {
                // We're not interested in any access events
                return;
            }

            EventKind::Remove(kind) => {
                let kind = match kind {
                    RemoveKind::File => DeletedKind::File,
                    RemoveKind::Folder => DeletedKind::Directory,
                    RemoveKind::Any | RemoveKind::Other => DeletedKind::Any,
                };

                ChangeEvent::Deleted { path, kind }
            }

            EventKind::Other => {
                // Skip over meta events
                return;
            }

            EventKind::Any => {
                tracing::debug!("Skip any FS event for {path}.");
                return;
            }
        };

        self.events.push(event);
    }

    fn into_events(self) -> Vec<ChangeEvent> {
        if let Some(rescan_event) = self.rescan_event {
            vec![rescan_event]
        } else {
            self.events
        }
    }
}

pub trait EventHandler: Send + 'static {
    fn handle(&self, changes: Vec<ChangeEvent>);
}

impl<F> EventHandler for F
where
    F: Fn(Vec<ChangeEvent>) + Send + 'static,
{
    fn handle(&self, changes: Vec<ChangeEvent>) {
        let f = self;
        f(changes);
    }
}
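
Thanks to this blanket impl, any sufficiently long-lived closure can serve as the event handler; the `file_watching.rs` tests later in this diff wire one up exactly this way. A condensed sketch (assumes an enclosing function that can propagate the error with `?`):

// Condensed from the integration tests below: a closure that forwards each
// batch of change events over a channel satisfies `EventHandler`.
let (sender, receiver) = crossbeam::channel::unbounded();
let watcher = directory_watcher(move |events: Vec<ChangeEvent>| {
    // Runs on the watcher thread; hand the batch to the consumer side.
    sender.send(events).unwrap();
})?;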

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum FileType {
    /// The event is related to a file.
    File,

    /// The event is related to a directory.
    Directory,

    /// It's unknown whether the event is related to a file or a directory or if it is any other file type.
    Any,
}

impl FileType {
    fn from_path(path: &SystemPath) -> FileType {
        match path.as_std_path().metadata() {
            Ok(metadata) if metadata.is_file() => FileType::File,
            Ok(metadata) if metadata.is_dir() => FileType::Directory,
            Ok(_) | Err(_) => FileType::Any,
        }
    }
}

impl From<FileType> for CreatedKind {
    fn from(value: FileType) -> Self {
        match value {
            FileType::File => Self::File,
            FileType::Directory => Self::Directory,
            FileType::Any => Self::Any,
        }
    }
}
crates/red_knot/src/watch/workspace_watcher.rs (new file, 112 lines)
@@ -0,0 +1,112 @@
use crate::db::RootDatabase;
use crate::watch::Watcher;
use ruff_db::system::SystemPathBuf;
use rustc_hash::FxHashSet;
use std::fmt::{Formatter, Write};
use tracing::info;

/// Wrapper around a [`Watcher`] that watches the relevant paths of a workspace.
pub struct WorkspaceWatcher {
    watcher: Watcher,

    /// The paths that need to be watched. This includes paths for which setting up file watching failed.
    watched_paths: FxHashSet<SystemPathBuf>,

    /// Paths that should be watched, but for which setting up the watcher failed for some reason.
    /// This should be rare.
    errored_paths: Vec<SystemPathBuf>,
}

impl WorkspaceWatcher {
    /// Create a new workspace watcher.
    pub fn new(watcher: Watcher, db: &RootDatabase) -> Self {
        let mut watcher = Self {
            watcher,
            watched_paths: FxHashSet::default(),
            errored_paths: Vec::new(),
        };

        watcher.update(db);

        watcher
    }

    pub fn update(&mut self, db: &RootDatabase) {
        let new_watch_paths = db.workspace().paths_to_watch(db);

        let mut added_folders = new_watch_paths.difference(&self.watched_paths).peekable();
        let mut removed_folders = self.watched_paths.difference(&new_watch_paths).peekable();

        if added_folders.peek().is_none() && removed_folders.peek().is_none() {
            return;
        }

        for added_folder in added_folders {
            // Log a warning. It's not worth aborting if registering a single folder fails because
            // Ruff otherwise still works as expected.
            if let Err(error) = self.watcher.watch(added_folder) {
                // TODO: Log a user-facing warning.
                tracing::warn!("Failed to set up watcher for path '{added_folder}': {error}. You have to restart Ruff after making changes to files under this path or you might see stale results.");
                self.errored_paths.push(added_folder.clone());
            }
        }

        for removed_path in removed_folders {
            if let Some(index) = self
                .errored_paths
                .iter()
                .position(|path| path == removed_path)
            {
                self.errored_paths.swap_remove(index);
                continue;
            }

            if let Err(error) = self.watcher.unwatch(removed_path) {
                info!("Failed to remove the file watcher for the path '{removed_path}': {error}.");
            }
        }

        info!(
            "Set up file watchers for {}",
            DisplayWatchedPaths {
                paths: &new_watch_paths
            }
        );

        self.watched_paths = new_watch_paths;
    }

    /// Returns `true` if setting up watching for any path failed.
    pub fn has_errored_paths(&self) -> bool {
        !self.errored_paths.is_empty()
    }

    pub fn flush(&self) {
        self.watcher.flush();
    }

    pub fn stop(self) {
        self.watcher.stop();
    }
}

struct DisplayWatchedPaths<'a> {
    paths: &'a FxHashSet<SystemPathBuf>,
}

impl std::fmt::Display for DisplayWatchedPaths<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_char('[')?;

        let mut iter = self.paths.iter();
        if let Some(first) = iter.next() {
            write!(f, "\"{first}\"")?;

            for path in iter {
                write!(f, ", \"{path}\"")?;
            }
        }

        f.write_char(']')
    }
}
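
The intended lifecycle, condensed from the `file_watching.rs` tests later in this diff (`handle_changes` is a hypothetical placeholder for an `EventHandler`, and the `?` assumes an enclosing fallible function):

// Create the OS watcher, then wrap it so it tracks the workspace's paths.
let mut watcher = WorkspaceWatcher::new(directory_watcher(handle_changes)?, &db);

// After the workspace or the search-path settings change, re-sync the watched roots.
watcher.update(&db);

// On shutdown: deliver any pending events, then stop the watcher thread.
watcher.flush();
watcher.stop();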
@@ -1,22 +1,25 @@
 // TODO: Fix clippy warnings created by salsa macros
-#![allow(clippy::used_underscore_binding)]
+#![allow(clippy::used_underscore_binding, unreachable_pub)]

 use std::{collections::BTreeMap, sync::Arc};

 use rustc_hash::{FxBuildHasher, FxHashSet};

 pub use metadata::{PackageMetadata, WorkspaceMetadata};
+use red_knot_module_resolver::system_module_search_paths;
 use ruff_db::{
     files::{system_path_to_file, File},
     system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
 };
 use ruff_python_ast::{name::Name, PySourceType};

+use crate::workspace::files::{Index, IndexedFiles, PackageFiles};
 use crate::{
     db::Db,
     lint::{lint_semantic, lint_syntax, Diagnostics},
 };

+mod files;
 mod metadata;

 /// The project workspace as a Salsa ingredient.
@@ -93,7 +96,7 @@ pub struct Package {

     /// The files that are part of this package.
     #[return_ref]
-    file_set: Arc<FxHashSet<File>>,
+    file_set: PackageFiles,
     // TODO: Add the loaded settings.
 }

@@ -117,6 +120,7 @@ impl Workspace {
         self.package_tree(db).values().copied()
     }

+    #[tracing::instrument(skip_all)]
     pub fn reload(self, db: &mut dyn Db, metadata: WorkspaceMetadata) {
         assert_eq!(self.root(db), metadata.root());

@@ -139,6 +143,7 @@ impl Workspace {
         self.set_package_tree(db).to(new_packages);
     }

+    #[tracing::instrument(level = "debug", skip_all)]
     pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
         let path = metadata.root().to_path_buf();

@@ -157,7 +162,7 @@ impl Workspace {
     pub fn package(self, db: &dyn Db, path: &SystemPath) -> Option<Package> {
         let packages = self.package_tree(db);

-        let (package_path, package) = packages.range(..path.to_path_buf()).next_back()?;
+        let (package_path, package) = packages.range(..=path.to_path_buf()).next_back()?;

         if path.starts_with(package_path) {
             Some(*package)
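
The `..` to `..=` change matters when the queried path is exactly a package root: with an exclusive upper bound, the entry keyed by that root is skipped. A self-contained illustration (not from the diff) using a plain `BTreeMap`:

use std::collections::BTreeMap;

fn main() {
    let mut packages = BTreeMap::new();
    packages.insert("/ws/pkg_a".to_string(), "pkg_a");

    let path = "/ws/pkg_a".to_string();
    // Exclusive upper bound: an entry keyed exactly `path` is missed...
    assert!(packages.range(..path.clone()).next_back().is_none());
    // ...while the inclusive bound finds it.
    assert_eq!(
        packages.range(..=path).next_back().map(|(_, v)| *v),
        Some("pkg_a")
    );
}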
@@ -236,8 +241,20 @@ impl Workspace {
             FxHashSet::default()
         }
     }

+    /// Returns the paths that should be watched.
+    ///
+    /// The paths that require watching might change with every revision.
+    pub fn paths_to_watch(self, db: &dyn Db) -> FxHashSet<SystemPathBuf> {
+        ruff_db::system::deduplicate_nested_paths(
+            std::iter::once(self.root(db)).chain(system_module_search_paths(db.upcast())),
+        )
+        .map(SystemPath::to_path_buf)
+        .collect()
+    }
 }
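
The watcher roots are the workspace root plus every system module search path, collapsed so nested directories don't get their own watchers. A sketch of the assumed `deduplicate_nested_paths` semantics (inferred from its name and this call site, not spelled out in the diff):

// Nested paths collapse into their ancestors; independent paths are kept.
let paths: Vec<_> = ruff_db::system::deduplicate_nested_paths(
    [
        SystemPath::new("/workspace"),
        SystemPath::new("/workspace/src"), // nested under `/workspace`: dropped
        SystemPath::new("/opt/site-packages"), // independent: kept
    ]
    .into_iter(),
)
.collect();
// Expected result: ["/workspace", "/opt/site-packages"]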
#[salsa::tracked]
impl Package {
    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
@@ -245,51 +262,69 @@ impl Package {

     /// Returns `true` if `file` is a first-party file part of this package.
     pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
-        self.files(db).contains(&file)
+        self.files(db).read().contains(&file)
     }

-    pub fn files(self, db: &dyn Db) -> &FxHashSet<File> {
-        self.file_set(db)
+    #[tracing::instrument(level = "debug", skip(db))]
+    pub fn remove_file(self, db: &mut dyn Db, file: File) {
+        let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
+            return;
+        };
+
+        index.remove(file);
     }

-    pub fn remove_file(self, db: &mut dyn Db, file: File) -> bool {
-        let mut files_arc = self.file_set(db).clone();
+    #[tracing::instrument(level = "debug", skip(db))]
+    pub fn add_file(self, db: &mut dyn Db, file: File) {
+        let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
+            return;
+        };

-        // Set a dummy value. Salsa will cancel any pending queries and remove its own reference to `files`
-        // so that the reference counter to `files` now drops to 1.
-        self.set_file_set(db).to(Arc::new(FxHashSet::default()));
-
-        let files = Arc::get_mut(&mut files_arc).unwrap();
-        let removed = files.remove(&file);
-        self.set_file_set(db).to(files_arc);
-
-        removed
+        index.insert(file);
     }

     #[tracing::instrument(level = "debug", skip(db))]
     pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
         let mut result = Vec::new();
-        for file in self.files(db) {
-            let diagnostics = check_file(db, *file);
+        for file in &self.files(db).read() {
+            let diagnostics = check_file(db, file);
             result.extend_from_slice(&diagnostics);
         }

         result
     }

-    fn from_metadata(db: &dyn Db, metadata: PackageMetadata) -> Self {
-        let files = discover_package_files(db, metadata.root());
+    /// Returns the files belonging to this package.
+    #[salsa::tracked]
+    pub fn files(self, db: &dyn Db) -> IndexedFiles {
+        let files = self.file_set(db);

-        Self::new(db, metadata.name, metadata.root, Arc::new(files))
+        let indexed = match files.get() {
+            Index::Lazy(vacant) => {
+                let files = discover_package_files(db, self.root(db));
+                vacant.set(files)
+            }
+            Index::Indexed(indexed) => indexed,
+        };
+
+        indexed
+    }
+
+    fn from_metadata(db: &dyn Db, metadata: PackageMetadata) -> Self {
+        Self::new(db, metadata.name, metadata.root, PackageFiles::default())
     }

     fn update(self, db: &mut dyn Db, metadata: PackageMetadata) {
         let root = self.root(db);
         assert_eq!(root, metadata.root());

-        let files = discover_package_files(db, root);
-
         self.set_name(db).to(metadata.name);
-        self.set_file_set(db).to(Arc::new(files));
     }

+    #[tracing::instrument(level = "debug", skip(db))]
+    pub fn reload_files(self, db: &mut dyn Db) {
+        // Force a re-index of the files in the next revision.
+        self.set_file_set(db).to(PackageFiles::lazy());
+    }
 }
crates/red_knot/src/workspace/files.rs (new file, 252 lines)
@@ -0,0 +1,252 @@
use std::iter::FusedIterator;
use std::ops::Deref;
use std::sync::Arc;

use rustc_hash::FxHashSet;

use crate::db::Db;
use crate::workspace::Package;
use ruff_db::files::File;

/// The indexed files of a package.
///
/// The indexing happens lazily, but the files are then cached for subsequent reads.
///
/// ## Implementation
/// The implementation uses internal mutability to transition between the lazy and indexed state
/// without triggering a new salsa revision. This is safe because the initial indexing happens on first access,
/// so no query can depend on the contents of the indexed files before that. All subsequent mutations to
/// the indexed files must go through `IndexedFilesMut`, which uses the Salsa setter `package.set_file_set` to
/// ensure that Salsa always knows when the set of indexed files has changed.
#[derive(Debug)]
pub struct PackageFiles {
    state: std::sync::Mutex<State>,
}

impl PackageFiles {
    pub fn lazy() -> Self {
        Self {
            state: std::sync::Mutex::new(State::Lazy),
        }
    }

    fn indexed(indexed_files: IndexedFiles) -> Self {
        Self {
            state: std::sync::Mutex::new(State::Indexed(indexed_files)),
        }
    }

    pub fn get(&self) -> Index {
        let state = self.state.lock().unwrap();

        match &*state {
            State::Lazy => Index::Lazy(LazyFiles { files: state }),
            State::Indexed(files) => Index::Indexed(files.clone()),
        }
    }

    /// Returns a mutable view on the index that allows cheap in-place mutations.
    ///
    /// The changes are automatically written back to the database once the view is dropped.
    pub fn indexed_mut(db: &mut dyn Db, package: Package) -> Option<IndexedFilesMut> {
        // Calling `runtime_mut` cancels all pending salsa queries. This ensures that there are no pending
        // reads to the file set.
        let _ = db.runtime_mut();

        let files = package.file_set(db);

        let indexed = match &*files.state.lock().unwrap() {
            State::Lazy => return None,
            State::Indexed(indexed) => indexed.clone(),
        };

        Some(IndexedFilesMut {
            db: Some(db),
            package,
            new_revision: indexed.revision,
            indexed,
        })
    }
}
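
How `Package::files` (earlier in this diff, in workspace.rs) consumes this API, shown as a condensed restatement:

// First access: `get` hands out the lock as `LazyFiles`, the caller computes
// the index and stores it without starting a new Salsa revision. Subsequent
// accesses get a cheap clone of the `Arc`-backed set.
let indexed = match package.file_set(db).get() {
    Index::Lazy(vacant) => vacant.set(discover_package_files(db, package.root(db))),
    Index::Indexed(indexed) => indexed,
};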

impl Default for PackageFiles {
    fn default() -> Self {
        Self::lazy()
    }
}

#[derive(Debug)]
enum State {
    /// The files of a package haven't been indexed yet.
    Lazy,

    /// The files are indexed. Stores the known files of a package.
    Indexed(IndexedFiles),
}

pub enum Index<'a> {
    /// The index has not yet been computed. Allows inserting the files.
    Lazy(LazyFiles<'a>),

    Indexed(IndexedFiles),
}

/// Package files that have not been indexed yet.
pub struct LazyFiles<'a> {
    files: std::sync::MutexGuard<'a, State>,
}

impl<'a> LazyFiles<'a> {
    /// Sets the indexed files of a package to `files`.
    pub fn set(mut self, files: FxHashSet<File>) -> IndexedFiles {
        let files = IndexedFiles::new(files);
        *self.files = State::Indexed(files.clone());
        files
    }
}

/// The indexed files of a package.
///
/// # Salsa integration
/// The type is cheaply clonable and allows for in-place mutation of the files. The in-place mutation requires
/// extra care because the type is used as the result of Salsa queries and Salsa relies on a type's equality
/// to determine if the output has changed. This is accomplished by using a `revision` that gets incremented
/// whenever the files are changed. The revision ensures that salsa's comparison of the
/// previous [`IndexedFiles`] with the next [`IndexedFiles`] returns false even though they both
/// point to the same underlying hash set.
///
/// # Equality
/// Two [`IndexedFiles`] are only equal if they have the same revision and point to the **same** (identity) hash set.
#[derive(Debug, Clone)]
pub struct IndexedFiles {
    revision: u64,
    files: Arc<std::sync::Mutex<FxHashSet<File>>>,
}

impl IndexedFiles {
    fn new(files: FxHashSet<File>) -> Self {
        Self {
            files: Arc::new(std::sync::Mutex::new(files)),
            revision: 0,
        }
    }

    /// Locks the file index for reading.
    pub fn read(&self) -> IndexedFilesGuard {
        IndexedFilesGuard {
            guard: self.files.lock().unwrap(),
        }
    }
}

impl PartialEq for IndexedFiles {
    fn eq(&self, other: &Self) -> bool {
        self.revision == other.revision && Arc::ptr_eq(&self.files, &other.files)
    }
}

impl Eq for IndexedFiles {}

pub struct IndexedFilesGuard<'a> {
    guard: std::sync::MutexGuard<'a, FxHashSet<File>>,
}

impl Deref for IndexedFilesGuard<'_> {
    type Target = FxHashSet<File>;

    fn deref(&self) -> &Self::Target {
        &self.guard
    }
}

impl<'a> IntoIterator for &'a IndexedFilesGuard<'a> {
    type Item = File;
    type IntoIter = IndexedFilesIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        IndexedFilesIter {
            inner: self.guard.iter(),
        }
    }
}

/// Iterator over the indexed files.
///
/// # Locks
/// Holding on to the iterator locks the file index for reading.
pub struct IndexedFilesIter<'a> {
    inner: std::collections::hash_set::Iter<'a, File>,
}

impl<'a> Iterator for IndexedFilesIter<'a> {
    type Item = File;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().copied()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl FusedIterator for IndexedFilesIter<'_> {}

impl ExactSizeIterator for IndexedFilesIter<'_> {}

/// A mutable view of a package's indexed files.
///
/// Allows in-place mutation of the files without deep cloning the hash set.
/// The changes are written back when the mutable view is dropped or by calling [`Self::set`] manually.
pub struct IndexedFilesMut<'db> {
    db: Option<&'db mut dyn Db>,
    package: Package,
    indexed: IndexedFiles,
    new_revision: u64,
}

impl IndexedFilesMut<'_> {
    pub fn insert(&mut self, file: File) -> bool {
        if self.indexed.files.lock().unwrap().insert(file) {
            self.new_revision += 1;
            true
        } else {
            false
        }
    }

    pub fn remove(&mut self, file: File) -> bool {
        if self.indexed.files.lock().unwrap().remove(&file) {
            self.new_revision += 1;
            true
        } else {
            false
        }
    }

    /// Writes the changes back to the database.
    pub fn set(mut self) {
        self.set_impl();
    }

    fn set_impl(&mut self) {
        let Some(db) = self.db.take() else {
            return;
        };

        if self.indexed.revision != self.new_revision {
            self.package
                .set_file_set(db)
                .to(PackageFiles::indexed(IndexedFiles {
                    revision: self.new_revision,
                    files: self.indexed.files.clone(),
                }));
        }
    }
}

impl Drop for IndexedFilesMut<'_> {
    fn drop(&mut self) {
        self.set_impl();
    }
}
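
The write-back contract in one condensed sketch, mirroring `Package::add_file` from earlier in this diff: mutations go through the view, and dropping it publishes the bumped revision through the Salsa setter.

if let Some(mut index) = PackageFiles::indexed_mut(db, package) {
    index.insert(file);
} // Dropped here: `set_file_set` records the new revision in the database.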
crates/red_knot/tests/file_watching.rs (new file, 726 lines)
@@ -0,0 +1,726 @@
#![allow(clippy::disallowed_names)]

use std::time::Duration;

use anyhow::{anyhow, Context};

use red_knot::db::RootDatabase;
use red_knot::watch;
use red_knot::watch::{directory_watcher, WorkspaceWatcher};
use red_knot::workspace::WorkspaceMetadata;
use red_knot_module_resolver::{resolve_module, ModuleName};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::program::{Program, ProgramSettings, SearchPathSettings, TargetVersion};
use ruff_db::source::source_text;
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_db::Upcast;

struct TestCase {
    db: RootDatabase,
    watcher: Option<WorkspaceWatcher>,
    changes_receiver: crossbeam::channel::Receiver<Vec<watch::ChangeEvent>>,
    temp_dir: tempfile::TempDir,
}

impl TestCase {
    fn workspace_path(&self, relative: impl AsRef<SystemPath>) -> SystemPathBuf {
        SystemPath::absolute(relative, self.db.workspace().root(&self.db))
    }

    fn root_path(&self) -> &SystemPath {
        SystemPath::from_std_path(self.temp_dir.path()).unwrap()
    }

    fn db(&self) -> &RootDatabase {
        &self.db
    }

    fn db_mut(&mut self) -> &mut RootDatabase {
        &mut self.db
    }

    fn stop_watch(&mut self) -> Vec<watch::ChangeEvent> {
        if let Some(watcher) = self.watcher.take() {
            // Give the watcher some time to catch up.
            std::thread::sleep(Duration::from_millis(10));
            watcher.flush();
            watcher.stop();
        }

        let mut all_events = Vec::new();
        for events in &self.changes_receiver {
            all_events.extend(events);
        }

        all_events
    }

    fn update_search_path_settings(
        &mut self,
        f: impl FnOnce(&SearchPathSettings) -> SearchPathSettings,
    ) {
        let program = Program::get(self.db());
        let search_path_settings = program.search_paths(self.db());

        let new_settings = f(search_path_settings);

        program.set_search_paths(&mut self.db).to(new_settings);

        if let Some(watcher) = &mut self.watcher {
            watcher.update(&self.db);
            assert!(!watcher.has_errored_paths());
        }
    }

    fn collect_package_files(&self, path: &SystemPath) -> Vec<File> {
        let package = self.db().workspace().package(self.db(), path).unwrap();
        let files = package.files(self.db());
        let files = files.read();
        let mut collected: Vec<_> = files.into_iter().collect();
        collected.sort_unstable_by_key(|file| file.path(self.db()).as_system_path().unwrap());
        collected
    }

    fn system_file(&self, path: impl AsRef<SystemPath>) -> Option<File> {
        system_path_to_file(self.db(), path.as_ref())
    }
}

fn setup<I, P>(workspace_files: I) -> anyhow::Result<TestCase>
where
    I: IntoIterator<Item = (P, &'static str)>,
    P: AsRef<SystemPath>,
{
    setup_with_search_paths(workspace_files, |_root, workspace_path| {
        SearchPathSettings {
            extra_paths: vec![],
            workspace_root: workspace_path.to_path_buf(),
            custom_typeshed: None,
            site_packages: None,
        }
    })
}

fn setup_with_search_paths<I, P>(
    workspace_files: I,
    create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathSettings,
) -> anyhow::Result<TestCase>
where
    I: IntoIterator<Item = (P, &'static str)>,
    P: AsRef<SystemPath>,
{
    let temp_dir = tempfile::tempdir()?;

    let root_path = SystemPath::from_std_path(temp_dir.path()).ok_or_else(|| {
        anyhow!(
            "Temp directory '{}' is not a valid UTF-8 path.",
            temp_dir.path().display()
        )
    })?;

    let root_path = SystemPathBuf::from_utf8_path_buf(
        root_path
            .as_utf8_path()
            .canonicalize_utf8()
            .with_context(|| "Failed to canonicalize root path.")?,
    );

    let workspace_path = root_path.join("workspace");

    std::fs::create_dir_all(workspace_path.as_std_path())
        .with_context(|| format!("Failed to create workspace directory '{workspace_path}'"))?;

    for (relative_path, content) in workspace_files {
        let relative_path = relative_path.as_ref();
        let absolute_path = workspace_path.join(relative_path);
        if let Some(parent) = absolute_path.parent() {
            std::fs::create_dir_all(parent).with_context(|| {
                format!("Failed to create parent directory for file '{relative_path}'.")
            })?;
        }

        std::fs::write(absolute_path.as_std_path(), content)
            .with_context(|| format!("Failed to write file '{relative_path}'"))?;
    }

    let system = OsSystem::new(&workspace_path);

    let workspace = WorkspaceMetadata::from_path(&workspace_path, &system)?;
    let search_paths = create_search_paths(&root_path, workspace.root());

    for path in search_paths
        .extra_paths
        .iter()
        .chain(search_paths.site_packages.iter())
        .chain(search_paths.custom_typeshed.iter())
    {
        std::fs::create_dir_all(path.as_std_path())
            .with_context(|| format!("Failed to create search path '{path}'"))?;
    }

    let settings = ProgramSettings {
        target_version: TargetVersion::default(),
        search_paths,
    };

    let db = RootDatabase::new(workspace, settings, system);

    let (sender, receiver) = crossbeam::channel::unbounded();
    let watcher = directory_watcher(move |events| sender.send(events).unwrap())
        .with_context(|| "Failed to create directory watcher")?;

    let watcher = WorkspaceWatcher::new(watcher, &db);
    assert!(!watcher.has_errored_paths());

    let test_case = TestCase {
        db,
        changes_receiver: receiver,
        watcher: Some(watcher),
        temp_dir,
    };

    Ok(test_case)
}

#[test]
fn new_file() -> anyhow::Result<()> {
    let mut case = setup([("bar.py", "")])?;
    let bar_path = case.workspace_path("bar.py");
    let bar_file = case.system_file(&bar_path).unwrap();
    let foo_path = case.workspace_path("foo.py");

    assert_eq!(case.system_file(&foo_path), None);
    assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]);

    std::fs::write(foo_path.as_std_path(), "print('Hello')")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    let foo = case.system_file(&foo_path).expect("foo.py to exist.");

    assert_eq!(&case.collect_package_files(&bar_path), &[bar_file, foo]);

    Ok(())
}

#[test]
fn new_ignored_file() -> anyhow::Result<()> {
    let mut case = setup([("bar.py", ""), (".ignore", "foo.py")])?;
    let bar_path = case.workspace_path("bar.py");
    let bar_file = case.system_file(&bar_path).unwrap();
    let foo_path = case.workspace_path("foo.py");

    assert_eq!(case.system_file(&foo_path), None);
    assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]);

    std::fs::write(foo_path.as_std_path(), "print('Hello')")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    assert!(case.system_file(&foo_path).is_some());
    assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]);

    Ok(())
}

#[test]
fn changed_file() -> anyhow::Result<()> {
    let foo_source = "print('Hello, world!')";
    let mut case = setup([("foo.py", foo_source)])?;
    let foo_path = case.workspace_path("foo.py");

    let foo = case
        .system_file(&foo_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;
    assert_eq!(source_text(case.db(), foo).as_str(), foo_source);
    assert_eq!(&case.collect_package_files(&foo_path), &[foo]);

    std::fs::write(foo_path.as_std_path(), "print('Version 2')")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
    assert_eq!(&case.collect_package_files(&foo_path), &[foo]);

    Ok(())
}

#[cfg(unix)]
#[test]
fn changed_metadata() -> anyhow::Result<()> {
    use std::os::unix::fs::PermissionsExt;

    let mut case = setup([("foo.py", "")])?;
    let foo_path = case.workspace_path("foo.py");

    let foo = case
        .system_file(&foo_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;
    assert_eq!(
        foo.permissions(case.db()),
        Some(
            std::fs::metadata(foo_path.as_std_path())
                .unwrap()
                .permissions()
                .mode()
        )
    );

    std::fs::set_permissions(
        foo_path.as_std_path(),
        std::fs::Permissions::from_mode(0o777),
    )
    .with_context(|| "Failed to set file permissions.")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    assert_eq!(
        foo.permissions(case.db()),
        Some(
            std::fs::metadata(foo_path.as_std_path())
                .unwrap()
                .permissions()
                .mode()
        )
    );

    Ok(())
}

#[test]
fn deleted_file() -> anyhow::Result<()> {
    let foo_source = "print('Hello, world!')";
    let mut case = setup([("foo.py", foo_source)])?;
    let foo_path = case.workspace_path("foo.py");

    let foo = case
        .system_file(&foo_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;

    assert!(foo.exists(case.db()));
    assert_eq!(&case.collect_package_files(&foo_path), &[foo]);

    std::fs::remove_file(foo_path.as_std_path())?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    assert!(!foo.exists(case.db()));
    assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);

    Ok(())
}

/// Tests the case where a file is moved from inside a watched directory to a directory that is not watched.
///
/// This matches the behavior of deleting a file in VS code.
#[test]
fn move_file_to_trash() -> anyhow::Result<()> {
    let foo_source = "print('Hello, world!')";
    let mut case = setup([("foo.py", foo_source)])?;
    let foo_path = case.workspace_path("foo.py");

    let trash_path = case.root_path().join(".trash");
    std::fs::create_dir_all(trash_path.as_std_path())?;

    let foo = case
        .system_file(&foo_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;

    assert!(foo.exists(case.db()));
    assert_eq!(&case.collect_package_files(&foo_path), &[foo]);

    std::fs::rename(
        foo_path.as_std_path(),
        trash_path.join("foo.py").as_std_path(),
    )?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    assert!(!foo.exists(case.db()));
    assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);

    Ok(())
}

/// Move a file from a non-workspace (non-watched) location into the workspace.
#[test]
fn move_file_to_workspace() -> anyhow::Result<()> {
    let mut case = setup([("bar.py", "")])?;
    let bar_path = case.workspace_path("bar.py");
    let bar = case.system_file(&bar_path).unwrap();

    let foo_path = case.root_path().join("foo.py");
    std::fs::write(foo_path.as_std_path(), "")?;

    let foo_in_workspace_path = case.workspace_path("foo.py");

    assert!(case.system_file(&foo_path).is_some());
    assert_eq!(&case.collect_package_files(&bar_path), &[bar]);
    assert!(case
        .db()
        .workspace()
        .package(case.db(), &foo_path)
        .is_none());

    std::fs::rename(foo_path.as_std_path(), foo_in_workspace_path.as_std_path())?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    let foo_in_workspace = case
        .system_file(&foo_in_workspace_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;

    assert!(foo_in_workspace.exists(case.db()));
    assert_eq!(
        &case.collect_package_files(&foo_in_workspace_path),
        &[bar, foo_in_workspace]
    );

    Ok(())
}

/// Rename a workspace file.
#[test]
fn rename_file() -> anyhow::Result<()> {
    let mut case = setup([("foo.py", "")])?;
    let foo_path = case.workspace_path("foo.py");
    let bar_path = case.workspace_path("bar.py");

    let foo = case
        .system_file(&foo_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;

    assert_eq!(case.collect_package_files(&foo_path), [foo]);

    std::fs::rename(foo_path.as_std_path(), bar_path.as_std_path())?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    assert!(!foo.exists(case.db()));

    let bar = case
        .system_file(&bar_path)
        .ok_or_else(|| anyhow!("Bar not found"))?;

    assert!(bar.exists(case.db()));
    assert_eq!(case.collect_package_files(&foo_path), [bar]);

    Ok(())
}

#[test]
fn directory_moved_to_workspace() -> anyhow::Result<()> {
    let mut case = setup([("bar.py", "import sub.a")])?;
    let bar = case.system_file(case.workspace_path("bar.py")).unwrap();

    let sub_original_path = case.root_path().join("sub");
    let init_original_path = sub_original_path.join("__init__.py");
    let a_original_path = sub_original_path.join("a.py");

    std::fs::create_dir(sub_original_path.as_std_path())
        .with_context(|| "Failed to create sub directory")?;
    std::fs::write(init_original_path.as_std_path(), "")
        .with_context(|| "Failed to create __init__.py")?;
    std::fs::write(a_original_path.as_std_path(), "").with_context(|| "Failed to create a.py")?;

    let sub_a_module = resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap());

    assert_eq!(sub_a_module, None);
    assert_eq!(
        case.collect_package_files(&case.workspace_path("bar.py")),
        &[bar]
    );

    let sub_new_path = case.workspace_path("sub");
    std::fs::rename(sub_original_path.as_std_path(), sub_new_path.as_std_path())
        .with_context(|| "Failed to move sub directory")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    let init_file = case
        .system_file(sub_new_path.join("__init__.py"))
        .expect("__init__.py to exist");
    let a_file = case
        .system_file(sub_new_path.join("a.py"))
        .expect("a.py to exist");

    // `import sub.a` should now resolve
    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some());

    assert_eq!(
        case.collect_package_files(&case.workspace_path("bar.py")),
        &[bar, init_file, a_file]
    );

    Ok(())
}

#[test]
fn directory_moved_to_trash() -> anyhow::Result<()> {
    let mut case = setup([
        ("bar.py", "import sub.a"),
        ("sub/__init__.py", ""),
        ("sub/a.py", ""),
    ])?;
    let bar = case.system_file(case.workspace_path("bar.py")).unwrap();

    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some());

    let sub_path = case.workspace_path("sub");
    let init_file = case
        .system_file(sub_path.join("__init__.py"))
        .expect("__init__.py to exist");
    let a_file = case
        .system_file(sub_path.join("a.py"))
        .expect("a.py to exist");

    assert_eq!(
        case.collect_package_files(&case.workspace_path("bar.py")),
        &[bar, init_file, a_file]
    );

    std::fs::create_dir(case.root_path().join(".trash").as_std_path())?;
    let trashed_sub = case.root_path().join(".trash/sub");
    std::fs::rename(sub_path.as_std_path(), trashed_sub.as_std_path())
        .with_context(|| "Failed to move the sub directory to the trash")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    // `import sub.a` should no longer resolve
    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());

    assert!(!init_file.exists(case.db()));
    assert!(!a_file.exists(case.db()));

    assert_eq!(
        case.collect_package_files(&case.workspace_path("bar.py")),
        &[bar]
    );

    Ok(())
}

#[test]
fn directory_renamed() -> anyhow::Result<()> {
    let mut case = setup([
        ("bar.py", "import sub.a"),
        ("sub/__init__.py", ""),
        ("sub/a.py", ""),
    ])?;

    let bar = case.system_file(case.workspace_path("bar.py")).unwrap();

    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some());
    assert!(resolve_module(
        case.db().upcast(),
        ModuleName::new_static("foo.baz").unwrap()
    )
    .is_none());

    let sub_path = case.workspace_path("sub");
    let sub_init = case
        .system_file(sub_path.join("__init__.py"))
        .expect("__init__.py to exist");
    let sub_a = case
        .system_file(sub_path.join("a.py"))
        .expect("a.py to exist");

    assert_eq!(
        case.collect_package_files(&sub_path),
        &[bar, sub_init, sub_a]
    );

    let foo_baz = case.workspace_path("foo/baz");

    std::fs::create_dir(case.workspace_path("foo").as_std_path())?;
    std::fs::rename(sub_path.as_std_path(), foo_baz.as_std_path())
        .with_context(|| "Failed to move the sub directory")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    // `import sub.a` should no longer resolve
    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
    // `import foo.baz` should now resolve
    assert!(resolve_module(
        case.db().upcast(),
        ModuleName::new_static("foo.baz").unwrap()
    )
    .is_some());

    // The old paths are no longer tracked
    assert!(!sub_init.exists(case.db()));
    assert!(!sub_a.exists(case.db()));

    let foo_baz_init = case
        .system_file(foo_baz.join("__init__.py"))
        .expect("__init__.py to exist");
    let foo_baz_a = case
        .system_file(foo_baz.join("a.py"))
        .expect("a.py to exist");

    // The new paths are synced
    assert!(foo_baz_init.exists(case.db()));
    assert!(foo_baz_a.exists(case.db()));

    assert_eq!(
        case.collect_package_files(&sub_path),
        &[bar, foo_baz_init, foo_baz_a]
    );

    Ok(())
}

#[test]
fn directory_deleted() -> anyhow::Result<()> {
    let mut case = setup([
        ("bar.py", "import sub.a"),
        ("sub/__init__.py", ""),
        ("sub/a.py", ""),
    ])?;

    let bar = case.system_file(case.workspace_path("bar.py")).unwrap();

    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some());

    let sub_path = case.workspace_path("sub");

    let init_file = case
        .system_file(sub_path.join("__init__.py"))
        .expect("__init__.py to exist");
    let a_file = case
        .system_file(sub_path.join("a.py"))
        .expect("a.py to exist");
    assert_eq!(
        case.collect_package_files(&sub_path),
        &[bar, init_file, a_file]
    );

    std::fs::remove_dir_all(sub_path.as_std_path())
        .with_context(|| "Failed to remove the sub directory")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    // `import sub.a` should no longer resolve
    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());

    assert!(!init_file.exists(case.db()));
    assert!(!a_file.exists(case.db()));
    assert_eq!(case.collect_package_files(&sub_path), &[bar]);

    Ok(())
}

#[test]
fn search_path() -> anyhow::Result<()> {
    let mut case =
        setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: workspace_path.to_path_buf(),
                custom_typeshed: None,
                site_packages: Some(root_path.join("site_packages")),
            }
        })?;

    let site_packages = case.root_path().join("site_packages");

    assert_eq!(
        resolve_module(case.db(), ModuleName::new("a").unwrap()),
        None
    );

    std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
    std::fs::write(site_packages.join("__init__.py").as_std_path(), "")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());
    assert_eq!(
        case.collect_package_files(&case.workspace_path("bar.py")),
        &[case.system_file(case.workspace_path("bar.py")).unwrap()]
    );

    Ok(())
}

#[test]
fn add_search_path() -> anyhow::Result<()> {
    let mut case = setup([("bar.py", "import sub.a")])?;

    let site_packages = case.workspace_path("site_packages");
    std::fs::create_dir_all(site_packages.as_std_path())?;

    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_none());

    // Register site-packages as a search path.
    case.update_search_path_settings(|settings| SearchPathSettings {
        site_packages: Some(site_packages.clone()),
        ..settings.clone()
    });

    std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
    std::fs::write(site_packages.join("__init__.py").as_std_path(), "")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());

    Ok(())
}

#[test]
fn remove_search_path() -> anyhow::Result<()> {
    let mut case =
        setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: workspace_path.to_path_buf(),
                custom_typeshed: None,
                site_packages: Some(root_path.join("site_packages")),
            }
        })?;

    // Remove site packages from the search path settings.
    let site_packages = case.root_path().join("site_packages");
    case.update_search_path_settings(|settings| SearchPathSettings {
        site_packages: None,
        ..settings.clone()
    });

    std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;

    let changes = case.stop_watch();

    assert_eq!(changes, &[]);

    Ok(())
}
@@ -76,6 +76,9 @@ pub(crate) mod tests {
     fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
         self
     }
+    fn upcast_mut(&mut self) -> &mut (dyn ruff_db::Db + 'static) {
+        self
+    }
 }

 impl ruff_db::Db for TestDb {
@@ -1,3 +1,16 @@
+use std::iter::FusedIterator;
+
+pub use db::{Db, Jar};
+pub use module::{Module, ModuleKind};
+pub use module_name::ModuleName;
+pub use resolver::resolve_module;
+use ruff_db::system::SystemPath;
+pub use typeshed::{
+    vendored_typeshed_stubs, TypeshedVersionsParseError, TypeshedVersionsParseErrorKind,
+};
+
+use crate::resolver::{module_resolution_settings, SearchPathIterator};
+
 mod db;
 mod module;
 mod module_name;
@@ -9,10 +22,29 @@ mod typeshed;

 #[cfg(test)]
 mod testing;

-pub use db::{Db, Jar};
-pub use module::{Module, ModuleKind};
-pub use module_name::ModuleName;
-pub use resolver::resolve_module;
-pub use typeshed::{
-    vendored_typeshed_stubs, TypeshedVersionsParseError, TypeshedVersionsParseErrorKind,
-};
+/// Returns an iterator over all search paths pointing to a system path
+pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
+    SystemModuleSearchPathsIter {
+        inner: module_resolution_settings(db).search_paths(db),
+    }
+}
+
+pub struct SystemModuleSearchPathsIter<'db> {
+    inner: SearchPathIterator<'db>,
+}
+
+impl<'db> Iterator for SystemModuleSearchPathsIter<'db> {
+    type Item = &'db SystemPath;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        loop {
+            let next = self.inner.next()?;
+
+            if let Some(system_path) = next.as_system_path() {
+                return Some(system_path);
+            }
+        }
+    }
+}
+
+impl FusedIterator for SystemModuleSearchPathsIter<'_> {}
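
A brief usage sketch (assumed consumer code, not from the diff): the iterator yields only on-disk search paths, which is what lets `Workspace::paths_to_watch` earlier in this diff chain them after the workspace root; vendored typeshed paths are filtered out by the `as_system_path` check.

// Walk the system search paths, e.g. for logging or watcher registration.
for search_path in system_module_search_paths(db) {
    tracing::debug!("module search path: {search_path}");
}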
@@ -5,7 +5,7 @@ use ruff_db::files::File;

 use crate::db::Db;
 use crate::module_name::ModuleName;
-use crate::path::{ModuleResolutionPathBuf, ModuleResolutionPathRef};
+use crate::path::ModuleSearchPath;

 /// Representation of a Python module.
 #[derive(Clone, PartialEq, Eq)]
@@ -17,7 +17,7 @@ impl Module {
     pub(crate) fn new(
         name: ModuleName,
         kind: ModuleKind,
-        search_path: Arc<ModuleResolutionPathBuf>,
+        search_path: ModuleSearchPath,
         file: File,
     ) -> Self {
         Self {
@@ -41,8 +41,8 @@ impl Module {
     }

     /// The search path from which the module was resolved.
-    pub(crate) fn search_path(&self) -> ModuleResolutionPathRef {
-        ModuleResolutionPathRef::from(&*self.inner.search_path)
+    pub(crate) fn search_path(&self) -> &ModuleSearchPath {
+        &self.inner.search_path
     }

     /// Determine whether this module is a single-file module or a package
@@ -77,7 +77,7 @@ impl salsa::DebugWithDb<dyn Db> for Module {
 struct ModuleInner {
     name: ModuleName,
     kind: ModuleKind,
-    search_path: Arc<ModuleResolutionPathBuf>,
+    search_path: ModuleSearchPath,
     file: File,
 }
(File diff suppressed because it is too large.)
@@ -1,7 +1,7 @@
 use std::borrow::Cow;
 use std::iter::FusedIterator;
 use std::sync::Arc;

 use once_cell::sync::Lazy;
 use rustc_hash::{FxBuildHasher, FxHashSet};

 use ruff_db::files::{File, FilePath};
@@ -11,11 +11,9 @@ use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};
 use crate::db::Db;
 use crate::module::{Module, ModuleKind};
 use crate::module_name::ModuleName;
-use crate::path::ModuleResolutionPathBuf;
+use crate::path::{ModulePathBuf, ModuleSearchPath, SearchPathValidationError};
 use crate::state::ResolverState;

-type SearchPathRoot = Arc<ModuleResolutionPathBuf>;
-
 /// Resolves a module name to a module.
 pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
     let interned_name = internal::ModuleNameIngredient::new(db, module_name);
@@ -32,9 +30,8 @@ pub(crate) fn resolve_module_query<'db>(
     db: &'db dyn Db,
     module_name: internal::ModuleNameIngredient<'db>,
 ) -> Option<Module> {
-    let _span = tracing::trace_span!("resolve_module", ?module_name).entered();
-
     let name = module_name.name(db);
+    let _span = tracing::trace_span!("resolve_module", %name).entered();

     let (search_path, module_file, kind) = resolve_name(db, name)?;

@@ -105,16 +102,10 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
 ///
 /// This method also implements the typing spec's [module resolution order].
 ///
-/// TODO(Alex): this method does multiple `.unwrap()` calls when it should really return an error.
-/// Each `.unwrap()` call is a point where we're validating a setting that the user would pass
-/// and transforming it into an internal representation for a validated path.
-/// Rather than panicking if a path fails to validate, we should display an error message to the user
-/// and exit the process with a nonzero exit code.
-/// This validation should probably be done outside of Salsa?
-///
 /// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
-#[salsa::tracked(return_ref)]
-pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSettings {
+fn try_resolve_module_resolution_settings(
+    db: &dyn Db,
+) -> Result<ModuleResolutionSettings, SearchPathValidationError> {
     let program = Program::get(db.upcast());

     let SearchPathSettings {
@@ -132,43 +123,30 @@ pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSetting
         tracing::info!("extra search paths: {extra_paths:?}");
     }

-    let current_directory = db.system().current_directory();
+    let system = db.system();

-    let mut static_search_paths: Vec<_> = extra_paths
-        .iter()
-        .map(|fs_path| {
-            Arc::new(
-                ModuleResolutionPathBuf::extra(SystemPath::absolute(fs_path, current_directory))
-                    .unwrap(),
-            )
-        })
-        .collect();
+    let mut static_search_paths = vec![];

-    static_search_paths.push(Arc::new(
-        ModuleResolutionPathBuf::first_party(SystemPath::absolute(
-            workspace_root,
-            current_directory,
-        ))
-        .unwrap(),
-    ));
+    for path in extra_paths {
+        static_search_paths.push(ModuleSearchPath::extra(system, path.to_owned())?);
+    }

-    static_search_paths.push(Arc::new(custom_typeshed.as_ref().map_or_else(
-        ModuleResolutionPathBuf::vendored_stdlib,
-        |custom| {
-            ModuleResolutionPathBuf::stdlib_from_custom_typeshed_root(&SystemPath::absolute(
-                custom,
-                current_directory,
-            ))
-            .unwrap()
-        },
-    )));
+    static_search_paths.push(ModuleSearchPath::first_party(
+        system,
+        workspace_root.to_owned(),
+    )?);

-    if let Some(path) = site_packages {
-        let site_packages_root = Arc::new(
-            ModuleResolutionPathBuf::site_packages(SystemPath::absolute(path, current_directory))
-                .unwrap(),
-        );
-        static_search_paths.push(site_packages_root);
+    static_search_paths.push(if let Some(custom_typeshed) = custom_typeshed.as_ref() {
+        ModuleSearchPath::custom_stdlib(db, custom_typeshed.to_owned())?
+    } else {
+        ModuleSearchPath::vendored_stdlib()
+    });
+
+    if let Some(site_packages) = site_packages {
+        static_search_paths.push(ModuleSearchPath::site_packages(
+            system,
+            site_packages.to_owned(),
+        )?);
     }

     // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
@@ -193,17 +171,23 @@
         }
     });

-    ModuleResolutionSettings {
+    Ok(ModuleResolutionSettings {
         target_version,
         static_search_paths,
-    }
+    })
 }

+#[salsa::tracked(return_ref)]
+pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSettings {
+    // TODO proper error handling if this returns an error:
+    try_resolve_module_resolution_settings(db).unwrap()
+}
+
 /// Collect all dynamic search paths:
 /// search paths listed in `.pth` files in the `site-packages` directory
 /// due to editable installations of third-party packages.
 #[salsa::tracked(return_ref)]
-pub(crate) fn editable_install_resolution_paths(db: &dyn Db) -> Vec<Arc<ModuleResolutionPathBuf>> {
+pub(crate) fn editable_install_resolution_paths(db: &dyn Db) -> Vec<ModuleSearchPath> {
     // This query needs to be re-executed each time a `.pth` file
     // is added, modified or removed from the `site-packages` directory.
     // However, we don't use Salsa queries to read the source text of `.pth` files;
@@ -258,7 +242,7 @@ pub(crate) fn editable_install_resolution_paths(db: &dyn Db) -> Vec<Arc<ModuleRe
             if existing_paths.insert(Cow::Owned(
                 installation.as_system_path().unwrap().to_path_buf(),
             )) {
-                dynamic_paths.push(Arc::new(installation));
+                dynamic_paths.push(installation);
             }
         }
     }
@@ -274,14 +258,14 @@ pub(crate) fn editable_install_resolution_paths(db: &dyn Db) -> Vec<Arc<ModuleRe
 /// are only calculated lazily.
 ///
 /// [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
-struct SearchPathIterator<'db> {
+pub(crate) struct SearchPathIterator<'db> {
     db: &'db dyn Db,
-    static_paths: std::slice::Iter<'db, SearchPathRoot>,
-    dynamic_paths: Option<std::slice::Iter<'db, SearchPathRoot>>,
+    static_paths: std::slice::Iter<'db, ModuleSearchPath>,
+    dynamic_paths: Option<std::slice::Iter<'db, ModuleSearchPath>>,
 }

 impl<'db> Iterator for SearchPathIterator<'db> {
-    type Item = &'db SearchPathRoot;
+    type Item = &'db ModuleSearchPath;

     fn next(&mut self) -> Option<Self::Item> {
         let SearchPathIterator {
@@ -313,7 +297,7 @@ struct PthFile<'db> {
 impl<'db> PthFile<'db> {
     /// Yield paths in this `.pth` file that appear to represent editable installations,
     /// and should therefore be added as module-resolution search paths.
-    fn editable_installations(&'db self) -> impl Iterator<Item = ModuleResolutionPathBuf> + 'db {
+    fn editable_installations(&'db self) -> impl Iterator<Item = ModuleSearchPath> + 'db {
         let PthFile {
             system,
             path: _,
@@ -335,7 +319,7 @@ impl<'db> PthFile<'db> {
                 return None;
             }
             let possible_editable_install = SystemPath::absolute(line, site_packages);
-            ModuleResolutionPathBuf::editable_installation_root(*system, possible_editable_install)
+            ModuleSearchPath::editable(*system, possible_editable_install).ok()
         })
     }
 }
@@ -407,7 +391,7 @@ pub(crate) struct ModuleResolutionSettings {
     ///
     /// Note that `site-packages` *is included* as a search path in this sequence,
     /// but it is also stored separately so that we're able to find editable installs later.
-    static_search_paths: Vec<SearchPathRoot>,
+    static_search_paths: Vec<ModuleSearchPath>,
 }

 impl ModuleResolutionSettings {
@@ -415,7 +399,7 @@ impl ModuleResolutionSettings {
         self.target_version
     }

-    fn search_paths<'db>(&'db self, db: &'db dyn Db) -> SearchPathIterator<'db> {
+    pub(crate) fn search_paths<'db>(&'db self, db: &'db dyn Db) -> SearchPathIterator<'db> {
         SearchPathIterator {
             db,
             static_paths: self.static_search_paths.iter(),
@@ -442,16 +426,63 @@ pub(crate) mod internal {
     }
 }

+/// Modules that are builtin to the Python interpreter itself.
+///
+/// When these module names are imported, standard module resolution is bypassed:
+/// the module name always resolves to the stdlib module,
+/// even if there's a module of the same name in the workspace root
+/// (which would normally result in the stdlib module being overridden).
+///
+/// TODO(Alex): write a script to generate this list,
+/// similar to what we do in `crates/ruff_python_stdlib/src/sys.rs`
+static BUILTIN_MODULES: Lazy<FxHashSet<&str>> = Lazy::new(|| {
+    const BUILTIN_MODULE_NAMES: &[&str] = &[
+        "_abc",
+        "_ast",
+        "_codecs",
+        "_collections",
+        "_functools",
+        "_imp",
+        "_io",
+        "_locale",
+        "_operator",
+        "_signal",
+        "_sre",
+        "_stat",
+        "_string",
+        "_symtable",
+        "_thread",
+        "_tokenize",
+        "_tracemalloc",
+        "_typing",
+        "_warnings",
+        "_weakref",
+        "atexit",
+        "builtins",
+        "errno",
+        "faulthandler",
+        "gc",
+        "itertools",
+        "marshal",
+        "posix",
+        "pwd",
+        "sys",
+        "time",
+    ];
+    BUILTIN_MODULE_NAMES.iter().copied().collect()
+});
||||
|
||||
/// Given a module name and a list of search paths in which to lookup modules,
|
||||
/// attempt to resolve the module name
|
||||
fn resolve_name(
|
||||
db: &dyn Db,
|
||||
name: &ModuleName,
|
||||
) -> Option<(Arc<ModuleResolutionPathBuf>, File, ModuleKind)> {
|
||||
fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(ModuleSearchPath, File, ModuleKind)> {
|
||||
let resolver_settings = module_resolution_settings(db);
|
||||
let resolver_state = ResolverState::new(db, resolver_settings.target_version());
|
||||
let is_builtin_module = BUILTIN_MODULES.contains(&name.as_str());
|
||||
|
||||
for search_path in resolver_settings.search_paths(db) {
|
||||
if is_builtin_module && !search_path.is_standard_library() {
|
||||
continue;
|
||||
}
|
||||
let mut components = name.components();
|
||||
let module_name = components.next_back()?;
|
||||
|
||||
@@ -503,14 +534,14 @@ fn resolve_name(
|
||||
}
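The `BUILTIN_MODULES` special-casing above mirrors the interpreter's own behavior: modules compiled into CPython are found by the builtin importer before any `sys.path`-based finder runs, so a first-party `sys.py` can never shadow the real `sys`. The runtime analogue is directly observable:

```python
import sys

# Names compiled into the interpreter itself; `sys` is always among them.
print("sys" in sys.builtin_module_names)  # True

# The BuiltinImporter sits ahead of the path-based finder on sys.meta_path,
# which is why builtin modules bypass sys.path entirely.
for finder in sys.meta_path:
    print(finder)
```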

fn resolve_package<'a, 'db, I>(
    module_search_path: &ModuleResolutionPathBuf,
    module_search_path: &ModuleSearchPath,
    components: I,
    resolver_state: &ResolverState<'db>,
) -> Result<ResolvedPackage, PackageKind>
where
    I: Iterator<Item = &'a str>,
{
    let mut package_path = module_search_path.clone();
    let mut package_path = module_search_path.as_module_path().clone();

    // `true` if inside a folder that is a namespace package (has no `__init__.py`).
    // Namespace packages are special because they can be spread across multiple search paths.
@@ -562,7 +593,7 @@ where

#[derive(Debug)]
struct ResolvedPackage {
    path: ModuleResolutionPathBuf,
    path: ModulePathBuf,
    kind: PackageKind,
}

@@ -618,7 +649,7 @@ mod tests {
        );

        assert_eq!("foo", foo_module.name());
        assert_eq!(&src, &foo_module.search_path());
        assert_eq!(&src, foo_module.search_path());
        assert_eq!(ModuleKind::Module, foo_module.kind());

        let expected_foo_path = src.join("foo.py");
@@ -629,6 +660,40 @@ mod tests {
        );
    }

    #[test]
    fn builtins_vendored() {
        let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
            .with_vendored_typeshed()
            .with_src_files(&[("builtins.py", "FOOOO = 42")])
            .build();

        let builtins_module_name = ModuleName::new_static("builtins").unwrap();
        let builtins = resolve_module(&db, builtins_module_name).expect("builtins to resolve");

        assert_eq!(builtins.file().path(&db), &stdlib.join("builtins.pyi"));
    }

    #[test]
    fn builtins_custom() {
        const TYPESHED: MockedTypeshed = MockedTypeshed {
            stdlib_files: &[("builtins.pyi", "def min(a, b): ...")],
            versions: "builtins: 3.8-",
        };

        const SRC: &[FileSpec] = &[("builtins.py", "FOOOO = 42")];

        let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
            .with_src_files(SRC)
            .with_custom_typeshed(TYPESHED)
            .with_target_version(TargetVersion::Py38)
            .build();

        let builtins_module_name = ModuleName::new_static("builtins").unwrap();
        let builtins = resolve_module(&db, builtins_module_name).expect("builtins to resolve");

        assert_eq!(builtins.file().path(&db), &stdlib.join("builtins.pyi"));
    }
    #[test]
    fn stdlib() {
        const TYPESHED: MockedTypeshed = MockedTypeshed {
@@ -649,7 +714,7 @@ mod tests {
            resolve_module(&db, functools_module_name).as_ref()
        );

        assert_eq!(&stdlib, &functools_module.search_path().to_path_buf());
        assert_eq!(&stdlib, functools_module.search_path());
        assert_eq!(ModuleKind::Module, functools_module.kind());

        let expected_functools_path = stdlib.join("functools.pyi");
@@ -701,7 +766,7 @@ mod tests {
        });
        let search_path = resolved_module.search_path();
        assert_eq!(
            &stdlib, &search_path,
            &stdlib, search_path,
            "Search path for {module_name} was unexpectedly {search_path:?}"
        );
        assert!(
@@ -797,7 +862,7 @@ mod tests {
        });
        let search_path = resolved_module.search_path();
        assert_eq!(
            &stdlib, &search_path,
            &stdlib, search_path,
            "Search path for {module_name} was unexpectedly {search_path:?}"
        );
        assert!(
@@ -856,7 +921,7 @@ mod tests {
            Some(&functools_module),
            resolve_module(&db, functools_module_name).as_ref()
        );
        assert_eq!(&src, &functools_module.search_path());
        assert_eq!(&src, functools_module.search_path());
        assert_eq!(ModuleKind::Module, functools_module.kind());
        assert_eq!(&src.join("functools.py"), functools_module.file().path(&db));

@@ -877,7 +942,7 @@ mod tests {
        let pydoc_data_topics = resolve_module(&db, pydoc_data_topics_name).unwrap();

        assert_eq!("pydoc_data.topics", pydoc_data_topics.name());
        assert_eq!(pydoc_data_topics.search_path(), stdlib);
        assert_eq!(pydoc_data_topics.search_path(), &stdlib);
        assert_eq!(
            pydoc_data_topics.file().path(&db),
            &stdlib.join("pydoc_data/topics.pyi")
@@ -894,7 +959,7 @@ mod tests {
        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!("foo", foo_module.name());
        assert_eq!(&src, &foo_module.search_path());
        assert_eq!(&src, foo_module.search_path());
        assert_eq!(&foo_path, foo_module.file().path(&db));

        assert_eq!(
@@ -921,7 +986,7 @@ mod tests {
        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
        let foo_init_path = src.join("foo/__init__.py");

        assert_eq!(&src, &foo_module.search_path());
        assert_eq!(&src, foo_module.search_path());
        assert_eq!(&foo_init_path, foo_module.file().path(&db));
        assert_eq!(ModuleKind::Package, foo_module.kind());

@@ -944,7 +1009,7 @@ mod tests {
        let foo = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
        let foo_stub = src.join("foo.pyi");

        assert_eq!(&src, &foo.search_path());
        assert_eq!(&src, foo.search_path());
        assert_eq!(&foo_stub, foo.file().path(&db));

        assert_eq!(Some(foo), path_to_module(&db, &FilePath::System(foo_stub)));
@@ -968,7 +1033,7 @@ mod tests {
            resolve_module(&db, ModuleName::new_static("foo.bar.baz").unwrap()).unwrap();
        let baz_path = src.join("foo/bar/baz.py");

        assert_eq!(&src, &baz_module.search_path());
        assert_eq!(&src, baz_module.search_path());
        assert_eq!(&baz_path, baz_module.file().path(&db));

        assert_eq!(
@@ -1068,7 +1133,7 @@ mod tests {
        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
        let foo_src_path = src.join("foo.py");

        assert_eq!(&src, &foo_module.search_path());
        assert_eq!(&src, foo_module.search_path());
        assert_eq!(&foo_src_path, foo_module.file().path(&db));
        assert_eq!(
            Some(foo_module),
@@ -1101,7 +1166,8 @@ mod tests {

        std::fs::create_dir_all(src.as_std_path())?;
        std::fs::create_dir_all(site_packages.as_std_path())?;
        std::fs::create_dir_all(custom_typeshed.as_std_path())?;
        std::fs::create_dir_all(custom_typeshed.join("stdlib").as_std_path())?;
        std::fs::File::create(custom_typeshed.join("stdlib/VERSIONS").as_std_path())?;

        std::fs::write(foo.as_std_path(), "")?;
        std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;
@@ -1120,12 +1186,12 @@ mod tests {

        assert_ne!(foo_module, bar_module);

        assert_eq!(&src, &foo_module.search_path());
        assert_eq!(&src, foo_module.search_path());
        assert_eq!(&foo, foo_module.file().path(&db));

        // `foo` and `bar` shouldn't resolve to the same file

        assert_eq!(&src, &bar_module.search_path());
        assert_eq!(&src, bar_module.search_path());
        assert_eq!(&bar, bar_module.file().path(&db));
        assert_eq!(&foo, foo_module.file().path(&db));

@@ -1160,7 +1226,7 @@ mod tests {

        // Delete `bar.py`
        db.memory_file_system().remove_file(&bar_path).unwrap();
        bar.touch(&mut db);
        bar.sync(&mut db);

        // Re-query the foo module. The foo module should still be cached because `bar.py` isn't relevant
        // for resolving `foo`.
@@ -1212,7 +1278,7 @@ mod tests {
        db.memory_file_system().remove_file(&foo_init_path)?;
        db.memory_file_system()
            .remove_directory(foo_init_path.parent().unwrap())?;
        File::touch_path(&mut db, &foo_init_path);
        File::sync_path(&mut db, &foo_init_path);

        let foo_module = resolve_module(&db, foo_module_name).expect("Foo module to resolve");
        assert_eq!(&src.join("foo.py"), foo_module.file().path(&db));
@@ -1241,7 +1307,7 @@ mod tests {
        let stdlib_functools_path = stdlib.join("functools.pyi");

        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        assert_eq!(functools_module.search_path(), stdlib);
        assert_eq!(functools_module.search_path(), &stdlib);
        assert_eq!(
            Some(functools_module.file()),
            system_path_to_file(&db, &stdlib_functools_path)
@@ -1261,7 +1327,7 @@ mod tests {
            &ModuleNameIngredient::new(&db, functools_module_name.clone()),
            &events,
        );
        assert_eq!(functools_module.search_path(), stdlib);
        assert_eq!(functools_module.search_path(), &stdlib);
        assert_eq!(
            Some(functools_module.file()),
            system_path_to_file(&db, &stdlib_functools_path)
@@ -1287,7 +1353,7 @@ mod tests {

        let functools_module_name = ModuleName::new_static("functools").unwrap();
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        assert_eq!(functools_module.search_path(), stdlib);
        assert_eq!(functools_module.search_path(), &stdlib);
        assert_eq!(
            Some(functools_module.file()),
            system_path_to_file(&db, stdlib.join("functools.pyi"))
@@ -1298,7 +1364,7 @@ mod tests {
        let src_functools_path = src.join("functools.py");
        db.write_file(&src_functools_path, "FOO: int").unwrap();
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        assert_eq!(functools_module.search_path(), src);
        assert_eq!(functools_module.search_path(), &src);
        assert_eq!(
            Some(functools_module.file()),
            system_path_to_file(&db, &src_functools_path)
@@ -1329,7 +1395,7 @@ mod tests {
        let src_functools_path = src.join("functools.py");

        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        assert_eq!(functools_module.search_path(), src);
        assert_eq!(functools_module.search_path(), &src);
        assert_eq!(
            Some(functools_module.file()),
            system_path_to_file(&db, &src_functools_path)
@@ -1340,9 +1406,9 @@ mod tests {
        db.memory_file_system()
            .remove_file(&src_functools_path)
            .unwrap();
        File::touch_path(&mut db, &src_functools_path);
        File::sync_path(&mut db, &src_functools_path);
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        assert_eq!(functools_module.search_path(), stdlib);
        assert_eq!(functools_module.search_path(), &stdlib);
        assert_eq!(
            Some(functools_module.file()),
            system_path_to_file(&db, stdlib.join("functools.pyi"))
@@ -1552,7 +1618,7 @@ not_a_directory
        // Salsa file forces a new revision.
        //
        // TODO: get rid of the `.report_untracked_read()` call...
        File::touch_path(&mut db, SystemPath::new("/x/src/foo.py"));
        File::sync_path(&mut db, SystemPath::new("/x/src/foo.py"));

        assert_eq!(resolve_module(&db, foo_module_name.clone()), None);
    }
@@ -1580,8 +1646,8 @@ not_a_directory
            .remove_file(src_path.join("foo.py"))
            .unwrap();
        db.memory_file_system().remove_directory(&src_path).unwrap();
        File::touch_path(&mut db, &src_path.join("foo.py"));
        File::touch_path(&mut db, &src_path);
        File::sync_path(&mut db, &src_path.join("foo.py"));
        File::sync_path(&mut db, &src_path);
        assert_eq!(resolve_module(&db, foo_module_name.clone()), None);
    }

@@ -1592,39 +1658,13 @@ not_a_directory
            .with_site_packages_files(&[("_foo.pth", "/src")])
            .build();

        let search_paths: Vec<&SearchPathRoot> =
        let search_paths: Vec<&ModuleSearchPath> =
            module_resolution_settings(&db).search_paths(&db).collect();

        assert!(search_paths.contains(&&Arc::new(
            ModuleResolutionPathBuf::first_party("/src").unwrap()
        )));

        assert!(!search_paths.contains(&&Arc::new(
            ModuleResolutionPathBuf::editable_installation_root(db.system(), "/src").unwrap()
        )));
    }

    #[test]
    fn no_duplicate_editable_search_paths_added() {
        let TestCase { mut db, .. } = TestCaseBuilder::new()
            .with_site_packages_files(&[("_foo.pth", "/x"), ("_bar.pth", "/x")])
            .build();

        db.write_file("/x/foo.py", "").unwrap();

        let search_paths: Vec<&SearchPathRoot> =
            module_resolution_settings(&db).search_paths(&db).collect();

        let editable_install =
            ModuleResolutionPathBuf::editable_installation_root(db.system(), "/x").unwrap();

        assert_eq!(
            search_paths
                .iter()
                .filter(|path| ****path == editable_install)
                .count(),
            1,
            "Unexpected search paths: {search_paths:?}"
        assert!(
            search_paths.contains(&&ModuleSearchPath::first_party(db.system(), "/src").unwrap())
        );

        assert!(!search_paths.contains(&&ModuleSearchPath::editable(db.system(), "/src").unwrap()));
    }
}

@@ -125,6 +125,8 @@ impl<T> TestCaseBuilder<T> {
        files: impl IntoIterator<Item = FileSpec>,
    ) -> SystemPathBuf {
        let root = location.as_ref().to_path_buf();
        // Make sure to create the directory even if the list of files is empty:
        db.memory_file_system().create_directory_all(&root).unwrap();
        db.write_files(
            files
                .into_iter()

@@ -15,10 +15,10 @@ red_knot_module_resolver = { workspace = true }
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_text_size = { workspace = true }

bitflags = { workspace = true }
countme = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }

crates/red_knot_python_semantic/src/builtins.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
use red_knot_module_resolver::{resolve_module, ModuleName};

use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;

/// Salsa query to get the builtins scope.
///
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
#[salsa::tracked]
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
    let builtins_name =
        ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
    let builtins_file = resolve_module(db.upcast(), builtins_name)?.file();
    Some(global_scope(db, builtins_file))
}
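`builtins_scope` gives the type checker a handle on the scope that Python consults implicitly when a name resolves nowhere else. As a runtime illustration of the behavior being modeled:

```python
# Neither `len` nor `copyright` is defined or imported in this file, yet both
# resolve: name lookup falls through to the `builtins` module as a last step.
print(len("abc"))
print(type(copyright))

import builtins

print(builtins.len is len)  # True: the implicit lookup hits the same module
```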
@@ -3,6 +3,7 @@ use salsa::DbWithJar;
use red_knot_module_resolver::Db as ResolverDb;
use ruff_db::{Db as SourceDb, Upcast};

use crate::builtins::builtins_scope;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::ScopeId;
@@ -28,13 +29,11 @@ pub struct Jar(
    infer_definition_types,
    infer_expression_types,
    infer_scope_types,
    builtins_scope,
);

/// Database giving access to semantic information about a Python program.
pub trait Db:
    SourceDb + ResolverDb + DbWithJar<Jar> + Upcast<dyn SourceDb> + Upcast<dyn ResolverDb>
{
}
pub trait Db: SourceDb + ResolverDb + DbWithJar<Jar> + Upcast<dyn ResolverDb> {}

#[cfg(test)]
pub(crate) mod tests {
@@ -47,7 +46,6 @@ pub(crate) mod tests {
    use ruff_db::system::{DbWithTestSystem, System, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;
    use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
    use ruff_python_trivia::textwrap;

    use super::{Db, Jar};

@@ -89,12 +87,6 @@ pub(crate) mod tests {
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }

        /// Write auto-dedented text to a file.
        pub(crate) fn write_dedented(&mut self, path: &str, content: &str) -> anyhow::Result<()> {
            self.write_file(path, textwrap::dedent(content))?;
            Ok(())
        }
    }

    impl DbWithTestSystem for TestDb {
@@ -125,12 +117,18 @@ pub(crate) mod tests {
        fn upcast(&self) -> &(dyn SourceDb + 'static) {
            self
        }
        fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
            self
        }
    }

    impl Upcast<dyn ResolverDb> for TestDb {
        fn upcast(&self) -> &(dyn ResolverDb + 'static) {
            self
        }
        fn upcast_mut(&mut self) -> &mut (dyn ResolverDb + 'static) {
            self
        }
    }

    impl red_knot_module_resolver::Db for TestDb {}

@@ -6,6 +6,7 @@ pub use db::{Db, Jar};
pub use semantic_model::{HasTy, SemanticModel};

pub mod ast_node_ref;
mod builtins;
mod db;
mod node_key;
pub mod semantic_index;

@@ -31,8 +31,10 @@ pub(super) struct SemanticIndexBuilder<'db> {
    file: File,
    module: &'db ParsedModule,
    scope_stack: Vec<FileScopeId>,
    /// the assignment we're currently visiting
    /// The assignment we're currently visiting.
    current_assignment: Option<CurrentAssignment<'db>>,
    /// Flow states at each `break` in the current loop.
    loop_break_states: Vec<FlowSnapshot>,

    // Semantic Index fields
    scopes: IndexVec<FileScopeId, Scope>,
@@ -54,6 +56,7 @@ impl<'db> SemanticIndexBuilder<'db> {
            module: parsed,
            scope_stack: Vec::new(),
            current_assignment: None,
            loop_break_states: vec![],

            scopes: IndexVec::new(),
            symbol_tables: IndexVec::new(),
@@ -100,9 +103,13 @@ impl<'db> SemanticIndexBuilder<'db> {

        #[allow(unsafe_code)]
        // SAFETY: `node` is guaranteed to be a child of `self.module`
        let scope_id = ScopeId::new(self.db, self.file, file_scope_id, unsafe {
            node.to_kind(self.module.clone())
        });
        let scope_id = ScopeId::new(
            self.db,
            self.file,
            file_scope_id,
            unsafe { node.to_kind(self.module.clone()) },
            countme::Count::default(),
        );

        self.scope_ids_by_scope.push(scope_id);
        self.scopes_by_node.insert(node.node_key(), file_scope_id);
@@ -125,33 +132,38 @@ impl<'db> SemanticIndexBuilder<'db> {
        &mut self.symbol_tables[scope_id]
    }

    fn current_use_def_map(&mut self) -> &mut UseDefMapBuilder<'db> {
    fn current_use_def_map_mut(&mut self) -> &mut UseDefMapBuilder<'db> {
        let scope_id = self.current_scope();
        &mut self.use_def_maps[scope_id]
    }

    fn current_use_def_map(&self) -> &UseDefMapBuilder<'db> {
        let scope_id = self.current_scope();
        &self.use_def_maps[scope_id]
    }

    fn current_ast_ids(&mut self) -> &mut AstIdsBuilder {
        let scope_id = self.current_scope();
        &mut self.ast_ids[scope_id]
    }

    fn flow_snapshot(&mut self) -> FlowSnapshot {
    fn flow_snapshot(&self) -> FlowSnapshot {
        self.current_use_def_map().snapshot()
    }

    fn flow_restore(&mut self, state: FlowSnapshot) {
        self.current_use_def_map().restore(state);
        self.current_use_def_map_mut().restore(state);
    }

    fn flow_merge(&mut self, state: &FlowSnapshot) {
        self.current_use_def_map().merge(state);
        self.current_use_def_map_mut().merge(state);
    }

    fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
        let symbol_table = self.current_symbol_table();
        let (symbol_id, added) = symbol_table.add_or_update_symbol(name, flags);
        if added {
            let use_def_map = self.current_use_def_map();
            let use_def_map = self.current_use_def_map_mut();
            use_def_map.add_symbol(symbol_id);
        }
        symbol_id
@@ -172,11 +184,12 @@ impl<'db> SemanticIndexBuilder<'db> {
            unsafe {
                definition_node.into_owned(self.module.clone())
            },
            countme::Count::default(),
        );

        self.definitions_by_node
            .insert(definition_node.key(), definition);
        self.current_use_def_map()
        self.current_use_def_map_mut()
            .record_definition(symbol, definition);

        definition
@@ -193,6 +206,7 @@ impl<'db> SemanticIndexBuilder<'db> {
            unsafe {
                AstNodeRef::new(self.module.clone(), expression_node)
            },
            countme::Count::default(),
        );
        self.expressions_by_node
            .insert(expression_node.into(), expression);
@@ -416,6 +430,33 @@ where
                    self.flow_merge(&pre_if);
                }
            }
            ast::Stmt::While(node) => {
                self.visit_expr(&node.test);

                let pre_loop = self.flow_snapshot();

                // Save aside any break states from an outer loop
                let saved_break_states = std::mem::take(&mut self.loop_break_states);
                self.visit_body(&node.body);
                // Get the break states from the body of this loop, and restore the saved outer
                // ones.
                let break_states =
                    std::mem::replace(&mut self.loop_break_states, saved_break_states);

                // We may execute the `else` clause without ever executing the body, so merge in
                // the pre-loop state before visiting `else`.
                self.flow_merge(&pre_loop);
                self.visit_body(&node.orelse);

                // Breaking out of a while loop bypasses the `else` clause, so merge in the break
                // states after visiting `else`.
                for break_state in break_states {
                    self.flow_merge(&break_state);
                }
            }
            ast::Stmt::Break(_) => {
                self.loop_break_states.push(self.flow_snapshot());
            }
            _ => {
                walk_stmt(self, stmt);
            }
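The snapshot/merge choreography above encodes Python's `while`/`else` semantics: the `else` suite runs whenever the loop exits without `break` (including after zero iterations), and a `break` jumps past `else` entirely. A small self-contained demonstration of the behavior the flow analysis has to reproduce:

```python
def run(iterations: int, break_at: int | None) -> str:
    i = 0
    while i < iterations:
        if break_at is not None and i == break_at:
            result = "broke out"
            break
        i += 1
    else:
        # Runs on normal loop exit -- including when the body never executed --
        # but never after a break.
        result = "else ran"
    return result


print(run(3, None))  # else ran
print(run(0, None))  # else ran (body never executed)
print(run(3, 1))     # broke out (else skipped)
```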
@@ -460,7 +501,7 @@ where

        if flags.contains(SymbolFlags::IS_USED) {
            let use_id = self.current_ast_ids().record_use(expr);
            self.current_use_def_map().record_use(symbol, use_id);
            self.current_use_def_map_mut().record_use(symbol, use_id);
        }

        walk_expr(self, expr);

@@ -24,6 +24,9 @@ pub struct Definition<'db> {
    #[no_eq]
    #[return_ref]
    pub(crate) node: DefinitionKind,

    #[no_eq]
    count: countme::Count<Definition<'static>>,
}

impl<'db> Definition<'db> {

@@ -22,6 +22,9 @@ pub(crate) struct Expression<'db> {
    #[no_eq]
    #[return_ref]
    pub(crate) node: AstNodeRef<ast::Expr>,

    #[no_eq]
    count: countme::Count<Expression<'static>>,
}

impl<'db> Expression<'db> {

@@ -100,6 +100,9 @@ pub struct ScopeId<'db> {
    #[no_eq]
    #[return_ref]
    pub node: NodeWithScopeKind,

    #[no_eq]
    count: countme::Count<ScopeId<'static>>,
}

impl<'db> ScopeId<'db> {

@@ -194,6 +194,7 @@ pub(super) struct FlowSnapshot {
    definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
}

#[derive(Debug)]
pub(super) struct UseDefMapBuilder<'db> {
    /// Definition IDs array for `definitions_by_use` and `definitions_by_symbol` to slice into.
    all_definitions: Vec<Definition<'db>>,

@@ -1,6 +1,7 @@
use ruff_db::files::File;
use ruff_python_ast::name::Name;

use crate::builtins::builtins_scope;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
use crate::semantic_index::{global_scope, symbol_table, use_def_map};
@@ -47,6 +48,15 @@ pub(crate) fn global_symbol_ty_by_name<'db>(db: &'db dyn Db, file: File, name: &
    symbol_ty_by_name(db, global_scope(db, file), name)
}

/// Shorthand for `symbol_ty` that looks up a symbol in the builtins.
///
/// Returns `None` if the builtins module isn't available for some reason.
pub(crate) fn builtins_symbol_ty_by_name<'db>(db: &'db dyn Db, name: &str) -> Type<'db> {
    builtins_scope(db)
        .map(|builtins| symbol_ty_by_name(db, builtins, name))
        .unwrap_or(Type::Unbound)
}

/// Infer the type of a [`Definition`].
pub(crate) fn definition_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
    let inference = infer_definition_types(db, definition);
@@ -229,6 +239,12 @@ pub struct UnionType<'db> {
    elements: FxOrderSet<Type<'db>>,
}

impl<'db> UnionType<'db> {
    pub fn contains(&self, db: &'db dyn Db, ty: Type<'db>) -> bool {
        self.elements(db).contains(&ty)
    }
}

struct UnionTypeBuilder<'db> {
    elements: FxOrderSet<Type<'db>>,
    db: &'db dyn Db,

@@ -29,13 +29,9 @@ impl Display for DisplayType<'_> {
                write!(f, "<module '{:?}'>", file.path(self.db.upcast()))
            }
            // TODO functions and classes should display using a fully qualified name
            Type::Class(class) => {
                f.write_str("Literal[")?;
                f.write_str(&class.name(self.db))?;
                f.write_str("]")
            }
            Type::Class(class) => write!(f, "Literal[{}]", class.name(self.db)),
            Type::Instance(class) => f.write_str(&class.name(self.db)),
            Type::Function(function) => f.write_str(&function.name(self.db)),
            Type::Function(function) => write!(f, "Literal[{}]", function.name(self.db)),
            Type::Union(union) => union.display(self.db).fmt(f),
            Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
            Type::IntLiteral(n) => write!(f, "Literal[{n}]"),

@@ -29,15 +29,16 @@ use ruff_db::parsed::parsed_module;
use ruff_python_ast as ast;
use ruff_python_ast::{ExprContext, TypeParams};

use crate::builtins::builtins_scope;
use crate::semantic_index::ast_ids::{HasScopedAstId, HasScopedUseId, ScopedExpressionId};
use crate::semantic_index::definition::{Definition, DefinitionKind, DefinitionNodeKey};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::NodeWithScopeKind;
use crate::semantic_index::symbol::{NodeWithScopeRef, ScopeId};
use crate::semantic_index::symbol::{FileScopeId, NodeWithScopeKind, NodeWithScopeRef, ScopeId};
use crate::semantic_index::SemanticIndex;
use crate::types::{
    definitions_ty, global_symbol_ty_by_name, ClassType, FunctionType, Name, Type, UnionTypeBuilder,
    builtins_symbol_ty_by_name, definitions_ty, global_symbol_ty_by_name, ClassType, FunctionType,
    Name, Type, UnionTypeBuilder,
};
use crate::Db;

@@ -46,9 +47,9 @@ use crate::Db;
/// scope.
#[salsa::tracked(return_ref)]
pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> TypeInference<'db> {
    let _span = tracing::trace_span!("infer_scope_types", ?scope).entered();

    let file = scope.file(db);
    let _span = tracing::trace_span!("infer_scope_types", ?scope, ?file).entered();

    // Using the index here is fine because the code below depends on the AST anyway.
    // The isolation of the query is by the return inferred types.
    let index = semantic_index(db, file);
@@ -63,9 +64,10 @@ pub(crate) fn infer_definition_types<'db>(
    db: &'db dyn Db,
    definition: Definition<'db>,
) -> TypeInference<'db> {
    let _span = tracing::trace_span!("infer_definition_types", ?definition).entered();
    let file = definition.file(db);
    let _span = tracing::trace_span!("infer_definition_types", ?definition, ?file,).entered();

    let index = semantic_index(db, definition.file(db));
    let index = semantic_index(db, file);

    TypeInferenceBuilder::new(db, InferenceRegion::Definition(definition), index).finish()
}
@@ -80,9 +82,10 @@ pub(crate) fn infer_expression_types<'db>(
    db: &'db dyn Db,
    expression: Expression<'db>,
) -> TypeInference<'db> {
    let _span = tracing::trace_span!("infer_expression_types", ?expression).entered();
    let file = expression.file(db);
    let _span = tracing::trace_span!("infer_expression_types", ?expression, ?file).entered();

    let index = semantic_index(db, expression.file(db));
    let index = semantic_index(db, file);

    TypeInferenceBuilder::new(db, InferenceRegion::Expression(expression), index).finish()
}
@@ -311,6 +314,7 @@ impl<'db> TypeInferenceBuilder<'db> {
            ast::Stmt::For(for_statement) => self.infer_for_statement(for_statement),
            ast::Stmt::Import(import) => self.infer_import_statement(import),
            ast::Stmt::ImportFrom(import) => self.infer_import_from_statement(import),
            ast::Stmt::Return(ret) => self.infer_return_statement(ret),
            ast::Stmt::Break(_) | ast::Stmt::Continue(_) | ast::Stmt::Pass(_) => {
                // No-op
            }
@@ -548,6 +552,12 @@ impl<'db> TypeInferenceBuilder<'db> {
        self.types.definitions.insert(definition, ty);
    }

    fn infer_return_statement(&mut self, ret: &ast::StmtReturn) {
        if let Some(value) = &ret.value {
            self.infer_expression(value);
        }
    }

    fn module_ty_from_name(&self, name: &ast::Identifier) -> Type<'db> {
        let module_name = ModuleName::new(&name.id);
        let module =
@@ -684,7 +694,18 @@ impl<'db> TypeInferenceBuilder<'db> {
            let symbol = symbols.symbol_by_name(id).unwrap();
            if !symbol.is_defined() || !self.scope.is_function_like(self.db) {
                // implicit global
                Some(global_symbol_ty_by_name(self.db, self.file, id))
                let mut unbound_ty = if file_scope_id == FileScopeId::global() {
                    Type::Unbound
                } else {
                    global_symbol_ty_by_name(self.db, self.file, id)
                };
                // fallback to builtins
                if matches!(unbound_ty, Type::Unbound)
                    && Some(self.scope) != builtins_scope(self.db)
                {
                    unbound_ty = builtins_symbol_ty_by_name(self.db, id);
                }
                Some(unbound_ty)
            } else {
                Some(Type::Unbound)
            }
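The new branch above implements the classic lookup ladder for names that aren't bound locally: inside a function-like scope an unresolved name falls back to the module's globals, and anything still unresolved falls back to builtins; at module level the global step is the current scope itself, so only the builtins fallback remains. In Python terms:

```python
x = "module-level"


def f():
    print(x)        # not bound in f, falls back to the global scope
    print(abs(-1))  # not bound anywhere in this file, falls back to builtins


f()

try:
    totally_undefined  # unbound at module level and absent from builtins
except NameError as err:
    print(err)
```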
@@ -790,6 +811,7 @@ mod tests {
    use ruff_db::testing::assert_function_query_was_not_run;
    use ruff_python_ast::name::Name;

    use crate::builtins::builtins_scope;
    use crate::db::tests::TestDb;
    use crate::semantic_index::definition::Definition;
    use crate::semantic_index::semantic_index;
@@ -817,6 +839,23 @@ mod tests {
        db
    }

    fn setup_db_with_custom_typeshed(typeshed: &str) -> TestDb {
        let db = TestDb::new();

        Program::new(
            &db,
            TargetVersion::Py38,
            SearchPathSettings {
                extra_paths: Vec::new(),
                workspace_root: SystemPathBuf::from("/src"),
                site_packages: None,
                custom_typeshed: Some(SystemPathBuf::from(typeshed)),
            },
        );

        db
    }

    fn assert_public_ty(db: &TestDb, file_name: &str, symbol_name: &str, expected: &str) {
        let file = system_path_to_file(db, file_name).expect("Expected file to exist.");

@@ -1368,6 +1407,153 @@ mod tests {
        Ok(())
    }

    #[test]
    fn builtin_symbol_vendored_stdlib() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_file("/src/a.py", "c = copyright")?;

        assert_public_ty(&db, "/src/a.py", "c", "Literal[copyright]");

        Ok(())
    }

    #[test]
    fn builtin_symbol_custom_stdlib() -> anyhow::Result<()> {
        let mut db = setup_db_with_custom_typeshed("/typeshed");

        db.write_files([
            ("/src/a.py", "c = copyright"),
            (
                "/typeshed/stdlib/builtins.pyi",
                "def copyright() -> None: ...",
            ),
            ("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
        ])?;

        assert_public_ty(&db, "/src/a.py", "c", "Literal[copyright]");

        Ok(())
    }

    #[test]
    fn unknown_global_later_defined() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_file("/src/a.py", "x = foo; foo = 1")?;

        assert_public_ty(&db, "/src/a.py", "x", "Unbound");

        Ok(())
    }

    #[test]
    fn unknown_builtin_later_defined() -> anyhow::Result<()> {
        let mut db = setup_db_with_custom_typeshed("/typeshed");

        db.write_files([
            ("/src/a.py", "x = foo"),
            ("/typeshed/stdlib/builtins.pyi", "foo = bar; bar = 1"),
            ("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
        ])?;

        assert_public_ty(&db, "/src/a.py", "x", "Unbound");

        Ok(())
    }

    #[test]
    fn import_builtins() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_file("/src/a.py", "import builtins; x = builtins.copyright")?;

        assert_public_ty(&db, "/src/a.py", "x", "Literal[copyright]");
        // imported builtins module is the same file as the implicit builtins
        let file = system_path_to_file(&db, "/src/a.py").expect("Expected file to exist.");
        let builtins_ty = global_symbol_ty_by_name(&db, file, "builtins");
        let Type::Module(builtins_file) = builtins_ty else {
            panic!("Builtins are not a module?");
        };
        let implicit_builtins_file = builtins_scope(&db).expect("builtins to exist").file(&db);
        assert_eq!(builtins_file, implicit_builtins_file);

        Ok(())
    }

    #[test]
    fn while_loop() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_dedented(
            "/src/a.py",
            "
            x = 1
            while flag:
                x = 2
            ",
        )?;

        // body of while loop may or may not run
        assert_public_ty(&db, "/src/a.py", "x", "Literal[1, 2]");

        Ok(())
    }

    #[test]
    fn while_else_no_break() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_dedented(
            "/src/a.py",
            "
            x = 1
            while flag:
                x = 2
            else:
                y = x
                x = 3
            ",
        )?;

        // body of the loop can't break, so we can get else, or body+else
        // x must be 3, because else will always run
        assert_public_ty(&db, "/src/a.py", "x", "Literal[3]");
        // y can be 1 or 2 because else always runs, and body may or may not run first
        assert_public_ty(&db, "/src/a.py", "y", "Literal[1, 2]");

        Ok(())
    }

    #[test]
    fn while_else_may_break() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_dedented(
            "/src/a.py",
            "
            x = 1
            y = 0
            while flag:
                x = 2
                if flag2:
                    y = 4
                    break
            else:
                y = x
                x = 3
            ",
        )?;

        // body may break: we can get just-body (only if we break), just-else, or body+else
        assert_public_ty(&db, "/src/a.py", "x", "Literal[2, 3]");
        // if just-body were possible without the break, then 0 would be possible for y
        // 1 and 2 both being possible for y shows that we can hit else with or without body
        assert_public_ty(&db, "/src/a.py", "y", "Literal[1, 2, 4]");

        Ok(())
    }

    fn first_public_def<'db>(db: &'db TestDb, file: File, name: &str) -> Definition<'db> {
        let scope = global_scope(db, file);
        *use_def_map(db, scope)

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.5.3"
version = "0.5.5"
publish = true
authors = { workspace = true }
edition = { workspace = true }

@@ -36,6 +36,8 @@ class Bar:
            return 48472783
        if arg < 10:
            return 20
        while arg < 50:
            arg += 1
        return 36673
"#;

@@ -137,12 +139,12 @@ fn benchmark_incremental(criterion: &mut Criterion) {

            case.fs
                .write_file(
                    SystemPath::new("/src/foo.py"),
                    SystemPath::new("/src/bar.py"),
                    format!("{BAR_CODE}\n# A comment\n"),
                )
                .unwrap();

            case.bar.touch(&mut case.db);
            case.bar.sync(&mut case.db);
            case
        },
        |case| {
@@ -15,6 +15,7 @@ ruff_cache = { workspace = true, optional = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }

@@ -5,8 +5,8 @@ use dashmap::mapref::entry::Entry;

use crate::file_revision::FileRevision;
use crate::files::private::FileStatus;
use crate::system::SystemPath;
use crate::vendored::VendoredPath;
use crate::system::{SystemPath, SystemPathBuf};
use crate::vendored::{VendoredPath, VendoredPathBuf};
use crate::{Db, FxDashMap};
pub use path::FilePath;
use ruff_notebook::{Notebook, NotebookError};
@@ -24,10 +24,7 @@ pub fn system_path_to_file(db: &dyn Db, path: impl AsRef<SystemPath>) -> Option<
    // exist anymore so that Salsa can track that the caller of this function depends on the existence of
    // that file. This function filters out files that don't exist, but Salsa will know that it must
    // re-run the calling query whenever the `file`'s status changes (because of the `.status` call here).
    match file.status(db) {
        FileStatus::Exists => Some(file),
        FileStatus::Deleted => None,
    }
    file.exists(db).then_some(file)
}

/// Interns a vendored file path. Returns `Some` if the vendored file for `path` exists and `None` otherwise.
@@ -44,11 +41,14 @@ pub struct Files {

#[derive(Default)]
struct FilesInner {
    /// Lookup table that maps [`FilePath`]s to salsa interned [`File`] instances.
    /// Lookup table that maps [`SystemPathBuf`]s to salsa interned [`File`] instances.
    ///
    /// The map also stores entries for files that don't exist on the file system. This is necessary
    /// so that queries that depend on the existence of a file are re-executed when the file is created.
    files_by_path: FxDashMap<FilePath, File>,
    system_by_path: FxDashMap<SystemPathBuf, File>,

    /// Lookup table that maps vendored files to the salsa [`File`] ingredients.
    vendored_by_path: FxDashMap<VendoredPathBuf, File>,
}

impl Files {
@@ -61,11 +61,10 @@ impl Files {
    #[tracing::instrument(level = "trace", skip(self, db), ret)]
    fn system(&self, db: &dyn Db, path: &SystemPath) -> File {
        let absolute = SystemPath::absolute(path, db.system().current_directory());
        let absolute = FilePath::System(absolute);

        *self
            .inner
            .files_by_path
            .system_by_path
            .entry(absolute.clone())
            .or_insert_with(|| {
                let metadata = db.system().path_metadata(path);
@@ -73,7 +72,7 @@ impl Files {
                match metadata {
                    Ok(metadata) if metadata.file_type().is_file() => File::new(
                        db,
                        absolute,
                        FilePath::System(absolute),
                        metadata.permissions(),
                        metadata.revision(),
                        FileStatus::Exists,
@@ -81,7 +80,7 @@ impl Files {
                    ),
                    _ => File::new(
                        db,
                        absolute,
                        FilePath::System(absolute),
                        None,
                        FileRevision::zero(),
                        FileStatus::Deleted,
@@ -92,11 +91,11 @@ impl Files {
    }

    /// Tries to look up the file for the given system path, returns `None` if no such file exists yet
    fn try_system(&self, db: &dyn Db, path: &SystemPath) -> Option<File> {
    pub fn try_system(&self, db: &dyn Db, path: &SystemPath) -> Option<File> {
        let absolute = SystemPath::absolute(path, db.system().current_directory());
        self.inner
            .files_by_path
            .get(&FilePath::System(absolute))
            .system_by_path
            .get(&absolute)
            .map(|entry| *entry.value())
    }

@@ -104,11 +103,7 @@ impl Files {
    /// exists and `None` otherwise.
    #[tracing::instrument(level = "trace", skip(self, db), ret)]
    fn vendored(&self, db: &dyn Db, path: &VendoredPath) -> Option<File> {
        let file = match self
            .inner
            .files_by_path
            .entry(FilePath::Vendored(path.to_path_buf()))
        {
        let file = match self.inner.vendored_by_path.entry(path.to_path_buf()) {
            Entry::Occupied(entry) => *entry.get(),
            Entry::Vacant(entry) => {
                let metadata = db.vendored().metadata(path).ok()?;
@@ -131,6 +126,44 @@ impl Files {
        Some(file)
    }

    /// Refreshes the state of all known files under `path` recursively.
    ///
    /// The most common use case is to update the [`Files`] state after removing or moving a directory.
    ///
    /// # Performance
    /// Refreshing the state of every file under `path` is expensive. It requires iterating over all known files
    /// and making system calls to get the latest status of each file in `path`.
    /// That's why [`File::sync_path`] and [`File::sync_path`] is preferred if it is known that the path is a file.
    #[tracing::instrument(level = "debug", skip(db))]
    pub fn sync_recursively(db: &mut dyn Db, path: &SystemPath) {
        let path = SystemPath::absolute(path, db.system().current_directory());

        let inner = Arc::clone(&db.files().inner);
        for entry in inner.system_by_path.iter_mut() {
            if entry.key().starts_with(&path) {
                let file = entry.value();
                file.sync(db);
            }
        }
    }

    /// Refreshes the state of all known files.
    ///
    /// This is a last-resort method that should only be used when more granular updates aren't possible
    /// (for example, because the file watcher failed to observe some changes). Use responsibly!
    ///
    /// # Performance
    /// Refreshing the state of every file is expensive. It requires iterating over all known files and
    /// issuing a system call to get the latest status of each file.
    #[tracing::instrument(level = "debug", skip(db))]
    pub fn sync_all(db: &mut dyn Db) {
        let inner = Arc::clone(&db.files().inner);
        for entry in inner.system_by_path.iter_mut() {
            let file = entry.value();
            file.sync(db);
        }
    }

    /// Creates a salsa like snapshot. The instances share
    /// the same path-to-file mapping.
    pub fn snapshot(&self) -> Self {
@@ -144,7 +177,7 @@ impl std::fmt::Debug for Files {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut map = f.debug_map();

        for entry in self.inner.files_by_path.iter() {
        for entry in self.inner.system_by_path.iter() {
            map.entry(entry.key(), entry.value());
        }
        map.finish()
@@ -219,18 +252,20 @@ impl File {
    }

    /// Refreshes the file metadata by querying the file system if needed.
    /// TODO: The API should instead take all observed changes from the file system directly
    /// and then apply the VfsFile status accordingly. But for now, this is sufficient.
    pub fn touch_path(db: &mut dyn Db, path: &SystemPath) {
        Self::touch_impl(db, path, None);
    #[tracing::instrument(level = "debug", skip(db))]
    pub fn sync_path(db: &mut dyn Db, path: &SystemPath) {
        let absolute = SystemPath::absolute(path, db.system().current_directory());
        Self::sync_impl(db, &absolute, None);
    }

    pub fn touch(self, db: &mut dyn Db) {
    /// Syncs the [`File`]'s state with the state of the file on the system.
    #[tracing::instrument(level = "debug", skip(db))]
    pub fn sync(self, db: &mut dyn Db) {
        let path = self.path(db).clone();

        match path {
            FilePath::System(system) => {
                Self::touch_impl(db, &system, Some(self));
                Self::sync_impl(db, &system, Some(self));
            }
            FilePath::Vendored(_) => {
                // Readonly, can never be out of date.
@@ -238,23 +273,31 @@ impl File {
        }
    }

    /// Private method providing the implementation for [`Self::touch_path`] and [`Self::touch`].
    fn touch_impl(db: &mut dyn Db, path: &SystemPath, file: Option<File>) {
        let metadata = db.system().path_metadata(path);

        let (status, revision) = match metadata {
            Ok(metadata) if metadata.file_type().is_file() => {
                (FileStatus::Exists, metadata.revision())
            }
            _ => (FileStatus::Deleted, FileRevision::zero()),
        };

    /// Private method providing the implementation for [`Self::sync_path`] and [`Self::sync_path`].
    fn sync_impl(db: &mut dyn Db, path: &SystemPath, file: Option<File>) {
        let Some(file) = file.or_else(|| db.files().try_system(db, path)) else {
            return;
        };

        let metadata = db.system().path_metadata(path);

        let (status, revision, permission) = match metadata {
            Ok(metadata) if metadata.file_type().is_file() => (
                FileStatus::Exists,
                metadata.revision(),
                metadata.permissions(),
            ),
            _ => (FileStatus::Deleted, FileRevision::zero(), None),
        };

        file.set_status(db).to(status);
        file.set_revision(db).to(revision);
        file.set_permissions(db).to(permission);
    }

    /// Returns `true` if the file exists.
    pub fn exists(self, db: &dyn Db) -> bool {
        self.status(db) == FileStatus::Exists
    }
}
@@ -19,7 +19,8 @@ pub mod system;
pub mod testing;
pub mod vendored;

pub(crate) type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxDashSet<K> = dashmap::DashSet<K, BuildHasherDefault<FxHasher>>;

#[salsa::jar(db=Db)]
pub struct Jar(File, Program, source_text, line_index, parsed_module);
@@ -34,6 +35,7 @@ pub trait Db: DbWithJar<Jar> {
/// Trait for upcasting a reference to a base trait object.
pub trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
    fn upcast_mut(&mut self) -> &mut T;
}

#[cfg(test)]

@@ -65,7 +65,7 @@ impl std::fmt::Debug for TargetVersion {
}

/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug)]
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct SearchPathSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
@@ -75,8 +75,8 @@ pub struct SearchPathSettings {
    /// The root of the workspace, used for finding first-party modules.
    pub workspace_root: SystemPathBuf,

    /// Optional (already validated) path to standard-library typeshed stubs.
    /// If this is not provided, we will fallback to our vendored typeshed stubs
    /// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
    /// If this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,
    /// bundled as a zip file in the binary
    pub custom_typeshed: Option<SystemPathBuf>,

@@ -9,7 +9,9 @@ use walk_directory::WalkDirectoryBuilder;

use crate::file_revision::FileRevision;

pub use self::path::{SystemPath, SystemPathBuf};
pub use self::path::{
    deduplicate_nested_paths, DeduplicatedNestedPathsIter, SystemPath, SystemPathBuf,
};

mod memory_fs;
#[cfg(feature = "os")]

@@ -563,3 +563,60 @@ impl ruff_cache::CacheKey for SystemPathBuf {
        self.as_path().cache_key(hasher);
    }
}

/// Deduplicates identical paths and removes nested paths.
///
/// # Examples
/// ```rust
/// use ruff_db::system::{SystemPath, deduplicate_nested_paths};///
///
/// let paths = vec![SystemPath::new("/a/b/c"), SystemPath::new("/a/b"), SystemPath::new("/a/beta"), SystemPath::new("/a/b/c")];
/// assert_eq!(deduplicate_nested_paths(paths).collect::<Vec<_>>(), &[SystemPath::new("/a/b"), SystemPath::new("/a/beta")]);
/// ```
pub fn deduplicate_nested_paths<'a, I>(paths: I) -> DeduplicatedNestedPathsIter<'a>
where
    I: IntoIterator<Item = &'a SystemPath>,
{
    DeduplicatedNestedPathsIter::new(paths)
}

pub struct DeduplicatedNestedPathsIter<'a> {
    inner: std::vec::IntoIter<&'a SystemPath>,
    next: Option<&'a SystemPath>,
}

impl<'a> DeduplicatedNestedPathsIter<'a> {
    fn new<I>(paths: I) -> Self
    where
        I: IntoIterator<Item = &'a SystemPath>,
    {
        let mut paths = paths.into_iter().collect::<Vec<_>>();
        // Sort the path to ensure that e.g. `/a/b/c`, comes right after `/a/b`.
        paths.sort_unstable();

        let mut iter = paths.into_iter();

        Self {
            next: iter.next(),
            inner: iter,
        }
    }
}

impl<'a> Iterator for DeduplicatedNestedPathsIter<'a> {
    type Item = &'a SystemPath;

    fn next(&mut self) -> Option<Self::Item> {
        let current = self.next.take()?;

        for next in self.inner.by_ref() {
            // Skip all paths that have the same prefix as the current path
            if !next.starts_with(current) {
                self.next = Some(next);
                break;
            }
        }

        Some(current)
    }
}
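A compact Python model of the same sort-then-skip-prefixes idea used by `DeduplicatedNestedPathsIter` (sorting places `/a/b` immediately before `/a/b/c`, so one linear pass drops nested entries, while `set()` handles exact duplicates):

```python
from pathlib import PurePosixPath


def deduplicate_nested_paths(paths: list[str]) -> list[str]:
    kept: list[str] = []
    for path in sorted(set(paths)):
        if kept and PurePosixPath(path).is_relative_to(kept[-1]):
            continue  # nested under the previously kept path
        kept.append(path)
    return kept


print(deduplicate_nested_paths(["/a/b/c", "/a/b", "/a/beta", "/a/b/c"]))
# ['/a/b', '/a/beta'] -- matching the Rust doctest above
```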
@@ -1,4 +1,5 @@
use ruff_notebook::{Notebook, NotebookError};
use ruff_python_trivia::textwrap;

use crate::files::File;
use crate::system::{DirectoryEntry, MemoryFileSystem, Metadata, Result, System, SystemPath};
@@ -144,12 +145,18 @@ pub trait DbWithTestSystem: Db + Sized {
            .write_file(path, content);

        if result.is_ok() {
            File::touch_path(self, path);
            File::sync_path(self, path);
        }

        result
    }

    /// Writes auto-dedented text to a file.
    fn write_dedented(&mut self, path: &str, content: &str) -> crate::system::Result<()> {
        self.write_file(path, textwrap::dedent(content))?;
        Ok(())
    }

    /// Writes the content of the given files and notifies the Db about the change.
    ///
    /// # Panics

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.5.3"
version = "0.5.5"
publish = false
authors = { workspace = true }
edition = { workspace = true }

crates/ruff_linter/resources/test/fixtures/fastapi/FAST001.py (new file, 110 lines, vendored)
@@ -0,0 +1,110 @@
from typing import List, Dict

from fastapi import FastAPI, APIRouter
from pydantic import BaseModel

app = FastAPI()
router = APIRouter()


class Item(BaseModel):
    name: str


# Errors


@app.post("/items/", response_model=Item)
async def create_item(item: Item) -> Item:
    return item


@app.post("/items/", response_model=list[Item])
async def create_item(item: Item) -> list[Item]:
    return item


@app.post("/items/", response_model=List[Item])
async def create_item(item: Item) -> List[Item]:
    return item


@app.post("/items/", response_model=Dict[str, Item])
async def create_item(item: Item) -> Dict[str, Item]:
    return item


@app.post("/items/", response_model=str)
async def create_item(item: Item) -> str:
    return item


@app.get("/items/", response_model=Item)
async def create_item(item: Item) -> Item:
    return item


@app.get("/items/", response_model=Item)
@app.post("/items/", response_model=Item)
async def create_item(item: Item) -> Item:
    return item


@router.get("/items/", response_model=Item)
async def create_item(item: Item) -> Item:
    return item


# OK


async def create_item(item: Item) -> Item:
    return item


@app("/items/", response_model=Item)
async def create_item(item: Item) -> Item:
    return item


@cache
async def create_item(item: Item) -> Item:
    return item


@app.post("/items/", response_model=str)
async def create_item(item: Item) -> Item:
    return item


@app.post("/items/")
async def create_item(item: Item) -> Item:
    return item


@app.post("/items/", response_model=str)
async def create_item(item: Item):
    return item


@app.post("/items/", response_model=list[str])
async def create_item(item: Item) -> Dict[str, Item]:
    return item


@app.post("/items/", response_model=list[str])
async def create_item(item: Item) -> list[str, str]:
    return item


@app.post("/items/", response_model=Dict[str, int])
async def create_item(item: Item) -> Dict[str, str]:
    return item


app = None


@app.post("/items/", response_model=Item)
async def create_item(item: Item) -> Item:
    return item
|
||||
crates/ruff_linter/resources/test/fixtures/fastapi/FAST002.py (new vendored file, 68 lines)
@@ -0,0 +1,68 @@
from fastapi import (
    FastAPI,
    APIRouter,
    Query,
    Path,
    Body,
    Cookie,
    Header,
    File,
    Form,
    Depends,
    Security,
)
from pydantic import BaseModel

app = FastAPI()
router = APIRouter()


# Errors

@app.get("/items/")
def get_items(
    current_user: User = Depends(get_current_user),
    some_security_param: str = Security(get_oauth2_user),
):
    pass


@app.post("/stuff/")
def do_stuff(
    some_query_param: str | None = Query(default=None),
    some_path_param: str = Path(),
    some_body_param: str = Body("foo"),
    some_cookie_param: str = Cookie(),
    some_header_param: int = Header(default=5),
    some_file_param: UploadFile = File(),
    some_form_param: str = Form(),
):
    # do stuff
    pass


# Unchanged


@app.post("/stuff/")
def do_stuff(
    no_default: Body("foo"),
    no_type_annotation=str,
    no_fastapi_default: str = BaseModel(),
):
    pass


# OK

@app.post("/stuff/")
def do_stuff(
    some_path_param: Annotated[str, Path()],
    some_cookie_param: Annotated[str, Cookie()],
    some_file_param: Annotated[UploadFile, File()],
    some_form_param: Annotated[str, Form()],
    some_query_param: Annotated[str | None, Query()] = None,
    some_body_param: Annotated[str, Body()] = "foo",
    some_header_param: Annotated[int, Header()] = 5,
):
    pass
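For readers unfamiliar with the pattern these fixtures exercise: FastAPI treats a `Depends(...)`/`Query(...)`/etc. default value and an `Annotated[...]` annotation as two ways to declare the same dependency, and the latter is the documented recommendation. A hedged, self-contained sketch (`get_current_user` is a stand-in dependency invented for illustration):

```python
from typing import Annotated

from fastapi import Depends, FastAPI

app = FastAPI()


async def get_current_user():  # stand-in dependency for illustration
    return {"user": "anonymous"}


@app.get("/items/")
async def read_items(user: Annotated[dict, Depends(get_current_user)]):
    # Equivalent to `user: dict = Depends(get_current_user)`, but the
    # default-value slot stays free for a real default value.
    return user
```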
@@ -12,3 +12,10 @@ except (*retriable_exceptions,):
    pass
except(ValueError,):
    pass

list_exceptions = [FileExistsError, FileNotFoundError]

try:
    pass
except (*list_exceptions,):
    pass
@@ -5,7 +5,21 @@ def func1(str, /, type, *complex, Exception, **getattr):
async def func2(bytes):
    pass


async def func3(id, dir):
    pass


map([], lambda float: ...)

from typing import override, overload


@override
def func4(id, dir):
    pass


@overload
def func4(id, dir):
    pass
@@ -3,8 +3,18 @@ min([x.val for x in bar])
max([x.val for x in bar])
sum([x.val for x in bar], 0)

# Ok
# OK
sum(x.val for x in bar)
min(x.val for x in bar)
max(x.val for x in bar)
sum(x.val for x in bar, 0)
sum((x.val for x in bar), 0)

# Multi-line
sum(
    [
        delta
        for delta in timedelta_list
        if delta
    ],
    dt.timedelta(),
)
@@ -70,3 +70,20 @@ def func():
    np.lookfor

    np.NAN

    try:
        from numpy.lib.npyio import DataSource
    except ImportError:
        from numpy import DataSource

    DataSource("foo").abspath()  # fine (`except ImportError` branch)

    try:
        from numpy.rec import format_parser
        from numpy import clongdouble
    except ModuleNotFoundError:
        from numpy import format_parser
        from numpy import longcomplex as clongdouble

    format_parser("foo")  # fine (`except ModuleNotFoundError` branch)
    clongdouble(42)  # fine (`except ModuleNotFoundError` branch)
@@ -41,7 +41,7 @@ def func():

    np.alltrue([True, True])

    np.anytrue([True, False])
    np.sometrue([True, False])

    np.cumproduct([1, 2, 3])
@@ -56,3 +56,21 @@ def func():
    np.ComplexWarning

    np.compare_chararrays

    try:
        np.all([True, True])
    except TypeError:
        np.alltrue([True, True])  # Should emit a warning here (`except TypeError`, not `except AttributeError`)

    try:
        np.anyyyy([True, True])
    except AttributeError:
        np.sometrue([True, True])  # Should emit a warning here
        # (must have an attribute access of the undeprecated name in the `try` body for it to be ignored)

    try:
        exc = np.exceptions.ComplexWarning
    except AttributeError:
        exc = np.ComplexWarning  # `except AttributeError` means that this is okay

    raise exc
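The fixture above encodes the compatibility idiom that `NPY201` now tolerates: try the NumPy 2.0 name first, and fall back to the pre-2.0 name inside an `except ImportError`/`except ModuleNotFoundError`/`except AttributeError` handler. A hedged sketch of the pattern as user code would write it:

```python
import numpy as np

try:
    # NumPy 2.0 location of the warning class.
    complex_warning = np.exceptions.ComplexWarning
except AttributeError:
    # Pre-2.0 fallback; NPY201 should stay quiet inside this handler.
    complex_warning = np.ComplexWarning
```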
crates/ruff_linter/resources/test/fixtures/pydoclint/DOC501.py (new vendored file, 8 lines)
@@ -0,0 +1,8 @@
# https://github.com/astral-sh/ruff/issues/12428
def parse_bool(x, default=_parse_bool_sentinel):
    """Parse a boolean value
    bool or type(default)
    Raises
    `ValueError`
    ê>>> all(parse_bool(x) for x in [True, "yes", "Yes", "true", "True", "on", "ON", "1", 1])
    """
crates/ruff_linter/resources/test/fixtures/pydoclint/DOC501_google.py (new vendored file, 192 lines)
@@ -0,0 +1,192 @@
import something
from somewhere import AnotherError


class FasterThanLightError(Exception):
    ...


_some_error = Exception


# OK
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.

    Raises:
        FasterThanLightError: If speed is greater than the speed of light.
    """
    try:
        return distance / time
    except ZeroDivisionError as exc:
        raise FasterThanLightError from exc


# DOC501
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.
    """
    try:
        return distance / time
    except ZeroDivisionError as exc:
        raise FasterThanLightError from exc


# DOC501
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.
    """
    try:
        return distance / time
    except ZeroDivisionError as exc:
        raise FasterThanLightError from exc
    except:
        raise ValueError


# DOC501
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.
    """
    try:
        return distance / time
    except ZeroDivisionError as exc:
        print('oops')
        raise exc


# DOC501
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.
    """
    try:
        return distance / time
    except (ZeroDivisionError, ValueError) as exc:
        print('oops')
        raise exc


# DOC501
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.
    """
    raise AnotherError


# DOC501
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.
    """
    raise AnotherError()


# DOC501
def foo(bar: int):
    """Foo.

    Args:
        bar: Bar.
    """
    raise something.SomeError


# DOC501, but can't resolve the error
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.
    """
    raise _some_error


# OK
def calculate_speed(distance: float, time: float) -> float:
    try:
        return distance / time
    except ZeroDivisionError as exc:
        raise FasterThanLightError from exc


# OK
def calculate_speed(distance: float, time: float) -> float:
    raise NotImplementedError


# OK
def foo(bar: int):
    """Foo.

    Args:
        bar: Bar.

    Raises:
        SomeError: Wow.
    """
    raise something.SomeError


# OK
def foo(bar: int):
    """Foo.

    Args:
        bar: Bar.

    Raises:
        something.SomeError: Wow.
    """
    raise something.SomeError
crates/ruff_linter/resources/test/fixtures/pydoclint/DOC501_numpy.py (new vendored file, 78 lines)
@@ -0,0 +1,78 @@
class FasterThanLightError(Exception):
    ...


# OK
def calculate_speed(distance: float, time: float) -> float:
    """
    Calculate speed as distance divided by time.

    Parameters
    ----------
    distance : float
        Distance traveled.
    time : float
        Time spent traveling.

    Returns
    -------
    float
        Speed as distance divided by time.

    Raises
    ------
    FasterThanLightError
        If speed is greater than the speed of light.
    """
    try:
        return distance / time
    except ZeroDivisionError as exc:
        raise FasterThanLightError from exc


# DOC501
def calculate_speed(distance: float, time: float) -> float:
    """
    Calculate speed as distance divided by time.

    Parameters
    ----------
    distance : float
        Distance traveled.
    time : float
        Time spent traveling.

    Returns
    -------
    float
        Speed as distance divided by time.
    """
    try:
        return distance / time
    except ZeroDivisionError as exc:
        raise FasterThanLightError from exc


# DOC501
def calculate_speed(distance: float, time: float) -> float:
    """
    Calculate speed as distance divided by time.

    Parameters
    ----------
    distance : float
        Distance traveled.
    time : float
        Time spent traveling.

    Returns
    -------
    float
        Speed as distance divided by time.
    """
    try:
        return distance / time
    except ZeroDivisionError as exc:
        raise FasterThanLightError from exc
    except:
        raise ValueError
crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_google.py (new vendored file, 58 lines)
@@ -0,0 +1,58 @@
class FasterThanLightError(Exception):
    ...


# DOC502
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.

    Raises:
        FasterThanLightError: If speed is greater than the speed of light.
    """
    return distance / time


# DOC502
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.

    Raises:
        FasterThanLightError: If speed is greater than the speed of light.
        DivisionByZero: Divide by zero.
    """
    return distance / time


# DOC502
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.

    Returns:
        Speed as distance divided by time.

    Raises:
        FasterThanLightError: If speed is greater than the speed of light.
        DivisionByZero: Divide by zero.
    """
    try:
        return distance / time
    except ZeroDivisionError as exc:
        raise FasterThanLightError from exc
crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_numpy.py (new vendored file, 84 lines)
@@ -0,0 +1,84 @@
class FasterThanLightError(Exception):
    ...


# DOC502
def calculate_speed(distance: float, time: float) -> float:
    """
    Calculate speed as distance divided by time.

    Parameters
    ----------
    distance : float
        Distance traveled.
    time : float
        Time spent traveling.

    Returns
    -------
    float
        Speed as distance divided by time.

    Raises
    ------
    FasterThanLightError
        If speed is greater than the speed of light.
    """
    return distance / time


# DOC502
def calculate_speed(distance: float, time: float) -> float:
    """
    Calculate speed as distance divided by time.

    Parameters
    ----------
    distance : float
        Distance traveled.
    time : float
        Time spent traveling.

    Returns
    -------
    float
        Speed as distance divided by time.

    Raises
    ------
    FasterThanLightError
        If speed is greater than the speed of light.
    DivisionByZero
        If attempting to divide by zero.
    """
    return distance / time


# DOC502
def calculate_speed(distance: float, time: float) -> float:
    """
    Calculate speed as distance divided by time.

    Parameters
    ----------
    distance : float
        Distance traveled.
    time : float
        Time spent traveling.

    Returns
    -------
    float
        Speed as distance divided by time.

    Raises
    ------
    FasterThanLightError
        If speed is greater than the speed of light.
    DivisionByZero
        If attempting to divide by zero.
    """
    try:
        return distance / time
    except ZeroDivisionError as exc:
        raise FasterThanLightError from exc
@@ -10,7 +10,7 @@ use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::docstrings::Docstring;
use crate::fs::relativize_path;
use crate::rules::{flake8_annotations, flake8_pyi, pydocstyle, pylint};
use crate::rules::{flake8_annotations, flake8_pyi, pydoclint, pydocstyle, pylint};
use crate::{docstrings, warn_user};

/// Run lint rules over all [`Definition`] nodes in the [`SemanticModel`].
@@ -83,12 +83,17 @@ pub(crate) fn definitions(checker: &mut Checker) {
        Rule::UndocumentedPublicNestedClass,
        Rule::UndocumentedPublicPackage,
    ]);
    let enforce_pydoclint = checker.any_enabled(&[
        Rule::DocstringMissingException,
        Rule::DocstringExtraneousException,
    ]);

    if !enforce_annotations
        && !enforce_docstrings
        && !enforce_stubs
        && !enforce_stubs_and_runtime
        && !enforce_dunder_method
        && !enforce_pydoclint
    {
        return;
    }
@@ -163,8 +168,8 @@ pub(crate) fn definitions(checker: &mut Checker) {
            }
        }

        // pydocstyle
        if enforce_docstrings {
        // pydocstyle, pydoclint
        if enforce_docstrings || enforce_pydoclint {
            if pydocstyle::helpers::should_ignore_definition(
                definition,
                &checker.settings.pydocstyle.ignore_decorators,
@@ -282,7 +287,8 @@ pub(crate) fn definitions(checker: &mut Checker) {
            if checker.enabled(Rule::OverloadWithDocstring) {
                pydocstyle::rules::if_needed(checker, &docstring);
            }
            if checker.any_enabled(&[

            let enforce_sections = checker.any_enabled(&[
                Rule::BlankLineAfterLastSection,
                Rule::BlankLinesBetweenHeaderAndContent,
                Rule::CapitalizeSectionName,
@@ -298,12 +304,30 @@ pub(crate) fn definitions(checker: &mut Checker) {
                Rule::SectionUnderlineMatchesSectionLength,
                Rule::SectionUnderlineNotOverIndented,
                Rule::UndocumentedParam,
            ]) {
                pydocstyle::rules::sections(
                    checker,
            ]);
            if enforce_sections || enforce_pydoclint {
                let section_contexts = pydocstyle::helpers::get_section_contexts(
                    &docstring,
                    checker.settings.pydocstyle.convention.as_ref(),
                );

                if enforce_sections {
                    pydocstyle::rules::sections(
                        checker,
                        &docstring,
                        &section_contexts,
                        checker.settings.pydocstyle.convention.as_ref(),
                    );
                }

                if enforce_pydoclint {
                    pydoclint::rules::check_docstring(
                        checker,
                        definition,
                        &section_contexts,
                        checker.settings.pydocstyle.convention.as_ref(),
                    );
                }
            }
        }
    }
@@ -8,11 +8,11 @@ use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
use crate::registry::Rule;
use crate::rules::{
    airflow, flake8_async, flake8_bandit, flake8_boolean_trap, flake8_bugbear, flake8_builtins,
    flake8_debugger, flake8_django, flake8_errmsg, flake8_import_conventions, flake8_pie,
    flake8_pyi, flake8_pytest_style, flake8_raise, flake8_return, flake8_simplify, flake8_slots,
    flake8_tidy_imports, flake8_type_checking, mccabe, pandas_vet, pep8_naming, perflint,
    pycodestyle, pyflakes, pygrep_hooks, pylint, pyupgrade, refurb, ruff, tryceratops,
    airflow, fastapi, flake8_async, flake8_bandit, flake8_boolean_trap, flake8_bugbear,
    flake8_builtins, flake8_debugger, flake8_django, flake8_errmsg, flake8_import_conventions,
    flake8_pie, flake8_pyi, flake8_pytest_style, flake8_raise, flake8_return, flake8_simplify,
    flake8_slots, flake8_tidy_imports, flake8_type_checking, mccabe, pandas_vet, pep8_naming,
    perflint, pycodestyle, pyflakes, pygrep_hooks, pylint, pyupgrade, refurb, ruff, tryceratops,
};
use crate::settings::types::PythonVersion;

@@ -88,6 +88,12 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
            if checker.enabled(Rule::DjangoNonLeadingReceiverDecorator) {
                flake8_django::rules::non_leading_receiver_decorator(checker, decorator_list);
            }
            if checker.enabled(Rule::FastApiRedundantResponseModel) {
                fastapi::rules::fastapi_redundant_response_model(checker, function_def);
            }
            if checker.enabled(Rule::FastApiNonAnnotatedDependency) {
                fastapi::rules::fastapi_non_annotated_dependency(checker, function_def);
            }
            if checker.enabled(Rule::AmbiguousFunctionName) {
                if let Some(diagnostic) = pycodestyle::rules::ambiguous_function_name(name) {
                    checker.diagnostics.push(diagnostic);
@@ -33,9 +33,7 @@ use log::debug;

use ruff_diagnostics::{Diagnostic, IsolationLevel};
use ruff_notebook::{CellOffsets, NotebookIndex};
use ruff_python_ast::helpers::{
    collect_import_from_member, extract_handled_exceptions, is_docstring_stmt, to_module_path,
};
use ruff_python_ast::helpers::{collect_import_from_member, is_docstring_stmt, to_module_path};
use ruff_python_ast::identifier::Identifier;
use ruff_python_ast::name::QualifiedName;
use ruff_python_ast::str::Quote;
@@ -834,32 +832,22 @@ impl<'a> Visitor<'a> for Checker<'a> {
                self.semantic.pop_scope();
                self.visit_expr(name);
            }
            Stmt::Try(ast::StmtTry {
                body,
                handlers,
                orelse,
                finalbody,
                ..
            }) => {
                let mut handled_exceptions = Exceptions::empty();
                for type_ in extract_handled_exceptions(handlers) {
                    if let Some(builtins_name) = self.semantic.resolve_builtin_symbol(type_) {
                        match builtins_name {
                            "NameError" => handled_exceptions |= Exceptions::NAME_ERROR,
                            "ModuleNotFoundError" => {
                                handled_exceptions |= Exceptions::MODULE_NOT_FOUND_ERROR;
                            }
                            "ImportError" => handled_exceptions |= Exceptions::IMPORT_ERROR,
                            _ => {}
                        }
                    }
                }

            Stmt::Try(
                try_node @ ast::StmtTry {
                    body,
                    handlers,
                    orelse,
                    finalbody,
                    ..
                },
            ) => {
                // Iterate over the `body`, then the `handlers`, then the `orelse`, then the
                // `finalbody`, but treat the body and the `orelse` as a single branch for
                // flow analysis purposes.
                let branch = self.semantic.push_branch();
                self.semantic.handled_exceptions.push(handled_exceptions);
                self.semantic
                    .handled_exceptions
                    .push(Exceptions::from_try_stmt(try_node, &self.semantic));
                self.visit_body(body);
                self.semantic.handled_exceptions.pop();
                self.semantic.pop_branch();
@@ -1837,7 +1825,7 @@ impl<'a> Checker<'a> {
        name: &'a str,
        range: TextRange,
        kind: BindingKind<'a>,
        flags: BindingFlags,
        mut flags: BindingFlags,
    ) -> BindingId {
        // Determine the scope to which the binding belongs.
        // Per [PEP 572](https://peps.python.org/pep-0572/#scope-of-the-target), named
@@ -1853,6 +1841,10 @@ impl<'a> Checker<'a> {
            self.semantic.scope_id
        };

        if self.semantic.in_exception_handler() {
            flags |= BindingFlags::IN_EXCEPT_HANDLER;
        }

        // Create the `Binding`.
        let binding_id = self.semantic.push_binding(range, kind, flags);

@@ -912,6 +912,14 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Numpy, "003") => (RuleGroup::Stable, rules::numpy::rules::NumpyDeprecatedFunction),
        (Numpy, "201") => (RuleGroup::Stable, rules::numpy::rules::Numpy2Deprecation),

        // fastapi
        (FastApi, "001") => (RuleGroup::Preview, rules::fastapi::rules::FastApiRedundantResponseModel),
        (FastApi, "002") => (RuleGroup::Preview, rules::fastapi::rules::FastApiNonAnnotatedDependency),

        // pydoclint
        (Pydoclint, "501") => (RuleGroup::Preview, rules::pydoclint::rules::DocstringMissingException),
        (Pydoclint, "502") => (RuleGroup::Preview, rules::pydoclint::rules::DocstringExtraneousException),

        // ruff
        (Ruff, "001") => (RuleGroup::Stable, rules::ruff::rules::AmbiguousUnicodeCharacterString),
        (Ruff, "002") => (RuleGroup::Stable, rules::ruff::rules::AmbiguousUnicodeCharacterDocstring),
@@ -943,6 +951,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Ruff, "030") => (RuleGroup::Preview, rules::ruff::rules::AssertWithPrintMessage),
        (Ruff, "100") => (RuleGroup::Stable, rules::ruff::rules::UnusedNOQA),
        (Ruff, "101") => (RuleGroup::Preview, rules::ruff::rules::RedirectedNOQA),

        (Ruff, "200") => (RuleGroup::Stable, rules::ruff::rules::InvalidPyprojectToml),
        #[cfg(any(feature = "test-rules", test))]
        (Ruff, "900") => (RuleGroup::Stable, rules::ruff::rules::StableTestRule),
@@ -163,6 +163,7 @@ impl SectionKind {
pub(crate) struct SectionContexts<'a> {
    contexts: Vec<SectionContextData>,
    docstring: &'a Docstring<'a>,
    style: SectionStyle,
}

impl<'a> SectionContexts<'a> {
@@ -221,9 +222,14 @@ impl<'a> SectionContexts<'a> {
        Self {
            contexts,
            docstring,
            style,
        }
    }

    pub(crate) fn style(&self) -> SectionStyle {
        self.style
    }

    pub(crate) fn len(&self) -> usize {
        self.contexts.len()
    }
@@ -396,7 +402,7 @@ impl<'a> SectionContext<'a> {
        NewlineWithTrailingNewline::with_offset(lines, self.offset() + self.data.summary_full_end)
    }

    fn following_lines_str(&self) -> &'a str {
    pub(crate) fn following_lines_str(&self) -> &'a str {
        &self.docstring_body.as_str()[self.following_range_relative()]
    }

@@ -130,13 +130,13 @@ fn apply_fixes<'a>(
/// Compare two fixes.
fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Ordering {
    // Always apply `RedefinedWhileUnused` before `UnusedImport`, as the latter can end up fixing
    // the former.
    {
        match (rule1, rule2) {
            (Rule::RedefinedWhileUnused, Rule::UnusedImport) => return std::cmp::Ordering::Less,
            (Rule::UnusedImport, Rule::RedefinedWhileUnused) => return std::cmp::Ordering::Greater,
            _ => std::cmp::Ordering::Equal,
        }
    }
    // the former. But we can't special-case just the `RedefinedWhileUnused`/`UnusedImport` pair,
    // because that violates the `Ord` contract: `<` must be transitive (`a < b` and `b < c`
    // implies `a < c`), and the same must hold for both `==` and `>`.
    // See https://github.com/astral-sh/ruff/issues/12469#issuecomment-2244392085
    match (rule1, rule2) {
        (Rule::RedefinedWhileUnused, _) => std::cmp::Ordering::Less,
        (_, Rule::RedefinedWhileUnused) => std::cmp::Ordering::Greater,
        _ => std::cmp::Ordering::Equal,
    }
    // Apply fixes in order of their start position.
    .then_with(|| fix1.min_start().cmp(&fix2.min_start()))
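The `cmp_fix` change is about the `Ord` contract rather than fix semantics: a comparator that special-cases only the (`RedefinedWhileUnused`, `UnusedImport`) pair is not transitive, and sorting with a non-transitive comparator can panic or silently produce an inconsistent order. A hedged Python illustration of the same failure mode (Rust's `slice::sort_by` carries the analogous requirement):

```python
from functools import cmp_to_key

# A comparator that orders only one specific pair, and treats everything
# else as equal, is not transitive: A == Other and Other == B, yet A < B.
def cmp(x, y):
    if (x, y) == ("RedefinedWhileUnused", "UnusedImport"):
        return -1
    if (x, y) == ("UnusedImport", "RedefinedWhileUnused"):
        return 1
    return 0

items = ["UnusedImport", "Other", "RedefinedWhileUnused"]
# The sort may legitimately leave "UnusedImport" before "RedefinedWhileUnused"
# here, because the comparator never relates them through "Other".
print(sorted(items, key=cmp_to_key(cmp)))
```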
@@ -193,6 +193,9 @@ pub enum Linter {
    /// NumPy-specific rules
    #[prefix = "NPY"]
    Numpy,
    /// [FastAPI](https://pypi.org/project/fastapi/)
    #[prefix = "FAST"]
    FastApi,
    /// [Airflow](https://pypi.org/project/apache-airflow/)
    #[prefix = "AIR"]
    Airflow,
@@ -202,6 +205,9 @@ pub enum Linter {
    /// [refurb](https://pypi.org/project/refurb/)
    #[prefix = "FURB"]
    Refurb,
    /// [pydoclint](https://pypi.org/project/pydoclint/)
    #[prefix = "DOC"]
    Pydoclint,
    /// Ruff-specific rules
    #[prefix = "RUF"]
    Ruff,
crates/ruff_linter/src/rules/fastapi/mod.rs (new file, 27 lines)
@@ -0,0 +1,27 @@
//! FastAPI-specific rules.
pub(crate) mod rules;

#[cfg(test)]
mod tests {
    use std::convert::AsRef;
    use std::path::Path;

    use anyhow::Result;
    use test_case::test_case;

    use crate::registry::Rule;
    use crate::test::test_path;
    use crate::{assert_messages, settings};

    #[test_case(Rule::FastApiRedundantResponseModel, Path::new("FAST001.py"))]
    #[test_case(Rule::FastApiNonAnnotatedDependency, Path::new("FAST002.py"))]
    fn rules(rule_code: Rule, path: &Path) -> Result<()> {
        let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
        let diagnostics = test_path(
            Path::new("fastapi").join(path).as_path(),
            &settings::LinterSettings::for_rule(rule_code),
        )?;
        assert_messages!(snapshot, diagnostics);
        Ok(())
    }
}
@@ -0,0 +1,138 @@
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast as ast;
use ruff_python_ast::helpers::map_callable;
use ruff_python_semantic::Modules;
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
use crate::importer::ImportRequest;
use crate::rules::fastapi::rules::is_fastapi_route;
use crate::settings::types::PythonVersion;

/// ## What it does
/// Identifies FastAPI routes with deprecated uses of `Depends`.
///
/// ## Why is this bad?
/// The FastAPI documentation recommends the use of `Annotated` for defining
/// route dependencies and parameters, rather than using `Depends` directly
/// with a default value.
///
/// This approach is also suggested for various route parameters, including
/// `Body` and `Cookie`, as it helps ensure consistency and clarity in
/// defining dependencies and parameters.
///
/// ## Example
///
/// ```python
/// from fastapi import Depends, FastAPI
///
/// app = FastAPI()
///
///
/// async def common_parameters(q: str | None = None, skip: int = 0, limit: int = 100):
///     return {"q": q, "skip": skip, "limit": limit}
///
///
/// @app.get("/items/")
/// async def read_items(commons: dict = Depends(common_parameters)):
///     return commons
/// ```
///
/// Use instead:
///
/// ```python
/// from typing import Annotated
///
/// from fastapi import Depends, FastAPI
///
/// app = FastAPI()
///
///
/// async def common_parameters(q: str | None = None, skip: int = 0, limit: int = 100):
///     return {"q": q, "skip": skip, "limit": limit}
///
///
/// @app.get("/items/")
/// async def read_items(commons: Annotated[dict, Depends(common_parameters)]):
///     return commons
/// ```
#[violation]
pub struct FastApiNonAnnotatedDependency;

impl AlwaysFixableViolation for FastApiNonAnnotatedDependency {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("FastAPI dependency without `Annotated`")
    }

    fn fix_title(&self) -> String {
        "Replace with `Annotated`".to_string()
    }
}

/// FAST002
pub(crate) fn fastapi_non_annotated_dependency(
    checker: &mut Checker,
    function_def: &ast::StmtFunctionDef,
) {
    if !checker.semantic().seen_module(Modules::FASTAPI) {
        return;
    }
    if !is_fastapi_route(function_def, checker.semantic()) {
        return;
    }
    for parameter in &function_def.parameters.args {
        if let (Some(annotation), Some(default)) =
            (&parameter.parameter.annotation, &parameter.default)
        {
            if checker
                .semantic()
                .resolve_qualified_name(map_callable(default))
                .is_some_and(|qualified_name| {
                    matches!(
                        qualified_name.segments(),
                        [
                            "fastapi",
                            "Query"
                                | "Path"
                                | "Body"
                                | "Cookie"
                                | "Header"
                                | "File"
                                | "Form"
                                | "Depends"
                                | "Security"
                        ]
                    )
                })
            {
                let mut diagnostic =
                    Diagnostic::new(FastApiNonAnnotatedDependency, parameter.range);

                diagnostic.try_set_fix(|| {
                    let module = if checker.settings.target_version >= PythonVersion::Py39 {
                        "typing"
                    } else {
                        "typing_extensions"
                    };
                    let (import_edit, binding) = checker.importer().get_or_import_symbol(
                        &ImportRequest::import_from(module, "Annotated"),
                        function_def.start(),
                        checker.semantic(),
                    )?;
                    let content = format!(
                        "{}: {}[{}, {}]",
                        parameter.parameter.name.id,
                        binding,
                        checker.locator().slice(annotation.range()),
                        checker.locator().slice(default.range())
                    );
                    let parameter_edit = Edit::range_replacement(content, parameter.range());
                    Ok(Fix::unsafe_edits(import_edit, [parameter_edit]))
                });

                checker.diagnostics.push(diagnostic);
            }
        }
    }
}
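One detail of this fix worth calling out: `Annotated` is imported from `typing` only when the target version is Python 3.9 or later, and from `typing_extensions` otherwise, since `typing.Annotated` was added in 3.9. A hedged, self-contained before/after of the rewrite the fix performs:

```python
from typing import Annotated  # ruff falls back to `typing_extensions` before 3.9

from fastapi import Depends, FastAPI

app = FastAPI()


async def common_parameters(q: str | None = None):
    return {"q": q}


# Before the fix: `commons: dict = Depends(common_parameters)`
# After the unsafe fix:
@app.get("/items/")
async def read_items(commons: Annotated[dict, Depends(common_parameters)]):
    return commons
```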
@@ -0,0 +1,158 @@
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{Decorator, Expr, ExprCall, Keyword, StmtFunctionDef};
use ruff_python_semantic::{Modules, SemanticModel};
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
use crate::fix::edits::{remove_argument, Parentheses};
use crate::rules::fastapi::rules::is_fastapi_route_decorator;

/// ## What it does
/// Checks for FastAPI routes that use the optional `response_model` parameter
/// with the same type as the return type.
///
/// ## Why is this bad?
/// FastAPI routes automatically infer the response model type from the return
/// type, so specifying it explicitly is redundant.
///
/// The `response_model` parameter is used to override the default response
/// model type. For example, `response_model` can be used to specify that
/// a non-serializable response type should instead be serialized via an
/// alternative type.
///
/// For more information, see the [FastAPI documentation](https://fastapi.tiangolo.com/tutorial/response-model/).
///
/// ## Example
///
/// ```python
/// from fastapi import FastAPI
/// from pydantic import BaseModel
///
/// app = FastAPI()
///
///
/// class Item(BaseModel):
///     name: str
///
///
/// @app.post("/items/", response_model=Item)
/// async def create_item(item: Item) -> Item:
///     return item
/// ```
///
/// Use instead:
///
/// ```python
/// from fastapi import FastAPI
/// from pydantic import BaseModel
///
/// app = FastAPI()
///
///
/// class Item(BaseModel):
///     name: str
///
///
/// @app.post("/items/")
/// async def create_item(item: Item) -> Item:
///     return item
/// ```
#[violation]
pub struct FastApiRedundantResponseModel;

impl AlwaysFixableViolation for FastApiRedundantResponseModel {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("FastAPI route with redundant `response_model` argument")
    }

    fn fix_title(&self) -> String {
        "Remove argument".to_string()
    }
}

/// FAST001
pub(crate) fn fastapi_redundant_response_model(
    checker: &mut Checker,
    function_def: &StmtFunctionDef,
) {
    if !checker.semantic().seen_module(Modules::FASTAPI) {
        return;
    }
    for decorator in &function_def.decorator_list {
        let Some((call, response_model_arg)) =
            check_decorator(function_def, decorator, checker.semantic())
        else {
            continue;
        };
        let mut diagnostic =
            Diagnostic::new(FastApiRedundantResponseModel, response_model_arg.range());
        diagnostic.try_set_fix(|| {
            remove_argument(
                response_model_arg,
                &call.arguments,
                Parentheses::Preserve,
                checker.locator().contents(),
            )
            .map(Fix::unsafe_edit)
        });
        checker.diagnostics.push(diagnostic);
    }
}

fn check_decorator<'a>(
    function_def: &StmtFunctionDef,
    decorator: &'a Decorator,
    semantic: &'a SemanticModel,
) -> Option<(&'a ExprCall, &'a Keyword)> {
    let call = is_fastapi_route_decorator(decorator, semantic)?;
    let response_model_arg = call.arguments.find_keyword("response_model")?;
    let return_value = function_def.returns.as_ref()?;
    if is_identical_types(&response_model_arg.value, return_value, semantic) {
        Some((call, response_model_arg))
    } else {
        None
    }
}

fn is_identical_types(
    response_model_arg: &Expr,
    return_value: &Expr,
    semantic: &SemanticModel,
) -> bool {
    if let (Some(response_mode_name_expr), Some(return_value_name_expr)) = (
        response_model_arg.as_name_expr(),
        return_value.as_name_expr(),
    ) {
        return semantic.resolve_name(response_mode_name_expr)
            == semantic.resolve_name(return_value_name_expr);
    }
    if let (Some(response_mode_subscript), Some(return_value_subscript)) = (
        response_model_arg.as_subscript_expr(),
        return_value.as_subscript_expr(),
    ) {
        return is_identical_types(
            &response_mode_subscript.value,
            &return_value_subscript.value,
            semantic,
        ) && is_identical_types(
            &response_mode_subscript.slice,
            &return_value_subscript.slice,
            semantic,
        );
    }
    if let (Some(response_mode_tuple), Some(return_value_tuple)) = (
        response_model_arg.as_tuple_expr(),
        return_value.as_tuple_expr(),
    ) {
        return response_mode_tuple.elts.len() == return_value_tuple.elts.len()
            && response_mode_tuple
                .elts
                .iter()
                .zip(return_value_tuple.elts.iter())
                .all(|(x, y)| is_identical_types(x, y, semantic));
    }
    false
}
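`is_identical_types` compares the `response_model` argument and the return annotation structurally: plain names must resolve to the same binding, subscripts are compared value-and-slice, and tuples element-wise. In fixture terms, the first route below matches and is flagged, the second does not (a hedged sketch reusing the FAST001 fixture's shape):

```python
from typing import Dict, List

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):
    name: str


# Flagged: both sides resolve to the same bindings, subscripts included.
@app.post("/items/", response_model=List[Item])
async def create_item(item: Item) -> List[Item]:
    return item


# Not flagged: `List[str]` and `Dict[str, Item]` differ structurally.
@app.post("/other/", response_model=List[str])
async def create_other(item: Item) -> Dict[str, Item]:
    return item
```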
crates/ruff_linter/src/rules/fastapi/rules/mod.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
pub(crate) use fastapi_non_annotated_dependency::*;
pub(crate) use fastapi_redundant_response_model::*;

mod fastapi_non_annotated_dependency;
mod fastapi_redundant_response_model;

use ruff_python_ast::{Decorator, ExprCall, StmtFunctionDef};
use ruff_python_semantic::analyze::typing::resolve_assignment;
use ruff_python_semantic::SemanticModel;

/// Returns `true` if the function is a FastAPI route.
pub(crate) fn is_fastapi_route(function_def: &StmtFunctionDef, semantic: &SemanticModel) -> bool {
    return function_def
        .decorator_list
        .iter()
        .any(|decorator| is_fastapi_route_decorator(decorator, semantic).is_some());
}

/// Returns `true` if the decorator is indicative of a FastAPI route.
pub(crate) fn is_fastapi_route_decorator<'a>(
    decorator: &'a Decorator,
    semantic: &'a SemanticModel,
) -> Option<&'a ExprCall> {
    let call = decorator.expression.as_call_expr()?;
    let decorator_method = call.func.as_attribute_expr()?;
    let method_name = &decorator_method.attr;

    if !matches!(
        method_name.as_str(),
        "get" | "post" | "put" | "delete" | "patch" | "options" | "head" | "trace"
    ) {
        return None;
    }

    let qualified_name = resolve_assignment(&decorator_method.value, semantic)?;
    if matches!(
        qualified_name.segments(),
        ["fastapi", "FastAPI" | "APIRouter"]
    ) {
        Some(call)
    } else {
        None
    }
}
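`is_fastapi_route_decorator` only matches attribute calls whose receiver resolves, via `resolve_assignment`, to a `fastapi.FastAPI` or `fastapi.APIRouter` instance, and whose attribute is one of the HTTP verb methods. A hedged sketch of what that means in user code (`cache` stands in for any unrelated decorator):

```python
from functools import cache

from fastapi import APIRouter, FastAPI

app = FastAPI()
router = APIRouter()


@app.get("/a/")  # matched: `app` resolves to a `FastAPI()` assignment
async def a(): ...


@router.post("/b/")  # matched: `router` resolves to `APIRouter()`
async def b(): ...


@cache  # not matched: not an HTTP-verb attribute call on a known app
def c(): ...
```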
@@ -0,0 +1,263 @@
---
source: crates/ruff_linter/src/rules/fastapi/mod.rs
---
FAST002.py:24:5: FAST002 [*] FastAPI dependency without `Annotated`
   |
22 | @app.get("/items/")
23 | def get_items(
24 |     current_user: User = Depends(get_current_user),
   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FAST002
25 |     some_security_param: str = Security(get_oauth2_user),
26 | ):
   |
   = help: Replace with `Annotated`

ℹ Unsafe fix
12 12 |     Security,
13 13 | )
14 14 | from pydantic import BaseModel
   15 |+from typing import Annotated
15 16 | 
16 17 | app = FastAPI()
17 18 | router = APIRouter()
--------------------------------------------------------------------------------
21 22 | 
22 23 | @app.get("/items/")
23 24 | def get_items(
24    |-    current_user: User = Depends(get_current_user),
   25 |+    current_user: Annotated[User, Depends(get_current_user)],
25 26 |     some_security_param: str = Security(get_oauth2_user),
26 27 | ):
27 28 |     pass

FAST002.py:25:5: FAST002 [*] FastAPI dependency without `Annotated`
   |
23 | def get_items(
24 |     current_user: User = Depends(get_current_user),
25 |     some_security_param: str = Security(get_oauth2_user),
   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FAST002
26 | ):
27 |     pass
   |
   = help: Replace with `Annotated`

ℹ Unsafe fix
12 12 |     Security,
13 13 | )
14 14 | from pydantic import BaseModel
   15 |+from typing import Annotated
15 16 | 
16 17 | app = FastAPI()
17 18 | router = APIRouter()
--------------------------------------------------------------------------------
22 23 | @app.get("/items/")
23 24 | def get_items(
24 25 |     current_user: User = Depends(get_current_user),
25    |-    some_security_param: str = Security(get_oauth2_user),
   26 |+    some_security_param: Annotated[str, Security(get_oauth2_user)],
26 27 | ):
27 28 |     pass
28 29 | 

FAST002.py:32:5: FAST002 [*] FastAPI dependency without `Annotated`
   |
30 | @app.post("/stuff/")
31 | def do_stuff(
32 |     some_query_param: str | None = Query(default=None),
   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FAST002
33 |     some_path_param: str = Path(),
34 |     some_body_param: str = Body("foo"),
   |
   = help: Replace with `Annotated`

ℹ Unsafe fix
12 12 |     Security,
13 13 | )
14 14 | from pydantic import BaseModel
   15 |+from typing import Annotated
15 16 | 
16 17 | app = FastAPI()
17 18 | router = APIRouter()
--------------------------------------------------------------------------------
29 30 | 
30 31 | @app.post("/stuff/")
31 32 | def do_stuff(
32    |-    some_query_param: str | None = Query(default=None),
   33 |+    some_query_param: Annotated[str | None, Query(default=None)],
33 34 |     some_path_param: str = Path(),
34 35 |     some_body_param: str = Body("foo"),
35 36 |     some_cookie_param: str = Cookie(),

FAST002.py:33:5: FAST002 [*] FastAPI dependency without `Annotated`
   |
31 | def do_stuff(
32 |     some_query_param: str | None = Query(default=None),
33 |     some_path_param: str = Path(),
   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FAST002
34 |     some_body_param: str = Body("foo"),
35 |     some_cookie_param: str = Cookie(),
   |
   = help: Replace with `Annotated`

ℹ Unsafe fix
12 12 |     Security,
13 13 | )
14 14 | from pydantic import BaseModel
   15 |+from typing import Annotated
15 16 | 
16 17 | app = FastAPI()
17 18 | router = APIRouter()
--------------------------------------------------------------------------------
30 31 | @app.post("/stuff/")
31 32 | def do_stuff(
32 33 |     some_query_param: str | None = Query(default=None),
33    |-    some_path_param: str = Path(),
   34 |+    some_path_param: Annotated[str, Path()],
34 35 |     some_body_param: str = Body("foo"),
35 36 |     some_cookie_param: str = Cookie(),
36 37 |     some_header_param: int = Header(default=5),

FAST002.py:34:5: FAST002 [*] FastAPI dependency without `Annotated`
   |
32 |     some_query_param: str | None = Query(default=None),
33 |     some_path_param: str = Path(),
34 |     some_body_param: str = Body("foo"),
   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FAST002
35 |     some_cookie_param: str = Cookie(),
36 |     some_header_param: int = Header(default=5),
   |
   = help: Replace with `Annotated`

ℹ Unsafe fix
12 12 |     Security,
13 13 | )
14 14 | from pydantic import BaseModel
   15 |+from typing import Annotated
15 16 | 
16 17 | app = FastAPI()
17 18 | router = APIRouter()
--------------------------------------------------------------------------------
31 32 | def do_stuff(
32 33 |     some_query_param: str | None = Query(default=None),
33 34 |     some_path_param: str = Path(),
34    |-    some_body_param: str = Body("foo"),
   35 |+    some_body_param: Annotated[str, Body("foo")],
35 36 |     some_cookie_param: str = Cookie(),
36 37 |     some_header_param: int = Header(default=5),
37 38 |     some_file_param: UploadFile = File(),

FAST002.py:35:5: FAST002 [*] FastAPI dependency without `Annotated`
   |
33 |     some_path_param: str = Path(),
34 |     some_body_param: str = Body("foo"),
35 |     some_cookie_param: str = Cookie(),
   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FAST002
36 |     some_header_param: int = Header(default=5),
37 |     some_file_param: UploadFile = File(),
   |
   = help: Replace with `Annotated`

ℹ Unsafe fix
12 12 |     Security,
13 13 | )
14 14 | from pydantic import BaseModel
   15 |+from typing import Annotated
15 16 | 
16 17 | app = FastAPI()
17 18 | router = APIRouter()
--------------------------------------------------------------------------------
32 33 |     some_query_param: str | None = Query(default=None),
33 34 |     some_path_param: str = Path(),
34 35 |     some_body_param: str = Body("foo"),
35    |-    some_cookie_param: str = Cookie(),
   36 |+    some_cookie_param: Annotated[str, Cookie()],
36 37 |     some_header_param: int = Header(default=5),
37 38 |     some_file_param: UploadFile = File(),
38 39 |     some_form_param: str = Form(),

FAST002.py:36:5: FAST002 [*] FastAPI dependency without `Annotated`
   |
34 |     some_body_param: str = Body("foo"),
35 |     some_cookie_param: str = Cookie(),
36 |     some_header_param: int = Header(default=5),
   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FAST002
37 |     some_file_param: UploadFile = File(),
38 |     some_form_param: str = Form(),
   |
   = help: Replace with `Annotated`

ℹ Unsafe fix
12 12 |     Security,
13 13 | )
14 14 | from pydantic import BaseModel
   15 |+from typing import Annotated
15 16 | 
16 17 | app = FastAPI()
17 18 | router = APIRouter()
--------------------------------------------------------------------------------
33 34 |     some_path_param: str = Path(),
34 35 |     some_body_param: str = Body("foo"),
35 36 |     some_cookie_param: str = Cookie(),
36    |-    some_header_param: int = Header(default=5),
   37 |+    some_header_param: Annotated[int, Header(default=5)],
37 38 |     some_file_param: UploadFile = File(),
38 39 |     some_form_param: str = Form(),
39 40 | ):

FAST002.py:37:5: FAST002 [*] FastAPI dependency without `Annotated`
   |
35 |     some_cookie_param: str = Cookie(),
36 |     some_header_param: int = Header(default=5),
37 |     some_file_param: UploadFile = File(),
   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FAST002
38 |     some_form_param: str = Form(),
39 | ):
   |
   = help: Replace with `Annotated`

ℹ Unsafe fix
12 12 |     Security,
13 13 | )
14 14 | from pydantic import BaseModel
   15 |+from typing import Annotated
15 16 | 
16 17 | app = FastAPI()
17 18 | router = APIRouter()
--------------------------------------------------------------------------------
34 35 |     some_body_param: str = Body("foo"),
35 36 |     some_cookie_param: str = Cookie(),
36 37 |     some_header_param: int = Header(default=5),
37    |-    some_file_param: UploadFile = File(),
   38 |+    some_file_param: Annotated[UploadFile, File()],
38 39 |     some_form_param: str = Form(),
39 40 | ):
40 41 |     # do stuff

FAST002.py:38:5: FAST002 [*] FastAPI dependency without `Annotated`
   |
36 |     some_header_param: int = Header(default=5),
37 |     some_file_param: UploadFile = File(),
38 |     some_form_param: str = Form(),
   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FAST002
39 | ):
40 |     # do stuff
   |
   = help: Replace with `Annotated`

ℹ Unsafe fix
12 12 |     Security,
13 13 | )
14 14 | from pydantic import BaseModel
   15 |+from typing import Annotated
15 16 | 
16 17 | app = FastAPI()
17 18 | router = APIRouter()
--------------------------------------------------------------------------------
35 36 |     some_cookie_param: str = Cookie(),
36 37 |     some_header_param: int = Header(default=5),
37 38 |     some_file_param: UploadFile = File(),
38    |-    some_form_param: str = Form(),
   39 |+    some_form_param: Annotated[str, Form()],
39 40 | ):
40 41 |     # do stuff
41 42 |     pass
@@ -0,0 +1,174 @@
---
source: crates/ruff_linter/src/rules/fastapi/mod.rs
---
FAST001.py:17:22: FAST001 [*] FastAPI route with redundant `response_model` argument
   |
17 | @app.post("/items/", response_model=Item)
   |                      ^^^^^^^^^^^^^^^^^^^ FAST001
18 | async def create_item(item: Item) -> Item:
19 |     return item
   |
   = help: Remove argument

ℹ Unsafe fix
14 14 | # Errors
15 15 | 
16 16 | 
17    |-@app.post("/items/", response_model=Item)
   17 |+@app.post("/items/")
18 18 | async def create_item(item: Item) -> Item:
19 19 |     return item
20 20 | 

FAST001.py:22:22: FAST001 [*] FastAPI route with redundant `response_model` argument
   |
22 | @app.post("/items/", response_model=list[Item])
   |                      ^^^^^^^^^^^^^^^^^^^^^^^^^ FAST001
23 | async def create_item(item: Item) -> list[Item]:
24 |     return item
   |
   = help: Remove argument

ℹ Unsafe fix
19 19 |     return item
20 20 | 
21 21 | 
22    |-@app.post("/items/", response_model=list[Item])
   22 |+@app.post("/items/")
23 23 | async def create_item(item: Item) -> list[Item]:
24 24 |     return item
25 25 | 

FAST001.py:27:22: FAST001 [*] FastAPI route with redundant `response_model` argument
   |
27 | @app.post("/items/", response_model=List[Item])
   |                      ^^^^^^^^^^^^^^^^^^^^^^^^^ FAST001
28 | async def create_item(item: Item) -> List[Item]:
29 |     return item
   |
   = help: Remove argument

ℹ Unsafe fix
24 24 |     return item
25 25 | 
26 26 | 
27    |-@app.post("/items/", response_model=List[Item])
   27 |+@app.post("/items/")
28 28 | async def create_item(item: Item) -> List[Item]:
29 29 |     return item
30 30 | 

FAST001.py:32:22: FAST001 [*] FastAPI route with redundant `response_model` argument
   |
32 | @app.post("/items/", response_model=Dict[str, Item])
   |                      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FAST001
33 | async def create_item(item: Item) -> Dict[str, Item]:
34 |     return item
   |
   = help: Remove argument

ℹ Unsafe fix
29 29 |     return item
30 30 | 
31 31 | 
32    |-@app.post("/items/", response_model=Dict[str, Item])
   32 |+@app.post("/items/")
33 33 | async def create_item(item: Item) -> Dict[str, Item]:
34 34 |     return item
35 35 | 

FAST001.py:37:22: FAST001 [*] FastAPI route with redundant `response_model` argument
   |
37 | @app.post("/items/", response_model=str)
   |                      ^^^^^^^^^^^^^^^^^^ FAST001
38 | async def create_item(item: Item) -> str:
39 |     return item
   |
   = help: Remove argument

ℹ Unsafe fix
34 34 |     return item
35 35 | 
36 36 | 
37    |-@app.post("/items/", response_model=str)
   37 |+@app.post("/items/")
38 38 | async def create_item(item: Item) -> str:
39 39 |     return item
40 40 | 

FAST001.py:42:21: FAST001 [*] FastAPI route with redundant `response_model` argument
   |
42 | @app.get("/items/", response_model=Item)
   |                     ^^^^^^^^^^^^^^^^^^^ FAST001
43 | async def create_item(item: Item) -> Item:
44 |     return item
   |
   = help: Remove argument

ℹ Unsafe fix
39 39 |     return item
40 40 | 
41 41 | 
42    |-@app.get("/items/", response_model=Item)
   42 |+@app.get("/items/")
43 43 | async def create_item(item: Item) -> Item:
44 44 |     return item
45 45 | 

FAST001.py:47:21: FAST001 [*] FastAPI route with redundant `response_model` argument
   |
47 | @app.get("/items/", response_model=Item)
   |                     ^^^^^^^^^^^^^^^^^^^ FAST001
48 | @app.post("/items/", response_model=Item)
49 | async def create_item(item: Item) -> Item:
   |
   = help: Remove argument

ℹ Unsafe fix
44 44 |     return item
45 45 | 
46 46 | 
47    |-@app.get("/items/", response_model=Item)
   47 |+@app.get("/items/")
48 48 | @app.post("/items/", response_model=Item)
49 49 | async def create_item(item: Item) -> Item:
50 50 |     return item

FAST001.py:48:22: FAST001 [*] FastAPI route with redundant `response_model` argument
   |
47 | @app.get("/items/", response_model=Item)
48 | @app.post("/items/", response_model=Item)
   |                      ^^^^^^^^^^^^^^^^^^^ FAST001
49 | async def create_item(item: Item) -> Item:
50 |     return item
   |
   = help: Remove argument

ℹ Unsafe fix
45 45 | 
46 46 | 
47 47 | @app.get("/items/", response_model=Item)
48    |-@app.post("/items/", response_model=Item)
   48 |+@app.post("/items/")
49 49 | async def create_item(item: Item) -> Item:
50 50 |     return item
51 51 | 

FAST001.py:53:24: FAST001 [*] FastAPI route with redundant `response_model` argument
   |
53 | @router.get("/items/", response_model=Item)
   |                        ^^^^^^^^^^^^^^^^^^^ FAST001
54 | async def create_item(item: Item) -> Item:
55 |     return item
   |
   = help: Remove argument

ℹ Unsafe fix
50 50 |     return item
51 51 | 
52 52 | 
53    |-@router.get("/items/", response_model=Item)
   53 |+@router.get("/items/")
54 54 | async def create_item(item: Item) -> Item:
55 55 |     return item
56 56 | 
@@ -34,9 +34,10 @@ use crate::settings::types::PreviewMode;
///     await DONE.wait()
/// ```
///
/// [`asyncio` events]: https://docs.python.org/3/library/asyncio-sync.html#asyncio.Event
/// [`anyio` events]: https://trio.readthedocs.io/en/latest/reference-core.html#trio.Event
/// [`trio` events]: https://anyio.readthedocs.io/en/latest/api.html#anyio.Event
/// ## References
/// - [`asyncio` events](https://docs.python.org/3/library/asyncio-sync.html#asyncio.Event)
/// - [`anyio` events](https://trio.readthedocs.io/en/latest/reference-core.html#trio.Event)
/// - [`trio` events](https://anyio.readthedocs.io/en/latest/api.html#anyio.Event)
#[violation]
pub struct AsyncBusyWait {
    module: AsyncModule,

@@ -37,9 +37,10 @@ use crate::settings::types::PreviewMode;
///     await long_running_task()
/// ```
///
/// [`asyncio` timeouts]: https://docs.python.org/3/library/asyncio-task.html#timeouts
/// [`anyio` timeouts]: https://anyio.readthedocs.io/en/stable/cancellation.html
/// [`trio` timeouts]: https://trio.readthedocs.io/en/stable/reference-core.html#cancellation-and-timeouts
/// ## References
/// - [`asyncio` timeouts](https://docs.python.org/3/library/asyncio-task.html#timeouts)
/// - [`anyio` timeouts](https://anyio.readthedocs.io/en/stable/cancellation.html)
/// - [`trio` timeouts](https://trio.readthedocs.io/en/stable/reference-core.html#cancellation-and-timeouts)
#[violation]
pub struct AsyncFunctionWithTimeout {
    module: AsyncModule,

@@ -32,9 +32,10 @@ use crate::settings::types::PreviewMode;
///     await awaitable()
/// ```
///
/// [`asyncio` timeouts]: https://docs.python.org/3/library/asyncio-task.html#timeouts
/// [`anyio` timeouts]: https://anyio.readthedocs.io/en/stable/cancellation.html
/// [`trio` timeouts]: https://trio.readthedocs.io/en/stable/reference-core.html#cancellation-and-timeouts
/// ## References
/// - [`asyncio` timeouts](https://docs.python.org/3/library/asyncio-task.html#timeouts)
/// - [`anyio` timeouts](https://anyio.readthedocs.io/en/stable/cancellation.html)
/// - [`trio` timeouts](https://trio.readthedocs.io/en/stable/reference-core.html#cancellation-and-timeouts)
#[violation]
pub struct CancelScopeNoCheckpoint {
    method_name: MethodName,
@@ -1,6 +1,5 @@
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::map_starred;
use ruff_python_ast::{self as ast, ExceptHandler, Expr};
use ruff_text_size::Ranged;

@@ -11,6 +10,9 @@ use crate::fix::edits::pad;
/// Checks for single-element tuples in exception handlers (e.g.,
/// `except (ValueError,):`).
///
/// Note: Single-element tuples consisting of a starred expression
/// are allowed.
///
/// ## Why is this bad?
/// A tuple with a single element can be more concisely and idiomatically
/// expressed as a single value.
@@ -69,7 +71,17 @@ pub(crate) fn redundant_tuple_in_exception_handler(
        let [elt] = elts.as_slice() else {
            continue;
        };
        let elt = map_starred(elt);
        // It is not safe to replace a single-element
        // tuple consisting of a starred expression
        // by the unstarred expression because the unstarred
        // expression can be any iterable whereas `except` must
        // be followed by a literal or a tuple. For example:
        // ```python
        // except (*[ValueError,FileNotFoundError],)
        // ```
        if elt.is_starred_expr() {
            continue;
        }
        let mut diagnostic = Diagnostic::new(
            RedundantTupleInExceptionHandler {
                name: checker.generator().expr(elt),
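The comment embedded above is the heart of the `B013` change. As a minimal Python sketch of the new behavior (the `retriable_exceptions` name is borrowed from the test fixture below):

```python
retriable_exceptions = [ValueError, FileNotFoundError]

try:
    ...
except (ValueError,):             # B013: still fixed to `except ValueError:`
    ...
except (*retriable_exceptions,):  # now ignored: the unstarred form
    ...                           # `except retriable_exceptions:` would fail
                                  # at runtime, since a list is not a valid
                                  # exception specification
```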
@@ -22,27 +22,6 @@ B013.py:5:8: B013 [*] A length-one tuple literal is redundant in exception handl
 7  7 | except AttributeError:
 8  8 |     pass

B013.py:11:8: B013 [*] A length-one tuple literal is redundant in exception handlers
   |
 9 | except (ImportError, TypeError):
10 |     pass
11 | except (*retriable_exceptions,):
   |        ^^^^^^^^^^^^^^^^^^^^^^^^ B013
12 |     pass
13 | except(ValueError,):
   |
   = help: Replace with `except retriable_exceptions`

ℹ Safe fix
 8  8 |     pass
 9  9 | except (ImportError, TypeError):
10 10 |     pass
11    |-except (*retriable_exceptions,):
   11 |+except retriable_exceptions:
12 12 |     pass
13 13 | except(ValueError,):
14 14 |     pass

B013.py:13:7: B013 [*] A length-one tuple literal is redundant in exception handlers
   |
11 | except (*retriable_exceptions,):
@@ -60,5 +39,5 @@ B013.py:13:7: B013 [*] A length-one tuple literal is redundant in exception hand
13    |-except(ValueError,):
   13 |+except ValueError:
14 14 |     pass

15 15 |
16 16 | list_exceptions = [FileExistsError, FileNotFoundError]
@@ -3,6 +3,7 @@ use ruff_python_ast::Parameter;
use ruff_diagnostics::Diagnostic;
use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation};
use ruff_python_semantic::analyze::visibility::{is_overload, is_override};
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
@@ -69,6 +70,19 @@ pub(crate) fn builtin_argument_shadowing(checker: &mut Checker, parameter: &Para
        &checker.settings.flake8_builtins.builtins_ignorelist,
        checker.source_type,
    ) {
        // Ignore `@override` and `@overload` decorated functions.
        if checker
            .semantic()
            .current_statement()
            .as_function_def_stmt()
            .is_some_and(|function_def| {
                is_override(&function_def.decorator_list, checker.semantic())
                    || is_overload(&function_def.decorator_list, checker.semantic())
            })
        {
            return;
        }

        checker.diagnostics.push(Diagnostic::new(
            BuiltinArgumentShadowing {
                name: parameter.name.to_string(),
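A hedged sketch of what this new early return means for `A002` in practice (the class names here are illustrative, not from the fixture):

```python
from typing import override


class Base:
    def fetch(self, id): ...  # A002: `id` shadows the `id` builtin


class Child(Base):
    @override
    def fetch(self, id): ...  # no A002: the signature is dictated by `Base`,
                              # so renaming the parameter here is not an option
```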
@@ -43,30 +43,24 @@ A002.py:5:17: A002 Argument `bytes` is shadowing a Python builtin
 6 |     pass
   |

A002.py:8:17: A002 Argument `id` is shadowing a Python builtin
   |
 6 |     pass
 7 |
 8 | async def func3(id, dir):
   |                 ^^ A002
 9 |     pass
   |

A002.py:8:21: A002 Argument `dir` is shadowing a Python builtin
   |
 6 |     pass
 7 |
 8 | async def func3(id, dir):
   |                     ^^^ A002
 9 |     pass
   |

A002.py:11:16: A002 Argument `float` is shadowing a Python builtin
A002.py:9:17: A002 Argument `id` is shadowing a Python builtin
   |
 9 |     pass
10 |
11 | map([], lambda float: ...)
 9 | async def func3(id, dir):
   |                 ^^ A002
10 |     pass
   |

A002.py:9:21: A002 Argument `dir` is shadowing a Python builtin
   |
 9 | async def func3(id, dir):
   |                     ^^^ A002
10 |     pass
   |

A002.py:13:16: A002 Argument `float` is shadowing a Python builtin
   |
13 | map([], lambda float: ...)
   |                ^^^^^ A002
14 |
15 | from typing import override, overload
   |

@@ -43,12 +43,10 @@ A002.py:5:17: A002 Argument `bytes` is shadowing a Python builtin
 6 |     pass
   |

A002.py:11:16: A002 Argument `float` is shadowing a Python builtin
A002.py:13:16: A002 Argument `float` is shadowing a Python builtin
   |
 9 |     pass
10 |
11 | map([], lambda float: ...)
13 | map([], lambda float: ...)
   |                ^^^^^ A002
14 |
15 | from typing import override, overload
   |
@@ -1,10 +1,10 @@
use ruff_python_ast::{self as ast, Expr, Keyword};

use ruff_diagnostics::Violation;
use ruff_diagnostics::{Diagnostic, FixAvailability};
use ruff_diagnostics::{Edit, Fix, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::any_over_expr;
use ruff_text_size::Ranged;
use ruff_text_size::{Ranged, TextSize};

use crate::checkers::ast::Checker;

@@ -112,9 +112,30 @@ pub(crate) fn unnecessary_comprehension_in_call(
    }

    let mut diagnostic = Diagnostic::new(UnnecessaryComprehensionInCall, arg.range());
    diagnostic.try_set_fix(|| {
        fixes::fix_unnecessary_comprehension_in_call(expr, checker.locator(), checker.stylist())
    });

    if args.len() == 1 {
        // If there's only one argument, remove the list or set brackets.
        diagnostic.try_set_fix(|| {
            fixes::fix_unnecessary_comprehension_in_call(expr, checker.locator(), checker.stylist())
        });
    } else {
        // If there are multiple arguments, replace the list or set brackets with parentheses.
        // If a function call has multiple arguments, one of which is a generator, then the
        // generator must be parenthesized.

        // Replace `[` with `(`.
        let collection_start = Edit::replacement(
            "(".to_string(),
            arg.start(),
            arg.start() + TextSize::from(1),
        );

        // Replace `]` with `)`.
        let collection_end =
            Edit::replacement(")".to_string(), arg.end() - TextSize::from(1), arg.end());

        diagnostic.set_fix(Fix::unsafe_edits(collection_start, [collection_end]));
    }
    checker.diagnostics.push(diagnostic);
}
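In Python terms, the branch above distinguishes these two fixes (reusing the `bar` name from the fixture below):

```python
# Single argument: the brackets can simply be dropped.
sum([x.val for x in bar])     # fix: sum(x.val for x in bar)

# Multiple arguments: the brackets become parentheses, because
# `sum(x.val for x in bar, 0)` is a syntax error -- an unparenthesized
# generator is only legal as the sole argument of a call.
sum([x.val for x in bar], 0)  # fix: sum((x.val for x in bar), 0)
```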
@@ -52,7 +52,7 @@ C419_1.py:3:5: C419 [*] Unnecessary list comprehension
 3 |+max(x.val for x in bar)
 4  4 | sum([x.val for x in bar], 0)
 5  5 |
 6  6 | # Ok
 6  6 | # OK

C419_1.py:4:5: C419 [*] Unnecessary list comprehension
   |
@@ -61,7 +61,7 @@ C419_1.py:4:5: C419 [*] Unnecessary list comprehension
 4 | sum([x.val for x in bar], 0)
   |     ^^^^^^^^^^^^^^^^^^^^ C419
 5 |
 6 | # Ok
 6 | # OK
   |
   = help: Remove unnecessary list comprehension

@@ -70,7 +70,37 @@ C419_1.py:4:5: C419 [*] Unnecessary list comprehension
 2  2 | min([x.val for x in bar])
 3  3 | max([x.val for x in bar])
 4    |-sum([x.val for x in bar], 0)
    4 |+sum(x.val for x in bar, 0)
    4 |+sum((x.val for x in bar), 0)
 5  5 |
 6  6 | # Ok
 6  6 | # OK
 7  7 | sum(x.val for x in bar)

C419_1.py:14:5: C419 [*] Unnecessary list comprehension
   |
12 |   # Multi-line
13 |   sum(
14 |       [
   |  _____^
15 | |         delta
16 | |         for delta in timedelta_list
17 | |         if delta
18 | |     ],
   | |_____^ C419
19 |   dt.timedelta(),
20 |   )
   |
   = help: Remove unnecessary list comprehension

ℹ Unsafe fix
11 11 |
12 12 | # Multi-line
13 13 | sum(
14    |-    [
   14 |+    (
15 15 |         delta
16 16 |         for delta in timedelta_list
17 17 |         if delta
18    |-    ],
   18 |+    ),
19 19 |     dt.timedelta(),
20 20 | )
@@ -97,6 +97,7 @@ impl Violation for RawStringInException {
///
/// Which will produce a traceback like:
/// ```console
/// Traceback (most recent call last):
///   File "tmp.py", line 3, in <module>
///     raise RuntimeError(msg)
/// RuntimeError: 'Some value' is incorrect

@@ -1,6 +1,7 @@
#![allow(clippy::useless_format)]
pub mod airflow;
pub mod eradicate;
pub mod fastapi;
pub mod flake8_2020;
pub mod flake8_annotations;
pub mod flake8_async;
@@ -48,6 +49,7 @@ pub mod pandas_vet;
pub mod pep8_naming;
pub mod perflint;
pub mod pycodestyle;
pub mod pydoclint;
pub mod pydocstyle;
pub mod pyflakes;
pub mod pygrep_hooks;
(File diff suppressed because it is too large.)
@@ -570,6 +570,8 @@ NPY201.py:72:5: NPY201 [*] `np.NAN` will be removed in NumPy 2.0. Use `numpy.nan
   |
71 |
72 |     np.NAN
   |     ^^^^^^ NPY201
73 |
74 |     try:
   |
   = help: Replace with `numpy.nan`

@@ -579,3 +581,6 @@ NPY201.py:72:5: NPY201 [*] `np.NAN` will be removed in NumPy 2.0. Use `numpy.nan
71 71 |
72    |-    np.NAN
   72 |+    np.nan
73 73 |
74 74 |     try:
75 75 |         from numpy.lib.npyio import DataSource

@@ -306,30 +306,51 @@ NPY201_2.py:40:5: NPY201 [*] `np.row_stack` will be removed in NumPy 2.0. Use `n
42 42 |     np.alltrue([True, True])
43 43 |

NPY201_2.py:42:5: NPY201 [*] `np.alltrue` will be removed in NumPy 2.0. Use `all` instead.
NPY201_2.py:42:5: NPY201 [*] `np.alltrue` will be removed in NumPy 2.0. Use `numpy.all` instead.
   |
40 |     np.row_stack(([1,2], [3,4]))
41 |
42 |     np.alltrue([True, True])
   |     ^^^^^^^^^^ NPY201
43 |
44 |     np.anytrue([True, False])
44 |     np.sometrue([True, False])
   |
   = help: Replace with `all`
   = help: Replace with `numpy.all`

ℹ Safe fix
39 39 |
40 40 |     np.row_stack(([1,2], [3,4]))
41 41 |
42    |-    np.alltrue([True, True])
   42 |+    all([True, True])
   42 |+    np.all([True, True])
43 43 |
44 44 |     np.anytrue([True, False])
44 44 |     np.sometrue([True, False])
45 45 |

NPY201_2.py:44:5: NPY201 [*] `np.sometrue` will be removed in NumPy 2.0. Use `numpy.any` instead.
   |
42 |     np.alltrue([True, True])
43 |
44 |     np.sometrue([True, False])
   |     ^^^^^^^^^^^ NPY201
45 |
46 |     np.cumproduct([1, 2, 3])
   |
   = help: Replace with `numpy.any`

ℹ Safe fix
41 41 |
42 42 |     np.alltrue([True, True])
43 43 |
44    |-    np.sometrue([True, False])
   44 |+    np.any([True, False])
45 45 |
46 46 |     np.cumproduct([1, 2, 3])
47 47 |

NPY201_2.py:46:5: NPY201 [*] `np.cumproduct` will be removed in NumPy 2.0. Use `numpy.cumprod` instead.
   |
44 |     np.anytrue([True, False])
44 |     np.sometrue([True, False])
45 |
46 |     np.cumproduct([1, 2, 3])
   |     ^^^^^^^^^^^^^ NPY201
@@ -340,7 +361,7 @@ NPY201_2.py:46:5: NPY201 [*] `np.cumproduct` will be removed in NumPy 2.0. Use `

ℹ Safe fix
43 43 |
44 44 |     np.anytrue([True, False])
44 44 |     np.sometrue([True, False])
45 45 |
46    |-    np.cumproduct([1, 2, 3])
   46 |+    np.cumprod([1, 2, 3])
@@ -461,6 +482,7 @@ NPY201_2.py:56:5: NPY201 [*] `np.ComplexWarning` will be removed in NumPy 2.0. U
   57 |+    ComplexWarning
57 58 |
58 59 |     np.compare_chararrays
59 60 |

NPY201_2.py:58:5: NPY201 [*] `np.compare_chararrays` will be removed in NumPy 2.0. Use `numpy.char.compare_chararrays` instead.
   |
@@ -468,6 +490,8 @@ NPY201_2.py:58:5: NPY201 [*] `np.compare_chararrays` will be removed in NumPy 2.
57 |
58 |     np.compare_chararrays
   |     ^^^^^^^^^^^^^^^^^^^^^ NPY201
59 |
60 |     try:
   |
   = help: Replace with `numpy.char.compare_chararrays`

@@ -482,3 +506,47 @@ NPY201_2.py:58:5: NPY201 [*] `np.compare_chararrays` will be removed in NumPy 2.
57 58 |
58    |-    np.compare_chararrays
   59 |+    compare_chararrays
59 60 |
60 61 |     try:
61 62 |         np.all([True, True])

NPY201_2.py:63:9: NPY201 [*] `np.alltrue` will be removed in NumPy 2.0. Use `numpy.all` instead.
   |
61 |         np.all([True, True])
62 |     except TypeError:
63 |         np.alltrue([True, True])  # Should emit a warning here (`except TypeError`, not `except AttributeError`)
   |         ^^^^^^^^^^ NPY201
64 |
65 |     try:
   |
   = help: Replace with `numpy.all`

ℹ Safe fix
60 60 |     try:
61 61 |         np.all([True, True])
62 62 |     except TypeError:
63    |-        np.alltrue([True, True])  # Should emit a warning here (`except TypeError`, not `except AttributeError`)
   63 |+        np.all([True, True])  # Should emit a warning here (`except TypeError`, not `except AttributeError`)
64 64 |
65 65 |     try:
66 66 |         np.anyyyy([True, True])

NPY201_2.py:68:9: NPY201 [*] `np.sometrue` will be removed in NumPy 2.0. Use `numpy.any` instead.
   |
66 |         np.anyyyy([True, True])
67 |     except AttributeError:
68 |         np.sometrue([True, True])  # Should emit a warning here
   |         ^^^^^^^^^^^ NPY201
69 |         # (must have an attribute access of the undeprecated name in the `try` body for it to be ignored)
   |
   = help: Replace with `numpy.any`

ℹ Safe fix
65 65 |     try:
66 66 |         np.anyyyy([True, True])
67 67 |     except AttributeError:
68    |-        np.sometrue([True, True])  # Should emit a warning here
   68 |+        np.any([True, True])  # Should emit a warning here
69 69 |         # (must have an attribute access of the undeprecated name in the `try` body for it to be ignored)
70 70 |
71 71 |     try:
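To summarize the behavior these snapshots pin down (a sketch; the shape of the examples follows the `NPY201_2.py` fixture):

```python
import numpy as np

try:
    np.all([True, True])       # the undeprecated name is probed in the `try` body...
except AttributeError:
    np.alltrue([True, True])   # ...so this fallback for older NumPy is now ignored

try:
    np.all([True, True])
except TypeError:              # not `except AttributeError`, so this is no
    np.alltrue([True, True])   # compatibility shim: NPY201 still fires
```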
@@ -208,9 +208,7 @@ pub(crate) fn invalid_first_argument_name(
        function_type::FunctionType::Method => FunctionType::Method,
        function_type::FunctionType::ClassMethod => FunctionType::ClassMethod,
    };
    if !checker.enabled(function_type.rule())
        || checker.settings.pep8_naming.ignore_names.matches(name)
    {
    if !checker.enabled(function_type.rule()) {
        return;
    }

@@ -225,7 +223,13 @@ pub(crate) fn invalid_first_argument_name(
        return;
    };

    if &self_or_cls.name == function_type.valid_first_argument_name() {
    if &self_or_cls.name == function_type.valid_first_argument_name()
        || checker
            .settings
            .pep8_naming
            .ignore_names
            .matches(&self_or_cls.name)
    {
        return;
    }
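In other words (a sketch, assuming `badAllowed` is listed in `pep8-naming`'s `ignore-names` setting, as in the fixtures below):

```python
class Model:
    # Previously suppressed because the *function* name matched ignore-names;
    # now flagged, since ignore-names is compared against the first argument's
    # name (`this`) rather than the method's name.
    def badAllowed(this):  # N805: first argument of a method should be `self`
        ...
```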
@@ -20,6 +20,25 @@ N804.py:5:27: N804 [*] First argument of a class method should be named `cls`
 7  7 |
 8  8 |     @classmethod

N804.py:9:20: N804 [*] First argument of a class method should be named `cls`
   |
 8 |     @classmethod
 9 |     def badAllowed(self, x, /, other):
   |                    ^^^^ N804
10 |         ...
   |
   = help: Rename `self` to `cls`

ℹ Unsafe fix
 6  6 |         ...
 7  7 |
 8  8 |     @classmethod
 9    |-    def badAllowed(self, x, /, other):
    9 |+    def badAllowed(cls, x, /, other):
10 10 |         ...
11 11 |
12 12 |     @classmethod

N804.py:13:18: N804 [*] First argument of a class method should be named `cls`
   |
12 |     @classmethod
@@ -39,6 +58,25 @@ N804.py:13:18: N804 [*] First argument of a class method should be named `cls`
15 15 |
16 16 |

N804.py:18:20: N804 [*] First argument of a class method should be named `cls`
   |
17 | class MetaClass(ABCMeta):
18 |     def badAllowed(self):
   |                    ^^^^ N804
19 |         pass
   |
   = help: Rename `self` to `cls`

ℹ Unsafe fix
15 15 |
16 16 |
17 17 | class MetaClass(ABCMeta):
18    |-    def badAllowed(self):
   18 |+    def badAllowed(cls):
19 19 |         pass
20 20 |
21 21 |     def stillBad(self):

N804.py:21:18: N804 [*] First argument of a class method should be named `cls`
   |
19 |         pass

@@ -1,6 +1,25 @@
---
source: crates/ruff_linter/src/rules/pep8_naming/mod.rs
---
N805.py:7:20: N805 [*] First argument of a method should be named `self`
   |
 6 | class Class:
 7 |     def badAllowed(this):
   |                    ^^^^ N805
 8 |         pass
   |
   = help: Rename `this` to `self`

ℹ Unsafe fix
 4  4 |
 5  5 |
 6  6 | class Class:
 7    |-    def badAllowed(this):
    7 |+    def badAllowed(self):
 8  8 |         pass
 9  9 |
10 10 |     def stillBad(this):

N805.py:10:18: N805 [*] First argument of a method should be named `self`
   |
 8 |         pass
@@ -21,6 +40,26 @@ N805.py:10:18: N805 [*] First argument of a method should be named `self`
12 12 |
13 13 | if False:

N805.py:15:24: N805 [*] First argument of a method should be named `self`
   |
13 | if False:
14 |
15 |         def badAllowed(this):
   |                        ^^^^ N805
16 |             pass
   |
   = help: Rename `this` to `self`

ℹ Unsafe fix
12 12 |
13 13 | if False:
14 14 |
15    |-        def badAllowed(this):
   15 |+        def badAllowed(self):
16 16 |             pass
17 17 |
18 18 |         def stillBad(this):

N805.py:18:22: N805 [*] First argument of a method should be named `self`
   |
16 |             pass
@@ -41,6 +80,25 @@ N805.py:18:22: N805 [*] First argument of a method should be named `self`
20 20 |
21 21 |     @pydantic.validator

N805.py:22:20: N805 [*] First argument of a method should be named `self`
   |
21 |     @pydantic.validator
22 |     def badAllowed(cls, my_field: str) -> str:
   |                    ^^^ N805
23 |         pass
   |
   = help: Rename `cls` to `self`

ℹ Unsafe fix
19 19 |         pass
20 20 |
21 21 |     @pydantic.validator
22    |-    def badAllowed(cls, my_field: str) -> str:
   22 |+    def badAllowed(self, my_field: str) -> str:
23 23 |         pass
24 24 |
25 25 |     @pydantic.validator

N805.py:26:18: N805 [*] First argument of a method should be named `self`
   |
25 |     @pydantic.validator
@@ -60,6 +118,25 @@ N805.py:26:18: N805 [*] First argument of a method should be named `self`
28 28 |
29 29 |     @pydantic.validator("my_field")

N805.py:30:20: N805 [*] First argument of a method should be named `self`
   |
29 |     @pydantic.validator("my_field")
30 |     def badAllowed(cls, my_field: str) -> str:
   |                    ^^^ N805
31 |         pass
   |
   = help: Rename `cls` to `self`

ℹ Unsafe fix
27 27 |         pass
28 28 |
29 29 |     @pydantic.validator("my_field")
30    |-    def badAllowed(cls, my_field: str) -> str:
   30 |+    def badAllowed(self, my_field: str) -> str:
31 31 |         pass
32 32 |
33 33 |     @pydantic.validator("my_field")

N805.py:34:18: N805 [*] First argument of a method should be named `self`
   |
33 |     @pydantic.validator("my_field")
@@ -79,6 +156,15 @@ N805.py:34:18: N805 [*] First argument of a method should be named `self`
36 36 |
37 37 |     @classmethod

N805.py:55:20: N805 First argument of a method should be named `self`
   |
54 | class PosOnlyClass:
55 |     def badAllowed(this, blah, /, self, something: str):
   |                    ^^^^ N805
56 |         pass
   |
   = help: Rename `this` to `self`

N805.py:58:18: N805 First argument of a method should be named `self`
   |
56 |         pass
crates/ruff_linter/src/rules/pydoclint/mod.rs (new file, 66 lines)
@@ -0,0 +1,66 @@
//! Rules from [pydoclint](https://pypi.org/project/pydoclint/).
pub(crate) mod rules;

#[cfg(test)]
mod tests {
    use std::collections::BTreeSet;
    use std::convert::AsRef;
    use std::path::Path;

    use anyhow::Result;
    use test_case::test_case;

    use crate::registry::Rule;
    use crate::rules::pydocstyle::settings::{Convention, Settings};
    use crate::test::test_path;
    use crate::{assert_messages, settings};

    #[test_case(Rule::DocstringMissingException, Path::new("DOC501.py"))]
    fn rules(rule_code: Rule, path: &Path) -> Result<()> {
        let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
        let diagnostics = test_path(
            Path::new("pydoclint").join(path).as_path(),
            &settings::LinterSettings::for_rule(rule_code),
        )?;
        assert_messages!(snapshot, diagnostics);
        Ok(())
    }

    #[test_case(Rule::DocstringMissingException, Path::new("DOC501_google.py"))]
    #[test_case(Rule::DocstringExtraneousException, Path::new("DOC502_google.py"))]
    fn rules_google_style(rule_code: Rule, path: &Path) -> Result<()> {
        let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
        let diagnostics = test_path(
            Path::new("pydoclint").join(path).as_path(),
            &settings::LinterSettings {
                pydocstyle: Settings {
                    convention: Some(Convention::Google),
                    ignore_decorators: BTreeSet::new(),
                    property_decorators: BTreeSet::new(),
                },
                ..settings::LinterSettings::for_rule(rule_code)
            },
        )?;
        assert_messages!(snapshot, diagnostics);
        Ok(())
    }

    #[test_case(Rule::DocstringMissingException, Path::new("DOC501_numpy.py"))]
    #[test_case(Rule::DocstringExtraneousException, Path::new("DOC502_numpy.py"))]
    fn rules_numpy_style(rule_code: Rule, path: &Path) -> Result<()> {
        let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
        let diagnostics = test_path(
            Path::new("pydoclint").join(path).as_path(),
            &settings::LinterSettings {
                pydocstyle: Settings {
                    convention: Some(Convention::Numpy),
                    ignore_decorators: BTreeSet::new(),
                    property_decorators: BTreeSet::new(),
                },
                ..settings::LinterSettings::for_rule(rule_code)
            },
        )?;
        assert_messages!(snapshot, diagnostics);
        Ok(())
    }
}
crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs (new file, 383 lines)
@@ -0,0 +1,383 @@
use itertools::Itertools;
use ruff_diagnostics::Diagnostic;
use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::name::QualifiedName;
use ruff_python_ast::visitor::{self, Visitor};
use ruff_python_ast::{self as ast, Expr, Stmt};
use ruff_python_semantic::{Definition, MemberKind, SemanticModel};
use ruff_text_size::{Ranged, TextRange};

use crate::checkers::ast::Checker;
use crate::docstrings::sections::{SectionContexts, SectionKind};
use crate::docstrings::styles::SectionStyle;
use crate::registry::Rule;
use crate::rules::pydocstyle::settings::Convention;

/// ## What it does
/// Checks for function docstrings that do not include documentation for all
/// explicitly-raised exceptions.
///
/// ## Why is this bad?
/// If a function raises an exception without documenting it in its docstring,
/// it can be misleading to users and/or a sign of incomplete documentation or
/// refactors.
///
/// ## Example
/// ```python
/// def calculate_speed(distance: float, time: float) -> float:
///     """Calculate speed as distance divided by time.
///
///     Args:
///         distance: Distance traveled.
///         time: Time spent traveling.
///
///     Returns:
///         Speed as distance divided by time.
///     """
///     try:
///         return distance / time
///     except ZeroDivisionError as exc:
///         raise FasterThanLightError from exc
/// ```
///
/// Use instead:
/// ```python
/// def calculate_speed(distance: float, time: float) -> float:
///     """Calculate speed as distance divided by time.
///
///     Args:
///         distance: Distance traveled.
///         time: Time spent traveling.
///
///     Returns:
///         Speed as distance divided by time.
///
///     Raises:
///         FasterThanLightError: If speed is greater than the speed of light.
///     """
///     try:
///         return distance / time
///     except ZeroDivisionError as exc:
///         raise FasterThanLightError from exc
/// ```
#[violation]
pub struct DocstringMissingException {
    id: String,
}

impl Violation for DocstringMissingException {
    #[derive_message_formats]
    fn message(&self) -> String {
        let DocstringMissingException { id } = self;
        format!("Raised exception `{id}` missing from docstring")
    }
}

/// ## What it does
/// Checks for function docstrings that include exceptions which are not
/// explicitly raised.
///
/// ## Why is this bad?
/// Some conventions prefer non-explicit exceptions be omitted from the
/// docstring.
///
/// ## Example
/// ```python
/// def calculate_speed(distance: float, time: float) -> float:
///     """Calculate speed as distance divided by time.
///
///     Args:
///         distance: Distance traveled.
///         time: Time spent traveling.
///
///     Returns:
///         Speed as distance divided by time.
///
///     Raises:
///         ZeroDivisionError: Divided by zero.
///     """
///     return distance / time
/// ```
///
/// Use instead:
/// ```python
/// def calculate_speed(distance: float, time: float) -> float:
///     """Calculate speed as distance divided by time.
///
///     Args:
///         distance: Distance traveled.
///         time: Time spent traveling.
///
///     Returns:
///         Speed as distance divided by time.
///     """
///     return distance / time
/// ```
#[violation]
pub struct DocstringExtraneousException {
    ids: Vec<String>,
}

impl Violation for DocstringExtraneousException {
    #[derive_message_formats]
    fn message(&self) -> String {
        let DocstringExtraneousException { ids } = self;

        if let [id] = ids.as_slice() {
            format!("Raised exception is not explicitly raised: `{id}`")
        } else {
            format!(
                "Raised exceptions are not explicitly raised: {}",
                ids.iter().map(|id| format!("`{id}`")).join(", ")
            )
        }
    }
}

#[derive(Debug)]
struct DocstringEntries<'a> {
    raised_exceptions: Vec<QualifiedName<'a>>,
    raised_exceptions_range: TextRange,
}

impl<'a> DocstringEntries<'a> {
    /// Return the raised exceptions for the docstring, or `None` if the docstring does not contain
    /// a `Raises` section.
    fn from_sections(sections: &'a SectionContexts, style: SectionStyle) -> Option<Self> {
        for section in sections.iter() {
            if section.kind() == SectionKind::Raises {
                return Some(Self {
                    raised_exceptions: parse_entries(section.following_lines_str(), style),
                    raised_exceptions_range: section.range(),
                });
            }
        }
        None
    }
}

impl Ranged for DocstringEntries<'_> {
    fn range(&self) -> TextRange {
        self.raised_exceptions_range
    }
}

/// Parse the entries in a `Raises` section of a docstring.
fn parse_entries(content: &str, style: SectionStyle) -> Vec<QualifiedName> {
    match style {
        SectionStyle::Google => parse_entries_google(content),
        SectionStyle::Numpy => parse_entries_numpy(content),
    }
}

/// Parses Google-style docstring sections of the form:
///
/// ```python
/// Raises:
///     FasterThanLightError: If speed is greater than the speed of light.
///     DivisionByZero: If attempting to divide by zero.
/// ```
fn parse_entries_google(content: &str) -> Vec<QualifiedName> {
    let mut entries: Vec<QualifiedName> = Vec::new();
    for potential in content.lines() {
        let Some(colon_idx) = potential.find(':') else {
            continue;
        };
        let entry = potential[..colon_idx].trim();
        entries.push(QualifiedName::user_defined(entry));
    }
    entries
}

/// Parses NumPy-style docstring sections of the form:
///
/// ```python
/// Raises
/// ------
/// FasterThanLightError
///     If speed is greater than the speed of light.
/// DivisionByZero
///     If attempting to divide by zero.
/// ```
fn parse_entries_numpy(content: &str) -> Vec<QualifiedName> {
    let mut entries: Vec<QualifiedName> = Vec::new();
    let mut lines = content.lines();
    let Some(dashes) = lines.next() else {
        return entries;
    };
    let indentation = &dashes[..dashes.len() - dashes.trim_start().len()];
    for potential in lines {
        if let Some(entry) = potential.strip_prefix(indentation) {
            if let Some(first_char) = entry.chars().next() {
                if !first_char.is_whitespace() {
                    entries.push(QualifiedName::user_defined(entry.trim_end()));
                }
            }
        }
    }
    entries
}

/// An individual exception raised in a function body.
#[derive(Debug)]
struct Entry<'a> {
    qualified_name: QualifiedName<'a>,
    range: TextRange,
}

impl Ranged for Entry<'_> {
    fn range(&self) -> TextRange {
        self.range
    }
}

/// The exceptions raised in a function body.
#[derive(Debug)]
struct BodyEntries<'a> {
    raised_exceptions: Vec<Entry<'a>>,
}

/// An AST visitor to extract the raised exceptions from a function body.
struct BodyVisitor<'a> {
    raised_exceptions: Vec<Entry<'a>>,
    semantic: &'a SemanticModel<'a>,
}

impl<'a> BodyVisitor<'a> {
    fn new(semantic: &'a SemanticModel) -> Self {
        Self {
            raised_exceptions: Vec::new(),
            semantic,
        }
    }

    fn finish(self) -> BodyEntries<'a> {
        BodyEntries {
            raised_exceptions: self.raised_exceptions,
        }
    }
}

impl<'a> Visitor<'a> for BodyVisitor<'a> {
    fn visit_stmt(&mut self, stmt: &'a Stmt) {
        if let Stmt::Raise(ast::StmtRaise { exc: Some(exc), .. }) = stmt {
            if let Some(qualified_name) = extract_raised_exception(self.semantic, exc.as_ref()) {
                self.raised_exceptions.push(Entry {
                    qualified_name,
                    range: exc.as_ref().range(),
                });
            }
        }
        visitor::walk_stmt(self, stmt);
    }
}

fn extract_raised_exception<'a>(
    semantic: &SemanticModel<'a>,
    exc: &'a Expr,
) -> Option<QualifiedName<'a>> {
    if let Some(qualified_name) = semantic.resolve_qualified_name(exc) {
        return Some(qualified_name);
    }
    if let Expr::Call(ast::ExprCall { func, .. }) = exc {
        return extract_raised_exception(semantic, func.as_ref());
    }
    None
}
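A hedged Python sketch of what this extraction covers, using names from the DOC501 fixtures below (`AnotherError` and `something.SomeError` are fixture names, not real exceptions):

```python
def calculate_speed(distance: float, time: float) -> float:
    """Calculate speed as distance divided by time."""
    if time == 0:
        raise AnotherError()       # DOC501: the call is unwrapped to `AnotherError`
    if distance < 0:
        raise something.SomeError  # DOC501: resolved to its last segment, `SomeError`
    raise NotImplementedError      # never reported: skipped by name in the check below
```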
/// DOC501, DOC502
pub(crate) fn check_docstring(
    checker: &mut Checker,
    definition: &Definition,
    section_contexts: &SectionContexts,
    convention: Option<&Convention>,
) {
    let mut diagnostics = Vec::new();
    let Definition::Member(member) = definition else {
        return;
    };

    // Only check function docstrings.
    if matches!(
        member.kind,
        MemberKind::Class(_) | MemberKind::NestedClass(_)
    ) {
        return;
    }

    // Prioritize the specified convention over the determined style.
    let docstring_entries = match convention {
        Some(Convention::Google) => {
            DocstringEntries::from_sections(section_contexts, SectionStyle::Google)
        }
        Some(Convention::Numpy) => {
            DocstringEntries::from_sections(section_contexts, SectionStyle::Numpy)
        }
        _ => DocstringEntries::from_sections(section_contexts, section_contexts.style()),
    };

    let body_entries = {
        let mut visitor = BodyVisitor::new(checker.semantic());
        visitor::walk_body(&mut visitor, member.body());
        visitor.finish()
    };

    // DOC501
    if checker.enabled(Rule::DocstringMissingException) {
        for body_raise in &body_entries.raised_exceptions {
            let Some(name) = body_raise.qualified_name.segments().last() else {
                continue;
            };

            if *name == "NotImplementedError" {
                continue;
            }

            if !docstring_entries.as_ref().is_some_and(|entries| {
                entries.raised_exceptions.iter().any(|exception| {
                    body_raise
                        .qualified_name
                        .segments()
                        .ends_with(exception.segments())
                })
            }) {
                let diagnostic = Diagnostic::new(
                    DocstringMissingException {
                        id: (*name).to_string(),
                    },
                    body_raise.range(),
                );
                diagnostics.push(diagnostic);
            }
        }
    }

    // DOC502
    if checker.enabled(Rule::DocstringExtraneousException) {
        if let Some(docstring_entries) = docstring_entries {
            let mut extraneous_exceptions = Vec::new();
            for docstring_raise in &docstring_entries.raised_exceptions {
                if !body_entries.raised_exceptions.iter().any(|exception| {
                    exception
                        .qualified_name
                        .segments()
                        .ends_with(docstring_raise.segments())
                }) {
                    extraneous_exceptions.push(docstring_raise.to_string());
                }
            }
            if !extraneous_exceptions.is_empty() {
                let diagnostic = Diagnostic::new(
                    DocstringExtraneousException {
                        ids: extraneous_exceptions,
                    },
                    docstring_entries.range(),
                );
                diagnostics.push(diagnostic);
            }
        }
    }

    checker.diagnostics.extend(diagnostics);
}
crates/ruff_linter/src/rules/pydoclint/rules/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
pub(crate) use check_docstring::*;

mod check_docstring;
@@ -0,0 +1,38 @@
---
source: crates/ruff_linter/src/rules/pydoclint/mod.rs
---
DOC502_google.py:16:1: DOC502 Raised exception is not explicitly raised: `FasterThanLightError`
   |
14 |           Speed as distance divided by time.
15 |
16 | /     Raises:
17 | |         FasterThanLightError: If speed is greater than the speed of light.
18 | |     """
   | |____^ DOC502
19 |       return distance / time
   |

DOC502_google.py:33:1: DOC502 Raised exceptions are not explicitly raised: `FasterThanLightError`, `DivisionByZero`
   |
31 |           Speed as distance divided by time.
32 |
33 | /     Raises:
34 | |         FasterThanLightError: If speed is greater than the speed of light.
35 | |         DivisionByZero: Divide by zero.
36 | |     """
   | |____^ DOC502
37 |       return distance / time
   |

DOC502_google.py:51:1: DOC502 Raised exception is not explicitly raised: `DivisionByZero`
   |
49 |           Speed as distance divided by time.
50 |
51 | /     Raises:
52 | |         FasterThanLightError: If speed is greater than the speed of light.
53 | |         DivisionByZero: Divide by zero.
54 | |     """
   | |____^ DOC502
55 |       try:
56 |           return distance / time
   |

@@ -0,0 +1,46 @@
---
source: crates/ruff_linter/src/rules/pydoclint/mod.rs
---
DOC502_numpy.py:22:1: DOC502 Raised exception is not explicitly raised: `FasterThanLightError`
   |
20 |           Speed as distance divided by time.
21 |
22 | /     Raises
23 | |     ------
24 | |     FasterThanLightError
25 | |         If speed is greater than the speed of light.
26 | |     """
   | |____^ DOC502
27 |       return distance / time
   |

DOC502_numpy.py:47:1: DOC502 Raised exceptions are not explicitly raised: `FasterThanLightError`, `DivisionByZero`
   |
45 |           Speed as distance divided by time.
46 |
47 | /     Raises
48 | |     ------
49 | |     FasterThanLightError
50 | |         If speed is greater than the speed of light.
51 | |     DivisionByZero
52 | |         If attempting to divide by zero.
53 | |     """
   | |____^ DOC502
54 |       return distance / time
   |

DOC502_numpy.py:74:1: DOC502 Raised exception is not explicitly raised: `DivisionByZero`
   |
72 |           Speed as distance divided by time.
73 |
74 | /     Raises
75 | |     ------
76 | |     FasterThanLightError
77 | |         If speed is greater than the speed of light.
78 | |     DivisionByZero
79 | |         If attempting to divide by zero.
80 | |     """
   | |____^ DOC502
81 |       try:
82 |           return distance / time
   |

@@ -0,0 +1,4 @@
---
source: crates/ruff_linter/src/rules/pydoclint/mod.rs
---

@@ -0,0 +1,52 @@
---
source: crates/ruff_linter/src/rules/pydoclint/mod.rs
---
DOC501_google.py:46:15: DOC501 Raised exception `FasterThanLightError` missing from docstring
   |
44 |         return distance / time
45 |     except ZeroDivisionError as exc:
46 |         raise FasterThanLightError from exc
   |               ^^^^^^^^^^^^^^^^^^^^ DOC501
   |

DOC501_google.py:63:15: DOC501 Raised exception `FasterThanLightError` missing from docstring
   |
61 |         return distance / time
62 |     except ZeroDivisionError as exc:
63 |         raise FasterThanLightError from exc
   |               ^^^^^^^^^^^^^^^^^^^^ DOC501
64 |     except:
65 |         raise ValueError
   |

DOC501_google.py:65:15: DOC501 Raised exception `ValueError` missing from docstring
   |
63 |         raise FasterThanLightError from exc
64 |     except:
65 |         raise ValueError
   |               ^^^^^^^^^^ DOC501
   |

DOC501_google.py:115:11: DOC501 Raised exception `AnotherError` missing from docstring
    |
113 |         Speed as distance divided by time.
114 |     """
115 |     raise AnotherError
    |           ^^^^^^^^^^^^ DOC501
    |

DOC501_google.py:129:11: DOC501 Raised exception `AnotherError` missing from docstring
    |
127 |         Speed as distance divided by time.
128 |     """
129 |     raise AnotherError()
    |           ^^^^^^^^^^^^^^ DOC501
    |

DOC501_google.py:139:11: DOC501 Raised exception `SomeError` missing from docstring
    |
137 |         bar: Bar.
138 |     """
139 |     raise something.SomeError
    |           ^^^^^^^^^^^^^^^^^^^ DOC501
    |

@@ -0,0 +1,28 @@
---
source: crates/ruff_linter/src/rules/pydoclint/mod.rs
---
DOC501_numpy.py:53:15: DOC501 Raised exception `FasterThanLightError` missing from docstring
   |
51 |         return distance / time
52 |     except ZeroDivisionError as exc:
53 |         raise FasterThanLightError from exc
   |               ^^^^^^^^^^^^^^^^^^^^ DOC501
   |

DOC501_numpy.py:76:15: DOC501 Raised exception `FasterThanLightError` missing from docstring
   |
74 |         return distance / time
75 |     except ZeroDivisionError as exc:
76 |         raise FasterThanLightError from exc
   |               ^^^^^^^^^^^^^^^^^^^^ DOC501
77 |     except:
78 |         raise ValueError
   |

DOC501_numpy.py:78:15: DOC501 Raised exception `ValueError` missing from docstring
   |
76 |         raise FasterThanLightError from exc
77 |     except:
78 |         raise ValueError
   |               ^^^^^^^^^^ DOC501
   |
@@ -5,6 +5,11 @@ use ruff_python_ast::name::QualifiedName;
use ruff_python_semantic::{Definition, SemanticModel};
use ruff_source_file::UniversalNewlines;

use crate::docstrings::sections::{SectionContexts, SectionKind};
use crate::docstrings::styles::SectionStyle;
use crate::docstrings::Docstring;
use crate::rules::pydocstyle::settings::Convention;

/// Return the index of the first logical line in a string.
pub(super) fn logical_line(content: &str) -> Option<usize> {
    // Find the first logical line.
@@ -61,3 +66,59 @@ pub(crate) fn should_ignore_definition(
        })
    })
}

pub(crate) fn get_section_contexts<'a>(
    docstring: &'a Docstring<'a>,
    convention: Option<&'a Convention>,
) -> SectionContexts<'a> {
    match convention {
        Some(Convention::Google) => {
            return SectionContexts::from_docstring(docstring, SectionStyle::Google);
        }
        Some(Convention::Numpy) => {
            return SectionContexts::from_docstring(docstring, SectionStyle::Numpy);
        }
        Some(Convention::Pep257) | None => {
            // There are some overlapping section names, between the Google and NumPy conventions
            // (e.g., "Returns", "Raises"). Break ties by checking for the presence of some of the
            // section names that are unique to each convention.

            // If the docstring contains `Parameters:` or `Other Parameters:`, use the NumPy
            // convention.
            let numpy_sections = SectionContexts::from_docstring(docstring, SectionStyle::Numpy);
            if numpy_sections.iter().any(|context| {
                matches!(
                    context.kind(),
                    SectionKind::Parameters
                        | SectionKind::OtherParams
                        | SectionKind::OtherParameters
                )
            }) {
                return numpy_sections;
            }

            // If the docstring contains any argument specifier, use the Google convention.
            let google_sections = SectionContexts::from_docstring(docstring, SectionStyle::Google);
            if google_sections.iter().any(|context| {
                matches!(
                    context.kind(),
                    SectionKind::Args
                        | SectionKind::Arguments
                        | SectionKind::KeywordArgs
                        | SectionKind::KeywordArguments
                        | SectionKind::OtherArgs
                        | SectionKind::OtherArguments
                )
            }) {
                return google_sections;
            }

            // Otherwise, use whichever convention matched more sections.
            if google_sections.len() > numpy_sections.len() {
                google_sections
            } else {
                numpy_sections
            }
        }
    }
}
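To see the tie-breaking in action, consider a docstring with no configured convention (a sketch; `f` is illustrative):

```python
def f(x):
    """Summary.

    Parameters
    ----------
    x : int

    Raises
    ------
    ValueError
        If `x` is negative.
    """
```

`Raises` exists in both conventions, but a `Parameters` section is unique to NumPy style, so `get_section_contexts` infers NumPy here without any configured convention.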
@@ -1324,67 +1324,16 @@ impl AlwaysFixableViolation for BlankLinesBetweenHeaderAndContent {
pub(crate) fn sections(
    checker: &mut Checker,
    docstring: &Docstring,
    section_contexts: &SectionContexts,
    convention: Option<&Convention>,
) {
    match convention {
        Some(Convention::Google) => {
            parse_google_sections(
                checker,
                docstring,
                &SectionContexts::from_docstring(docstring, SectionStyle::Google),
            );
        }
        Some(Convention::Numpy) => {
            parse_numpy_sections(
                checker,
                docstring,
                &SectionContexts::from_docstring(docstring, SectionStyle::Numpy),
            );
        }
        Some(Convention::Pep257) | None => {
            // There are some overlapping section names, between the Google and NumPy conventions
            // (e.g., "Returns", "Raises"). Break ties by checking for the presence of some of the
            // section names that are unique to each convention.

            // If the docstring contains `Parameters:` or `Other Parameters:`, use the NumPy
            // convention.
            let numpy_sections = SectionContexts::from_docstring(docstring, SectionStyle::Numpy);
            if numpy_sections.iter().any(|context| {
                matches!(
                    context.kind(),
                    SectionKind::Parameters
                        | SectionKind::OtherParams
                        | SectionKind::OtherParameters
                )
            }) {
                parse_numpy_sections(checker, docstring, &numpy_sections);
                return;
            }

            // If the docstring contains any argument specifier, use the Google convention.
            let google_sections = SectionContexts::from_docstring(docstring, SectionStyle::Google);
            if google_sections.iter().any(|context| {
                matches!(
                    context.kind(),
                    SectionKind::Args
                        | SectionKind::Arguments
                        | SectionKind::KeywordArgs
                        | SectionKind::KeywordArguments
                        | SectionKind::OtherArgs
                        | SectionKind::OtherArguments
                )
            }) {
                parse_google_sections(checker, docstring, &google_sections);
                return;
            }

            // Otherwise, use whichever convention matched more sections.
            if google_sections.len() > numpy_sections.len() {
                parse_google_sections(checker, docstring, &google_sections);
            } else {
                parse_numpy_sections(checker, docstring, &numpy_sections);
            }
        }
        Some(Convention::Google) => parse_google_sections(checker, docstring, section_contexts),
        Some(Convention::Numpy) => parse_numpy_sections(checker, docstring, section_contexts),
        Some(Convention::Pep257) | None => match section_contexts.style() {
            SectionStyle::Google => parse_google_sections(checker, docstring, section_contexts),
            SectionStyle::Numpy => parse_numpy_sections(checker, docstring, section_contexts),
        },
    }
}
@@ -224,3 +224,25 @@ class QuerySet(AltersData):

    as_manager.queryset_only = True
    as_manager = classmethod(as_manager)


# Decorators
@decorator
# comment
class Foo1: ...

@decorator
# comment
class Foo2(Foo1): ...

@decorator
# comment
class Foo3[T]: ...

@decorator  # comment
class Foo4: ...

@decorator
# comment
@decorato2
class Foo5: ...
Some files were not shown because too many files have changed in this diff.