Compare commits
30 Commits
zb/msrv...micha/node
- 0a8f644109
- 4bc27133a9
- 7154b64248
- 6d01c487a5
- 6660b11422
- b5c5f710fc
- ee88abf77c
- 78bd73f25a
- 110765154f
- 30ee44770d
- fd69533fe5
- 39c6364545
- 100d765ddf
- 6ea231e458
- c9df4ddf6a
- 948463aafa
- 729fa12575
- f14ee9edd5
- a67630f907
- 5bc81f26c8
- 6908e2682f
- 25c4295564
- 426fa4bb12
- b0b65c24ff
- 08bc6d2589
- f2ae12bab3
- 965f415212
- 83b5bbf004
- 87f6f08ef5
- 59114d0301
13 .github/workflows/ci.yaml (vendored)
@@ -407,20 +407,11 @@ jobs:
run: rustup default "${MSRV}"
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-insta
- name: "Run tests"
- name: "Build tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
MSRV: ${{ steps.msrv.outputs.value }}
run: cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest
run: cargo "+${MSRV}" test --no-run --all-features

cargo-fuzz-build:
name: "cargo fuzz build"

28 CHANGELOG.md
@@ -1,5 +1,33 @@
# Changelog

## 0.12.3

### Preview features

- [`flake8-bugbear`] Support non-context-manager calls in `B017` ([#19063](https://github.com/astral-sh/ruff/pull/19063))
- [`flake8-use-pathlib`] Add autofixes for `PTH100`, `PTH106`, `PTH107`, `PTH108`, `PTH110`, `PTH111`, `PTH112`, `PTH113`, `PTH114`, `PTH115`, `PTH117`, `PTH119`, `PTH120` ([#19213](https://github.com/astral-sh/ruff/pull/19213))
- [`flake8-use-pathlib`] Add autofixes for `PTH203`, `PTH204`, `PTH205` ([#18922](https://github.com/astral-sh/ruff/pull/18922))

### Bug fixes

- [`flake8-return`] Fix false-positive for variables used inside nested functions in `RET504` ([#18433](https://github.com/astral-sh/ruff/pull/18433))
- Treat form feed as valid whitespace before a line continuation ([#19220](https://github.com/astral-sh/ruff/pull/19220))
- [`flake8-type-checking`] Fix syntax error introduced by fix (`TC008`) ([#19150](https://github.com/astral-sh/ruff/pull/19150))
- [`pyupgrade`] Keyword arguments in `super` should suppress the `UP008` fix ([#19131](https://github.com/astral-sh/ruff/pull/19131))

### Documentation

- [`flake8-pyi`] Make example error out-of-the-box (`PYI007`, `PYI008`) ([#19103](https://github.com/astral-sh/ruff/pull/19103))
- [`flake8-simplify`] Make example error out-of-the-box (`SIM116`) ([#19111](https://github.com/astral-sh/ruff/pull/19111))
- [`flake8-type-checking`] Make example error out-of-the-box (`TC001`) ([#19151](https://github.com/astral-sh/ruff/pull/19151))
- [`flake8-use-pathlib`] Make example error out-of-the-box (`PTH210`) ([#19189](https://github.com/astral-sh/ruff/pull/19189))
- [`pycodestyle`] Make example error out-of-the-box (`E272`) ([#19191](https://github.com/astral-sh/ruff/pull/19191))
- [`pycodestyle`] Make example not raise unnecessary `SyntaxError` (`E114`) ([#19190](https://github.com/astral-sh/ruff/pull/19190))
- [`pydoclint`] Make example error out-of-the-box (`DOC501`) ([#19218](https://github.com/astral-sh/ruff/pull/19218))
- [`pylint`, `pyupgrade`] Fix syntax errors in examples (`PLW1501`, `UP028`) ([#19127](https://github.com/astral-sh/ruff/pull/19127))
- [`pylint`] Update `missing-maxsplit-arg` docs and error to suggest proper usage (`PLC0207`) ([#18949](https://github.com/astral-sh/ruff/pull/18949))
- [`flake8-bandit`] Make example error out-of-the-box (`S412`) ([#19241](https://github.com/astral-sh/ruff/pull/19241))

## 0.12.2

### Preview features

11 Cargo.lock (generated)
@@ -2711,7 +2711,7 @@ dependencies = [

[[package]]
name = "ruff"
version = "0.12.2"
version = "0.12.3"
dependencies = [
"anyhow",
"argfile",
@@ -2852,6 +2852,7 @@ dependencies = [
"salsa",
"schemars",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.12",
"tracing",
@@ -2961,7 +2962,7 @@ dependencies = [

[[package]]
name = "ruff_linter"
version = "0.12.2"
version = "0.12.3"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3294,7 +3295,7 @@ dependencies = [

[[package]]
name = "ruff_wasm"
version = "0.12.2"
version = "0.12.3"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -4152,9 +4153,12 @@ version = "0.0.0"
dependencies = [
"bitflags 2.9.1",
"insta",
"regex",
"ruff_db",
"ruff_python_ast",
"ruff_python_parser",
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"salsa",
@@ -4269,6 +4273,7 @@ dependencies = [
"serde",
"serde_json",
"shellexpand",
"thiserror 2.0.12",
"tracing",
"tracing-subscriber",
"ty_ide",

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.2/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.2/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.3/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.2
rev: v0.12.3
hooks:
# Run the linter.
- id: ruff-check

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.2"
version = "0.12.3"
publish = true
authors = { workspace = true }
edition = { workspace = true }

@@ -439,7 +439,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
if cli.statistics {
printer.write_statistics(&diagnostics, &mut summary_writer)?;
} else {
printer.write_once(&diagnostics, &mut summary_writer)?;
printer.write_once(&diagnostics, &mut summary_writer, preview)?;
}

if !cli.exit_zero {

@@ -9,13 +9,14 @@ use itertools::{Itertools, iterate};
use ruff_linter::linter::FixTable;
use serde::Serialize;

use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
use ruff_db::diagnostic::{
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
};
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, RdjsonEmitter, SarifEmitter,
TextEmitter,
Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, JunitEmitter,
PylintEmitter, RdjsonEmitter, SarifEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
@@ -202,6 +203,7 @@ impl Printer {
&self,
diagnostics: &Diagnostics,
writer: &mut dyn Write,
preview: bool,
) -> Result<()> {
if matches!(self.log_level, LogLevel::Silent) {
return Ok(());
@@ -229,13 +231,21 @@ impl Printer {

match self.format {
OutputFormat::Json => {
JsonEmitter.emit(writer, &diagnostics.inner, &context)?;
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Json)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Rdjson => {
RdjsonEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::JsonLines => {
JsonLinesEmitter.emit(writer, &diagnostics.inner, &context)?;
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::JsonLines)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Junit => {
JunitEmitter.emit(writer, &diagnostics.inner, &context)?;
@@ -283,7 +293,11 @@ impl Printer {
PylintEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Azure => {
AzureEmitter.emit(writer, &diagnostics.inner, &context)?;
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Azure)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Sarif => {
SarifEmitter.emit(writer, &diagnostics.inner, &context)?;

@@ -5692,3 +5692,57 @@ class Foo:
"
);
}

#[test_case::test_case("concise")]
#[test_case::test_case("full")]
#[test_case::test_case("json")]
#[test_case::test_case("json-lines")]
#[test_case::test_case("junit")]
#[test_case::test_case("grouped")]
#[test_case::test_case("github")]
#[test_case::test_case("gitlab")]
#[test_case::test_case("pylint")]
#[test_case::test_case("rdjson")]
#[test_case::test_case("azure")]
#[test_case::test_case("sarif")]
fn output_format(output_format: &str) -> Result<()> {
const CONTENT: &str = "\
import os # F401
x = y # F821
match 42: # invalid-syntax
case _: ...
";

let tempdir = TempDir::new()?;
let input = tempdir.path().join("input.py");
fs::write(&input, CONTENT)?;

let snapshot = format!("output_format_{output_format}");

insta::with_settings!({
filters => vec![
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r#""[^"]+\\?/?input.py"#, r#""[TMP]/input.py"#),
(ruff_linter::VERSION, "[VERSION]"),
]
}, {
assert_cmd_snapshot!(
snapshot,
Command::new(get_cargo_bin(BIN_NAME))
.args([
"check",
"--no-cache",
"--output-format",
output_format,
"--select",
"F401,F821",
"--target-version",
"py39",
"input.py",
])
.current_dir(&tempdir),
);
});

Ok(())
}

23 crates/ruff/tests/snapshots/lint__output_format_azure.snap (new file)
@@ -0,0 +1,23 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- azure
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
|
||||
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=2;columnnumber=5;code=F821;]Undefined name `y`
|
||||
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=3;columnnumber=1;]SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
25 crates/ruff/tests/snapshots/lint__output_format_concise.snap (new file)
@@ -0,0 +1,25 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- concise
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
input.py:1:8: F401 [*] `os` imported but unused
|
||||
input.py:2:5: F821 Undefined name `y`
|
||||
input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
Found 3 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
49 crates/ruff/tests/snapshots/lint__output_format_full.snap (new file)
@@ -0,0 +1,49 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- full
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
input.py:1:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
1 | import os # F401
|
||||
| ^^ F401
|
||||
2 | x = y # F821
|
||||
3 | match 42: # invalid-syntax
|
||||
|
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
input.py:2:5: F821 Undefined name `y`
|
||||
|
|
||||
1 | import os # F401
|
||||
2 | x = y # F821
|
||||
| ^ F821
|
||||
3 | match 42: # invalid-syntax
|
||||
4 | case _: ...
|
||||
|
|
||||
|
||||
input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
|
||||
1 | import os # F401
|
||||
2 | x = y # F821
|
||||
3 | match 42: # invalid-syntax
|
||||
| ^^^^^
|
||||
4 | case _: ...
|
||||
|
|
||||
|
||||
Found 3 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
23 crates/ruff/tests/snapshots/lint__output_format_github.snap (new file)
@@ -0,0 +1,23 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- github
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
::error title=Ruff (F401),file=[TMP]/input.py,line=1,col=8,endLine=1,endColumn=10::input.py:1:8: F401 `os` imported but unused
|
||||
::error title=Ruff (F821),file=[TMP]/input.py,line=2,col=5,endLine=2,endColumn=6::input.py:2:5: F821 Undefined name `y`
|
||||
::error title=Ruff,file=[TMP]/input.py,line=3,col=1,endLine=3,endColumn=6::input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
60 crates/ruff/tests/snapshots/lint__output_format_gitlab.snap (new file)
@@ -0,0 +1,60 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- gitlab
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
[
|
||||
{
|
||||
"check_name": "F401",
|
||||
"description": "`os` imported but unused",
|
||||
"fingerprint": "4dbad37161e65c72",
|
||||
"location": {
|
||||
"lines": {
|
||||
"begin": 1,
|
||||
"end": 1
|
||||
},
|
||||
"path": "input.py"
|
||||
},
|
||||
"severity": "major"
|
||||
},
|
||||
{
|
||||
"check_name": "F821",
|
||||
"description": "Undefined name `y`",
|
||||
"fingerprint": "7af59862a085230",
|
||||
"location": {
|
||||
"lines": {
|
||||
"begin": 2,
|
||||
"end": 2
|
||||
},
|
||||
"path": "input.py"
|
||||
},
|
||||
"severity": "major"
|
||||
},
|
||||
{
|
||||
"check_name": "syntax-error",
|
||||
"description": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
|
||||
"fingerprint": "e558cec859bb66e8",
|
||||
"location": {
|
||||
"lines": {
|
||||
"begin": 3,
|
||||
"end": 3
|
||||
},
|
||||
"path": "input.py"
|
||||
},
|
||||
"severity": "major"
|
||||
}
|
||||
]
|
||||
----- stderr -----
|
||||
27 crates/ruff/tests/snapshots/lint__output_format_grouped.snap (new file)
@@ -0,0 +1,27 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- grouped
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
input.py:
|
||||
1:8 F401 [*] `os` imported but unused
|
||||
2:5 F821 Undefined name `y`
|
||||
3:1 SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
Found 3 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
@@ -0,0 +1,23 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- json-lines
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"[TMP]/input.py","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
|
||||
{"cell":null,"code":"F821","end_location":{"column":6,"row":2},"filename":"[TMP]/input.py","fix":null,"location":{"column":5,"row":2},"message":"Undefined name `y`","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/undefined-name"}
|
||||
{"cell":null,"code":null,"end_location":{"column":6,"row":3},"filename":"[TMP]/input.py","fix":null,"location":{"column":1,"row":3},"message":"SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)","noqa_row":null,"url":null}
|
||||
|
||||
----- stderr -----
|
||||
88 crates/ruff/tests/snapshots/lint__output_format_json.snap (new file)
@@ -0,0 +1,88 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- json
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
[
|
||||
{
|
||||
"cell": null,
|
||||
"code": "F401",
|
||||
"end_location": {
|
||||
"column": 10,
|
||||
"row": 1
|
||||
},
|
||||
"filename": "[TMP]/input.py",
|
||||
"fix": {
|
||||
"applicability": "safe",
|
||||
"edits": [
|
||||
{
|
||||
"content": "",
|
||||
"end_location": {
|
||||
"column": 1,
|
||||
"row": 2
|
||||
},
|
||||
"location": {
|
||||
"column": 1,
|
||||
"row": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"message": "Remove unused import: `os`"
|
||||
},
|
||||
"location": {
|
||||
"column": 8,
|
||||
"row": 1
|
||||
},
|
||||
"message": "`os` imported but unused",
|
||||
"noqa_row": 1,
|
||||
"url": "https://docs.astral.sh/ruff/rules/unused-import"
|
||||
},
|
||||
{
|
||||
"cell": null,
|
||||
"code": "F821",
|
||||
"end_location": {
|
||||
"column": 6,
|
||||
"row": 2
|
||||
},
|
||||
"filename": "[TMP]/input.py",
|
||||
"fix": null,
|
||||
"location": {
|
||||
"column": 5,
|
||||
"row": 2
|
||||
},
|
||||
"message": "Undefined name `y`",
|
||||
"noqa_row": 2,
|
||||
"url": "https://docs.astral.sh/ruff/rules/undefined-name"
|
||||
},
|
||||
{
|
||||
"cell": null,
|
||||
"code": null,
|
||||
"end_location": {
|
||||
"column": 6,
|
||||
"row": 3
|
||||
},
|
||||
"filename": "[TMP]/input.py",
|
||||
"fix": null,
|
||||
"location": {
|
||||
"column": 1,
|
||||
"row": 3
|
||||
},
|
||||
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
|
||||
"noqa_row": null,
|
||||
"url": null
|
||||
}
|
||||
]
|
||||
----- stderr -----
|
||||
34 crates/ruff/tests/snapshots/lint__output_format_junit.snap (new file)
@@ -0,0 +1,34 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- junit
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites name="ruff" tests="3" failures="3" errors="0">
|
||||
<testsuite name="[TMP]/input.py" tests="3" disabled="0" errors="0" failures="3" package="org.ruff">
|
||||
<testcase name="org.ruff.F401" classname="[TMP]/input" line="1" column="8">
|
||||
<failure message="`os` imported but unused">line 1, col 8, `os` imported but unused</failure>
|
||||
</testcase>
|
||||
<testcase name="org.ruff.F821" classname="[TMP]/input" line="2" column="5">
|
||||
<failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
|
||||
</testcase>
|
||||
<testcase name="org.ruff" classname="[TMP]/input" line="3" column="1">
|
||||
<failure message="SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
|
||||
----- stderr -----
|
||||
23 crates/ruff/tests/snapshots/lint__output_format_pylint.snap (new file)
@@ -0,0 +1,23 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- pylint
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
input.py:1: [F401] `os` imported but unused
|
||||
input.py:2: [F821] Undefined name `y`
|
||||
input.py:3: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
103 crates/ruff/tests/snapshots/lint__output_format_rdjson.snap (new file)
@@ -0,0 +1,103 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- rdjson
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
{
|
||||
"diagnostics": [
|
||||
{
|
||||
"code": {
|
||||
"url": "https://docs.astral.sh/ruff/rules/unused-import",
|
||||
"value": "F401"
|
||||
},
|
||||
"location": {
|
||||
"path": "[TMP]/input.py",
|
||||
"range": {
|
||||
"end": {
|
||||
"column": 10,
|
||||
"line": 1
|
||||
},
|
||||
"start": {
|
||||
"column": 8,
|
||||
"line": 1
|
||||
}
|
||||
}
|
||||
},
|
||||
"message": "`os` imported but unused",
|
||||
"suggestions": [
|
||||
{
|
||||
"range": {
|
||||
"end": {
|
||||
"column": 1,
|
||||
"line": 2
|
||||
},
|
||||
"start": {
|
||||
"column": 1,
|
||||
"line": 1
|
||||
}
|
||||
},
|
||||
"text": ""
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"code": {
|
||||
"url": "https://docs.astral.sh/ruff/rules/undefined-name",
|
||||
"value": "F821"
|
||||
},
|
||||
"location": {
|
||||
"path": "[TMP]/input.py",
|
||||
"range": {
|
||||
"end": {
|
||||
"column": 6,
|
||||
"line": 2
|
||||
},
|
||||
"start": {
|
||||
"column": 5,
|
||||
"line": 2
|
||||
}
|
||||
}
|
||||
},
|
||||
"message": "Undefined name `y`"
|
||||
},
|
||||
{
|
||||
"code": {
|
||||
"url": null,
|
||||
"value": null
|
||||
},
|
||||
"location": {
|
||||
"path": "[TMP]/input.py",
|
||||
"range": {
|
||||
"end": {
|
||||
"column": 6,
|
||||
"line": 3
|
||||
},
|
||||
"start": {
|
||||
"column": 1,
|
||||
"line": 3
|
||||
}
|
||||
}
|
||||
},
|
||||
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
}
|
||||
],
|
||||
"severity": "warning",
|
||||
"source": {
|
||||
"name": "ruff",
|
||||
"url": "https://docs.astral.sh/ruff"
|
||||
}
|
||||
}
|
||||
----- stderr -----
|
||||
142 crates/ruff/tests/snapshots/lint__output_format_sarif.snap (new file)
@@ -0,0 +1,142 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- sarif
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/sarif-2.1.0.json",
|
||||
"runs": [
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"level": "error",
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"uri": "[TMP]/input.py"
|
||||
},
|
||||
"region": {
|
||||
"endColumn": 10,
|
||||
"endLine": 1,
|
||||
"startColumn": 8,
|
||||
"startLine": 1
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"message": {
|
||||
"text": "`os` imported but unused"
|
||||
},
|
||||
"ruleId": "F401"
|
||||
},
|
||||
{
|
||||
"level": "error",
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"uri": "[TMP]/input.py"
|
||||
},
|
||||
"region": {
|
||||
"endColumn": 6,
|
||||
"endLine": 2,
|
||||
"startColumn": 5,
|
||||
"startLine": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"message": {
|
||||
"text": "Undefined name `y`"
|
||||
},
|
||||
"ruleId": "F821"
|
||||
},
|
||||
{
|
||||
"level": "error",
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"uri": "[TMP]/input.py"
|
||||
},
|
||||
"region": {
|
||||
"endColumn": 6,
|
||||
"endLine": 3,
|
||||
"startColumn": 1,
|
||||
"startLine": 3
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"message": {
|
||||
"text": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
},
|
||||
"ruleId": null
|
||||
}
|
||||
],
|
||||
"tool": {
|
||||
"driver": {
|
||||
"informationUri": "https://github.com/astral-sh/ruff",
|
||||
"name": "ruff",
|
||||
"rules": [
|
||||
{
|
||||
"fullDescription": {
|
||||
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/source/libraries.html#library-interface-public-and-private-symbols)\n"
|
||||
},
|
||||
"help": {
|
||||
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
|
||||
},
|
||||
"helpUri": "https://docs.astral.sh/ruff/rules/unused-import",
|
||||
"id": "F401",
|
||||
"properties": {
|
||||
"id": "F401",
|
||||
"kind": "Pyflakes",
|
||||
"name": "unused-import",
|
||||
"problem.severity": "error"
|
||||
},
|
||||
"shortDescription": {
|
||||
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
|
||||
}
|
||||
},
|
||||
{
|
||||
"fullDescription": {
|
||||
"text": "## What it does\nChecks for uses of undefined names.\n\n## Why is this bad?\nAn undefined name is likely to raise `NameError` at runtime.\n\n## Example\n```python\ndef double():\n return n * 2 # raises `NameError` if `n` is undefined when `double` is called\n```\n\nUse instead:\n```python\ndef double(n):\n return n * 2\n```\n\n## Options\n- [`target-version`]: Can be used to configure which symbols Ruff will understand\n as being available in the `builtins` namespace.\n\n## References\n- [Python documentation: Naming and binding](https://docs.python.org/3/reference/executionmodel.html#naming-and-binding)\n"
|
||||
},
|
||||
"help": {
|
||||
"text": "Undefined name `{name}`. {tip}"
|
||||
},
|
||||
"helpUri": "https://docs.astral.sh/ruff/rules/undefined-name",
|
||||
"id": "F821",
|
||||
"properties": {
|
||||
"id": "F821",
|
||||
"kind": "Pyflakes",
|
||||
"name": "undefined-name",
|
||||
"problem.severity": "error"
|
||||
},
|
||||
"shortDescription": {
|
||||
"text": "Undefined name `{name}`. {tip}"
|
||||
}
|
||||
}
|
||||
],
|
||||
"version": "[VERSION]"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"version": "2.1.0"
|
||||
}
|
||||
----- stderr -----
|
||||
@@ -38,6 +38,7 @@ rustc-hash = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
schemars = { workspace = true, optional = true }
|
||||
serde = { workspace = true, optional = true }
|
||||
serde_json = { workspace = true, optional = true }
|
||||
thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true, optional = true }
|
||||
@@ -56,6 +57,6 @@ tempfile = { workspace = true }
|
||||
[features]
|
||||
cache = ["ruff_cache"]
|
||||
os = ["ignore", "dep:etcetera"]
|
||||
serde = ["dep:serde", "camino/serde1"]
|
||||
serde = ["camino/serde1", "dep:serde", "dep:serde_json", "ruff_diagnostics/serde"]
|
||||
# Exposes testing utilities.
|
||||
testing = ["tracing-subscriber"]
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
use std::{fmt::Formatter, sync::Arc};
|
||||
|
||||
use render::{FileResolver, Input};
|
||||
use ruff_diagnostics::Fix;
|
||||
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
|
||||
|
||||
use ruff_annotate_snippets::Level as AnnotateLevel;
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
|
||||
pub use self::render::DisplayDiagnostic;
|
||||
pub use self::render::{DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input};
|
||||
use crate::{Db, files::File};
|
||||
|
||||
mod render;
|
||||
@@ -380,7 +379,7 @@ impl Diagnostic {
|
||||
}
|
||||
|
||||
/// Returns the URL for the rule documentation, if it exists.
|
||||
pub fn to_url(&self) -> Option<String> {
|
||||
pub fn to_ruff_url(&self) -> Option<String> {
|
||||
if self.is_invalid_syntax() {
|
||||
None
|
||||
} else {
|
||||
@@ -432,8 +431,9 @@ impl Diagnostic {
|
||||
/// Returns the [`SourceFile`] which the message belongs to.
|
||||
///
|
||||
/// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
|
||||
pub fn expect_ruff_source_file(&self) -> SourceFile {
|
||||
self.expect_primary_span().expect_ruff_file().clone()
|
||||
pub fn expect_ruff_source_file(&self) -> &SourceFile {
|
||||
self.ruff_source_file()
|
||||
.expect("Expected a ruff source file")
|
||||
}
|
||||
|
||||
/// Returns the [`TextRange`] for the diagnostic.
|
||||
@@ -1174,6 +1174,12 @@ pub struct DisplayDiagnosticConfig {
|
||||
/// here for now as the most "sensible" place for it to live until
|
||||
/// we had more concrete use cases. ---AG
|
||||
context: usize,
|
||||
/// Whether to use preview formatting for Ruff diagnostics.
|
||||
#[allow(
|
||||
dead_code,
|
||||
reason = "This is currently only used for JSON but will be needed soon for other formats"
|
||||
)]
|
||||
preview: bool,
|
||||
}
|
||||
|
||||
impl DisplayDiagnosticConfig {
|
||||
@@ -1194,6 +1200,14 @@ impl DisplayDiagnosticConfig {
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether to enable preview behavior or not.
|
||||
pub fn preview(self, yes: bool) -> DisplayDiagnosticConfig {
|
||||
DisplayDiagnosticConfig {
|
||||
preview: yes,
|
||||
..self
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DisplayDiagnosticConfig {
|
||||
@@ -1202,6 +1216,7 @@ impl Default for DisplayDiagnosticConfig {
|
||||
format: DiagnosticFormat::default(),
|
||||
color: false,
|
||||
context: 2,
|
||||
preview: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1229,6 +1244,21 @@ pub enum DiagnosticFormat {
|
||||
///
|
||||
/// This may use color when printing to a `tty`.
|
||||
Concise,
|
||||
/// Print diagnostics in the [Azure Pipelines] format.
|
||||
///
|
||||
/// [Azure Pipelines]: https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning
|
||||
Azure,
|
||||
/// Print diagnostics in JSON format.
|
||||
///
|
||||
/// Unlike `json-lines`, this prints all of the diagnostics as a JSON array.
|
||||
#[cfg(feature = "serde")]
|
||||
Json,
|
||||
/// Print diagnostics in JSON format, one per line.
|
||||
///
|
||||
/// This will print each diagnostic as a separate JSON object on its own line. See the `json`
|
||||
/// format for an array of all diagnostics. See <https://jsonlines.org/> for more details.
|
||||
#[cfg(feature = "serde")]
|
||||
JsonLines,
|
||||
}
|
||||
|
||||
/// A representation of the kinds of messages inside a diagnostic.
|
||||
|
||||
@@ -4,6 +4,7 @@ use ruff_annotate_snippets::{
|
||||
Annotation as AnnotateAnnotation, Level as AnnotateLevel, Message as AnnotateMessage,
|
||||
Renderer as AnnotateRenderer, Snippet as AnnotateSnippet,
|
||||
};
|
||||
use ruff_notebook::{Notebook, NotebookIndex};
|
||||
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
@@ -17,9 +18,17 @@ use crate::{
|
||||
|
||||
use super::{
|
||||
Annotation, Diagnostic, DiagnosticFormat, DiagnosticSource, DisplayDiagnosticConfig, Severity,
|
||||
SubDiagnostic,
|
||||
SubDiagnostic, UnifiedFile,
|
||||
};
|
||||
|
||||
use azure::AzureRenderer;
|
||||
|
||||
mod azure;
|
||||
#[cfg(feature = "serde")]
|
||||
mod json;
|
||||
#[cfg(feature = "serde")]
|
||||
mod json_lines;
|
||||
|
||||
/// A type that implements `std::fmt::Display` for diagnostic rendering.
|
||||
///
|
||||
/// It is created via [`Diagnostic::display`].
|
||||
@@ -34,7 +43,6 @@ use super::{
|
||||
pub struct DisplayDiagnostic<'a> {
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
resolver: &'a dyn FileResolver,
|
||||
annotate_renderer: AnnotateRenderer,
|
||||
diag: &'a Diagnostic,
|
||||
}
|
||||
|
||||
@@ -44,16 +52,9 @@ impl<'a> DisplayDiagnostic<'a> {
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
diag: &'a Diagnostic,
|
||||
) -> DisplayDiagnostic<'a> {
|
||||
let annotate_renderer = if config.color {
|
||||
AnnotateRenderer::styled()
|
||||
} else {
|
||||
AnnotateRenderer::plain()
|
||||
};
|
||||
|
||||
DisplayDiagnostic {
|
||||
config,
|
||||
resolver,
|
||||
annotate_renderer,
|
||||
diag,
|
||||
}
|
||||
}
|
||||
@@ -61,68 +62,131 @@ impl<'a> DisplayDiagnostic<'a> {
|
||||
|
||||
impl std::fmt::Display for DisplayDiagnostic<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
let stylesheet = if self.config.color {
|
||||
DiagnosticStylesheet::styled()
|
||||
} else {
|
||||
DiagnosticStylesheet::plain()
|
||||
};
|
||||
DisplayDiagnostics::new(self.resolver, self.config, std::slice::from_ref(self.diag)).fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
if matches!(self.config.format, DiagnosticFormat::Concise) {
|
||||
let (severity, severity_style) = match self.diag.severity() {
|
||||
Severity::Info => ("info", stylesheet.info),
|
||||
Severity::Warning => ("warning", stylesheet.warning),
|
||||
Severity::Error => ("error", stylesheet.error),
|
||||
Severity::Fatal => ("fatal", stylesheet.error),
|
||||
};
|
||||
/// A type that implements `std::fmt::Display` for rendering a collection of diagnostics.
|
||||
///
|
||||
/// It is intended for collections of diagnostics that need to be serialized together, as is the
|
||||
/// case for JSON, for example.
|
||||
///
|
||||
/// See [`DisplayDiagnostic`] for rendering individual `Diagnostic`s and details about the lifetime
|
||||
/// constraints.
|
||||
pub struct DisplayDiagnostics<'a> {
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
resolver: &'a dyn FileResolver,
|
||||
diagnostics: &'a [Diagnostic],
|
||||
}
|
||||
|
||||
write!(
|
||||
f,
|
||||
"{severity}[{id}]",
|
||||
severity = fmt_styled(severity, severity_style),
|
||||
id = fmt_styled(self.diag.id(), stylesheet.emphasis)
|
||||
)?;
|
||||
impl<'a> DisplayDiagnostics<'a> {
|
||||
pub fn new(
|
||||
resolver: &'a dyn FileResolver,
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
diagnostics: &'a [Diagnostic],
|
||||
) -> DisplayDiagnostics<'a> {
|
||||
DisplayDiagnostics {
|
||||
config,
|
||||
resolver,
|
||||
diagnostics,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(span) = self.diag.primary_span() {
|
||||
write!(
|
||||
f,
|
||||
" {path}",
|
||||
path = fmt_styled(span.file().path(self.resolver), stylesheet.emphasis)
|
||||
)?;
|
||||
if let Some(range) = span.range() {
|
||||
let diagnostic_source = span.file().diagnostic_source(self.resolver);
|
||||
let start = diagnostic_source
|
||||
.as_source_code()
|
||||
.line_column(range.start());
|
||||
impl std::fmt::Display for DisplayDiagnostics<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
match self.config.format {
|
||||
DiagnosticFormat::Concise => {
|
||||
let stylesheet = if self.config.color {
|
||||
DiagnosticStylesheet::styled()
|
||||
} else {
|
||||
DiagnosticStylesheet::plain()
|
||||
};
|
||||
|
||||
for diag in self.diagnostics {
|
||||
let (severity, severity_style) = match diag.severity() {
|
||||
Severity::Info => ("info", stylesheet.info),
|
||||
Severity::Warning => ("warning", stylesheet.warning),
|
||||
Severity::Error => ("error", stylesheet.error),
|
||||
Severity::Fatal => ("fatal", stylesheet.error),
|
||||
};
|
||||
write!(
|
||||
f,
|
||||
":{line}:{col}",
|
||||
line = fmt_styled(start.line, stylesheet.emphasis),
|
||||
col = fmt_styled(start.column, stylesheet.emphasis),
|
||||
"{severity}[{id}]",
|
||||
severity = fmt_styled(severity, severity_style),
|
||||
id = fmt_styled(diag.id(), stylesheet.emphasis)
|
||||
)?;
|
||||
if let Some(span) = diag.primary_span() {
|
||||
write!(
|
||||
f,
|
||||
" {path}",
|
||||
path = fmt_styled(span.file().path(self.resolver), stylesheet.emphasis)
|
||||
)?;
|
||||
if let Some(range) = span.range() {
|
||||
let diagnostic_source = span.file().diagnostic_source(self.resolver);
|
||||
let start = diagnostic_source
|
||||
.as_source_code()
|
||||
.line_column(range.start());
|
||||
|
||||
write!(
|
||||
f,
|
||||
":{line}:{col}",
|
||||
line = fmt_styled(start.line, stylesheet.emphasis),
|
||||
col = fmt_styled(start.column, stylesheet.emphasis),
|
||||
)?;
|
||||
}
|
||||
write!(f, ":")?;
|
||||
}
|
||||
writeln!(f, " {message}", message = diag.concise_message())?;
|
||||
}
|
||||
write!(f, ":")?;
|
||||
}
|
||||
return writeln!(f, " {message}", message = self.diag.concise_message());
|
||||
DiagnosticFormat::Full => {
|
||||
let stylesheet = if self.config.color {
|
||||
DiagnosticStylesheet::styled()
|
||||
} else {
|
||||
DiagnosticStylesheet::plain()
|
||||
};
|
||||
|
||||
let mut renderer = if self.config.color {
|
||||
AnnotateRenderer::styled()
|
||||
} else {
|
||||
AnnotateRenderer::plain()
|
||||
};
|
||||
|
||||
renderer = renderer
|
||||
.error(stylesheet.error)
|
||||
.warning(stylesheet.warning)
|
||||
.info(stylesheet.info)
|
||||
.note(stylesheet.note)
|
||||
.help(stylesheet.help)
|
||||
.line_no(stylesheet.line_no)
|
||||
.emphasis(stylesheet.emphasis)
|
||||
.none(stylesheet.none);
|
||||
|
||||
for diag in self.diagnostics {
|
||||
let resolved = Resolved::new(self.resolver, diag);
|
||||
let renderable = resolved.to_renderable(self.config.context);
|
||||
for diag in renderable.diagnostics.iter() {
|
||||
writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
|
||||
}
|
||||
writeln!(f)?;
|
||||
}
|
||||
}
|
||||
DiagnosticFormat::Azure => {
|
||||
AzureRenderer::new(self.resolver).render(f, self.diagnostics)?;
|
||||
}
|
||||
#[cfg(feature = "serde")]
|
||||
DiagnosticFormat::Json => {
|
||||
json::JsonRenderer::new(self.resolver, self.config).render(f, self.diagnostics)?;
|
||||
}
|
||||
#[cfg(feature = "serde")]
|
||||
DiagnosticFormat::JsonLines => {
|
||||
json_lines::JsonLinesRenderer::new(self.resolver, self.config)
|
||||
.render(f, self.diagnostics)?;
|
||||
}
|
||||
}
|
||||
|
||||
let mut renderer = self.annotate_renderer.clone();
|
||||
renderer = renderer
|
||||
.error(stylesheet.error)
|
||||
.warning(stylesheet.warning)
|
||||
.info(stylesheet.info)
|
||||
.note(stylesheet.note)
|
||||
.help(stylesheet.help)
|
||||
.line_no(stylesheet.line_no)
|
||||
.emphasis(stylesheet.emphasis)
|
||||
.none(stylesheet.none);
|
||||
|
||||
let resolved = Resolved::new(self.resolver, self.diag);
|
||||
let renderable = resolved.to_renderable(self.config.context);
|
||||
for diag in renderable.diagnostics.iter() {
|
||||
writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
|
||||
}
|
||||
writeln!(f)
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -635,6 +699,12 @@ pub trait FileResolver {
|
||||
|
||||
/// Returns the input contents associated with the file given.
|
||||
fn input(&self, file: File) -> Input;
|
||||
|
||||
/// Returns the [`NotebookIndex`] associated with the file given, if it's a Jupyter notebook.
|
||||
fn notebook_index(&self, file: &UnifiedFile) -> Option<NotebookIndex>;
|
||||
|
||||
/// Returns whether the file given is a Jupyter notebook.
|
||||
fn is_notebook(&self, file: &UnifiedFile) -> bool;
|
||||
}
|
||||
|
||||
impl<T> FileResolver for T
|
||||
@@ -651,6 +721,25 @@ where
|
||||
line_index: line_index(self, file),
|
||||
}
|
||||
}
|
||||
|
||||
fn notebook_index(&self, file: &UnifiedFile) -> Option<NotebookIndex> {
|
||||
match file {
|
||||
UnifiedFile::Ty(file) => self
|
||||
.input(*file)
|
||||
.text
|
||||
.as_notebook()
|
||||
.map(Notebook::index)
|
||||
.cloned(),
|
||||
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_notebook(&self, file: &UnifiedFile) -> bool {
|
||||
match file {
|
||||
UnifiedFile::Ty(file) => self.input(*file).text.as_notebook().is_some(),
|
||||
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FileResolver for &dyn Db {
|
||||
@@ -664,6 +753,25 @@ impl FileResolver for &dyn Db {
|
||||
line_index: line_index(*self, file),
|
||||
}
|
||||
}
|
||||
|
||||
fn notebook_index(&self, file: &UnifiedFile) -> Option<NotebookIndex> {
|
||||
match file {
|
||||
UnifiedFile::Ty(file) => self
|
||||
.input(*file)
|
||||
.text
|
||||
.as_notebook()
|
||||
.map(Notebook::index)
|
||||
.cloned(),
|
||||
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_notebook(&self, file: &UnifiedFile) -> bool {
|
||||
match file {
|
||||
UnifiedFile::Ty(file) => self.input(*file).text.as_notebook().is_some(),
|
||||
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An abstraction over a unit of user input.
|
||||
@@ -724,7 +832,9 @@ fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::diagnostic::{Annotation, DiagnosticId, Severity, Span};
|
||||
use ruff_diagnostics::{Edit, Fix};
|
||||
|
||||
use crate::diagnostic::{Annotation, DiagnosticId, SecondaryCode, Severity, Span};
|
||||
use crate::files::system_path_to_file;
|
||||
use crate::system::{DbWithWritableSystem, SystemPath};
|
||||
use crate::tests::TestDb;
|
||||
@@ -2121,7 +2231,7 @@ watermelon
|
||||
|
||||
/// A small harness for setting up an environment specifically for testing
|
||||
/// diagnostic rendering.
|
||||
struct TestEnvironment {
|
||||
pub(super) struct TestEnvironment {
|
||||
db: TestDb,
|
||||
config: DisplayDiagnosticConfig,
|
||||
}
|
||||
@@ -2130,7 +2240,7 @@ watermelon
|
||||
/// Create a new test harness.
|
||||
///
|
||||
/// This uses the default diagnostic rendering configuration.
|
||||
fn new() -> TestEnvironment {
|
||||
pub(super) fn new() -> TestEnvironment {
|
||||
TestEnvironment {
|
||||
db: TestDb::new(),
|
||||
config: DisplayDiagnosticConfig::default(),
|
||||
@@ -2149,8 +2259,26 @@ watermelon
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// Set the output format to use in diagnostic rendering.
|
||||
pub(super) fn format(&mut self, format: DiagnosticFormat) {
|
||||
let mut config = std::mem::take(&mut self.config);
|
||||
config = config.format(format);
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// Enable preview functionality for diagnostic rendering.
|
||||
#[allow(
|
||||
dead_code,
|
||||
reason = "This is currently only used for JSON but will be needed soon for other formats"
|
||||
)]
|
||||
pub(super) fn preview(&mut self, yes: bool) {
|
||||
let mut config = std::mem::take(&mut self.config);
|
||||
config = config.preview(yes);
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// Add a file with the given path and contents to this environment.
|
||||
fn add(&mut self, path: &str, contents: &str) {
|
||||
pub(super) fn add(&mut self, path: &str, contents: &str) {
|
||||
let path = SystemPath::new(path);
|
||||
self.db.write_file(path, contents).unwrap();
|
||||
}
|
||||
@@ -2200,7 +2328,7 @@ watermelon
|
||||
/// A convenience function for returning a builder for a diagnostic
|
||||
/// with "error" severity and canned values for its identifier
|
||||
/// and message.
|
||||
fn err(&mut self) -> DiagnosticBuilder<'_> {
|
||||
pub(super) fn err(&mut self) -> DiagnosticBuilder<'_> {
|
||||
self.builder(
|
||||
"test-diagnostic",
|
||||
Severity::Error,
|
||||
@@ -2226,6 +2354,12 @@ watermelon
|
||||
DiagnosticBuilder { env: self, diag }
|
||||
}
|
||||
|
||||
/// A convenience function for returning a builder for an invalid syntax diagnostic.
|
||||
fn invalid_syntax(&mut self, message: &str) -> DiagnosticBuilder<'_> {
|
||||
let diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, message);
|
||||
DiagnosticBuilder { env: self, diag }
|
||||
}
|
||||
|
||||
/// Returns a builder for tersely constructing sub-diagnostics.
|
||||
fn sub_builder(&mut self, severity: Severity, message: &str) -> SubDiagnosticBuilder<'_> {
|
||||
let subdiag = SubDiagnostic::new(severity, message);
|
||||
@@ -2235,9 +2369,18 @@ watermelon
|
||||
/// Render the given diagnostic into a `String`.
|
||||
///
|
||||
/// (This will set the "printed" flag on `Diagnostic`.)
|
||||
fn render(&self, diag: &Diagnostic) -> String {
|
||||
pub(super) fn render(&self, diag: &Diagnostic) -> String {
|
||||
diag.display(&self.db, &self.config).to_string()
|
||||
}
|
||||
|
||||
/// Render the given diagnostics into a `String`.
|
||||
///
|
||||
/// See `render` for rendering a single diagnostic.
|
||||
///
|
||||
/// (This will set the "printed" flag on `Diagnostic`.)
|
||||
pub(super) fn render_diagnostics(&self, diagnostics: &[Diagnostic]) -> String {
|
||||
DisplayDiagnostics::new(&self.db, &self.config, diagnostics).to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// A helper builder for tersely populating a `Diagnostic`.
|
||||
@@ -2246,14 +2389,14 @@ watermelon
|
||||
/// supported by this builder, and this only needs to be done
|
||||
/// infrequently, consider doing it more verbosely on `diag`
|
||||
/// itself.
|
||||
struct DiagnosticBuilder<'e> {
|
||||
pub(super) struct DiagnosticBuilder<'e> {
|
||||
env: &'e mut TestEnvironment,
|
||||
diag: Diagnostic,
|
||||
}
|
||||
|
||||
impl<'e> DiagnosticBuilder<'e> {
|
||||
/// Return the built diagnostic.
|
||||
fn build(self) -> Diagnostic {
|
||||
pub(super) fn build(self) -> Diagnostic {
|
||||
self.diag
|
||||
}
|
||||
|
||||
@@ -2302,6 +2445,25 @@ watermelon
|
||||
self.diag.annotate(ann);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the secondary code on the diagnostic.
|
||||
fn secondary_code(mut self, secondary_code: &str) -> DiagnosticBuilder<'e> {
|
||||
self.diag
|
||||
.set_secondary_code(SecondaryCode::new(secondary_code.to_string()));
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the fix on the diagnostic.
|
||||
pub(super) fn fix(mut self, fix: Fix) -> DiagnosticBuilder<'e> {
|
||||
self.diag.set_fix(fix);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the noqa offset on the diagnostic.
|
||||
fn noqa_offset(mut self, noqa_offset: TextSize) -> DiagnosticBuilder<'e> {
|
||||
self.diag.set_noqa_offset(noqa_offset);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// A helper builder for tersely populating a `SubDiagnostic`.
|
||||
@@ -2381,4 +2543,199 @@ watermelon
|
||||
let offset = TextSize::from(offset.parse::<u32>().unwrap());
|
||||
(line_number, Some(offset))
|
||||
}
|
||||
|
||||
/// Create Ruff-style diagnostics for testing the various output formats.
|
||||
pub(crate) fn create_diagnostics(
|
||||
format: DiagnosticFormat,
|
||||
) -> (TestEnvironment, Vec<Diagnostic>) {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.add(
|
||||
"fib.py",
|
||||
r#"import os
|
||||
|
||||
|
||||
def fibonacci(n):
|
||||
"""Compute the nth number in the Fibonacci sequence."""
|
||||
x = 1
|
||||
if n == 0:
|
||||
return 0
|
||||
elif n == 1:
|
||||
return 1
|
||||
else:
|
||||
return fibonacci(n - 1) + fibonacci(n - 2)
|
||||
"#,
|
||||
);
|
||||
env.add("undef.py", r"if a == 1: pass");
|
||||
env.format(format);
|
||||
|
||||
let diagnostics = vec![
|
||||
env.builder("unused-import", Severity::Error, "`os` imported but unused")
|
||||
.primary("fib.py", "1:7", "1:9", "Remove unused import: `os`")
|
||||
.secondary_code("F401")
|
||||
.fix(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
|
||||
TextSize::from(0),
|
||||
TextSize::from(10),
|
||||
))))
|
||||
.noqa_offset(TextSize::from(7))
|
||||
.build(),
|
||||
env.builder(
|
||||
"unused-variable",
|
||||
Severity::Error,
|
||||
"Local variable `x` is assigned to but never used",
|
||||
)
|
||||
.primary(
|
||||
"fib.py",
|
||||
"6:4",
|
||||
"6:5",
|
||||
"Remove assignment to unused variable `x`",
|
||||
)
|
||||
.secondary_code("F841")
|
||||
.fix(Fix::unsafe_edit(Edit::deletion(
|
||||
TextSize::from(94),
|
||||
TextSize::from(99),
|
||||
)))
|
||||
.noqa_offset(TextSize::from(94))
|
||||
.build(),
|
||||
env.builder("undefined-name", Severity::Error, "Undefined name `a`")
|
||||
.primary("undef.py", "1:3", "1:4", "")
|
||||
.secondary_code("F821")
|
||||
.noqa_offset(TextSize::from(3))
|
||||
.build(),
|
||||
];
|
||||
|
||||
(env, diagnostics)
|
||||
}
|
||||
|
||||
/// Create Ruff-style syntax error diagnostics for testing the various output formats.
|
||||
pub(crate) fn create_syntax_error_diagnostics(
|
||||
format: DiagnosticFormat,
|
||||
) -> (TestEnvironment, Vec<Diagnostic>) {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.add(
|
||||
"syntax_errors.py",
|
||||
r"from os import
|
||||
|
||||
if call(foo
|
||||
def bar():
|
||||
pass
|
||||
",
|
||||
);
|
||||
env.format(format);
|
||||
|
||||
let diagnostics = vec![
|
||||
env.invalid_syntax("SyntaxError: Expected one or more symbol names after import")
|
||||
.primary("syntax_errors.py", "1:14", "1:15", "")
|
||||
.build(),
|
||||
env.invalid_syntax("SyntaxError: Expected ')', found newline")
|
||||
.primary("syntax_errors.py", "3:11", "3:12", "")
|
||||
.build(),
|
||||
];
|
||||
|
||||
(env, diagnostics)
|
||||
}
|
||||
|
||||
/// Create Ruff-style diagnostics for testing the various output formats for a notebook.
|
||||
#[allow(
|
||||
dead_code,
|
||||
reason = "This is currently only used for JSON but will be needed soon for other formats"
|
||||
)]
|
||||
pub(crate) fn create_notebook_diagnostics(
|
||||
format: DiagnosticFormat,
|
||||
) -> (TestEnvironment, Vec<Diagnostic>) {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.add(
|
||||
"notebook.ipynb",
|
||||
r##"
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# cell 1\n",
|
||||
"import os"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# cell 2\n",
|
||||
"import math\n",
|
||||
"\n",
|
||||
"print('hello world')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# cell 3\n",
|
||||
"def foo():\n",
|
||||
" print()\n",
|
||||
" x = 1\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
"##,
|
||||
);
|
||||
env.format(format);
|
||||
|
||||
let diagnostics = vec![
|
||||
env.builder("unused-import", Severity::Error, "`os` imported but unused")
|
||||
.primary("notebook.ipynb", "2:7", "2:9", "Remove unused import: `os`")
|
||||
.secondary_code("F401")
|
||||
.fix(Fix::safe_edit(Edit::range_deletion(TextRange::new(
|
||||
TextSize::from(9),
|
||||
TextSize::from(19),
|
||||
))))
|
||||
.noqa_offset(TextSize::from(16))
|
||||
.build(),
|
||||
env.builder(
|
||||
"unused-import",
|
||||
Severity::Error,
|
||||
"`math` imported but unused",
|
||||
)
|
||||
.primary(
|
||||
"notebook.ipynb",
|
||||
"4:7",
|
||||
"4:11",
|
||||
"Remove unused import: `math`",
|
||||
)
|
||||
.secondary_code("F401")
|
||||
.fix(Fix::safe_edit(Edit::range_deletion(TextRange::new(
|
||||
TextSize::from(28),
|
||||
TextSize::from(40),
|
||||
))))
|
||||
.noqa_offset(TextSize::from(35))
|
||||
.build(),
|
||||
env.builder(
|
||||
"unused-variable",
|
||||
Severity::Error,
|
||||
"Local variable `x` is assigned to but never used",
|
||||
)
|
||||
.primary(
|
||||
"notebook.ipynb",
|
||||
"10:4",
|
||||
"10:5",
|
||||
"Remove assignment to unused variable `x`",
|
||||
)
|
||||
.secondary_code("F841")
|
||||
.fix(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
|
||||
TextSize::from(94),
|
||||
TextSize::from(104),
|
||||
))))
|
||||
.noqa_offset(TextSize::from(98))
|
||||
.build(),
|
||||
];
|
||||
|
||||
(env, diagnostics)
|
||||
}
|
||||
}
|
||||
|
||||
83
crates/ruff_db/src/diagnostic/render/azure.rs
Normal file
@@ -0,0 +1,83 @@
|
||||
use ruff_source_file::LineColumn;
|
||||
|
||||
use crate::diagnostic::{Diagnostic, Severity};
|
||||
|
||||
use super::FileResolver;
|
||||
|
||||
pub(super) struct AzureRenderer<'a> {
|
||||
resolver: &'a dyn FileResolver,
|
||||
}
|
||||
|
||||
impl<'a> AzureRenderer<'a> {
|
||||
pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
|
||||
Self { resolver }
|
||||
}
|
||||
}
|
||||
|
||||
impl AzureRenderer<'_> {
|
||||
pub(super) fn render(
|
||||
&self,
|
||||
f: &mut std::fmt::Formatter,
|
||||
diagnostics: &[Diagnostic],
|
||||
) -> std::fmt::Result {
|
||||
for diag in diagnostics {
|
||||
let severity = match diag.severity() {
|
||||
Severity::Info | Severity::Warning => "warning",
|
||||
Severity::Error | Severity::Fatal => "error",
|
||||
};
|
||||
write!(f, "##vso[task.logissue type={severity};")?;
|
||||
if let Some(span) = diag.primary_span() {
|
||||
let filename = span.file().path(self.resolver);
|
||||
write!(f, "sourcepath={filename};")?;
|
||||
if let Some(range) = span.range() {
|
||||
let location = if self.resolver.notebook_index(span.file()).is_some() {
|
||||
// We can't give a reasonable location for the structured formats,
|
||||
// so we show one that's clearly a fallback
|
||||
LineColumn::default()
|
||||
} else {
|
||||
span.file()
|
||||
.diagnostic_source(self.resolver)
|
||||
.as_source_code()
|
||||
.line_column(range.start())
|
||||
};
|
||||
write!(
|
||||
f,
|
||||
"linenumber={line};columnnumber={col};",
|
||||
line = location.line,
|
||||
col = location.column,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
writeln!(
|
||||
f,
|
||||
"{code}]{body}",
|
||||
code = diag
|
||||
.secondary_code()
|
||||
.map_or_else(String::new, |code| format!("code={code};")),
|
||||
body = diag.body(),
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::diagnostic::{
|
||||
DiagnosticFormat,
|
||||
render::tests::{create_diagnostics, create_syntax_error_diagnostics},
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Azure);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Azure);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
|
||||
}
|
||||
}
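For reference, the Azure Pipelines logging command assembled by the renderer above can be sketched without any ruff_db types. The helper below is a minimal, hypothetical illustration of the string layout only; it is not part of the crate's API, and the real renderer omits the source path and position when a diagnostic has no span.

// Hypothetical helper mirroring the `##vso[task.logissue]` layout written by
// `AzureRenderer::render`. All parameters are plain values for illustration.
fn format_logissue(
    severity: &str,     // "warning" or "error"
    path: &str,
    line: usize,
    column: usize,
    code: Option<&str>, // secondary code such as "F401", if any
    body: &str,
) -> String {
    let code = code.map(|code| format!("code={code};")).unwrap_or_default();
    format!(
        "##vso[task.logissue type={severity};sourcepath={path};linenumber={line};columnnumber={column};{code}]{body}"
    )
}

// format_logissue("error", "fib.py", 1, 8, Some("F401"), "`os` imported but unused")
// produces the first line of the Azure snapshot further down in this diff.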
|
||||
393
crates/ruff_db/src/diagnostic/render/json.rs
Normal file
@@ -0,0 +1,393 @@
|
||||
use serde::{Serialize, Serializer, ser::SerializeSeq};
|
||||
use serde_json::{Value, json};
|
||||
|
||||
use ruff_diagnostics::{Applicability, Edit};
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{LineColumn, OneIndexed};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig, SecondaryCode};
|
||||
|
||||
use super::FileResolver;
|
||||
|
||||
pub(super) struct JsonRenderer<'a> {
|
||||
resolver: &'a dyn FileResolver,
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
}
|
||||
|
||||
impl<'a> JsonRenderer<'a> {
|
||||
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
|
||||
Self { resolver, config }
|
||||
}
|
||||
}
|
||||
|
||||
impl JsonRenderer<'_> {
|
||||
pub(super) fn render(
|
||||
&self,
|
||||
f: &mut std::fmt::Formatter,
|
||||
diagnostics: &[Diagnostic],
|
||||
) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{:#}",
|
||||
diagnostics_to_json_value(diagnostics, self.resolver, self.config)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn diagnostics_to_json_value<'a>(
|
||||
diagnostics: impl IntoIterator<Item = &'a Diagnostic>,
|
||||
resolver: &dyn FileResolver,
|
||||
config: &DisplayDiagnosticConfig,
|
||||
) -> Value {
|
||||
let values: Vec<_> = diagnostics
|
||||
.into_iter()
|
||||
.map(|diag| diagnostic_to_json(diag, resolver, config))
|
||||
.collect();
|
||||
json!(values)
|
||||
}
|
||||
|
||||
pub(super) fn diagnostic_to_json<'a>(
|
||||
diagnostic: &'a Diagnostic,
|
||||
resolver: &'a dyn FileResolver,
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
) -> JsonDiagnostic<'a> {
|
||||
let span = diagnostic.primary_span_ref();
|
||||
let filename = span.map(|span| span.file().path(resolver));
|
||||
let range = span.and_then(|span| span.range());
|
||||
let diagnostic_source = span.map(|span| span.file().diagnostic_source(resolver));
|
||||
let source_code = diagnostic_source
|
||||
.as_ref()
|
||||
.map(|diagnostic_source| diagnostic_source.as_source_code());
|
||||
let notebook_index = span.and_then(|span| resolver.notebook_index(span.file()));
|
||||
|
||||
let mut start_location = None;
|
||||
let mut end_location = None;
|
||||
let mut noqa_location = None;
|
||||
let mut notebook_cell_index = None;
|
||||
if let Some(source_code) = source_code {
|
||||
noqa_location = diagnostic
|
||||
.noqa_offset()
|
||||
.map(|offset| source_code.line_column(offset));
|
||||
if let Some(range) = range {
|
||||
let mut start = source_code.line_column(range.start());
|
||||
let mut end = source_code.line_column(range.end());
|
||||
if let Some(notebook_index) = ¬ebook_index {
|
||||
notebook_cell_index =
|
||||
Some(notebook_index.cell(start.line).unwrap_or(OneIndexed::MIN));
|
||||
start = notebook_index.translate_line_column(&start);
|
||||
end = notebook_index.translate_line_column(&end);
|
||||
noqa_location =
|
||||
noqa_location.map(|location| notebook_index.translate_line_column(&location));
|
||||
}
|
||||
start_location = Some(start);
|
||||
end_location = Some(end);
|
||||
}
|
||||
}
|
||||
|
||||
let fix = diagnostic.fix().map(|fix| JsonFix {
|
||||
applicability: fix.applicability(),
|
||||
message: diagnostic.suggestion(),
|
||||
edits: ExpandedEdits {
|
||||
edits: fix.edits(),
|
||||
notebook_index,
|
||||
config,
|
||||
diagnostic_source,
|
||||
},
|
||||
});
|
||||
|
||||
// In preview, the locations and filename can be optional.
|
||||
if config.preview {
|
||||
JsonDiagnostic {
|
||||
code: diagnostic.secondary_code(),
|
||||
url: diagnostic.to_ruff_url(),
|
||||
message: diagnostic.body(),
|
||||
fix,
|
||||
cell: notebook_cell_index,
|
||||
location: start_location.map(JsonLocation::from),
|
||||
end_location: end_location.map(JsonLocation::from),
|
||||
filename,
|
||||
noqa_row: noqa_location.map(|location| location.line),
|
||||
}
|
||||
} else {
|
||||
JsonDiagnostic {
|
||||
code: diagnostic.secondary_code(),
|
||||
url: diagnostic.to_ruff_url(),
|
||||
message: diagnostic.body(),
|
||||
fix,
|
||||
cell: notebook_cell_index,
|
||||
location: Some(start_location.unwrap_or_default().into()),
|
||||
end_location: Some(end_location.unwrap_or_default().into()),
|
||||
filename: Some(filename.unwrap_or_default()),
|
||||
noqa_row: noqa_location.map(|location| location.line),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ExpandedEdits<'a> {
|
||||
edits: &'a [Edit],
|
||||
notebook_index: Option<NotebookIndex>,
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
diagnostic_source: Option<DiagnosticSource>,
|
||||
}
|
||||
|
||||
impl Serialize for ExpandedEdits<'_> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut s = serializer.serialize_seq(Some(self.edits.len()))?;
|
||||
|
||||
for edit in self.edits {
|
||||
let (location, end_location) = if let Some(diagnostic_source) = &self.diagnostic_source
|
||||
{
|
||||
let source_code = diagnostic_source.as_source_code();
|
||||
let mut location = source_code.line_column(edit.start());
|
||||
let mut end_location = source_code.line_column(edit.end());
|
||||
|
||||
if let Some(notebook_index) = &self.notebook_index {
|
||||
// There exists a newline between each cell's source code in the
|
||||
// concatenated source code in Ruff. This newline doesn't actually
|
||||
// exists in the JSON source field.
|
||||
//
|
||||
// Now, certain edits may try to remove this newline, which means
|
||||
// the edit will spill over to the first character of the next cell.
|
||||
// If it does, we need to translate the end location to the last
|
||||
// character of the previous cell.
|
||||
match (
|
||||
notebook_index.cell(location.line),
|
||||
notebook_index.cell(end_location.line),
|
||||
) {
|
||||
(Some(start_cell), Some(end_cell)) if start_cell != end_cell => {
|
||||
debug_assert_eq!(end_location.column.get(), 1);
|
||||
|
||||
let prev_row = end_location.line.saturating_sub(1);
|
||||
end_location = LineColumn {
|
||||
line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
|
||||
column: source_code
|
||||
.line_column(source_code.line_end_exclusive(prev_row))
|
||||
.column,
|
||||
};
|
||||
}
|
||||
(Some(_), None) => {
|
||||
debug_assert_eq!(end_location.column.get(), 1);
|
||||
|
||||
let prev_row = end_location.line.saturating_sub(1);
|
||||
end_location = LineColumn {
|
||||
line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
|
||||
column: source_code
|
||||
.line_column(source_code.line_end_exclusive(prev_row))
|
||||
.column,
|
||||
};
|
||||
}
|
||||
_ => {
|
||||
end_location = notebook_index.translate_line_column(&end_location);
|
||||
}
|
||||
}
|
||||
location = notebook_index.translate_line_column(&location);
|
||||
}
|
||||
|
||||
(Some(location), Some(end_location))
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
|
||||
// In preview, the locations can be optional.
|
||||
let value = if self.config.preview {
|
||||
JsonEdit {
|
||||
content: edit.content().unwrap_or_default(),
|
||||
location: location.map(JsonLocation::from),
|
||||
end_location: end_location.map(JsonLocation::from),
|
||||
}
|
||||
} else {
|
||||
JsonEdit {
|
||||
content: edit.content().unwrap_or_default(),
|
||||
location: Some(location.unwrap_or_default().into()),
|
||||
end_location: Some(end_location.unwrap_or_default().into()),
|
||||
}
|
||||
};
|
||||
|
||||
s.serialize_element(&value)?;
|
||||
}
|
||||
|
||||
s.end()
|
||||
}
|
||||
}
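// Worked example of the cell-boundary adjustment above (cell layout assumed for
// illustration only). Suppose the concatenated notebook source is:
//
//   row 1: "import os"      (cell 1)
//   row 2: "import math"    (cell 2; the joining newline ends row 1)
//
// An edit that removes `import os` together with its trailing newline ends at
// row 2, column 1 of the concatenated source. Because that row belongs to a
// different cell than the edit's start (or to no cell at all), the end location
// is pulled back to the end of the previous row (the last character of cell 1)
// before both locations are translated into cell-relative coordinates.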
|
||||
|
||||
/// A serializable version of `Diagnostic`.
|
||||
///
|
||||
/// The `Old` variant only exists to preserve backwards compatibility. Both this and `JsonEdit`
|
||||
/// should become structs with the `New` definitions in a future Ruff release.
|
||||
#[derive(Serialize)]
|
||||
pub(crate) struct JsonDiagnostic<'a> {
|
||||
cell: Option<OneIndexed>,
|
||||
code: Option<&'a SecondaryCode>,
|
||||
end_location: Option<JsonLocation>,
|
||||
filename: Option<&'a str>,
|
||||
fix: Option<JsonFix<'a>>,
|
||||
location: Option<JsonLocation>,
|
||||
message: &'a str,
|
||||
noqa_row: Option<OneIndexed>,
|
||||
url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct JsonFix<'a> {
|
||||
applicability: Applicability,
|
||||
edits: ExpandedEdits<'a>,
|
||||
message: Option<&'a str>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct JsonLocation {
|
||||
column: OneIndexed,
|
||||
row: OneIndexed,
|
||||
}
|
||||
|
||||
impl From<LineColumn> for JsonLocation {
|
||||
fn from(location: LineColumn) -> Self {
|
||||
JsonLocation {
|
||||
row: location.line,
|
||||
column: location.column,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct JsonEdit<'a> {
|
||||
content: &'a str,
|
||||
end_location: Option<JsonLocation>,
|
||||
location: Option<JsonLocation>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_diagnostics::{Edit, Fix};
|
||||
use ruff_text_size::TextSize;
|
||||
|
||||
use crate::diagnostic::{
|
||||
DiagnosticFormat,
|
||||
render::tests::{
|
||||
TestEnvironment, create_diagnostics, create_notebook_diagnostics,
|
||||
create_syntax_error_diagnostics,
|
||||
},
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Json);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Json);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn notebook_output() {
|
||||
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Json);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_file_stable() {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.format(DiagnosticFormat::Json);
|
||||
env.preview(false);
|
||||
|
||||
let diag = env
|
||||
.err()
|
||||
.fix(Fix::safe_edit(Edit::insertion(
|
||||
"edit".to_string(),
|
||||
TextSize::from(0),
|
||||
)))
|
||||
.build();
|
||||
|
||||
insta::assert_snapshot!(
|
||||
env.render(&diag),
|
||||
@r#"
|
||||
[
|
||||
{
|
||||
"cell": null,
|
||||
"code": null,
|
||||
"end_location": {
|
||||
"column": 1,
|
||||
"row": 1
|
||||
},
|
||||
"filename": "",
|
||||
"fix": {
|
||||
"applicability": "safe",
|
||||
"edits": [
|
||||
{
|
||||
"content": "edit",
|
||||
"end_location": {
|
||||
"column": 1,
|
||||
"row": 1
|
||||
},
|
||||
"location": {
|
||||
"column": 1,
|
||||
"row": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"message": null
|
||||
},
|
||||
"location": {
|
||||
"column": 1,
|
||||
"row": 1
|
||||
},
|
||||
"message": "main diagnostic message",
|
||||
"noqa_row": null,
|
||||
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
|
||||
}
|
||||
]
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_file_preview() {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.format(DiagnosticFormat::Json);
|
||||
env.preview(true);
|
||||
|
||||
let diag = env
|
||||
.err()
|
||||
.fix(Fix::safe_edit(Edit::insertion(
|
||||
"edit".to_string(),
|
||||
TextSize::from(0),
|
||||
)))
|
||||
.build();
|
||||
|
||||
insta::assert_snapshot!(
|
||||
env.render(&diag),
|
||||
@r#"
|
||||
[
|
||||
{
|
||||
"cell": null,
|
||||
"code": null,
|
||||
"end_location": null,
|
||||
"filename": null,
|
||||
"fix": {
|
||||
"applicability": "safe",
|
||||
"edits": [
|
||||
{
|
||||
"content": "edit",
|
||||
"end_location": null,
|
||||
"location": null
|
||||
}
|
||||
],
|
||||
"message": null
|
||||
},
|
||||
"location": null,
|
||||
"message": "main diagnostic message",
|
||||
"noqa_row": null,
|
||||
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
|
||||
}
|
||||
]
|
||||
"#,
|
||||
);
|
||||
}
|
||||
}
|
||||
59
crates/ruff_db/src/diagnostic/render/json_lines.rs
Normal file
@@ -0,0 +1,59 @@
|
||||
use crate::diagnostic::{Diagnostic, DisplayDiagnosticConfig, render::json::diagnostic_to_json};
|
||||
|
||||
use super::FileResolver;
|
||||
|
||||
pub(super) struct JsonLinesRenderer<'a> {
|
||||
resolver: &'a dyn FileResolver,
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
}
|
||||
|
||||
impl<'a> JsonLinesRenderer<'a> {
|
||||
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
|
||||
Self { resolver, config }
|
||||
}
|
||||
}
|
||||
|
||||
impl JsonLinesRenderer<'_> {
|
||||
pub(super) fn render(
|
||||
&self,
|
||||
f: &mut std::fmt::Formatter,
|
||||
diagnostics: &[Diagnostic],
|
||||
) -> std::fmt::Result {
|
||||
for diag in diagnostics {
|
||||
writeln!(
|
||||
f,
|
||||
"{}",
|
||||
serde_json::json!(diagnostic_to_json(diag, self.resolver, self.config))
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::diagnostic::{
|
||||
DiagnosticFormat,
|
||||
render::tests::{
|
||||
create_diagnostics, create_notebook_diagnostics, create_syntax_error_diagnostics,
|
||||
},
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::JsonLines);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::JsonLines);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn notebook_output() {
|
||||
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::JsonLines);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
|
||||
}
|
||||
}
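The difference from the JSON renderer above is the framing: each diagnostic is written as one compact JSON object per line instead of a pretty-printed array. A minimal sketch of that framing, assuming serde and serde_json are available and using a hypothetical `Row` type standing in for `JsonDiagnostic`:

use serde::Serialize;

#[derive(Serialize)]
struct Row<'a> {
    code: Option<&'a str>,
    message: &'a str,
}

// One compact JSON object per row, joined by newlines.
fn render_lines(rows: &[Row]) -> String {
    rows.iter()
        .map(|row| serde_json::json!(row).to_string())
        .collect::<Vec<_>>()
        .join("\n")
}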
|
||||
@@ -1,7 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/message/azure.rs
|
||||
expression: content
|
||||
snapshot_kind: text
|
||||
source: crates/ruff_db/src/diagnostic/render/azure.rs
|
||||
expression: env.render_diagnostics(&diagnostics)
|
||||
---
|
||||
##vso[task.logissue type=error;sourcepath=fib.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
|
||||
##vso[task.logissue type=error;sourcepath=fib.py;linenumber=6;columnnumber=5;code=F841;]Local variable `x` is assigned to but never used
|
||||
@@ -1,7 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/message/azure.rs
|
||||
expression: content
|
||||
snapshot_kind: text
|
||||
source: crates/ruff_db/src/diagnostic/render/azure.rs
|
||||
expression: env.render_diagnostics(&diagnostics)
|
||||
---
|
||||
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=1;columnnumber=15;]SyntaxError: Expected one or more symbol names after import
|
||||
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=3;columnnumber=12;]SyntaxError: Expected ')', found newline
|
||||
@@ -1,7 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/message/json.rs
|
||||
expression: content
|
||||
snapshot_kind: text
|
||||
source: crates/ruff_db/src/diagnostic/render/json.rs
|
||||
expression: env.render_diagnostics(&diagnostics)
|
||||
---
|
||||
[
|
||||
{
|
||||
@@ -84,8 +83,8 @@ snapshot_kind: text
|
||||
{
|
||||
"content": "",
|
||||
"end_location": {
|
||||
"column": 10,
|
||||
"row": 4
|
||||
"column": 1,
|
||||
"row": 5
|
||||
},
|
||||
"location": {
|
||||
"column": 1,
|
||||
@@ -1,7 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/message/json.rs
|
||||
expression: content
|
||||
snapshot_kind: text
|
||||
source: crates/ruff_db/src/diagnostic/render/json.rs
|
||||
expression: env.render_diagnostics(&diagnostics)
|
||||
---
|
||||
[
|
||||
{
|
||||
@@ -1,7 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/message/json.rs
|
||||
expression: content
|
||||
snapshot_kind: text
|
||||
source: crates/ruff_db/src/diagnostic/render/json.rs
|
||||
expression: env.render_diagnostics(&diagnostics)
|
||||
---
|
||||
[
|
||||
{
|
||||
@@ -1,8 +1,7 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/message/json_lines.rs
|
||||
expression: content
|
||||
snapshot_kind: text
|
||||
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
|
||||
expression: env.render_diagnostics(&diagnostics)
|
||||
---
|
||||
{"cell":1,"code":"F401","end_location":{"column":10,"row":2},"filename":"notebook.ipynb","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":10,"row":2},"location":{"column":1,"row":2}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":2},"message":"`os` imported but unused","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
|
||||
{"cell":2,"code":"F401","end_location":{"column":12,"row":2},"filename":"notebook.ipynb","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":3},"location":{"column":1,"row":2}}],"message":"Remove unused import: `math`"},"location":{"column":8,"row":2},"message":"`math` imported but unused","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
|
||||
{"cell":3,"code":"F841","end_location":{"column":6,"row":4},"filename":"notebook.ipynb","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":10,"row":4},"location":{"column":1,"row":4}}],"message":"Remove assignment to unused variable `x`"},"location":{"column":5,"row":4},"message":"Local variable `x` is assigned to but never used","noqa_row":4,"url":"https://docs.astral.sh/ruff/rules/unused-variable"}
|
||||
{"cell":3,"code":"F841","end_location":{"column":6,"row":4},"filename":"notebook.ipynb","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":1,"row":5},"location":{"column":1,"row":4}}],"message":"Remove assignment to unused variable `x`"},"location":{"column":5,"row":4},"message":"Local variable `x` is assigned to but never used","noqa_row":4,"url":"https://docs.astral.sh/ruff/rules/unused-variable"}
|
||||
@@ -1,7 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/message/json_lines.rs
|
||||
expression: content
|
||||
snapshot_kind: text
|
||||
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
|
||||
expression: env.render_diagnostics(&diagnostics)
|
||||
---
|
||||
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"fib.py","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
|
||||
{"cell":null,"code":"F841","end_location":{"column":6,"row":6},"filename":"fib.py","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":10,"row":6},"location":{"column":5,"row":6}}],"message":"Remove assignment to unused variable `x`"},"location":{"column":5,"row":6},"message":"Local variable `x` is assigned to but never used","noqa_row":6,"url":"https://docs.astral.sh/ruff/rules/unused-variable"}
|
||||
@@ -1,7 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/message/json_lines.rs
|
||||
expression: content
|
||||
snapshot_kind: text
|
||||
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
|
||||
expression: env.render_diagnostics(&diagnostics)
|
||||
---
|
||||
{"cell":null,"code":null,"end_location":{"column":1,"row":2},"filename":"syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"SyntaxError: Expected one or more symbol names after import","noqa_row":null,"url":null}
|
||||
{"cell":null,"code":null,"end_location":{"column":1,"row":4},"filename":"syntax_errors.py","fix":null,"location":{"column":12,"row":3},"message":"SyntaxError: Expected ')', found newline","noqa_row":null,"url":null}
|
||||
@@ -21,6 +21,19 @@ type LockedZipArchive<'a> = MutexGuard<'a, VendoredZipArchive>;
|
||||
///
|
||||
/// "Files" in the `VendoredFileSystem` are read-only and immutable.
|
||||
/// Directories are supported, but symlinks and hardlinks cannot exist.
|
||||
///
|
||||
/// # Path separators
|
||||
///
|
||||
/// At time of writing (2025-07-11), this implementation always uses `/` as a
|
||||
/// path separator, even in Windows environments where `\` is traditionally
|
||||
/// used as a file path separator. Namely, this is only currently used with zip
|
||||
/// files built by `crates/ty_vendored/build.rs`.
|
||||
///
|
||||
/// Callers using this may provide paths that use a `\` as a separator. It will
|
||||
/// be transparently normalized to `/`.
|
||||
///
|
||||
/// This is particularly important because the presence of a trailing separator
|
||||
/// in a zip file is conventionally used to indicate a directory entry.
|
||||
#[derive(Clone)]
|
||||
pub struct VendoredFileSystem {
|
||||
inner: Arc<Mutex<VendoredZipArchive>>,
|
||||
@@ -115,6 +128,68 @@ impl VendoredFileSystem {
|
||||
read_to_string(self, path.as_ref())
|
||||
}
|
||||
|
||||
/// Read the direct children of the directory
|
||||
/// identified by `path`.
|
||||
///
|
||||
/// If `path` is not a directory, then this will
|
||||
/// return an empty `Vec`.
|
||||
pub fn read_directory(&self, dir: impl AsRef<VendoredPath>) -> Vec<DirectoryEntry> {
|
||||
// N.B. We specifically do not return an iterator here to avoid
|
||||
// holding a lock for the lifetime of the iterator returned.
|
||||
// That is, it seems like a footgun to keep the zip archive
|
||||
// locked during iteration, since the unit of work for each
|
||||
// item in the iterator could be arbitrarily long. Allocating
|
||||
// up front and stuffing all entries into it is probably the
|
||||
// simplest solution and what we do here. If this becomes
|
||||
// a problem, there are other strategies we could pursue.
|
||||
// (Amortizing allocs, using a different synchronization
|
||||
// behavior or even exposing additional APIs.) ---AG
|
||||
|
||||
fn read_directory(fs: &VendoredFileSystem, dir: &VendoredPath) -> Vec<DirectoryEntry> {
|
||||
let mut normalized = NormalizedVendoredPath::from(dir);
|
||||
if !normalized.as_str().ends_with('/') {
|
||||
normalized = normalized.with_trailing_slash();
|
||||
}
|
||||
let archive = fs.lock_archive();
|
||||
let mut entries = vec![];
|
||||
for name in archive.0.file_names() {
|
||||
// Any entry that doesn't have the `path` (with a
|
||||
// trailing slash) as a prefix cannot possibly be in
|
||||
// the directory referenced by `path`.
|
||||
let Some(without_dir_prefix) = name.strip_prefix(normalized.as_str()) else {
|
||||
continue;
|
||||
};
|
||||
// Filter out an entry equivalent to the path given
|
||||
// since we only want children of the directory.
|
||||
if without_dir_prefix.is_empty() {
|
||||
continue;
|
||||
}
|
||||
// We only want *direct* children. Files that are
|
||||
// direct children cannot have any slashes (or else
|
||||
// they are not direct children). Directories that
|
||||
// are direct children can only have one slash and
|
||||
// it must be at the end.
|
||||
//
|
||||
// (We do this manually ourselves to avoid doing a
|
||||
// full file lookup and metadata retrieval via the
|
||||
// `zip` crate.)
|
||||
let file_type = FileType::from_zip_file_name(without_dir_prefix);
|
||||
let slash_count = without_dir_prefix.matches('/').count();
|
||||
match file_type {
|
||||
FileType::File if slash_count > 0 => continue,
|
||||
FileType::Directory if slash_count > 1 => continue,
|
||||
_ => {}
|
||||
}
|
||||
entries.push(DirectoryEntry {
|
||||
path: VendoredPathBuf::from(name),
|
||||
file_type,
|
||||
});
|
||||
}
|
||||
entries
|
||||
}
|
||||
read_directory(self, dir.as_ref())
|
||||
}
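    // Illustrative only: the direct-child test used in the loop above, reduced to
    // plain zip entry names. This helper is hypothetical and not part of this file.
    //
    // With the prefix "stdlib/":
    //   "stdlib/functools.pyi"     is a file with 0 slashes      -> direct child
    //   "stdlib/asyncio/"          is a directory with 1 slash   -> direct child
    //   "stdlib/asyncio/tasks.pyi" is a file with 1 slash        -> not a direct child
    fn is_direct_child(without_dir_prefix: &str) -> bool {
        let is_directory = without_dir_prefix.ends_with('/');
        let slash_count = without_dir_prefix.matches('/').count();
        if is_directory {
            slash_count == 1
        } else {
            slash_count == 0
        }
    }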
|
||||
|
||||
/// Acquire a lock on the underlying zip archive.
|
||||
/// The call will block until it is able to acquire the lock.
|
||||
///
|
||||
@@ -206,6 +281,14 @@ pub enum FileType {
|
||||
}
|
||||
|
||||
impl FileType {
|
||||
fn from_zip_file_name(name: &str) -> FileType {
|
||||
if name.ends_with('/') {
|
||||
FileType::Directory
|
||||
} else {
|
||||
FileType::File
|
||||
}
|
||||
}
|
||||
|
||||
pub const fn is_file(self) -> bool {
|
||||
matches!(self, Self::File)
|
||||
}
|
||||
@@ -244,6 +327,30 @@ impl Metadata {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct DirectoryEntry {
|
||||
path: VendoredPathBuf,
|
||||
file_type: FileType,
|
||||
}
|
||||
|
||||
impl DirectoryEntry {
|
||||
pub fn new(path: VendoredPathBuf, file_type: FileType) -> Self {
|
||||
Self { path, file_type }
|
||||
}
|
||||
|
||||
pub fn into_path(self) -> VendoredPathBuf {
|
||||
self.path
|
||||
}
|
||||
|
||||
pub fn path(&self) -> &VendoredPath {
|
||||
&self.path
|
||||
}
|
||||
|
||||
pub fn file_type(&self) -> FileType {
|
||||
self.file_type
|
||||
}
|
||||
}
|
||||
|
||||
/// Newtype wrapper around a ZipArchive.
|
||||
#[derive(Debug)]
|
||||
struct VendoredZipArchive(ZipArchive<io::Cursor<Cow<'static, [u8]>>>);
|
||||
@@ -498,6 +605,60 @@ pub(crate) mod tests {
|
||||
test_directory("./stdlib/asyncio/../asyncio/")
|
||||
}
|
||||
|
||||
fn readdir_snapshot(fs: &VendoredFileSystem, path: &str) -> String {
|
||||
let mut paths = fs
|
||||
.read_directory(VendoredPath::new(path))
|
||||
.into_iter()
|
||||
.map(|entry| entry.path().to_string())
|
||||
.collect::<Vec<String>>();
|
||||
paths.sort();
|
||||
paths.join("\n")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn read_directory_stdlib() {
|
||||
let mock_typeshed = mock_typeshed();
|
||||
|
||||
assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib"), @r"
|
||||
vendored://stdlib/asyncio/
|
||||
vendored://stdlib/functools.pyi
|
||||
");
|
||||
assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib/"), @r"
|
||||
vendored://stdlib/asyncio/
|
||||
vendored://stdlib/functools.pyi
|
||||
");
|
||||
assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib"), @r"
|
||||
vendored://stdlib/asyncio/
|
||||
vendored://stdlib/functools.pyi
|
||||
");
|
||||
assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib/"), @r"
|
||||
vendored://stdlib/asyncio/
|
||||
vendored://stdlib/functools.pyi
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn read_directory_asyncio() {
|
||||
let mock_typeshed = mock_typeshed();
|
||||
|
||||
assert_snapshot!(
|
||||
readdir_snapshot(&mock_typeshed, "stdlib/asyncio"),
|
||||
@"vendored://stdlib/asyncio/tasks.pyi",
|
||||
);
|
||||
assert_snapshot!(
|
||||
readdir_snapshot(&mock_typeshed, "./stdlib/asyncio"),
|
||||
@"vendored://stdlib/asyncio/tasks.pyi",
|
||||
);
|
||||
assert_snapshot!(
|
||||
readdir_snapshot(&mock_typeshed, "stdlib/asyncio/"),
|
||||
@"vendored://stdlib/asyncio/tasks.pyi",
|
||||
);
|
||||
assert_snapshot!(
|
||||
readdir_snapshot(&mock_typeshed, "./stdlib/asyncio/"),
|
||||
@"vendored://stdlib/asyncio/tasks.pyi",
|
||||
);
|
||||
}
|
||||
|
||||
fn test_nonexistent_path(path: &str) {
|
||||
let mock_typeshed = mock_typeshed();
|
||||
let path = VendoredPath::new(path);
|
||||
|
||||
@@ -17,6 +17,10 @@ impl VendoredPath {
|
||||
unsafe { &*(path as *const Utf8Path as *const VendoredPath) }
|
||||
}
|
||||
|
||||
pub fn file_name(&self) -> Option<&str> {
|
||||
self.0.file_name()
|
||||
}
|
||||
|
||||
pub fn to_path_buf(&self) -> VendoredPathBuf {
|
||||
VendoredPathBuf(self.0.to_path_buf())
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.12.2"
|
||||
version = "0.12.3"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""
|
||||
Should emit:
|
||||
B017 - on lines 24, 28, 46, 49, 52, and 58
|
||||
B017 - on lines 24, 28, 46, 49, 52, 58, 62, 68, and 71
|
||||
"""
|
||||
import asyncio
|
||||
import unittest
|
||||
@@ -56,3 +56,17 @@ def test_pytest_raises():
|
||||
|
||||
with contextlib.nullcontext(), pytest.raises(Exception):
|
||||
raise ValueError("Multiple context managers")
|
||||
|
||||
|
||||
def test_pytest_raises_keyword():
|
||||
with pytest.raises(expected_exception=Exception):
|
||||
raise ValueError("Should be flagged")
|
||||
|
||||
def test_assert_raises_keyword():
|
||||
class TestKwargs(unittest.TestCase):
|
||||
def test_method(self):
|
||||
with self.assertRaises(exception=Exception):
|
||||
raise ValueError("Should be flagged")
|
||||
|
||||
with self.assertRaises(exception=BaseException):
|
||||
raise ValueError("Should be flagged")
|
||||
|
||||
@@ -181,3 +181,51 @@ class SubclassTestModel2(TestModel4):
|
||||
# Subclass without __str__
|
||||
class SubclassTestModel3(TestModel1):
|
||||
pass
|
||||
|
||||
|
||||
# Test cases for type-annotated abstract models - these should NOT trigger DJ008
|
||||
from typing import ClassVar
|
||||
from django_stubs_ext.db.models import TypedModelMeta
|
||||
|
||||
|
||||
class TypeAnnotatedAbstractModel1(models.Model):
|
||||
"""Model with type-annotated abstract = True - should not trigger DJ008"""
|
||||
new_field = models.CharField(max_length=10)
|
||||
|
||||
class Meta(TypedModelMeta):
|
||||
abstract: ClassVar[bool] = True
|
||||
|
||||
|
||||
class TypeAnnotatedAbstractModel2(models.Model):
|
||||
"""Model with type-annotated abstract = True using regular Meta - should not trigger DJ008"""
|
||||
new_field = models.CharField(max_length=10)
|
||||
|
||||
class Meta:
|
||||
abstract: ClassVar[bool] = True
|
||||
|
||||
|
||||
class TypeAnnotatedAbstractModel3(models.Model):
|
||||
"""Model with type-annotated abstract = True but without ClassVar - should not trigger DJ008"""
|
||||
new_field = models.CharField(max_length=10)
|
||||
|
||||
class Meta:
|
||||
abstract: bool = True
|
||||
|
||||
|
||||
class TypeAnnotatedNonAbstractModel(models.Model):
|
||||
"""Model with type-annotated abstract = False - should trigger DJ008"""
|
||||
new_field = models.CharField(max_length=10)
|
||||
|
||||
class Meta:
|
||||
abstract: ClassVar[bool] = False
|
||||
|
||||
|
||||
class TypeAnnotatedAbstractModelWithStr(models.Model):
|
||||
"""Model with type-annotated abstract = True and __str__ method - should not trigger DJ008"""
|
||||
new_field = models.CharField(max_length=10)
|
||||
|
||||
class Meta(TypedModelMeta):
|
||||
abstract: ClassVar[bool] = True
|
||||
|
||||
def __str__(self):
|
||||
return self.new_field
|
||||
|
||||
@@ -422,6 +422,35 @@ def func(a: dict[str, int]) -> list[dict[str, int]]:
|
||||
services = a["services"]
|
||||
return services
|
||||
|
||||
|
||||
# See: https://github.com/astral-sh/ruff/issues/14052
|
||||
def outer() -> list[object]:
|
||||
@register
|
||||
async def inner() -> None:
|
||||
print(layout)
|
||||
|
||||
layout = [...]
|
||||
return layout
|
||||
|
||||
def outer() -> list[object]:
|
||||
with open("") as f:
|
||||
async def inner() -> None:
|
||||
print(layout)
|
||||
|
||||
layout = [...]
|
||||
return layout
|
||||
|
||||
|
||||
def outer() -> list[object]:
|
||||
def inner():
|
||||
with open("") as f:
|
||||
async def inner_inner() -> None:
|
||||
print(layout)
|
||||
|
||||
layout = [...]
|
||||
return layout
|
||||
|
||||
|
||||
# See: https://github.com/astral-sh/ruff/issues/18411
|
||||
def f():
|
||||
(#=
|
||||
|
||||
6
crates/ruff_linter/resources/test/fixtures/flake8_type_checking/whitespace.py
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
# Regression test for: https://github.com/astral-sh/ruff/issues/19175
|
||||
# there is a (potentially invisible) unicode formfeed character (000C) between `TYPE_CHECKING` and the backslash
|
||||
from typing import TYPE_CHECKING\
|
||||
|
||||
if TYPE_CHECKING: import builtins
|
||||
builtins.print("!")
|
||||
5
crates/ruff_linter/resources/test/fixtures/isort/required_imports/multiple_strings.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
"""This is a docstring."""
|
||||
"This is not a docstring."
|
||||
"This is also not a docstring."
|
||||
|
||||
x = 1
|
||||
@@ -4,8 +4,8 @@ use crate::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::codes::Rule;
|
||||
use crate::rules::{
|
||||
flake8_import_conventions, flake8_pyi, flake8_pytest_style, flake8_type_checking, pyflakes,
|
||||
pylint, pyupgrade, refurb, ruff,
|
||||
flake8_import_conventions, flake8_pyi, flake8_pytest_style, flake8_return,
|
||||
flake8_type_checking, pyflakes, pylint, pyupgrade, refurb, ruff,
|
||||
};
|
||||
|
||||
/// Run lint rules over the [`Binding`]s.
|
||||
@@ -25,11 +25,20 @@ pub(crate) fn bindings(checker: &Checker) {
|
||||
Rule::ForLoopWrites,
|
||||
Rule::CustomTypeVarForSelf,
|
||||
Rule::PrivateTypeParameter,
|
||||
Rule::UnnecessaryAssign,
|
||||
]) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (binding_id, binding) in checker.semantic.bindings.iter_enumerated() {
|
||||
if checker.is_rule_enabled(Rule::UnnecessaryAssign) {
|
||||
if binding.kind.is_function_definition() {
|
||||
flake8_return::rules::unnecessary_assign(
|
||||
checker,
|
||||
binding.statement(checker.semantic()).unwrap(),
|
||||
);
|
||||
}
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::UnusedVariable) {
|
||||
if binding.kind.is_bound_exception()
|
||||
&& binding.is_unused()
|
||||
|
||||
@@ -207,7 +207,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
Rule::UnnecessaryReturnNone,
|
||||
Rule::ImplicitReturnValue,
|
||||
Rule::ImplicitReturn,
|
||||
Rule::UnnecessaryAssign,
|
||||
Rule::SuperfluousElseReturn,
|
||||
Rule::SuperfluousElseRaise,
|
||||
Rule::SuperfluousElseContinue,
|
||||
|
||||
@@ -670,7 +670,11 @@ impl SemanticSyntaxContext for Checker<'_> {
|
||||
| SemanticSyntaxErrorKind::InvalidStarExpression
|
||||
| SemanticSyntaxErrorKind::AsyncComprehensionInSyncComprehension(_)
|
||||
| SemanticSyntaxErrorKind::DuplicateParameter(_)
|
||||
| SemanticSyntaxErrorKind::NonlocalDeclarationAtModuleLevel => {
|
||||
| SemanticSyntaxErrorKind::NonlocalDeclarationAtModuleLevel
|
||||
| SemanticSyntaxErrorKind::LoadBeforeNonlocalDeclaration { .. }
|
||||
| SemanticSyntaxErrorKind::NonlocalAndGlobal(_)
|
||||
| SemanticSyntaxErrorKind::AnnotatedGlobal(_)
|
||||
| SemanticSyntaxErrorKind::AnnotatedNonlocal(_) => {
|
||||
self.semantic_errors.borrow_mut().push(error);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ use ruff_python_ast::Stmt;
|
||||
use ruff_python_ast::helpers::is_docstring_stmt;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_parser::{TokenKind, Tokens};
|
||||
use ruff_python_trivia::is_python_whitespace;
|
||||
use ruff_python_trivia::{PythonWhitespace, textwrap::indent};
|
||||
use ruff_source_file::{LineRanges, UniversalNewlineIterator};
|
||||
use ruff_text_size::{Ranged, TextSize};
|
||||
@@ -274,19 +275,12 @@ impl<'a> Insertion<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Find the end of the last docstring.
|
||||
/// Find the end of the docstring (first string statement).
|
||||
fn match_docstring_end(body: &[Stmt]) -> Option<TextSize> {
|
||||
let mut iter = body.iter();
|
||||
let mut stmt = iter.next()?;
|
||||
let stmt = body.first()?;
|
||||
if !is_docstring_stmt(stmt) {
|
||||
return None;
|
||||
}
|
||||
for next in iter {
|
||||
if !is_docstring_stmt(next) {
|
||||
break;
|
||||
}
|
||||
stmt = next;
|
||||
}
|
||||
Some(stmt.end())
|
||||
}
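// Behavioural note on the change above, illustrated by the `multiple_strings.py`
// fixture added earlier in this diff: only the first string statement now counts as
// the docstring, so a required import is inserted after `"""This is a docstring."""`
// rather than after the last of the consecutive string literals (see the updated
// insertion offset in the test below, 40 -> 20).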
|
||||
|
||||
@@ -306,7 +300,7 @@ fn match_semicolon(s: &str) -> Option<TextSize> {
|
||||
fn match_continuation(s: &str) -> Option<TextSize> {
|
||||
for (offset, c) in s.char_indices() {
|
||||
match c {
|
||||
' ' | '\t' => continue,
|
||||
_ if is_python_whitespace(c) => continue,
|
||||
'\\' => return Some(TextSize::try_from(offset).unwrap()),
|
||||
_ => break,
|
||||
}
|
||||
@@ -366,7 +360,7 @@ mod tests {
|
||||
.trim_start();
|
||||
assert_eq!(
|
||||
insert(contents)?,
|
||||
Insertion::own_line("", TextSize::from(40), "\n")
|
||||
Insertion::own_line("", TextSize::from(20), "\n")
|
||||
);
|
||||
|
||||
let contents = r"
|
||||
|
||||
@@ -1,71 +0,0 @@
|
||||
use std::io::Write;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_source_file::LineColumn;
|
||||
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
|
||||
/// Generate error logging commands for Azure Pipelines format.
|
||||
/// See [documentation](https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning)
|
||||
#[derive(Default)]
|
||||
pub struct AzureEmitter;
|
||||
|
||||
impl Emitter for AzureEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for diagnostic in diagnostics {
|
||||
let filename = diagnostic.expect_ruff_filename();
|
||||
let location = if context.is_notebook(&filename) {
|
||||
// We can't give a reasonable location for the structured formats,
|
||||
// so we show one that's clearly a fallback
|
||||
LineColumn::default()
|
||||
} else {
|
||||
diagnostic.expect_ruff_start_location()
|
||||
};
|
||||
|
||||
writeln!(
|
||||
writer,
|
||||
"##vso[task.logissue type=error\
|
||||
;sourcepath={filename};linenumber={line};columnnumber={col};{code}]{body}",
|
||||
line = location.line,
|
||||
col = location.column,
|
||||
code = diagnostic
|
||||
.secondary_code()
|
||||
.map_or_else(String::new, |code| format!("code={code};")),
|
||||
body = diagnostic.body(),
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use insta::assert_snapshot;
|
||||
|
||||
use crate::message::AzureEmitter;
|
||||
use crate::message::tests::{
|
||||
capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = AzureEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let mut emitter = AzureEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
}
|
||||
@@ -21,7 +21,7 @@ use crate::{Applicability, Fix};
|
||||
/// * Compute the diff from the [`Edit`] because diff calculation is expensive.
|
||||
pub(super) struct Diff<'a> {
|
||||
fix: &'a Fix,
|
||||
source_code: SourceFile,
|
||||
source_code: &'a SourceFile,
|
||||
}
|
||||
|
||||
impl<'a> Diff<'a> {
|
||||
|
||||
@@ -1,220 +0,0 @@
|
||||
use std::io::Write;
|
||||
|
||||
use serde::ser::SerializeSeq;
|
||||
use serde::{Serialize, Serializer};
|
||||
use serde_json::{Value, json};
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{LineColumn, OneIndexed, SourceCode};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::Edit;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct JsonEmitter;
|
||||
|
||||
impl Emitter for JsonEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
serde_json::to_writer_pretty(
|
||||
writer,
|
||||
&ExpandedMessages {
|
||||
diagnostics,
|
||||
context,
|
||||
},
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
struct ExpandedMessages<'a> {
|
||||
diagnostics: &'a [Diagnostic],
|
||||
context: &'a EmitterContext<'a>,
|
||||
}
|
||||
|
||||
impl Serialize for ExpandedMessages<'_> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;
|
||||
|
||||
for message in self.diagnostics {
|
||||
let value = message_to_json_value(message, self.context);
|
||||
s.serialize_element(&value)?;
|
||||
}
|
||||
|
||||
s.end()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn message_to_json_value(message: &Diagnostic, context: &EmitterContext) -> Value {
|
||||
let source_file = message.expect_ruff_source_file();
|
||||
let source_code = source_file.to_source_code();
|
||||
let filename = message.expect_ruff_filename();
|
||||
let notebook_index = context.notebook_index(&filename);
|
||||
|
||||
let fix = message.fix().map(|fix| {
|
||||
json!({
|
||||
"applicability": fix.applicability(),
|
||||
"message": message.suggestion(),
|
||||
"edits": &ExpandedEdits { edits: fix.edits(), source_code: &source_code, notebook_index },
|
||||
})
|
||||
});
|
||||
|
||||
let mut start_location = source_code.line_column(message.expect_range().start());
|
||||
let mut end_location = source_code.line_column(message.expect_range().end());
|
||||
let mut noqa_location = message
|
||||
.noqa_offset()
|
||||
.map(|offset| source_code.line_column(offset));
|
||||
let mut notebook_cell_index = None;
|
||||
|
||||
if let Some(notebook_index) = notebook_index {
|
||||
notebook_cell_index = Some(
|
||||
notebook_index
|
||||
.cell(start_location.line)
|
||||
.unwrap_or(OneIndexed::MIN),
|
||||
);
|
||||
start_location = notebook_index.translate_line_column(&start_location);
|
||||
end_location = notebook_index.translate_line_column(&end_location);
|
||||
noqa_location =
|
||||
noqa_location.map(|location| notebook_index.translate_line_column(&location));
|
||||
}
|
||||
|
||||
json!({
|
||||
"code": message.secondary_code(),
|
||||
"url": message.to_url(),
|
||||
"message": message.body(),
|
||||
"fix": fix,
|
||||
"cell": notebook_cell_index,
|
||||
"location": location_to_json(start_location),
|
||||
"end_location": location_to_json(end_location),
|
||||
"filename": filename,
|
||||
"noqa_row": noqa_location.map(|location| location.line)
|
||||
})
|
||||
}
|
||||
|
||||
fn location_to_json(location: LineColumn) -> serde_json::Value {
|
||||
json!({
|
||||
"row": location.line,
|
||||
"column": location.column
|
||||
})
|
||||
}
|
||||
|
||||
struct ExpandedEdits<'a> {
|
||||
edits: &'a [Edit],
|
||||
source_code: &'a SourceCode<'a, 'a>,
|
||||
notebook_index: Option<&'a NotebookIndex>,
|
||||
}
|
||||
|
||||
impl Serialize for ExpandedEdits<'_> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut s = serializer.serialize_seq(Some(self.edits.len()))?;
|
||||
|
||||
for edit in self.edits {
|
||||
let mut location = self.source_code.line_column(edit.start());
|
||||
let mut end_location = self.source_code.line_column(edit.end());
|
||||
|
||||
if let Some(notebook_index) = self.notebook_index {
|
||||
// There exists a newline between each cell's source code in the
|
||||
// concatenated source code in Ruff. This newline doesn't actually
|
||||
// exist in the JSON source field.
|
||||
//
|
||||
// Now, certain edits may try to remove this newline, which means
|
||||
// the edit will spill over to the first character of the next cell.
|
||||
// If it does, we need to translate the end location to the last
|
||||
// character of the previous cell.
|
||||
match (
|
||||
notebook_index.cell(location.line),
|
||||
notebook_index.cell(end_location.line),
|
||||
) {
|
||||
(Some(start_cell), Some(end_cell)) if start_cell != end_cell => {
|
||||
debug_assert_eq!(end_location.column.get(), 1);
|
||||
|
||||
let prev_row = end_location.line.saturating_sub(1);
|
||||
end_location = LineColumn {
|
||||
line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
|
||||
column: self
|
||||
.source_code
|
||||
.line_column(self.source_code.line_end_exclusive(prev_row))
|
||||
.column,
|
||||
};
|
||||
}
|
||||
(Some(_), None) => {
|
||||
debug_assert_eq!(end_location.column.get(), 1);
|
||||
|
||||
let prev_row = end_location.line.saturating_sub(1);
|
||||
end_location = LineColumn {
|
||||
line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
|
||||
column: self
|
||||
.source_code
|
||||
.line_column(self.source_code.line_end_exclusive(prev_row))
|
||||
.column,
|
||||
};
|
||||
}
|
||||
_ => {
|
||||
end_location = notebook_index.translate_line_column(&end_location);
|
||||
}
|
||||
}
|
||||
location = notebook_index.translate_line_column(&location);
|
||||
}
|
||||
|
||||
let value = json!({
|
||||
"content": edit.content().unwrap_or_default(),
|
||||
"location": location_to_json(location),
|
||||
"end_location": location_to_json(end_location)
|
||||
});
|
||||
|
||||
s.serialize_element(&value)?;
|
||||
}
|
||||
|
||||
s.end()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use insta::assert_snapshot;
|
||||
|
||||
use crate::message::JsonEmitter;
|
||||
use crate::message::tests::{
|
||||
capture_emitter_notebook_output, capture_emitter_output, create_diagnostics,
|
||||
create_notebook_diagnostics, create_syntax_error_diagnostics,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = JsonEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let mut emitter = JsonEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn notebook_output() {
|
||||
let mut emitter = JsonEmitter;
|
||||
let (diagnostics, notebook_indexes) = create_notebook_diagnostics();
|
||||
let content =
|
||||
capture_emitter_notebook_output(&mut emitter, &diagnostics, ¬ebook_indexes);
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
}
|
||||
@@ -1,60 +0,0 @@
|
||||
use std::io::Write;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
|
||||
use crate::message::json::message_to_json_value;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct JsonLinesEmitter;
|
||||
|
||||
impl Emitter for JsonLinesEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for diagnostic in diagnostics {
|
||||
serde_json::to_writer(&mut *writer, &message_to_json_value(diagnostic, context))?;
|
||||
writer.write_all(b"\n")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use insta::assert_snapshot;
|
||||
|
||||
use crate::message::json_lines::JsonLinesEmitter;
|
||||
use crate::message::tests::{
|
||||
capture_emitter_notebook_output, capture_emitter_output, create_diagnostics,
|
||||
create_notebook_diagnostics, create_syntax_error_diagnostics,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = JsonLinesEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let mut emitter = JsonLinesEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn notebook_output() {
|
||||
let mut emitter = JsonLinesEmitter;
|
||||
let (messages, notebook_indexes) = create_notebook_diagnostics();
|
||||
let content = capture_emitter_notebook_output(&mut emitter, &messages, ¬ebook_indexes);
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
}
|
||||
@@ -3,17 +3,17 @@ use std::fmt::Display;
|
||||
use std::io::Write;
|
||||
use std::ops::Deref;
|
||||
|
||||
use ruff_db::diagnostic::{
|
||||
Annotation, Diagnostic, DiagnosticId, LintName, SecondaryCode, Severity, Span,
|
||||
};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
pub use azure::AzureEmitter;
|
||||
use ruff_db::diagnostic::{
|
||||
Annotation, Diagnostic, DiagnosticId, FileResolver, Input, LintName, SecondaryCode, Severity,
|
||||
Span, UnifiedFile,
|
||||
};
|
||||
use ruff_db::files::File;
|
||||
|
||||
pub use github::GithubEmitter;
|
||||
pub use gitlab::GitlabEmitter;
|
||||
pub use grouped::GroupedEmitter;
|
||||
pub use json::JsonEmitter;
|
||||
pub use json_lines::JsonLinesEmitter;
|
||||
pub use junit::JunitEmitter;
|
||||
pub use pylint::PylintEmitter;
|
||||
pub use rdjson::RdjsonEmitter;
|
||||
@@ -26,13 +26,10 @@ pub use text::TextEmitter;
|
||||
use crate::Fix;
|
||||
use crate::registry::Rule;
|
||||
|
||||
mod azure;
|
||||
mod diff;
|
||||
mod github;
|
||||
mod gitlab;
|
||||
mod grouped;
|
||||
mod json;
|
||||
mod json_lines;
|
||||
mod junit;
|
||||
mod pylint;
|
||||
mod rdjson;
|
||||
@@ -107,6 +104,34 @@ where
|
||||
diagnostic
|
||||
}
|
||||
|
||||
impl FileResolver for EmitterContext<'_> {
|
||||
fn path(&self, _file: File) -> &str {
|
||||
unimplemented!("Expected a Ruff file for rendering a Ruff diagnostic");
|
||||
}
|
||||
|
||||
fn input(&self, _file: File) -> Input {
|
||||
unimplemented!("Expected a Ruff file for rendering a Ruff diagnostic");
|
||||
}
|
||||
|
||||
fn notebook_index(&self, file: &UnifiedFile) -> Option<NotebookIndex> {
|
||||
match file {
|
||||
UnifiedFile::Ty(_) => {
|
||||
unimplemented!("Expected a Ruff file for rendering a Ruff diagnostic")
|
||||
}
|
||||
UnifiedFile::Ruff(file) => self.notebook_indexes.get(file.name()).cloned(),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_notebook(&self, file: &UnifiedFile) -> bool {
|
||||
match file {
|
||||
UnifiedFile::Ty(_) => {
|
||||
unimplemented!("Expected a Ruff file for rendering a Ruff diagnostic")
|
||||
}
|
||||
UnifiedFile::Ruff(file) => self.notebook_indexes.get(file.name()).is_some(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct MessageWithLocation<'a> {
|
||||
message: &'a Diagnostic,
|
||||
start_location: LineColumn,
|
||||
|
||||
@@ -73,7 +73,7 @@ fn message_to_rdjson_value(message: &Diagnostic) -> Value {
|
||||
},
|
||||
"code": {
|
||||
"value": message.secondary_code(),
|
||||
"url": message.to_url(),
|
||||
"url": message.to_ruff_url(),
|
||||
},
|
||||
"suggestions": rdjson_suggestions(fix.edits(), &source_code),
|
||||
})
|
||||
@@ -86,7 +86,7 @@ fn message_to_rdjson_value(message: &Diagnostic) -> Value {
|
||||
},
|
||||
"code": {
|
||||
"value": message.secondary_code(),
|
||||
"url": message.to_url(),
|
||||
"url": message.to_ruff_url(),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -87,9 +87,14 @@ fn detect_blind_exception(
|
||||
}
|
||||
}
|
||||
|
||||
let first_arg = arguments.args.first()?;
|
||||
let exception_argument_name = if is_pytest_raises {
|
||||
"expected_exception"
|
||||
} else {
|
||||
"exception"
|
||||
};
|
||||
|
||||
let builtin_symbol = semantic.resolve_builtin_symbol(first_arg)?;
|
||||
let exception_expr = arguments.find_argument_value(exception_argument_name, 0)?;
|
||||
let builtin_symbol = semantic.resolve_builtin_symbol(exception_expr)?;
|
||||
|
||||
match builtin_symbol {
|
||||
"Exception" => Some(ExceptionKind::Exception),
|
||||
|
||||
@@ -43,3 +43,29 @@ B017_0.py:57:36: B017 Do not assert blind exception: `Exception`
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ B017
|
||||
58 | raise ValueError("Multiple context managers")
|
||||
|
|
||||
|
||||
B017_0.py:62:10: B017 Do not assert blind exception: `Exception`
|
||||
|
|
||||
61 | def test_pytest_raises_keyword():
|
||||
62 | with pytest.raises(expected_exception=Exception):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
|
||||
63 | raise ValueError("Should be flagged")
|
||||
|
|
||||
|
||||
B017_0.py:68:18: B017 Do not assert blind exception: `Exception`
|
||||
|
|
||||
66 | class TestKwargs(unittest.TestCase):
|
||||
67 | def test_method(self):
|
||||
68 | with self.assertRaises(exception=Exception):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
|
||||
69 | raise ValueError("Should be flagged")
|
||||
|
|
||||
|
||||
B017_0.py:71:18: B017 Do not assert blind exception: `BaseException`
|
||||
|
|
||||
69 | raise ValueError("Should be flagged")
|
||||
70 |
|
||||
71 | with self.assertRaises(exception=BaseException):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
|
||||
72 | raise ValueError("Should be flagged")
|
||||
|
|
||||
|
||||
@@ -43,3 +43,29 @@ B017_0.py:57:36: B017 Do not assert blind exception: `Exception`
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ B017
|
||||
58 | raise ValueError("Multiple context managers")
|
||||
|
|
||||
|
||||
B017_0.py:62:10: B017 Do not assert blind exception: `Exception`
|
||||
|
|
||||
61 | def test_pytest_raises_keyword():
|
||||
62 | with pytest.raises(expected_exception=Exception):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
|
||||
63 | raise ValueError("Should be flagged")
|
||||
|
|
||||
|
||||
B017_0.py:68:18: B017 Do not assert blind exception: `Exception`
|
||||
|
|
||||
66 | class TestKwargs(unittest.TestCase):
|
||||
67 | def test_method(self):
|
||||
68 | with self.assertRaises(exception=Exception):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
|
||||
69 | raise ValueError("Should be flagged")
|
||||
|
|
||||
|
||||
B017_0.py:71:18: B017 Do not assert blind exception: `BaseException`
|
||||
|
|
||||
69 | raise ValueError("Should be flagged")
|
||||
70 |
|
||||
71 | with self.assertRaises(exception=BaseException):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
|
||||
72 | raise ValueError("Should be flagged")
|
||||
|
|
||||
|
||||
@@ -96,22 +96,43 @@ fn is_model_abstract(class_def: &ast::StmtClassDef) -> bool {
|
||||
continue;
|
||||
}
|
||||
for element in body {
|
||||
let Stmt::Assign(ast::StmtAssign { targets, value, .. }) = element else {
|
||||
continue;
|
||||
};
|
||||
for target in targets {
|
||||
let Expr::Name(ast::ExprName { id, .. }) = target else {
|
||||
continue;
|
||||
};
|
||||
if id != "abstract" {
|
||||
continue;
|
||||
match element {
|
||||
Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
|
||||
if targets
|
||||
.iter()
|
||||
.any(|target| is_abstract_true_assignment(target, Some(value)))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
if !is_const_true(value) {
|
||||
continue;
|
||||
Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => {
|
||||
if is_abstract_true_assignment(target, value.as_deref()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn is_abstract_true_assignment(target: &Expr, value: Option<&Expr>) -> bool {
|
||||
let Expr::Name(ast::ExprName { id, .. }) = target else {
|
||||
return false;
|
||||
};
|
||||
|
||||
if id != "abstract" {
|
||||
return false;
|
||||
}
|
||||
|
||||
let Some(value) = value else {
|
||||
return false;
|
||||
};
|
||||
|
||||
if !is_const_true(value) {
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_django/mod.rs
|
||||
snapshot_kind: text
|
||||
---
|
||||
DJ008.py:6:7: DJ008 Model does not define `__str__` method
|
||||
|
|
||||
@@ -31,3 +30,11 @@ DJ008.py:182:7: DJ008 Model does not define `__str__` method
|
||||
| ^^^^^^^^^^^^^^^^^^ DJ008
|
||||
183 | pass
|
||||
|
|
||||
|
||||
DJ008.py:215:7: DJ008 Model does not define `__str__` method
|
||||
|
|
||||
215 | class TypeAnnotatedNonAbstractModel(models.Model):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ DJ008
|
||||
216 | """Model with type-annotated abstract = False - should trigger DJ008"""
|
||||
217 | new_field = models.CharField(max_length=10)
|
||||
|
|
||||
|
||||
@@ -539,7 +539,21 @@ fn implicit_return(checker: &Checker, function_def: &ast::StmtFunctionDef, stmt:
|
||||
}
|
||||
|
||||
/// RET504
|
||||
fn unnecessary_assign(checker: &Checker, stack: &Stack) {
|
||||
pub(crate) fn unnecessary_assign(checker: &Checker, function_stmt: &Stmt) {
|
||||
let Stmt::FunctionDef(function_def) = function_stmt else {
|
||||
return;
|
||||
};
|
||||
let Some(stack) = create_stack(checker, function_def) else {
|
||||
return;
|
||||
};
|
||||
|
||||
if !result_exists(&stack.returns) {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(function_scope) = checker.semantic().function_scope(function_def) else {
|
||||
return;
|
||||
};
|
||||
for (assign, return_, stmt) in &stack.assignment_return {
|
||||
// Identify, e.g., `return x`.
|
||||
let Some(value) = return_.value.as_ref() else {
|
||||
@@ -583,6 +597,22 @@ fn unnecessary_assign(checker: &Checker, stack: &Stack) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let Some(assigned_binding) = function_scope
|
||||
.get(assigned_id)
|
||||
.map(|binding_id| checker.semantic().binding(binding_id))
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
// Check if there's any reference made to `assigned_binding` in another scope, e.g., nested
|
||||
// functions. If there is, skip this assignment.
|
||||
if assigned_binding
|
||||
.references()
|
||||
.map(|reference_id| checker.semantic().reference(reference_id))
|
||||
.any(|reference| reference.scope_id() != assigned_binding.scope)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut diagnostic = checker.report_diagnostic(
|
||||
UnnecessaryAssign {
|
||||
name: assigned_id.to_string(),
|
||||
@@ -665,24 +695,21 @@ fn superfluous_elif_else(checker: &Checker, stack: &Stack) {
|
||||
}
|
||||
}
|
||||
|
||||
/// Run all checks from the `flake8-return` plugin.
|
||||
pub(crate) fn function(checker: &Checker, function_def: &ast::StmtFunctionDef) {
|
||||
let ast::StmtFunctionDef {
|
||||
decorator_list,
|
||||
returns,
|
||||
body,
|
||||
..
|
||||
} = function_def;
|
||||
fn create_stack<'a>(
|
||||
checker: &'a Checker,
|
||||
function_def: &'a ast::StmtFunctionDef,
|
||||
) -> Option<Stack<'a>> {
|
||||
let ast::StmtFunctionDef { body, .. } = function_def;
|
||||
|
||||
// Find the last statement in the function.
|
||||
let Some(last_stmt) = body.last() else {
|
||||
// Skip empty functions.
|
||||
return;
|
||||
return None;
|
||||
};
|
||||
|
||||
// Skip functions that consist of a single return statement.
|
||||
if body.len() == 1 && matches!(last_stmt, Stmt::Return(_)) {
|
||||
return;
|
||||
return None;
|
||||
}
|
||||
|
||||
// Traverse the function body, to collect the stack.
|
||||
@@ -696,9 +723,29 @@ pub(crate) fn function(checker: &Checker, function_def: &ast::StmtFunctionDef) {
|
||||
|
||||
// Avoid false positives for generators.
|
||||
if stack.is_generator {
|
||||
return;
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(stack)
|
||||
}
|
||||
|
||||
/// Run all checks from the `flake8-return` plugin, except `RET504`, which is run
|
||||
/// after the semantic model is fully built.
|
||||
pub(crate) fn function(checker: &Checker, function_def: &ast::StmtFunctionDef) {
|
||||
let ast::StmtFunctionDef {
|
||||
decorator_list,
|
||||
returns,
|
||||
body,
|
||||
..
|
||||
} = function_def;
|
||||
|
||||
let Some(stack) = create_stack(checker, function_def) else {
|
||||
return;
|
||||
};
|
||||
let Some(last_stmt) = body.last() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if checker.any_rule_enabled(&[
|
||||
Rule::SuperfluousElseReturn,
|
||||
Rule::SuperfluousElseRaise,
|
||||
@@ -721,10 +768,6 @@ pub(crate) fn function(checker: &Checker, function_def: &ast::StmtFunctionDef) {
|
||||
if checker.is_rule_enabled(Rule::ImplicitReturn) {
|
||||
implicit_return(checker, function_def, last_stmt);
|
||||
}
|
||||
|
||||
if checker.is_rule_enabled(Rule::UnnecessaryAssign) {
|
||||
unnecessary_assign(checker, &stack);
|
||||
}
|
||||
} else {
|
||||
if checker.is_rule_enabled(Rule::UnnecessaryReturnNone) {
|
||||
// Skip functions that have a return annotation that is not `None`.
|
||||
|
||||
@@ -247,8 +247,6 @@ RET504.py:423:16: RET504 [*] Unnecessary assignment to `services` before `return
|
||||
422 | services = a["services"]
|
||||
423 | return services
|
||||
| ^^^^^^^^ RET504
|
||||
424 |
|
||||
425 | # See: https://github.com/astral-sh/ruff/issues/18411
|
||||
|
|
||||
= help: Remove unnecessary assignment
|
||||
|
||||
@@ -260,46 +258,46 @@ RET504.py:423:16: RET504 [*] Unnecessary assignment to `services` before `return
|
||||
423 |- return services
|
||||
422 |+ return a["services"]
|
||||
424 423 |
|
||||
425 424 | # See: https://github.com/astral-sh/ruff/issues/18411
|
||||
426 425 | def f():
|
||||
425 424 |
|
||||
426 425 | # See: https://github.com/astral-sh/ruff/issues/14052
|
||||
|
||||
RET504.py:429:12: RET504 [*] Unnecessary assignment to `x` before `return` statement
|
||||
RET504.py:458:12: RET504 [*] Unnecessary assignment to `x` before `return` statement
|
||||
|
|
||||
427 | (#=
|
||||
428 | x) = 1
|
||||
429 | return x
|
||||
456 | (#=
|
||||
457 | x) = 1
|
||||
458 | return x
|
||||
| ^ RET504
|
||||
430 |
|
||||
431 | def f():
|
||||
459 |
|
||||
460 | def f():
|
||||
|
|
||||
= help: Remove unnecessary assignment
|
||||
|
||||
ℹ Unsafe fix
|
||||
424 424 |
|
||||
425 425 | # See: https://github.com/astral-sh/ruff/issues/18411
|
||||
426 426 | def f():
|
||||
427 |- (#=
|
||||
428 |- x) = 1
|
||||
429 |- return x
|
||||
427 |+ return 1
|
||||
430 428 |
|
||||
431 429 | def f():
|
||||
432 430 | x = (1
|
||||
453 453 |
|
||||
454 454 | # See: https://github.com/astral-sh/ruff/issues/18411
|
||||
455 455 | def f():
|
||||
456 |- (#=
|
||||
457 |- x) = 1
|
||||
458 |- return x
|
||||
456 |+ return 1
|
||||
459 457 |
|
||||
460 458 | def f():
|
||||
461 459 | x = (1
|
||||
|
||||
RET504.py:434:12: RET504 [*] Unnecessary assignment to `x` before `return` statement
|
||||
RET504.py:463:12: RET504 [*] Unnecessary assignment to `x` before `return` statement
|
||||
|
|
||||
432 | x = (1
|
||||
433 | )
|
||||
434 | return x
|
||||
461 | x = (1
|
||||
462 | )
|
||||
463 | return x
|
||||
| ^ RET504
|
||||
|
|
||||
= help: Remove unnecessary assignment
|
||||
|
||||
ℹ Unsafe fix
|
||||
429 429 | return x
|
||||
430 430 |
|
||||
431 431 | def f():
|
||||
432 |- x = (1
|
||||
432 |+ return (1
|
||||
433 433 | )
|
||||
434 |- return x
|
||||
458 458 | return x
|
||||
459 459 |
|
||||
460 460 | def f():
|
||||
461 |- x = (1
|
||||
461 |+ return (1
|
||||
462 462 | )
|
||||
463 |- return x
|
||||
|
||||
@@ -36,6 +36,7 @@ mod tests {
|
||||
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("TC004_8.py"))]
|
||||
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("TC004_9.py"))]
|
||||
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("quote.py"))]
|
||||
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("whitespace.py"))]
|
||||
#[test_case(Rule::RuntimeStringUnion, Path::new("TC010_1.py"))]
|
||||
#[test_case(Rule::RuntimeStringUnion, Path::new("TC010_2.py"))]
|
||||
#[test_case(Rule::TypingOnlyFirstPartyImport, Path::new("TC001.py"))]
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
whitespace.py:5:26: TC004 [*] Move import `builtins` out of type-checking block. Import is used for more than type hinting.
|
||||
|
|
||||
3 | from typing import TYPE_CHECKING\
|
||||
4 |
|
||||
5 | if TYPE_CHECKING: import builtins
|
||||
| ^^^^^^^^ TC004
|
||||
6 | builtins.print("!")
|
||||
|
|
||||
= help: Move out of type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | # Regression test for: https://github.com/astral-sh/ruff/issues/19175
|
||||
2 2 | # there is a (potentially invisible) unicode formfeed character (000C) between `TYPE_CHECKING` and the backslash
|
||||
3 |-from typing import TYPE_CHECKING\
|
||||
3 |+from typing import TYPE_CHECKING; import builtins\
|
||||
4 4 |
|
||||
5 |-if TYPE_CHECKING: import builtins
|
||||
5 |+if TYPE_CHECKING: pass
|
||||
6 6 | builtins.print("!")
|
||||
@@ -912,6 +912,7 @@ mod tests {
|
||||
#[test_case(Path::new("docstring.pyi"))]
|
||||
#[test_case(Path::new("docstring_only.py"))]
|
||||
#[test_case(Path::new("empty.py"))]
|
||||
#[test_case(Path::new("multiple_strings.py"))]
|
||||
fn required_imports(path: &Path) -> Result<()> {
|
||||
let snapshot = format!("required_imports_{}", path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/isort/mod.rs
|
||||
---
|
||||
multiple_strings.py:1:1: I002 [*] Missing required import: `from __future__ import annotations`
|
||||
ℹ Safe fix
|
||||
1 1 | """This is a docstring."""
|
||||
2 |+from __future__ import annotations
|
||||
2 3 | "This is not a docstring."
|
||||
3 4 | "This is also not a docstring."
|
||||
4 5 |
|
||||
|
||||
multiple_strings.py:1:1: I002 [*] Missing required import: `from __future__ import generator_stop`
|
||||
ℹ Safe fix
|
||||
1 1 | """This is a docstring."""
|
||||
2 |+from __future__ import generator_stop
|
||||
2 3 | "This is not a docstring."
|
||||
3 4 | "This is also not a docstring."
|
||||
4 5 |
|
||||
@@ -18,11 +18,15 @@ use crate::checkers::ast::Checker;
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.getenv(1)
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.getenv("1")
|
||||
/// ```
|
||||
#[derive(ViolationMetadata)]
|
||||
|
||||
@@ -14,12 +14,12 @@ use crate::{AlwaysFixableViolation, Edit, Fix};
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from xml.etree import cElementTree
|
||||
/// from xml.etree import cElementTree as ET
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from xml.etree import ElementTree
|
||||
/// from xml.etree import ElementTree as ET
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
|
||||
@@ -43,7 +43,7 @@ use super::{
|
||||
/// ## Example
|
||||
///
|
||||
/// ```python
|
||||
/// from typing import TypeVar
|
||||
/// from typing import Generic, TypeVar
|
||||
///
|
||||
/// T = TypeVar("T")
|
||||
///
|
||||
|
||||
@@ -27,6 +27,8 @@ use crate::{AlwaysFixableViolation, Edit, Fix};
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import asyncio
|
||||
///
|
||||
/// raise asyncio.TimeoutError
|
||||
/// ```
|
||||
///
|
||||
|
||||
@@ -952,6 +952,9 @@ impl Display for SemanticSyntaxError {
|
||||
SemanticSyntaxErrorKind::LoadBeforeGlobalDeclaration { name, start: _ } => {
|
||||
write!(f, "name `{name}` is used prior to global declaration")
|
||||
}
|
||||
SemanticSyntaxErrorKind::LoadBeforeNonlocalDeclaration { name, start: _ } => {
|
||||
write!(f, "name `{name}` is used prior to nonlocal declaration")
|
||||
}
|
||||
SemanticSyntaxErrorKind::InvalidStarExpression => {
|
||||
f.write_str("Starred expression cannot be used here")
|
||||
}
|
||||
@@ -977,6 +980,15 @@ impl Display for SemanticSyntaxError {
|
||||
SemanticSyntaxErrorKind::NonlocalDeclarationAtModuleLevel => {
|
||||
write!(f, "nonlocal declaration not allowed at module level")
|
||||
}
|
||||
SemanticSyntaxErrorKind::NonlocalAndGlobal(name) => {
|
||||
write!(f, "name `{name}` is nonlocal and global")
|
||||
}
|
||||
SemanticSyntaxErrorKind::AnnotatedGlobal(name) => {
|
||||
write!(f, "annotated name `{name}` can't be global")
|
||||
}
|
||||
SemanticSyntaxErrorKind::AnnotatedNonlocal(name) => {
|
||||
write!(f, "annotated name `{name}` can't be nonlocal")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1207,6 +1219,24 @@ pub enum SemanticSyntaxErrorKind {
|
||||
/// [#111123]: https://github.com/python/cpython/issues/111123
|
||||
LoadBeforeGlobalDeclaration { name: String, start: TextSize },
|
||||
|
||||
/// Represents the use of a `nonlocal` variable before its `nonlocal` declaration.
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
/// ```python
|
||||
/// def f():
|
||||
/// counter = 0
|
||||
/// def increment():
|
||||
/// print(f"Adding 1 to {counter}")
|
||||
/// nonlocal counter # SyntaxError: name 'counter' is used prior to nonlocal declaration
|
||||
/// counter += 1
|
||||
/// ```
|
||||
///
|
||||
/// ## Known Issues
|
||||
///
|
||||
/// See [`LoadBeforeGlobalDeclaration`][Self::LoadBeforeGlobalDeclaration].
|
||||
LoadBeforeNonlocalDeclaration { name: String, start: TextSize },
|
||||
|
||||
/// Represents the use of a starred expression in an invalid location, such as a `return` or
|
||||
/// `yield` statement.
|
||||
///
|
||||
@@ -1307,6 +1337,15 @@ pub enum SemanticSyntaxErrorKind {
|
||||
|
||||
/// Represents a nonlocal declaration at module level
|
||||
NonlocalDeclarationAtModuleLevel,
|
||||
|
||||
/// Represents the same variable declared as both nonlocal and global
|
||||
NonlocalAndGlobal(String),
|
||||
|
||||
/// Represents a type annotation on a variable that's been declared global
|
||||
AnnotatedGlobal(String),
|
||||
|
||||
/// Represents a type annotation on a variable that's been declared nonlocal
|
||||
AnnotatedNonlocal(String),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, get_size2::GetSize)]
|
||||
|
||||
@@ -2094,6 +2094,20 @@ impl<'a> SemanticModel<'a> {
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
/// Finds and returns the [`Scope`] corresponding to a given [`ast::StmtFunctionDef`].
|
||||
///
|
||||
/// This method searches all scopes created by a function definition, comparing the
|
||||
/// [`TextRange`] of the provided `function_def` with the range of the function
|
||||
/// associated with the scope.
|
||||
pub fn function_scope(&self, function_def: &ast::StmtFunctionDef) -> Option<&Scope> {
|
||||
self.scopes.iter().find(|scope| {
|
||||
let Some(function) = scope.kind.as_function() else {
|
||||
return false;
|
||||
};
|
||||
function.range() == function_def.range()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ShadowedBinding {
|
||||
|
||||
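The `function_scope` helper added above is documented as a linear scan over all scopes, returning the one whose defining function node spans the same text range as the definition passed in. A standalone sketch of that lookup shape, using illustrative stand-in types rather than the semantic model's real `Scope` and `TextRange`:

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct TextRange {
    start: u32,
    end: u32,
}

struct Scope {
    name: &'static str,
    /// `Some(range)` for function scopes, `None` for e.g. the module scope.
    function_range: Option<TextRange>,
}

/// Find the scope whose defining function spans exactly `function_range`.
fn function_scope(scopes: &[Scope], function_range: TextRange) -> Option<&Scope> {
    scopes
        .iter()
        .find(|scope| scope.function_range == Some(function_range))
}

fn main() {
    let scopes = [
        Scope { name: "module", function_range: None },
        Scope { name: "f", function_range: Some(TextRange { start: 10, end: 42 }) },
    ];
    let found = function_scope(&scopes, TextRange { start: 10, end: 42 }).unwrap();
    assert_eq!(found.name, "f");
}
```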
@@ -301,7 +301,7 @@ fn to_lsp_diagnostic(
|
||||
severity,
|
||||
tags,
|
||||
code,
|
||||
code_description: diagnostic.to_url().and_then(|url| {
|
||||
code_description: diagnostic.to_ruff_url().and_then(|url| {
|
||||
Some(lsp_types::CodeDescription {
|
||||
href: lsp_types::Url::parse(&url).ok()?,
|
||||
})
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_wasm"
|
||||
version = "0.12.2"
|
||||
version = "0.12.3"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
||||
3
crates/ty/docs/cli.md
generated
@@ -84,7 +84,8 @@ over all configuration files.</p>
|
||||
<li><code>3.11</code></li>
|
||||
<li><code>3.12</code></li>
|
||||
<li><code>3.13</code></li>
|
||||
</ul></dd><dt id="ty-check--respect-ignore-files"><a href="#ty-check--respect-ignore-files"><code>--respect-ignore-files</code></a></dt><dd><p>Respect file exclusions via <code>.gitignore</code> and other standard ignore files. Use <code>--no-respect-gitignore</code> to disable</p>
|
||||
</ul></dd><dt id="ty-check--quiet"><a href="#ty-check--quiet"><code>--quiet</code></a></dt><dd><p>Use quiet output</p>
|
||||
</dd><dt id="ty-check--respect-ignore-files"><a href="#ty-check--respect-ignore-files"><code>--respect-ignore-files</code></a></dt><dd><p>Respect file exclusions via <code>.gitignore</code> and other standard ignore files. Use <code>--no-respect-gitignore</code> to disable</p>
|
||||
</dd><dt id="ty-check--typeshed"><a href="#ty-check--typeshed"><code>--typeshed</code></a>, <code>--custom-typeshed-dir</code> <i>path</i></dt><dd><p>Custom directory to use for stdlib typeshed stubs</p>
|
||||
</dd><dt id="ty-check--verbose"><a href="#ty-check--verbose"><code>--verbose</code></a>, <code>-v</code></dt><dd><p>Use verbose output (or <code>-vv</code> and <code>-vvv</code> for more verbose output)</p>
|
||||
</dd><dt id="ty-check--warn"><a href="#ty-check--warn"><code>--warn</code></a> <i>rule</i></dt><dd><p>Treat the given rule as having severity 'warn'. Can be specified multiple times.</p>
|
||||
|
||||
@@ -323,8 +323,8 @@ pub enum OutputFormat {
|
||||
Concise,
|
||||
}
|
||||
|
||||
impl From<OutputFormat> for ruff_db::diagnostic::DiagnosticFormat {
|
||||
fn from(format: OutputFormat) -> ruff_db::diagnostic::DiagnosticFormat {
|
||||
impl From<OutputFormat> for ty_project::metadata::options::OutputFormat {
|
||||
fn from(format: OutputFormat) -> ty_project::metadata::options::OutputFormat {
|
||||
match format {
|
||||
OutputFormat::Full => Self::Full,
|
||||
OutputFormat::Concise => Self::Concise,
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
mod args;
|
||||
mod logging;
|
||||
mod printer;
|
||||
mod python_version;
|
||||
mod version;
|
||||
|
||||
pub use args::Cli;
|
||||
use ty_static::EnvVars;
|
||||
|
||||
use std::io::{self, BufWriter, Write, stdout};
|
||||
use std::fmt::Write;
|
||||
use std::process::{ExitCode, Termination};
|
||||
|
||||
use anyhow::Result;
|
||||
@@ -14,6 +15,7 @@ use std::sync::Mutex;
|
||||
|
||||
use crate::args::{CheckCommand, Command, TerminalColor};
|
||||
use crate::logging::setup_tracing;
|
||||
use crate::printer::Printer;
|
||||
use anyhow::{Context, anyhow};
|
||||
use clap::{CommandFactory, Parser};
|
||||
use colored::Colorize;
|
||||
@@ -25,7 +27,7 @@ use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
|
||||
use salsa::plumbing::ZalsaDatabase;
|
||||
use ty_project::metadata::options::ProjectOptionsOverrides;
|
||||
use ty_project::watch::ProjectWatcher;
|
||||
use ty_project::{Db, DummyReporter, Reporter, watch};
|
||||
use ty_project::{Db, watch};
|
||||
use ty_project::{ProjectDatabase, ProjectMetadata};
|
||||
use ty_server::run_server;
|
||||
|
||||
@@ -42,6 +44,8 @@ pub fn run() -> anyhow::Result<ExitStatus> {
|
||||
Command::Check(check_args) => run_check(check_args),
|
||||
Command::Version => version().map(|()| ExitStatus::Success),
|
||||
Command::GenerateShellCompletion { shell } => {
|
||||
use std::io::stdout;
|
||||
|
||||
shell.generate(&mut Cli::command(), &mut stdout());
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
@@ -49,7 +53,7 @@ pub fn run() -> anyhow::Result<ExitStatus> {
|
||||
}
|
||||
|
||||
pub(crate) fn version() -> Result<()> {
|
||||
let mut stdout = BufWriter::new(io::stdout().lock());
|
||||
let mut stdout = Printer::default().stream_for_requested_summary().lock();
|
||||
let version_info = crate::version::version();
|
||||
writeln!(stdout, "ty {}", &version_info)?;
|
||||
Ok(())
|
||||
@@ -61,6 +65,8 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
||||
let verbosity = args.verbosity.level();
|
||||
let _guard = setup_tracing(verbosity, args.color.unwrap_or_default())?;
|
||||
|
||||
let printer = Printer::default().with_verbosity(verbosity);
|
||||
|
||||
tracing::warn!(
|
||||
"ty is pre-release software and not ready for production use. \
|
||||
Expect to encounter bugs, missing features, and fatal errors.",
|
||||
@@ -125,7 +131,8 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
||||
}
|
||||
|
||||
let project_options_overrides = ProjectOptionsOverrides::new(config_file, options);
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new(project_options_overrides);
|
||||
let (main_loop, main_loop_cancellation_token) =
|
||||
MainLoop::new(project_options_overrides, printer);
|
||||
|
||||
// Listen to Ctrl+C and abort the watch mode.
|
||||
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
|
||||
@@ -143,7 +150,7 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
||||
main_loop.run(&mut db)?
|
||||
};
|
||||
|
||||
let mut stdout = stdout().lock();
|
||||
let mut stdout = printer.stream_for_requested_summary().lock();
|
||||
match std::env::var(EnvVars::TY_MEMORY_REPORT).as_deref() {
|
||||
Ok("short") => write!(stdout, "{}", db.salsa_memory_dump().display_short())?,
|
||||
Ok("mypy_primer") => write!(stdout, "{}", db.salsa_memory_dump().display_mypy_primer())?,
|
||||
@@ -192,12 +199,16 @@ struct MainLoop {
|
||||
/// The file system watcher, if running in watch mode.
|
||||
watcher: Option<ProjectWatcher>,
|
||||
|
||||
/// Interface for displaying information to the user.
|
||||
printer: Printer,
|
||||
|
||||
project_options_overrides: ProjectOptionsOverrides,
|
||||
}
|
||||
|
||||
impl MainLoop {
|
||||
fn new(
|
||||
project_options_overrides: ProjectOptionsOverrides,
|
||||
printer: Printer,
|
||||
) -> (Self, MainLoopCancellationToken) {
|
||||
let (sender, receiver) = crossbeam_channel::bounded(10);
|
||||
|
||||
@@ -207,6 +218,7 @@ impl MainLoop {
|
||||
receiver,
|
||||
watcher: None,
|
||||
project_options_overrides,
|
||||
printer,
|
||||
},
|
||||
MainLoopCancellationToken { sender },
|
||||
)
|
||||
@@ -223,32 +235,24 @@ impl MainLoop {
|
||||
|
||||
// Do not show progress bars with `--watch`; indicatif does not seem to
|
||||
// handle cancelling independent progress bars very well.
|
||||
self.run_with_progress::<DummyReporter>(db)?;
|
||||
// TODO(zanieb): We can probably use `MultiProgress` to handle this case in the future.
|
||||
self.printer = self.printer.with_no_progress();
|
||||
self.run(db)?;
|
||||
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
|
||||
fn run(self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
|
||||
self.run_with_progress::<IndicatifReporter>(db)
|
||||
}
|
||||
|
||||
fn run_with_progress<R>(mut self, db: &mut ProjectDatabase) -> Result<ExitStatus>
|
||||
where
|
||||
R: Reporter + Default + 'static,
|
||||
{
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
|
||||
let result = self.main_loop::<R>(db);
|
||||
let result = self.main_loop(db);
|
||||
|
||||
tracing::debug!("Exiting main loop");
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn main_loop<R>(&mut self, db: &mut ProjectDatabase) -> Result<ExitStatus>
|
||||
where
|
||||
R: Reporter + Default + 'static,
|
||||
{
|
||||
fn main_loop(mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
|
||||
// Schedule the first check.
|
||||
tracing::debug!("Starting main loop");
|
||||
|
||||
@@ -264,7 +268,7 @@ impl MainLoop {
|
||||
// to prevent blocking the main loop here.
|
||||
rayon::spawn(move || {
|
||||
match salsa::Cancelled::catch(|| {
|
||||
let mut reporter = R::default();
|
||||
let mut reporter = IndicatifReporter::from(self.printer);
|
||||
db.check_with_reporter(&mut reporter)
|
||||
}) {
|
||||
Ok(result) => {
|
||||
@@ -286,7 +290,7 @@ impl MainLoop {
|
||||
} => {
|
||||
let terminal_settings = db.project().settings(db).terminal();
|
||||
let display_config = DisplayDiagnosticConfig::default()
|
||||
.format(terminal_settings.output_format)
|
||||
.format(terminal_settings.output_format.into())
|
||||
.color(colored::control::SHOULD_COLORIZE.should_colorize());
|
||||
|
||||
if check_revision == revision {
|
||||
@@ -299,10 +303,12 @@ impl MainLoop {
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
|
||||
let mut stdout = stdout().lock();
|
||||
|
||||
if result.is_empty() {
|
||||
writeln!(stdout, "{}", "All checks passed!".green().bold())?;
|
||||
writeln!(
|
||||
self.printer.stream_for_success_summary(),
|
||||
"{}",
|
||||
"All checks passed!".green().bold()
|
||||
)?;
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return Ok(ExitStatus::Success);
|
||||
@@ -311,14 +317,19 @@ impl MainLoop {
|
||||
let mut max_severity = Severity::Info;
|
||||
let diagnostics_count = result.len();
|
||||
|
||||
let mut stdout = self.printer.stream_for_details().lock();
|
||||
for diagnostic in result {
|
||||
write!(stdout, "{}", diagnostic.display(db, &display_config))?;
|
||||
// Only render diagnostics if they're going to be displayed, since doing
|
||||
// so is expensive.
|
||||
if stdout.is_enabled() {
|
||||
write!(stdout, "{}", diagnostic.display(db, &display_config))?;
|
||||
}
|
||||
|
||||
max_severity = max_severity.max(diagnostic.severity());
|
||||
}
|
||||
|
||||
writeln!(
|
||||
stdout,
|
||||
self.printer.stream_for_failure_summary(),
|
||||
"Found {} diagnostic{}",
|
||||
diagnostics_count,
|
||||
if diagnostics_count > 1 { "s" } else { "" }
|
||||
@@ -378,27 +389,53 @@ impl MainLoop {
|
||||
}
|
||||
|
||||
/// A progress reporter for `ty check`.
|
||||
#[derive(Default)]
|
||||
struct IndicatifReporter(Option<indicatif::ProgressBar>);
|
||||
enum IndicatifReporter {
|
||||
/// A constructed reporter that is not yet ready; contains the target for the progress bar.
|
||||
Pending(indicatif::ProgressDrawTarget),
|
||||
/// A reporter that is ready, containing a progress bar to report to.
|
||||
///
|
||||
/// Initialization of the bar is deferred to [`ty_project::ProgressReporter::set_files`] so we
|
||||
/// do not initialize the bar too early as it may take a while to collect the number of files to
|
||||
/// process and we don't want to display an empty "0/0" bar.
|
||||
Initialized(indicatif::ProgressBar),
|
||||
}
|
||||
|
||||
impl ty_project::Reporter for IndicatifReporter {
|
||||
impl From<Printer> for IndicatifReporter {
|
||||
fn from(printer: Printer) -> Self {
|
||||
Self::Pending(printer.progress_target())
|
||||
}
|
||||
}
|
||||
|
||||
impl ty_project::ProgressReporter for IndicatifReporter {
|
||||
fn set_files(&mut self, files: usize) {
|
||||
let progress = indicatif::ProgressBar::new(files as u64);
|
||||
progress.set_style(
|
||||
let target = match std::mem::replace(
|
||||
self,
|
||||
IndicatifReporter::Pending(indicatif::ProgressDrawTarget::hidden()),
|
||||
) {
|
||||
Self::Pending(target) => target,
|
||||
Self::Initialized(_) => panic!("The progress reporter should only be initialized once"),
|
||||
};
|
||||
|
||||
let bar = indicatif::ProgressBar::with_draw_target(Some(files as u64), target);
|
||||
bar.set_style(
|
||||
indicatif::ProgressStyle::with_template(
|
||||
"{msg:8.dim} {bar:60.green/dim} {pos}/{len} files",
|
||||
)
|
||||
.unwrap()
|
||||
.progress_chars("--"),
|
||||
);
|
||||
progress.set_message("Checking");
|
||||
|
||||
self.0 = Some(progress);
|
||||
bar.set_message("Checking");
|
||||
*self = Self::Initialized(bar);
|
||||
}
|
||||
|
||||
fn report_file(&self, _file: &ruff_db::files::File) {
|
||||
if let Some(ref progress_bar) = self.0 {
|
||||
progress_bar.inc(1);
|
||||
match self {
|
||||
IndicatifReporter::Initialized(progress_bar) => {
|
||||
progress_bar.inc(1);
|
||||
}
|
||||
IndicatifReporter::Pending(_) => {
|
||||
panic!("`report_file` called before `set_files`")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
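As the doc comment on `IndicatifReporter` notes, the progress bar is only constructed once `set_files` supplies a total, so an empty "0/0" bar is never drawn and a second initialization is treated as a bug. A std-only sketch of that Pending/Initialized state machine (no `indicatif`; names are illustrative):

```rust
enum Reporter {
    Pending,
    Initialized { total: u64, done: u64 },
}

impl Reporter {
    /// Deferred initialization: only now do we know how many files to expect.
    fn set_files(&mut self, files: u64) {
        match self {
            Reporter::Pending => *self = Reporter::Initialized { total: files, done: 0 },
            Reporter::Initialized { .. } => panic!("the reporter should only be initialized once"),
        }
    }

    fn report_file(&mut self) {
        match self {
            Reporter::Initialized { total, done } => {
                *done += 1;
                println!("Checking {done}/{total} files");
            }
            Reporter::Pending => panic!("`report_file` called before `set_files`"),
        }
    }
}

fn main() {
    let mut reporter = Reporter::Pending;
    reporter.set_files(3);
    for _ in 0..3 {
        reporter.report_file();
    }
}
```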
@@ -24,15 +24,32 @@ pub(crate) struct Verbosity {
|
||||
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
|
||||
action = clap::ArgAction::Count,
|
||||
global = true,
|
||||
overrides_with = "quiet",
|
||||
)]
|
||||
verbose: u8,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Use quiet output",
|
||||
action = clap::ArgAction::Count,
|
||||
global = true,
|
||||
overrides_with = "verbose",
|
||||
)]
|
||||
quiet: u8,
|
||||
}
|
||||
|
||||
impl Verbosity {
|
||||
/// Returns the verbosity level based on the number of `-v` flags.
|
||||
/// Returns the verbosity level based on the number of `-v` and `-q` flags.
|
||||
///
|
||||
/// Returns `None` if the user did not specify any verbosity flags.
|
||||
pub(crate) fn level(&self) -> VerbosityLevel {
|
||||
// `--quiet` and `--verbose` are mutually exclusive in Clap, so we can just check one first.
|
||||
match self.quiet {
|
||||
0 => {}
|
||||
_ => return VerbosityLevel::Quiet,
|
||||
// TODO(zanieb): Add support for `-qq` with a "silent" mode
|
||||
}
|
||||
|
||||
match self.verbose {
|
||||
0 => VerbosityLevel::Default,
|
||||
1 => VerbosityLevel::Verbose,
|
||||
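The `--quiet`/`--verbose` flags above rely on clap's `overrides_with` so that giving one flag cancels the other instead of producing a conflict error, which is why `level()` can check `quiet` first. A self-contained sketch of the same wiring, assuming the `clap` crate with the `derive` feature; the struct and field names here are illustrative:

```rust
use clap::Parser;

#[derive(Parser, Debug)]
struct Cli {
    /// Use verbose output (`-v`, `-vv`, ...).
    #[arg(short, long, action = clap::ArgAction::Count, overrides_with = "quiet")]
    verbose: u8,

    /// Use quiet output.
    #[arg(short, long, action = clap::ArgAction::Count, overrides_with = "verbose")]
    quiet: u8,
}

fn main() {
    let cli = Cli::parse();
    // Because each flag overrides the other, only the one given last is in effect.
    if cli.quiet > 0 {
        println!("quiet output");
    } else {
        println!("verbosity level {}", cli.verbose);
    }
}
```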
@@ -42,9 +59,14 @@ impl Verbosity {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Default)]
|
||||
pub(crate) enum VerbosityLevel {
|
||||
/// Quiet output. Only shows Ruff and ty events up to the [`ERROR`](tracing::Level::ERROR).
|
||||
/// Silences output except for summary information.
|
||||
Quiet,
|
||||
|
||||
/// Default output level. Only shows Ruff and ty events up to the [`WARN`](tracing::Level::WARN).
|
||||
#[default]
|
||||
Default,
|
||||
|
||||
/// Enables verbose output. Emits Ruff and ty events up to the [`INFO`](tracing::Level::INFO).
|
||||
@@ -62,6 +84,7 @@ pub(crate) enum VerbosityLevel {
|
||||
impl VerbosityLevel {
|
||||
const fn level_filter(self) -> LevelFilter {
|
||||
match self {
|
||||
VerbosityLevel::Quiet => LevelFilter::ERROR,
|
||||
VerbosityLevel::Default => LevelFilter::WARN,
|
||||
VerbosityLevel::Verbose => LevelFilter::INFO,
|
||||
VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
|
||||
|
||||
172
crates/ty/src/printer.rs
Normal file
@@ -0,0 +1,172 @@
|
||||
use std::io::StdoutLock;
|
||||
|
||||
use indicatif::ProgressDrawTarget;
|
||||
|
||||
use crate::logging::VerbosityLevel;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
|
||||
pub(crate) struct Printer {
|
||||
verbosity: VerbosityLevel,
|
||||
no_progress: bool,
|
||||
}
|
||||
|
||||
impl Printer {
|
||||
#[must_use]
|
||||
pub(crate) fn with_no_progress(self) -> Self {
|
||||
Self {
|
||||
verbosity: self.verbosity,
|
||||
no_progress: true,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(crate) fn with_verbosity(self, verbosity: VerbosityLevel) -> Self {
|
||||
Self {
|
||||
verbosity,
|
||||
no_progress: self.no_progress,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`ProgressDrawTarget`] for this printer.
|
||||
pub(crate) fn progress_target(self) -> ProgressDrawTarget {
|
||||
if self.no_progress {
|
||||
return ProgressDrawTarget::hidden();
|
||||
}
|
||||
|
||||
match self.verbosity {
|
||||
VerbosityLevel::Quiet => ProgressDrawTarget::hidden(),
|
||||
VerbosityLevel::Default => ProgressDrawTarget::stderr(),
|
||||
// Hide the progress bar when in verbose mode.
|
||||
// Otherwise, it gets interleaved with log messages.
|
||||
VerbosityLevel::Verbose => ProgressDrawTarget::hidden(),
|
||||
VerbosityLevel::ExtraVerbose => ProgressDrawTarget::hidden(),
|
||||
VerbosityLevel::Trace => ProgressDrawTarget::hidden(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`Stdout`] stream for important messages.
|
||||
///
|
||||
/// Unlike [`Self::stdout_general`], the returned stream will be enabled when
|
||||
/// [`VerbosityLevel::Quiet`] is used.
|
||||
fn stdout_important(self) -> Stdout {
|
||||
match self.verbosity {
|
||||
VerbosityLevel::Quiet => Stdout::enabled(),
|
||||
VerbosityLevel::Default => Stdout::enabled(),
|
||||
VerbosityLevel::Verbose => Stdout::enabled(),
|
||||
VerbosityLevel::ExtraVerbose => Stdout::enabled(),
|
||||
VerbosityLevel::Trace => Stdout::enabled(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`Stdout`] stream for general messages.
|
||||
///
|
||||
/// The returned stream will be disabled when [`VerbosityLevel::Quiet`] is used.
|
||||
fn stdout_general(self) -> Stdout {
|
||||
match self.verbosity {
|
||||
VerbosityLevel::Quiet => Stdout::disabled(),
|
||||
VerbosityLevel::Default => Stdout::enabled(),
|
||||
VerbosityLevel::Verbose => Stdout::enabled(),
|
||||
VerbosityLevel::ExtraVerbose => Stdout::enabled(),
|
||||
VerbosityLevel::Trace => Stdout::enabled(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`Stdout`] stream for a summary message that was explicitly requested by the
|
||||
/// user.
|
||||
///
|
||||
/// For example, in `ty version` the user has requested the version information and we should
|
||||
/// display it even if [`VerbosityLevel::Quiet`] is used. Or, in `ty check`, if the
|
||||
/// `TY_MEMORY_REPORT` variable has been set, we should display the memory report because the
|
||||
/// user has opted in to displaying it.
|
||||
pub(crate) fn stream_for_requested_summary(self) -> Stdout {
|
||||
self.stdout_important()
|
||||
}
|
||||
|
||||
/// Return the [`Stdout`] stream for a summary message on failure.
|
||||
///
|
||||
/// For example, in `ty check`, this would be used for the message indicating the number of
|
||||
/// diagnostics found. The failure summary should capture information that is not reflected in
|
||||
/// the exit code.
|
||||
pub(crate) fn stream_for_failure_summary(self) -> Stdout {
|
||||
self.stdout_important()
|
||||
}
|
||||
|
||||
/// Return the [`Stdout`] stream for a summary message on success.
|
||||
///
|
||||
/// For example, in `ty check`, this would be used for the message indicating that no diagnostics
|
||||
/// were found. The success summary does not capture important information for users who have
|
||||
/// opted in to [`VerbosityLevel::Quiet`].
|
||||
pub(crate) fn stream_for_success_summary(self) -> Stdout {
|
||||
self.stdout_general()
|
||||
}
|
||||
|
||||
/// Return the [`Stdout`] stream for detailed messages.
|
||||
///
|
||||
/// For example, in `ty check`, this would be used for the diagnostic output.
|
||||
pub(crate) fn stream_for_details(self) -> Stdout {
|
||||
self.stdout_general()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub(crate) enum StreamStatus {
|
||||
Enabled,
|
||||
Disabled,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Stdout {
|
||||
status: StreamStatus,
|
||||
lock: Option<StdoutLock<'static>>,
|
||||
}
|
||||
|
||||
impl Stdout {
|
||||
fn enabled() -> Self {
|
||||
Self {
|
||||
status: StreamStatus::Enabled,
|
||||
lock: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn disabled() -> Self {
|
||||
Self {
|
||||
status: StreamStatus::Disabled,
|
||||
lock: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn lock(mut self) -> Self {
|
||||
match self.status {
|
||||
StreamStatus::Enabled => {
|
||||
// Drop the previous lock first, to avoid deadlocking
|
||||
self.lock.take();
|
||||
self.lock = Some(std::io::stdout().lock());
|
||||
}
|
||||
StreamStatus::Disabled => self.lock = None,
|
||||
}
|
||||
self
|
||||
}
|
||||
|
||||
fn handle(&mut self) -> Box<dyn std::io::Write + '_> {
|
||||
match self.lock.as_mut() {
|
||||
Some(lock) => Box::new(lock),
|
||||
None => Box::new(std::io::stdout()),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_enabled(&self) -> bool {
|
||||
matches!(self.status, StreamStatus::Enabled)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Write for Stdout {
|
||||
fn write_str(&mut self, s: &str) -> std::fmt::Result {
|
||||
match self.status {
|
||||
StreamStatus::Enabled => {
|
||||
let _ = write!(self.handle(), "{s}");
|
||||
Ok(())
|
||||
}
|
||||
StreamStatus::Disabled => Ok(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
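Taken together, the `Printer` methods above encode a small visibility policy: explicitly requested summaries and failure summaries still print under `--quiet`, while success summaries and detailed output are silenced. A standalone sketch of that policy as a plain function (illustrative names; the real code hands back an enabled or disabled `Stdout` wrapper rather than a bool, and also covers the verbose levels):

```rust
#[derive(Clone, Copy)]
enum Verbosity {
    Quiet,
    Default,
}

#[derive(Clone, Copy)]
enum Message {
    RequestedSummary,
    FailureSummary,
    SuccessSummary,
    Details,
}

fn is_visible(verbosity: Verbosity, message: Message) -> bool {
    match (verbosity, message) {
        // Explicitly requested output and failure summaries always print.
        (_, Message::RequestedSummary | Message::FailureSummary) => true,
        // `--quiet` silences success summaries and detailed output.
        (Verbosity::Quiet, _) => false,
        (Verbosity::Default, _) => true,
    }
}

fn main() {
    assert!(is_visible(Verbosity::Quiet, Message::RequestedSummary));
    assert!(is_visible(Verbosity::Quiet, Message::FailureSummary));
    assert!(!is_visible(Verbosity::Quiet, Message::Details));
    assert!(!is_visible(Verbosity::Quiet, Message::SuccessSummary));
    assert!(is_visible(Verbosity::Default, Message::Details));
}
```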
@@ -14,6 +14,64 @@ use std::{
|
||||
};
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[test]
|
||||
fn test_quiet_output() -> anyhow::Result<()> {
|
||||
let case = CliTest::with_file("test.py", "x: int = 1")?;
|
||||
|
||||
// By default, we emit an "all checks passed" message
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
WARN ty is pre-release software and not ready for production use. Expect to encounter bugs, missing features, and fatal errors.
|
||||
");
|
||||
|
||||
// With `quiet`, the message is not displayed
|
||||
assert_cmd_snapshot!(case.command().arg("--quiet"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
|
||||
let case = CliTest::with_file("test.py", "x: int = 'foo'")?;
|
||||
|
||||
// By default, we emit a diagnostic
|
||||
assert_cmd_snapshot!(case.command(), @r#"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
error[invalid-assignment]: Object of type `Literal["foo"]` is not assignable to `int`
|
||||
--> test.py:1:1
|
||||
|
|
||||
1 | x: int = 'foo'
|
||||
| ^
|
||||
|
|
||||
info: rule `invalid-assignment` is enabled by default
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
WARN ty is pre-release software and not ready for production use. Expect to encounter bugs, missing features, and fatal errors.
|
||||
"#);
|
||||
|
||||
// With `quiet`, the diagnostic is not displayed, just the summary message
|
||||
assert_cmd_snapshot!(case.command().arg("--quiet"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_run_in_sub_directory() -> anyhow::Result<()> {
|
||||
let case = CliTest::with_files([("test.py", "~"), ("subdir/nothing", "")])?;
|
||||
|
||||
@@ -15,9 +15,12 @@ bitflags = { workspace = true }
|
||||
ruff_db = { workspace = true }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_python_parser = { workspace = true }
|
||||
ruff_python_trivia = { workspace = true }
|
||||
ruff_source_file = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
ty_python_semantic = { workspace = true }
|
||||
|
||||
regex = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
smallvec = { workspace = true }
|
||||
|
||||
@@ -536,6 +536,9 @@ _private_type_var_tuple = TypeVarTuple("_private_type_var_tuple")
|
||||
public_explicit_type_alias: TypeAlias = Literal[1]
|
||||
_private_explicit_type_alias: TypeAlias = Literal[1]
|
||||
|
||||
public_implicit_union_alias = int | str
|
||||
_private_implicit_union_alias = int | str
|
||||
|
||||
class PublicProtocol(Protocol):
|
||||
def method(self) -> None: ...
|
||||
|
||||
@@ -557,7 +560,9 @@ class _PrivateProtocol(Protocol):
|
||||
test.assert_completions_include("public_type_var_tuple");
|
||||
test.assert_completions_do_not_include("_private_type_var_tuple");
|
||||
test.assert_completions_include("public_explicit_type_alias");
|
||||
test.assert_completions_include("_private_explicit_type_alias");
|
||||
test.assert_completions_do_not_include("_private_explicit_type_alias");
|
||||
test.assert_completions_include("public_implicit_union_alias");
|
||||
test.assert_completions_do_not_include("_private_implicit_union_alias");
|
||||
test.assert_completions_include("PublicProtocol");
|
||||
test.assert_completions_do_not_include("_PrivateProtocol");
|
||||
}
|
||||
@@ -2391,6 +2396,48 @@ Cougar = 3
|
||||
test.assert_completions_include("Cheetah");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_import_with_submodule1() {
|
||||
let test = CursorTest::builder()
|
||||
.source("main.py", "from package import <CURSOR>")
|
||||
.source("package/__init__.py", "")
|
||||
.source("package/foo.py", "")
|
||||
.source("package/bar.pyi", "")
|
||||
.source("package/foo-bar.py", "")
|
||||
.source("package/data.txt", "")
|
||||
.source("package/sub/__init__.py", "")
|
||||
.source("package/not-a-submodule/__init__.py", "")
|
||||
.build();
|
||||
|
||||
test.assert_completions_include("foo");
|
||||
test.assert_completions_include("bar");
|
||||
test.assert_completions_include("sub");
|
||||
test.assert_completions_do_not_include("foo-bar");
|
||||
test.assert_completions_do_not_include("data");
|
||||
test.assert_completions_do_not_include("not-a-submodule");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_import_with_vendored_submodule1() {
|
||||
let test = cursor_test(
|
||||
"\
|
||||
from http import <CURSOR>
|
||||
",
|
||||
);
|
||||
test.assert_completions_include("client");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_import_with_vendored_submodule2() {
|
||||
let test = cursor_test(
|
||||
"\
|
||||
from email import <CURSOR>
|
||||
",
|
||||
);
|
||||
test.assert_completions_include("mime");
|
||||
test.assert_completions_do_not_include("base");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn import_submodule_not_attribute1() {
|
||||
let test = cursor_test(
|
||||
|
||||
664
crates/ty_ide/src/docstring.rs
Normal file
@@ -0,0 +1,664 @@
|
||||
//! Docstring parsing utilities for language server features.
|
||||
//!
|
||||
//! This module provides functionality for extracting structured information from
|
||||
//! Python docstrings, including parameter documentation for signature help.
|
||||
//! Supports Google-style, NumPy-style, and reST/Sphinx-style docstrings.
|
||||
//! There are no formal specifications for any of these formats, so the parsing
|
||||
//! logic needs to be tolerant of variations.
|
||||
|
||||
use regex::Regex;
|
||||
use ruff_python_trivia::leading_indentation;
|
||||
use ruff_source_file::UniversalNewlines;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
// Static regex instances to avoid recompilation
|
||||
static GOOGLE_SECTION_REGEX: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(r"(?i)^\s*(Args|Arguments|Parameters)\s*:\s*$")
|
||||
.expect("Google section regex should be valid")
|
||||
});
|
||||
|
||||
static GOOGLE_PARAM_REGEX: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(r"^\s*(\*?\*?\w+)\s*(\(.*?\))?\s*:\s*(.+)")
|
||||
.expect("Google parameter regex should be valid")
|
||||
});
|
||||
|
||||
static NUMPY_SECTION_REGEX: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(r"(?i)^\s*Parameters\s*$").expect("NumPy section regex should be valid")
|
||||
});
|
||||
|
||||
static NUMPY_UNDERLINE_REGEX: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"^\s*-+\s*$").expect("NumPy underline regex should be valid"));
|
||||
|
||||
static REST_PARAM_REGEX: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(r"^\s*:param\s+(?:(\w+)\s+)?(\w+)\s*:\s*(.+)")
|
||||
.expect("reST parameter regex should be valid")
|
||||
});
|
||||
|
||||
/// Extract parameter documentation from popular docstring formats.
|
||||
/// Returns a map of parameter names to their documentation.
|
||||
pub fn get_parameter_documentation(docstring: &str) -> HashMap<String, String> {
|
||||
let mut param_docs = HashMap::new();
|
||||
|
||||
// Google-style docstrings
|
||||
param_docs.extend(extract_google_style_params(docstring));
|
||||
|
||||
// NumPy-style docstrings
|
||||
param_docs.extend(extract_numpy_style_params(docstring));
|
||||
|
||||
// reST/Sphinx-style docstrings
|
||||
param_docs.extend(extract_rest_style_params(docstring));
|
||||
|
||||
param_docs
|
||||
}
|
||||
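`get_parameter_documentation` merges the results of three tolerant extractors (Google, NumPy, and reST styles). As a rough illustration of the reST half, here is a standalone sketch that pulls `:param [type] name: text` lines into a map, assuming the `regex` crate; the pattern is simplified relative to `REST_PARAM_REGEX` above and is recompiled on every call instead of being cached in a `LazyLock`:

```rust
use std::collections::HashMap;

fn rest_params(docstring: &str) -> HashMap<String, String> {
    // Optional type token, then the parameter name, then the description.
    let re = regex::Regex::new(r"^\s*:param\s+(?:\w+\s+)?(\w+)\s*:\s*(.+)").unwrap();
    let mut out = HashMap::new();
    for line in docstring.lines() {
        if let Some(caps) = re.captures(line) {
            out.insert(caps[1].to_string(), caps[2].trim().to_string());
        }
    }
    out
}

fn main() {
    let doc = ":param str name: The user's name\n:param age: Age in years\n";
    let params = rest_params(doc);
    assert_eq!(params["name"], "The user's name");
    assert_eq!(params["age"], "Age in years");
}
```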
|
||||
/// Extract parameter documentation from Google-style docstrings.
|
||||
fn extract_google_style_params(docstring: &str) -> HashMap<String, String> {
|
||||
let mut param_docs = HashMap::new();
|
||||
|
||||
let mut in_args_section = false;
|
||||
let mut current_param: Option<String> = None;
|
||||
let mut current_doc = String::new();
|
||||
|
||||
for line_obj in docstring.universal_newlines() {
|
||||
let line = line_obj.as_str();
|
||||
if GOOGLE_SECTION_REGEX.is_match(line) {
|
||||
in_args_section = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if in_args_section {
|
||||
// Check if we hit another section (starts with a word followed by colon at line start)
|
||||
if !line.starts_with(' ') && !line.starts_with('\t') && line.contains(':') {
|
||||
if let Some(colon_pos) = line.find(':') {
|
||||
let section_name = line[..colon_pos].trim();
|
||||
// If this looks like another section, stop processing args
|
||||
if !section_name.is_empty()
|
||||
&& section_name
|
||||
.chars()
|
||||
.all(|c| c.is_alphabetic() || c.is_whitespace())
|
||||
{
|
||||
// Check if this is a known section name
|
||||
let known_sections = [
|
||||
"Returns", "Return", "Raises", "Yields", "Yield", "Examples",
|
||||
"Example", "Note", "Notes", "Warning", "Warnings",
|
||||
];
|
||||
if known_sections.contains(&section_name) {
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
in_args_section = false;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(captures) = GOOGLE_PARAM_REGEX.captures(line) {
|
||||
// Save previous parameter if exists
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
|
||||
// Start new parameter
|
||||
if let (Some(param), Some(desc)) = (captures.get(1), captures.get(3)) {
|
||||
current_param = Some(param.as_str().to_string());
|
||||
current_doc = desc.as_str().to_string();
|
||||
}
|
||||
} else if line.starts_with(' ') || line.starts_with('\t') {
|
||||
// This is a continuation of the current parameter documentation
|
||||
if current_param.is_some() {
|
||||
if !current_doc.is_empty() {
|
||||
current_doc.push('\n');
|
||||
}
|
||||
current_doc.push_str(line.trim());
|
||||
}
|
||||
} else {
|
||||
// This is a line that doesn't start with whitespace and isn't a parameter
|
||||
// It might be a section or other content, so stop processing args
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
in_args_section = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Don't forget the last parameter
|
||||
if let Some(param_name) = current_param {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
}
|
||||
|
||||
param_docs
|
||||
}
|
||||
|
||||
/// Calculate the indentation level of a line (number of leading whitespace characters)
|
||||
fn get_indentation_level(line: &str) -> usize {
|
||||
leading_indentation(line).len()
|
||||
}
|
||||
|
||||
/// Extract parameter documentation from NumPy-style docstrings.
|
||||
fn extract_numpy_style_params(docstring: &str) -> HashMap<String, String> {
|
||||
let mut param_docs = HashMap::new();
|
||||
|
||||
let mut lines = docstring
|
||||
.universal_newlines()
|
||||
.map(|line| line.as_str())
|
||||
.peekable();
|
||||
let mut in_params_section = false;
|
||||
let mut found_underline = false;
|
||||
let mut current_param: Option<String> = None;
|
||||
let mut current_doc = String::new();
|
||||
let mut base_param_indent: Option<usize> = None;
|
||||
let mut base_content_indent: Option<usize> = None;
|
||||
|
||||
while let Some(line) = lines.next() {
|
||||
if NUMPY_SECTION_REGEX.is_match(line) {
|
||||
// Check if the next line is an underline
|
||||
if let Some(next_line) = lines.peek() {
|
||||
if NUMPY_UNDERLINE_REGEX.is_match(next_line) {
|
||||
in_params_section = true;
|
||||
found_underline = false;
|
||||
base_param_indent = None;
|
||||
base_content_indent = None;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if in_params_section && !found_underline {
|
||||
if NUMPY_UNDERLINE_REGEX.is_match(line) {
|
||||
found_underline = true;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if in_params_section && found_underline {
|
||||
let current_indent = get_indentation_level(line);
|
||||
let trimmed = line.trim();
|
||||
|
||||
// Skip empty lines
|
||||
if trimmed.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if we hit another section
|
||||
if current_indent == 0 {
|
||||
if let Some(next_line) = lines.peek() {
|
||||
if NUMPY_UNDERLINE_REGEX.is_match(next_line) {
|
||||
// This is another section
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
in_params_section = false;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Determine if this could be a parameter line
|
||||
let could_be_param = if let Some(base_indent) = base_param_indent {
|
||||
// We've seen parameters before - check if this matches the expected parameter indentation
|
||||
current_indent == base_indent
|
||||
} else {
|
||||
// First potential parameter - check if it has reasonable indentation and content
|
||||
current_indent > 0
|
||||
&& (trimmed.contains(':')
|
||||
|| trimmed.chars().all(|c| c.is_alphanumeric() || c == '_'))
|
||||
};
|
||||
|
||||
if could_be_param {
|
||||
// Check if this could be a section header by looking at the next line
|
||||
if let Some(next_line) = lines.peek() {
|
||||
if NUMPY_UNDERLINE_REGEX.is_match(next_line) {
|
||||
// This is a section header, not a parameter
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
in_params_section = false;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Set base indentation levels on first parameter
|
||||
if base_param_indent.is_none() {
|
||||
base_param_indent = Some(current_indent);
|
||||
}
|
||||
|
||||
// Handle parameter with type annotation (param : type)
|
||||
if trimmed.contains(':') {
|
||||
// Save previous parameter if exists
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
|
||||
// Extract parameter name and description
|
||||
let parts: Vec<&str> = trimmed.splitn(2, ':').collect();
|
||||
if parts.len() == 2 {
|
||||
let param_name = parts[0].trim();
|
||||
|
||||
// Extract just the parameter name (before any type info)
|
||||
let param_name = param_name.split_whitespace().next().unwrap_or(param_name);
|
||||
current_param = Some(param_name.to_string());
|
||||
current_doc.clear(); // Description comes on following lines, not on this line
|
||||
}
|
||||
} else {
|
||||
// Handle parameter without type annotation
|
||||
// Save previous parameter if exists
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
|
||||
// This line is the parameter name
|
||||
current_param = Some(trimmed.to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
} else if current_param.is_some() {
|
||||
// Determine if this is content for the current parameter
|
||||
let is_content = if let Some(base_content) = base_content_indent {
|
||||
// We've seen content before - check if this matches expected content indentation
|
||||
current_indent >= base_content
|
||||
} else {
|
||||
// First potential content line - should be more indented than parameter
|
||||
if let Some(base_param) = base_param_indent {
|
||||
current_indent > base_param
|
||||
} else {
|
||||
// Fallback: any indented content
|
||||
current_indent > 0
|
||||
}
|
||||
};
|
||||
|
||||
if is_content {
|
||||
// Set base content indentation on first content line
|
||||
if base_content_indent.is_none() {
|
||||
base_content_indent = Some(current_indent);
|
||||
}
|
||||
|
||||
// This is a continuation of the current parameter documentation
|
||||
if !current_doc.is_empty() {
|
||||
current_doc.push('\n');
|
||||
}
|
||||
current_doc.push_str(trimmed);
|
||||
} else {
|
||||
// This line doesn't match our expected indentation patterns
|
||||
// Save current parameter and stop processing
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
in_params_section = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Don't forget the last parameter
|
||||
if let Some(param_name) = current_param {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
}
|
||||
|
||||
param_docs
|
||||
}
|
||||
|
||||
/// Extract parameter documentation from reST/Sphinx-style docstrings.
|
||||
fn extract_rest_style_params(docstring: &str) -> HashMap<String, String> {
|
||||
let mut param_docs = HashMap::new();
|
||||
|
||||
let mut current_param: Option<String> = None;
|
||||
let mut current_doc = String::new();
|
||||
|
||||
for line_obj in docstring.universal_newlines() {
|
||||
let line = line_obj.as_str();
|
||||
if let Some(captures) = REST_PARAM_REGEX.captures(line) {
|
||||
// Save previous parameter if exists
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
|
||||
// Extract parameter name and description
|
||||
if let (Some(param_match), Some(desc_match)) = (captures.get(2), captures.get(3)) {
|
||||
current_param = Some(param_match.as_str().to_string());
|
||||
current_doc = desc_match.as_str().to_string();
|
||||
}
|
||||
} else if current_param.is_some() {
|
||||
let trimmed = line.trim();
|
||||
|
||||
// Check if this is a new section - stop processing if we hit section headers
|
||||
if trimmed == "Parameters" || trimmed == "Args" || trimmed == "Arguments" {
|
||||
// Save current param and stop processing
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// Check if this is another directive line starting with ':'
|
||||
if trimmed.starts_with(':') {
|
||||
// This is a new directive, save current param
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
// Let the next iteration handle this directive
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if this is a continuation line (indented)
|
||||
if line.starts_with(" ") && !trimmed.is_empty() {
|
||||
// This is a continuation line
|
||||
if !current_doc.is_empty() {
|
||||
current_doc.push('\n');
|
||||
}
|
||||
current_doc.push_str(trimmed);
|
||||
} else if !trimmed.is_empty() && !line.starts_with(' ') && !line.starts_with('\t') {
|
||||
// This is a non-indented line - likely end of the current parameter
|
||||
if let Some(param_name) = current_param.take() {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
current_doc.clear();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Don't forget the last parameter
|
||||
if let Some(param_name) = current_param {
|
||||
param_docs.insert(param_name, current_doc.trim().to_string());
|
||||
}
|
||||
|
||||
param_docs
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_google_style_parameter_documentation() {
|
||||
let docstring = r#"
|
||||
This is a function description.
|
||||
|
||||
Args:
|
||||
param1 (str): The first parameter description
|
||||
param2 (int): The second parameter description
|
||||
This is a continuation of param2 description.
|
||||
param3: A parameter without type annotation
|
||||
|
||||
Returns:
|
||||
str: The return value description
|
||||
"#;
|
||||
|
||||
let param_docs = get_parameter_documentation(docstring);
|
||||
|
||||
assert_eq!(param_docs.len(), 3);
|
||||
assert_eq!(¶m_docs["param1"], "The first parameter description");
|
||||
assert_eq!(
|
||||
¶m_docs["param2"],
|
||||
"The second parameter description\nThis is a continuation of param2 description."
|
||||
);
|
||||
assert_eq!(¶m_docs["param3"], "A parameter without type annotation");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_numpy_style_parameter_documentation() {
|
||||
let docstring = r#"
|
||||
This is a function description.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
param1 : str
|
||||
The first parameter description
|
||||
param2 : int
|
||||
The second parameter description
|
||||
This is a continuation of param2 description.
|
||||
param3
|
||||
A parameter without type annotation
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
The return value description
|
||||
"#;
|
||||
|
||||
let param_docs = get_parameter_documentation(docstring);
|
||||
|
||||
assert_eq!(param_docs.len(), 3);
|
||||
assert_eq!(
|
||||
param_docs.get("param1").expect("param1 should exist"),
|
||||
"The first parameter description"
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param2").expect("param2 should exist"),
|
||||
"The second parameter description\nThis is a continuation of param2 description."
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param3").expect("param3 should exist"),
|
||||
"A parameter without type annotation"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_no_parameter_documentation() {
|
||||
let docstring = r#"
|
||||
This is a simple function description without parameter documentation.
|
||||
"#;
|
||||
|
||||
let param_docs = get_parameter_documentation(docstring);
|
||||
assert!(param_docs.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mixed_style_parameter_documentation() {
|
||||
let docstring = r#"
|
||||
This is a function description.
|
||||
|
||||
Args:
|
||||
param1 (str): Google-style parameter
|
||||
param2 (int): Another Google-style parameter
|
||||
|
||||
Parameters
|
||||
----------
|
||||
param3 : bool
|
||||
NumPy-style parameter
|
||||
"#;
|
||||
|
||||
let param_docs = get_parameter_documentation(docstring);
|
||||
|
||||
assert_eq!(param_docs.len(), 3);
|
||||
assert_eq!(
|
||||
param_docs.get("param1").expect("param1 should exist"),
|
||||
"Google-style parameter"
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param2").expect("param2 should exist"),
|
||||
"Another Google-style parameter"
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param3").expect("param3 should exist"),
|
||||
"NumPy-style parameter"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rest_style_parameter_documentation() {
|
||||
let docstring = r#"
|
||||
This is a function description.
|
||||
|
||||
:param str param1: The first parameter description
|
||||
:param int param2: The second parameter description
|
||||
This is a continuation of param2 description.
|
||||
:param param3: A parameter without type annotation
|
||||
:returns: The return value description
|
||||
:rtype: str
|
||||
"#;
|
||||
|
||||
let param_docs = get_parameter_documentation(docstring);
|
||||
|
||||
assert_eq!(param_docs.len(), 3);
|
||||
assert_eq!(
|
||||
param_docs.get("param1").expect("param1 should exist"),
|
||||
"The first parameter description"
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param2").expect("param2 should exist"),
|
||||
"The second parameter description\nThis is a continuation of param2 description."
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param3").expect("param3 should exist"),
|
||||
"A parameter without type annotation"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mixed_style_with_rest_parameter_documentation() {
|
||||
let docstring = r#"
|
||||
This is a function description.
|
||||
|
||||
Args:
|
||||
param1 (str): Google-style parameter
|
||||
|
||||
:param int param2: reST-style parameter
|
||||
:param param3: Another reST-style parameter
|
||||
|
||||
Parameters
|
||||
----------
|
||||
param4 : bool
|
||||
NumPy-style parameter
|
||||
"#;
|
||||
|
||||
let param_docs = get_parameter_documentation(docstring);
|
||||
|
||||
assert_eq!(param_docs.len(), 4);
|
||||
assert_eq!(
|
||||
param_docs.get("param1").expect("param1 should exist"),
|
||||
"Google-style parameter"
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param2").expect("param2 should exist"),
|
||||
"reST-style parameter"
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param3").expect("param3 should exist"),
|
||||
"Another reST-style parameter"
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param4").expect("param4 should exist"),
|
||||
"NumPy-style parameter"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_numpy_style_with_different_indentation() {
|
||||
let docstring = r#"
|
||||
This is a function description.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
param1 : str
|
||||
The first parameter description
|
||||
param2 : int
|
||||
The second parameter description
|
||||
This is a continuation of param2 description.
|
||||
param3
|
||||
A parameter without type annotation
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
The return value description
|
||||
"#;
|
||||
|
||||
let param_docs = get_parameter_documentation(docstring);
|
||||
|
||||
assert_eq!(param_docs.len(), 3);
|
||||
assert_eq!(
|
||||
param_docs.get("param1").expect("param1 should exist"),
|
||||
"The first parameter description"
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param2").expect("param2 should exist"),
|
||||
"The second parameter description\nThis is a continuation of param2 description."
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param3").expect("param3 should exist"),
|
||||
"A parameter without type annotation"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_numpy_style_with_tabs_and_mixed_indentation() {
|
||||
// Use explicit `\t` escapes in a regular (non-raw) string so real tab characters end up in the test input
|
||||
let docstring = "
|
||||
This is a function description.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
\tparam1 : str
|
||||
\t\tThe first parameter description
|
||||
\tparam2 : int
|
||||
\t\tThe second parameter description
|
||||
\t\tThis is a continuation of param2 description.
|
||||
\tparam3
|
||||
\t\tA parameter without type annotation
|
||||
";
|
||||
|
||||
let param_docs = get_parameter_documentation(docstring);
|
||||
|
||||
assert_eq!(param_docs.len(), 3);
|
||||
assert_eq!(
|
||||
param_docs.get("param1").expect("param1 should exist"),
|
||||
"The first parameter description"
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param2").expect("param2 should exist"),
|
||||
"The second parameter description\nThis is a continuation of param2 description."
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs.get("param3").expect("param3 should exist"),
|
||||
"A parameter without type annotation"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_universal_newlines() {
|
||||
// Test with Windows-style line endings (\r\n)
|
||||
let docstring_windows = "This is a function description.\r\n\r\nArgs:\r\n param1 (str): The first parameter\r\n param2 (int): The second parameter\r\n";
|
||||
|
||||
// Test with old Mac-style line endings (\r)
|
||||
let docstring_mac = "This is a function description.\r\rArgs:\r param1 (str): The first parameter\r param2 (int): The second parameter\r";
|
||||
|
||||
// Test with Unix-style line endings (\n) - should work the same
|
||||
let docstring_unix = "This is a function description.\n\nArgs:\n param1 (str): The first parameter\n param2 (int): The second parameter\n";
|
||||
|
||||
let param_docs_windows = get_parameter_documentation(docstring_windows);
|
||||
let param_docs_mac = get_parameter_documentation(docstring_mac);
|
||||
let param_docs_unix = get_parameter_documentation(docstring_unix);
|
||||
|
||||
// All should produce the same results
|
||||
assert_eq!(param_docs_windows.len(), 2);
|
||||
assert_eq!(param_docs_mac.len(), 2);
|
||||
assert_eq!(param_docs_unix.len(), 2);
|
||||
|
||||
assert_eq!(
|
||||
param_docs_windows.get("param1"),
|
||||
Some(&"The first parameter".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs_mac.get("param1"),
|
||||
Some(&"The first parameter".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
param_docs_unix.get("param1"),
|
||||
Some(&"The first parameter".to_string())
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,17 @@
mod completion;
mod db;
mod docstring;
mod find_node;
mod goto;
mod hover;
mod inlay_hints;
mod markup;
mod semantic_tokens;
mod signature_help;

pub use completion::completion;
pub use db::Db;
pub use docstring::get_parameter_documentation;
pub use goto::goto_type_definition;
pub use hover::hover;
pub use inlay_hints::inlay_hints;
@@ -16,6 +19,7 @@ pub use markup::MarkupKind;
pub use semantic_tokens::{
    SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens, semantic_tokens,
};
pub use signature_help::{ParameterDetails, SignatureDetails, SignatureHelpInfo, signature_help};

use ruff_db::files::{File, FileRange};
use ruff_text_size::{Ranged, TextRange};

687
crates/ty_ide/src/signature_help.rs
Normal file
@@ -0,0 +1,687 @@
|
||||
//! This module handles the "signature help" request in the language server
|
||||
//! protocol. This request is typically issued by a client when the user types
|
||||
//! an open parenthesis and starts to enter arguments for a function call.
|
||||
//! The signature help provides information that the editor displays to the
|
||||
//! user about the target function signature including parameter names,
|
||||
//! types, and documentation. It supports multiple signatures for union types
|
||||
//! and overloads.
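//!
//! A rough usage sketch (illustrative only; the `db`, `file`, and `offset` values are
//! assumed to come from the surrounding language-server request handler):
//!
//! ```ignore
//! if let Some(help) = signature_help(db, file, offset) {
//!     let active = help.active_signature.unwrap_or(0);
//!     let signature = &help.signatures[active];
//!     // Render `signature.label` and `signature.documentation`, and highlight the
//!     // parameter at `signature.active_parameter`, if any.
//! }
//! ```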
|
||||
|
||||
use crate::{Db, docstring::get_parameter_documentation, find_node::covering_node};
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_python_ast::{self as ast, AnyNodeRef};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use ty_python_semantic::semantic_index::definition::Definition;
|
||||
use ty_python_semantic::types::{CallSignatureDetails, call_signature_details};
|
||||
|
||||
// Limitations of the current implementation:
|
||||
|
||||
// TODO - If the target function is declared in a stub file but defined (implemented)
|
||||
// in a source file, the documentation will not reflect a docstring that appears
|
||||
// only in the implementation. To do this, we'll need to map the function or
|
||||
// method in the stub to the implementation and extract the docstring from there.
|
||||
|
||||
/// Information about a function parameter
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ParameterDetails {
|
||||
/// The parameter name (e.g., "param1")
|
||||
pub name: String,
|
||||
/// The parameter label in the signature (e.g., "param1: str")
|
||||
pub label: String,
|
||||
/// Documentation specific to the parameter, typically extracted from the
|
||||
/// function's docstring
|
||||
pub documentation: Option<String>,
|
||||
}
|
||||
|
||||
/// Information about a function signature
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SignatureDetails {
|
||||
/// Text representation of the full signature (including input parameters and return type).
|
||||
pub label: String,
|
||||
/// Documentation for the signature, typically from the function's docstring.
|
||||
pub documentation: Option<String>,
|
||||
/// Information about each of the parameters in left-to-right order.
|
||||
pub parameters: Vec<ParameterDetails>,
|
||||
/// Index of the parameter that corresponds to the argument where the
|
||||
/// user's cursor is currently positioned.
|
||||
pub active_parameter: Option<usize>,
|
||||
}
|
||||
|
||||
/// Signature help information for function calls
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SignatureHelpInfo {
|
||||
/// Information about each of the signatures for the function call. We
|
||||
/// need to handle multiple because of unions, overloads, and composite
|
||||
/// calls like constructors (which invoke both __new__ and __init__).
|
||||
pub signatures: Vec<SignatureDetails>,
|
||||
/// Index of the "active signature" which is the first signature where
|
||||
/// all arguments that are currently present in the code map to parameters.
|
||||
pub active_signature: Option<usize>,
|
||||
}
|
||||
|
||||
/// Signature help information for function calls at the given position
|
||||
pub fn signature_help(db: &dyn Db, file: File, offset: TextSize) -> Option<SignatureHelpInfo> {
|
||||
let parsed = parsed_module(db, file).load(db);
|
||||
|
||||
// Get the call expression at the given position.
|
||||
let (call_expr, current_arg_index) = get_call_expr(&parsed, offset)?;
|
||||
|
||||
// Get signature details from the semantic analyzer.
|
||||
let signature_details: Vec<CallSignatureDetails<'_>> =
|
||||
call_signature_details(db, file, call_expr);
|
||||
|
||||
if signature_details.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Find the active signature - the first signature where all arguments map to parameters.
|
||||
let active_signature_index = find_active_signature_from_details(&signature_details);
|
||||
|
||||
// Convert to SignatureDetails objects.
|
||||
let signatures: Vec<SignatureDetails> = signature_details
|
||||
.into_iter()
|
||||
.map(|details| {
|
||||
create_signature_details_from_call_signature_details(db, &details, current_arg_index)
|
||||
})
|
||||
.collect();
|
||||
|
||||
Some(SignatureHelpInfo {
|
||||
signatures,
|
||||
active_signature: active_signature_index,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the innermost call expression that contains the specified offset
|
||||
/// and the index of the argument that the offset maps to.
|
||||
fn get_call_expr(
|
||||
parsed: &ruff_db::parsed::ParsedModuleRef,
|
||||
offset: TextSize,
|
||||
) -> Option<(&ast::ExprCall, usize)> {
|
||||
// Create a range from the offset for the covering_node function.
|
||||
let range = TextRange::new(offset, offset);
|
||||
|
||||
// Find the covering node at the given position that is a function call.
|
||||
let covering_node = covering_node(parsed.syntax().into(), range)
|
||||
.find_first(|node| matches!(node, AnyNodeRef::ExprCall(_)))
|
||||
.ok()?;
|
||||
|
||||
// Get the function call expression.
|
||||
let AnyNodeRef::ExprCall(call_expr) = covering_node.node() else {
|
||||
return None;
|
||||
};
|
||||
|
||||
// Determine which argument corresponds to the current cursor location.
|
||||
let current_arg_index = get_argument_index(call_expr, offset);
|
||||
|
||||
Some((call_expr, current_arg_index))
|
||||
}
|
||||
|
||||
/// Determine which argument is associated with the specified offset.
|
||||
/// Returns the index of the first argument whose end the offset has not yet passed;
/// if the offset is past every argument (or there are none), returns the number of
/// existing arguments.
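///
/// For example (illustrative): in `f(a, b, )` with the cursor after the trailing comma,
/// both existing arguments end before the cursor, so this returns `2`.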
|
||||
fn get_argument_index(call_expr: &ast::ExprCall, offset: TextSize) -> usize {
|
||||
let mut current_arg = 0;
|
||||
|
||||
for (i, arg) in call_expr.arguments.arguments_source_order().enumerate() {
|
||||
if offset <= arg.end() {
|
||||
return i;
|
||||
}
|
||||
current_arg = i + 1;
|
||||
}
|
||||
|
||||
current_arg
|
||||
}
|
||||
|
||||
/// Create signature details from `CallSignatureDetails`.
|
||||
fn create_signature_details_from_call_signature_details(
|
||||
db: &dyn crate::Db,
|
||||
details: &CallSignatureDetails,
|
||||
current_arg_index: usize,
|
||||
) -> SignatureDetails {
|
||||
let signature_label = details.label.clone();
|
||||
|
||||
let documentation = get_callable_documentation(db, details.definition);
|
||||
|
||||
// Translate the argument index to parameter index using the mapping.
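// For example (illustrative): for `obj.my_method(arg2=True, arg1=...)`, the cursor in the
// second written argument (argument index 1) maps back to parameter index 0 (`arg1`).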
|
||||
let active_parameter =
|
||||
if details.argument_to_parameter_mapping.is_empty() && current_arg_index == 0 {
|
||||
Some(0)
|
||||
} else {
|
||||
details
|
||||
.argument_to_parameter_mapping
|
||||
.get(current_arg_index)
|
||||
.and_then(|¶m_index| param_index)
|
||||
.or({
|
||||
// If we can't find a mapping for this argument, but we have a current
|
||||
// argument index, use that as the active parameter if it's within bounds.
|
||||
if current_arg_index < details.parameter_label_offsets.len() {
|
||||
Some(current_arg_index)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
SignatureDetails {
|
||||
label: signature_label.clone(),
|
||||
documentation: Some(documentation),
|
||||
parameters: create_parameters_from_offsets(
|
||||
&details.parameter_label_offsets,
|
||||
&signature_label,
|
||||
db,
|
||||
details.definition,
|
||||
&details.parameter_names,
|
||||
),
|
||||
active_parameter,
|
||||
}
|
||||
}
|
||||
|
||||
/// Determine appropriate documentation for a callable type based on its original type.
|
||||
fn get_callable_documentation(db: &dyn crate::Db, definition: Option<Definition>) -> String {
|
||||
// TODO: If the definition is located within a stub file and no docstring
|
||||
// is present, try to map the symbol to an implementation file and extract
|
||||
// the docstring from that location.
|
||||
if let Some(definition) = definition {
|
||||
definition.docstring(db).unwrap_or_default()
|
||||
} else {
|
||||
String::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// Create `ParameterDetails` objects from parameter label offsets.
|
||||
fn create_parameters_from_offsets(
|
||||
parameter_offsets: &[TextRange],
|
||||
signature_label: &str,
|
||||
db: &dyn crate::Db,
|
||||
definition: Option<Definition>,
|
||||
parameter_names: &[String],
|
||||
) -> Vec<ParameterDetails> {
|
||||
// Extract parameter documentation from the function's docstring if available.
|
||||
let param_docs = if let Some(definition) = definition {
|
||||
let docstring = definition.docstring(db);
|
||||
docstring
|
||||
.map(|doc| get_parameter_documentation(&doc))
|
||||
.unwrap_or_default()
|
||||
} else {
|
||||
std::collections::HashMap::new()
|
||||
};
|
||||
|
||||
parameter_offsets
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, offset)| {
|
||||
// Extract the parameter label from the signature string.
|
||||
let start = usize::from(offset.start());
|
||||
let end = usize::from(offset.end());
|
||||
let label = signature_label
|
||||
.get(start..end)
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
|
||||
// Get the parameter name for documentation lookup.
|
||||
let param_name = parameter_names.get(i).map(String::as_str).unwrap_or("");
|
||||
|
||||
ParameterDetails {
|
||||
name: param_name.to_string(),
|
||||
label,
|
||||
documentation: param_docs.get(param_name).cloned(),
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Find the active signature index from `CallSignatureDetails`.
|
||||
/// The active signature is the first signature where all arguments present in the call
|
||||
/// have valid mappings to parameters (i.e., none of the mappings are None).
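/// For example (illustrative): for a call `process("hello", True)` checked against the
/// overloads `(value: int) -> str` and `(value: str, flag: bool) -> int`, only the second
/// overload maps both arguments, so its index is selected.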
|
||||
fn find_active_signature_from_details(signature_details: &[CallSignatureDetails]) -> Option<usize> {
|
||||
let first = signature_details.first()?;
|
||||
|
||||
// If there are no arguments in the mapping, just return the first signature.
|
||||
if first.argument_to_parameter_mapping.is_empty() {
|
||||
return Some(0);
|
||||
}
|
||||
|
||||
// First, try to find a signature where all arguments have valid parameter mappings.
|
||||
let perfect_match = signature_details.iter().position(|details| {
|
||||
// Check if all arguments have valid parameter mappings (i.e., are not None).
|
||||
details
|
||||
.argument_to_parameter_mapping
|
||||
.iter()
|
||||
.all(Option::is_some)
|
||||
});
|
||||
|
||||
if let Some(index) = perfect_match {
|
||||
return Some(index);
|
||||
}
|
||||
|
||||
// If no perfect match, find the signature with the most valid argument mappings.
|
||||
let (best_index, _) = signature_details
|
||||
.iter()
|
||||
.enumerate()
|
||||
.max_by_key(|(_, details)| {
|
||||
details
|
||||
.argument_to_parameter_mapping
|
||||
.iter()
|
||||
.filter(|mapping| mapping.is_some())
|
||||
.count()
|
||||
})?;
|
||||
|
||||
Some(best_index)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::signature_help::SignatureHelpInfo;
|
||||
use crate::tests::{CursorTest, cursor_test};
|
||||
|
||||
#[test]
|
||||
fn signature_help_basic_function_call() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def example_function(param1: str, param2: int) -> str:
|
||||
"""This is a docstring for the example function.
|
||||
|
||||
Args:
|
||||
param1: The first parameter as a string
|
||||
param2: The second parameter as an integer
|
||||
|
||||
Returns:
|
||||
A formatted string combining both parameters
|
||||
"""
|
||||
return f"{param1}: {param2}"
|
||||
|
||||
result = example_function(<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
// Test that signature help is provided
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
assert_eq!(result.signatures.len(), 1);
|
||||
|
||||
let signature = &result.signatures[0];
|
||||
assert!(signature.label.contains("param1") && signature.label.contains("param2"));
|
||||
|
||||
// Verify that the docstring is extracted and included in the documentation
|
||||
let expected_docstring = concat!(
|
||||
"This is a docstring for the example function.\n",
|
||||
" \n",
|
||||
" Args:\n",
|
||||
" param1: The first parameter as a string\n",
|
||||
" param2: The second parameter as an integer\n",
|
||||
" \n",
|
||||
" Returns:\n",
|
||||
" A formatted string combining both parameters\n",
|
||||
" "
|
||||
);
|
||||
assert_eq!(
|
||||
signature.documentation,
|
||||
Some(expected_docstring.to_string())
|
||||
);
|
||||
|
||||
assert_eq!(result.active_signature, Some(0));
|
||||
assert_eq!(signature.active_parameter, Some(0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_help_method_call() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
class MyClass:
|
||||
def my_method(self, arg1: str, arg2: bool) -> None:
|
||||
pass
|
||||
|
||||
obj = MyClass()
|
||||
obj.my_method(arg2=True, arg1=<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
// Test that signature help is provided for method calls
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
assert_eq!(result.signatures.len(), 1);
|
||||
|
||||
let signature = &result.signatures[0];
|
||||
assert!(signature.label.contains("arg1") && signature.label.contains("arg2"));
|
||||
assert_eq!(result.active_signature, Some(0));
|
||||
|
||||
// Check the active parameter from the active signature
|
||||
if let Some(active_sig_index) = result.active_signature {
|
||||
let active_signature = &result.signatures[active_sig_index];
|
||||
assert_eq!(active_signature.active_parameter, Some(0));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_help_nested_function_calls() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def outer(a: int) -> int:
|
||||
return a * 2
|
||||
|
||||
def inner(b: str) -> str:
|
||||
return b.upper()
|
||||
|
||||
result = outer(inner(<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
// Test that signature help focuses on the innermost function call
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
assert_eq!(result.signatures.len(), 1);
|
||||
|
||||
let signature = &result.signatures[0];
|
||||
assert!(signature.label.contains("str") || signature.label.contains("->"));
|
||||
assert_eq!(result.active_signature, Some(0));
|
||||
assert_eq!(signature.active_parameter, Some(0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_help_union_callable() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
import random
|
||||
def func_a(x: int) -> int:
|
||||
return x
|
||||
|
||||
def func_b(y: str) -> str:
|
||||
return y
|
||||
|
||||
if random.random() > 0.5:
|
||||
f = func_a
|
||||
else:
|
||||
f = func_b
|
||||
|
||||
f(<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
|
||||
assert_eq!(result.signatures.len(), 2);
|
||||
|
||||
let signature = &result.signatures[0];
|
||||
assert_eq!(signature.label, "(x: int) -> int");
|
||||
assert_eq!(signature.parameters.len(), 1);
|
||||
|
||||
// Check parameter information
|
||||
let param = &signature.parameters[0];
|
||||
assert_eq!(param.label, "x: int");
|
||||
assert_eq!(param.name, "x");
|
||||
|
||||
// Validate the second signature (from func_b)
|
||||
let signature_b = &result.signatures[1];
|
||||
assert_eq!(signature_b.label, "(y: str) -> str");
|
||||
assert_eq!(signature_b.parameters.len(), 1);
|
||||
|
||||
// Check parameter information for the second signature
|
||||
let param_b = &signature_b.parameters[0];
|
||||
assert_eq!(param_b.label, "y: str");
|
||||
assert_eq!(param_b.name, "y");
|
||||
|
||||
assert_eq!(result.active_signature, Some(0));
|
||||
|
||||
// Check the active parameter from the active signature
|
||||
if let Some(active_sig_index) = result.active_signature {
|
||||
let active_signature = &result.signatures[active_sig_index];
|
||||
assert_eq!(active_signature.active_parameter, Some(0));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_help_overloaded_function() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
from typing import overload
|
||||
|
||||
@overload
|
||||
def process(value: int) -> str: ...
|
||||
|
||||
@overload
|
||||
def process(value: str) -> int: ...
|
||||
|
||||
def process(value):
|
||||
if isinstance(value, int):
|
||||
return str(value)
|
||||
else:
|
||||
return len(value)
|
||||
|
||||
result = process(<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
// Test that signature help is provided for overloaded functions
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
|
||||
// We should have signatures for the overloads
|
||||
assert_eq!(result.signatures.len(), 2);
|
||||
assert_eq!(result.active_signature, Some(0));
|
||||
|
||||
// Check the active parameter from the active signature
|
||||
if let Some(active_sig_index) = result.active_signature {
|
||||
let active_signature = &result.signatures[active_sig_index];
|
||||
assert_eq!(active_signature.active_parameter, Some(0));
|
||||
}
|
||||
|
||||
// Validate the first overload: process(value: int) -> str
|
||||
let signature1 = &result.signatures[0];
|
||||
assert_eq!(signature1.label, "(value: int) -> str");
|
||||
assert_eq!(signature1.parameters.len(), 1);
|
||||
|
||||
let param1 = &signature1.parameters[0];
|
||||
assert_eq!(param1.label, "value: int");
|
||||
assert_eq!(param1.name, "value");
|
||||
|
||||
// Validate the second overload: process(value: str) -> int
|
||||
let signature2 = &result.signatures[1];
|
||||
assert_eq!(signature2.label, "(value: str) -> int");
|
||||
assert_eq!(signature2.parameters.len(), 1);
|
||||
|
||||
let param2 = &signature2.parameters[0];
|
||||
assert_eq!(param2.label, "value: str");
|
||||
assert_eq!(param2.name, "value");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_help_class_constructor() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
class Point:
|
||||
"""A simple point class representing a 2D coordinate."""
|
||||
|
||||
def __init__(self, x: int, y: int):
|
||||
"""Initialize a point with x and y coordinates.
|
||||
|
||||
Args:
|
||||
x: The x-coordinate
|
||||
y: The y-coordinate
|
||||
"""
|
||||
self.x = x
|
||||
self.y = y
|
||||
|
||||
point = Point(<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
|
||||
// Should have exactly one signature for the constructor
|
||||
assert_eq!(result.signatures.len(), 1);
|
||||
let signature = &result.signatures[0];
|
||||
|
||||
// Validate the constructor signature
|
||||
assert_eq!(signature.label, "(x: int, y: int) -> Point");
|
||||
assert_eq!(signature.parameters.len(), 2);
|
||||
|
||||
// Validate the first parameter (x: int)
|
||||
let param_x = &signature.parameters[0];
|
||||
assert_eq!(param_x.label, "x: int");
|
||||
assert_eq!(param_x.name, "x");
|
||||
assert_eq!(param_x.documentation, Some("The x-coordinate".to_string()));
|
||||
|
||||
// Validate the second parameter (y: int)
|
||||
let param_y = &signature.parameters[1];
|
||||
assert_eq!(param_y.label, "y: int");
|
||||
assert_eq!(param_y.name, "y");
|
||||
assert_eq!(param_y.documentation, Some("The y-coordinate".to_string()));
|
||||
|
||||
// Should have the __init__ method docstring as documentation (not the class docstring)
|
||||
let expected_docstring = "Initialize a point with x and y coordinates.\n \n Args:\n x: The x-coordinate\n y: The y-coordinate\n ";
|
||||
assert_eq!(
|
||||
signature.documentation,
|
||||
Some(expected_docstring.to_string())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_help_callable_object() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
class Multiplier:
|
||||
def __call__(self, x: int) -> int:
|
||||
return x * 2
|
||||
|
||||
multiplier = Multiplier()
|
||||
result = multiplier(<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
|
||||
// Should have a signature for the callable object
|
||||
assert!(!result.signatures.is_empty());
|
||||
let signature = &result.signatures[0];
|
||||
|
||||
// Should provide signature help for the callable
|
||||
assert!(signature.label.contains("int") || signature.label.contains("->"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_help_subclass_of_constructor() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
from typing import Type
|
||||
|
||||
def create_instance(cls: Type[list]) -> list:
|
||||
return cls(<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
|
||||
// Should have a signature
|
||||
assert!(!result.signatures.is_empty());
|
||||
let signature = &result.signatures[0];
|
||||
|
||||
// Should have empty documentation for now
|
||||
assert_eq!(signature.documentation, Some(String::new()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_help_parameter_label_offsets() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def test_function(param1: str, param2: int, param3: bool) -> str:
|
||||
return f"{param1}: {param2}, {param3}"
|
||||
|
||||
result = test_function(<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
assert_eq!(result.signatures.len(), 1);
|
||||
|
||||
let signature = &result.signatures[0];
|
||||
assert_eq!(signature.parameters.len(), 3);
|
||||
|
||||
// Check that we have parameter labels
|
||||
for (i, param) in signature.parameters.iter().enumerate() {
|
||||
let expected_param_spec = match i {
|
||||
0 => "param1: str",
|
||||
1 => "param2: int",
|
||||
2 => "param3: bool",
|
||||
_ => panic!("Unexpected parameter index"),
|
||||
};
|
||||
assert_eq!(param.label, expected_param_spec);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_help_active_signature_selection() {
|
||||
// This test verifies that the algorithm correctly selects the first signature
|
||||
// where all arguments present in the call have valid parameter mappings.
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
from typing import overload
|
||||
|
||||
@overload
|
||||
def process(value: int) -> str: ...
|
||||
|
||||
@overload
|
||||
def process(value: str, flag: bool) -> int: ...
|
||||
|
||||
def process(value, flag=None):
|
||||
if isinstance(value, int):
|
||||
return str(value)
|
||||
elif flag is not None:
|
||||
return len(value) if flag else 0
|
||||
else:
|
||||
return len(value)
|
||||
|
||||
# Call with two arguments - should select the second overload
|
||||
result = process("hello", True<CURSOR>)
|
||||
"#,
|
||||
);
|
||||
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
|
||||
// Should have signatures for the overloads.
|
||||
assert!(!result.signatures.is_empty());
|
||||
|
||||
// Check that we have an active signature and parameter
|
||||
if let Some(active_sig_index) = result.active_signature {
|
||||
let active_signature = &result.signatures[active_sig_index];
|
||||
assert_eq!(active_signature.active_parameter, Some(1));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_help_parameter_documentation() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def documented_function(param1: str, param2: int) -> str:
|
||||
"""This is a function with parameter documentation.
|
||||
|
||||
Args:
|
||||
param1: The first parameter description
|
||||
param2: The second parameter description
|
||||
"""
|
||||
return f"{param1}: {param2}"
|
||||
|
||||
result = documented_function(<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
let result = test.signature_help().expect("Should have signature help");
|
||||
assert_eq!(result.signatures.len(), 1);
|
||||
|
||||
let signature = &result.signatures[0];
|
||||
assert_eq!(signature.parameters.len(), 2);
|
||||
|
||||
// Check that parameter documentation is extracted
|
||||
let param1 = &signature.parameters[0];
|
||||
assert_eq!(
|
||||
param1.documentation,
|
||||
Some("The first parameter description".to_string())
|
||||
);
|
||||
|
||||
let param2 = &signature.parameters[1];
|
||||
assert_eq!(
|
||||
param2.documentation,
|
||||
Some("The second parameter description".to_string())
|
||||
);
|
||||
}
|
||||
|
||||
impl CursorTest {
|
||||
fn signature_help(&self) -> Option<SignatureHelpInfo> {
|
||||
crate::signature_help::signature_help(&self.db, self.cursor.file, self.cursor.offset)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -5,7 +5,7 @@ use std::{cmp, fmt};

use crate::metadata::settings::file_settings;
use crate::{DEFAULT_LINT_REGISTRY, DummyReporter};
use crate::{Project, ProjectMetadata, Reporter};
use crate::{ProgressReporter, Project, ProjectMetadata};
use ruff_db::Db as SourceDb;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::{File, Files};
@@ -87,7 +87,7 @@ impl ProjectDatabase {
    }

    /// Checks all open files in the project and its dependencies, using the given reporter.
    pub fn check_with_reporter(&self, reporter: &mut dyn Reporter) -> Vec<Diagnostic> {
    pub fn check_with_reporter(&self, reporter: &mut dyn ProgressReporter) -> Vec<Diagnostic> {
        let reporter = AssertUnwindSafe(reporter);
        self.project().check(self, CheckMode::OpenFiles, reporter)
    }
@@ -95,7 +95,7 @@ impl ProjectDatabase {
    /// Check the project with the given mode.
    pub fn check_with_mode(&self, mode: CheckMode) -> Vec<Diagnostic> {
        let mut reporter = DummyReporter;
        let reporter = AssertUnwindSafe(&mut reporter as &mut dyn Reporter);
        let reporter = AssertUnwindSafe(&mut reporter as &mut dyn ProgressReporter);
        self.project().check(self, mode, reporter)
    }

@@ -113,7 +113,7 @@ pub struct Project {
|
||||
}
|
||||
|
||||
/// A progress reporter.
|
||||
pub trait Reporter: Send + Sync {
|
||||
pub trait ProgressReporter: Send + Sync {
|
||||
/// Initialize the reporter with the number of files.
|
||||
fn set_files(&mut self, files: usize);
|
||||
|
||||
@@ -121,11 +121,11 @@ pub trait Reporter: Send + Sync {
|
||||
fn report_file(&self, file: &File);
|
||||
}
|
||||
|
||||
/// A no-op implementation of [`Reporter`].
|
||||
/// A no-op implementation of [`ProgressReporter`].
|
||||
#[derive(Default)]
|
||||
pub struct DummyReporter;
|
||||
|
||||
impl Reporter for DummyReporter {
|
||||
impl ProgressReporter for DummyReporter {
|
||||
fn set_files(&mut self, _files: usize) {}
|
||||
fn report_file(&self, _file: &File) {}
|
||||
}
|
||||
@@ -212,7 +212,7 @@ impl Project {
|
||||
self,
|
||||
db: &ProjectDatabase,
|
||||
mode: CheckMode,
|
||||
mut reporter: AssertUnwindSafe<&mut dyn Reporter>,
|
||||
mut reporter: AssertUnwindSafe<&mut dyn ProgressReporter>,
|
||||
) -> Vec<Diagnostic> {
|
||||
let project_span = tracing::debug_span!("Project::check");
|
||||
let _span = project_span.enter();
|
||||
@@ -257,8 +257,11 @@ impl Project {
|
||||
tracing::debug_span!(parent: project_span, "check_file", ?file);
|
||||
let _entered = check_file_span.entered();
|
||||
|
||||
let result = self.check_file_impl(&db, file);
|
||||
file_diagnostics.lock().unwrap().extend(result);
|
||||
let result = check_file_impl(&db, file);
|
||||
file_diagnostics
|
||||
.lock()
|
||||
.unwrap()
|
||||
.extend(result.iter().map(Clone::clone));
|
||||
|
||||
reporter.report_file(&file);
|
||||
});
|
||||
@@ -285,7 +288,7 @@ impl Project {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
self.check_file_impl(db, file)
|
||||
check_file_impl(db, file).iter().map(Clone::clone).collect()
|
||||
}
|
||||
|
||||
/// Opens a file in the project.
|
||||
@@ -466,71 +469,73 @@ impl Project {
|
||||
self.set_file_set(db).to(IndexedFiles::lazy());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn check_file_impl(self, db: &dyn Db, file: File) -> Vec<Diagnostic> {
|
||||
let mut diagnostics: Vec<Diagnostic> = Vec::new();
|
||||
#[salsa::tracked(returns(deref), heap_size=get_size2::GetSize::get_heap_size)]
|
||||
pub(crate) fn check_file_impl(db: &dyn Db, file: File) -> Box<[Diagnostic]> {
|
||||
let mut diagnostics: Vec<Diagnostic> = Vec::new();
|
||||
|
||||
// Abort checking if there are IO errors.
|
||||
let source = source_text(db, file);
|
||||
// Abort checking if there are IO errors.
|
||||
let source = source_text(db, file);
|
||||
|
||||
if let Some(read_error) = source.read_error() {
|
||||
diagnostics.push(
|
||||
IOErrorDiagnostic {
|
||||
file: Some(file),
|
||||
error: read_error.clone().into(),
|
||||
}
|
||||
.to_diagnostic(),
|
||||
);
|
||||
return diagnostics;
|
||||
}
|
||||
|
||||
let parsed = parsed_module(db, file);
|
||||
|
||||
let parsed_ref = parsed.load(db);
|
||||
diagnostics.extend(
|
||||
parsed_ref
|
||||
.errors()
|
||||
.iter()
|
||||
.map(|error| Diagnostic::invalid_syntax(file, &error.error, error)),
|
||||
);
|
||||
|
||||
diagnostics.extend(parsed_ref.unsupported_syntax_errors().iter().map(|error| {
|
||||
let mut error = Diagnostic::invalid_syntax(file, error, error);
|
||||
add_inferred_python_version_hint_to_diagnostic(db, &mut error, "parsing syntax");
|
||||
error
|
||||
}));
|
||||
|
||||
{
|
||||
let db = AssertUnwindSafe(db);
|
||||
match catch(&**db, file, || check_types(*db, file)) {
|
||||
Ok(Some(type_check_diagnostics)) => {
|
||||
diagnostics.extend(type_check_diagnostics.into_iter().cloned());
|
||||
}
|
||||
Ok(None) => {}
|
||||
Err(diagnostic) => diagnostics.push(diagnostic),
|
||||
if let Some(read_error) = source.read_error() {
|
||||
diagnostics.push(
|
||||
IOErrorDiagnostic {
|
||||
file: Some(file),
|
||||
error: read_error.clone().into(),
|
||||
}
|
||||
}
|
||||
|
||||
if self
|
||||
.open_fileset(db)
|
||||
.is_none_or(|files| !files.contains(&file))
|
||||
{
|
||||
// Drop the AST now that we are done checking this file. It is not currently open,
|
||||
// so it is unlikely to be accessed again soon. If any queries need to access the AST
|
||||
// from across files, it will be re-parsed.
|
||||
parsed.clear();
|
||||
}
|
||||
|
||||
diagnostics.sort_unstable_by_key(|diagnostic| {
|
||||
diagnostic
|
||||
.primary_span()
|
||||
.and_then(|span| span.range())
|
||||
.unwrap_or_default()
|
||||
.start()
|
||||
});
|
||||
|
||||
diagnostics
|
||||
.to_diagnostic(),
|
||||
);
|
||||
return diagnostics.into_boxed_slice();
|
||||
}
|
||||
|
||||
let parsed = parsed_module(db, file);
|
||||
|
||||
let parsed_ref = parsed.load(db);
|
||||
diagnostics.extend(
|
||||
parsed_ref
|
||||
.errors()
|
||||
.iter()
|
||||
.map(|error| Diagnostic::invalid_syntax(file, &error.error, error)),
|
||||
);
|
||||
|
||||
diagnostics.extend(parsed_ref.unsupported_syntax_errors().iter().map(|error| {
|
||||
let mut error = Diagnostic::invalid_syntax(file, error, error);
|
||||
add_inferred_python_version_hint_to_diagnostic(db, &mut error, "parsing syntax");
|
||||
error
|
||||
}));
|
||||
|
||||
{
|
||||
let db = AssertUnwindSafe(db);
|
||||
match catch(&**db, file, || check_types(*db, file)) {
|
||||
Ok(Some(type_check_diagnostics)) => {
|
||||
diagnostics.extend(type_check_diagnostics);
|
||||
}
|
||||
Ok(None) => {}
|
||||
Err(diagnostic) => diagnostics.push(diagnostic),
|
||||
}
|
||||
}
|
||||
|
||||
if db
|
||||
.project()
|
||||
.open_fileset(db)
|
||||
.is_none_or(|files| !files.contains(&file))
|
||||
{
|
||||
// Drop the AST now that we are done checking this file. It is not currently open,
|
||||
// so it is unlikely to be accessed again soon. If any queries need to access the AST
|
||||
// from across files, it will be re-parsed.
|
||||
parsed.clear();
|
||||
}
|
||||
|
||||
diagnostics.sort_unstable_by_key(|diagnostic| {
|
||||
diagnostic
|
||||
.primary_span()
|
||||
.and_then(|span| span.range())
|
||||
.unwrap_or_default()
|
||||
.start()
|
||||
});
|
||||
|
||||
diagnostics.into_boxed_slice()
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -701,8 +706,8 @@ where
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::Db;
|
||||
use crate::ProjectMetadata;
|
||||
use crate::check_file_impl;
|
||||
use crate::db::tests::TestDb;
|
||||
use ruff_db::Db as _;
|
||||
use ruff_db::files::system_path_to_file;
|
||||
@@ -741,9 +746,8 @@ mod tests {
|
||||
|
||||
assert_eq!(source_text(&db, file).as_str(), "");
|
||||
assert_eq!(
|
||||
db.project()
|
||||
.check_file_impl(&db, file)
|
||||
.into_iter()
|
||||
check_file_impl(&db, file)
|
||||
.iter()
|
||||
.map(|diagnostic| diagnostic.primary_message().to_string())
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["Failed to read file: No such file or directory".to_string()]
|
||||
@@ -758,9 +762,8 @@ mod tests {
|
||||
|
||||
assert_eq!(source_text(&db, file).as_str(), "");
|
||||
assert_eq!(
|
||||
db.project()
|
||||
.check_file_impl(&db, file)
|
||||
.into_iter()
|
||||
check_file_impl(&db, file)
|
||||
.iter()
|
||||
.map(|diagnostic| diagnostic.primary_message().to_string())
|
||||
.collect::<Vec<_>>(),
|
||||
vec![] as Vec<String>
|
||||
|
||||
@@ -979,6 +979,39 @@ impl GlobFilterContext {
|
||||
}
|
||||
}
|
||||
|
||||
/// The diagnostic output format.
|
||||
#[derive(Debug, Default, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
pub enum OutputFormat {
|
||||
/// The default full mode will print "pretty" diagnostics.
|
||||
///
|
||||
/// That is, color will be used when printing to a `tty`.
|
||||
/// Moreover, diagnostic messages may include additional
|
||||
/// context and annotations on the input to help understand
|
||||
/// the message.
|
||||
#[default]
|
||||
Full,
|
||||
/// Print diagnostics in a concise mode.
|
||||
///
|
||||
/// This will guarantee that each diagnostic is printed on
|
||||
/// a single line. Only the most important or primary aspects
|
||||
/// of the diagnostic are included. Contextual information is
|
||||
/// dropped.
|
||||
///
|
||||
/// This may use color when printing to a `tty`.
|
||||
Concise,
|
||||
}
|
||||
|
||||
impl From<OutputFormat> for DiagnosticFormat {
|
||||
fn from(value: OutputFormat) -> Self {
|
||||
match value {
|
||||
OutputFormat::Full => Self::Full,
|
||||
OutputFormat::Concise => Self::Concise,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize, OptionsMetadata,
|
||||
)]
|
||||
@@ -996,7 +1029,7 @@ pub struct TerminalOptions {
|
||||
output-format = "concise"
|
||||
"#
|
||||
)]
|
||||
pub output_format: Option<RangedValue<DiagnosticFormat>>,
|
||||
pub output_format: Option<RangedValue<OutputFormat>>,
|
||||
/// Use exit code 1 if there are any warning-level diagnostics.
|
||||
///
|
||||
/// Defaults to `false`.
|
||||
@@ -1295,7 +1328,7 @@ pub(super) struct InnerOverrideOptions {
|
||||
#[derive(Debug)]
|
||||
pub struct ToSettingsError {
|
||||
diagnostic: Box<OptionDiagnostic>,
|
||||
output_format: DiagnosticFormat,
|
||||
output_format: OutputFormat,
|
||||
color: bool,
|
||||
}
|
||||
|
||||
@@ -1309,7 +1342,7 @@ impl ToSettingsError {
|
||||
impl fmt::Display for DisplayPretty<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let display_config = DisplayDiagnosticConfig::default()
|
||||
.format(self.error.output_format)
|
||||
.format(self.error.output_format.into())
|
||||
.color(self.error.color);
|
||||
|
||||
write!(
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use ruff_db::{diagnostic::DiagnosticFormat, files::File};
|
||||
use ruff_db::files::File;
|
||||
use ty_python_semantic::lint::RuleSelection;
|
||||
|
||||
use crate::metadata::options::InnerOverrideOptions;
|
||||
use crate::metadata::options::{InnerOverrideOptions, OutputFormat};
|
||||
use crate::{Db, combine::Combine, glob::IncludeExcludeFilter};
|
||||
|
||||
/// The resolved [`super::Options`] for the project.
|
||||
@@ -57,7 +57,7 @@ impl Settings {
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Default)]
|
||||
pub struct TerminalSettings {
|
||||
pub output_format: DiagnosticFormat,
|
||||
pub output_format: OutputFormat,
|
||||
pub error_on_warning: bool,
|
||||
}
|
||||
|
||||
|
||||
@@ -1304,7 +1304,7 @@ scope of the name that was declared `global`, can add a symbol to the global nam
|
||||
def f():
|
||||
global g, h
|
||||
|
||||
g: bool = True
|
||||
g = True
|
||||
|
||||
f()
|
||||
```
|
||||
|
||||
@@ -83,7 +83,7 @@ def f():
|
||||
x = 1
|
||||
def g() -> None:
|
||||
nonlocal x
|
||||
global x # TODO: error: [invalid-syntax] "name 'x' is nonlocal and global"
|
||||
global x # error: [invalid-syntax] "name `x` is nonlocal and global"
|
||||
x = None
|
||||
```
|
||||
|
||||
@@ -209,5 +209,18 @@ x: int = 1
|
||||
|
||||
def f():
|
||||
global x
|
||||
x: str = "foo" # TODO: error: [invalid-syntax] "annotated name 'x' can't be global"
|
||||
x: str = "foo" # error: [invalid-syntax] "annotated name `x` can't be global"
|
||||
```
|
||||
|
||||
## Global declarations affect the inferred type of the binding
|
||||
|
||||
Even if the `global` declaration isn't used in an assignment, we conservatively assume it could be:
|
||||
|
||||
```py
|
||||
x = 1
|
||||
|
||||
def f():
|
||||
global x
|
||||
|
||||
# TODO: reveal_type(x) # revealed: Unknown | Literal[1]
|
||||
```
|
||||
|
||||
@@ -43,3 +43,321 @@ def f():
|
||||
def h():
|
||||
reveal_type(x) # revealed: Unknown | Literal[1]
|
||||
```
|
||||
|
||||
## The `nonlocal` keyword
|
||||
|
||||
Without the `nonlocal` keyword, bindings in an inner scope shadow variables of the same name in
|
||||
enclosing scopes. This example isn't a type error, because the inner `x` shadows the outer one:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x: int = 1
|
||||
def g():
|
||||
x = "hello" # allowed
|
||||
```
|
||||
|
||||
With `nonlocal` it is a type error, because `x` refers to the same place in both scopes:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x: int = 1
|
||||
def g():
|
||||
nonlocal x
|
||||
x = "hello" # error: [invalid-assignment] "Object of type `Literal["hello"]` is not assignable to `int`"
|
||||
```
|
||||
|
||||
## Local variable bindings "look ahead" to any assignment in the current scope
|
||||
|
||||
The binding `x = 2` in `g` causes the earlier read of `x` to refer to `g`'s not-yet-initialized
|
||||
binding, rather than to `x = 1` in `f`'s scope:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
if x == 1: # error: [unresolved-reference] "Name `x` used when not defined"
|
||||
x = 2
|
||||
```
|
||||
|
||||
The `nonlocal` keyword makes this example legal (and makes the assignment `x = 2` affect the outer
|
||||
scope):
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
nonlocal x
|
||||
if x == 1:
|
||||
x = 2
|
||||
```
|
||||
|
||||
For the same reason, using the `+=` operator in an inner scope is an error without `nonlocal`
|
||||
(unless you shadow the outer variable first):
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
x += 1 # error: [unresolved-reference] "Name `x` used when not defined"
|
||||
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
x = 1
|
||||
x += 1 # allowed, but doesn't affect the outer scope
|
||||
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
nonlocal x
|
||||
x += 1 # allowed, and affects the outer scope
|
||||
```
|
||||
|
||||
## `nonlocal` declarations must match an outer binding
|
||||
|
||||
`nonlocal x` isn't allowed when there's no binding for `x` in an enclosing scope:
|
||||
|
||||
```py
|
||||
def f():
|
||||
def g():
|
||||
nonlocal x # error: [invalid-syntax] "no binding for nonlocal `x` found"
|
||||
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
nonlocal x, y # error: [invalid-syntax] "no binding for nonlocal `y` found"
|
||||
```
|
||||
|
||||
A global `x` doesn't work. The target must be in a function-like scope:
|
||||
|
||||
```py
|
||||
x = 1
|
||||
|
||||
def f():
|
||||
def g():
|
||||
nonlocal x # error: [invalid-syntax] "no binding for nonlocal `x` found"
|
||||
|
||||
def f():
|
||||
global x
|
||||
def g():
|
||||
nonlocal x # error: [invalid-syntax] "no binding for nonlocal `x` found"
|
||||
```
|
||||
|
||||
A class-scoped `x` also doesn't work:
|
||||
|
||||
```py
|
||||
class Foo:
|
||||
x = 1
|
||||
@staticmethod
|
||||
def f():
|
||||
nonlocal x # error: [invalid-syntax] "no binding for nonlocal `x` found"
|
||||
```
|
||||
|
||||
However, class-scoped bindings don't break the `nonlocal` chain the way `global` declarations do:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x: int = 1
|
||||
|
||||
class Foo:
|
||||
x: str = "hello"
|
||||
|
||||
@staticmethod
|
||||
def g():
|
||||
# Skips the class scope and reaches the outer function scope.
|
||||
nonlocal x
|
||||
x = 2 # allowed
|
||||
x = "goodbye" # error: [invalid-assignment]
|
||||
```
|
||||
|
||||
## `nonlocal` uses the closest binding
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
x = 2
|
||||
def h():
|
||||
nonlocal x
|
||||
reveal_type(x) # revealed: Unknown | Literal[2]
|
||||
```
|
||||
|
||||
## `nonlocal` "chaining"
|
||||
|
||||
Multiple `nonlocal` statements can "chain" through nested scopes:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
nonlocal x
|
||||
def h():
|
||||
nonlocal x
|
||||
reveal_type(x) # revealed: Unknown | Literal[1]
|
||||
```
|
||||
|
||||
And the `nonlocal` chain can skip over a scope that doesn't bind the variable:
|
||||
|
||||
```py
|
||||
def f1():
|
||||
x = 1
|
||||
def f2():
|
||||
nonlocal x
|
||||
def f3():
|
||||
# No binding; this scope gets skipped.
|
||||
def f4():
|
||||
nonlocal x
|
||||
reveal_type(x) # revealed: Unknown | Literal[1]
|
||||
```
|
||||
|
||||
But a `global` statement breaks the chain:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
global x
|
||||
def h():
|
||||
nonlocal x # error: [invalid-syntax] "no binding for nonlocal `x` found"
|
||||
```
|
||||
|
||||
## `nonlocal` bindings respect declared types from the defining scope, even without a binding
|
||||
|
||||
```py
|
||||
def f():
|
||||
x: int
|
||||
def g():
|
||||
nonlocal x
|
||||
x = "string" # error: [invalid-assignment] "Object of type `Literal["string"]` is not assignable to `int`"
|
||||
```
|
||||
|
||||
## A complicated mixture of `nonlocal` chaining, empty scopes, class scopes, and the `global` keyword
|
||||
|
||||
```py
|
||||
def f1():
|
||||
# The original bindings of `x`, `y`, and `z` with type declarations.
|
||||
x: int = 1
|
||||
y: int = 2
|
||||
z: int = 3
|
||||
|
||||
def f2():
|
||||
# This scope doesn't touch `x`, `y`, or `z` at all.
|
||||
|
||||
class Foo:
|
||||
# This class scope is totally ignored.
|
||||
x: str = "a"
|
||||
y: str = "b"
|
||||
z: str = "c"
|
||||
|
||||
@staticmethod
|
||||
def f3():
|
||||
# This scope declares `x` nonlocal and `y` as global, and it shadows `z` without
|
||||
# giving it a type declaration.
|
||||
nonlocal x
|
||||
x = 4
|
||||
y = 5
|
||||
global z
|
||||
z = 6
|
||||
|
||||
def f4():
|
||||
# This scope sees `x` from `f1` and `y` from `f3`. It *can't* declare `z`
|
||||
# nonlocal, because of the global statement above, but it *can* load `z` as a
|
||||
# "free" variable, in which case it sees the global value.
|
||||
nonlocal x, y, z # error: [invalid-syntax] "no binding for nonlocal `z` found"
|
||||
x = "string" # error: [invalid-assignment]
|
||||
y = "string" # allowed, because `f3`'s `y` is untyped
|
||||
reveal_type(z) # revealed: Unknown | Literal[6]
|
||||
```
|
||||
|
||||
## TODO: `nonlocal` affects the inferred type in the outer scope
|
||||
|
||||
Without `nonlocal`, `g` can't write to `x`, and the inferred type of `x` in `f`'s scope isn't
|
||||
affected by `g`:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
reveal_type(x) # revealed: Unknown | Literal[1]
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
But with `nonlocal`, `g` could write to `x`, and that affects its inferred type in `f`. That's true
|
||||
regardless of whether `g` actually writes to `x`. With a write:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
nonlocal x
|
||||
reveal_type(x) # revealed: Unknown | Literal[1]
|
||||
x += 1
|
||||
reveal_type(x) # revealed: Unknown | Literal[2]
|
||||
# TODO: should be `Unknown | Literal[1]`
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
Without a write:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
nonlocal x
|
||||
reveal_type(x) # revealed: Unknown | Literal[1]
|
||||
# TODO: should be `Unknown | Literal[1]`
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## Annotating a `nonlocal` binding is a syntax error
|
||||
|
||||
```py
|
||||
def f():
|
||||
x: int = 1
|
||||
def g():
|
||||
nonlocal x
|
||||
x: str = "foo" # error: [invalid-syntax] "annotated name `x` can't be nonlocal"
|
||||
```
|
||||
|
||||
## Use before `nonlocal`
|
||||
|
||||
Using a name prior to its `nonlocal` declaration in the same scope is a syntax error:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
x = 2
|
||||
nonlocal x # error: [invalid-syntax] "name `x` is used prior to nonlocal declaration"
|
||||
```
|
||||
|
||||
This is still an error even if there are multiple `nonlocal` declarations of the same variable, as
long as any of them comes after the usage:
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
nonlocal x
|
||||
x = 2
|
||||
nonlocal x # error: [invalid-syntax] "name `x` is used prior to nonlocal declaration"
|
||||
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
nonlocal x
|
||||
nonlocal x
|
||||
x = 2 # allowed
|
||||
```
|
||||
|
||||
## `nonlocal` before outer initialization
|
||||
|
||||
`nonlocal x` works even if `x` isn't bound in the enclosing scope until afterwards:
|
||||
|
||||
```py
|
||||
def f():
|
||||
def g():
|
||||
# This is allowed, because of the subsequent definition of `x`.
|
||||
nonlocal x
|
||||
x = 1
|
||||
```
|
||||
|
||||
@@ -147,8 +147,7 @@ def nonlocal_use():
|
||||
X: Final[int] = 1
|
||||
def inner():
|
||||
nonlocal X
|
||||
# TODO: this should be an error
|
||||
X = 2
|
||||
X = 2 # error: [invalid-assignment] "Reassignment of `Final` symbol `X` is not allowed: Reassignment of `Final` symbol"
|
||||
```
|
||||
|
||||
`main.py`:
|
||||
|
||||
@@ -3,9 +3,13 @@ use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use ruff_db::files::File;
|
||||
use ruff_python_ast::name::Name;
|
||||
use ruff_python_stdlib::identifiers::is_identifier;
|
||||
|
||||
use super::path::SearchPath;
|
||||
use crate::Db;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::path::SystemOrVendoredPathRef;
|
||||
|
||||
/// Representation of a Python module.
|
||||
#[derive(Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
|
||||
@@ -85,6 +89,100 @@ impl Module {
|
||||
ModuleInner::NamespacePackage { .. } => ModuleKind::Package,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a list of all submodules of this module.
|
||||
///
|
||||
/// Returns an empty list if the module is not a package, if it is an empty package,
|
||||
/// or if it is a namespace package (one without an `__init__.py` or `__init__.pyi` file).
|
||||
///
|
||||
/// The names returned correspond to the "base" name of the module.
|
||||
/// That is, `{self.name}.{basename}` should give the full module name.
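///
/// For example (illustrative), for a package laid out as:
///
/// ```text
/// foo/__init__.py
/// foo/bar.py
/// foo/baz/__init__.py
/// ```
///
/// calling this on the `foo` module returns the names `bar` and `baz`.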
|
||||
pub fn all_submodules(&self, db: &dyn Db) -> Vec<Name> {
|
||||
self.all_submodules_inner(db).unwrap_or_default()
|
||||
}
|
||||
|
||||
fn all_submodules_inner(&self, db: &dyn Db) -> Option<Vec<Name>> {
|
||||
fn is_submodule(
|
||||
is_dir: bool,
|
||||
is_file: bool,
|
||||
basename: Option<&str>,
|
||||
extension: Option<&str>,
|
||||
) -> bool {
|
||||
is_dir
|
||||
|| (is_file
|
||||
&& matches!(extension, Some("py" | "pyi"))
|
||||
&& !matches!(basename, Some("__init__.py" | "__init__.pyi")))
|
||||
}
|
||||
|
||||
// It would be complex and expensive to compute all submodules for
|
||||
// namespace packages, since a namespace package doesn't correspond
|
||||
// to a single file; it can span multiple directories across multiple
|
||||
// search paths. For now, we only compute submodules for traditional
|
||||
// packages that exist in a single directory on a single search path.
|
||||
let ModuleInner::FileModule {
|
||||
kind: ModuleKind::Package,
|
||||
file,
|
||||
..
|
||||
} = &*self.inner
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
|
||||
let path = SystemOrVendoredPathRef::try_from_file(db, *file)?;
|
||||
debug_assert!(
|
||||
matches!(path.file_name(), Some("__init__.py" | "__init__.pyi")),
|
||||
"expected package file `{:?}` to be `__init__.py` or `__init__.pyi`",
|
||||
path.file_name(),
|
||||
);
|
||||
|
||||
Some(match path.parent()? {
|
||||
SystemOrVendoredPathRef::System(parent_directory) => db
|
||||
.system()
|
||||
.read_directory(parent_directory)
|
||||
.inspect_err(|err| {
|
||||
tracing::debug!(
|
||||
"Failed to read {parent_directory:?} when looking for \
|
||||
its possible submodules: {err}"
|
||||
);
|
||||
})
|
||||
.ok()?
|
||||
.flatten()
|
||||
.filter(|entry| {
|
||||
let ty = entry.file_type();
|
||||
let path = entry.path();
|
||||
is_submodule(
|
||||
ty.is_directory(),
|
||||
ty.is_file(),
|
||||
path.file_name(),
|
||||
path.extension(),
|
||||
)
|
||||
})
|
||||
.filter_map(|entry| {
|
||||
let stem = entry.path().file_stem()?;
|
||||
is_identifier(stem).then(|| Name::from(stem))
|
||||
})
|
||||
.collect(),
|
||||
SystemOrVendoredPathRef::Vendored(parent_directory) => db
|
||||
.vendored()
|
||||
.read_directory(parent_directory)
|
||||
.into_iter()
|
||||
.filter(|entry| {
|
||||
let ty = entry.file_type();
|
||||
let path = entry.path();
|
||||
is_submodule(
|
||||
ty.is_directory(),
|
||||
ty.is_file(),
|
||||
path.file_name(),
|
||||
path.extension(),
|
||||
)
|
||||
})
|
||||
.filter_map(|entry| {
|
||||
let stem = entry.path().file_stem()?;
|
||||
is_identifier(stem).then(|| Name::from(stem))
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Module {
|
||||
|
||||
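As a rough sketch of what the submodule filtering above amounts to in Python terms (illustrative only; the `pkg` layout named in the comments is hypothetical, and this is not the project's API):

```python
from pathlib import Path


def submodule_basenames(package_dir: Path) -> list[str]:
    """Rough analogue of `is_submodule` plus the identifier filter above:
    directories count as submodules, and so do .py/.pyi files other than
    __init__.py(i); names that are not valid identifiers are dropped.
    """
    names: list[str] = []
    for entry in package_dir.iterdir():
        if entry.is_dir():
            names.append(entry.name)
        elif entry.suffix in {".py", ".pyi"} and entry.stem != "__init__":
            names.append(entry.stem)
    return [name for name in names if name.isidentifier()]


# For a hypothetical layout pkg/__init__.py, pkg/util.py, pkg/sub/__init__.py,
# this returns ["sub", "util"] (in directory order); the full module names are
# then "pkg.sub" and "pkg.util".
```
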
@@ -4,11 +4,12 @@ use std::fmt;
use std::sync::Arc;

use camino::{Utf8Path, Utf8PathBuf};
use ruff_db::files::{File, FileError, system_path_to_file, vendored_path_to_file};
use ruff_db::files::{File, FileError, FilePath, system_path_to_file, vendored_path_to_file};
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_db::vendored::{VendoredPath, VendoredPathBuf};

use super::typeshed::{TypeshedVersionsParseError, TypeshedVersionsQueryResult, typeshed_versions};
use crate::Db;
use crate::module_name::ModuleName;
use crate::module_resolver::resolver::ResolverContext;
use crate::site_packages::SitePackagesDiscoveryError;
@@ -652,6 +653,48 @@ impl fmt::Display for SearchPath {
    }
}

#[derive(Debug, Clone)]
pub(super) enum SystemOrVendoredPathRef<'db> {
    System(&'db SystemPath),
    Vendored(&'db VendoredPath),
}

impl<'db> SystemOrVendoredPathRef<'db> {
    pub(super) fn try_from_file(db: &'db dyn Db, file: File) -> Option<Self> {
        match file.path(db) {
            FilePath::System(system) => Some(Self::System(system)),
            FilePath::Vendored(vendored) => Some(Self::Vendored(vendored)),
            FilePath::SystemVirtual(_) => None,
        }
    }

    pub(super) fn file_name(&self) -> Option<&str> {
        match self {
            Self::System(system) => system.file_name(),
            Self::Vendored(vendored) => vendored.file_name(),
        }
    }

    pub(super) fn parent<'a>(&'a self) -> Option<SystemOrVendoredPathRef<'a>>
    where
        'a: 'db,
    {
        match self {
            Self::System(system) => system.parent().map(Self::System),
            Self::Vendored(vendored) => vendored.parent().map(Self::Vendored),
        }
    }
}

impl std::fmt::Display for SystemOrVendoredPathRef<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            SystemOrVendoredPathRef::System(system) => system.fmt(f),
            SystemOrVendoredPathRef::Vendored(vendored) => vendored.fmt(f),
        }
    }
}

#[cfg(test)]
mod tests {
    use ruff_db::Db;

@@ -8,7 +8,7 @@ use rustc_hash::{FxBuildHasher, FxHashSet};

use ruff_db::files::{File, FilePath, FileRootKind};
use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};
use ruff_db::vendored::{VendoredFileSystem, VendoredPath};
use ruff_db::vendored::VendoredFileSystem;
use ruff_python_ast::PythonVersion;

use crate::db::Db;
@@ -17,7 +17,7 @@ use crate::module_resolver::typeshed::{TypeshedVersions, vendored_typeshed_versi
use crate::{Program, SearchPathSettings};

use super::module::{Module, ModuleKind};
use super::path::{ModulePath, SearchPath, SearchPathValidationError};
use super::path::{ModulePath, SearchPath, SearchPathValidationError, SystemOrVendoredPathRef};

/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: &ModuleName) -> Option<Module> {
@@ -77,21 +77,6 @@ pub(crate) fn path_to_module(db: &dyn Db, path: &FilePath) -> Option<Module> {
    file_to_module(db, file)
}

#[derive(Debug, Clone, Copy)]
enum SystemOrVendoredPathRef<'a> {
    System(&'a SystemPath),
    Vendored(&'a VendoredPath),
}

impl std::fmt::Display for SystemOrVendoredPathRef<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            SystemOrVendoredPathRef::System(system) => system.fmt(f),
            SystemOrVendoredPathRef::Vendored(vendored) => vendored.fmt(f),
        }
    }
}

/// Resolves the module for the file with the given id.
///
/// Returns `None` if the file is not a module locatable via any of the known search paths.
@@ -99,11 +84,7 @@ impl std::fmt::Display for SystemOrVendoredPathRef<'_> {
pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
    let _span = tracing::trace_span!("file_to_module", ?file).entered();

    let path = match file.path(db) {
        FilePath::System(system) => SystemOrVendoredPathRef::System(system),
        FilePath::Vendored(vendored) => SystemOrVendoredPathRef::Vendored(vendored),
        FilePath::SystemVirtual(_) => return None,
    };
    let path = SystemOrVendoredPathRef::try_from_file(db, file)?;

    let module_name = search_paths(db).find_map(|candidate| {
        let relative_path = match path {

@@ -1421,7 +1421,7 @@ impl RequiresExplicitReExport {
/// ```py
/// def _():
///     x = 1
///
///
/// x = 2
///
/// if flag():

@@ -23,6 +23,7 @@ use crate::semantic_index::place::{
    FileScopeId, NodeWithScopeKey, NodeWithScopeRef, PlaceExpr, PlaceTable, Scope, ScopeId,
    ScopeKind, ScopedPlaceId,
};
use crate::semantic_index::reachability_constraints::ScopedReachabilityConstraintId;
use crate::semantic_index::use_def::{EagerSnapshotKey, ScopedEagerSnapshotId, UseDefMap};
use crate::util::get_size::untracked_arc_size;

@@ -211,15 +212,15 @@ pub(crate) struct SemanticIndex<'db> {
    /// Map from a standalone expression to its [`Expression`] ingredient.
    expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,

    /// Tracks whether or not a given AST node is reachable from the start of the scope.
    node_reachability: FxHashMap<NodeKey, ScopedReachabilityConstraintId>,

    /// Map from nodes that create a scope to the scope they create.
    scopes_by_node: FxHashMap<NodeWithScopeKey, FileScopeId>,

    /// Map from the file-local [`FileScopeId`] to the salsa-ingredient [`ScopeId`].
    scope_ids_by_scope: IndexVec<FileScopeId, ScopeId<'db>>,

    /// Map from the file-local [`FileScopeId`] to the set of explicit-global symbols it contains.
    globals_by_scope: FxHashMap<FileScopeId, FxHashSet<ScopedPlaceId>>,

    /// Use-def map for each scope in this file.
    use_def_maps: IndexVec<FileScopeId, ArcUseDefMap<'db>>,

@@ -308,9 +309,19 @@ impl<'db> SemanticIndex<'db> {
        symbol: ScopedPlaceId,
        scope: FileScopeId,
    ) -> bool {
        self.globals_by_scope
            .get(&scope)
            .is_some_and(|globals| globals.contains(&symbol))
        self.place_table(scope)
            .place_expr(symbol)
            .is_marked_global()
    }

    pub(crate) fn symbol_is_nonlocal_in_scope(
        &self,
        symbol: ScopedPlaceId,
        scope: FileScopeId,
    ) -> bool {
        self.place_table(scope)
            .place_expr(symbol)
            .is_marked_nonlocal()
    }

    /// Returns the id of the parent scope.
@@ -357,8 +368,15 @@ impl<'db> SemanticIndex<'db> {
        scope_id: FileScopeId,
        node_key: NodeKey,
    ) -> bool {
        self.is_scope_reachable(db, scope_id)
            && self.use_def_map(scope_id).is_node_reachable(db, node_key)
        if !self.is_scope_reachable(db, scope_id) {
            return false;
        }

        let constraint = *self.node_reachability.get(&node_key).expect(
            "`is_node_reachable` should only be called on AST nodes with recorded reachability",
        );

        self.use_def_map(scope_id).is_node_reachable(db, constraint)
    }

    /// Returns an iterator over the descendent scopes of `scope`.

@@ -20,8 +20,8 @@ use crate::ast_node_ref::AstNodeRef;
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::node_key::NodeKey;
use crate::semantic_index::ast_ids::AstIdsBuilder;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::{AstIdsBuilder, ScopedUseId};
use crate::semantic_index::definition::{
    AnnotatedAssignmentDefinitionNodeRef, AssignmentDefinitionNodeRef,
    ComprehensionDefinitionNodeRef, Definition, DefinitionCategory, DefinitionNodeKey,
@@ -103,9 +103,10 @@ pub(super) struct SemanticIndexBuilder<'db, 'ast> {
    use_def_maps: IndexVec<FileScopeId, UseDefMapBuilder<'db>>,
    scopes_by_node: FxHashMap<NodeWithScopeKey, FileScopeId>,
    scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,
    globals_by_scope: FxHashMap<FileScopeId, FxHashSet<ScopedPlaceId>>,
    definitions_by_node: FxHashMap<DefinitionNodeKey, Definitions<'db>>,
    expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,
    /// Tracks whether or not a given AST node is reachable from the start of the scope.
    node_reachability: FxHashMap<NodeKey, ScopedReachabilityConstraintId>,
    imported_modules: FxHashSet<ModuleName>,
    /// Hashset of all [`FileScopeId`]s that correspond to [generator functions].
    ///
@@ -141,7 +142,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
            scopes_by_node: FxHashMap::default(),
            definitions_by_node: FxHashMap::default(),
            expressions_by_node: FxHashMap::default(),
            globals_by_scope: FxHashMap::default(),
            node_reachability: FxHashMap::default(),

            imported_modules: FxHashSet::default(),
            generator_functions: FxHashSet::default(),
@@ -349,7 +350,12 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
        popped_scope_id
    }

    fn current_place_table(&mut self) -> &mut PlaceTableBuilder {
    fn current_place_table(&self) -> &PlaceTableBuilder {
        let scope_id = self.current_scope();
        &self.place_tables[scope_id]
    }

    fn current_place_table_mut(&mut self) -> &mut PlaceTableBuilder {
        let scope_id = self.current_scope();
        &mut self.place_tables[scope_id]
    }
@@ -389,7 +395,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
    /// Add a symbol to the place table and the use-def map.
    /// Return the [`ScopedPlaceId`] that uniquely identifies the symbol in both.
    fn add_symbol(&mut self, name: Name) -> ScopedPlaceId {
        let (place_id, added) = self.current_place_table().add_symbol(name);
        let (place_id, added) = self.current_place_table_mut().add_symbol(name);
        if added {
            self.current_use_def_map_mut().add_place(place_id);
        }
@@ -399,7 +405,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
    /// Add a place to the place table and the use-def map.
    /// Return the [`ScopedPlaceId`] that uniquely identifies the place in both.
    fn add_place(&mut self, place_expr: PlaceExprWithFlags) -> ScopedPlaceId {
        let (place_id, added) = self.current_place_table().add_place(place_expr);
        let (place_id, added) = self.current_place_table_mut().add_place(place_expr);
        if added {
            self.current_use_def_map_mut().add_place(place_id);
        }
@@ -407,15 +413,15 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
    }

    fn mark_place_bound(&mut self, id: ScopedPlaceId) {
        self.current_place_table().mark_place_bound(id);
        self.current_place_table_mut().mark_place_bound(id);
    }

    fn mark_place_declared(&mut self, id: ScopedPlaceId) {
        self.current_place_table().mark_place_declared(id);
        self.current_place_table_mut().mark_place_declared(id);
    }

    fn mark_place_used(&mut self, id: ScopedPlaceId) {
        self.current_place_table().mark_place_used(id);
        self.current_place_table_mut().mark_place_used(id);
    }

    fn add_entry_for_definition_key(&mut self, key: DefinitionNodeKey) -> &mut Definitions<'db> {
@@ -661,6 +667,19 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
            .record_reachability_constraint(negated_constraint);
    }

    fn record_node_reachability(&mut self, node: NodeKey) {
        self.node_reachability
            .insert(node, self.current_use_def_map().reachability);
    }

    fn record_use(&mut self, place: ScopedPlaceId, use_id: ScopedUseId, node_key: NodeKey) {
        self.current_use_def_map_mut().record_use(place, use_id);

        // Track reachability of all uses of places to silence `unresolved-reference`
        // diagnostics in unreachable code.
        self.record_node_reachability(node_key);
    }

    fn push_assignment(&mut self, assignment: CurrentAssignment<'ast, 'db>) {
        self.current_assignments.push(assignment);
    }
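
The comment in `record_use` above, about silencing `unresolved-reference` in unreachable code, is easiest to see from the Python side. A minimal, purely illustrative sketch of the kind of code this handles:

```python
def f() -> int:
    return 1
    # Everything past the `return` is unreachable. Recording reachability for the
    # use of `undefined_name` below is what lets the checker stay silent here
    # instead of reporting an unresolved reference in dead code.
    print(undefined_name)
```
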
@@ -1037,12 +1056,12 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
        ast_ids.shrink_to_fit();
        self.scopes_by_expression.shrink_to_fit();
        self.definitions_by_node.shrink_to_fit();
        self.node_reachability.shrink_to_fit();

        self.scope_ids_by_scope.shrink_to_fit();
        self.scopes_by_node.shrink_to_fit();
        self.generator_functions.shrink_to_fit();
        self.eager_snapshots.shrink_to_fit();
        self.globals_by_scope.shrink_to_fit();

        SemanticIndex {
            place_tables,
@@ -1050,10 +1069,10 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
            definitions_by_node: self.definitions_by_node,
            expressions_by_node: self.expressions_by_node,
            scope_ids_by_scope: self.scope_ids_by_scope,
            globals_by_scope: self.globals_by_scope,
            ast_ids,
            scopes_by_expression: self.scopes_by_expression,
            scopes_by_node: self.scopes_by_node,
            node_reachability: self.node_reachability,
            use_def_maps,
            imported_modules: Arc::new(self.imported_modules),
            has_future_annotations: self.has_future_annotations,
@@ -1144,8 +1163,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
                // AST uses.
                self.mark_place_used(symbol);
                let use_id = self.current_ast_ids().record_use(name);
                self.current_use_def_map_mut()
                    .record_use(symbol, use_id, NodeKey::from_node(name));
                self.record_use(symbol, use_id, NodeKey::from_node(name));

                self.add_definition(symbol, function_def);
            }
@@ -1195,8 +1213,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
                );
            }
            ast::Stmt::Import(node) => {
                self.current_use_def_map_mut()
                    .record_node_reachability(NodeKey::from_node(node));
                self.record_node_reachability(NodeKey::from_node(node));

                for (alias_index, alias) in node.names.iter().enumerate() {
                    // Mark the imported module, and all of its parents, as being imported in this
@@ -1223,8 +1240,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
                    }
                }
            ast::Stmt::ImportFrom(node) => {
                self.current_use_def_map_mut()
                    .record_node_reachability(NodeKey::from_node(node));
                self.record_node_reachability(NodeKey::from_node(node));

                let mut found_star = false;
                for (alias_index, alias) in node.names.iter().enumerate() {
@@ -1418,6 +1434,29 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
                    self.visit_expr(value);
                }

                if let ast::Expr::Name(name) = &*node.target {
                    let symbol_id = self.add_symbol(name.id.clone());
                    let symbol = self.current_place_table().place_expr(symbol_id);
                    // Check whether the variable has been declared global.
                    if symbol.is_marked_global() {
                        self.report_semantic_error(SemanticSyntaxError {
                            kind: SemanticSyntaxErrorKind::AnnotatedGlobal(name.id.as_str().into()),
                            range: name.range,
                            python_version: self.python_version,
                        });
                    }
                    // Check whether the variable has been declared nonlocal.
                    if symbol.is_marked_nonlocal() {
                        self.report_semantic_error(SemanticSyntaxError {
                            kind: SemanticSyntaxErrorKind::AnnotatedNonlocal(
                                name.id.as_str().into(),
                            ),
                            range: name.range,
                            python_version: self.python_version,
                        });
                    }
                }

                // See https://docs.python.org/3/library/ast.html#ast.AnnAssign
                if matches!(
                    *node.target,
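
The two checks above mirror a rule CPython itself enforces at compile time: an annotated assignment target may not also be declared `global` or `nonlocal`. A small, runnable Python illustration (the snippets and labels are made up for this example):

```python
# Both snippets are rejected by CPython at compile time; the checks above report
# the analogous AnnotatedGlobal / AnnotatedNonlocal semantic syntax errors.
snippets = {
    "global": "def f():\n    global x\n    x: int = 1\n",
    "nonlocal": "def f():\n    x = 0\n    def g():\n        nonlocal x\n        x: int = 1\n",
}
for label, source in snippets.items():
    try:
        compile(source, f"<{label}>", "exec")
    except SyntaxError as err:
        print(label, "->", err.msg)
# global -> annotated name 'x' can't be global
# nonlocal -> annotated name 'x' can't be nonlocal
```
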
@@ -1858,8 +1897,8 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
            }) => {
                for name in names {
                    let symbol_id = self.add_symbol(name.id.clone());
                    let symbol_table = self.current_place_table();
                    let symbol = symbol_table.place_expr(symbol_id);
                    let symbol = self.current_place_table().place_expr(symbol_id);
                    // Check whether the variable has already been accessed in this scope.
                    if symbol.is_bound() || symbol.is_declared() || symbol.is_used() {
                        self.report_semantic_error(SemanticSyntaxError {
                            kind: SemanticSyntaxErrorKind::LoadBeforeGlobalDeclaration {
@@ -1870,11 +1909,56 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
                            python_version: self.python_version,
                        });
                    }
                    let scope_id = self.current_scope();
                    self.globals_by_scope
                        .entry(scope_id)
                        .or_default()
                        .insert(symbol_id);
                    // Check whether the variable has also been declared nonlocal.
                    if symbol.is_marked_nonlocal() {
                        self.report_semantic_error(SemanticSyntaxError {
                            kind: SemanticSyntaxErrorKind::NonlocalAndGlobal(name.to_string()),
                            range: name.range,
                            python_version: self.python_version,
                        });
                    }
                    self.current_place_table_mut().mark_place_global(symbol_id);
                }
                walk_stmt(self, stmt);
            }
            ast::Stmt::Nonlocal(ast::StmtNonlocal {
                range: _,
                node_index: _,
                names,
            }) => {
                for name in names {
                    let symbol_id = self.add_symbol(name.id.clone());
                    let symbol = self.current_place_table().place_expr(symbol_id);
                    // Check whether the variable has already been accessed in this scope.
                    if symbol.is_bound() || symbol.is_declared() || symbol.is_used() {
                        self.report_semantic_error(SemanticSyntaxError {
                            kind: SemanticSyntaxErrorKind::LoadBeforeNonlocalDeclaration {
                                name: name.to_string(),
                                start: name.range.start(),
                            },
                            range: name.range,
                            python_version: self.python_version,
                        });
                    }
                    // Check whether the variable has also been declared global.
                    if symbol.is_marked_global() {
                        self.report_semantic_error(SemanticSyntaxError {
                            kind: SemanticSyntaxErrorKind::NonlocalAndGlobal(name.to_string()),
                            range: name.range,
                            python_version: self.python_version,
                        });
                    }
                    // The variable is required to exist in an enclosing scope, but that definition
                    // might come later. For example, this is legal, but we can't check
                    // that here, because we haven't gotten to `x = 1`:
                    // ```py
                    // def f():
                    //     def g():
                    //         nonlocal x
                    //     x = 1
                    // ```
                    self.current_place_table_mut()
                        .mark_place_nonlocal(symbol_id);
                }
                walk_stmt(self, stmt);
            }
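
Both arms report `NonlocalAndGlobal` when the same name is declared both ways in one scope, which CPython also rejects at compile time. A minimal sketch (the snippet is made up for illustration):

```python
source = (
    "def f():\n"
    "    x = 0\n"
    "    def g():\n"
    "        global x\n"
    "        nonlocal x\n"
    "        x = 1\n"
)
try:
    compile(source, "<example>", "exec")
except SyntaxError as err:
    print(err.msg)  # name 'x' is nonlocal and global
```
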
@@ -1888,7 +1972,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
                for target in targets {
                    if let Ok(target) = PlaceExpr::try_from(target) {
                        let place_id = self.add_place(PlaceExprWithFlags::new(target));
                        self.current_place_table().mark_place_used(place_id);
                        self.current_place_table_mut().mark_place_used(place_id);
                        self.delete_binding(place_id);
                    }
                }
@@ -1986,8 +2070,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
                if is_use {
                    self.mark_place_used(place_id);
                    let use_id = self.current_ast_ids().record_use(expr);
                    self.current_use_def_map_mut()
                        .record_use(place_id, use_id, node_key);
                    self.record_use(place_id, use_id, node_key);
                }

                if is_definition {
@@ -2080,8 +2163,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
                // Track reachability of attribute expressions to silence `unresolved-attribute`
                // diagnostics in unreachable code.
                if expr.is_attribute_expr() {
                    self.current_use_def_map_mut()
                        .record_node_reachability(node_key);
                    self.record_node_reachability(node_key);
                }

                walk_expr(self, expr);
@@ -2242,8 +2324,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
            ast::Expr::StringLiteral(_) => {
                // Track reachability of string literals, as they could be a stringified annotation
                // with child expressions whose reachability we are interested in.
                self.current_use_def_map_mut()
                    .record_node_reachability(node_key);
                self.record_node_reachability(node_key);

                walk_expr(self, expr);
            }

@@ -1,7 +1,7 @@
use std::ops::Deref;

use ruff_db::files::{File, FileRange};
use ruff_db::parsed::ParsedModuleRef;
use ruff_db::parsed::{ParsedModuleRef, parsed_module};
use ruff_python_ast as ast;
use ruff_text_size::{Ranged, TextRange};

@@ -57,6 +57,45 @@ impl<'db> Definition<'db> {
    pub fn focus_range(self, db: &'db dyn Db, module: &ParsedModuleRef) -> FileRange {
        FileRange::new(self.file(db), self.kind(db).target_range(module))
    }

    /// Extract a docstring from this definition, if applicable.
    /// This method returns a docstring for function and class definitions.
    /// The docstring is extracted from the first statement in the body if it's a string literal.
    pub fn docstring(self, db: &'db dyn Db) -> Option<String> {
        let file = self.file(db);
        let module = parsed_module(db, file).load(db);
        let kind = self.kind(db);

        match kind {
            DefinitionKind::Function(function_def) => {
                let function_node = function_def.node(&module);
                docstring_from_body(&function_node.body)
                    .map(|docstring_expr| docstring_expr.value.to_str().to_owned())
            }
            DefinitionKind::Class(class_def) => {
                let class_node = class_def.node(&module);
                docstring_from_body(&class_node.body)
                    .map(|docstring_expr| docstring_expr.value.to_str().to_owned())
            }
            _ => None,
        }
    }
}

/// Extract a docstring from a function or class body.
fn docstring_from_body(body: &[ast::Stmt]) -> Option<&ast::ExprStringLiteral> {
    let stmt = body.first()?;
    // Require the docstring to be a standalone expression.
    let ast::Stmt::Expr(ast::StmtExpr {
        value,
        range: _,
        node_index: _,
    }) = stmt
    else {
        return None;
    };
    // Only match string literals.
    value.as_string_literal_expr()
}

/// One or more [`Definition`]s.

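For reference, the Python-level rule this mirrors: only a string literal that is the first standalone statement of a body counts as a docstring. A small, self-contained illustration using the standard library (not the project's API):

```python
import ast


def first_docstring(source: str) -> str | None:
    """Docstring of the first definition in `source`, following the same rule as
    above: the first statement of the body must itself be a string literal."""
    node = ast.parse(source).body[0]
    return ast.get_docstring(node)


print(first_docstring('def f():\n    "the docstring"\n'))               # the docstring
print(first_docstring('def f():\n    x = 1\n    "not a docstring"\n'))  # None
```
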
@@ -330,6 +330,16 @@ impl PlaceExprWithFlags {
        self.flags.contains(PlaceFlags::IS_DECLARED)
    }

    /// Is the place `global` in its containing scope?
    pub fn is_marked_global(&self) -> bool {
        self.flags.contains(PlaceFlags::MARKED_GLOBAL)
    }

    /// Is the place `nonlocal` in its containing scope?
    pub fn is_marked_nonlocal(&self) -> bool {
        self.flags.contains(PlaceFlags::MARKED_NONLOCAL)
    }

    pub(crate) fn as_name(&self) -> Option<&Name> {
        self.expr.as_name()
    }
@@ -397,9 +407,7 @@ bitflags! {
        const IS_USED = 1 << 0;
        const IS_BOUND = 1 << 1;
        const IS_DECLARED = 1 << 2;
        /// TODO: This flag is not yet set by anything
        const MARKED_GLOBAL = 1 << 3;
        /// TODO: This flag is not yet set by anything
        const MARKED_NONLOCAL = 1 << 4;
        const IS_INSTANCE_ATTRIBUTE = 1 << 5;
    }
@@ -663,7 +671,7 @@ impl PlaceTable {
    }

    /// Returns the place named `name`.
    #[allow(unused)] // used in tests
    #[cfg(test)]
    pub(crate) fn place_by_name(&self, name: &str) -> Option<&PlaceExprWithFlags> {
        let id = self.place_id_by_name(name)?;
        Some(self.place_expr(id))
@@ -814,6 +822,14 @@ impl PlaceTableBuilder {
        self.table.places[id].insert_flags(PlaceFlags::IS_USED);
    }

    pub(super) fn mark_place_global(&mut self, id: ScopedPlaceId) {
        self.table.places[id].insert_flags(PlaceFlags::MARKED_GLOBAL);
    }

    pub(super) fn mark_place_nonlocal(&mut self, id: ScopedPlaceId) {
        self.table.places[id].insert_flags(PlaceFlags::MARKED_NONLOCAL);
    }

    pub(super) fn places(&self) -> impl Iterator<Item = &PlaceExprWithFlags> {
        self.table.places()
    }

@@ -247,7 +247,6 @@ use self::place_state::{
    Bindings, Declarations, EagerSnapshot, LiveBindingsIterator, LiveDeclaration,
    LiveDeclarationsIterator, PlaceState, ScopedDefinitionId,
};
use crate::node_key::NodeKey;
use crate::place::BoundnessAnalysis;
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::{Definition, DefinitionState};
@@ -288,9 +287,6 @@ pub(crate) struct UseDefMap<'db> {
    /// [`Bindings`] reaching a [`ScopedUseId`].
    bindings_by_use: IndexVec<ScopedUseId, Bindings>,

    /// Tracks whether or not a given AST node is reachable from the start of the scope.
    node_reachability: FxHashMap<NodeKey, ScopedReachabilityConstraintId>,

    /// If the definition is a binding (only) -- `x = 1` for example -- then we need
    /// [`Declarations`] to know whether this binding is permitted by the live declarations.
    ///
@@ -402,17 +398,13 @@ impl<'db> UseDefMap<'db> {
    /// be unreachable. Use [`super::SemanticIndex::is_node_reachable`] for the global
    /// analysis.
    #[track_caller]
    pub(super) fn is_node_reachable(&self, db: &dyn crate::Db, node_key: NodeKey) -> bool {
        self
            .reachability_constraints
            .evaluate(
                db,
                &self.predicates,
                *self
                    .node_reachability
                    .get(&node_key)
                    .expect("`is_node_reachable` should only be called on AST nodes with recorded reachability"),
            )
    pub(super) fn is_node_reachable(
        &self,
        db: &dyn crate::Db,
        constraint: ScopedReachabilityConstraintId,
    ) -> bool {
        self.reachability_constraints
            .evaluate(db, &self.predicates, constraint)
            .may_be_true()
    }

@@ -741,9 +733,6 @@ pub(super) struct UseDefMapBuilder<'db> {
    /// start of the scope.
    pub(super) reachability: ScopedReachabilityConstraintId,

    /// Tracks whether or not a given AST node is reachable from the start of the scope.
    node_reachability: FxHashMap<NodeKey, ScopedReachabilityConstraintId>,

    /// Live declarations for each so-far-recorded binding.
    declarations_by_binding: FxHashMap<Definition<'db>, Declarations>,

@@ -773,7 +762,6 @@ impl<'db> UseDefMapBuilder<'db> {
            reachability_constraints: ReachabilityConstraintsBuilder::default(),
            bindings_by_use: IndexVec::new(),
            reachability: ScopedReachabilityConstraintId::ALWAYS_TRUE,
            node_reachability: FxHashMap::default(),
            declarations_by_binding: FxHashMap::default(),
            bindings_by_definition: FxHashMap::default(),
            place_states: IndexVec::new(),
@@ -1000,26 +988,13 @@ impl<'db> UseDefMapBuilder<'db> {
        );
    }

    pub(super) fn record_use(
        &mut self,
        place: ScopedPlaceId,
        use_id: ScopedUseId,
        node_key: NodeKey,
    ) {
    pub(super) fn record_use(&mut self, place: ScopedPlaceId, use_id: ScopedUseId) {
        // We have a use of a place; clone the current bindings for that place, and record them
        // as the live bindings for this use.
        let new_use = self
            .bindings_by_use
            .push(self.place_states[place].bindings().clone());
        debug_assert_eq!(use_id, new_use);

        // Track reachability of all uses of places to silence `unresolved-reference`
        // diagnostics in unreachable code.
        self.record_node_reachability(node_key);
    }

    pub(super) fn record_node_reachability(&mut self, node_key: NodeKey) {
        self.node_reachability.insert(node_key, self.reachability);
    }

    pub(super) fn snapshot_eager_state(
@@ -1123,7 +1098,6 @@ impl<'db> UseDefMapBuilder<'db> {
        self.place_states.shrink_to_fit();
        self.reachable_definitions.shrink_to_fit();
        self.bindings_by_use.shrink_to_fit();
        self.node_reachability.shrink_to_fit();
        self.declarations_by_binding.shrink_to_fit();
        self.bindings_by_definition.shrink_to_fit();
        self.eager_snapshots.shrink_to_fit();
@@ -1134,7 +1108,6 @@ impl<'db> UseDefMapBuilder<'db> {
            narrowing_constraints: self.narrowing_constraints.build(),
            reachability_constraints: self.reachability_constraints.build(),
            bindings_by_use: self.bindings_by_use,
            node_reachability: self.node_reachability,
            end_of_scope_places: self.place_states,
            reachable_definitions: self.reachable_definitions,
            declarations_by_binding: self.declarations_by_binding,

@@ -69,14 +69,29 @@ impl<'db> SemanticModel<'db> {
        };
        let ty = Type::module_literal(self.db, self.file, &module);
        let builtin = module.is_known(KnownModule::Builtins);
        crate::types::all_members(self.db, ty)
            .into_iter()
            .map(|member| Completion {
                name: member.name,
                ty: member.ty,

        let mut completions = vec![];
        for crate::types::Member { name, ty } in crate::types::all_members(self.db, ty) {
            completions.push(Completion { name, ty, builtin });
        }
        for submodule_basename in module.all_submodules(self.db) {
            let Some(basename) = ModuleName::new(submodule_basename.as_str()) else {
                continue;
            };
            let mut submodule_name = module_name.clone();
            submodule_name.extend(&basename);

            let Some(submodule) = resolve_module(self.db, &submodule_name) else {
                continue;
            };
            let ty = Type::module_literal(self.db, self.file, &submodule);
            completions.push(Completion {
                name: submodule_basename,
                ty,
                builtin,
            })
                .collect()
            });
        }
        completions
    }

    /// Returns completions for symbols available in a `object.<CURSOR>` context.

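The intent of the new submodule loop above is that module completions surface importable submodules, not just attributes of the already-imported module object. A rough stdlib-only Python analogue of that union (not the project's API; `email` is just a convenient example package):

```python
import importlib
import pkgutil


def module_completions(module_name: str) -> set[str]:
    """Rough analogue of the completion logic above: attributes of the module
    object plus the basenames of its importable submodules."""
    module = importlib.import_module(module_name)
    names = set(dir(module))
    if hasattr(module, "__path__"):  # only packages have submodules
        names.update(info.name for info in pkgutil.iter_modules(module.__path__))
    return names


# module_completions("email") contains both "message_from_string" (an attribute
# of the package) and "mime" (a submodule that is not imported by default).
```
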
Some files were not shown because too many files have changed in this diff.