Compare commits

..

3 Commits

Author SHA1 Message Date
Jack O'Connor
3f22497bdd WIP: defer walking function bodies to end-of-scope in SemanticIndexBuilder
This is intended to fix the one failing test from the previous commit.
And it actually does fix it! But it also causes a huge number of other
tests to fail. The minimized repro seems to be this:

```
$ cat test.py
class Foo:
    pass
foo = Foo()
$ ty check test.py
error[panic]: Panicked at crates/ty_python_semantic/src/types.rs:156:38 when checking `/tmp/test.py`: `Failed to retrieve the inferred type for an `ast::Expr` node passed to `TypeInference::expression_type()`. The `TypeInferenceBuilder` should infer and store types for all `ast::Expr` nodes in any `TypeInference` region it analyzes.`
info: This indicates a bug in ty.
info: If you could open an issue at https://github.com/astral-sh/ty/issues/new?title=%5Bpanic%5D, we'd be very appreciative!
info: Platform: linux x86_64
info: Args: ["/home/jacko/astral/ruff/target-mold/debug/ty", "check", "test.py"]
info: run with `RUST_BACKTRACE=1` environment variable to show the full backtrace information
info: query stacktrace:
   0: FunctionType < 'db >::signature_(Id(5007))
             at crates/ty_python_semantic/src/types/function.rs:595
             cycle heads: infer_scope_types(Id(c62)) -> IterationCount(0), FunctionType < 'db >::signature_(Id(5007)) -> IterationCount(0), FunctionType < 'db >::signature_(Id(5000)) -> IterationCount(0)
   1: infer_expression_types(Id(1463))
             at crates/ty_python_semantic/src/types/infer.rs:235
   2: infer_definition_types(Id(11ab))
             at crates/ty_python_semantic/src/types/infer.rs:159
   3: infer_scope_types(Id(c62))
             at crates/ty_python_semantic/src/types/infer.rs:130
             cycle heads: infer_scope_types(Id(c62)) -> IterationCount(0)
   4: FunctionType < 'db >::signature_(Id(5000))
             at crates/ty_python_semantic/src/types/function.rs:595
   5: infer_expression_types(Id(1400))
             at crates/ty_python_semantic/src/types/infer.rs:235
   6: infer_definition_types(Id(1001))
             at crates/ty_python_semantic/src/types/infer.rs:159
   7: infer_scope_types(Id(c00))
             at crates/ty_python_semantic/src/types/infer.rs:130
   8: check_file_impl(Id(800))
             at crates/ty_project/src/lib.rs:474
```
2025-07-10 16:46:40 -07:00
Jack O'Connor
05cf7c3458 WIP: move the InvalidNonlocal check to SemanticIndexBuilder
This makes one test case fail, basically this:

```py
def f():
    def g():
        nonlocal x  # allowed!
    x = 1
```
2025-07-10 16:45:01 -07:00
Jack O'Connor
664a9a28dc [ty] add support for nonlocal statements 2025-07-10 11:13:47 -07:00
127 changed files with 1076 additions and 5083 deletions

View File

@@ -407,11 +407,20 @@ jobs:
run: rustup default "${MSRV}"
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Build tests"
- name: "Install cargo nextest"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-insta
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
MSRV: ${{ steps.msrv.outputs.value }}
run: cargo "+${MSRV}" test --no-run --all-features
run: cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest
cargo-fuzz-build:
name: "cargo fuzz build"

View File

@@ -1,33 +1,5 @@
# Changelog
## 0.12.3
### Preview features
- \[`flake8-bugbear`\] Support non-context-manager calls in `B017` ([#19063](https://github.com/astral-sh/ruff/pull/19063))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH100`, `PTH106`, `PTH107`, `PTH108`, `PTH110`, `PTH111`, `PTH112`, `PTH113`, `PTH114`, `PTH115`, `PTH117`, `PTH119`, `PTH120` ([#19213](https://github.com/astral-sh/ruff/pull/19213))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH203`, `PTH204`, `PTH205` ([#18922](https://github.com/astral-sh/ruff/pull/18922))
### Bug fixes
- \[`flake8-return`\] Fix false-positive for variables used inside nested functions in `RET504` ([#18433](https://github.com/astral-sh/ruff/pull/18433))
- Treat form feed as valid whitespace before a line continuation ([#19220](https://github.com/astral-sh/ruff/pull/19220))
- \[`flake8-type-checking`\] Fix syntax error introduced by fix (`TC008`) ([#19150](https://github.com/astral-sh/ruff/pull/19150))
- \[`pyupgrade`\] Keyword arguments in `super` should suppress the `UP008` fix ([#19131](https://github.com/astral-sh/ruff/pull/19131))
### Documentation
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI007`, `PYI008`) ([#19103](https://github.com/astral-sh/ruff/pull/19103))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM116`) ([#19111](https://github.com/astral-sh/ruff/pull/19111))
- \[`flake8-type-checking`\] Make example error out-of-the-box (`TC001`) ([#19151](https://github.com/astral-sh/ruff/pull/19151))
- \[`flake8-use-pathlib`\] Make example error out-of-the-box (`PTH210`) ([#19189](https://github.com/astral-sh/ruff/pull/19189))
- \[`pycodestyle`\] Make example error out-of-the-box (`E272`) ([#19191](https://github.com/astral-sh/ruff/pull/19191))
- \[`pycodestyle`\] Make example not raise unnecessary `SyntaxError` (`E114`) ([#19190](https://github.com/astral-sh/ruff/pull/19190))
- \[`pydoclint`\] Make example error out-of-the-box (`DOC501`) ([#19218](https://github.com/astral-sh/ruff/pull/19218))
- \[`pylint`, `pyupgrade`\] Fix syntax errors in examples (`PLW1501`, `UP028`) ([#19127](https://github.com/astral-sh/ruff/pull/19127))
- \[`pylint`\] Update `missing-maxsplit-arg` docs and error to suggest proper usage (`PLC0207`) ([#18949](https://github.com/astral-sh/ruff/pull/18949))
- \[`flake8-bandit`\] Make example error out-of-the-box (`S412`) ([#19241](https://github.com/astral-sh/ruff/pull/19241))
## 0.12.2
### Preview features

11
Cargo.lock generated
View File

@@ -2711,7 +2711,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.12.3"
version = "0.12.2"
dependencies = [
"anyhow",
"argfile",
@@ -2852,7 +2852,6 @@ dependencies = [
"salsa",
"schemars",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.12",
"tracing",
@@ -2962,7 +2961,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.12.3"
version = "0.12.2"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3295,7 +3294,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.12.3"
version = "0.12.2"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -4153,12 +4152,9 @@ version = "0.0.0"
dependencies = [
"bitflags 2.9.1",
"insta",
"regex",
"ruff_db",
"ruff_python_ast",
"ruff_python_parser",
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"salsa",
@@ -4273,7 +4269,6 @@ dependencies = [
"serde",
"serde_json",
"shellexpand",
"thiserror 2.0.12",
"tracing",
"tracing-subscriber",
"ty_ide",

View File

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.3/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.2/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.2/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.3
rev: v0.12.2
hooks:
# Run the linter.
- id: ruff-check

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.3"
version = "0.12.2"
publish = true
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -439,7 +439,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
if cli.statistics {
printer.write_statistics(&diagnostics, &mut summary_writer)?;
} else {
printer.write_once(&diagnostics, &mut summary_writer, preview)?;
printer.write_once(&diagnostics, &mut summary_writer)?;
}
if !cli.exit_zero {

View File

@@ -9,14 +9,13 @@ use itertools::{Itertools, iterate};
use ruff_linter::linter::FixTable;
use serde::Serialize;
use ruff_db::diagnostic::{
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
};
use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, JunitEmitter,
PylintEmitter, RdjsonEmitter, SarifEmitter, TextEmitter,
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, RdjsonEmitter, SarifEmitter,
TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
@@ -203,7 +202,6 @@ impl Printer {
&self,
diagnostics: &Diagnostics,
writer: &mut dyn Write,
preview: bool,
) -> Result<()> {
if matches!(self.log_level, LogLevel::Silent) {
return Ok(());
@@ -231,21 +229,13 @@ impl Printer {
match self.format {
OutputFormat::Json => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Json)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
JsonEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Rdjson => {
RdjsonEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::JsonLines => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::JsonLines)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
JsonLinesEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Junit => {
JunitEmitter.emit(writer, &diagnostics.inner, &context)?;
@@ -293,11 +283,7 @@ impl Printer {
PylintEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Azure => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Azure)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
AzureEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Sarif => {
SarifEmitter.emit(writer, &diagnostics.inner, &context)?;

View File

@@ -5692,57 +5692,3 @@ class Foo:
"
);
}
#[test_case::test_case("concise")]
#[test_case::test_case("full")]
#[test_case::test_case("json")]
#[test_case::test_case("json-lines")]
#[test_case::test_case("junit")]
#[test_case::test_case("grouped")]
#[test_case::test_case("github")]
#[test_case::test_case("gitlab")]
#[test_case::test_case("pylint")]
#[test_case::test_case("rdjson")]
#[test_case::test_case("azure")]
#[test_case::test_case("sarif")]
fn output_format(output_format: &str) -> Result<()> {
const CONTENT: &str = "\
import os # F401
x = y # F821
match 42: # invalid-syntax
case _: ...
";
let tempdir = TempDir::new()?;
let input = tempdir.path().join("input.py");
fs::write(&input, CONTENT)?;
let snapshot = format!("output_format_{output_format}");
insta::with_settings!({
filters => vec![
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r#""[^"]+\\?/?input.py"#, r#""[TMP]/input.py"#),
(ruff_linter::VERSION, "[VERSION]"),
]
}, {
assert_cmd_snapshot!(
snapshot,
Command::new(get_cargo_bin(BIN_NAME))
.args([
"check",
"--no-cache",
"--output-format",
output_format,
"--select",
"F401,F821",
"--target-version",
"py39",
"input.py",
])
.current_dir(&tempdir),
);
});
Ok(())
}

View File

@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- azure
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=2;columnnumber=5;code=F821;]Undefined name `y`
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=3;columnnumber=1;]SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr -----

View File

@@ -1,25 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- concise
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1:8: F401 [*] `os` imported but unused
input.py:2:5: F821 Undefined name `y`
input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 3 errors.
[*] 1 fixable with the `--fix` option.
----- stderr -----

View File

@@ -1,49 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- full
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1:8: F401 [*] `os` imported but unused
|
1 | import os # F401
| ^^ F401
2 | x = y # F821
3 | match 42: # invalid-syntax
|
= help: Remove unused import: `os`
input.py:2:5: F821 Undefined name `y`
|
1 | import os # F401
2 | x = y # F821
| ^ F821
3 | match 42: # invalid-syntax
4 | case _: ...
|
input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
1 | import os # F401
2 | x = y # F821
3 | match 42: # invalid-syntax
| ^^^^^
4 | case _: ...
|
Found 3 errors.
[*] 1 fixable with the `--fix` option.
----- stderr -----

View File

@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- github
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
::error title=Ruff (F401),file=[TMP]/input.py,line=1,col=8,endLine=1,endColumn=10::input.py:1:8: F401 `os` imported but unused
::error title=Ruff (F821),file=[TMP]/input.py,line=2,col=5,endLine=2,endColumn=6::input.py:2:5: F821 Undefined name `y`
::error title=Ruff,file=[TMP]/input.py,line=3,col=1,endLine=3,endColumn=6::input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr -----

View File

@@ -1,60 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- gitlab
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
[
{
"check_name": "F401",
"description": "`os` imported but unused",
"fingerprint": "4dbad37161e65c72",
"location": {
"lines": {
"begin": 1,
"end": 1
},
"path": "input.py"
},
"severity": "major"
},
{
"check_name": "F821",
"description": "Undefined name `y`",
"fingerprint": "7af59862a085230",
"location": {
"lines": {
"begin": 2,
"end": 2
},
"path": "input.py"
},
"severity": "major"
},
{
"check_name": "syntax-error",
"description": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
"fingerprint": "e558cec859bb66e8",
"location": {
"lines": {
"begin": 3,
"end": 3
},
"path": "input.py"
},
"severity": "major"
}
]
----- stderr -----

View File

@@ -1,27 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- grouped
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:
1:8 F401 [*] `os` imported but unused
2:5 F821 Undefined name `y`
3:1 SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 3 errors.
[*] 1 fixable with the `--fix` option.
----- stderr -----

View File

@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- json-lines
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"[TMP]/input.py","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":null,"code":"F821","end_location":{"column":6,"row":2},"filename":"[TMP]/input.py","fix":null,"location":{"column":5,"row":2},"message":"Undefined name `y`","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/undefined-name"}
{"cell":null,"code":null,"end_location":{"column":6,"row":3},"filename":"[TMP]/input.py","fix":null,"location":{"column":1,"row":3},"message":"SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)","noqa_row":null,"url":null}
----- stderr -----

View File

@@ -1,88 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- json
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
[
{
"cell": null,
"code": "F401",
"end_location": {
"column": 10,
"row": 1
},
"filename": "[TMP]/input.py",
"fix": {
"applicability": "safe",
"edits": [
{
"content": "",
"end_location": {
"column": 1,
"row": 2
},
"location": {
"column": 1,
"row": 1
}
}
],
"message": "Remove unused import: `os`"
},
"location": {
"column": 8,
"row": 1
},
"message": "`os` imported but unused",
"noqa_row": 1,
"url": "https://docs.astral.sh/ruff/rules/unused-import"
},
{
"cell": null,
"code": "F821",
"end_location": {
"column": 6,
"row": 2
},
"filename": "[TMP]/input.py",
"fix": null,
"location": {
"column": 5,
"row": 2
},
"message": "Undefined name `y`",
"noqa_row": 2,
"url": "https://docs.astral.sh/ruff/rules/undefined-name"
},
{
"cell": null,
"code": null,
"end_location": {
"column": 6,
"row": 3
},
"filename": "[TMP]/input.py",
"fix": null,
"location": {
"column": 1,
"row": 3
},
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
"noqa_row": null,
"url": null
}
]
----- stderr -----

View File

@@ -1,34 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- junit
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="3" failures="3" errors="0">
<testsuite name="[TMP]/input.py" tests="3" disabled="0" errors="0" failures="3" package="org.ruff">
<testcase name="org.ruff.F401" classname="[TMP]/input" line="1" column="8">
<failure message="`os` imported but unused">line 1, col 8, `os` imported but unused</failure>
</testcase>
<testcase name="org.ruff.F821" classname="[TMP]/input" line="2" column="5">
<failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
</testcase>
<testcase name="org.ruff" classname="[TMP]/input" line="3" column="1">
<failure message="SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
</testcase>
</testsuite>
</testsuites>
----- stderr -----

View File

@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- pylint
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1: [F401] `os` imported but unused
input.py:2: [F821] Undefined name `y`
input.py:3: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr -----

View File

@@ -1,103 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- rdjson
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
{
"diagnostics": [
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/unused-import",
"value": "F401"
},
"location": {
"path": "[TMP]/input.py",
"range": {
"end": {
"column": 10,
"line": 1
},
"start": {
"column": 8,
"line": 1
}
}
},
"message": "`os` imported but unused",
"suggestions": [
{
"range": {
"end": {
"column": 1,
"line": 2
},
"start": {
"column": 1,
"line": 1
}
},
"text": ""
}
]
},
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/undefined-name",
"value": "F821"
},
"location": {
"path": "[TMP]/input.py",
"range": {
"end": {
"column": 6,
"line": 2
},
"start": {
"column": 5,
"line": 2
}
}
},
"message": "Undefined name `y`"
},
{
"code": {
"url": null,
"value": null
},
"location": {
"path": "[TMP]/input.py",
"range": {
"end": {
"column": 6,
"line": 3
},
"start": {
"column": 1,
"line": 3
}
}
},
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
}
],
"severity": "warning",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}
----- stderr -----

View File

@@ -1,142 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- sarif
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
{
"$schema": "https://json.schemastore.org/sarif-2.1.0.json",
"runs": [
{
"results": [
{
"level": "error",
"locations": [
{
"physicalLocation": {
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"region": {
"endColumn": 10,
"endLine": 1,
"startColumn": 8,
"startLine": 1
}
}
}
],
"message": {
"text": "`os` imported but unused"
},
"ruleId": "F401"
},
{
"level": "error",
"locations": [
{
"physicalLocation": {
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"region": {
"endColumn": 6,
"endLine": 2,
"startColumn": 5,
"startLine": 2
}
}
}
],
"message": {
"text": "Undefined name `y`"
},
"ruleId": "F821"
},
{
"level": "error",
"locations": [
{
"physicalLocation": {
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"region": {
"endColumn": 6,
"endLine": 3,
"startColumn": 1,
"startLine": 3
}
}
}
],
"message": {
"text": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
},
"ruleId": null
}
],
"tool": {
"driver": {
"informationUri": "https://github.com/astral-sh/ruff",
"name": "ruff",
"rules": [
{
"fullDescription": {
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/source/libraries.html#library-interface-public-and-private-symbols)\n"
},
"help": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
},
"helpUri": "https://docs.astral.sh/ruff/rules/unused-import",
"id": "F401",
"properties": {
"id": "F401",
"kind": "Pyflakes",
"name": "unused-import",
"problem.severity": "error"
},
"shortDescription": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
}
},
{
"fullDescription": {
"text": "## What it does\nChecks for uses of undefined names.\n\n## Why is this bad?\nAn undefined name is likely to raise `NameError` at runtime.\n\n## Example\n```python\ndef double():\n return n * 2 # raises `NameError` if `n` is undefined when `double` is called\n```\n\nUse instead:\n```python\ndef double(n):\n return n * 2\n```\n\n## Options\n- [`target-version`]: Can be used to configure which symbols Ruff will understand\n as being available in the `builtins` namespace.\n\n## References\n- [Python documentation: Naming and binding](https://docs.python.org/3/reference/executionmodel.html#naming-and-binding)\n"
},
"help": {
"text": "Undefined name `{name}`. {tip}"
},
"helpUri": "https://docs.astral.sh/ruff/rules/undefined-name",
"id": "F821",
"properties": {
"id": "F821",
"kind": "Pyflakes",
"name": "undefined-name",
"problem.severity": "error"
},
"shortDescription": {
"text": "Undefined name `{name}`. {tip}"
}
}
],
"version": "[VERSION]"
}
}
}
],
"version": "2.1.0"
}
----- stderr -----

View File

@@ -38,7 +38,6 @@ rustc-hash = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
@@ -57,6 +56,6 @@ tempfile = { workspace = true }
[features]
cache = ["ruff_cache"]
os = ["ignore", "dep:etcetera"]
serde = ["camino/serde1", "dep:serde", "dep:serde_json", "ruff_diagnostics/serde"]
serde = ["dep:serde", "camino/serde1"]
# Exposes testing utilities.
testing = ["tracing-subscriber"]

View File

@@ -1,12 +1,13 @@
use std::{fmt::Formatter, sync::Arc};
use render::{FileResolver, Input};
use ruff_diagnostics::Fix;
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
use ruff_annotate_snippets::Level as AnnotateLevel;
use ruff_text_size::{Ranged, TextRange, TextSize};
pub use self::render::{DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input};
pub use self::render::DisplayDiagnostic;
use crate::{Db, files::File};
mod render;
@@ -379,7 +380,7 @@ impl Diagnostic {
}
/// Returns the URL for the rule documentation, if it exists.
pub fn to_ruff_url(&self) -> Option<String> {
pub fn to_url(&self) -> Option<String> {
if self.is_invalid_syntax() {
None
} else {
@@ -431,9 +432,8 @@ impl Diagnostic {
/// Returns the [`SourceFile`] which the message belongs to.
///
/// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
pub fn expect_ruff_source_file(&self) -> &SourceFile {
self.ruff_source_file()
.expect("Expected a ruff source file")
pub fn expect_ruff_source_file(&self) -> SourceFile {
self.expect_primary_span().expect_ruff_file().clone()
}
/// Returns the [`TextRange`] for the diagnostic.
@@ -1174,12 +1174,6 @@ pub struct DisplayDiagnosticConfig {
/// here for now as the most "sensible" place for it to live until
/// we had more concrete use cases. ---AG
context: usize,
/// Whether to use preview formatting for Ruff diagnostics.
#[allow(
dead_code,
reason = "This is currently only used for JSON but will be needed soon for other formats"
)]
preview: bool,
}
impl DisplayDiagnosticConfig {
@@ -1200,14 +1194,6 @@ impl DisplayDiagnosticConfig {
..self
}
}
/// Whether to enable preview behavior or not.
pub fn preview(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
preview: yes,
..self
}
}
}
impl Default for DisplayDiagnosticConfig {
@@ -1216,7 +1202,6 @@ impl Default for DisplayDiagnosticConfig {
format: DiagnosticFormat::default(),
color: false,
context: 2,
preview: false,
}
}
}
@@ -1244,21 +1229,6 @@ pub enum DiagnosticFormat {
///
/// This may use color when printing to a `tty`.
Concise,
/// Print diagnostics in the [Azure Pipelines] format.
///
/// [Azure Pipelines]: https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning
Azure,
/// Print diagnostics in JSON format.
///
/// Unlike `json-lines`, this prints all of the diagnostics as a JSON array.
#[cfg(feature = "serde")]
Json,
/// Print diagnostics in JSON format, one per line.
///
/// This will print each diagnostic as a separate JSON object on its own line. See the `json`
/// format for an array of all diagnostics. See <https://jsonlines.org/> for more details.
#[cfg(feature = "serde")]
JsonLines,
}
/// A representation of the kinds of messages inside a diagnostic.

View File

@@ -4,7 +4,6 @@ use ruff_annotate_snippets::{
Annotation as AnnotateAnnotation, Level as AnnotateLevel, Message as AnnotateMessage,
Renderer as AnnotateRenderer, Snippet as AnnotateSnippet,
};
use ruff_notebook::{Notebook, NotebookIndex};
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
use ruff_text_size::{TextRange, TextSize};
@@ -18,17 +17,9 @@ use crate::{
use super::{
Annotation, Diagnostic, DiagnosticFormat, DiagnosticSource, DisplayDiagnosticConfig, Severity,
SubDiagnostic, UnifiedFile,
SubDiagnostic,
};
use azure::AzureRenderer;
mod azure;
#[cfg(feature = "serde")]
mod json;
#[cfg(feature = "serde")]
mod json_lines;
/// A type that implements `std::fmt::Display` for diagnostic rendering.
///
/// It is created via [`Diagnostic::display`].
@@ -43,6 +34,7 @@ mod json_lines;
pub struct DisplayDiagnostic<'a> {
config: &'a DisplayDiagnosticConfig,
resolver: &'a dyn FileResolver,
annotate_renderer: AnnotateRenderer,
diag: &'a Diagnostic,
}
@@ -52,9 +44,16 @@ impl<'a> DisplayDiagnostic<'a> {
config: &'a DisplayDiagnosticConfig,
diag: &'a Diagnostic,
) -> DisplayDiagnostic<'a> {
let annotate_renderer = if config.color {
AnnotateRenderer::styled()
} else {
AnnotateRenderer::plain()
};
DisplayDiagnostic {
config,
resolver,
annotate_renderer,
diag,
}
}
@@ -62,131 +61,68 @@ impl<'a> DisplayDiagnostic<'a> {
impl std::fmt::Display for DisplayDiagnostic<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
DisplayDiagnostics::new(self.resolver, self.config, std::slice::from_ref(self.diag)).fmt(f)
}
}
let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};
/// A type that implements `std::fmt::Display` for rendering a collection of diagnostics.
///
/// It is intended for collections of diagnostics that need to be serialized together, as is the
/// case for JSON, for example.
///
/// See [`DisplayDiagnostic`] for rendering individual `Diagnostic`s and details about the lifetime
/// constraints.
pub struct DisplayDiagnostics<'a> {
// Rendering options (output format, color, context lines, preview flag).
config: &'a DisplayDiagnosticConfig,
// Maps files referenced by the diagnostics to their paths and contents.
resolver: &'a dyn FileResolver,
// The diagnostics rendered by the `Display` impl, in the order given.
diagnostics: &'a [Diagnostic],
}
if matches!(self.config.format, DiagnosticFormat::Concise) {
let (severity, severity_style) = match self.diag.severity() {
Severity::Info => ("info", stylesheet.info),
Severity::Warning => ("warning", stylesheet.warning),
Severity::Error => ("error", stylesheet.error),
Severity::Fatal => ("fatal", stylesheet.error),
};
impl<'a> DisplayDiagnostics<'a> {
    /// Creates a displayable wrapper around `diagnostics`, resolving file
    /// contents through `resolver` and rendering according to `config`.
    pub fn new(
        resolver: &'a dyn FileResolver,
        config: &'a DisplayDiagnosticConfig,
        diagnostics: &'a [Diagnostic],
    ) -> DisplayDiagnostics<'a> {
        Self {
            resolver,
            config,
            diagnostics,
        }
    }
}
write!(
f,
"{severity}[{id}]",
severity = fmt_styled(severity, severity_style),
id = fmt_styled(self.diag.id(), stylesheet.emphasis)
)?;
impl std::fmt::Display for DisplayDiagnostics<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self.config.format {
DiagnosticFormat::Concise => {
let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};
if let Some(span) = self.diag.primary_span() {
write!(
f,
" {path}",
path = fmt_styled(span.file().path(self.resolver), stylesheet.emphasis)
)?;
if let Some(range) = span.range() {
let diagnostic_source = span.file().diagnostic_source(self.resolver);
let start = diagnostic_source
.as_source_code()
.line_column(range.start());
for diag in self.diagnostics {
let (severity, severity_style) = match diag.severity() {
Severity::Info => ("info", stylesheet.info),
Severity::Warning => ("warning", stylesheet.warning),
Severity::Error => ("error", stylesheet.error),
Severity::Fatal => ("fatal", stylesheet.error),
};
write!(
f,
"{severity}[{id}]",
severity = fmt_styled(severity, severity_style),
id = fmt_styled(diag.id(), stylesheet.emphasis)
":{line}:{col}",
line = fmt_styled(start.line, stylesheet.emphasis),
col = fmt_styled(start.column, stylesheet.emphasis),
)?;
if let Some(span) = diag.primary_span() {
write!(
f,
" {path}",
path = fmt_styled(span.file().path(self.resolver), stylesheet.emphasis)
)?;
if let Some(range) = span.range() {
let diagnostic_source = span.file().diagnostic_source(self.resolver);
let start = diagnostic_source
.as_source_code()
.line_column(range.start());
write!(
f,
":{line}:{col}",
line = fmt_styled(start.line, stylesheet.emphasis),
col = fmt_styled(start.column, stylesheet.emphasis),
)?;
}
write!(f, ":")?;
}
writeln!(f, " {message}", message = diag.concise_message())?;
}
write!(f, ":")?;
}
DiagnosticFormat::Full => {
let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};
let mut renderer = if self.config.color {
AnnotateRenderer::styled()
} else {
AnnotateRenderer::plain()
};
renderer = renderer
.error(stylesheet.error)
.warning(stylesheet.warning)
.info(stylesheet.info)
.note(stylesheet.note)
.help(stylesheet.help)
.line_no(stylesheet.line_no)
.emphasis(stylesheet.emphasis)
.none(stylesheet.none);
for diag in self.diagnostics {
let resolved = Resolved::new(self.resolver, diag);
let renderable = resolved.to_renderable(self.config.context);
for diag in renderable.diagnostics.iter() {
writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
}
writeln!(f)?;
}
}
DiagnosticFormat::Azure => {
AzureRenderer::new(self.resolver).render(f, self.diagnostics)?;
}
#[cfg(feature = "serde")]
DiagnosticFormat::Json => {
json::JsonRenderer::new(self.resolver, self.config).render(f, self.diagnostics)?;
}
#[cfg(feature = "serde")]
DiagnosticFormat::JsonLines => {
json_lines::JsonLinesRenderer::new(self.resolver, self.config)
.render(f, self.diagnostics)?;
}
return writeln!(f, " {message}", message = self.diag.concise_message());
}
Ok(())
let mut renderer = self.annotate_renderer.clone();
renderer = renderer
.error(stylesheet.error)
.warning(stylesheet.warning)
.info(stylesheet.info)
.note(stylesheet.note)
.help(stylesheet.help)
.line_no(stylesheet.line_no)
.emphasis(stylesheet.emphasis)
.none(stylesheet.none);
let resolved = Resolved::new(self.resolver, self.diag);
let renderable = resolved.to_renderable(self.config.context);
for diag in renderable.diagnostics.iter() {
writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
}
writeln!(f)
}
}
@@ -699,12 +635,6 @@ pub trait FileResolver {
/// Returns the input contents associated with the file given.
fn input(&self, file: File) -> Input;
/// Returns the [`NotebookIndex`] associated with the file given, if it's a Jupyter notebook.
fn notebook_index(&self, file: &UnifiedFile) -> Option<NotebookIndex>;
/// Returns whether the file given is a Jupyter notebook.
fn is_notebook(&self, file: &UnifiedFile) -> bool;
}
impl<T> FileResolver for T
@@ -721,25 +651,6 @@ where
line_index: line_index(self, file),
}
}
fn notebook_index(&self, file: &UnifiedFile) -> Option<NotebookIndex> {
match file {
UnifiedFile::Ty(file) => self
.input(*file)
.text
.as_notebook()
.map(Notebook::index)
.cloned(),
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
}
}
fn is_notebook(&self, file: &UnifiedFile) -> bool {
match file {
UnifiedFile::Ty(file) => self.input(*file).text.as_notebook().is_some(),
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
}
}
}
impl FileResolver for &dyn Db {
@@ -753,25 +664,6 @@ impl FileResolver for &dyn Db {
line_index: line_index(*self, file),
}
}
fn notebook_index(&self, file: &UnifiedFile) -> Option<NotebookIndex> {
match file {
UnifiedFile::Ty(file) => self
.input(*file)
.text
.as_notebook()
.map(Notebook::index)
.cloned(),
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
}
}
fn is_notebook(&self, file: &UnifiedFile) -> bool {
match file {
UnifiedFile::Ty(file) => self.input(*file).text.as_notebook().is_some(),
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
}
}
}
/// An abstraction over a unit of user input.
@@ -832,9 +724,7 @@ fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
#[cfg(test)]
mod tests {
use ruff_diagnostics::{Edit, Fix};
use crate::diagnostic::{Annotation, DiagnosticId, SecondaryCode, Severity, Span};
use crate::diagnostic::{Annotation, DiagnosticId, Severity, Span};
use crate::files::system_path_to_file;
use crate::system::{DbWithWritableSystem, SystemPath};
use crate::tests::TestDb;
@@ -2231,7 +2121,7 @@ watermelon
/// A small harness for setting up an environment specifically for testing
/// diagnostic rendering.
pub(super) struct TestEnvironment {
struct TestEnvironment {
db: TestDb,
config: DisplayDiagnosticConfig,
}
@@ -2240,7 +2130,7 @@ watermelon
/// Create a new test harness.
///
/// This uses the default diagnostic rendering configuration.
pub(super) fn new() -> TestEnvironment {
fn new() -> TestEnvironment {
TestEnvironment {
db: TestDb::new(),
config: DisplayDiagnosticConfig::default(),
@@ -2259,26 +2149,8 @@ watermelon
self.config = config;
}
/// Set the output format to use in diagnostic rendering.
pub(super) fn format(&mut self, format: DiagnosticFormat) {
let mut config = std::mem::take(&mut self.config);
config = config.format(format);
self.config = config;
}
/// Enable preview functionality for diagnostic rendering.
#[allow(
dead_code,
reason = "This is currently only used for JSON but will be needed soon for other formats"
)]
pub(super) fn preview(&mut self, yes: bool) {
let mut config = std::mem::take(&mut self.config);
config = config.preview(yes);
self.config = config;
}
/// Add a file with the given path and contents to this environment.
pub(super) fn add(&mut self, path: &str, contents: &str) {
fn add(&mut self, path: &str, contents: &str) {
let path = SystemPath::new(path);
self.db.write_file(path, contents).unwrap();
}
@@ -2328,7 +2200,7 @@ watermelon
/// A convenience function for returning a builder for a diagnostic
/// with "error" severity and canned values for its identifier
/// and message.
pub(super) fn err(&mut self) -> DiagnosticBuilder<'_> {
fn err(&mut self) -> DiagnosticBuilder<'_> {
self.builder(
"test-diagnostic",
Severity::Error,
@@ -2354,12 +2226,6 @@ watermelon
DiagnosticBuilder { env: self, diag }
}
/// A convenience function for returning a builder for an invalid syntax diagnostic.
fn invalid_syntax(&mut self, message: &str) -> DiagnosticBuilder<'_> {
let diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, message);
DiagnosticBuilder { env: self, diag }
}
/// Returns a builder for tersely constructing sub-diagnostics.
fn sub_builder(&mut self, severity: Severity, message: &str) -> SubDiagnosticBuilder<'_> {
let subdiag = SubDiagnostic::new(severity, message);
@@ -2369,18 +2235,9 @@ watermelon
/// Render the given diagnostic into a `String`.
///
/// (This will set the "printed" flag on `Diagnostic`.)
pub(super) fn render(&self, diag: &Diagnostic) -> String {
fn render(&self, diag: &Diagnostic) -> String {
diag.display(&self.db, &self.config).to_string()
}
/// Render the given diagnostics into a `String`.
///
/// See `render` for rendering a single diagnostic.
///
/// (This will set the "printed" flag on `Diagnostic`.)
pub(super) fn render_diagnostics(&self, diagnostics: &[Diagnostic]) -> String {
DisplayDiagnostics::new(&self.db, &self.config, diagnostics).to_string()
}
}
/// A helper builder for tersely populating a `Diagnostic`.
@@ -2389,14 +2246,14 @@ watermelon
/// supported by this builder, and this only needs to be done
/// infrequently, consider doing it more verbosely on `diag`
/// itself.
pub(super) struct DiagnosticBuilder<'e> {
struct DiagnosticBuilder<'e> {
env: &'e mut TestEnvironment,
diag: Diagnostic,
}
impl<'e> DiagnosticBuilder<'e> {
/// Return the built diagnostic.
pub(super) fn build(self) -> Diagnostic {
fn build(self) -> Diagnostic {
self.diag
}
@@ -2445,25 +2302,6 @@ watermelon
self.diag.annotate(ann);
self
}
/// Set the secondary code on the diagnostic.
fn secondary_code(mut self, secondary_code: &str) -> DiagnosticBuilder<'e> {
self.diag
.set_secondary_code(SecondaryCode::new(secondary_code.to_string()));
self
}
/// Set the fix on the diagnostic.
pub(super) fn fix(mut self, fix: Fix) -> DiagnosticBuilder<'e> {
self.diag.set_fix(fix);
self
}
/// Set the noqa offset on the diagnostic.
fn noqa_offset(mut self, noqa_offset: TextSize) -> DiagnosticBuilder<'e> {
self.diag.set_noqa_offset(noqa_offset);
self
}
}
/// A helper builder for tersely populating a `SubDiagnostic`.
@@ -2543,199 +2381,4 @@ watermelon
let offset = TextSize::from(offset.parse::<u32>().unwrap());
(line_number, Some(offset))
}
/// Create Ruff-style diagnostics for testing the various output formats.
///
/// Returns the environment (which owns the test database and rendering
/// config, already set to `format`) together with three canned diagnostics:
/// an unused import, an unused variable, and an undefined name.
pub(crate) fn create_diagnostics(
format: DiagnosticFormat,
) -> (TestEnvironment, Vec<Diagnostic>) {
let mut env = TestEnvironment::new();
env.add(
"fib.py",
r#"import os
def fibonacci(n):
"""Compute the nth number in the Fibonacci sequence."""
x = 1
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
"#,
);
env.add("undef.py", r"if a == 1: pass");
env.format(format);
// Positions are "line:column" pairs; byte offsets for fixes/noqa appear to
// be hand-computed against the sources above — verify if the sources change.
let diagnostics = vec![
env.builder("unused-import", Severity::Error, "`os` imported but unused")
.primary("fib.py", "1:7", "1:9", "Remove unused import: `os`")
.secondary_code("F401")
.fix(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(0),
TextSize::from(10),
))))
.noqa_offset(TextSize::from(7))
.build(),
env.builder(
"unused-variable",
Severity::Error,
"Local variable `x` is assigned to but never used",
)
.primary(
"fib.py",
"6:4",
"6:5",
"Remove assignment to unused variable `x`",
)
.secondary_code("F841")
.fix(Fix::unsafe_edit(Edit::deletion(
TextSize::from(94),
TextSize::from(99),
)))
.noqa_offset(TextSize::from(94))
.build(),
env.builder("undefined-name", Severity::Error, "Undefined name `a`")
.primary("undef.py", "1:3", "1:4", "")
.secondary_code("F821")
.noqa_offset(TextSize::from(3))
.build(),
];
(env, diagnostics)
}
/// Create Ruff-style syntax error diagnostics for testing the various output formats.
///
/// The source deliberately contains two parse errors (an empty `import` list
/// and an unclosed call); the diagnostics mirror the parser's messages.
pub(crate) fn create_syntax_error_diagnostics(
format: DiagnosticFormat,
) -> (TestEnvironment, Vec<Diagnostic>) {
let mut env = TestEnvironment::new();
env.add(
"syntax_errors.py",
r"from os import
if call(foo
def bar():
pass
",
);
env.format(format);
// Syntax-error diagnostics have no secondary code, fix, or noqa offset.
let diagnostics = vec![
env.invalid_syntax("SyntaxError: Expected one or more symbol names after import")
.primary("syntax_errors.py", "1:14", "1:15", "")
.build(),
env.invalid_syntax("SyntaxError: Expected ')', found newline")
.primary("syntax_errors.py", "3:11", "3:12", "")
.build(),
];
(env, diagnostics)
}
/// Create Ruff-style diagnostics for testing the various output formats for a notebook.
///
/// The notebook has three code cells; the diagnostics use positions in the
/// *concatenated* source, which renderers translate back to per-cell
/// locations via the notebook index.
#[allow(
dead_code,
reason = "This is currently only used for JSON but will be needed soon for other formats"
)]
pub(crate) fn create_notebook_diagnostics(
format: DiagnosticFormat,
) -> (TestEnvironment, Vec<Diagnostic>) {
let mut env = TestEnvironment::new();
env.add(
"notebook.ipynb",
r##"
{
"cells": [
{
"cell_type": "code",
"metadata": {},
"outputs": [],
"source": [
"# cell 1\n",
"import os"
]
},
{
"cell_type": "code",
"metadata": {},
"outputs": [],
"source": [
"# cell 2\n",
"import math\n",
"\n",
"print('hello world')"
]
},
{
"cell_type": "code",
"metadata": {},
"outputs": [],
"source": [
"# cell 3\n",
"def foo():\n",
" print()\n",
" x = 1\n"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 5
}
"##,
);
env.format(format);
// Offsets appear hand-computed against the concatenated cell sources —
// TODO confirm against the notebook above if its contents change.
let diagnostics = vec![
env.builder("unused-import", Severity::Error, "`os` imported but unused")
.primary("notebook.ipynb", "2:7", "2:9", "Remove unused import: `os`")
.secondary_code("F401")
.fix(Fix::safe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(9),
TextSize::from(19),
))))
.noqa_offset(TextSize::from(16))
.build(),
env.builder(
"unused-import",
Severity::Error,
"`math` imported but unused",
)
.primary(
"notebook.ipynb",
"4:7",
"4:11",
"Remove unused import: `math`",
)
.secondary_code("F401")
.fix(Fix::safe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(28),
TextSize::from(40),
))))
.noqa_offset(TextSize::from(35))
.build(),
env.builder(
"unused-variable",
Severity::Error,
"Local variable `x` is assigned to but never used",
)
.primary(
"notebook.ipynb",
"10:4",
"10:5",
"Remove assignment to unused variable `x`",
)
.secondary_code("F841")
.fix(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(94),
TextSize::from(104),
))))
.noqa_offset(TextSize::from(98))
.build(),
];
(env, diagnostics)
}
}

View File

@@ -1,83 +0,0 @@
use ruff_source_file::LineColumn;
use crate::diagnostic::{Diagnostic, Severity};
use super::FileResolver;
/// Renders diagnostics as Azure Pipelines logging commands.
pub(super) struct AzureRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> AzureRenderer<'a> {
    /// Builds a renderer that resolves file paths and contents via `resolver`.
    pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
        AzureRenderer { resolver }
    }
}
impl AzureRenderer<'_> {
    /// Writes each diagnostic as an Azure Pipelines `##vso[task.logissue]`
    /// command, one per line.
    ///
    /// Azure only distinguishes two levels, so info/warning map to `warning`
    /// and error/fatal map to `error`.
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        for diag in diagnostics {
            let severity = match diag.severity() {
                Severity::Info | Severity::Warning => "warning",
                Severity::Error | Severity::Fatal => "error",
            };
            write!(f, "##vso[task.logissue type={severity};")?;
            if let Some(span) = diag.primary_span() {
                let filename = span.file().path(self.resolver);
                // BUG FIX: `filename` was computed but unused, and the output
                // contained the literal placeholder `(unknown)` instead of the
                // resolved path; interpolate the real path.
                write!(f, "sourcepath={filename};")?;
                if let Some(range) = span.range() {
                    let location = if self.resolver.notebook_index(span.file()).is_some() {
                        // We can't give a reasonable location for the structured formats,
                        // so we show one that's clearly a fallback
                        LineColumn::default()
                    } else {
                        span.file()
                            .diagnostic_source(self.resolver)
                            .as_source_code()
                            .line_column(range.start())
                    };
                    write!(
                        f,
                        "linenumber={line};columnnumber={col};",
                        line = location.line,
                        col = location.column,
                    )?;
                }
            }
            // The secondary code segment is omitted entirely (not emptied)
            // when the diagnostic has no code.
            writeln!(
                f,
                "{code}]{body}",
                code = diag
                    .secondary_code()
                    .map_or_else(String::new, |code| format!("code={code};")),
                body = diag.body(),
            )?;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{create_diagnostics, create_syntax_error_diagnostics},
    };

    /// Snapshots the Azure rendering of the standard canned diagnostics.
    #[test]
    fn output() {
        let (env, diags) = create_diagnostics(DiagnosticFormat::Azure);
        insta::assert_snapshot!(env.render_diagnostics(&diags));
    }

    /// Snapshots the Azure rendering of canned syntax-error diagnostics.
    #[test]
    fn syntax_errors() {
        let (env, diags) = create_syntax_error_diagnostics(DiagnosticFormat::Azure);
        insta::assert_snapshot!(env.render_diagnostics(&diags));
    }
}

View File

@@ -1,393 +0,0 @@
use serde::{Serialize, Serializer, ser::SerializeSeq};
use serde_json::{Value, json};
use ruff_diagnostics::{Applicability, Edit};
use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, OneIndexed};
use ruff_text_size::Ranged;
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig, SecondaryCode};
use super::FileResolver;
/// Renders diagnostics as a single pretty-printed JSON array.
pub(super) struct JsonRenderer<'a> {
    resolver: &'a dyn FileResolver,
    config: &'a DisplayDiagnosticConfig,
}

impl<'a> JsonRenderer<'a> {
    /// Builds a renderer that resolves files via `resolver` and honors `config`.
    pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
        JsonRenderer { resolver, config }
    }
}
impl JsonRenderer<'_> {
    /// Writes all `diagnostics` to `f` as one pretty-printed (`{:#}`) JSON array.
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        let value = diagnostics_to_json_value(diagnostics, self.resolver, self.config);
        write!(f, "{value:#}")
    }
}
/// Converts `diagnostics` into a `serde_json::Value` array, one element per
/// diagnostic, preserving input order.
fn diagnostics_to_json_value<'a>(
    diagnostics: impl IntoIterator<Item = &'a Diagnostic>,
    resolver: &dyn FileResolver,
    config: &DisplayDiagnosticConfig,
) -> Value {
    let mut values = Vec::new();
    for diagnostic in diagnostics {
        values.push(diagnostic_to_json(diagnostic, resolver, config));
    }
    json!(values)
}
/// Converts one `Diagnostic` into its serializable JSON form.
///
/// Locations come from the primary span (if any); for notebooks they are
/// translated from the concatenated source into per-cell coordinates. In
/// preview mode missing locations/filenames serialize as `null`; in stable
/// mode they fall back to defaults for backwards compatibility.
pub(super) fn diagnostic_to_json<'a>(
diagnostic: &'a Diagnostic,
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
) -> JsonDiagnostic<'a> {
let span = diagnostic.primary_span_ref();
let filename = span.map(|span| span.file().path(resolver));
let range = span.and_then(|span| span.range());
let diagnostic_source = span.map(|span| span.file().diagnostic_source(resolver));
let source_code = diagnostic_source
.as_ref()
.map(|diagnostic_source| diagnostic_source.as_source_code());
// `Some` only when the span's file is a Jupyter notebook.
let notebook_index = span.and_then(|span| resolver.notebook_index(span.file()));
// All locations stay `None` unless a source and range are available below.
let mut start_location = None;
let mut end_location = None;
let mut noqa_location = None;
let mut notebook_cell_index = None;
if let Some(source_code) = source_code {
noqa_location = diagnostic
.noqa_offset()
.map(|offset| source_code.line_column(offset));
if let Some(range) = range {
let mut start = source_code.line_column(range.start());
let mut end = source_code.line_column(range.end());
if let Some(notebook_index) = &notebook_index {
// The cell is derived from the (untranslated) start line; falls
// back to the first cell when the line maps to no cell.
notebook_cell_index =
Some(notebook_index.cell(start.line).unwrap_or(OneIndexed::MIN));
start = notebook_index.translate_line_column(&start);
end = notebook_index.translate_line_column(&end);
noqa_location =
noqa_location.map(|location| notebook_index.translate_line_column(&location));
}
start_location = Some(start);
end_location = Some(end);
}
}
let fix = diagnostic.fix().map(|fix| JsonFix {
applicability: fix.applicability(),
message: diagnostic.suggestion(),
edits: ExpandedEdits {
edits: fix.edits(),
notebook_index,
config,
diagnostic_source,
},
});
// In preview, the locations and filename can be optional.
if config.preview {
JsonDiagnostic {
code: diagnostic.secondary_code(),
url: diagnostic.to_ruff_url(),
message: diagnostic.body(),
fix,
cell: notebook_cell_index,
location: start_location.map(JsonLocation::from),
end_location: end_location.map(JsonLocation::from),
filename,
noqa_row: noqa_location.map(|location| location.line),
}
} else {
// Stable output: substitute defaults (1:1, empty filename) so the
// schema never contains nulls for these fields.
JsonDiagnostic {
code: diagnostic.secondary_code(),
url: diagnostic.to_ruff_url(),
message: diagnostic.body(),
fix,
cell: notebook_cell_index,
location: Some(start_location.unwrap_or_default().into()),
end_location: Some(end_location.unwrap_or_default().into()),
filename: Some(filename.unwrap_or_default()),
noqa_row: noqa_location.map(|location| location.line),
}
}
}
/// The edits of a fix, bundled with the context needed to serialize their
/// locations (see the `Serialize` impl below).
struct ExpandedEdits<'a> {
// The raw edits from the fix, in application order.
edits: &'a [Edit],
// `Some` when the edits target a notebook and need cell translation.
notebook_index: Option<NotebookIndex>,
// Controls preview (nullable-location) vs stable serialization.
config: &'a DisplayDiagnosticConfig,
// Source used to turn byte offsets into line/column; `None` without a span.
diagnostic_source: Option<DiagnosticSource>,
}
/// Serializes each edit as a `JsonEdit`, converting byte offsets to
/// line/column and, for notebooks, translating into per-cell coordinates.
impl Serialize for ExpandedEdits<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut s = serializer.serialize_seq(Some(self.edits.len()))?;
for edit in self.edits {
// Without a source we cannot compute positions; both stay `None`
// (preview serializes nulls, stable substitutes defaults below).
let (location, end_location) = if let Some(diagnostic_source) = &self.diagnostic_source
{
let source_code = diagnostic_source.as_source_code();
let mut location = source_code.line_column(edit.start());
let mut end_location = source_code.line_column(edit.end());
if let Some(notebook_index) = &self.notebook_index {
// There exists a newline between each cell's source code in the
// concatenated source code in Ruff. This newline doesn't actually
// exist in the JSON source field.
//
// Now, certain edits may try to remove this newline, which means
// the edit will spill over to the first character of the next cell.
// If it does, we need to translate the end location to the last
// character of the previous cell.
match (
notebook_index.cell(location.line),
notebook_index.cell(end_location.line),
) {
// NOTE(review): the next two arms have identical bodies
// (end spills into a different cell / past the last cell);
// they could likely be merged — confirm before refactoring.
(Some(start_cell), Some(end_cell)) if start_cell != end_cell => {
debug_assert_eq!(end_location.column.get(), 1);
let prev_row = end_location.line.saturating_sub(1);
end_location = LineColumn {
line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
column: source_code
.line_column(source_code.line_end_exclusive(prev_row))
.column,
};
}
(Some(_), None) => {
debug_assert_eq!(end_location.column.get(), 1);
let prev_row = end_location.line.saturating_sub(1);
end_location = LineColumn {
line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
column: source_code
.line_column(source_code.line_end_exclusive(prev_row))
.column,
};
}
_ => {
end_location = notebook_index.translate_line_column(&end_location);
}
}
location = notebook_index.translate_line_column(&location);
}
(Some(location), Some(end_location))
} else {
(None, None)
};
// In preview, the locations can be optional.
let value = if self.config.preview {
JsonEdit {
content: edit.content().unwrap_or_default(),
location: location.map(JsonLocation::from),
end_location: end_location.map(JsonLocation::from),
}
} else {
JsonEdit {
content: edit.content().unwrap_or_default(),
location: Some(location.unwrap_or_default().into()),
end_location: Some(end_location.unwrap_or_default().into()),
}
};
s.serialize_element(&value)?;
}
s.end()
}
}
/// A serializable version of `Diagnostic`.
///
/// NOTE(review): the previous doc mentioned `Old`/`New` *variants*, but this
/// is already a struct — that text looks stale; confirm and drop it. Fields
/// are declared alphabetically, which fixes the JSON key order.
#[derive(Serialize)]
pub(crate) struct JsonDiagnostic<'a> {
cell: Option<OneIndexed>,
code: Option<&'a SecondaryCode>,
end_location: Option<JsonLocation>,
filename: Option<&'a str>,
fix: Option<JsonFix<'a>>,
location: Option<JsonLocation>,
message: &'a str,
noqa_row: Option<OneIndexed>,
url: Option<String>,
}
/// Serializable form of a fix: its applicability, edits, and optional message.
#[derive(Serialize)]
struct JsonFix<'a> {
applicability: Applicability,
edits: ExpandedEdits<'a>,
message: Option<&'a str>,
}
/// A one-indexed row/column pair as emitted in the JSON output.
#[derive(Serialize)]
struct JsonLocation {
column: OneIndexed,
row: OneIndexed,
}
impl From<LineColumn> for JsonLocation {
// Renames `line` to `row` to match the historical JSON schema.
fn from(location: LineColumn) -> Self {
JsonLocation {
row: location.line,
column: location.column,
}
}
}
/// Serializable form of a single edit; locations are `null`-able in preview.
#[derive(Serialize)]
struct JsonEdit<'a> {
content: &'a str,
end_location: Option<JsonLocation>,
location: Option<JsonLocation>,
}
#[cfg(test)]
mod tests {
use ruff_diagnostics::{Edit, Fix};
use ruff_text_size::TextSize;
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
TestEnvironment, create_diagnostics, create_notebook_diagnostics,
create_syntax_error_diagnostics,
},
};
// Snapshot of the JSON rendering of the canned diagnostics.
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Json);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
// Snapshot of the JSON rendering of canned syntax errors.
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Json);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
// Snapshot of the JSON rendering of canned notebook diagnostics.
#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Json);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
// Stable (non-preview) output: a diagnostic with no file still serializes
// default locations and an empty filename rather than nulls.
#[test]
fn missing_file_stable() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Json);
env.preview(false);
let diag = env
.err()
.fix(Fix::safe_edit(Edit::insertion(
"edit".to_string(),
TextSize::from(0),
)))
.build();
insta::assert_snapshot!(
env.render(&diag),
@r#"
[
{
"cell": null,
"code": null,
"end_location": {
"column": 1,
"row": 1
},
"filename": "",
"fix": {
"applicability": "safe",
"edits": [
{
"content": "edit",
"end_location": {
"column": 1,
"row": 1
},
"location": {
"column": 1,
"row": 1
}
}
],
"message": null
},
"location": {
"column": 1,
"row": 1
},
"message": "main diagnostic message",
"noqa_row": null,
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
}
]
"#,
);
}
// Preview output: the same diagnostic serializes nulls for the missing
// filename and locations instead of fabricated defaults.
#[test]
fn missing_file_preview() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Json);
env.preview(true);
let diag = env
.err()
.fix(Fix::safe_edit(Edit::insertion(
"edit".to_string(),
TextSize::from(0),
)))
.build();
insta::assert_snapshot!(
env.render(&diag),
@r#"
[
{
"cell": null,
"code": null,
"end_location": null,
"filename": null,
"fix": {
"applicability": "safe",
"edits": [
{
"content": "edit",
"end_location": null,
"location": null
}
],
"message": null
},
"location": null,
"message": "main diagnostic message",
"noqa_row": null,
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
}
]
"#,
);
}
}

View File

@@ -1,59 +0,0 @@
use crate::diagnostic::{Diagnostic, DisplayDiagnosticConfig, render::json::diagnostic_to_json};
use super::FileResolver;
/// Renders diagnostics in JSON Lines format: one JSON object per line.
pub(super) struct JsonLinesRenderer<'a> {
    resolver: &'a dyn FileResolver,
    config: &'a DisplayDiagnosticConfig,
}

impl<'a> JsonLinesRenderer<'a> {
    /// Builds a renderer that resolves files via `resolver` and honors `config`.
    pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
        JsonLinesRenderer { resolver, config }
    }
}
impl JsonLinesRenderer<'_> {
    /// Writes each diagnostic as a compact JSON object on its own line
    /// (see <https://jsonlines.org/>).
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        diagnostics.iter().try_for_each(|diag| {
            let value = serde_json::json!(diagnostic_to_json(diag, self.resolver, self.config));
            writeln!(f, "{value}")
        })
    }
}
#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{
            create_diagnostics, create_notebook_diagnostics, create_syntax_error_diagnostics,
        },
    };

    /// Snapshots the JSON Lines rendering of the standard canned diagnostics.
    #[test]
    fn output() {
        let (env, diags) = create_diagnostics(DiagnosticFormat::JsonLines);
        insta::assert_snapshot!(env.render_diagnostics(&diags));
    }

    /// Snapshots the JSON Lines rendering of canned syntax errors.
    #[test]
    fn syntax_errors() {
        let (env, diags) = create_syntax_error_diagnostics(DiagnosticFormat::JsonLines);
        insta::assert_snapshot!(env.render_diagnostics(&diags));
    }

    /// Snapshots the JSON Lines rendering of canned notebook diagnostics.
    #[test]
    fn notebook_output() {
        let (env, diags) = create_notebook_diagnostics(DiagnosticFormat::JsonLines);
        insta::assert_snapshot!(env.render_diagnostics(&diags));
    }
}

View File

@@ -21,19 +21,6 @@ type LockedZipArchive<'a> = MutexGuard<'a, VendoredZipArchive>;
///
/// "Files" in the `VendoredFileSystem` are read-only and immutable.
/// Directories are supported, but symlinks and hardlinks cannot exist.
///
/// # Path separators
///
/// At time of writing (2025-07-11), this implementation always uses `/` as a
/// path separator, even in Windows environments where `\` is traditionally
/// used as a file path separator. Namely, this is only currently used with zip
/// files built by `crates/ty_vendored/build.rs`.
///
/// Callers using this may provide paths that use a `\` as a separator. It will
/// be transparently normalized to `/`.
///
/// This is particularly important because the presence of a trailing separator
/// in a zip file is conventionally used to indicate a directory entry.
#[derive(Clone)]
pub struct VendoredFileSystem {
inner: Arc<Mutex<VendoredZipArchive>>,
@@ -128,68 +115,6 @@ impl VendoredFileSystem {
read_to_string(self, path.as_ref())
}
/// Read the direct children of the directory
/// identified by `path`.
///
/// If `path` is not a directory, then this will
/// return an empty `Vec`.
pub fn read_directory(&self, dir: impl AsRef<VendoredPath>) -> Vec<DirectoryEntry> {
// N.B. We specifically do not return an iterator here to avoid
// holding a lock for the lifetime of the iterator returned.
// That is, it seems like a footgun to keep the zip archive
// locked during iteration, since the unit of work for each
// item in the iterator could be arbitrarily long. Allocating
// up front and stuffing all entries into it is probably the
// simplest solution and what we do here. If this becomes
// a problem, there are other strategies we could pursue.
// (Amortizing allocs, using a different synchronization
// behavior or even exposing additional APIs.) ---AG
//
// The inner non-generic fn keeps the body from being
// monomorphized once per `AsRef<VendoredPath>` impl.
fn read_directory(fs: &VendoredFileSystem, dir: &VendoredPath) -> Vec<DirectoryEntry> {
// A trailing slash is required so that the prefix match below only
// hits entries *inside* the directory (zip directory convention).
let mut normalized = NormalizedVendoredPath::from(dir);
if !normalized.as_str().ends_with('/') {
normalized = normalized.with_trailing_slash();
}
let archive = fs.lock_archive();
let mut entries = vec![];
for name in archive.0.file_names() {
// Any entry that doesn't have the `path` (with a
// trailing slash) as a prefix cannot possibly be in
// the directory referenced by `path`.
let Some(without_dir_prefix) = name.strip_prefix(normalized.as_str()) else {
continue;
};
// Filter out an entry equivalent to the path given
// since we only want children of the directory.
if without_dir_prefix.is_empty() {
continue;
}
// We only want *direct* children. Files that are
// direct children cannot have any slashes (or else
// they are not direct children). Directories that
// are direct children can only have one slash and
// it must be at the end.
//
// (We do this manually ourselves to avoid doing a
// full file lookup and metadata retrieval via the
// `zip` crate.)
let file_type = FileType::from_zip_file_name(without_dir_prefix);
let slash_count = without_dir_prefix.matches('/').count();
match file_type {
FileType::File if slash_count > 0 => continue,
FileType::Directory if slash_count > 1 => continue,
_ => {}
}
entries.push(DirectoryEntry {
path: VendoredPathBuf::from(name),
file_type,
});
}
entries
}
read_directory(self, dir.as_ref())
}
/// Acquire a lock on the underlying zip archive.
/// The call will block until it is able to acquire the lock.
///
@@ -281,14 +206,6 @@ pub enum FileType {
}
impl FileType {
/// Classifies a zip entry name: a trailing `/` denotes a directory
/// (the zip convention); anything else is a plain file.
fn from_zip_file_name(name: &str) -> FileType {
    match name.ends_with('/') {
        true => FileType::Directory,
        false => FileType::File,
    }
}
/// Returns `true` if this is `FileType::File`.
pub const fn is_file(self) -> bool {
matches!(self, Self::File)
}
@@ -327,30 +244,6 @@ impl Metadata {
}
}
/// A single child of a vendored directory: its full path and whether it is a
/// file or a directory.
#[derive(Debug, PartialEq, Eq)]
pub struct DirectoryEntry {
path: VendoredPathBuf,
file_type: FileType,
}
impl DirectoryEntry {
/// Creates an entry from an owned path and its file type.
pub fn new(path: VendoredPathBuf, file_type: FileType) -> Self {
Self { path, file_type }
}
/// Consumes the entry, returning its owned path.
pub fn into_path(self) -> VendoredPathBuf {
self.path
}
/// Borrows the entry's path.
pub fn path(&self) -> &VendoredPath {
&self.path
}
/// Returns whether the entry is a file or a directory.
pub fn file_type(&self) -> FileType {
self.file_type
}
}
/// Newtype wrapper around a ZipArchive.
#[derive(Debug)]
struct VendoredZipArchive(ZipArchive<io::Cursor<Cow<'static, [u8]>>>);
@@ -605,60 +498,6 @@ pub(crate) mod tests {
test_directory("./stdlib/asyncio/../asyncio/")
}
/// Renders the entries of `path` as a sorted, newline-separated string of
/// their paths, for use in snapshot assertions.
fn readdir_snapshot(fs: &VendoredFileSystem, path: &str) -> String {
    let entries = fs.read_directory(VendoredPath::new(path));
    let mut names: Vec<String> = entries
        .into_iter()
        .map(|entry| entry.path().to_string())
        .collect();
    names.sort();
    names.join("\n")
}
// All four spellings of the same directory (with/without `./` and a
// trailing slash) must list identical children.
#[test]
fn read_directory_stdlib() {
let mock_typeshed = mock_typeshed();
assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib"), @r"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib/"), @r"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib"), @r"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib/"), @r"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
}
#[test]
fn read_directory_asyncio() {
    let mock_typeshed = mock_typeshed();
    // Nested directories behave the same as top-level ones: every path
    // spelling resolves to `stdlib/asyncio` and lists its single child.
    assert_snapshot!(
        readdir_snapshot(&mock_typeshed, "stdlib/asyncio"),
        @"vendored://stdlib/asyncio/tasks.pyi",
    );
    assert_snapshot!(
        readdir_snapshot(&mock_typeshed, "./stdlib/asyncio"),
        @"vendored://stdlib/asyncio/tasks.pyi",
    );
    assert_snapshot!(
        readdir_snapshot(&mock_typeshed, "stdlib/asyncio/"),
        @"vendored://stdlib/asyncio/tasks.pyi",
    );
    assert_snapshot!(
        readdir_snapshot(&mock_typeshed, "./stdlib/asyncio/"),
        @"vendored://stdlib/asyncio/tasks.pyi",
    );
}
fn test_nonexistent_path(path: &str) {
let mock_typeshed = mock_typeshed();
let path = VendoredPath::new(path);

View File

@@ -17,10 +17,6 @@ impl VendoredPath {
unsafe { &*(path as *const Utf8Path as *const VendoredPath) }
}
pub fn file_name(&self) -> Option<&str> {
self.0.file_name()
}
pub fn to_path_buf(&self) -> VendoredPathBuf {
VendoredPathBuf(self.0.to_path_buf())
}

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.12.3"
version = "0.12.2"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -1,6 +1,6 @@
"""
Should emit:
B017 - on lines 24, 28, 46, 49, 52, 58, 62, 68, and 71
B017 - on lines 24, 28, 46, 49, 52, and 58
"""
import asyncio
import unittest
@@ -56,17 +56,3 @@ def test_pytest_raises():
with contextlib.nullcontext(), pytest.raises(Exception):
raise ValueError("Multiple context managers")
def test_pytest_raises_keyword():
with pytest.raises(expected_exception=Exception):
raise ValueError("Should be flagged")
def test_assert_raises_keyword():
class TestKwargs(unittest.TestCase):
def test_method(self):
with self.assertRaises(exception=Exception):
raise ValueError("Should be flagged")
with self.assertRaises(exception=BaseException):
raise ValueError("Should be flagged")

View File

@@ -181,51 +181,3 @@ class SubclassTestModel2(TestModel4):
# Subclass without __str__
class SubclassTestModel3(TestModel1):
pass
# Test cases for type-annotated abstract models - these should NOT trigger DJ008
from typing import ClassVar
from django_stubs_ext.db.models import TypedModelMeta
class TypeAnnotatedAbstractModel1(models.Model):
"""Model with type-annotated abstract = True - should not trigger DJ008"""
new_field = models.CharField(max_length=10)
class Meta(TypedModelMeta):
abstract: ClassVar[bool] = True
class TypeAnnotatedAbstractModel2(models.Model):
"""Model with type-annotated abstract = True using regular Meta - should not trigger DJ008"""
new_field = models.CharField(max_length=10)
class Meta:
abstract: ClassVar[bool] = True
class TypeAnnotatedAbstractModel3(models.Model):
"""Model with type-annotated abstract = True but without ClassVar - should not trigger DJ008"""
new_field = models.CharField(max_length=10)
class Meta:
abstract: bool = True
class TypeAnnotatedNonAbstractModel(models.Model):
"""Model with type-annotated abstract = False - should trigger DJ008"""
new_field = models.CharField(max_length=10)
class Meta:
abstract: ClassVar[bool] = False
class TypeAnnotatedAbstractModelWithStr(models.Model):
"""Model with type-annotated abstract = True and __str__ method - should not trigger DJ008"""
new_field = models.CharField(max_length=10)
class Meta(TypedModelMeta):
abstract: ClassVar[bool] = True
def __str__(self):
return self.new_field

View File

@@ -422,35 +422,6 @@ def func(a: dict[str, int]) -> list[dict[str, int]]:
services = a["services"]
return services
# See: https://github.com/astral-sh/ruff/issues/14052
def outer() -> list[object]:
@register
async def inner() -> None:
print(layout)
layout = [...]
return layout
def outer() -> list[object]:
with open("") as f:
async def inner() -> None:
print(layout)
layout = [...]
return layout
def outer() -> list[object]:
def inner():
with open("") as f:
async def inner_inner() -> None:
print(layout)
layout = [...]
return layout
# See: https://github.com/astral-sh/ruff/issues/18411
def f():
(#=

View File

@@ -1,5 +0,0 @@
"""This is a docstring."""
"This is not a docstring."
"This is also not a docstring."
x = 1

View File

@@ -4,8 +4,8 @@ use crate::Fix;
use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::rules::{
flake8_import_conventions, flake8_pyi, flake8_pytest_style, flake8_return,
flake8_type_checking, pyflakes, pylint, pyupgrade, refurb, ruff,
flake8_import_conventions, flake8_pyi, flake8_pytest_style, flake8_type_checking, pyflakes,
pylint, pyupgrade, refurb, ruff,
};
/// Run lint rules over the [`Binding`]s.
@@ -25,20 +25,11 @@ pub(crate) fn bindings(checker: &Checker) {
Rule::ForLoopWrites,
Rule::CustomTypeVarForSelf,
Rule::PrivateTypeParameter,
Rule::UnnecessaryAssign,
]) {
return;
}
for (binding_id, binding) in checker.semantic.bindings.iter_enumerated() {
if checker.is_rule_enabled(Rule::UnnecessaryAssign) {
if binding.kind.is_function_definition() {
flake8_return::rules::unnecessary_assign(
checker,
binding.statement(checker.semantic()).unwrap(),
);
}
}
if checker.is_rule_enabled(Rule::UnusedVariable) {
if binding.kind.is_bound_exception()
&& binding.is_unused()

View File

@@ -207,6 +207,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
Rule::UnnecessaryReturnNone,
Rule::ImplicitReturnValue,
Rule::ImplicitReturn,
Rule::UnnecessaryAssign,
Rule::SuperfluousElseReturn,
Rule::SuperfluousElseRaise,
Rule::SuperfluousElseContinue,

View File

@@ -674,7 +674,8 @@ impl SemanticSyntaxContext for Checker<'_> {
| SemanticSyntaxErrorKind::LoadBeforeNonlocalDeclaration { .. }
| SemanticSyntaxErrorKind::NonlocalAndGlobal(_)
| SemanticSyntaxErrorKind::AnnotatedGlobal(_)
| SemanticSyntaxErrorKind::AnnotatedNonlocal(_) => {
| SemanticSyntaxErrorKind::AnnotatedNonlocal(_)
| SemanticSyntaxErrorKind::InvalidNonlocal(_) => {
self.semantic_errors.borrow_mut().push(error);
}
}

View File

@@ -275,12 +275,19 @@ impl<'a> Insertion<'a> {
}
}
/// Find the end of the docstring (first string statement).
/// Find the end of the last docstring.
fn match_docstring_end(body: &[Stmt]) -> Option<TextSize> {
let stmt = body.first()?;
let mut iter = body.iter();
let mut stmt = iter.next()?;
if !is_docstring_stmt(stmt) {
return None;
}
for next in iter {
if !is_docstring_stmt(next) {
break;
}
stmt = next;
}
Some(stmt.end())
}
@@ -360,7 +367,7 @@ mod tests {
.trim_start();
assert_eq!(
insert(contents)?,
Insertion::own_line("", TextSize::from(20), "\n")
Insertion::own_line("", TextSize::from(40), "\n")
);
let contents = r"

View File

@@ -0,0 +1,71 @@
use std::io::Write;
use ruff_db::diagnostic::Diagnostic;
use ruff_source_file::LineColumn;
use crate::message::{Emitter, EmitterContext};
/// Generate error logging commands for Azure Pipelines format.
/// See [documentation](https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning)
#[derive(Default)]
pub struct AzureEmitter;
impl Emitter for AzureEmitter {
    /// Write one `##vso[task.logissue]` logging command per diagnostic.
    ///
    /// # Errors
    ///
    /// Returns an error if writing to `writer` fails.
    fn emit(
        &mut self,
        writer: &mut dyn Write,
        diagnostics: &[Diagnostic],
        context: &EmitterContext,
    ) -> anyhow::Result<()> {
        for diagnostic in diagnostics {
            let filename = diagnostic.expect_ruff_filename();
            let location = if context.is_notebook(&filename) {
                // We can't give a reasonable location for the structured formats,
                // so we show one that's clearly a fallback
                LineColumn::default()
            } else {
                diagnostic.expect_ruff_start_location()
            };
            // Fix: interpolate the diagnostic's filename into `sourcepath`
            // instead of a hard-coded placeholder — the snapshot tests
            // expect e.g. `sourcepath=fib.py`.
            writeln!(
                writer,
                "##vso[task.logissue type=error\
                ;sourcepath={filename};linenumber={line};columnnumber={col};{code}]{body}",
                line = location.line,
                col = location.column,
                code = diagnostic
                    .secondary_code()
                    .map_or_else(String::new, |code| format!("code={code};")),
                body = diagnostic.body(),
            )?;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;

    use crate::message::AzureEmitter;
    use crate::message::tests::{
        capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
    };

    // Snapshot of the Azure logging commands for ordinary lint diagnostics.
    #[test]
    fn output() {
        let mut emitter = AzureEmitter;
        let content = capture_emitter_output(&mut emitter, &create_diagnostics());
        assert_snapshot!(content);
    }

    // Snapshot for syntax-error diagnostics, which carry no rule code.
    #[test]
    fn syntax_errors() {
        let mut emitter = AzureEmitter;
        let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
        assert_snapshot!(content);
    }
}

View File

@@ -21,7 +21,7 @@ use crate::{Applicability, Fix};
/// * Compute the diff from the [`Edit`] because diff calculation is expensive.
pub(super) struct Diff<'a> {
fix: &'a Fix,
source_code: &'a SourceFile,
source_code: SourceFile,
}
impl<'a> Diff<'a> {

View File

@@ -0,0 +1,220 @@
use std::io::Write;
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use serde_json::{Value, json};
use ruff_db::diagnostic::Diagnostic;
use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, OneIndexed, SourceCode};
use ruff_text_size::Ranged;
use crate::Edit;
use crate::message::{Emitter, EmitterContext};
#[derive(Default)]
pub struct JsonEmitter;
impl Emitter for JsonEmitter {
    /// Serialize every diagnostic into one pretty-printed JSON array.
    ///
    /// # Errors
    ///
    /// Returns an error if serialization or the underlying write fails.
    fn emit(
        &mut self,
        writer: &mut dyn Write,
        diagnostics: &[Diagnostic],
        context: &EmitterContext,
    ) -> anyhow::Result<()> {
        let messages = ExpandedMessages {
            diagnostics,
            context,
        };
        serde_json::to_writer_pretty(writer, &messages)?;
        Ok(())
    }
}
/// Serializes a slice of diagnostics as a JSON array of expanded messages.
struct ExpandedMessages<'a> {
    diagnostics: &'a [Diagnostic],
    context: &'a EmitterContext<'a>,
}

impl Serialize for ExpandedMessages<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // `collect_seq` drives `serialize_seq` for us, using the slice
        // iterator's exact length as the sequence-length hint.
        serializer.collect_seq(
            self.diagnostics
                .iter()
                .map(|message| message_to_json_value(message, self.context)),
        )
    }
}
/// Render a single diagnostic as the JSON object shape shared by the
/// JSON and JSON-lines emitters.
///
/// Locations are 1-based line/column pairs. When the file is a notebook,
/// the start, end, and `noqa` locations are translated from the
/// concatenated-source coordinates into per-cell coordinates, and the
/// containing cell index is reported under `"cell"` (otherwise `null`).
pub(crate) fn message_to_json_value(message: &Diagnostic, context: &EmitterContext) -> Value {
    let source_file = message.expect_ruff_source_file();
    let source_code = source_file.to_source_code();
    let filename = message.expect_ruff_filename();
    let notebook_index = context.notebook_index(&filename);

    // Serialize the fix (if any) with its edits resolved against this
    // file's source, so edit locations get notebook translation too.
    let fix = message.fix().map(|fix| {
        json!({
            "applicability": fix.applicability(),
            "message": message.suggestion(),
            "edits": &ExpandedEdits { edits: fix.edits(), source_code: &source_code, notebook_index },
        })
    });

    let mut start_location = source_code.line_column(message.expect_range().start());
    let mut end_location = source_code.line_column(message.expect_range().end());
    let mut noqa_location = message
        .noqa_offset()
        .map(|offset| source_code.line_column(offset));
    let mut notebook_cell_index = None;

    if let Some(notebook_index) = notebook_index {
        // Fall back to the first cell if the row precedes any known cell.
        notebook_cell_index = Some(
            notebook_index
                .cell(start_location.line)
                .unwrap_or(OneIndexed::MIN),
        );
        start_location = notebook_index.translate_line_column(&start_location);
        end_location = notebook_index.translate_line_column(&end_location);
        noqa_location =
            noqa_location.map(|location| notebook_index.translate_line_column(&location));
    }

    json!({
        "code": message.secondary_code(),
        "url": message.to_url(),
        "message": message.body(),
        "fix": fix,
        "cell": notebook_cell_index,
        "location": location_to_json(start_location),
        "end_location": location_to_json(end_location),
        "filename": filename,
        "noqa_row": noqa_location.map(|location| location.line)
    })
}
fn location_to_json(location: LineColumn) -> serde_json::Value {
json!({
"row": location.line,
"column": location.column
})
}
/// Serializes a fix's edits as a JSON array, translating locations into
/// notebook cell coordinates when a notebook index is available.
struct ExpandedEdits<'a> {
    edits: &'a [Edit],
    source_code: &'a SourceCode<'a, 'a>,
    notebook_index: Option<&'a NotebookIndex>,
}

impl Serialize for ExpandedEdits<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_seq(Some(self.edits.len()))?;
        for edit in self.edits {
            let mut location = self.source_code.line_column(edit.start());
            let mut end_location = self.source_code.line_column(edit.end());

            if let Some(notebook_index) = self.notebook_index {
                // There exists a newline between each cell's source code in the
                // concatenated source code in Ruff. This newline doesn't actually
                // exist in the JSON source field.
                //
                // Now, certain edits may try to remove this newline, which means
                // the edit will spill over to the first character of the next cell.
                // If it does, we need to translate the end location to the last
                // character of the previous cell.
                //
                // The two spill cases (end row lands in a *different* cell, or
                // past the last cell entirely) previously had duplicated arm
                // bodies; they are folded into one branch here.
                let spills_into_next_cell = match (
                    notebook_index.cell(location.line),
                    notebook_index.cell(end_location.line),
                ) {
                    (Some(start_cell), Some(end_cell)) => start_cell != end_cell,
                    (Some(_), None) => true,
                    _ => false,
                };

                if spills_into_next_cell {
                    debug_assert_eq!(end_location.column.get(), 1);
                    let prev_row = end_location.line.saturating_sub(1);
                    // Clamp the end to the last character of the previous row.
                    end_location = LineColumn {
                        line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
                        column: self
                            .source_code
                            .line_column(self.source_code.line_end_exclusive(prev_row))
                            .column,
                    };
                } else {
                    end_location = notebook_index.translate_line_column(&end_location);
                }
                location = notebook_index.translate_line_column(&location);
            }

            let value = json!({
                "content": edit.content().unwrap_or_default(),
                "location": location_to_json(location),
                "end_location": location_to_json(end_location)
            });
            s.serialize_element(&value)?;
        }
        s.end()
    }
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;

    use crate::message::JsonEmitter;
    use crate::message::tests::{
        capture_emitter_notebook_output, capture_emitter_output, create_diagnostics,
        create_notebook_diagnostics, create_syntax_error_diagnostics,
    };

    // Snapshot of the pretty-printed JSON array for ordinary diagnostics.
    #[test]
    fn output() {
        let mut emitter = JsonEmitter;
        let content = capture_emitter_output(&mut emitter, &create_diagnostics());
        assert_snapshot!(content);
    }

    // Snapshot for syntax-error diagnostics (null code/url/fix fields).
    #[test]
    fn syntax_errors() {
        let mut emitter = JsonEmitter;
        let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
        assert_snapshot!(content);
    }

    // Snapshot for notebook diagnostics, exercising the cell-coordinate
    // translation path.
    #[test]
    fn notebook_output() {
        let mut emitter = JsonEmitter;
        let (diagnostics, notebook_indexes) = create_notebook_diagnostics();
        let content =
            capture_emitter_notebook_output(&mut emitter, &diagnostics, &notebook_indexes);
        assert_snapshot!(content);
    }
}

View File

@@ -0,0 +1,60 @@
use std::io::Write;
use ruff_db::diagnostic::Diagnostic;
use crate::message::json::message_to_json_value;
use crate::message::{Emitter, EmitterContext};
/// Emits one JSON object per line (newline-delimited JSON).
#[derive(Default)]
pub struct JsonLinesEmitter;

impl Emitter for JsonLinesEmitter {
    /// Write each diagnostic as a compact JSON object followed by `\n`.
    ///
    /// # Errors
    ///
    /// Returns an error if serialization or the underlying write fails.
    fn emit(
        &mut self,
        writer: &mut dyn Write,
        diagnostics: &[Diagnostic],
        context: &EmitterContext,
    ) -> anyhow::Result<()> {
        for diagnostic in diagnostics {
            let value = message_to_json_value(diagnostic, context);
            // Reborrow so `writer` stays usable for the trailing newline.
            serde_json::to_writer(&mut *writer, &value)?;
            writer.write_all(b"\n")?;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;

    use crate::message::json_lines::JsonLinesEmitter;
    use crate::message::tests::{
        capture_emitter_notebook_output, capture_emitter_output, create_diagnostics,
        create_notebook_diagnostics, create_syntax_error_diagnostics,
    };

    // One compact JSON object per line for ordinary diagnostics.
    #[test]
    fn output() {
        let mut emitter = JsonLinesEmitter;
        let content = capture_emitter_output(&mut emitter, &create_diagnostics());
        assert_snapshot!(content);
    }

    // Syntax-error diagnostics have no rule code, url, or fix.
    #[test]
    fn syntax_errors() {
        let mut emitter = JsonLinesEmitter;
        let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
        assert_snapshot!(content);
    }

    // Notebook diagnostics exercise the cell-coordinate translation path.
    #[test]
    fn notebook_output() {
        let mut emitter = JsonLinesEmitter;
        let (messages, notebook_indexes) = create_notebook_diagnostics();
        let content = capture_emitter_notebook_output(&mut emitter, &messages, &notebook_indexes);
        assert_snapshot!(content);
    }
}

View File

@@ -3,17 +3,17 @@ use std::fmt::Display;
use std::io::Write;
use std::ops::Deref;
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticId, LintName, SecondaryCode, Severity, Span,
};
use rustc_hash::FxHashMap;
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticId, FileResolver, Input, LintName, SecondaryCode, Severity,
Span, UnifiedFile,
};
use ruff_db::files::File;
pub use azure::AzureEmitter;
pub use github::GithubEmitter;
pub use gitlab::GitlabEmitter;
pub use grouped::GroupedEmitter;
pub use json::JsonEmitter;
pub use json_lines::JsonLinesEmitter;
pub use junit::JunitEmitter;
pub use pylint::PylintEmitter;
pub use rdjson::RdjsonEmitter;
@@ -26,10 +26,13 @@ pub use text::TextEmitter;
use crate::Fix;
use crate::registry::Rule;
mod azure;
mod diff;
mod github;
mod gitlab;
mod grouped;
mod json;
mod json_lines;
mod junit;
mod pylint;
mod rdjson;
@@ -104,34 +107,6 @@ where
diagnostic
}
impl FileResolver for EmitterContext<'_> {
fn path(&self, _file: File) -> &str {
unimplemented!("Expected a Ruff file for rendering a Ruff diagnostic");
}
fn input(&self, _file: File) -> Input {
unimplemented!("Expected a Ruff file for rendering a Ruff diagnostic");
}
fn notebook_index(&self, file: &UnifiedFile) -> Option<NotebookIndex> {
match file {
UnifiedFile::Ty(_) => {
unimplemented!("Expected a Ruff file for rendering a Ruff diagnostic")
}
UnifiedFile::Ruff(file) => self.notebook_indexes.get(file.name()).cloned(),
}
}
fn is_notebook(&self, file: &UnifiedFile) -> bool {
match file {
UnifiedFile::Ty(_) => {
unimplemented!("Expected a Ruff file for rendering a Ruff diagnostic")
}
UnifiedFile::Ruff(file) => self.notebook_indexes.get(file.name()).is_some(),
}
}
}
struct MessageWithLocation<'a> {
message: &'a Diagnostic,
start_location: LineColumn,

View File

@@ -73,7 +73,7 @@ fn message_to_rdjson_value(message: &Diagnostic) -> Value {
},
"code": {
"value": message.secondary_code(),
"url": message.to_ruff_url(),
"url": message.to_url(),
},
"suggestions": rdjson_suggestions(fix.edits(), &source_code),
})
@@ -86,7 +86,7 @@ fn message_to_rdjson_value(message: &Diagnostic) -> Value {
},
"code": {
"value": message.secondary_code(),
"url": message.to_ruff_url(),
"url": message.to_url(),
},
})
}

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/azure.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/azure.rs
expression: content
snapshot_kind: text
---
##vso[task.logissue type=error;sourcepath=fib.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
##vso[task.logissue type=error;sourcepath=fib.py;linenumber=6;columnnumber=5;code=F841;]Local variable `x` is assigned to but never used

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/azure.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/azure.rs
expression: content
snapshot_kind: text
---
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=1;columnnumber=15;]SyntaxError: Expected one or more symbol names after import
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=3;columnnumber=12;]SyntaxError: Expected ')', found newline

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/json.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/json.rs
expression: content
snapshot_kind: text
---
[
{
@@ -83,8 +84,8 @@ expression: env.render_diagnostics(&diagnostics)
{
"content": "",
"end_location": {
"column": 1,
"row": 5
"column": 10,
"row": 4
},
"location": {
"column": 1,

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/json.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/json.rs
expression: content
snapshot_kind: text
---
[
{

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/json.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/json.rs
expression: content
snapshot_kind: text
---
[
{

View File

@@ -1,7 +1,8 @@
---
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/json_lines.rs
expression: content
snapshot_kind: text
---
{"cell":1,"code":"F401","end_location":{"column":10,"row":2},"filename":"notebook.ipynb","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":10,"row":2},"location":{"column":1,"row":2}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":2},"message":"`os` imported but unused","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":2,"code":"F401","end_location":{"column":12,"row":2},"filename":"notebook.ipynb","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":3},"location":{"column":1,"row":2}}],"message":"Remove unused import: `math`"},"location":{"column":8,"row":2},"message":"`math` imported but unused","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":3,"code":"F841","end_location":{"column":6,"row":4},"filename":"notebook.ipynb","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":1,"row":5},"location":{"column":1,"row":4}}],"message":"Remove assignment to unused variable `x`"},"location":{"column":5,"row":4},"message":"Local variable `x` is assigned to but never used","noqa_row":4,"url":"https://docs.astral.sh/ruff/rules/unused-variable"}
{"cell":3,"code":"F841","end_location":{"column":6,"row":4},"filename":"notebook.ipynb","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":10,"row":4},"location":{"column":1,"row":4}}],"message":"Remove assignment to unused variable `x`"},"location":{"column":5,"row":4},"message":"Local variable `x` is assigned to but never used","noqa_row":4,"url":"https://docs.astral.sh/ruff/rules/unused-variable"}

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/json_lines.rs
expression: content
snapshot_kind: text
---
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"fib.py","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":null,"code":"F841","end_location":{"column":6,"row":6},"filename":"fib.py","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":10,"row":6},"location":{"column":5,"row":6}}],"message":"Remove assignment to unused variable `x`"},"location":{"column":5,"row":6},"message":"Local variable `x` is assigned to but never used","noqa_row":6,"url":"https://docs.astral.sh/ruff/rules/unused-variable"}

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/json_lines.rs
expression: content
snapshot_kind: text
---
{"cell":null,"code":null,"end_location":{"column":1,"row":2},"filename":"syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"SyntaxError: Expected one or more symbol names after import","noqa_row":null,"url":null}
{"cell":null,"code":null,"end_location":{"column":1,"row":4},"filename":"syntax_errors.py","fix":null,"location":{"column":12,"row":3},"message":"SyntaxError: Expected ')', found newline","noqa_row":null,"url":null}

View File

@@ -87,14 +87,9 @@ fn detect_blind_exception(
}
}
let exception_argument_name = if is_pytest_raises {
"expected_exception"
} else {
"exception"
};
let first_arg = arguments.args.first()?;
let exception_expr = arguments.find_argument_value(exception_argument_name, 0)?;
let builtin_symbol = semantic.resolve_builtin_symbol(exception_expr)?;
let builtin_symbol = semantic.resolve_builtin_symbol(first_arg)?;
match builtin_symbol {
"Exception" => Some(ExceptionKind::Exception),

View File

@@ -43,29 +43,3 @@ B017_0.py:57:36: B017 Do not assert blind exception: `Exception`
| ^^^^^^^^^^^^^^^^^^^^^^^^ B017
58 | raise ValueError("Multiple context managers")
|
B017_0.py:62:10: B017 Do not assert blind exception: `Exception`
|
61 | def test_pytest_raises_keyword():
62 | with pytest.raises(expected_exception=Exception):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
63 | raise ValueError("Should be flagged")
|
B017_0.py:68:18: B017 Do not assert blind exception: `Exception`
|
66 | class TestKwargs(unittest.TestCase):
67 | def test_method(self):
68 | with self.assertRaises(exception=Exception):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
69 | raise ValueError("Should be flagged")
|
B017_0.py:71:18: B017 Do not assert blind exception: `BaseException`
|
69 | raise ValueError("Should be flagged")
70 |
71 | with self.assertRaises(exception=BaseException):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
72 | raise ValueError("Should be flagged")
|

View File

@@ -43,29 +43,3 @@ B017_0.py:57:36: B017 Do not assert blind exception: `Exception`
| ^^^^^^^^^^^^^^^^^^^^^^^^ B017
58 | raise ValueError("Multiple context managers")
|
B017_0.py:62:10: B017 Do not assert blind exception: `Exception`
|
61 | def test_pytest_raises_keyword():
62 | with pytest.raises(expected_exception=Exception):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
63 | raise ValueError("Should be flagged")
|
B017_0.py:68:18: B017 Do not assert blind exception: `Exception`
|
66 | class TestKwargs(unittest.TestCase):
67 | def test_method(self):
68 | with self.assertRaises(exception=Exception):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
69 | raise ValueError("Should be flagged")
|
B017_0.py:71:18: B017 Do not assert blind exception: `BaseException`
|
69 | raise ValueError("Should be flagged")
70 |
71 | with self.assertRaises(exception=BaseException):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B017
72 | raise ValueError("Should be flagged")
|

View File

@@ -96,43 +96,22 @@ fn is_model_abstract(class_def: &ast::StmtClassDef) -> bool {
continue;
}
for element in body {
match element {
Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
if targets
.iter()
.any(|target| is_abstract_true_assignment(target, Some(value)))
{
return true;
}
let Stmt::Assign(ast::StmtAssign { targets, value, .. }) = element else {
continue;
};
for target in targets {
let Expr::Name(ast::ExprName { id, .. }) = target else {
continue;
};
if id != "abstract" {
continue;
}
Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => {
if is_abstract_true_assignment(target, value.as_deref()) {
return true;
}
if !is_const_true(value) {
continue;
}
_ => {}
return true;
}
}
}
false
}
fn is_abstract_true_assignment(target: &Expr, value: Option<&Expr>) -> bool {
let Expr::Name(ast::ExprName { id, .. }) = target else {
return false;
};
if id != "abstract" {
return false;
}
let Some(value) = value else {
return false;
};
if !is_const_true(value) {
return false;
}
true
}

View File

@@ -1,5 +1,6 @@
---
source: crates/ruff_linter/src/rules/flake8_django/mod.rs
snapshot_kind: text
---
DJ008.py:6:7: DJ008 Model does not define `__str__` method
|
@@ -30,11 +31,3 @@ DJ008.py:182:7: DJ008 Model does not define `__str__` method
| ^^^^^^^^^^^^^^^^^^ DJ008
183 | pass
|
DJ008.py:215:7: DJ008 Model does not define `__str__` method
|
215 | class TypeAnnotatedNonAbstractModel(models.Model):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ DJ008
216 | """Model with type-annotated abstract = False - should trigger DJ008"""
217 | new_field = models.CharField(max_length=10)
|

View File

@@ -539,21 +539,7 @@ fn implicit_return(checker: &Checker, function_def: &ast::StmtFunctionDef, stmt:
}
/// RET504
pub(crate) fn unnecessary_assign(checker: &Checker, function_stmt: &Stmt) {
let Stmt::FunctionDef(function_def) = function_stmt else {
return;
};
let Some(stack) = create_stack(checker, function_def) else {
return;
};
if !result_exists(&stack.returns) {
return;
}
let Some(function_scope) = checker.semantic().function_scope(function_def) else {
return;
};
fn unnecessary_assign(checker: &Checker, stack: &Stack) {
for (assign, return_, stmt) in &stack.assignment_return {
// Identify, e.g., `return x`.
let Some(value) = return_.value.as_ref() else {
@@ -597,22 +583,6 @@ pub(crate) fn unnecessary_assign(checker: &Checker, function_stmt: &Stmt) {
continue;
}
let Some(assigned_binding) = function_scope
.get(assigned_id)
.map(|binding_id| checker.semantic().binding(binding_id))
else {
continue;
};
// Check if there's any reference made to `assigned_binding` in another scope, e.g, nested
// functions. If there is, ignore them.
if assigned_binding
.references()
.map(|reference_id| checker.semantic().reference(reference_id))
.any(|reference| reference.scope_id() != assigned_binding.scope)
{
continue;
}
let mut diagnostic = checker.report_diagnostic(
UnnecessaryAssign {
name: assigned_id.to_string(),
@@ -695,21 +665,24 @@ fn superfluous_elif_else(checker: &Checker, stack: &Stack) {
}
}
fn create_stack<'a>(
checker: &'a Checker,
function_def: &'a ast::StmtFunctionDef,
) -> Option<Stack<'a>> {
let ast::StmtFunctionDef { body, .. } = function_def;
/// Run all checks from the `flake8-return` plugin.
pub(crate) fn function(checker: &Checker, function_def: &ast::StmtFunctionDef) {
let ast::StmtFunctionDef {
decorator_list,
returns,
body,
..
} = function_def;
// Find the last statement in the function.
let Some(last_stmt) = body.last() else {
// Skip empty functions.
return None;
return;
};
// Skip functions that consist of a single return statement.
if body.len() == 1 && matches!(last_stmt, Stmt::Return(_)) {
return None;
return;
}
// Traverse the function body, to collect the stack.
@@ -723,29 +696,9 @@ fn create_stack<'a>(
// Avoid false positives for generators.
if stack.is_generator {
return None;
return;
}
Some(stack)
}
/// Run all checks from the `flake8-return` plugin, except `RET504`, which runs
/// after the semantic model is fully built.
pub(crate) fn function(checker: &Checker, function_def: &ast::StmtFunctionDef) {
let ast::StmtFunctionDef {
decorator_list,
returns,
body,
..
} = function_def;
let Some(stack) = create_stack(checker, function_def) else {
return;
};
let Some(last_stmt) = body.last() else {
return;
};
if checker.any_rule_enabled(&[
Rule::SuperfluousElseReturn,
Rule::SuperfluousElseRaise,
@@ -768,6 +721,10 @@ pub(crate) fn function(checker: &Checker, function_def: &ast::StmtFunctionDef) {
if checker.is_rule_enabled(Rule::ImplicitReturn) {
implicit_return(checker, function_def, last_stmt);
}
if checker.is_rule_enabled(Rule::UnnecessaryAssign) {
unnecessary_assign(checker, &stack);
}
} else {
if checker.is_rule_enabled(Rule::UnnecessaryReturnNone) {
// Skip functions that have a return annotation that is not `None`.

View File

@@ -247,6 +247,8 @@ RET504.py:423:16: RET504 [*] Unnecessary assignment to `services` before `return
422 | services = a["services"]
423 | return services
| ^^^^^^^^ RET504
424 |
425 | # See: https://github.com/astral-sh/ruff/issues/18411
|
= help: Remove unnecessary assignment
@@ -258,46 +260,46 @@ RET504.py:423:16: RET504 [*] Unnecessary assignment to `services` before `return
423 |- return services
422 |+ return a["services"]
424 423 |
425 424 |
426 425 | # See: https://github.com/astral-sh/ruff/issues/14052
425 424 | # See: https://github.com/astral-sh/ruff/issues/18411
426 425 | def f():
RET504.py:458:12: RET504 [*] Unnecessary assignment to `x` before `return` statement
RET504.py:429:12: RET504 [*] Unnecessary assignment to `x` before `return` statement
|
456 | (#=
457 | x) = 1
458 | return x
427 | (#=
428 | x) = 1
429 | return x
| ^ RET504
459 |
460 | def f():
430 |
431 | def f():
|
= help: Remove unnecessary assignment
Unsafe fix
453 453 |
454 454 | # See: https://github.com/astral-sh/ruff/issues/18411
455 455 | def f():
456 |- (#=
457 |- x) = 1
458 |- return x
456 |+ return 1
459 457 |
460 458 | def f():
461 459 | x = (1
424 424 |
425 425 | # See: https://github.com/astral-sh/ruff/issues/18411
426 426 | def f():
427 |- (#=
428 |- x) = 1
429 |- return x
427 |+ return 1
430 428 |
431 429 | def f():
432 430 | x = (1
RET504.py:463:12: RET504 [*] Unnecessary assignment to `x` before `return` statement
RET504.py:434:12: RET504 [*] Unnecessary assignment to `x` before `return` statement
|
461 | x = (1
462 | )
463 | return x
432 | x = (1
433 | )
434 | return x
| ^ RET504
|
= help: Remove unnecessary assignment
Unsafe fix
458 458 | return x
459 459 |
460 460 | def f():
461 |- x = (1
461 |+ return (1
462 462 | )
463 |- return x
429 429 | return x
430 430 |
431 431 | def f():
432 |- x = (1
432 |+ return (1
433 433 | )
434 |- return x

View File

@@ -912,7 +912,6 @@ mod tests {
#[test_case(Path::new("docstring.pyi"))]
#[test_case(Path::new("docstring_only.py"))]
#[test_case(Path::new("empty.py"))]
#[test_case(Path::new("multiple_strings.py"))]
fn required_imports(path: &Path) -> Result<()> {
let snapshot = format!("required_imports_{}", path.to_string_lossy());
let diagnostics = test_path(

View File

@@ -1,18 +0,0 @@
---
source: crates/ruff_linter/src/rules/isort/mod.rs
---
multiple_strings.py:1:1: I002 [*] Missing required import: `from __future__ import annotations`
Safe fix
1 1 | """This is a docstring."""
2 |+from __future__ import annotations
2 3 | "This is not a docstring."
3 4 | "This is also not a docstring."
4 5 |
multiple_strings.py:1:1: I002 [*] Missing required import: `from __future__ import generator_stop`
Safe fix
1 1 | """This is a docstring."""
2 |+from __future__ import generator_stop
2 3 | "This is not a docstring."
3 4 | "This is also not a docstring."
4 5 |

View File

@@ -18,15 +18,11 @@ use crate::checkers::ast::Checker;
///
/// ## Example
/// ```python
/// import os
///
/// os.getenv(1)
/// ```
///
/// Use instead:
/// ```python
/// import os
///
/// os.getenv("1")
/// ```
#[derive(ViolationMetadata)]

View File

@@ -14,12 +14,12 @@ use crate::{AlwaysFixableViolation, Edit, Fix};
///
/// ## Example
/// ```python
/// from xml.etree import cElementTree as ET
/// from xml.etree import cElementTree
/// ```
///
/// Use instead:
/// ```python
/// from xml.etree import ElementTree as ET
/// from xml.etree import ElementTree
/// ```
///
/// ## References

View File

@@ -43,7 +43,7 @@ use super::{
/// ## Example
///
/// ```python
/// from typing import Generic, TypeVar
/// from typing import TypeVar
///
/// T = TypeVar("T")
///

View File

@@ -27,8 +27,6 @@ use crate::{AlwaysFixableViolation, Edit, Fix};
///
/// ## Example
/// ```python
/// import asyncio
///
/// raise asyncio.TimeoutError
/// ```
///

View File

@@ -989,6 +989,9 @@ impl Display for SemanticSyntaxError {
SemanticSyntaxErrorKind::AnnotatedNonlocal(name) => {
write!(f, "annotated name `{name}` can't be nonlocal")
}
SemanticSyntaxErrorKind::InvalidNonlocal(name) => {
write!(f, "no binding for nonlocal `{name}` found")
}
}
}
}
@@ -1346,6 +1349,32 @@ pub enum SemanticSyntaxErrorKind {
/// Represents a type annotation on a variable that's been declared nonlocal
AnnotatedNonlocal(String),
/// Represents a nonlocal declaration with no definition in an enclosing scope
///
/// ## Examples
///
/// ```python
/// def f():
/// nonlocal x # error
///
/// # Global variables don't count.
/// x = 1
/// def f():
/// nonlocal x # error
///
/// def f():
/// x = 1
/// def g():
/// nonlocal x # allowed
///
/// # The definition can come later.
/// def f():
/// def g():
/// nonlocal x # allowed
/// x = 1
/// ```
InvalidNonlocal(String),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, get_size2::GetSize)]

View File

@@ -2094,20 +2094,6 @@ impl<'a> SemanticModel<'a> {
None
})
}
/// Finds and returns the [`Scope`] corresponding to a given [`ast::StmtFunctionDef`].
///
/// Every function scope is examined; a scope matches when the [`TextRange`] of the
/// function backing the scope equals the range of the provided `function_def`.
pub fn function_scope(&self, function_def: &ast::StmtFunctionDef) -> Option<&Scope> {
    self.scopes.iter().find(|scope| {
        scope
            .kind
            .as_function()
            .is_some_and(|function| function.range() == function_def.range())
    })
}
}
pub struct ShadowedBinding {

View File

@@ -301,7 +301,7 @@ fn to_lsp_diagnostic(
severity,
tags,
code,
code_description: diagnostic.to_ruff_url().and_then(|url| {
code_description: diagnostic.to_url().and_then(|url| {
Some(lsp_types::CodeDescription {
href: lsp_types::Url::parse(&url).ok()?,
})

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_wasm"
version = "0.12.3"
version = "0.12.2"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -323,8 +323,8 @@ pub enum OutputFormat {
Concise,
}
impl From<OutputFormat> for ty_project::metadata::options::OutputFormat {
fn from(format: OutputFormat) -> ty_project::metadata::options::OutputFormat {
impl From<OutputFormat> for ruff_db::diagnostic::DiagnosticFormat {
fn from(format: OutputFormat) -> ruff_db::diagnostic::DiagnosticFormat {
match format {
OutputFormat::Full => Self::Full,
OutputFormat::Concise => Self::Concise,

View File

@@ -290,7 +290,7 @@ impl MainLoop {
} => {
let terminal_settings = db.project().settings(db).terminal();
let display_config = DisplayDiagnosticConfig::default()
.format(terminal_settings.output_format.into())
.format(terminal_settings.output_format)
.color(colored::control::SHOULD_COLORIZE.should_colorize());
if check_revision == revision {

View File

@@ -15,12 +15,9 @@ bitflags = { workspace = true }
ruff_db = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ty_python_semantic = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
smallvec = { workspace = true }

View File

@@ -536,9 +536,6 @@ _private_type_var_tuple = TypeVarTuple("_private_type_var_tuple")
public_explicit_type_alias: TypeAlias = Literal[1]
_private_explicit_type_alias: TypeAlias = Literal[1]
public_implicit_union_alias = int | str
_private_implicit_union_alias = int | str
class PublicProtocol(Protocol):
def method(self) -> None: ...
@@ -560,9 +557,7 @@ class _PrivateProtocol(Protocol):
test.assert_completions_include("public_type_var_tuple");
test.assert_completions_do_not_include("_private_type_var_tuple");
test.assert_completions_include("public_explicit_type_alias");
test.assert_completions_do_not_include("_private_explicit_type_alias");
test.assert_completions_include("public_implicit_union_alias");
test.assert_completions_do_not_include("_private_implicit_union_alias");
test.assert_completions_include("_private_explicit_type_alias");
test.assert_completions_include("PublicProtocol");
test.assert_completions_do_not_include("_PrivateProtocol");
}
@@ -2396,48 +2391,6 @@ Cougar = 3
test.assert_completions_include("Cheetah");
}
#[test]
fn from_import_with_submodule1() {
let test = CursorTest::builder()
.source("main.py", "from package import <CURSOR>")
.source("package/__init__.py", "")
.source("package/foo.py", "")
.source("package/bar.pyi", "")
.source("package/foo-bar.py", "")
.source("package/data.txt", "")
.source("package/sub/__init__.py", "")
.source("package/not-a-submodule/__init__.py", "")
.build();
test.assert_completions_include("foo");
test.assert_completions_include("bar");
test.assert_completions_include("sub");
test.assert_completions_do_not_include("foo-bar");
test.assert_completions_do_not_include("data");
test.assert_completions_do_not_include("not-a-submodule");
}
#[test]
fn from_import_with_vendored_submodule1() {
let test = cursor_test(
"\
from http import <CURSOR>
",
);
test.assert_completions_include("client");
}
#[test]
fn from_import_with_vendored_submodule2() {
let test = cursor_test(
"\
from email import <CURSOR>
",
);
test.assert_completions_include("mime");
test.assert_completions_do_not_include("base");
}
#[test]
fn import_submodule_not_attribute1() {
let test = cursor_test(

View File

@@ -1,664 +0,0 @@
//! Docstring parsing utilities for language server features.
//!
//! This module provides functionality for extracting structured information from
//! Python docstrings, including parameter documentation for signature help.
//! Supports Google-style, NumPy-style, and reST/Sphinx-style docstrings.
//! There are no formal specifications for any of these formats, so the parsing
//! logic needs to be tolerant of variations.
use regex::Regex;
use ruff_python_trivia::leading_indentation;
use ruff_source_file::UniversalNewlines;
use std::collections::HashMap;
use std::sync::LazyLock;
// Static regex instances to avoid recompilation

// Matches a Google-style section header such as "Args:", "Arguments:", or
// "Parameters:" (case-insensitive, whole line).
static GOOGLE_SECTION_REGEX: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(?i)^\s*(Args|Arguments|Parameters)\s*:\s*$")
        .expect("Google section regex should be valid")
});

// Matches a Google-style parameter line: a name (optionally prefixed with `*`
// or `**`), an optional parenthesized type, then `: description`.
static GOOGLE_PARAM_REGEX: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"^\s*(\*?\*?\w+)\s*(\(.*?\))?\s*:\s*(.+)")
        .expect("Google parameter regex should be valid")
});

// Matches the NumPy-style "Parameters" heading on a line by itself (case-insensitive).
static NUMPY_SECTION_REGEX: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(?i)^\s*Parameters\s*$").expect("NumPy section regex should be valid")
});

// Matches the dashed underline (`----`) that follows a NumPy-style section heading.
static NUMPY_UNDERLINE_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^\s*-+\s*$").expect("NumPy underline regex should be valid"));

// Matches a reST/Sphinx field: `:param [type] name: description`, capturing the
// optional type in group 1, the name in group 2, and the description in group 3.
static REST_PARAM_REGEX: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"^\s*:param\s+(?:(\w+)\s+)?(\w+)\s*:\s*(.+)")
        .expect("reST parameter regex should be valid")
});
/// Extract parameter documentation from popular docstring formats.
/// Returns a map of parameter names to their documentation.
pub fn get_parameter_documentation(docstring: &str) -> HashMap<String, String> {
    let mut combined = HashMap::new();

    // Merge the three supported formats in order. On a duplicate parameter
    // name, a later format overwrites an earlier one: Google first, then
    // NumPy, then reST/Sphinx.
    for extracted in [
        extract_google_style_params(docstring),
        extract_numpy_style_params(docstring),
        extract_rest_style_params(docstring),
    ] {
        combined.extend(extracted);
    }

    combined
}
/// Extract parameter documentation from Google-style docstrings.
///
/// Scans line by line as a small state machine: once an `Args:`-like header is
/// seen, each matching parameter line opens a new entry; indented lines are
/// appended to the current entry; a non-indented, non-parameter line (or a
/// known section header such as `Returns:`) ends the section.
fn extract_google_style_params(docstring: &str) -> HashMap<String, String> {
    let mut param_docs = HashMap::new();
    // State: whether we are inside an Args/Parameters section, which parameter
    // is currently being accumulated, and its documentation text so far.
    let mut in_args_section = false;
    let mut current_param: Option<String> = None;
    let mut current_doc = String::new();
    for line_obj in docstring.universal_newlines() {
        let line = line_obj.as_str();
        if GOOGLE_SECTION_REGEX.is_match(line) {
            in_args_section = true;
            continue;
        }
        if in_args_section {
            // Check if we hit another section (starts with a word followed by colon at line start)
            if !line.starts_with(' ') && !line.starts_with('\t') && line.contains(':') {
                if let Some(colon_pos) = line.find(':') {
                    let section_name = line[..colon_pos].trim();
                    // If this looks like another section, stop processing args
                    if !section_name.is_empty()
                        && section_name
                            .chars()
                            .all(|c| c.is_alphabetic() || c.is_whitespace())
                    {
                        // Check if this is a known section name
                        let known_sections = [
                            "Returns", "Return", "Raises", "Yields", "Yield", "Examples",
                            "Example", "Note", "Notes", "Warning", "Warnings",
                        ];
                        if known_sections.contains(&section_name) {
                            // Flush the in-progress parameter before leaving the section.
                            if let Some(param_name) = current_param.take() {
                                param_docs.insert(param_name, current_doc.trim().to_string());
                                current_doc.clear();
                            }
                            in_args_section = false;
                            continue;
                        }
                    }
                }
            }
            if let Some(captures) = GOOGLE_PARAM_REGEX.captures(line) {
                // Save previous parameter if exists
                if let Some(param_name) = current_param.take() {
                    param_docs.insert(param_name, current_doc.trim().to_string());
                    current_doc.clear();
                }
                // Start new parameter (group 1 = name, group 3 = description;
                // the optional type in group 2 is intentionally discarded)
                if let (Some(param), Some(desc)) = (captures.get(1), captures.get(3)) {
                    current_param = Some(param.as_str().to_string());
                    current_doc = desc.as_str().to_string();
                }
            } else if line.starts_with(' ') || line.starts_with('\t') {
                // This is a continuation of the current parameter documentation
                if current_param.is_some() {
                    if !current_doc.is_empty() {
                        current_doc.push('\n');
                    }
                    current_doc.push_str(line.trim());
                }
            } else {
                // This is a line that doesn't start with whitespace and isn't a parameter
                // It might be a section or other content, so stop processing args
                if let Some(param_name) = current_param.take() {
                    param_docs.insert(param_name, current_doc.trim().to_string());
                    current_doc.clear();
                }
                in_args_section = false;
            }
        }
    }
    // Don't forget the last parameter
    if let Some(param_name) = current_param {
        param_docs.insert(param_name, current_doc.trim().to_string());
    }
    param_docs
}
/// Calculate the indentation level of a line: the byte length of its leading
/// whitespace, as reported by `leading_indentation`.
fn get_indentation_level(line: &str) -> usize {
    let indent = leading_indentation(line);
    indent.len()
}
/// Extract parameter documentation from NumPy-style docstrings.
///
/// Looks for a `Parameters` heading followed by a dashed underline, then
/// treats lines at the first parameter's indentation as parameter entries and
/// deeper-indented lines as their description text. A new underlined heading
/// (or a line that fits neither indentation pattern) ends the section.
fn extract_numpy_style_params(docstring: &str) -> HashMap<String, String> {
    let mut param_docs = HashMap::new();
    // Peekable so we can look ahead one line for section underlines.
    let mut lines = docstring
        .universal_newlines()
        .map(|line| line.as_str())
        .peekable();
    // State: section flags plus the parameter currently being accumulated.
    let mut in_params_section = false;
    let mut found_underline = false;
    let mut current_param: Option<String> = None;
    let mut current_doc = String::new();
    // Indentation baselines, learned from the first parameter line and the
    // first description line respectively.
    let mut base_param_indent: Option<usize> = None;
    let mut base_content_indent: Option<usize> = None;
    while let Some(line) = lines.next() {
        if NUMPY_SECTION_REGEX.is_match(line) {
            // Check if the next line is an underline
            if let Some(next_line) = lines.peek() {
                if NUMPY_UNDERLINE_REGEX.is_match(next_line) {
                    in_params_section = true;
                    found_underline = false;
                    base_param_indent = None;
                    base_content_indent = None;
                    continue;
                }
            }
        }
        if in_params_section && !found_underline {
            if NUMPY_UNDERLINE_REGEX.is_match(line) {
                found_underline = true;
                continue;
            }
        }
        if in_params_section && found_underline {
            let current_indent = get_indentation_level(line);
            let trimmed = line.trim();
            // Skip empty lines
            if trimmed.is_empty() {
                continue;
            }
            // Check if we hit another section
            if current_indent == 0 {
                if let Some(next_line) = lines.peek() {
                    if NUMPY_UNDERLINE_REGEX.is_match(next_line) {
                        // This is another section
                        if let Some(param_name) = current_param.take() {
                            param_docs.insert(param_name, current_doc.trim().to_string());
                            current_doc.clear();
                        }
                        in_params_section = false;
                        continue;
                    }
                }
            }
            // Determine if this could be a parameter line
            let could_be_param = if let Some(base_indent) = base_param_indent {
                // We've seen parameters before - check if this matches the expected parameter indentation
                current_indent == base_indent
            } else {
                // First potential parameter - check if it has reasonable indentation and content
                current_indent > 0
                    && (trimmed.contains(':')
                        || trimmed.chars().all(|c| c.is_alphanumeric() || c == '_'))
            };
            if could_be_param {
                // Check if this could be a section header by looking at the next line
                if let Some(next_line) = lines.peek() {
                    if NUMPY_UNDERLINE_REGEX.is_match(next_line) {
                        // This is a section header, not a parameter
                        if let Some(param_name) = current_param.take() {
                            param_docs.insert(param_name, current_doc.trim().to_string());
                            current_doc.clear();
                        }
                        in_params_section = false;
                        continue;
                    }
                }
                // Set base indentation levels on first parameter
                if base_param_indent.is_none() {
                    base_param_indent = Some(current_indent);
                }
                // Handle parameter with type annotation (param : type)
                if trimmed.contains(':') {
                    // Save previous parameter if exists
                    if let Some(param_name) = current_param.take() {
                        param_docs.insert(param_name, current_doc.trim().to_string());
                        current_doc.clear();
                    }
                    // Extract parameter name and description
                    let parts: Vec<&str> = trimmed.splitn(2, ':').collect();
                    if parts.len() == 2 {
                        let param_name = parts[0].trim();
                        // Extract just the parameter name (before any type info)
                        let param_name = param_name.split_whitespace().next().unwrap_or(param_name);
                        current_param = Some(param_name.to_string());
                        current_doc.clear(); // Description comes on following lines, not on this line
                    }
                } else {
                    // Handle parameter without type annotation
                    // Save previous parameter if exists
                    if let Some(param_name) = current_param.take() {
                        param_docs.insert(param_name, current_doc.trim().to_string());
                        current_doc.clear();
                    }
                    // This line is the parameter name
                    current_param = Some(trimmed.to_string());
                    current_doc.clear();
                }
            } else if current_param.is_some() {
                // Determine if this is content for the current parameter
                let is_content = if let Some(base_content) = base_content_indent {
                    // We've seen content before - check if this matches expected content indentation
                    current_indent >= base_content
                } else {
                    // First potential content line - should be more indented than parameter
                    if let Some(base_param) = base_param_indent {
                        current_indent > base_param
                    } else {
                        // Fallback: any indented content
                        current_indent > 0
                    }
                };
                if is_content {
                    // Set base content indentation on first content line
                    if base_content_indent.is_none() {
                        base_content_indent = Some(current_indent);
                    }
                    // This is a continuation of the current parameter documentation
                    if !current_doc.is_empty() {
                        current_doc.push('\n');
                    }
                    current_doc.push_str(trimmed);
                } else {
                    // This line doesn't match our expected indentation patterns
                    // Save current parameter and stop processing
                    if let Some(param_name) = current_param.take() {
                        param_docs.insert(param_name, current_doc.trim().to_string());
                        current_doc.clear();
                    }
                    in_params_section = false;
                }
            }
        }
    }
    // Don't forget the last parameter
    if let Some(param_name) = current_param {
        param_docs.insert(param_name, current_doc.trim().to_string());
    }
    param_docs
}
/// Extract parameter documentation from reST/Sphinx-style docstrings.
///
/// Each `:param ...:` field opens a new entry; indented lines continue the
/// current entry; another `:`-directive flushes it; a non-indented line or a
/// section header (`Parameters`/`Args`/`Arguments`) stops processing entirely.
fn extract_rest_style_params(docstring: &str) -> HashMap<String, String> {
    let mut param_docs = HashMap::new();
    // The parameter currently being accumulated and its text so far.
    let mut current_param: Option<String> = None;
    let mut current_doc = String::new();
    for line_obj in docstring.universal_newlines() {
        let line = line_obj.as_str();
        if let Some(captures) = REST_PARAM_REGEX.captures(line) {
            // Save previous parameter if exists
            if let Some(param_name) = current_param.take() {
                param_docs.insert(param_name, current_doc.trim().to_string());
                current_doc.clear();
            }
            // Extract parameter name and description (group 2 = name,
            // group 3 = description; the optional type in group 1 is unused)
            if let (Some(param_match), Some(desc_match)) = (captures.get(2), captures.get(3)) {
                current_param = Some(param_match.as_str().to_string());
                current_doc = desc_match.as_str().to_string();
            }
        } else if current_param.is_some() {
            let trimmed = line.trim();
            // Check if this is a new section - stop processing if we hit section headers
            if trimmed == "Parameters" || trimmed == "Args" || trimmed == "Arguments" {
                // Save current param and stop processing
                if let Some(param_name) = current_param.take() {
                    param_docs.insert(param_name, current_doc.trim().to_string());
                    current_doc.clear();
                }
                break;
            }
            // Check if this is another directive line starting with ':'
            if trimmed.starts_with(':') {
                // This is a new directive, save current param
                if let Some(param_name) = current_param.take() {
                    param_docs.insert(param_name, current_doc.trim().to_string());
                    current_doc.clear();
                }
                // Let the next iteration handle this directive
                continue;
            }
            // Check if this is a continuation line (indented)
            if line.starts_with(" ") && !trimmed.is_empty() {
                // This is a continuation line
                if !current_doc.is_empty() {
                    current_doc.push('\n');
                }
                current_doc.push_str(trimmed);
            } else if !trimmed.is_empty() && !line.starts_with(' ') && !line.starts_with('\t') {
                // This is a non-indented line - likely end of the current parameter
                if let Some(param_name) = current_param.take() {
                    param_docs.insert(param_name, current_doc.trim().to_string());
                    current_doc.clear();
                }
                break;
            }
        }
    }
    // Don't forget the last parameter
    if let Some(param_name) = current_param {
        param_docs.insert(param_name, current_doc.trim().to_string());
    }
    param_docs
}
#[cfg(test)]
mod tests {
    use super::*;

    // Google-style `Args:` section: typed, multi-line, and untyped parameters.
    // NOTE(review): the string fixtures below appear with their leading
    // indentation stripped; preserved byte-for-byte as found.
    #[test]
    fn test_google_style_parameter_documentation() {
        let docstring = r#"
This is a function description.
Args:
param1 (str): The first parameter description
param2 (int): The second parameter description
This is a continuation of param2 description.
param3: A parameter without type annotation
Returns:
str: The return value description
"#;
        let param_docs = get_parameter_documentation(docstring);
        assert_eq!(param_docs.len(), 3);
        assert_eq!(&param_docs["param1"], "The first parameter description");
        assert_eq!(
            &param_docs["param2"],
            "The second parameter description\nThis is a continuation of param2 description."
        );
        assert_eq!(&param_docs["param3"], "A parameter without type annotation");
    }

    // NumPy-style `Parameters` section with underline, typed and untyped params.
    #[test]
    fn test_numpy_style_parameter_documentation() {
        let docstring = r#"
This is a function description.
Parameters
----------
param1 : str
The first parameter description
param2 : int
The second parameter description
This is a continuation of param2 description.
param3
A parameter without type annotation
Returns
-------
str
The return value description
"#;
        let param_docs = get_parameter_documentation(docstring);
        assert_eq!(param_docs.len(), 3);
        assert_eq!(
            param_docs.get("param1").expect("param1 should exist"),
            "The first parameter description"
        );
        assert_eq!(
            param_docs.get("param2").expect("param2 should exist"),
            "The second parameter description\nThis is a continuation of param2 description."
        );
        assert_eq!(
            param_docs.get("param3").expect("param3 should exist"),
            "A parameter without type annotation"
        );
    }

    // A docstring with no parameter sections yields an empty map.
    #[test]
    fn test_no_parameter_documentation() {
        let docstring = r#"
This is a simple function description without parameter documentation.
"#;
        let param_docs = get_parameter_documentation(docstring);
        assert!(param_docs.is_empty());
    }

    // Google-style and NumPy-style sections in the same docstring both contribute.
    #[test]
    fn test_mixed_style_parameter_documentation() {
        let docstring = r#"
This is a function description.
Args:
param1 (str): Google-style parameter
param2 (int): Another Google-style parameter
Parameters
----------
param3 : bool
NumPy-style parameter
"#;
        let param_docs = get_parameter_documentation(docstring);
        assert_eq!(param_docs.len(), 3);
        assert_eq!(
            param_docs.get("param1").expect("param1 should exist"),
            "Google-style parameter"
        );
        assert_eq!(
            param_docs.get("param2").expect("param2 should exist"),
            "Another Google-style parameter"
        );
        assert_eq!(
            param_docs.get("param3").expect("param3 should exist"),
            "NumPy-style parameter"
        );
    }

    // reST/Sphinx `:param:` fields, including a typed field and a continuation line.
    #[test]
    fn test_rest_style_parameter_documentation() {
        let docstring = r#"
This is a function description.
:param str param1: The first parameter description
:param int param2: The second parameter description
This is a continuation of param2 description.
:param param3: A parameter without type annotation
:returns: The return value description
:rtype: str
"#;
        let param_docs = get_parameter_documentation(docstring);
        assert_eq!(param_docs.len(), 3);
        assert_eq!(
            param_docs.get("param1").expect("param1 should exist"),
            "The first parameter description"
        );
        assert_eq!(
            param_docs.get("param2").expect("param2 should exist"),
            "The second parameter description\nThis is a continuation of param2 description."
        );
        assert_eq!(
            param_docs.get("param3").expect("param3 should exist"),
            "A parameter without type annotation"
        );
    }

    // All three supported formats mixed in one docstring.
    #[test]
    fn test_mixed_style_with_rest_parameter_documentation() {
        let docstring = r#"
This is a function description.
Args:
param1 (str): Google-style parameter
:param int param2: reST-style parameter
:param param3: Another reST-style parameter
Parameters
----------
param4 : bool
NumPy-style parameter
"#;
        let param_docs = get_parameter_documentation(docstring);
        assert_eq!(param_docs.len(), 4);
        assert_eq!(
            param_docs.get("param1").expect("param1 should exist"),
            "Google-style parameter"
        );
        assert_eq!(
            param_docs.get("param2").expect("param2 should exist"),
            "reST-style parameter"
        );
        assert_eq!(
            param_docs.get("param3").expect("param3 should exist"),
            "Another reST-style parameter"
        );
        assert_eq!(
            param_docs.get("param4").expect("param4 should exist"),
            "NumPy-style parameter"
        );
    }

    // NumPy-style parsing driven by whatever indentation the fixture carries.
    #[test]
    fn test_numpy_style_with_different_indentation() {
        let docstring = r#"
This is a function description.
Parameters
----------
param1 : str
The first parameter description
param2 : int
The second parameter description
This is a continuation of param2 description.
param3
A parameter without type annotation
Returns
-------
str
The return value description
"#;
        let param_docs = get_parameter_documentation(docstring);
        assert_eq!(param_docs.len(), 3);
        assert_eq!(
            param_docs.get("param1").expect("param1 should exist"),
            "The first parameter description"
        );
        assert_eq!(
            param_docs.get("param2").expect("param2 should exist"),
            "The second parameter description\nThis is a continuation of param2 description."
        );
        assert_eq!(
            param_docs.get("param3").expect("param3 should exist"),
            "A parameter without type annotation"
        );
    }

    // NumPy-style parsing where indentation is expressed with tab characters.
    #[test]
    fn test_numpy_style_with_tabs_and_mixed_indentation() {
        // Using raw strings to avoid tab/space conversion issues in the test
        let docstring = "
This is a function description.
Parameters
----------
\tparam1 : str
\t\tThe first parameter description
\tparam2 : int
\t\tThe second parameter description
\t\tThis is a continuation of param2 description.
\tparam3
\t\tA parameter without type annotation
";
        let param_docs = get_parameter_documentation(docstring);
        assert_eq!(param_docs.len(), 3);
        assert_eq!(
            param_docs.get("param1").expect("param1 should exist"),
            "The first parameter description"
        );
        assert_eq!(
            param_docs.get("param2").expect("param2 should exist"),
            "The second parameter description\nThis is a continuation of param2 description."
        );
        assert_eq!(
            param_docs.get("param3").expect("param3 should exist"),
            "A parameter without type annotation"
        );
    }

    // \r\n, \r, and \n line endings must all parse identically.
    #[test]
    fn test_universal_newlines() {
        // Test with Windows-style line endings (\r\n)
        let docstring_windows = "This is a function description.\r\n\r\nArgs:\r\n    param1 (str): The first parameter\r\n    param2 (int): The second parameter\r\n";
        // Test with old Mac-style line endings (\r)
        let docstring_mac = "This is a function description.\r\rArgs:\r    param1 (str): The first parameter\r    param2 (int): The second parameter\r";
        // Test with Unix-style line endings (\n) - should work the same
        let docstring_unix = "This is a function description.\n\nArgs:\n    param1 (str): The first parameter\n    param2 (int): The second parameter\n";
        let param_docs_windows = get_parameter_documentation(docstring_windows);
        let param_docs_mac = get_parameter_documentation(docstring_mac);
        let param_docs_unix = get_parameter_documentation(docstring_unix);
        // All should produce the same results
        assert_eq!(param_docs_windows.len(), 2);
        assert_eq!(param_docs_mac.len(), 2);
        assert_eq!(param_docs_unix.len(), 2);
        assert_eq!(
            param_docs_windows.get("param1"),
            Some(&"The first parameter".to_string())
        );
        assert_eq!(
            param_docs_mac.get("param1"),
            Some(&"The first parameter".to_string())
        );
        assert_eq!(
            param_docs_unix.get("param1"),
            Some(&"The first parameter".to_string())
        );
    }
}

View File

@@ -1,17 +1,14 @@
mod completion;
mod db;
mod docstring;
mod find_node;
mod goto;
mod hover;
mod inlay_hints;
mod markup;
mod semantic_tokens;
mod signature_help;
pub use completion::completion;
pub use db::Db;
pub use docstring::get_parameter_documentation;
pub use goto::goto_type_definition;
pub use hover::hover;
pub use inlay_hints::inlay_hints;
@@ -19,7 +16,6 @@ pub use markup::MarkupKind;
pub use semantic_tokens::{
SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens, semantic_tokens,
};
pub use signature_help::{ParameterDetails, SignatureDetails, SignatureHelpInfo, signature_help};
use ruff_db::files::{File, FileRange};
use ruff_text_size::{Ranged, TextRange};

View File

@@ -1,687 +0,0 @@
//! This module handles the "signature help" request in the language server
//! protocol. This request is typically issued by a client when the user types
//! an open parenthesis and starts to enter arguments for a function call.
//! The signature help provides information that the editor displays to the
//! user about the target function signature including parameter names,
//! types, and documentation. It supports multiple signatures for union types
//! and overloads.
use crate::{Db, docstring::get_parameter_documentation, find_node::covering_node};
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_python_ast::{self as ast, AnyNodeRef};
use ruff_text_size::{Ranged, TextRange, TextSize};
use ty_python_semantic::semantic_index::definition::Definition;
use ty_python_semantic::types::{CallSignatureDetails, call_signature_details};
// Limitations of the current implementation:
// TODO - If the target function is declared in a stub file but defined (implemented)
// in a source file, the documentation will not reflect a docstring that appears
// only in the implementation. To do this, we'll need to map the function or
// method in the stub to the implementation and extract the docstring from there.
/// Information about a single function parameter, as surfaced in the
/// signature-help response.
#[derive(Debug, Clone)]
pub struct ParameterDetails {
    /// The parameter name (e.g., "param1")
    pub name: String,
    /// The parameter label in the signature (e.g., "param1: str")
    pub label: String,
    /// Documentation specific to the parameter, typically extracted from the
    /// function's docstring
    pub documentation: Option<String>,
}
/// Information about one candidate function signature for a call site.
#[derive(Debug, Clone)]
pub struct SignatureDetails {
    /// Text representation of the full signature (including input parameters and return type).
    pub label: String,
    /// Documentation for the signature, typically from the function's docstring.
    pub documentation: Option<String>,
    /// Information about each of the parameters in left-to-right order.
    pub parameters: Vec<ParameterDetails>,
    /// Index of the parameter that corresponds to the argument where the
    /// user's cursor is currently positioned, if any.
    pub active_parameter: Option<usize>,
}
/// Top-level signature help result for a function call at a cursor position.
#[derive(Debug, Clone)]
pub struct SignatureHelpInfo {
    /// Information about each of the signatures for the function call. We
    /// need to handle multiple because of unions, overloads, and composite
    /// calls like constructors (which invoke both __new__ and __init__).
    pub signatures: Vec<SignatureDetails>,
    /// Index of the "active signature" which is the first signature where
    /// all arguments that are currently present in the code map to parameters.
    pub active_signature: Option<usize>,
}
/// Signature help information for function calls at the given position.
///
/// Returns `None` when the offset is not inside a call expression or when the
/// semantic analyzer produces no candidate signatures.
pub fn signature_help(db: &dyn Db, file: File, offset: TextSize) -> Option<SignatureHelpInfo> {
    let module = parsed_module(db, file).load(db);

    // Locate the innermost call expression and the argument under the cursor.
    let (call_expr, current_arg_index) = get_call_expr(&module, offset)?;

    // Ask the semantic analyzer for the candidate signatures.
    let details: Vec<CallSignatureDetails<'_>> = call_signature_details(db, file, call_expr);
    if details.is_empty() {
        return None;
    }

    // The active signature is the first one where every argument currently
    // present in the code maps to a parameter.
    let active_signature = find_active_signature_from_details(&details);

    // Convert each semantic result into the public SignatureDetails shape.
    let signatures: Vec<SignatureDetails> = details
        .iter()
        .map(|detail| {
            create_signature_details_from_call_signature_details(db, detail, current_arg_index)
        })
        .collect();

    Some(SignatureHelpInfo {
        signatures,
        active_signature,
    })
}
/// Returns the innermost call expression that contains the specified offset
/// and the index of the argument that the offset maps to.
fn get_call_expr(
    parsed: &ruff_db::parsed::ParsedModuleRef,
    offset: TextSize,
) -> Option<(&ast::ExprCall, usize)> {
    // A zero-width range at the cursor position.
    let cursor = TextRange::new(offset, offset);

    // Walk outward from the cursor to the nearest enclosing call expression.
    let found = covering_node(parsed.syntax().into(), cursor)
        .find_first(|node| matches!(node, AnyNodeRef::ExprCall(_)))
        .ok()?;

    match found.node() {
        AnyNodeRef::ExprCall(call_expr) => {
            // Work out which argument the cursor currently sits in.
            let current_arg_index = get_argument_index(call_expr, offset);
            Some((call_expr, current_arg_index))
        }
        _ => None,
    }
}
/// Determine which argument is associated with the specified offset.
/// Returns zero if not within any argument.
fn get_argument_index(call_expr: &ast::ExprCall, offset: TextSize) -> usize {
    let mut next_index = 0;
    for arg in call_expr.arguments.arguments_source_order() {
        // The cursor belongs to the first argument that ends at or after it.
        if offset <= arg.end() {
            return next_index;
        }
        next_index += 1;
    }
    // Cursor is past every argument: report the would-be next argument slot
    // (which is zero for a call with no arguments at all).
    next_index
}
/// Create signature details from `CallSignatureDetails`.
///
/// Assembles the label, documentation, per-parameter details, and the "active
/// parameter" index (the parameter that `current_arg_index` — the argument the
/// cursor is on — maps to) for one candidate signature.
fn create_signature_details_from_call_signature_details(
    db: &dyn crate::Db,
    details: &CallSignatureDetails,
    current_arg_index: usize,
) -> SignatureDetails {
    let signature_label = details.label.clone();
    let documentation = get_callable_documentation(db, details.definition);

    // Translate the argument index to parameter index using the mapping.
    let active_parameter =
        if details.argument_to_parameter_mapping.is_empty() && current_arg_index == 0 {
            // No arguments typed yet: highlight the first parameter.
            Some(0)
        } else {
            details
                .argument_to_parameter_mapping
                .get(current_arg_index)
                .and_then(|&param_index| param_index)
                // `or_else` keeps the fallback lazy (the original used an
                // eagerly-evaluated `.or({ … })` block).
                .or_else(|| {
                    // If we can't find a mapping for this argument, but we have a current
                    // argument index, use that as the active parameter if it's within bounds.
                    (current_arg_index < details.parameter_label_offsets.len())
                        .then_some(current_arg_index)
                })
        };

    // Build the parameter details before constructing the result so that
    // `signature_label` can be moved into the `label` field afterwards,
    // avoiding the redundant second clone the original performed.
    let parameters = create_parameters_from_offsets(
        &details.parameter_label_offsets,
        &signature_label,
        db,
        details.definition,
        &details.parameter_names,
    );

    SignatureDetails {
        label: signature_label,
        documentation: Some(documentation),
        parameters,
        active_parameter,
    }
}
/// Determine appropriate documentation for a callable type based on its original type.
///
/// Returns the empty string when there is no definition or no docstring.
fn get_callable_documentation(db: &dyn crate::Db, definition: Option<Definition>) -> String {
    // TODO: If the definition is located within a stub file and no docstring
    // is present, try to map the symbol to an implementation file and extract
    // the docstring from that location.
    definition
        .and_then(|definition| definition.docstring(db))
        .unwrap_or_default()
}
/// Create `ParameterDetails` objects from parameter label offsets.
///
/// `parameter_offsets` are byte ranges into `signature_label`; `parameter_names`
/// is parallel to `parameter_offsets` and is used to look up per-parameter
/// documentation extracted from the callable's docstring.
fn create_parameters_from_offsets(
    parameter_offsets: &[TextRange],
    signature_label: &str,
    db: &dyn crate::Db,
    definition: Option<Definition>,
    parameter_names: &[String],
) -> Vec<ParameterDetails> {
    // Pull per-parameter documentation out of the callable's docstring, when
    // present; otherwise fall back to an empty map.
    let param_docs = definition
        .and_then(|definition| definition.docstring(db))
        .map(|doc| get_parameter_documentation(&doc))
        .unwrap_or_default();

    parameter_offsets
        .iter()
        .enumerate()
        .map(|(index, range)| {
            // Slice this parameter's label text out of the full signature string.
            let label = signature_label
                .get(usize::from(range.start())..usize::from(range.end()))
                .unwrap_or("unknown")
                .to_string();

            // The parameter name keys the docstring-documentation lookup.
            let name = parameter_names.get(index).map(String::as_str).unwrap_or("");

            ParameterDetails {
                name: name.to_string(),
                label,
                documentation: param_docs.get(name).cloned(),
            }
        })
        .collect()
}
/// Find the active signature index from `CallSignatureDetails`.
/// The active signature is the first signature where all arguments present in the call
/// have valid mappings to parameters (i.e., none of the mappings are None).
fn find_active_signature_from_details(signature_details: &[CallSignatureDetails]) -> Option<usize> {
    let first = signature_details.first()?;

    // With no arguments to disambiguate between candidates, default to the
    // first signature.
    if first.argument_to_parameter_mapping.is_empty() {
        return Some(0);
    }

    // Prefer the first signature whose arguments all map onto parameters.
    if let Some(index) = signature_details.iter().position(|details| {
        details
            .argument_to_parameter_mapping
            .iter()
            .all(Option::is_some)
    }) {
        return Some(index);
    }

    // Otherwise fall back to the signature with the most successfully mapped
    // arguments. (`flatten` counts only the `Some` entries.)
    signature_details
        .iter()
        .enumerate()
        .max_by_key(|(_, details)| {
            details
                .argument_to_parameter_mapping
                .iter()
                .flatten()
                .count()
        })
        .map(|(index, _)| index)
}
#[cfg(test)]
mod tests {
    use crate::signature_help::SignatureHelpInfo;
    use crate::tests::{CursorTest, cursor_test};

    // NOTE: in each snippet below, `<CURSOR>` marks the offset that
    // `cursor_test` passes to the signature-help request.

    // A plain function call: one signature, docstring surfaced as documentation.
    #[test]
    fn signature_help_basic_function_call() {
        let test = cursor_test(
            r#"
def example_function(param1: str, param2: int) -> str:
"""This is a docstring for the example function.
Args:
param1: The first parameter as a string
param2: The second parameter as an integer
Returns:
A formatted string combining both parameters
"""
return f"{param1}: {param2}"
result = example_function(<CURSOR>
"#,
        );
        // Test that signature help is provided
        let result = test.signature_help().expect("Should have signature help");
        assert_eq!(result.signatures.len(), 1);
        let signature = &result.signatures[0];
        assert!(signature.label.contains("param1") && signature.label.contains("param2"));
        // Verify that the docstring is extracted and included in the documentation
        let expected_docstring = concat!(
            "This is a docstring for the example function.\n",
            " \n",
            " Args:\n",
            " param1: The first parameter as a string\n",
            " param2: The second parameter as an integer\n",
            " \n",
            " Returns:\n",
            " A formatted string combining both parameters\n",
            " "
        );
        assert_eq!(
            signature.documentation,
            Some(expected_docstring.to_string())
        );
        assert_eq!(result.active_signature, Some(0));
        assert_eq!(signature.active_parameter, Some(0));
    }

    // Method call with keyword arguments: cursor sits on `arg1=`, which maps
    // back to parameter index 0 despite being written second.
    #[test]
    fn signature_help_method_call() {
        let test = cursor_test(
            r#"
class MyClass:
def my_method(self, arg1: str, arg2: bool) -> None:
pass
obj = MyClass()
obj.my_method(arg2=True, arg1=<CURSOR>
"#,
        );
        // Test that signature help is provided for method calls
        let result = test.signature_help().expect("Should have signature help");
        assert_eq!(result.signatures.len(), 1);
        let signature = &result.signatures[0];
        assert!(signature.label.contains("arg1") && signature.label.contains("arg2"));
        assert_eq!(result.active_signature, Some(0));
        // Check the active parameter from the active signature
        if let Some(active_sig_index) = result.active_signature {
            let active_signature = &result.signatures[active_sig_index];
            assert_eq!(active_signature.active_parameter, Some(0));
        }
    }

    // The cursor is inside `inner(...)`, so help must target the inner call,
    // not the enclosing `outer(...)`.
    #[test]
    fn signature_help_nested_function_calls() {
        let test = cursor_test(
            r#"
def outer(a: int) -> int:
return a * 2
def inner(b: str) -> str:
return b.upper()
result = outer(inner(<CURSOR>
"#,
        );
        // Test that signature help focuses on the innermost function call
        let result = test.signature_help().expect("Should have signature help");
        assert_eq!(result.signatures.len(), 1);
        let signature = &result.signatures[0];
        assert!(signature.label.contains("str") || signature.label.contains("->"));
        assert_eq!(result.active_signature, Some(0));
        assert_eq!(signature.active_parameter, Some(0));
    }

    // A callee whose type is a union of two functions yields one signature
    // per union member.
    #[test]
    fn signature_help_union_callable() {
        let test = cursor_test(
            r#"
import random
def func_a(x: int) -> int:
return x
def func_b(y: str) -> str:
return y
if random.random() > 0.5:
f = func_a
else:
f = func_b
f(<CURSOR>
"#,
        );
        let result = test.signature_help().expect("Should have signature help");
        assert_eq!(result.signatures.len(), 2);
        let signature = &result.signatures[0];
        assert_eq!(signature.label, "(x: int) -> int");
        assert_eq!(signature.parameters.len(), 1);
        // Check parameter information
        let param = &signature.parameters[0];
        assert_eq!(param.label, "x: int");
        assert_eq!(param.name, "x");
        // Validate the second signature (from func_b)
        let signature_b = &result.signatures[1];
        assert_eq!(signature_b.label, "(y: str) -> str");
        assert_eq!(signature_b.parameters.len(), 1);
        // Check parameter information for the second signature
        let param_b = &signature_b.parameters[0];
        assert_eq!(param_b.label, "y: str");
        assert_eq!(param_b.name, "y");
        assert_eq!(result.active_signature, Some(0));
        // Check the active parameter from the active signature
        if let Some(active_sig_index) = result.active_signature {
            let active_signature = &result.signatures[active_sig_index];
            assert_eq!(active_signature.active_parameter, Some(0));
        }
    }

    // `@overload` declarations each contribute a signature; the implementation
    // body itself does not.
    #[test]
    fn signature_help_overloaded_function() {
        let test = cursor_test(
            r#"
from typing import overload
@overload
def process(value: int) -> str: ...
@overload
def process(value: str) -> int: ...
def process(value):
if isinstance(value, int):
return str(value)
else:
return len(value)
result = process(<CURSOR>
"#,
        );
        // Test that signature help is provided for overloaded functions
        let result = test.signature_help().expect("Should have signature help");
        // We should have signatures for the overloads
        assert_eq!(result.signatures.len(), 2);
        assert_eq!(result.active_signature, Some(0));
        // Check the active parameter from the active signature
        if let Some(active_sig_index) = result.active_signature {
            let active_signature = &result.signatures[active_sig_index];
            assert_eq!(active_signature.active_parameter, Some(0));
        }
        // Validate the first overload: process(value: int) -> str
        let signature1 = &result.signatures[0];
        assert_eq!(signature1.label, "(value: int) -> str");
        assert_eq!(signature1.parameters.len(), 1);
        let param1 = &signature1.parameters[0];
        assert_eq!(param1.label, "value: int");
        assert_eq!(param1.name, "value");
        // Validate the second overload: process(value: str) -> int
        let signature2 = &result.signatures[1];
        assert_eq!(signature2.label, "(value: str) -> int");
        assert_eq!(signature2.parameters.len(), 1);
        let param2 = &signature2.parameters[0];
        assert_eq!(param2.label, "value: str");
        assert_eq!(param2.name, "value");
    }

    // Constructor calls use the `__init__` signature and docstring (not the
    // class docstring), with the class itself as the return type.
    #[test]
    fn signature_help_class_constructor() {
        let test = cursor_test(
            r#"
class Point:
"""A simple point class representing a 2D coordinate."""
def __init__(self, x: int, y: int):
"""Initialize a point with x and y coordinates.
Args:
x: The x-coordinate
y: The y-coordinate
"""
self.x = x
self.y = y
point = Point(<CURSOR>
"#,
        );
        let result = test.signature_help().expect("Should have signature help");
        // Should have exactly one signature for the constructor
        assert_eq!(result.signatures.len(), 1);
        let signature = &result.signatures[0];
        // Validate the constructor signature
        assert_eq!(signature.label, "(x: int, y: int) -> Point");
        assert_eq!(signature.parameters.len(), 2);
        // Validate the first parameter (x: int)
        let param_x = &signature.parameters[0];
        assert_eq!(param_x.label, "x: int");
        assert_eq!(param_x.name, "x");
        assert_eq!(param_x.documentation, Some("The x-coordinate".to_string()));
        // Validate the second parameter (y: int)
        let param_y = &signature.parameters[1];
        assert_eq!(param_y.label, "y: int");
        assert_eq!(param_y.name, "y");
        assert_eq!(param_y.documentation, Some("The y-coordinate".to_string()));
        // Should have the __init__ method docstring as documentation (not the class docstring)
        let expected_docstring = "Initialize a point with x and y coordinates.\n \n Args:\n x: The x-coordinate\n y: The y-coordinate\n ";
        assert_eq!(
            signature.documentation,
            Some(expected_docstring.to_string())
        );
    }

    // Instances with `__call__` get signature help from that method.
    #[test]
    fn signature_help_callable_object() {
        let test = cursor_test(
            r#"
class Multiplier:
def __call__(self, x: int) -> int:
return x * 2
multiplier = Multiplier()
result = multiplier(<CURSOR>
"#,
        );
        let result = test.signature_help().expect("Should have signature help");
        // Should have a signature for the callable object
        assert!(!result.signatures.is_empty());
        let signature = &result.signatures[0];
        // Should provide signature help for the callable
        assert!(signature.label.contains("int") || signature.label.contains("->"));
    }

    // Calling a `Type[...]` value: a signature is produced, but no docstring
    // is available for it yet.
    #[test]
    fn signature_help_subclass_of_constructor() {
        let test = cursor_test(
            r#"
from typing import Type
def create_instance(cls: Type[list]) -> list:
return cls(<CURSOR>
"#,
        );
        let result = test.signature_help().expect("Should have signature help");
        // Should have a signature
        assert!(!result.signatures.is_empty());
        let signature = &result.signatures[0];
        // Should have empty documentation for now
        assert_eq!(signature.documentation, Some(String::new()));
    }

    // Each parameter's label must slice the correct `name: type` span out of
    // the rendered signature string.
    #[test]
    fn signature_help_parameter_label_offsets() {
        let test = cursor_test(
            r#"
def test_function(param1: str, param2: int, param3: bool) -> str:
return f"{param1}: {param2}, {param3}"
result = test_function(<CURSOR>
"#,
        );
        let result = test.signature_help().expect("Should have signature help");
        assert_eq!(result.signatures.len(), 1);
        let signature = &result.signatures[0];
        assert_eq!(signature.parameters.len(), 3);
        // Check that we have parameter labels
        for (i, param) in signature.parameters.iter().enumerate() {
            let expected_param_spec = match i {
                0 => "param1: str",
                1 => "param2: int",
                2 => "param3: bool",
                _ => panic!("Unexpected parameter index"),
            };
            assert_eq!(param.label, expected_param_spec);
        }
    }

    #[test]
    fn signature_help_active_signature_selection() {
        // This test verifies that the algorithm correctly selects the first signature
        // where all arguments present in the call have valid parameter mappings.
        let test = cursor_test(
            r#"
from typing import overload
@overload
def process(value: int) -> str: ...
@overload
def process(value: str, flag: bool) -> int: ...
def process(value, flag=None):
if isinstance(value, int):
return str(value)
elif flag is not None:
return len(value) if flag else 0
else:
return len(value)
# Call with two arguments - should select the second overload
result = process("hello", True<CURSOR>)
"#,
        );
        let result = test.signature_help().expect("Should have signature help");
        // Should have signatures for the overloads.
        assert!(!result.signatures.is_empty());
        // Check that we have an active signature and parameter
        if let Some(active_sig_index) = result.active_signature {
            let active_signature = &result.signatures[active_sig_index];
            // Cursor is on the second argument, so the second parameter is active.
            assert_eq!(active_signature.active_parameter, Some(1));
        }
    }

    // Per-parameter documentation is parsed out of the `Args:` section of the
    // function docstring.
    #[test]
    fn signature_help_parameter_documentation() {
        let test = cursor_test(
            r#"
def documented_function(param1: str, param2: int) -> str:
"""This is a function with parameter documentation.
Args:
param1: The first parameter description
param2: The second parameter description
"""
return f"{param1}: {param2}"
result = documented_function(<CURSOR>
"#,
        );
        let result = test.signature_help().expect("Should have signature help");
        assert_eq!(result.signatures.len(), 1);
        let signature = &result.signatures[0];
        assert_eq!(signature.parameters.len(), 2);
        // Check that parameter documentation is extracted
        let param1 = &signature.parameters[0];
        assert_eq!(
            param1.documentation,
            Some("The first parameter description".to_string())
        );
        let param2 = &signature.parameters[1];
        assert_eq!(
            param2.documentation,
            Some("The second parameter description".to_string())
        );
    }

    // Test-only convenience: run signature help at the test fixture's cursor.
    impl CursorTest {
        fn signature_help(&self) -> Option<SignatureHelpInfo> {
            crate::signature_help::signature_help(&self.db, self.cursor.file, self.cursor.offset)
        }
    }
}

View File

@@ -979,39 +979,6 @@ impl GlobFilterContext {
}
}
/// The diagnostic output format.
#[derive(Debug, Default, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub enum OutputFormat {
/// The default full mode will print "pretty" diagnostics.
///
/// That is, color will be used when printing to a `tty`.
/// Moreover, diagnostic messages may include additional
/// context and annotations on the input to help understand
/// the message.
#[default]
Full,
/// Print diagnostics in a concise mode.
///
/// This will guarantee that each diagnostic is printed on
/// a single line. Only the most important or primary aspects
/// of the diagnostic are included. Contextual information is
/// dropped.
///
/// This may use color when printing to a `tty`.
Concise,
}
impl From<OutputFormat> for DiagnosticFormat {
fn from(value: OutputFormat) -> Self {
match value {
OutputFormat::Full => Self::Full,
OutputFormat::Concise => Self::Concise,
}
}
}
#[derive(
Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize, OptionsMetadata,
)]
@@ -1029,7 +996,7 @@ pub struct TerminalOptions {
output-format = "concise"
"#
)]
pub output_format: Option<RangedValue<OutputFormat>>,
pub output_format: Option<RangedValue<DiagnosticFormat>>,
/// Use exit code 1 if there are any warning-level diagnostics.
///
/// Defaults to `false`.
@@ -1328,7 +1295,7 @@ pub(super) struct InnerOverrideOptions {
#[derive(Debug)]
pub struct ToSettingsError {
diagnostic: Box<OptionDiagnostic>,
output_format: OutputFormat,
output_format: DiagnosticFormat,
color: bool,
}
@@ -1342,7 +1309,7 @@ impl ToSettingsError {
impl fmt::Display for DisplayPretty<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let display_config = DisplayDiagnosticConfig::default()
.format(self.error.output_format.into())
.format(self.error.output_format)
.color(self.error.color);
write!(

View File

@@ -1,9 +1,9 @@
use std::sync::Arc;
use ruff_db::files::File;
use ruff_db::{diagnostic::DiagnosticFormat, files::File};
use ty_python_semantic::lint::RuleSelection;
use crate::metadata::options::{InnerOverrideOptions, OutputFormat};
use crate::metadata::options::InnerOverrideOptions;
use crate::{Db, combine::Combine, glob::IncludeExcludeFilter};
/// The resolved [`super::Options`] for the project.
@@ -57,7 +57,7 @@ impl Settings {
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct TerminalSettings {
pub output_format: OutputFormat,
pub output_format: DiagnosticFormat,
pub error_on_warning: bool,
}

View File

@@ -3,13 +3,9 @@ use std::str::FromStr;
use std::sync::Arc;
use ruff_db::files::File;
use ruff_python_ast::name::Name;
use ruff_python_stdlib::identifiers::is_identifier;
use super::path::SearchPath;
use crate::Db;
use crate::module_name::ModuleName;
use crate::module_resolver::path::SystemOrVendoredPathRef;
/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
@@ -89,100 +85,6 @@ impl Module {
ModuleInner::NamespacePackage { .. } => ModuleKind::Package,
}
}
/// Return a list of all submodules of this module.
///
/// Returns an empty list if the module is not a package, if it is an empty package,
/// or if it is a namespace package (one without an `__init__.py` or `__init__.pyi` file).
///
/// The names returned correspond to the "base" name of the module.
/// That is, `{self.name}.{basename}` should give the full module name.
pub fn all_submodules(&self, db: &dyn Db) -> Vec<Name> {
self.all_submodules_inner(db).unwrap_or_default()
}
fn all_submodules_inner(&self, db: &dyn Db) -> Option<Vec<Name>> {
fn is_submodule(
is_dir: bool,
is_file: bool,
basename: Option<&str>,
extension: Option<&str>,
) -> bool {
is_dir
|| (is_file
&& matches!(extension, Some("py" | "pyi"))
&& !matches!(basename, Some("__init__.py" | "__init__.pyi")))
}
// It would be complex and expensive to compute all submodules for
// namespace packages, since a namespace package doesn't correspond
// to a single file; it can span multiple directories across multiple
// search paths. For now, we only compute submodules for traditional
// packages that exist in a single directory on a single search path.
let ModuleInner::FileModule {
kind: ModuleKind::Package,
file,
..
} = &*self.inner
else {
return None;
};
let path = SystemOrVendoredPathRef::try_from_file(db, *file)?;
debug_assert!(
matches!(path.file_name(), Some("__init__.py" | "__init__.pyi")),
"expected package file `{:?}` to be `__init__.py` or `__init__.pyi`",
path.file_name(),
);
Some(match path.parent()? {
SystemOrVendoredPathRef::System(parent_directory) => db
.system()
.read_directory(parent_directory)
.inspect_err(|err| {
tracing::debug!(
"Failed to read {parent_directory:?} when looking for \
its possible submodules: {err}"
);
})
.ok()?
.flatten()
.filter(|entry| {
let ty = entry.file_type();
let path = entry.path();
is_submodule(
ty.is_directory(),
ty.is_file(),
path.file_name(),
path.extension(),
)
})
.filter_map(|entry| {
let stem = entry.path().file_stem()?;
is_identifier(stem).then(|| Name::from(stem))
})
.collect(),
SystemOrVendoredPathRef::Vendored(parent_directory) => db
.vendored()
.read_directory(parent_directory)
.into_iter()
.filter(|entry| {
let ty = entry.file_type();
let path = entry.path();
is_submodule(
ty.is_directory(),
ty.is_file(),
path.file_name(),
path.extension(),
)
})
.filter_map(|entry| {
let stem = entry.path().file_stem()?;
is_identifier(stem).then(|| Name::from(stem))
})
.collect(),
})
}
}
impl std::fmt::Debug for Module {

View File

@@ -4,12 +4,11 @@ use std::fmt;
use std::sync::Arc;
use camino::{Utf8Path, Utf8PathBuf};
use ruff_db::files::{File, FileError, FilePath, system_path_to_file, vendored_path_to_file};
use ruff_db::files::{File, FileError, system_path_to_file, vendored_path_to_file};
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_db::vendored::{VendoredPath, VendoredPathBuf};
use super::typeshed::{TypeshedVersionsParseError, TypeshedVersionsQueryResult, typeshed_versions};
use crate::Db;
use crate::module_name::ModuleName;
use crate::module_resolver::resolver::ResolverContext;
use crate::site_packages::SitePackagesDiscoveryError;
@@ -653,48 +652,6 @@ impl fmt::Display for SearchPath {
}
}
#[derive(Debug, Clone)]
pub(super) enum SystemOrVendoredPathRef<'db> {
System(&'db SystemPath),
Vendored(&'db VendoredPath),
}
impl<'db> SystemOrVendoredPathRef<'db> {
pub(super) fn try_from_file(db: &'db dyn Db, file: File) -> Option<Self> {
match file.path(db) {
FilePath::System(system) => Some(Self::System(system)),
FilePath::Vendored(vendored) => Some(Self::Vendored(vendored)),
FilePath::SystemVirtual(_) => None,
}
}
pub(super) fn file_name(&self) -> Option<&str> {
match self {
Self::System(system) => system.file_name(),
Self::Vendored(vendored) => vendored.file_name(),
}
}
pub(super) fn parent<'a>(&'a self) -> Option<SystemOrVendoredPathRef<'a>>
where
'a: 'db,
{
match self {
Self::System(system) => system.parent().map(Self::System),
Self::Vendored(vendored) => vendored.parent().map(Self::Vendored),
}
}
}
impl std::fmt::Display for SystemOrVendoredPathRef<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
SystemOrVendoredPathRef::System(system) => system.fmt(f),
SystemOrVendoredPathRef::Vendored(vendored) => vendored.fmt(f),
}
}
}
#[cfg(test)]
mod tests {
use ruff_db::Db;

View File

@@ -8,7 +8,7 @@ use rustc_hash::{FxBuildHasher, FxHashSet};
use ruff_db::files::{File, FilePath, FileRootKind};
use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::vendored::{VendoredFileSystem, VendoredPath};
use ruff_python_ast::PythonVersion;
use crate::db::Db;
@@ -17,7 +17,7 @@ use crate::module_resolver::typeshed::{TypeshedVersions, vendored_typeshed_versi
use crate::{Program, SearchPathSettings};
use super::module::{Module, ModuleKind};
use super::path::{ModulePath, SearchPath, SearchPathValidationError, SystemOrVendoredPathRef};
use super::path::{ModulePath, SearchPath, SearchPathValidationError};
/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: &ModuleName) -> Option<Module> {
@@ -77,6 +77,21 @@ pub(crate) fn path_to_module(db: &dyn Db, path: &FilePath) -> Option<Module> {
file_to_module(db, file)
}
#[derive(Debug, Clone, Copy)]
enum SystemOrVendoredPathRef<'a> {
System(&'a SystemPath),
Vendored(&'a VendoredPath),
}
impl std::fmt::Display for SystemOrVendoredPathRef<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
SystemOrVendoredPathRef::System(system) => system.fmt(f),
SystemOrVendoredPathRef::Vendored(vendored) => vendored.fmt(f),
}
}
}
/// Resolves the module for the file with the given id.
///
/// Returns `None` if the file is not a module locatable via any of the known search paths.
@@ -84,7 +99,11 @@ pub(crate) fn path_to_module(db: &dyn Db, path: &FilePath) -> Option<Module> {
pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
let _span = tracing::trace_span!("file_to_module", ?file).entered();
let path = SystemOrVendoredPathRef::try_from_file(db, file)?;
let path = match file.path(db) {
FilePath::System(system) => SystemOrVendoredPathRef::System(system),
FilePath::Vendored(vendored) => SystemOrVendoredPathRef::Vendored(vendored),
FilePath::SystemVirtual(_) => return None,
};
let module_name = search_paths(db).find_map(|candidate| {
let relative_path = match path {

View File

@@ -23,7 +23,6 @@ use crate::semantic_index::place::{
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, PlaceExpr, PlaceTable, Scope, ScopeId,
ScopeKind, ScopedPlaceId,
};
use crate::semantic_index::reachability_constraints::ScopedReachabilityConstraintId;
use crate::semantic_index::use_def::{EagerSnapshotKey, ScopedEagerSnapshotId, UseDefMap};
use crate::util::get_size::untracked_arc_size;
@@ -212,9 +211,6 @@ pub(crate) struct SemanticIndex<'db> {
/// Map from a standalone expression to its [`Expression`] ingredient.
expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,
/// Tracks whether or not a given AST node is reachable from the start of the scope.
node_reachability: FxHashMap<NodeKey, ScopedReachabilityConstraintId>,
/// Map from nodes that create a scope to the scope they create.
scopes_by_node: FxHashMap<NodeWithScopeKey, FileScopeId>,
@@ -368,15 +364,8 @@ impl<'db> SemanticIndex<'db> {
scope_id: FileScopeId,
node_key: NodeKey,
) -> bool {
if !self.is_scope_reachable(db, scope_id) {
return false;
}
let constraint = *self.node_reachability.get(&node_key).expect(
"`is_node_reachable` should only be called on AST nodes with recorded reachability",
);
self.use_def_map(scope_id).is_node_reachable(db, constraint)
self.is_scope_reachable(db, scope_id)
&& self.use_def_map(scope_id).is_node_reachable(db, node_key)
}
/// Returns an iterator over the descendent scopes of `scope`.

View File

@@ -20,8 +20,8 @@ use crate::ast_node_ref::AstNodeRef;
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::node_key::NodeKey;
use crate::semantic_index::ast_ids::AstIdsBuilder;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::{AstIdsBuilder, ScopedUseId};
use crate::semantic_index::definition::{
AnnotatedAssignmentDefinitionNodeRef, AssignmentDefinitionNodeRef,
ComprehensionDefinitionNodeRef, Definition, DefinitionCategory, DefinitionNodeKey,
@@ -83,6 +83,8 @@ pub(super) struct SemanticIndexBuilder<'db, 'ast> {
current_match_case: Option<CurrentMatchCase<'ast>>,
/// The name of the first function parameter of the innermost function that we're currently visiting.
current_first_parameter_name: Option<&'ast str>,
/// Functions defined in the current scope. We walk their bodies at the end of the scope.
deferred_function_bodies: Vec<&'ast ast::StmtFunctionDef>,
/// Per-scope contexts regarding nested `try`/`except` statements
try_node_context_stack_manager: TryNodeContextStackManager,
@@ -105,8 +107,6 @@ pub(super) struct SemanticIndexBuilder<'db, 'ast> {
scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,
definitions_by_node: FxHashMap<DefinitionNodeKey, Definitions<'db>>,
expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,
/// Tracks whether or not a given AST node is reachable from the start of the scope.
node_reachability: FxHashMap<NodeKey, ScopedReachabilityConstraintId>,
imported_modules: FxHashSet<ModuleName>,
/// Hashset of all [`FileScopeId`]s that correspond to [generator functions].
///
@@ -128,6 +128,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
current_assignments: vec![],
current_match_case: None,
current_first_parameter_name: None,
deferred_function_bodies: Vec::new(),
try_node_context_stack_manager: TryNodeContextStackManager::default(),
has_future_annotations: false,
@@ -142,7 +143,6 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
scopes_by_node: FxHashMap::default(),
definitions_by_node: FxHashMap::default(),
expressions_by_node: FxHashMap::default(),
node_reachability: FxHashMap::default(),
imported_modules: FxHashSet::default(),
generator_functions: FxHashSet::default(),
@@ -667,19 +667,6 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
.record_reachability_constraint(negated_constraint);
}
fn record_node_reachability(&mut self, node: NodeKey) {
self.node_reachability
.insert(node, self.current_use_def_map().reachability);
}
fn record_use(&mut self, place: ScopedPlaceId, use_id: ScopedUseId, node_key: NodeKey) {
self.current_use_def_map_mut().record_use(place, use_id);
// Track reachability of all uses of places to silence `unresolved-reference`
// diagnostics in unreachable code.
self.record_node_reachability(node_key);
}
fn push_assignment(&mut self, assignment: CurrentAssignment<'ast, 'db>) {
self.current_assignments.push(assignment);
}
@@ -1023,8 +1010,83 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
}
}
fn visit_function_body(&mut self, function_def: &'ast ast::StmtFunctionDef) {
let ast::StmtFunctionDef {
parameters,
type_params,
returns,
body,
..
} = function_def;
self.with_type_params(
NodeWithScopeRef::FunctionTypeParameters(function_def),
type_params.as_deref(),
|builder| {
builder.visit_parameters(parameters);
if let Some(returns) = returns {
builder.visit_annotation(returns);
}
builder.push_scope(NodeWithScopeRef::Function(function_def));
builder.declare_parameters(parameters);
let mut first_parameter_name = parameters
.iter_non_variadic_params()
.next()
.map(|first_param| first_param.parameter.name.id().as_str());
std::mem::swap(
&mut builder.current_first_parameter_name,
&mut first_parameter_name,
);
builder.visit_scoped_body(body);
builder.current_first_parameter_name = first_parameter_name;
builder.pop_scope()
},
);
}
/// Walk the body of a scope, either the global scope or a function scope.
///
/// When we encounter a (top-level or nested) function definition, we add the function's name
/// to the current scope, but we defer walking its body until the end. (See the `FunctionDef`
/// branch of `visit_stmt`.) This deferred approach is necessary to be able to check `nonlocal`
/// statements as we encounter them, for example:
///
/// ```py
/// def f():
/// def g():
/// nonlocal x # allowed
/// nonlocal y # SyntaxError: no binding for nonlocal 'y' found
/// x = 1
/// ```
///
/// See the comments in the `Nonlocal` branch of `visit_stmt`, which relies on this binding
/// information being present.
fn visit_scoped_body(&mut self, body: &'ast [ast::Stmt]) {
debug_assert!(
self.deferred_function_bodies.is_empty(),
"every function starts with a clean scope",
);
// If this scope contains function definitions, they'll be added to
// `self.deferred_function_bodies` as we walk each statement.
self.visit_body(body);
// Now that we've walked all the statements in this scope, walk any deferred function
// bodies. This is recursive, so we need to clear out the contents of
// `self.deferred_function_bodies` and give each function a fresh list (or else we'll fail
// the `debug_assert!` above).
let taken_deferred_function_bodies = std::mem::take(&mut self.deferred_function_bodies);
for function_def in taken_deferred_function_bodies {
self.visit_function_body(function_def);
}
}
pub(super) fn build(mut self) -> SemanticIndex<'db> {
self.visit_body(self.module.suite());
self.visit_scoped_body(self.module.suite());
// Pop the root scope
self.pop_scope();
@@ -1056,7 +1118,6 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
ast_ids.shrink_to_fit();
self.scopes_by_expression.shrink_to_fit();
self.definitions_by_node.shrink_to_fit();
self.node_reachability.shrink_to_fit();
self.scope_ids_by_scope.shrink_to_fit();
self.scopes_by_node.shrink_to_fit();
@@ -1072,7 +1133,6 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
ast_ids,
scopes_by_expression: self.scopes_by_expression,
scopes_by_node: self.scopes_by_node,
node_reachability: self.node_reachability,
use_def_maps,
imported_modules: Arc::new(self.imported_modules),
has_future_annotations: self.has_future_annotations,
@@ -1103,46 +1163,19 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
let ast::StmtFunctionDef {
decorator_list,
parameters,
type_params,
name,
returns,
body,
is_async: _,
range: _,
node_index: _,
..
} = function_def;
// Like Ruff, we don't walk the body of the function here. Instead, we defer it to
// the end of the current scope. See `visit_scoped_body`. See also the comments in
// the `Nonlocal` branch below about why this deferred visit order is necessary.
self.deferred_function_bodies.push(function_def);
for decorator in decorator_list {
self.visit_decorator(decorator);
}
self.with_type_params(
NodeWithScopeRef::FunctionTypeParameters(function_def),
type_params.as_deref(),
|builder| {
builder.visit_parameters(parameters);
if let Some(returns) = returns {
builder.visit_annotation(returns);
}
builder.push_scope(NodeWithScopeRef::Function(function_def));
builder.declare_parameters(parameters);
let mut first_parameter_name = parameters
.iter_non_variadic_params()
.next()
.map(|first_param| first_param.parameter.name.id().as_str());
std::mem::swap(
&mut builder.current_first_parameter_name,
&mut first_parameter_name,
);
builder.visit_body(body);
builder.current_first_parameter_name = first_parameter_name;
builder.pop_scope()
},
);
// The default value of the parameters needs to be evaluated in the
// enclosing scope.
for default in parameters
@@ -1163,7 +1196,8 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
// AST uses.
self.mark_place_used(symbol);
let use_id = self.current_ast_ids().record_use(name);
self.record_use(symbol, use_id, NodeKey::from_node(name));
self.current_use_def_map_mut()
.record_use(symbol, use_id, NodeKey::from_node(name));
self.add_definition(symbol, function_def);
}
@@ -1213,7 +1247,8 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
);
}
ast::Stmt::Import(node) => {
self.record_node_reachability(NodeKey::from_node(node));
self.current_use_def_map_mut()
.record_node_reachability(NodeKey::from_node(node));
for (alias_index, alias) in node.names.iter().enumerate() {
// Mark the imported module, and all of its parents, as being imported in this
@@ -1240,7 +1275,8 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
}
}
ast::Stmt::ImportFrom(node) => {
self.record_node_reachability(NodeKey::from_node(node));
self.current_use_def_map_mut()
.record_node_reachability(NodeKey::from_node(node));
let mut found_star = false;
for (alias_index, alias) in node.names.iter().enumerate() {
@@ -1927,10 +1963,11 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
names,
}) => {
for name in names {
let symbol_id = self.add_symbol(name.id.clone());
let symbol = self.current_place_table().place_expr(symbol_id);
let local_scoped_place_id = self.add_symbol(name.id.clone());
let local_place = self.current_place_table().place_expr(local_scoped_place_id);
// Check whether the variable has already been accessed in this scope.
if symbol.is_bound() || symbol.is_declared() || symbol.is_used() {
if local_place.is_bound() || local_place.is_declared() || local_place.is_used()
{
self.report_semantic_error(SemanticSyntaxError {
kind: SemanticSyntaxErrorKind::LoadBeforeNonlocalDeclaration {
name: name.to_string(),
@@ -1941,24 +1978,79 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
});
}
// Check whether the variable has also been declared global.
if symbol.is_marked_global() {
if local_place.is_marked_global() {
self.report_semantic_error(SemanticSyntaxError {
kind: SemanticSyntaxErrorKind::NonlocalAndGlobal(name.to_string()),
range: name.range,
python_version: self.python_version,
});
}
// The variable is required to exist in an enclosing scope, but that definition
// might come later. For example, this is example legal, but we can't check
// that here, because we haven't gotten to `x = 1`:
// The name is required to exist in an enclosing scope, but that definition
// might come later. For example, this is example legal:
//
// ```py
// def f():
// def g():
// nonlocal x
// x = 1
// ```
self.current_place_table_mut()
.mark_place_nonlocal(symbol_id);
//
// To handle cases like this, we have to walk `x = 1` before we walk `nonlocal
// x`. In other words, walking function bodies must be "deferred" to the end of
// the scope where they're defined. See the `FunctionDef` branch above.
let name_expr = PlaceExpr::name(name.id.clone());
let mut found_matching_definition = false;
for enclosing_scope_info in self.scope_stack.iter().rev().skip(1) {
let enclosing_scope = &self.scopes[enclosing_scope_info.file_scope_id];
if !enclosing_scope.kind().is_function_like() {
// Skip over class scopes and the global scope.
continue;
}
let enclosing_place_table =
&self.place_tables[enclosing_scope_info.file_scope_id];
let Some(enclosing_scoped_place_id) =
enclosing_place_table.place_id_by_expr(&name_expr)
else {
// This name isn't defined in this scope. Keep going.
continue;
};
let enclosing_place =
enclosing_place_table.place_expr(enclosing_scoped_place_id);
// We've found a definition for this name in an enclosing function-like
// scope. Either this definition is the valid place this name refers to, or
// else we'll emit a syntax error. Either way, we won't walk any more
// enclosing scopes. Note that there are differences here compared to
// `infer_place_load`: A regular load (e.g. `print(x)`) is allowed to refer
// to a global variable (e.g. `x = 1` in the global scope), and similarly
// it's allowed to refer to a variable in an enclosing function that's
// declared `global` (e.g. `global x`). However, the `nonlocal` keyword
// can't refer to global variables (that's a `SyntaxError`), and it also
// can't refer to variables in enclosing functions that are declared
// `global` (also a `SyntaxError`).
if enclosing_place.is_marked_global() {
// A "chain" of `nonlocal` statements is "broken" by a `global`
// statement. Stop looping and report that this `nonlocal` statement is
// invalid.
break;
}
// We found a definition, and we've checked that that place isn't declared
// `global` in its scope, but it's ok if it's `nonlocal`. If a chain of
// `nonlocal` statements fails to lead to a valid binding, the outermost
// one will be an error; we don't need to report an error for each one.
found_matching_definition = true;
self.current_place_table_mut()
.mark_place_nonlocal(local_scoped_place_id);
break;
}
if !found_matching_definition {
// There's no matching definition in an enclosing scope. This `nonlocal`
// statement is invalid.
self.report_semantic_error(SemanticSyntaxError {
kind: SemanticSyntaxErrorKind::InvalidNonlocal(name.to_string()),
range: name.range,
python_version: self.python_version,
});
}
}
walk_stmt(self, stmt);
}
@@ -2070,7 +2162,8 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
if is_use {
self.mark_place_used(place_id);
let use_id = self.current_ast_ids().record_use(expr);
self.record_use(place_id, use_id, node_key);
self.current_use_def_map_mut()
.record_use(place_id, use_id, node_key);
}
if is_definition {
@@ -2163,7 +2256,8 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
// Track reachability of attribute expressions to silence `unresolved-attribute`
// diagnostics in unreachable code.
if expr.is_attribute_expr() {
self.record_node_reachability(node_key);
self.current_use_def_map_mut()
.record_node_reachability(node_key);
}
walk_expr(self, expr);
@@ -2324,7 +2418,8 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
ast::Expr::StringLiteral(_) => {
// Track reachability of string literals, as they could be a stringified annotation
// with child expressions whose reachability we are interested in.
self.record_node_reachability(node_key);
self.current_use_def_map_mut()
.record_node_reachability(node_key);
walk_expr(self, expr);
}

View File

@@ -1,7 +1,7 @@
use std::ops::Deref;
use ruff_db::files::{File, FileRange};
use ruff_db::parsed::{ParsedModuleRef, parsed_module};
use ruff_db::parsed::ParsedModuleRef;
use ruff_python_ast as ast;
use ruff_text_size::{Ranged, TextRange};
@@ -57,45 +57,6 @@ impl<'db> Definition<'db> {
/// The range of this definition's target within its file, packaged as a
/// [`FileRange`] (see `DefinitionKind::target_range` for what the target is).
pub fn focus_range(self, db: &'db dyn Db, module: &ParsedModuleRef) -> FileRange {
    let target_range = self.kind(db).target_range(module);
    FileRange::new(self.file(db), target_range)
}
/// Extract a docstring from this definition, if applicable.
/// This method returns a docstring for function and class definitions.
/// The docstring is extracted from the first statement in the body if it's a string literal.
pub fn docstring(self, db: &'db dyn Db) -> Option<String> {
let file = self.file(db);
let module = parsed_module(db, file).load(db);
let kind = self.kind(db);
match kind {
DefinitionKind::Function(function_def) => {
let function_node = function_def.node(&module);
docstring_from_body(&function_node.body)
.map(|docstring_expr| docstring_expr.value.to_str().to_owned())
}
DefinitionKind::Class(class_def) => {
let class_node = class_def.node(&module);
docstring_from_body(&class_node.body)
.map(|docstring_expr| docstring_expr.value.to_str().to_owned())
}
_ => None,
}
}
}
/// If the first statement of `body` is a standalone string-literal expression,
/// return that literal (i.e. the docstring); otherwise return `None`.
fn docstring_from_body(body: &[ast::Stmt]) -> Option<&ast::ExprStringLiteral> {
    match body.first()? {
        // A docstring must be a bare expression statement whose value is a
        // plain string literal; anything else is not a docstring.
        ast::Stmt::Expr(expr_stmt) => expr_stmt.value.as_string_literal_expr(),
        _ => None,
    }
}
/// One or more [`Definition`]s.

View File

@@ -247,6 +247,7 @@ use self::place_state::{
Bindings, Declarations, EagerSnapshot, LiveBindingsIterator, LiveDeclaration,
LiveDeclarationsIterator, PlaceState, ScopedDefinitionId,
};
use crate::node_key::NodeKey;
use crate::place::BoundnessAnalysis;
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::{Definition, DefinitionState};
@@ -287,6 +288,9 @@ pub(crate) struct UseDefMap<'db> {
/// [`Bindings`] reaching a [`ScopedUseId`].
bindings_by_use: IndexVec<ScopedUseId, Bindings>,
/// Tracks whether or not a given AST node is reachable from the start of the scope.
node_reachability: FxHashMap<NodeKey, ScopedReachabilityConstraintId>,
/// If the definition is a binding (only) -- `x = 1` for example -- then we need
/// [`Declarations`] to know whether this binding is permitted by the live declarations.
///
@@ -398,13 +402,17 @@ impl<'db> UseDefMap<'db> {
/// be unreachable. Use [`super::SemanticIndex::is_node_reachable`] for the global
/// analysis.
#[track_caller]
pub(super) fn is_node_reachable(
&self,
db: &dyn crate::Db,
constraint: ScopedReachabilityConstraintId,
) -> bool {
self.reachability_constraints
.evaluate(db, &self.predicates, constraint)
pub(super) fn is_node_reachable(&self, db: &dyn crate::Db, node_key: NodeKey) -> bool {
self
.reachability_constraints
.evaluate(
db,
&self.predicates,
*self
.node_reachability
.get(&node_key)
.expect("`is_node_reachable` should only be called on AST nodes with recorded reachability"),
)
.may_be_true()
}
@@ -733,6 +741,9 @@ pub(super) struct UseDefMapBuilder<'db> {
/// start of the scope.
pub(super) reachability: ScopedReachabilityConstraintId,
/// Tracks whether or not a given AST node is reachable from the start of the scope.
node_reachability: FxHashMap<NodeKey, ScopedReachabilityConstraintId>,
/// Live declarations for each so-far-recorded binding.
declarations_by_binding: FxHashMap<Definition<'db>, Declarations>,
@@ -762,6 +773,7 @@ impl<'db> UseDefMapBuilder<'db> {
reachability_constraints: ReachabilityConstraintsBuilder::default(),
bindings_by_use: IndexVec::new(),
reachability: ScopedReachabilityConstraintId::ALWAYS_TRUE,
node_reachability: FxHashMap::default(),
declarations_by_binding: FxHashMap::default(),
bindings_by_definition: FxHashMap::default(),
place_states: IndexVec::new(),
@@ -988,13 +1000,26 @@ impl<'db> UseDefMapBuilder<'db> {
);
}
pub(super) fn record_use(&mut self, place: ScopedPlaceId, use_id: ScopedUseId) {
/// Record a read of `place` at the use identified by `use_id`/`node_key`.
///
/// Snapshots the currently-live bindings for the place so later queries can
/// resolve what this use might see, and records the node's reachability.
pub(super) fn record_use(
    &mut self,
    place: ScopedPlaceId,
    use_id: ScopedUseId,
    node_key: NodeKey,
) {
    // A place is being read here: clone its currently-live bindings and store
    // them as the bindings visible at this use.
    let live_bindings = self.place_states[place].bindings().clone();
    let assigned_use_id = self.bindings_by_use.push(live_bindings);
    debug_assert_eq!(use_id, assigned_use_id);
    // Track reachability of all uses of places to silence `unresolved-reference`
    // diagnostics in unreachable code.
    self.record_node_reachability(node_key);
}
/// Associate `node_key` with the reachability constraint currently in effect,
/// i.e. whether control flow can reach this point from the start of the scope.
/// Later insertions for the same key overwrite earlier ones.
pub(super) fn record_node_reachability(&mut self, node_key: NodeKey) {
    self.node_reachability.insert(node_key, self.reachability);
}
pub(super) fn snapshot_eager_state(
@@ -1098,6 +1123,7 @@ impl<'db> UseDefMapBuilder<'db> {
self.place_states.shrink_to_fit();
self.reachable_definitions.shrink_to_fit();
self.bindings_by_use.shrink_to_fit();
self.node_reachability.shrink_to_fit();
self.declarations_by_binding.shrink_to_fit();
self.bindings_by_definition.shrink_to_fit();
self.eager_snapshots.shrink_to_fit();
@@ -1108,6 +1134,7 @@ impl<'db> UseDefMapBuilder<'db> {
narrowing_constraints: self.narrowing_constraints.build(),
reachability_constraints: self.reachability_constraints.build(),
bindings_by_use: self.bindings_by_use,
node_reachability: self.node_reachability,
end_of_scope_places: self.place_states,
reachable_definitions: self.reachable_definitions,
declarations_by_binding: self.declarations_by_binding,

View File

@@ -69,29 +69,14 @@ impl<'db> SemanticModel<'db> {
};
let ty = Type::module_literal(self.db, self.file, &module);
let builtin = module.is_known(KnownModule::Builtins);
let mut completions = vec![];
for crate::types::Member { name, ty } in crate::types::all_members(self.db, ty) {
completions.push(Completion { name, ty, builtin });
}
for submodule_basename in module.all_submodules(self.db) {
let Some(basename) = ModuleName::new(submodule_basename.as_str()) else {
continue;
};
let mut submodule_name = module_name.clone();
submodule_name.extend(&basename);
let Some(submodule) = resolve_module(self.db, &submodule_name) else {
continue;
};
let ty = Type::module_literal(self.db, self.file, &submodule);
completions.push(Completion {
name: submodule_basename,
ty,
crate::types::all_members(self.db, ty)
.into_iter()
.map(|member| Completion {
name: member.name,
ty: member.ty,
builtin,
});
}
completions
})
.collect()
}
/// Returns completions for symbols available in a `object.<CURSOR>` context.

View File

@@ -46,9 +46,7 @@ use crate::types::generics::{
GenericContext, PartialSpecialization, Specialization, walk_generic_context,
walk_partial_specialization, walk_specialization,
};
pub use crate::types::ide_support::{
CallSignatureDetails, Member, all_members, call_signature_details, definition_kind_for_name,
};
pub use crate::types::ide_support::{all_members, definition_kind_for_name};
use crate::types::infer::infer_unpack_types;
use crate::types::mro::{Mro, MroError, MroIterator};
pub(crate) use crate::types::narrow::infer_narrowing_constraint;
@@ -4995,7 +4993,7 @@ impl<'db> Type<'db> {
TypeVarKind::Legacy,
)))
}
SpecialFormType::TypeAlias => Ok(Type::Dynamic(DynamicType::TodoTypeAlias)),
SpecialFormType::TypeAlias => Ok(todo_type!("Support for `typing.TypeAlias`")),
SpecialFormType::TypedDict => Ok(todo_type!("Support for `typing.TypedDict`")),
SpecialFormType::Literal
@@ -5880,9 +5878,6 @@ pub enum DynamicType {
/// A special Todo-variant for PEP-695 `ParamSpec` types. A temporary variant to detect and special-
/// case the handling of these types in `Callable` annotations.
TodoPEP695ParamSpec,
/// A special Todo-variant for type aliases declared using `typing.TypeAlias`.
/// A temporary variant to detect and special-case the handling of these aliases in autocomplete suggestions.
TodoTypeAlias,
}
impl DynamicType {
@@ -5907,13 +5902,6 @@ impl std::fmt::Display for DynamicType {
f.write_str("@Todo")
}
}
DynamicType::TodoTypeAlias => {
if cfg!(debug_assertions) {
f.write_str("@Todo(Support for `typing.TypeAlias`)")
} else {
f.write_str("@Todo")
}
}
}
}
}

View File

@@ -3,7 +3,7 @@ use super::{Signature, Type};
use crate::Db;
mod arguments;
pub(crate) mod bind;
mod bind;
pub(super) use arguments::{Argument, CallArgumentTypes, CallArguments};
pub(super) use bind::{Binding, Bindings, CallableBinding};

View File

@@ -2,7 +2,6 @@ use std::borrow::Cow;
use std::ops::{Deref, DerefMut};
use itertools::{Either, Itertools};
use ruff_python_ast as ast;
use crate::Db;
use crate::types::KnownClass;
@@ -15,26 +14,6 @@ use super::Type;
pub(crate) struct CallArguments<'a>(Vec<Argument<'a>>);
impl<'a> CallArguments<'a> {
/// Create `CallArguments` from AST arguments
pub(crate) fn from_arguments(arguments: &'a ast::Arguments) -> Self {
arguments
.arguments_source_order()
.map(|arg_or_keyword| match arg_or_keyword {
ast::ArgOrKeyword::Arg(arg) => match arg {
ast::Expr::Starred(ast::ExprStarred { .. }) => Argument::Variadic,
_ => Argument::Positional,
},
ast::ArgOrKeyword::Keyword(ast::Keyword { arg, .. }) => {
if let Some(arg) = arg {
Argument::Keyword(&arg.id)
} else {
Argument::Keywords
}
}
})
.collect()
}
/// Prepend an optional extra synthetic argument (for a `self` or `cls` parameter) to the front
/// of this argument list. (If `bound_self` is none, we return the argument list
/// unmodified.)

View File

@@ -2109,7 +2109,7 @@ impl<'db> Binding<'db> {
}
}
pub(crate) fn match_parameters(
fn match_parameters(
&mut self,
arguments: &CallArguments<'_>,
argument_forms: &mut [Option<ParameterForm>],
@@ -2267,12 +2267,6 @@ impl<'db> Binding<'db> {
self.parameter_tys = parameter_tys;
self.errors = errors;
}
/// Returns a vector where each index corresponds to an argument position,
/// and the value is the parameter index that argument maps to (if any).
pub(crate) fn argument_to_parameter_mapping(&self) -> &[Option<usize>] {
&self.argument_parameters
}
}
#[derive(Clone, Debug)]

View File

@@ -678,7 +678,6 @@ impl<'db> ClassType<'db> {
if let Some(signature) = signature {
let synthesized_signature = |signature: &Signature<'db>| {
Signature::new(signature.parameters().clone(), Some(correct_return_type))
.with_definition(signature.definition())
.bind_self()
};

View File

@@ -48,11 +48,7 @@ impl<'db> ClassBase<'db> {
ClassBase::Class(class) => class.name(db),
ClassBase::Dynamic(DynamicType::Any) => "Any",
ClassBase::Dynamic(DynamicType::Unknown) => "Unknown",
ClassBase::Dynamic(
DynamicType::Todo(_)
| DynamicType::TodoPEP695ParamSpec
| DynamicType::TodoTypeAlias,
) => "@Todo",
ClassBase::Dynamic(DynamicType::Todo(_) | DynamicType::TodoPEP695ParamSpec) => "@Todo",
ClassBase::Protocol => "Protocol",
ClassBase::Generic => "Generic",
}

View File

@@ -5,7 +5,6 @@ use std::fmt::{self, Display, Formatter, Write};
use ruff_db::display::FormatterJoinExtension;
use ruff_python_ast::str::{Quote, TripleQuotes};
use ruff_python_literal::escape::AsciiEscape;
use ruff_text_size::{TextRange, TextSize};
use crate::types::class::{ClassLiteral, ClassType, GenericAlias};
use crate::types::function::{FunctionType, OverloadLiteral};
@@ -558,193 +557,46 @@ pub(crate) struct DisplaySignature<'db> {
db: &'db dyn Db,
}
impl DisplaySignature<'_> {
/// Get detailed display information including component ranges
pub(crate) fn to_string_parts(&self) -> SignatureDisplayDetails {
let mut writer = SignatureWriter::Details(SignatureDetailsWriter::new());
self.write_signature(&mut writer).unwrap();
match writer {
SignatureWriter::Details(details) => details.finish(),
SignatureWriter::Formatter(_) => unreachable!("Expected Details variant"),
}
}
/// Internal method to write signature with the signature writer
fn write_signature(&self, writer: &mut SignatureWriter) -> fmt::Result {
// Opening parenthesis
writer.write_char('(')?;
impl Display for DisplaySignature<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.write_char('(')?;
if self.parameters.is_gradual() {
// We represent gradual form as `...` in the signature, internally the parameters still
// contain `(*args, **kwargs)` parameters.
writer.write_str("...")?;
f.write_str("...")?;
} else {
let mut star_added = false;
let mut needs_slash = false;
let mut first = true;
let mut join = f.join(", ");
for parameter in self.parameters.as_slice() {
// Handle special separators
if !star_added && parameter.is_keyword_only() {
if !first {
writer.write_str(", ")?;
}
writer.write_char('*')?;
join.entry(&'*');
star_added = true;
first = false;
}
if parameter.is_positional_only() {
needs_slash = true;
} else if needs_slash {
if !first {
writer.write_str(", ")?;
}
writer.write_char('/')?;
join.entry(&'/');
needs_slash = false;
first = false;
}
// Add comma before parameter if not first
if !first {
writer.write_str(", ")?;
}
// Write parameter with range tracking
let param_name = parameter.display_name();
writer.write_parameter(&parameter.display(self.db), param_name.as_deref())?;
first = false;
join.entry(&parameter.display(self.db));
}
if needs_slash {
if !first {
writer.write_str(", ")?;
}
writer.write_char('/')?;
join.entry(&'/');
}
join.finish()?;
}
// Closing parenthesis
writer.write_char(')')?;
// Return type
let return_ty = self.return_ty.unwrap_or_else(Type::unknown);
writer.write_return_type(&return_ty.display(self.db))?;
Ok(())
write!(
f,
") -> {}",
self.return_ty.unwrap_or(Type::unknown()).display(self.db)
)
}
}
impl Display for DisplaySignature<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let mut writer = SignatureWriter::Formatter(f);
self.write_signature(&mut writer)
}
}
/// Writer for building signature strings with different output targets.
/// Both variants are driven by the same rendering routine so the plain
/// `Display` output and the range-tracked output cannot drift apart.
enum SignatureWriter<'a, 'b> {
    /// Write directly to a formatter (for the `Display` trait).
    Formatter(&'a mut Formatter<'b>),
    /// Build a string with range tracking (for `to_string_parts`).
    Details(SignatureDetailsWriter),
}
/// Writer that builds a string with range tracking.
struct SignatureDetailsWriter {
    // The signature text accumulated so far.
    label: String,
    // Span of each parameter within `label`, in insertion order.
    parameter_ranges: Vec<TextRange>,
    // Display name of each parameter; parallel to `parameter_ranges`
    // (an empty string is pushed for unnamed parameters).
    parameter_names: Vec<String>,
}
impl SignatureDetailsWriter {
fn new() -> Self {
Self {
label: String::new(),
parameter_ranges: Vec::new(),
parameter_names: Vec::new(),
}
}
fn finish(self) -> SignatureDisplayDetails {
SignatureDisplayDetails {
label: self.label,
parameter_ranges: self.parameter_ranges,
parameter_names: self.parameter_names,
}
}
}
impl SignatureWriter<'_, '_> {
    /// Append a single character to the output.
    fn write_char(&mut self, c: char) -> fmt::Result {
        match self {
            SignatureWriter::Formatter(f) => f.write_char(c),
            SignatureWriter::Details(details) => {
                details.label.push(c);
                Ok(())
            }
        }
    }

    /// Append a string slice to the output.
    fn write_str(&mut self, s: &str) -> fmt::Result {
        match self {
            SignatureWriter::Formatter(f) => f.write_str(s),
            SignatureWriter::Details(details) => {
                details.label.push_str(s);
                Ok(())
            }
        }
    }

    /// Append one parameter, additionally recording its range within the label
    /// (and its name, when known) in the `Details` variant.
    fn write_parameter<T: Display>(&mut self, param: &T, param_name: Option<&str>) -> fmt::Result {
        match self {
            SignatureWriter::Formatter(f) => param.fmt(f),
            SignatureWriter::Details(details) => {
                let rendered = param.to_string();
                // `TextSize::try_from` is the safe conversion; fall back to an
                // empty range on overflow rather than panicking.
                let start = TextSize::try_from(details.label.len()).unwrap_or_default();
                let length = TextSize::try_from(rendered.len()).unwrap_or_default();
                details.label.push_str(&rendered);
                details.parameter_ranges.push(TextRange::at(start, length));
                // Keep `parameter_names` parallel to `parameter_ranges`: push an
                // empty string when the parameter has no display name.
                details
                    .parameter_names
                    .push(param_name.map(str::to_string).unwrap_or_default());
                Ok(())
            }
        }
    }

    /// Append the ` -> <type>` return-type suffix to the output.
    fn write_return_type<T: Display>(&mut self, return_ty: &T) -> fmt::Result {
        match self {
            SignatureWriter::Formatter(f) => write!(f, " -> {return_ty}"),
            SignatureWriter::Details(details) => {
                let return_display = format!(" -> {return_ty}");
                details.label.push_str(&return_display);
                Ok(())
            }
        }
    }
}
/// Details about signature display components, including ranges for parameters
/// and return type.
#[derive(Debug, Clone)]
pub(crate) struct SignatureDisplayDetails {
    /// The full signature string.
    pub label: String,
    /// Ranges for each parameter within the label.
    pub parameter_ranges: Vec<TextRange>,
    /// Names of the parameters in order; parallel to `parameter_ranges`
    /// (empty string for an unnamed parameter).
    pub parameter_names: Vec<String>,
}
impl<'db> Parameter<'db> {
fn display(&'db self, db: &'db dyn Db) -> DisplayParameter<'db> {
DisplayParameter { param: self, db }

View File

@@ -1,20 +1,16 @@
use std::cmp::Ordering;
use crate::place::{Place, imported_symbol, place_from_bindings, place_from_declarations};
use crate::semantic_index::definition::Definition;
use crate::semantic_index::definition::DefinitionKind;
use crate::semantic_index::place::ScopeId;
use crate::semantic_index::{
attribute_scopes, global_scope, place_table, semantic_index, use_def_map,
};
use crate::types::call::CallArguments;
use crate::types::signatures::Signature;
use crate::types::{ClassBase, ClassLiteral, DynamicType, KnownClass, KnownInstanceType, Type};
use crate::{Db, HasType, NameKind, SemanticModel};
use crate::types::{ClassBase, ClassLiteral, KnownClass, KnownInstanceType, Type};
use crate::{Db, NameKind};
use ruff_db::files::File;
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
use ruff_text_size::TextRange;
use rustc_hash::FxHashSet;
pub(crate) fn all_declarations_and_bindings<'db>(
@@ -181,7 +177,6 @@ impl<'db> AllMembers<'db> {
KnownClass::TypeVar
| KnownClass::TypeVarTuple
| KnownClass::ParamSpec
| KnownClass::UnionType
)
) =>
{
@@ -191,7 +186,6 @@ impl<'db> AllMembers<'db> {
Type::KnownInstance(
KnownInstanceType::TypeVar(_) | KnownInstanceType::TypeAliasType(_),
) => continue,
Type::Dynamic(DynamicType::TodoTypeAlias) => continue,
_ => {}
}
}
@@ -359,73 +353,3 @@ pub fn definition_kind_for_name<'db>(
None
}
/// Details about a callable signature for IDE support.
#[derive(Debug, Clone)]
pub struct CallSignatureDetails<'db> {
/// The signature itself
pub signature: Signature<'db>,
/// The display label for this signature (e.g., "(param1: str, param2: int) -> str")
pub label: String,
/// Label offsets for each parameter in the signature string.
/// Each range specifies the start position and length of a parameter label
/// within the full signature string.
pub parameter_label_offsets: Vec<TextRange>,
/// The names of the parameters in the signature, in order.
/// This provides easy access to parameter names for documentation lookup.
pub parameter_names: Vec<String>,
/// The definition where this callable was originally defined (useful for
/// extracting docstrings).
pub definition: Option<Definition<'db>>,
/// Mapping from argument indices to parameter indices. This helps
/// determine which parameter corresponds to which argument position.
pub argument_to_parameter_mapping: Vec<Option<usize>>,
}
/// Extract signature details from a function call expression.
/// This function analyzes the callable being invoked and returns zero or more
/// `CallSignatureDetails` objects, each representing one possible signature
/// (in case of overloads or union types).
pub fn call_signature_details<'db>(
db: &'db dyn Db,
file: File,
call_expr: &ast::ExprCall,
) -> Vec<CallSignatureDetails<'db>> {
let model = SemanticModel::new(db, file);
let func_type = call_expr.func.inferred_type(&model);
// Use into_callable to handle all the complex type conversions
if let Some(callable_type) = func_type.into_callable(db) {
let call_arguments = CallArguments::from_arguments(&call_expr.arguments);
let bindings = callable_type.bindings(db).match_parameters(&call_arguments);
// Extract signature details from all callable bindings
bindings
.into_iter()
.flat_map(std::iter::IntoIterator::into_iter)
.map(|binding| {
let signature = &binding.signature;
let display_details = signature.display(db).to_string_parts();
let parameter_label_offsets = display_details.parameter_ranges.clone();
let parameter_names = display_details.parameter_names.clone();
CallSignatureDetails {
signature: signature.clone(),
label: display_details.label,
parameter_label_offsets,
parameter_names,
definition: signature.definition(),
argument_to_parameter_mapping: binding.argument_to_parameter_mapping().to_vec(),
}
})
.collect()
} else {
// Type is not callable, return empty signatures
vec![]
}
}

View File

@@ -35,7 +35,6 @@
//! be considered a bug.)
use itertools::{Either, Itertools};
use ruff_db::diagnostic::{Annotation, DiagnosticId, Severity};
use ruff_db::files::File;
use ruff_db::parsed::{ParsedModuleRef, parsed_module};
use ruff_python_ast::visitor::{Visitor, walk_expr};
@@ -85,7 +84,9 @@ use crate::semantic_index::place::{
use crate::semantic_index::{
ApplicableConstraints, EagerSnapshotResult, SemanticIndex, place_table, semantic_index,
};
use crate::types::call::{Binding, Bindings, CallArgumentTypes, CallArguments, CallError};
use crate::types::call::{
Argument, Binding, Bindings, CallArgumentTypes, CallArguments, CallError,
};
use crate::types::class::{CodeGeneratorKind, MetaclassErrorKind, SliceLiteral};
use crate::types::diagnostic::{
self, CALL_NON_CALLABLE, CONFLICTING_DECLARATIONS, CONFLICTING_METACLASS,
@@ -1966,7 +1967,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
self.infer_type_parameters(type_params);
if let Some(arguments) = class.arguments.as_deref() {
let call_arguments = CallArguments::from_arguments(arguments);
let call_arguments = Self::parse_arguments(arguments);
let argument_forms = vec![Some(ParameterForm::Value); call_arguments.len()];
self.infer_argument_types(arguments, call_arguments, &argument_forms);
}
@@ -2255,12 +2256,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
ast::Stmt::Raise(raise) => self.infer_raise_statement(raise),
ast::Stmt::Return(ret) => self.infer_return_statement(ret),
ast::Stmt::Delete(delete) => self.infer_delete_statement(delete),
ast::Stmt::Nonlocal(nonlocal) => self.infer_nonlocal_statement(nonlocal),
ast::Stmt::Break(_)
| ast::Stmt::Continue(_)
| ast::Stmt::Pass(_)
| ast::Stmt::IpyEscapeCommand(_)
| ast::Stmt::Global(_) => {
| ast::Stmt::Global(_)
| ast::Stmt::Nonlocal(_) => {
// No-op
}
}
@@ -4660,69 +4661,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
}
/// Validate the names listed in a `nonlocal` statement.
///
/// For each name, walk outward through the enclosing scopes looking for a
/// function-like scope that defines it; if the chain ends (or is broken by a
/// `global` declaration in an enclosing function) without finding one, report an
/// invalid-syntax diagnostic ("no binding for nonlocal `<name>` found").
fn infer_nonlocal_statement(&mut self, nonlocal: &ast::StmtNonlocal) {
    let ast::StmtNonlocal {
        node_index: _,
        range,
        names,
    } = nonlocal;
    let db = self.db();
    let scope = self.scope();
    let file_scope_id = scope.file_scope_id(db);
    let current_file = self.file();
    'names: for name in names {
        // Walk up parent scopes looking for a possible enclosing scope that may have a
        // definition of this name visible to us. Note that we skip the scope containing the
        // use that we are resolving, since we already looked for the place there up above.
        for (enclosing_scope_file_id, _) in self.index.ancestor_scopes(file_scope_id).skip(1) {
            // Class scopes are not visible to nested scopes, and `nonlocal` cannot refer to
            // globals, so check only function-like scopes.
            let enclosing_scope_id = enclosing_scope_file_id.to_scope_id(db, current_file);
            if !enclosing_scope_id.is_function_like(db) {
                continue;
            }
            let enclosing_place_table = self.index.place_table(enclosing_scope_file_id);
            let Some(enclosing_place_id) = enclosing_place_table.place_id_by_name(name) else {
                // This scope doesn't define this name. Keep going.
                continue;
            };
            // We've found a definition for this name in an enclosing function-like scope.
            // Either this definition is the valid place this name refers to, or else we'll
            // emit a syntax error. Either way, we won't walk any more enclosing scopes. Note
            // that there are differences here compared to `infer_place_load`: A regular load
            // (e.g. `print(x)`) is allowed to refer to a global variable (e.g. `x = 1` in the
            // global scope), and similarly it's allowed to refer to a local variable in an
            // enclosing function that's declared `global` (e.g. `global x`). However, the
            // `nonlocal` keyword can't refer to global variables (that's a `SyntaxError`), and
            // it also can't refer to local variables in enclosing functions that are declared
            // `global` (also a `SyntaxError`).
            if self
                .index
                .symbol_is_global_in_scope(enclosing_place_id, enclosing_scope_file_id)
            {
                // A "chain" of `nonlocal` statements is "broken" by a `global` statement. Stop
                // looping and report that this `nonlocal` statement is invalid.
                break;
            }
            // We found a definition. We've checked that the name isn't `global` in this scope,
            // but it's ok if it's `nonlocal`. If a "chain" of `nonlocal` statements fails to
            // lead to a valid binding, the outermost one will be an error; we don't need to
            // walk the whole chain for each one.
            continue 'names;
        }
        // There's no matching binding in an enclosing scope. This `nonlocal` statement is
        // invalid.
        if let Some(builder) = self
            .context
            .report_diagnostic(DiagnosticId::InvalidSyntax, Severity::Error)
        {
            builder
                .into_diagnostic(format_args!("no binding for nonlocal `{name}` found"))
                .annotate(Annotation::primary(self.context.span(*range)));
        }
    }
}
fn module_type_from_name(&self, module_name: &ModuleName) -> Option<Type<'db>> {
resolve_module(self.db(), module_name)
.map(|module| Type::module_literal(self.db(), self.file(), &module))
@@ -4738,6 +4676,29 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
self.infer_expression(expression)
}
/// Classify each argument of a call in source order, producing the
/// `CallArguments` used for parameter matching.
///
/// `*expr` becomes `Variadic`, a plain positional expression becomes
/// `Positional`, `name=expr` becomes `Keyword(name)`, and `**expr` becomes
/// `Keywords`.
fn parse_arguments(arguments: &ast::Arguments) -> CallArguments<'_> {
    let classify = |arg_or_keyword| match arg_or_keyword {
        // `*args` unpacking.
        ast::ArgOrKeyword::Arg(ast::Expr::Starred(ast::ExprStarred { .. })) => Argument::Variadic,
        // TODO diagnostic if after a keyword argument
        ast::ArgOrKeyword::Arg(_) => Argument::Positional,
        // `name=value` keyword argument.
        ast::ArgOrKeyword::Keyword(ast::Keyword { arg: Some(name), .. }) => {
            Argument::Keyword(&name.id)
        }
        // `**kwargs` unpacking.
        // TODO diagnostic if not last
        ast::ArgOrKeyword::Keyword(ast::Keyword { arg: None, .. }) => Argument::Keywords,
    };
    arguments.arguments_source_order().map(classify).collect()
}
fn infer_argument_types<'a>(
&mut self,
ast_arguments: &ast::Arguments,
@@ -5451,7 +5412,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// We don't call `Type::try_call`, because we want to perform type inference on the
// arguments after matching them to parameters, but before checking that the argument types
// are assignable to any parameter annotations.
let call_arguments = CallArguments::from_arguments(arguments);
let call_arguments = Self::parse_arguments(arguments);
let callable_type = self.infer_maybe_standalone_expression(func);
@@ -6506,21 +6467,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
(unknown @ Type::Dynamic(DynamicType::Unknown), _, _)
| (_, unknown @ Type::Dynamic(DynamicType::Unknown), _) => Some(unknown),
(
todo @ Type::Dynamic(
DynamicType::Todo(_)
| DynamicType::TodoPEP695ParamSpec
| DynamicType::TodoTypeAlias,
),
todo @ Type::Dynamic(DynamicType::Todo(_) | DynamicType::TodoPEP695ParamSpec),
_,
_,
)
| (
_,
todo @ Type::Dynamic(
DynamicType::Todo(_)
| DynamicType::TodoPEP695ParamSpec
| DynamicType::TodoTypeAlias,
),
todo @ Type::Dynamic(DynamicType::Todo(_) | DynamicType::TodoPEP695ParamSpec),
_,
) => Some(todo),
(Type::Never, _, _) | (_, Type::Never, _) => Some(Type::Never),

View File

@@ -213,7 +213,7 @@ impl<'a, 'db> IntoIterator for &'a CallableSignature<'db> {
}
/// The signature of one of the overloads of a callable.
#[derive(Clone, Debug, salsa::Update, get_size2::GetSize)]
#[derive(Clone, Debug, PartialEq, Eq, Hash, salsa::Update, get_size2::GetSize)]
pub struct Signature<'db> {
/// The generic context for this overload, if it is generic.
pub(crate) generic_context: Option<GenericContext<'db>>,
@@ -223,10 +223,6 @@ pub struct Signature<'db> {
/// to its own generic context.
pub(crate) inherited_generic_context: Option<GenericContext<'db>>,
/// The original definition associated with this function, if available.
/// This is useful for locating and extracting docstring information for the signature.
pub(crate) definition: Option<Definition<'db>>,
/// Parameters, in source order.
///
/// The ordering of parameters in a valid signature must be: first positional-only parameters,
@@ -269,7 +265,6 @@ impl<'db> Signature<'db> {
Self {
generic_context: None,
inherited_generic_context: None,
definition: None,
parameters,
return_ty,
}
@@ -283,7 +278,6 @@ impl<'db> Signature<'db> {
Self {
generic_context,
inherited_generic_context: None,
definition: None,
parameters,
return_ty,
}
@@ -294,7 +288,6 @@ impl<'db> Signature<'db> {
Signature {
generic_context: None,
inherited_generic_context: None,
definition: None,
parameters: Parameters::gradual_form(),
return_ty: Some(signature_type),
}
@@ -307,7 +300,6 @@ impl<'db> Signature<'db> {
Signature {
generic_context: None,
inherited_generic_context: None,
definition: None,
parameters: Parameters::todo(),
return_ty: Some(signature_type),
}
@@ -340,7 +332,6 @@ impl<'db> Signature<'db> {
Self {
generic_context: generic_context.or(legacy_generic_context),
inherited_generic_context,
definition: Some(definition),
parameters,
return_ty,
}
@@ -360,7 +351,6 @@ impl<'db> Signature<'db> {
Self {
generic_context: self.generic_context,
inherited_generic_context: self.inherited_generic_context,
definition: self.definition,
// Parameters are at contravariant position, so the variance is flipped.
parameters: self.parameters.materialize(db, variance.flip()),
return_ty: Some(
@@ -383,7 +373,6 @@ impl<'db> Signature<'db> {
inherited_generic_context: self
.inherited_generic_context
.map(|ctx| ctx.normalized_impl(db, visitor)),
definition: self.definition,
parameters: self
.parameters
.iter()
@@ -403,7 +392,6 @@ impl<'db> Signature<'db> {
Self {
generic_context: self.generic_context,
inherited_generic_context: self.inherited_generic_context,
definition: self.definition,
parameters: self.parameters.apply_type_mapping(db, type_mapping),
return_ty: self
.return_ty
@@ -434,16 +422,10 @@ impl<'db> Signature<'db> {
&self.parameters
}
/// Return the definition associated with this signature, if any.
///
/// This is `None` for synthesized signatures (the `new`/`todo`/gradual-form
/// constructors all set `definition: None`) and `Some` when the signature was
/// built from an actual function definition; it is used for locating docstring
/// information rather than for type equality.
pub(crate) fn definition(&self) -> Option<Definition<'db>> {
self.definition
}
pub(crate) fn bind_self(&self) -> Self {
Self {
generic_context: self.generic_context,
inherited_generic_context: self.inherited_generic_context,
definition: self.definition,
parameters: Parameters::new(self.parameters().iter().skip(1).cloned()),
return_ty: self.return_ty,
}
@@ -917,33 +899,6 @@ impl<'db> Signature<'db> {
true
}
/// Return this signature with its `definition` replaced by the given one,
/// leaving every other field untouched.
pub(crate) fn with_definition(mut self, definition: Option<Definition<'db>>) -> Self {
    self.definition = definition;
    self
}
}
// Hand-rolled `PartialEq`/`Eq`/`Hash` for `Signature` instead of derives: the
// `definition` field is deliberately excluded, since where a signature was
// defined is not relevant for type equality/equivalence.
impl PartialEq for Signature<'_> {
    fn eq(&self, other: &Self) -> bool {
        // Tuple comparison checks the same fields in the same short-circuit order.
        (
            &self.generic_context,
            &self.inherited_generic_context,
            &self.parameters,
            &self.return_ty,
        ) == (
            &other.generic_context,
            &other.inherited_generic_context,
            &other.parameters,
            &other.return_ty,
        )
    }
}

impl Eq for Signature<'_> {}

impl std::hash::Hash for Signature<'_> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Must hash exactly the fields compared in `PartialEq`, in the same
        // order; hashing the tuple feeds each field to the hasher in sequence.
        (
            &self.generic_context,
            &self.inherited_generic_context,
            &self.parameters,
            &self.return_ty,
        )
            .hash(state);
    }
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, salsa::Update, get_size2::GetSize)]

View File

@@ -250,9 +250,6 @@ fn dynamic_elements_ordering(left: DynamicType, right: DynamicType) -> Ordering
(DynamicType::TodoPEP695ParamSpec, _) => Ordering::Less,
(_, DynamicType::TodoPEP695ParamSpec) => Ordering::Greater,
(DynamicType::TodoTypeAlias, _) => Ordering::Less,
(_, DynamicType::TodoTypeAlias) => Ordering::Greater,
}
}

View File

@@ -31,7 +31,6 @@ salsa = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
shellexpand = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, features = ["chrono"] }

View File

@@ -1,7 +1,7 @@
use crate::server::{ConnectionInitializer, Server};
use anyhow::Context;
pub use document::{NotebookDocument, PositionEncoding, TextDocument};
pub(crate) use session::{DocumentQuery, Session};
pub use session::{DocumentQuery, DocumentSnapshot, Session};
use std::num::NonZeroUsize;
mod document;

View File

@@ -2,14 +2,14 @@
use self::schedule::spawn_main_loop;
use crate::PositionEncoding;
use crate::session::{AllOptions, ClientOptions, DiagnosticMode, Session};
use crate::session::{AllOptions, ClientOptions, Session};
use lsp_server::Connection;
use lsp_types::{
ClientCapabilities, DiagnosticOptions, DiagnosticServerCapabilities, HoverProviderCapability,
InlayHintOptions, InlayHintServerCapabilities, MessageType, SemanticTokensLegend,
SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
TextDocumentSyncOptions, TypeDefinitionProviderCapability, Url, WorkDoneProgressOptions,
TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
TypeDefinitionProviderCapability, Url, WorkDoneProgressOptions,
};
use std::num::NonZeroUsize;
use std::panic::PanicHookInfo;
@@ -54,8 +54,7 @@ impl Server {
let client_capabilities = init_params.capabilities;
let position_encoding = Self::find_best_position_encoding(&client_capabilities);
let server_capabilities =
Self::server_capabilities(position_encoding, global_options.diagnostic_mode());
let server_capabilities = Self::server_capabilities(position_encoding);
let connection = connection.initialize_finish(
id,
@@ -169,17 +168,13 @@ impl Server {
.unwrap_or_default()
}
fn server_capabilities(
position_encoding: PositionEncoding,
diagnostic_mode: DiagnosticMode,
) -> ServerCapabilities {
fn server_capabilities(position_encoding: PositionEncoding) -> ServerCapabilities {
ServerCapabilities {
position_encoding: Some(position_encoding.into()),
diagnostic_provider: Some(DiagnosticServerCapabilities::Options(DiagnosticOptions {
identifier: Some(crate::DIAGNOSTIC_NAME.into()),
inter_file_dependencies: true,
// TODO: Dynamically register for workspace diagnostics.
workspace_diagnostics: diagnostic_mode.is_workspace(),
workspace_diagnostics: true,
..Default::default()
})),
text_document_sync: Some(TextDocumentSyncCapability::Options(
@@ -191,11 +186,6 @@ impl Server {
)),
type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)),
hover_provider: Some(HoverProviderCapability::Simple(true)),
signature_help_provider: Some(SignatureHelpOptions {
trigger_characters: Some(vec!["(".to_string(), ",".to_string()]),
retrigger_characters: Some(vec![")".to_string()]),
work_done_progress_options: lsp_types::WorkDoneProgressOptions::default(),
}),
inlay_hint_provider: Some(lsp_types::OneOf::Right(
InlayHintServerCapabilities::Options(InlayHintOptions::default()),
)),

View File

@@ -58,9 +58,6 @@ pub(super) fn request(req: server::Request) -> Task {
>(
req, BackgroundSchedule::Worker
),
requests::SignatureHelpRequestHandler::METHOD => background_document_request_task::<
requests::SignatureHelpRequestHandler,
>(req, BackgroundSchedule::Worker),
requests::CompletionRequestHandler::METHOD => background_document_request_task::<
requests::CompletionRequestHandler,
>(
@@ -218,22 +215,8 @@ where
let url = R::document_url(&params).into_owned();
let Ok(path) = AnySystemPath::try_from_url(&url) else {
let reason = format!("URL `{url}` isn't a valid system path");
tracing::warn!(
"Ignoring request id={id} method={} because {reason}",
R::METHOD
);
return Box::new(|client| {
respond_silent_error(
id,
client,
lsp_server::ResponseError {
code: lsp_server::ErrorCode::InvalidParams as i32,
message: reason,
data: None,
},
);
});
tracing::warn!("Ignoring request for invalid `{url}`");
return Box::new(|_| {});
};
let db = match &path {
@@ -244,7 +227,10 @@ where
AnySystemPath::SystemVirtual(_) => session.default_project_db().clone(),
};
let snapshot = session.take_document_snapshot(url);
let Some(snapshot) = session.take_document_snapshot(url) else {
tracing::warn!("Ignoring request because snapshot for path `{path:?}` doesn't exist");
return Box::new(|_| {});
};
Box::new(move |client| {
let _span = tracing::debug_span!("request", %id, method = R::METHOD).entered();
@@ -342,7 +328,12 @@ where
let (id, params) = cast_notification::<N>(req)?;
Ok(Task::background(schedule, move |session: &Session| {
let url = N::document_url(&params);
let snapshot = session.take_document_snapshot((*url).clone());
let Some(snapshot) = session.take_document_snapshot((*url).clone()) else {
tracing::debug!(
"Ignoring notification because snapshot for url `{url}` doesn't exist."
);
return Box::new(|_| {});
};
Box::new(move |client| {
let _span = tracing::debug_span!("notification", method = N::METHOD).entered();

Some files were not shown because too many files have changed in this diff Show More